././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1744763843.259803 dill-0.4.0/0000755000076500000240000000000014777575703012227 5ustar00mmckernsstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1674962506.0 dill-0.4.0/.codecov.yml0000644000076500000240000000147614365363112014440 0ustar00mmckernsstaffcomment: false coverage: status: project: default: # Commits pushed to master should not make the overall # project coverage decrease by more than 1%: target: auto threshold: 1% patch: default: # Be tolerant on slight code coverage diff on PRs to limit # noisy red coverage status on github PRs. # Note The coverage stats are still uploaded # to codecov so that PR reviewers can see uncovered lines # in the github diff if they install the codecov browser # extension: # https://github.com/codecov/browser-extension target: auto threshold: 1% fixes: # reduces pip-installed path to git root and # remove dist-name from setup-installed path - "*/site-packages/::" - "*/site-packages/dill-*::" ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1674962506.0 dill-0.4.0/.coveragerc0000644000076500000240000000104314365363112014324 0ustar00mmckernsstaff[run] # source = dill include = */dill/* omit = */tests/* */info.py branch = true # timid = true # parallel = true # and need to 'combine' data files # concurrency = multiprocessing # thread # data_file = $TRAVIS_BUILD_DIR/.coverage # debug = trace [paths] source = dill */site-packages/dill */site-packages/dill-*/dill [report] include = */dill/* exclude_lines = pragma: no cover raise NotImplementedError if __name__ == .__main__.: # show_missing = true ignore_errors = true # pragma: no branch # noqa ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1674962506.0 dill-0.4.0/.gitignore0000644000076500000240000000011514365363112014172 0ustar00mmckernsstaff.tox/ .cache/ *.egg-info/ *.pyc /docs/build /build /README 
/dill/__info__.py ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1699846651.0 dill-0.4.0/.readthedocs.yml0000644000076500000240000000051514524314773015302 0ustar00mmckernsstaff# readthedocs configuration file # see https://docs.readthedocs.io/en/stable/config-file/v2.html version: 2 # configure sphinx: configuration: docs/source/conf.py # build build: os: ubuntu-22.04 tools: python: "3.10" # install python: install: - method: pip path: . - requirements: docs/requirements.txt ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1741749904.0 dill-0.4.0/.travis.yml0000644000076500000240000000306514764177220014330 0ustar00mmckernsstaffdist: jammy language: python matrix: include: - python: '3.8' env: - python: '3.9' dist: focal env: - COVERAGE="true" - NUMPY="true" - python: '3.10' env: - python: '3.11' env: - python: '3.12' env: - python: '3.13' env: - python: '3.14-dev' env: - python: 'pypy3.8-7.3.9' # at 7.3.11 env: - python: 'pypy3.9-7.3.9' # at 7.3.16 env: - python: 'pypy3.10-7.3.19' env: - python: 'pypy3.11-7.3.19' env: allow_failures: - python: '3.14-dev' # CI missing - python: 'pypy3.10-7.3.19' # CI missing - python: 'pypy3.11-7.3.19' # CI missing fast_finish: true cache: pip: true before_install: - set -e # fail on any error - if [[ $COVERAGE == "true" ]]; then pip install coverage; fi - if [[ $NUMPY == "true" ]]; then pip install numpy; fi install: - python -m pip install . 
script: - for test in dill/tests/__init__.py; do echo $test ; if [[ $COVERAGE == "true" ]]; then coverage run -a $test > /dev/null; else python $test > /dev/null; fi ; done - for test in dill/tests/test_*.py; do echo $test ; if [[ $COVERAGE == "true" ]]; then coverage run -a $test > /dev/null; else python $test > /dev/null; fi ; done after_success: - if [[ $COVERAGE == "true" ]]; then bash <(curl -s https://codecov.io/bash); else echo ''; fi - if [[ $COVERAGE == "true" ]]; then coverage report; fi ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702238.0 dill-0.4.0/LICENSE0000644000076500000240000000337614735133336013227 0ustar00mmckernsstaffCopyright (c) 2004-2016 California Institute of Technology. Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. All rights reserved. This software is available subject to the conditions and terms laid out below. By downloading and using this software you are agreeing to the following conditions. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the names of the copyright holders nor the names of any of the contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1674962506.0 dill-0.4.0/MANIFEST.in0000644000076500000240000000031314365363112013740 0ustar00mmckernsstaffinclude LICENSE include README* include MANIFEST.in include pyproject.toml include tox.ini include version.py include scripts/* recursive-include docs * include .* prune .git prune .coverage prune .eggs ././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1744763843.259364 dill-0.4.0/PKG-INFO0000644000076500000240000002367214777575703013336 0ustar00mmckernsstaffMetadata-Version: 2.1 Name: dill Version: 0.4.0 Summary: serialize all of Python Home-page: https://github.com/uqfoundation/dill Download-URL: https://pypi.org/project/dill/#files Author: Mike McKerns Author-email: mmckerns@uqfoundation.org Maintainer: Mike McKerns Maintainer-email: mmckerns@uqfoundation.org License: BSD-3-Clause Project-URL: Documentation, http://dill.rtfd.io Project-URL: Source Code, https://github.com/uqfoundation/dill Project-URL: Bug Tracker, https://github.com/uqfoundation/dill/issues Platform: Linux Platform: Windows Platform: Mac Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers Classifier: Intended Audience :: Science/Research Classifier: License :: OSI Approved :: BSD License Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.8 Classifier: Programming Language :: Python :: 
3.9 Classifier: Programming Language :: Python :: 3.10 Classifier: Programming Language :: Python :: 3.11 Classifier: Programming Language :: Python :: 3.12 Classifier: Programming Language :: Python :: 3.13 Classifier: Programming Language :: Python :: Implementation :: CPython Classifier: Programming Language :: Python :: Implementation :: PyPy Classifier: Topic :: Scientific/Engineering Classifier: Topic :: Software Development Requires-Python: >=3.8 License-File: LICENSE Provides-Extra: readline Provides-Extra: graph Requires-Dist: objgraph>=1.7.2; extra == "graph" Provides-Extra: profile Requires-Dist: gprof2dot>=2022.7.29; extra == "profile" ----------------------------- dill: serialize all of Python ----------------------------- About Dill ========== ``dill`` extends Python's ``pickle`` module for serializing and de-serializing Python objects to the majority of the built-in Python types. Serialization is the process of converting an object to a byte stream, and the inverse of which is converting a byte stream back to a Python object hierarchy. ``dill`` provides the user the same interface as the ``pickle`` module, and also includes some additional features. In addition to pickling Python objects, ``dill`` provides the ability to save the state of an interpreter session in a single command. Hence, it would be feasible to save an interpreter session, close the interpreter, ship the pickled file to another computer, open a new interpreter, unpickle the session and thus continue from the 'saved' state of the original interpreter session. ``dill`` can be used to store Python objects to a file, but the primary usage is to send Python objects across the network as a byte stream. ``dill`` is quite flexible, and allows arbitrary user defined classes and functions to be serialized. Thus ``dill`` is not intended to be secure against erroneously or maliciously constructed data. It is left to the user to decide whether the data they unpickle is from a trustworthy source. 
``dill`` is part of ``pathos``, a Python framework for heterogeneous computing. ``dill`` is in active development, so any user feedback, bug reports, comments, or suggestions are highly appreciated. A list of issues is located at https://github.com/uqfoundation/dill/issues, with a legacy list maintained at https://uqfoundation.github.io/project/pathos/query. Major Features ============== ``dill`` can pickle the following standard types: - none, type, bool, int, float, complex, bytes, str, - tuple, list, dict, file, buffer, builtin, - Python classes, namedtuples, dataclasses, metaclasses, - instances of classes, - set, frozenset, array, functions, exceptions ``dill`` can also pickle more 'exotic' standard types: - functions with yields, nested functions, lambdas, - cell, method, unboundmethod, module, code, methodwrapper, - methoddescriptor, getsetdescriptor, memberdescriptor, wrapperdescriptor, - dictproxy, slice, notimplemented, ellipsis, quit ``dill`` cannot yet pickle these standard types: - frame, generator, traceback ``dill`` also provides the capability to: - save and load Python interpreter sessions - save and extract the source code from functions and classes - interactively diagnose pickling errors Current Release =============== The latest released version of ``dill`` is available from: https://pypi.org/project/dill ``dill`` is distributed under a 3-clause BSD license. Development Version =================== You can get the latest development version with all the shiny new features at: https://github.com/uqfoundation If you have a new contribution, please submit a pull request. 
Installation ============ ``dill`` can be installed with ``pip``:: $ pip install dill To optionally include the ``objgraph`` diagnostic tool in the install:: $ pip install dill[graph] To optionally include the ``gprof2dot`` diagnostic tool in the install:: $ pip install dill[profile] For windows users, to optionally install session history tools:: $ pip install dill[readline] Requirements ============ ``dill`` requires: - ``python`` (or ``pypy``), **>=3.8** - ``setuptools``, **>=42** Optional requirements: - ``objgraph``, **>=1.7.2** - ``gprof2dot``, **>=2022.7.29** - ``pyreadline``, **>=1.7.1** (on windows) Basic Usage =========== ``dill`` is a drop-in replacement for ``pickle``. Existing code can be updated to allow complete pickling using:: >>> import dill as pickle or:: >>> from dill import dumps, loads ``dumps`` converts the object to a unique byte string, and ``loads`` performs the inverse operation:: >>> squared = lambda x: x**2 >>> loads(dumps(squared))(3) 9 There are a number of options to control serialization which are provided as keyword arguments to several ``dill`` functions: * with *protocol*, the pickle protocol level can be set. This uses the same value as the ``pickle`` module, *DEFAULT_PROTOCOL*. * with *byref=True*, ``dill`` to behave a lot more like pickle with certain objects (like modules) pickled by reference as opposed to attempting to pickle the object itself. * with *recurse=True*, objects referred to in the global dictionary are recursively traced and pickled, instead of the default behavior of attempting to store the entire global dictionary. * with *fmode*, the contents of the file can be pickled along with the file handle, which is useful if the object is being sent over the wire to a remote system which does not have the original file on disk. Options are *HANDLE_FMODE* for just the handle, *CONTENTS_FMODE* for the file content and *FILE_FMODE* for content and handle. 
* with *ignore=False*, objects reconstructed with types defined in the top-level script environment use the existing type in the environment rather than a possibly different reconstructed type. The default serialization can also be set globally in *dill.settings*. Thus, we can modify how ``dill`` handles references to the global dictionary locally or globally:: >>> import dill.settings >>> dumps(absolute) == dumps(absolute, recurse=True) False >>> dill.settings['recurse'] = True >>> dumps(absolute) == dumps(absolute, recurse=True) True ``dill`` also includes source code inspection, as an alternate to pickling:: >>> import dill.source >>> print(dill.source.getsource(squared)) squared = lambda x:x**2 To aid in debugging pickling issues, use *dill.detect* which provides tools like pickle tracing:: >>> import dill.detect >>> with dill.detect.trace(): >>> dumps(squared) ┬ F1: at 0x7fe074f8c280> ├┬ F2: │└ # F2 [34 B] ├┬ Co: at 0x7fe07501eb30, file "", line 1> │├┬ F2: ││└ # F2 [19 B] │└ # Co [87 B] ├┬ D1: │└ # D1 [22 B] ├┬ D2: │└ # D2 [2 B] ├┬ D2: │├┬ D2: ││└ # D2 [2 B] │└ # D2 [23 B] └ # F1 [180 B] With trace, we see how ``dill`` stored the lambda (``F1``) by first storing ``_create_function``, the underlying code object (``Co``) and ``_create_code`` (which is used to handle code objects), then we handle the reference to the global dict (``D2``) plus other dictionaries (``D1`` and ``D2``) that save the lambda object's state. A ``#`` marks when the object is actually stored. More Information ================ Probably the best way to get started is to look at the documentation at http://dill.rtfd.io. Also see ``dill.tests`` for a set of scripts that demonstrate how ``dill`` can serialize different Python objects. You can run the test suite with ``python -m dill.tests``. The contents of any pickle file can be examined with ``undill``. 
As ``dill`` conforms to the ``pickle`` interface, the examples and documentation found at http://docs.python.org/library/pickle.html also apply to ``dill`` if one will ``import dill as pickle``. The source code is also generally well documented, so further questions may be resolved by inspecting the code itself. Please feel free to submit a ticket on github, or ask a question on stackoverflow (**@Mike McKerns**). If you would like to share how you use ``dill`` in your work, please send an email (to **mmckerns at uqfoundation dot org**). Citation ======== If you use ``dill`` to do research that leads to publication, we ask that you acknowledge use of ``dill`` by citing the following in your publication:: M.M. McKerns, L. Strand, T. Sullivan, A. Fang, M.A.G. Aivazis, "Building a framework for predictive science", Proceedings of the 10th Python in Science Conference, 2011; http://arxiv.org/pdf/1202.1056 Michael McKerns and Michael Aivazis, "pathos: a framework for heterogeneous computing", 2010- ; https://uqfoundation.github.io/project/pathos Please see https://uqfoundation.github.io/project/pathos or http://arxiv.org/pdf/1202.1056 for further information. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1727743522.0 dill-0.4.0/README.md0000644000076500000240000002247714676643042013510 0ustar00mmckernsstaffdill ==== serialize all of Python About Dill ---------- ``dill`` extends Python's ``pickle`` module for serializing and de-serializing Python objects to the majority of the built-in Python types. Serialization is the process of converting an object to a byte stream, and the inverse of which is converting a byte stream back to a Python object hierarchy. ``dill`` provides the user the same interface as the ``pickle`` module, and also includes some additional features. In addition to pickling Python objects, ``dill`` provides the ability to save the state of an interpreter session in a single command. 
Hence, it would be feasible to save an interpreter session, close the interpreter, ship the pickled file to another computer, open a new interpreter, unpickle the session and thus continue from the 'saved' state of the original interpreter session. ``dill`` can be used to store Python objects to a file, but the primary usage is to send Python objects across the network as a byte stream. ``dill`` is quite flexible, and allows arbitrary user defined classes and functions to be serialized. Thus ``dill`` is not intended to be secure against erroneously or maliciously constructed data. It is left to the user to decide whether the data they unpickle is from a trustworthy source. ``dill`` is part of ``pathos``, a Python framework for heterogeneous computing. ``dill`` is in active development, so any user feedback, bug reports, comments, or suggestions are highly appreciated. A list of issues is located at https://github.com/uqfoundation/dill/issues, with a legacy list maintained at https://uqfoundation.github.io/project/pathos/query. 
Major Features -------------- ``dill`` can pickle the following standard types: * none, type, bool, int, float, complex, bytes, str, * tuple, list, dict, file, buffer, builtin, * Python classes, namedtuples, dataclasses, metaclasses, * instances of classes, * set, frozenset, array, functions, exceptions ``dill`` can also pickle more 'exotic' standard types: * functions with yields, nested functions, lambdas, * cell, method, unboundmethod, module, code, methodwrapper, * methoddescriptor, getsetdescriptor, memberdescriptor, wrapperdescriptor, * dictproxy, slice, notimplemented, ellipsis, quit ``dill`` cannot yet pickle these standard types: * frame, generator, traceback ``dill`` also provides the capability to: * save and load Python interpreter sessions * save and extract the source code from functions and classes * interactively diagnose pickling errors Current Release [![Downloads](https://static.pepy.tech/personalized-badge/dill?period=total&units=international_system&left_color=grey&right_color=blue&left_text=pypi%20downloads)](https://pepy.tech/project/dill) [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/dill?color=blue&label=conda%20downloads)](https://anaconda.org/conda-forge/dill) [![Stack Overflow](https://img.shields.io/badge/stackoverflow-get%20help-black.svg)](https://stackoverflow.com/questions/tagged/dill) --------------- The latest released version of ``dill`` is available from: https://pypi.org/project/dill ``dill`` is distributed under a 3-clause BSD license. 
Development Version [![Support](https://img.shields.io/badge/support-the%20UQ%20Foundation-purple.svg?style=flat&colorA=grey&colorB=purple)](http://www.uqfoundation.org/pages/donate.html) [![Documentation Status](https://readthedocs.org/projects/dill/badge/?version=latest)](https://dill.readthedocs.io/en/latest/?badge=latest) [![Build Status](https://app.travis-ci.com/uqfoundation/dill.svg?label=build&logo=travis&branch=master)](https://app.travis-ci.com/github/uqfoundation/dill) [![codecov](https://codecov.io/gh/uqfoundation/dill/branch/master/graph/badge.svg)](https://codecov.io/gh/uqfoundation/dill) ------------------- You can get the latest development version with all the shiny new features at: https://github.com/uqfoundation If you have a new contribution, please submit a pull request. Installation ------------ ``dill`` can be installed with ``pip``:: $ pip install dill To optionally include the ``objgraph`` diagnostic tool in the install:: $ pip install dill[graph] To optionally include the ``gprof2dot`` diagnostic tool in the install:: $ pip install dill[profile] For windows users, to optionally install session history tools:: $ pip install dill[readline] Requirements ------------ ``dill`` requires: * ``python`` (or ``pypy``), **>=3.8** * ``setuptools``, **>=42** Optional requirements: * ``objgraph``, **>=1.7.2** * ``gprof2dot``, **>=2022.7.29** * ``pyreadline``, **>=1.7.1** (on windows) Basic Usage ----------- ``dill`` is a drop-in replacement for ``pickle``. Existing code can be updated to allow complete pickling using:: >>> import dill as pickle or:: >>> from dill import dumps, loads ``dumps`` converts the object to a unique byte string, and ``loads`` performs the inverse operation:: >>> squared = lambda x: x**2 >>> loads(dumps(squared))(3) 9 There are a number of options to control serialization which are provided as keyword arguments to several ``dill`` functions: * with *protocol*, the pickle protocol level can be set. 
This uses the same value as the ``pickle`` module, *DEFAULT_PROTOCOL*. * with *byref=True*, ``dill`` to behave a lot more like pickle with certain objects (like modules) pickled by reference as opposed to attempting to pickle the object itself. * with *recurse=True*, objects referred to in the global dictionary are recursively traced and pickled, instead of the default behavior of attempting to store the entire global dictionary. * with *fmode*, the contents of the file can be pickled along with the file handle, which is useful if the object is being sent over the wire to a remote system which does not have the original file on disk. Options are *HANDLE_FMODE* for just the handle, *CONTENTS_FMODE* for the file content and *FILE_FMODE* for content and handle. * with *ignore=False*, objects reconstructed with types defined in the top-level script environment use the existing type in the environment rather than a possibly different reconstructed type. The default serialization can also be set globally in *dill.settings*. 
Thus, we can modify how ``dill`` handles references to the global dictionary locally or globally:: >>> import dill.settings >>> dumps(absolute) == dumps(absolute, recurse=True) False >>> dill.settings['recurse'] = True >>> dumps(absolute) == dumps(absolute, recurse=True) True ``dill`` also includes source code inspection, as an alternate to pickling:: >>> import dill.source >>> print(dill.source.getsource(squared)) squared = lambda x:x**2 To aid in debugging pickling issues, use *dill.detect* which provides tools like pickle tracing:: >>> import dill.detect >>> with dill.detect.trace(): >>> dumps(squared) ┬ F1: at 0x7fe074f8c280> ├┬ F2: │└ # F2 [34 B] ├┬ Co: at 0x7fe07501eb30, file "", line 1> │├┬ F2: ││└ # F2 [19 B] │└ # Co [87 B] ├┬ D1: │└ # D1 [22 B] ├┬ D2: │└ # D2 [2 B] ├┬ D2: │├┬ D2: ││└ # D2 [2 B] │└ # D2 [23 B] └ # F1 [180 B] With trace, we see how ``dill`` stored the lambda (``F1``) by first storing ``_create_function``, the underlying code object (``Co``) and ``_create_code`` (which is used to handle code objects), then we handle the reference to the global dict (``D2``) plus other dictionaries (``D1`` and ``D2``) that save the lambda object's state. A ``#`` marks when the object is actually stored. More Information ---------------- Probably the best way to get started is to look at the documentation at http://dill.rtfd.io. Also see ``dill.tests`` for a set of scripts that demonstrate how ``dill`` can serialize different Python objects. You can run the test suite with ``python -m dill.tests``. The contents of any pickle file can be examined with ``undill``. As ``dill`` conforms to the ``pickle`` interface, the examples and documentation found at http://docs.python.org/library/pickle.html also apply to ``dill`` if one will ``import dill as pickle``. The source code is also generally well documented, so further questions may be resolved by inspecting the code itself. 
Please feel free to submit a ticket on github, or ask a question on stackoverflow (**@Mike McKerns**). If you would like to share how you use ``dill`` in your work, please send an email (to **mmckerns at uqfoundation dot org**). Citation -------- If you use ``dill`` to do research that leads to publication, we ask that you acknowledge use of ``dill`` by citing the following in your publication:: M.M. McKerns, L. Strand, T. Sullivan, A. Fang, M.A.G. Aivazis, "Building a framework for predictive science", Proceedings of the 10th Python in Science Conference, 2011; http://arxiv.org/pdf/1202.1056 Michael McKerns and Michael Aivazis, "pathos: a framework for heterogeneous computing", 2010- ; https://uqfoundation.github.io/project/pathos Please see https://uqfoundation.github.io/project/pathos or http://arxiv.org/pdf/1202.1056 for further information. ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1744763843.1822062 dill-0.4.0/dill/0000755000076500000240000000000014777575703013153 5ustar00mmckernsstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702238.0 dill-0.4.0/dill/__diff.py0000644000076500000240000001575214735133336014727 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2008-2016 California Institute of Technology. # Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. 
The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE """ Module to show if an object has changed since it was memorised """ import builtins import os import sys import types try: import numpy.ma HAS_NUMPY = True except ImportError: HAS_NUMPY = False # pypy doesn't use reference counting getrefcount = getattr(sys, 'getrefcount', lambda x:0) # memo of objects indexed by id to a tuple (attributes, sequence items) # attributes is a dict indexed by attribute name to attribute id # sequence items is either a list of ids, of a dictionary of keys to ids memo = {} id_to_obj = {} # types that cannot have changing attributes builtins_types = set((str, list, dict, set, frozenset, int)) dont_memo = set(id(i) for i in (memo, sys.modules, sys.path_importer_cache, os.environ, id_to_obj)) def get_attrs(obj): """ Gets all the attributes of an object though its __dict__ or return None """ if type(obj) in builtins_types \ or type(obj) is type and obj in builtins_types: return return getattr(obj, '__dict__', None) def get_seq(obj, cache={str: False, frozenset: False, list: True, set: True, dict: True, tuple: True, type: False, types.ModuleType: False, types.FunctionType: False, types.BuiltinFunctionType: False}): """ Gets all the items in a sequence or return None """ try: o_type = obj.__class__ except AttributeError: o_type = type(obj) hsattr = hasattr if o_type in cache: if cache[o_type]: if hsattr(obj, "copy"): return obj.copy() return obj elif HAS_NUMPY and o_type in (numpy.ndarray, numpy.ma.core.MaskedConstant): if obj.shape and obj.size: return obj else: return [] elif hsattr(obj, "__contains__") and hsattr(obj, "__iter__") \ and hsattr(obj, "__len__") and hsattr(o_type, "__contains__") \ and hsattr(o_type, "__iter__") and hsattr(o_type, "__len__"): cache[o_type] = True if hsattr(obj, "copy"): return obj.copy() return obj else: cache[o_type] = False return None def memorise(obj, force=False): """ Adds an object to the memo, and 
recursively adds all the objects attributes, and if it is a container, its items. Use force=True to update an object already in the memo. Updating is not recursively done. """ obj_id = id(obj) if obj_id in memo and not force or obj_id in dont_memo: return id_ = id g = get_attrs(obj) if g is None: attrs_id = None else: attrs_id = dict((key,id_(value)) for key, value in g.items()) s = get_seq(obj) if s is None: seq_id = None elif hasattr(s, "items"): seq_id = dict((id_(key),id_(value)) for key, value in s.items()) elif not hasattr(s, "__len__"): #XXX: avoid TypeError from unexpected case seq_id = None else: seq_id = [id_(i) for i in s] memo[obj_id] = attrs_id, seq_id id_to_obj[obj_id] = obj mem = memorise if g is not None: [mem(value) for key, value in g.items()] if s is not None: if hasattr(s, "items"): [(mem(key), mem(item)) for key, item in s.items()] else: if hasattr(s, '__len__'): [mem(item) for item in s] else: mem(s) def release_gone(): itop, mp, src = id_to_obj.pop, memo.pop, getrefcount [(itop(id_), mp(id_)) for id_, obj in list(id_to_obj.items()) if src(obj) < 4] #XXX: correct for pypy? def whats_changed(obj, seen=None, simple=False, first=True): """ Check an object against the memo. Returns a list in the form (attribute changes, container changed). Attribute changes is a dict of attribute name to attribute value. container changed is a boolean. If simple is true, just returns a boolean. 
None for either item means that it has not been checked yet """ # Special cases if first: # ignore the _ variable, which only appears in interactive sessions if "_" in builtins.__dict__: del builtins._ if seen is None: seen = {} obj_id = id(obj) if obj_id in seen: if simple: return any(seen[obj_id]) return seen[obj_id] # Safety checks if obj_id in dont_memo: seen[obj_id] = [{}, False] if simple: return False return seen[obj_id] elif obj_id not in memo: if simple: return True else: raise RuntimeError("Object not memorised " + str(obj)) seen[obj_id] = ({}, False) chngd = whats_changed id_ = id # compare attributes attrs = get_attrs(obj) if attrs is None: changed = {} else: obj_attrs = memo[obj_id][0] obj_get = obj_attrs.get changed = dict((key,None) for key in obj_attrs if key not in attrs) for key, o in attrs.items(): if id_(o) != obj_get(key, None) or chngd(o, seen, True, False): changed[key] = o # compare sequence items = get_seq(obj) seq_diff = False if (items is not None) and (hasattr(items, '__len__')): obj_seq = memo[obj_id][1] if (len(items) != len(obj_seq)): seq_diff = True elif hasattr(obj, "items"): # dict type obj obj_get = obj_seq.get for key, item in items.items(): if id_(item) != obj_get(id_(key)) \ or chngd(key, seen, True, False) \ or chngd(item, seen, True, False): seq_diff = True break else: for i, j in zip(items, obj_seq): # list type obj if id_(i) != j or chngd(i, seen, True, False): seq_diff = True break seen[obj_id] = changed, seq_diff if simple: return changed or seq_diff return changed, seq_diff def has_changed(*args, **kwds): kwds['simple'] = True # ignore simple if passed in return whats_changed(*args, **kwds) __import__ = __import__ def _imp(*args, **kwds): """ Replaces the default __import__, to allow a module to be memorised before the user can change it """ before = set(sys.modules.keys()) mod = __import__(*args, **kwds) after = set(sys.modules.keys()).difference(before) for m in after: memorise(sys.modules[m]) return mod 
builtins.__import__ = _imp if hasattr(builtins, "_"): del builtins._ # memorise all already imported modules. This implies that this must be # imported first for any changes to be recorded for mod in list(sys.modules.values()): memorise(mod) release_gone() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744763842.0 dill-0.4.0/dill/__info__.py0000644000076500000240000002500414777575702015254 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE ''' ----------------------------- dill: serialize all of Python ----------------------------- About Dill ========== ``dill`` extends Python's ``pickle`` module for serializing and de-serializing Python objects to the majority of the built-in Python types. Serialization is the process of converting an object to a byte stream, and the inverse of which is converting a byte stream back to a Python object hierarchy. ``dill`` provides the user the same interface as the ``pickle`` module, and also includes some additional features. In addition to pickling Python objects, ``dill`` provides the ability to save the state of an interpreter session in a single command. Hence, it would be feasible to save an interpreter session, close the interpreter, ship the pickled file to another computer, open a new interpreter, unpickle the session and thus continue from the 'saved' state of the original interpreter session. ``dill`` can be used to store Python objects to a file, but the primary usage is to send Python objects across the network as a byte stream. ``dill`` is quite flexible, and allows arbitrary user defined classes and functions to be serialized. Thus ``dill`` is not intended to be secure against erroneously or maliciously constructed data. 
It is left to the user to decide whether the data they unpickle is from a trustworthy source. ``dill`` is part of ``pathos``, a Python framework for heterogeneous computing. ``dill`` is in active development, so any user feedback, bug reports, comments, or suggestions are highly appreciated. A list of issues is located at https://github.com/uqfoundation/dill/issues, with a legacy list maintained at https://uqfoundation.github.io/project/pathos/query. Major Features ============== ``dill`` can pickle the following standard types: - none, type, bool, int, float, complex, bytes, str, - tuple, list, dict, file, buffer, builtin, - Python classes, namedtuples, dataclasses, metaclasses, - instances of classes, - set, frozenset, array, functions, exceptions ``dill`` can also pickle more 'exotic' standard types: - functions with yields, nested functions, lambdas, - cell, method, unboundmethod, module, code, methodwrapper, - methoddescriptor, getsetdescriptor, memberdescriptor, wrapperdescriptor, - dictproxy, slice, notimplemented, ellipsis, quit ``dill`` cannot yet pickle these standard types: - frame, generator, traceback ``dill`` also provides the capability to: - save and load Python interpreter sessions - save and extract the source code from functions and classes - interactively diagnose pickling errors Current Release =============== The latest released version of ``dill`` is available from: https://pypi.org/project/dill ``dill`` is distributed under a 3-clause BSD license. Development Version =================== You can get the latest development version with all the shiny new features at: https://github.com/uqfoundation If you have a new contribution, please submit a pull request. 
Installation ============ ``dill`` can be installed with ``pip``:: $ pip install dill To optionally include the ``objgraph`` diagnostic tool in the install:: $ pip install dill[graph] To optionally include the ``gprof2dot`` diagnostic tool in the install:: $ pip install dill[profile] For windows users, to optionally install session history tools:: $ pip install dill[readline] Requirements ============ ``dill`` requires: - ``python`` (or ``pypy``), **>=3.8** - ``setuptools``, **>=42** Optional requirements: - ``objgraph``, **>=1.7.2** - ``gprof2dot``, **>=2022.7.29** - ``pyreadline``, **>=1.7.1** (on windows) Basic Usage =========== ``dill`` is a drop-in replacement for ``pickle``. Existing code can be updated to allow complete pickling using:: >>> import dill as pickle or:: >>> from dill import dumps, loads ``dumps`` converts the object to a unique byte string, and ``loads`` performs the inverse operation:: >>> squared = lambda x: x**2 >>> loads(dumps(squared))(3) 9 There are a number of options to control serialization which are provided as keyword arguments to several ``dill`` functions: * with *protocol*, the pickle protocol level can be set. This uses the same value as the ``pickle`` module, *DEFAULT_PROTOCOL*. * with *byref=True*, ``dill`` to behave a lot more like pickle with certain objects (like modules) pickled by reference as opposed to attempting to pickle the object itself. * with *recurse=True*, objects referred to in the global dictionary are recursively traced and pickled, instead of the default behavior of attempting to store the entire global dictionary. * with *fmode*, the contents of the file can be pickled along with the file handle, which is useful if the object is being sent over the wire to a remote system which does not have the original file on disk. Options are *HANDLE_FMODE* for just the handle, *CONTENTS_FMODE* for the file content and *FILE_FMODE* for content and handle. 
* with *ignore=False*, objects reconstructed with types defined in the top-level script environment use the existing type in the environment rather than a possibly different reconstructed type. The default serialization can also be set globally in *dill.settings*. Thus, we can modify how ``dill`` handles references to the global dictionary locally or globally:: >>> import dill.settings >>> dumps(absolute) == dumps(absolute, recurse=True) False >>> dill.settings['recurse'] = True >>> dumps(absolute) == dumps(absolute, recurse=True) True ``dill`` also includes source code inspection, as an alternate to pickling:: >>> import dill.source >>> print(dill.source.getsource(squared)) squared = lambda x:x**2 To aid in debugging pickling issues, use *dill.detect* which provides tools like pickle tracing:: >>> import dill.detect >>> with dill.detect.trace(): >>> dumps(squared) ┬ F1: at 0x7fe074f8c280> ├┬ F2: │└ # F2 [34 B] ├┬ Co: at 0x7fe07501eb30, file "", line 1> │├┬ F2: ││└ # F2 [19 B] │└ # Co [87 B] ├┬ D1: │└ # D1 [22 B] ├┬ D2: │└ # D2 [2 B] ├┬ D2: │├┬ D2: ││└ # D2 [2 B] │└ # D2 [23 B] └ # F1 [180 B] With trace, we see how ``dill`` stored the lambda (``F1``) by first storing ``_create_function``, the underlying code object (``Co``) and ``_create_code`` (which is used to handle code objects), then we handle the reference to the global dict (``D2``) plus other dictionaries (``D1`` and ``D2``) that save the lambda object's state. A ``#`` marks when the object is actually stored. More Information ================ Probably the best way to get started is to look at the documentation at http://dill.rtfd.io. Also see ``dill.tests`` for a set of scripts that demonstrate how ``dill`` can serialize different Python objects. You can run the test suite with ``python -m dill.tests``. The contents of any pickle file can be examined with ``undill``. 
As ``dill`` conforms to the ``pickle`` interface, the examples and documentation found at http://docs.python.org/library/pickle.html also apply to ``dill`` if one will ``import dill as pickle``. The source code is also generally well documented, so further questions may be resolved by inspecting the code itself. Please feel free to submit a ticket on github, or ask a question on stackoverflow (**@Mike McKerns**). If you would like to share how you use ``dill`` in your work, please send an email (to **mmckerns at uqfoundation dot org**). Citation ======== If you use ``dill`` to do research that leads to publication, we ask that you acknowledge use of ``dill`` by citing the following in your publication:: M.M. McKerns, L. Strand, T. Sullivan, A. Fang, M.A.G. Aivazis, "Building a framework for predictive science", Proceedings of the 10th Python in Science Conference, 2011; http://arxiv.org/pdf/1202.1056 Michael McKerns and Michael Aivazis, "pathos: a framework for heterogeneous computing", 2010- ; https://uqfoundation.github.io/project/pathos Please see https://uqfoundation.github.io/project/pathos or http://arxiv.org/pdf/1202.1056 for further information. ''' __version__ = '0.4.0' __author__ = 'Mike McKerns' __license__ = ''' Copyright (c) 2004-2016 California Institute of Technology. Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. All rights reserved. This software is available subject to the conditions and terms laid out below. By downloading and using this software you are agreeing to the following conditions. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 
- Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the names of the copyright holders nor the names of any of the contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ''' ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702238.0 dill-0.4.0/dill/__init__.py0000644000076500000240000000732614735133336015256 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2008-2016 California Institute of Technology. # Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. 
The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE # author, version, license, and long description try: # the package is installed from .__info__ import __version__, __author__, __doc__, __license__ except: # pragma: no cover import os import sys parent = os.path.dirname(os.path.abspath(os.path.dirname(__file__))) sys.path.append(parent) # get distribution meta info from version import (__version__, __author__, get_license_text, get_readme_as_rst) __license__ = get_license_text(os.path.join(parent, 'LICENSE')) __license__ = "\n%s" % __license__ __doc__ = get_readme_as_rst(os.path.join(parent, 'README.md')) del os, sys, parent, get_license_text, get_readme_as_rst from ._dill import ( dump, dumps, load, loads, copy, Pickler, Unpickler, register, pickle, pickles, check, DEFAULT_PROTOCOL, HIGHEST_PROTOCOL, HANDLE_FMODE, CONTENTS_FMODE, FILE_FMODE, PickleError, PickleWarning, PicklingError, PicklingWarning, UnpicklingError, UnpicklingWarning, ) from .session import ( dump_module, load_module, load_module_asdict, dump_session, load_session # backward compatibility ) from . import detect, logger, session, source, temp # get global settings from .settings import settings # make sure "trace" is turned off logger.trace(False) objects = {} # local import of dill._objects #from . import _objects #objects.update(_objects.succeeds) #del _objects # local import of dill.objtypes from . import objtypes as types def load_types(pickleable=True, unpickleable=True): """load pickleable and/or unpickleable types to ``dill.types`` ``dill.types`` is meant to mimic the ``types`` module, providing a registry of object types. By default, the module is empty (for import speed purposes). Use the ``load_types`` function to load selected object types to the ``dill.types`` module. Args: pickleable (bool, default=True): if True, load pickleable types. unpickleable (bool, default=True): if True, load unpickleable types. 
Returns: None """ from importlib import reload # local import of dill.objects from . import _objects if pickleable: objects.update(_objects.succeeds) else: [objects.pop(obj,None) for obj in _objects.succeeds] if unpickleable: objects.update(_objects.failures) else: [objects.pop(obj,None) for obj in _objects.failures] objects.update(_objects.registered) del _objects # reset contents of types to 'empty' [types.__dict__.pop(obj) for obj in list(types.__dict__.keys()) \ if obj.find('Type') != -1] # add corresponding types from objects to types reload(types) def extend(use_dill=True): '''add (or remove) dill types to/from the pickle registry by default, ``dill`` populates its types to ``pickle.Pickler.dispatch``. Thus, all ``dill`` types are available upon calling ``'import pickle'``. To drop all ``dill`` types from the ``pickle`` dispatch, *use_dill=False*. Args: use_dill (bool, default=True): if True, extend the dispatch table. Returns: None ''' from ._dill import _revert_extension, _extend if use_dill: _extend() else: _revert_extension() return extend() def license(): """print license""" print (__license__) return def citation(): """print citation""" print (__doc__[-491:-118]) return # end of file ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1741747501.0 dill-0.4.0/dill/_dill.py0000644000076500000240000026226114764172455014612 0ustar00mmckernsstaff# -*- coding: utf-8 -*- # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2008-2015 California Institute of Technology. # Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. 
The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE """ dill: a utility for serialization of python objects The primary functions in `dill` are :func:`dump` and :func:`dumps` for serialization ("pickling") to a file or to a string, respectively, and :func:`load` and :func:`loads` for deserialization ("unpickling"), similarly, from a file or from a string. Other notable functions are :func:`~dill.dump_module` and :func:`~dill.load_module`, which are used to save and restore module objects, including an intepreter session. Based on code written by Oren Tirosh and Armin Ronacher. Extended to a (near) full set of the builtin types (in types module), and coded to the pickle interface, by . Initial port to python3 by Jonathan Dobson, continued by mmckerns. Tested against "all" python types (Std. Lib. CH 1-15 @ 2.7) by mmckerns. Tested against CH16+ Std. Lib. ... TBD. """ from __future__ import annotations __all__ = [ 'dump','dumps','load','loads','copy', 'Pickler','Unpickler','register','pickle','pickles','check', 'DEFAULT_PROTOCOL','HIGHEST_PROTOCOL','HANDLE_FMODE','CONTENTS_FMODE','FILE_FMODE', 'PickleError','PickleWarning','PicklingError','PicklingWarning','UnpicklingError', 'UnpicklingWarning', ] __module__ = 'dill' import warnings from .logger import adapter as logger from .logger import trace as _trace log = logger # backward compatibility (see issue #582) import os import sys diff = None _use_diff = False OLD38 = (sys.hexversion < 0x3080000) OLD39 = (sys.hexversion < 0x3090000) OLD310 = (sys.hexversion < 0x30a0000) OLD312a7 = (sys.hexversion < 0x30c00a7) #XXX: get types from .objtypes ? 
import builtins as __builtin__ from pickle import _Pickler as StockPickler, Unpickler as StockUnpickler from pickle import GLOBAL, POP from _thread import LockType from _thread import RLock as RLockType try: from _thread import _ExceptHookArgs as ExceptHookArgsType except ImportError: ExceptHookArgsType = None try: from _thread import _ThreadHandle as ThreadHandleType except ImportError: ThreadHandleType = None #from io import IOBase from types import CodeType, FunctionType, MethodType, GeneratorType, \ TracebackType, FrameType, ModuleType, BuiltinMethodType BufferType = memoryview #XXX: unregistered ClassType = type # no 'old-style' classes EllipsisType = type(Ellipsis) #FileType = IOBase NotImplementedType = type(NotImplemented) SliceType = slice TypeType = type # 'new-style' classes #XXX: unregistered XRangeType = range from types import MappingProxyType as DictProxyType, new_class from pickle import DEFAULT_PROTOCOL, HIGHEST_PROTOCOL, PickleError, PicklingError, UnpicklingError import __main__ as _main_module import marshal import gc # import zlib import abc import dataclasses from weakref import ReferenceType, ProxyType, CallableProxyType from collections import OrderedDict from enum import Enum, EnumMeta from functools import partial from operator import itemgetter, attrgetter GENERATOR_FAIL = False import importlib.machinery EXTENSION_SUFFIXES = tuple(importlib.machinery.EXTENSION_SUFFIXES) try: import ctypes HAS_CTYPES = True # if using `pypy`, pythonapi is not found IS_PYPY = not hasattr(ctypes, 'pythonapi') except ImportError: HAS_CTYPES = False IS_PYPY = False NumpyUfuncType = None NumpyDType = None NumpyArrayType = None try: if not importlib.machinery.PathFinder().find_spec('numpy'): raise ImportError("No module named 'numpy'") NumpyUfuncType = True NumpyDType = True NumpyArrayType = True except ImportError: pass def __hook__(): global NumpyArrayType, NumpyDType, NumpyUfuncType from numpy import ufunc as NumpyUfuncType from numpy import ndarray as 
NumpyArrayType from numpy import dtype as NumpyDType return True if NumpyArrayType: # then has numpy def ndarraysubclassinstance(obj_type): if all((c.__module__, c.__name__) != ('numpy', 'ndarray') for c in obj_type.__mro__): return False # anything below here is a numpy array (or subclass) instance __hook__() # import numpy (so the following works!!!) # verify that __reduce__ has not been overridden if obj_type.__reduce_ex__ is not NumpyArrayType.__reduce_ex__ \ or obj_type.__reduce__ is not NumpyArrayType.__reduce__: return False return True def numpyufunc(obj_type): return any((c.__module__, c.__name__) == ('numpy', 'ufunc') for c in obj_type.__mro__) def numpydtype(obj_type): if all((c.__module__, c.__name__) != ('numpy', 'dtype') for c in obj_type.__mro__): return False # anything below here is a numpy dtype __hook__() # import numpy (so the following works!!!) return obj_type is type(NumpyDType) # handles subclasses else: def ndarraysubclassinstance(obj): return False def numpyufunc(obj): return False def numpydtype(obj): return False from types import GetSetDescriptorType, ClassMethodDescriptorType, \ WrapperDescriptorType, MethodDescriptorType, MemberDescriptorType, \ MethodWrapperType #XXX: unused # make sure to add these 'hand-built' types to _typemap CellType = type((lambda x: lambda y: x)(0).__closure__[0]) PartialType = type(partial(int, base=2)) SuperType = type(super(Exception, TypeError())) ItemGetterType = type(itemgetter(0)) AttrGetterType = type(attrgetter('__repr__')) try: from functools import _lru_cache_wrapper as LRUCacheType except ImportError: LRUCacheType = None if not isinstance(LRUCacheType, type): LRUCacheType = None def get_file_type(*args, **kwargs): open = kwargs.pop("open", __builtin__.open) f = open(os.devnull, *args, **kwargs) t = type(f) f.close() return t IS_PYODIDE = sys.platform == 'emscripten' FileType = get_file_type('rb', buffering=0) TextWrapperType = get_file_type('r', buffering=-1) BufferedRandomType = None if IS_PYODIDE 
else get_file_type('r+b', buffering=-1) BufferedReaderType = get_file_type('rb', buffering=-1) BufferedWriterType = get_file_type('wb', buffering=-1) try: from _pyio import open as _open PyTextWrapperType = get_file_type('r', buffering=-1, open=_open) PyBufferedRandomType = None if IS_PYODIDE else get_file_type('r+b', buffering=-1, open=_open) PyBufferedReaderType = get_file_type('rb', buffering=-1, open=_open) PyBufferedWriterType = get_file_type('wb', buffering=-1, open=_open) except ImportError: PyTextWrapperType = PyBufferedRandomType = PyBufferedReaderType = PyBufferedWriterType = None from io import BytesIO as StringIO InputType = OutputType = None from socket import socket as SocketType #FIXME: additionally calls ForkingPickler.register several times from multiprocessing.reduction import _reduce_socket as reduce_socket try: #pragma: no cover IS_IPYTHON = __IPYTHON__ # is True ExitType = None # IPython.core.autocall.ExitAutocall IPYTHON_SINGLETONS = ('exit', 'quit', 'get_ipython') except NameError: IS_IPYTHON = False try: ExitType = type(exit) # apparently 'exit' can be removed except NameError: ExitType = None IPYTHON_SINGLETONS = () import inspect import typing ### Shims for different versions of Python and dill class Sentinel(object): """ Create a unique sentinel object that is pickled as a constant. """ def __init__(self, name, module_name=None): self.name = name if module_name is None: # Use the calling frame's module self.__module__ = inspect.currentframe().f_back.f_globals['__name__'] else: self.__module__ = module_name # pragma: no cover def __repr__(self): return self.__module__ + '.' + self.name # pragma: no cover def __copy__(self): return self # pragma: no cover def __deepcopy__(self, memo): return self # pragma: no cover def __reduce__(self): return self.name def __reduce_ex__(self, protocol): return self.name from . import _shims from ._shims import Reduce, Getattr ### File modes #: Pickles the file handle, preserving mode. 
The position of the unpickled #: object is as for a new file handle. HANDLE_FMODE = 0 #: Pickles the file contents, creating a new file if on load the file does #: not exist. The position = min(pickled position, EOF) and mode is chosen #: as such that "best" preserves behavior of the original file. CONTENTS_FMODE = 1 #: Pickles the entire file (handle and contents), preserving mode and position. FILE_FMODE = 2 ### Shorthands (modified from python2.5/lib/pickle.py) def copy(obj, *args, **kwds): """ Use pickling to 'copy' an object (i.e. `loads(dumps(obj))`). See :func:`dumps` and :func:`loads` for keyword arguments. """ ignore = kwds.pop('ignore', Unpickler.settings['ignore']) return loads(dumps(obj, *args, **kwds), ignore=ignore) def dump(obj, file, protocol=None, byref=None, fmode=None, recurse=None, **kwds):#, strictio=None): """ Pickle an object to a file. See :func:`dumps` for keyword arguments. """ from .settings import settings protocol = settings['protocol'] if protocol is None else int(protocol) _kwds = kwds.copy() _kwds.update(dict(byref=byref, fmode=fmode, recurse=recurse)) Pickler(file, protocol, **_kwds).dump(obj) return def dumps(obj, protocol=None, byref=None, fmode=None, recurse=None, **kwds):#, strictio=None): """ Pickle an object to a string. *protocol* is the pickler protocol, as defined for Python *pickle*. If *byref=True*, then dill behaves a lot more like pickle as certain objects (like modules) are pickled by reference as opposed to attempting to pickle the object itself. If *recurse=True*, then objects referred to in the global dictionary are recursively traced and pickled, instead of the default behavior of attempting to store the entire global dictionary. This is needed for functions defined via *exec()*. *fmode* (:const:`HANDLE_FMODE`, :const:`CONTENTS_FMODE`, or :const:`FILE_FMODE`) indicates how file handles will be pickled. 
For example, when pickling a data file handle for transfer to a remote compute service, *FILE_FMODE* will include the file contents in the pickle and cursor position so that a remote method can operate transparently on an object with an open file handle. Default values for keyword arguments can be set in :mod:`dill.settings`. """ file = StringIO() dump(obj, file, protocol, byref, fmode, recurse, **kwds)#, strictio) return file.getvalue() def load(file, ignore=None, **kwds): """ Unpickle an object from a file. See :func:`loads` for keyword arguments. """ return Unpickler(file, ignore=ignore, **kwds).load() def loads(str, ignore=None, **kwds): """ Unpickle an object from a string. If *ignore=False* then objects whose class is defined in the module *__main__* are updated to reference the existing class in *__main__*, otherwise they are left to refer to the reconstructed type, which may be different. Default values for keyword arguments can be set in :mod:`dill.settings`. """ file = StringIO(str) return load(file, ignore, **kwds) # def dumpzs(obj, protocol=None): # """pickle an object to a compressed string""" # return zlib.compress(dumps(obj, protocol)) # def loadzs(str): # """unpickle an object from a compressed string""" # return loads(zlib.decompress(str)) ### End: Shorthands ### class MetaCatchingDict(dict): def get(self, key, default=None): try: return self[key] except KeyError: return default def __missing__(self, key): if issubclass(key, type): return save_type else: raise KeyError() class PickleWarning(Warning, PickleError): pass class PicklingWarning(PickleWarning, PicklingError): pass class UnpicklingWarning(PickleWarning, UnpicklingError): pass ### Extend the Picklers class Pickler(StockPickler): """python's Pickler extended to interpreter sessions""" dispatch: typing.Dict[type, typing.Callable[[Pickler, typing.Any], None]] \ = MetaCatchingDict(StockPickler.dispatch.copy()) """The dispatch table, a dictionary of serializing functions used by Pickler to save 
objects of specific types. Use :func:`pickle` or :func:`register` to associate types to custom functions. :meta hide-value: """ _session = False from .settings import settings def __init__(self, file, *args, **kwds): settings = Pickler.settings _byref = kwds.pop('byref', None) #_strictio = kwds.pop('strictio', None) _fmode = kwds.pop('fmode', None) _recurse = kwds.pop('recurse', None) StockPickler.__init__(self, file, *args, **kwds) self._main = _main_module self._diff_cache = {} self._byref = settings['byref'] if _byref is None else _byref self._strictio = False #_strictio self._fmode = settings['fmode'] if _fmode is None else _fmode self._recurse = settings['recurse'] if _recurse is None else _recurse self._postproc = OrderedDict() self._file = file def save(self, obj, save_persistent_id=True): # numpy hack obj_type = type(obj) if NumpyArrayType and not (obj_type is type or obj_type in Pickler.dispatch): # register if the object is a numpy ufunc # thanks to Paul Kienzle for pointing out ufuncs didn't pickle if numpyufunc(obj_type): @register(obj_type) def save_numpy_ufunc(pickler, obj): logger.trace(pickler, "Nu: %s", obj) name = getattr(obj, '__qualname__', getattr(obj, '__name__', None)) StockPickler.save_global(pickler, obj, name=name) logger.trace(pickler, "# Nu") return # NOTE: the above 'save' performs like: # import copy_reg # def udump(f): return f.__name__ # def uload(name): return getattr(numpy, name) # copy_reg.pickle(NumpyUfuncType, udump, uload) # register if the object is a numpy dtype if numpydtype(obj_type): @register(obj_type) def save_numpy_dtype(pickler, obj): logger.trace(pickler, "Dt: %s", obj) pickler.save_reduce(_create_dtypemeta, (obj.type,), obj=obj) logger.trace(pickler, "# Dt") return # NOTE: the above 'save' performs like: # import copy_reg # def uload(name): return type(NumpyDType(name)) # def udump(f): return uload, (f.type,) # copy_reg.pickle(NumpyDTypeType, udump, uload) # register if the object is a subclassed numpy array instance 
if ndarraysubclassinstance(obj_type): @register(obj_type) def save_numpy_array(pickler, obj): logger.trace(pickler, "Nu: (%s, %s)", obj.shape, obj.dtype) npdict = getattr(obj, '__dict__', None) f, args, state = obj.__reduce__() pickler.save_reduce(_create_array, (f,args,state,npdict), obj=obj) logger.trace(pickler, "# Nu") return # end numpy hack if GENERATOR_FAIL and obj_type is GeneratorType: msg = "Can't pickle %s: attribute lookup builtins.generator failed" % GeneratorType raise PicklingError(msg) StockPickler.save(self, obj, save_persistent_id) save.__doc__ = StockPickler.save.__doc__ def dump(self, obj): #NOTE: if settings change, need to update attributes logger.trace_setup(self) StockPickler.dump(self, obj) dump.__doc__ = StockPickler.dump.__doc__ class Unpickler(StockUnpickler): """python's Unpickler extended to interpreter sessions and more types""" from .settings import settings _session = False def find_class(self, module, name): if (module, name) == ('__builtin__', '__main__'): return self._main.__dict__ #XXX: above set w/save_module_dict elif (module, name) == ('__builtin__', 'NoneType'): return type(None) #XXX: special case: NoneType missing if module == 'dill.dill': module = 'dill._dill' return StockUnpickler.find_class(self, module, name) def __init__(self, *args, **kwds): settings = Pickler.settings _ignore = kwds.pop('ignore', None) StockUnpickler.__init__(self, *args, **kwds) self._main = _main_module self._ignore = settings['ignore'] if _ignore is None else _ignore def load(self): #NOTE: if settings change, need to update attributes obj = StockUnpickler.load(self) if type(obj).__module__ == getattr(_main_module, '__name__', '__main__'): if not self._ignore: # point obj class to main try: obj.__class__ = getattr(self._main, type(obj).__name__) except (AttributeError,TypeError): pass # defined in a file #_main_module.__dict__.update(obj.__dict__) #XXX: should update globals ? 
return obj load.__doc__ = StockUnpickler.load.__doc__ pass ''' def dispatch_table(): """get the dispatch table of registered types""" return Pickler.dispatch ''' pickle_dispatch_copy = StockPickler.dispatch.copy() def pickle(t, func): """expose :attr:`~Pickler.dispatch` table for user-created extensions""" Pickler.dispatch[t] = func return def register(t): """decorator to register types to Pickler's :attr:`~Pickler.dispatch` table""" def proxy(func): Pickler.dispatch[t] = func return func return proxy def _revert_extension(): """drop dill-registered types from pickle's dispatch table""" for type, func in list(StockPickler.dispatch.items()): if func.__module__ == __name__: del StockPickler.dispatch[type] if type in pickle_dispatch_copy: StockPickler.dispatch[type] = pickle_dispatch_copy[type] def use_diff(on=True): """ Reduces size of pickles by only including object which have changed. Decreases pickle size but increases CPU time needed. Also helps avoid some unpickleable objects. MUST be called at start of script, otherwise changes will not be recorded. """ global _use_diff, diff _use_diff = on if _use_diff and diff is None: try: from . import diff as d except ImportError: import diff as d diff = d def _create_typemap(): import types d = dict(list(__builtin__.__dict__.items()) + \ list(types.__dict__.items())).items() for key, value in d: if getattr(value, '__module__', None) == 'builtins' \ and type(value) is type: yield key, value return _reverse_typemap = dict(_create_typemap()) _reverse_typemap.update({ 'PartialType': PartialType, 'SuperType': SuperType, 'ItemGetterType': ItemGetterType, 'AttrGetterType': AttrGetterType, }) if sys.hexversion < 0x30800a2: _reverse_typemap.update({ 'CellType': CellType, }) # "Incidental" implementation specific types. 
Unpickling these types in another # implementation of Python (PyPy -> CPython) is not guaranteed to work # This dictionary should contain all types that appear in Python implementations # but are not defined in https://docs.python.org/3/library/types.html#standard-interpreter-types x=OrderedDict() _incedental_reverse_typemap = { 'FileType': FileType, 'BufferedRandomType': BufferedRandomType, 'BufferedReaderType': BufferedReaderType, 'BufferedWriterType': BufferedWriterType, 'TextWrapperType': TextWrapperType, 'PyBufferedRandomType': PyBufferedRandomType, 'PyBufferedReaderType': PyBufferedReaderType, 'PyBufferedWriterType': PyBufferedWriterType, 'PyTextWrapperType': PyTextWrapperType, } _incedental_reverse_typemap.update({ "DictKeysType": type({}.keys()), "DictValuesType": type({}.values()), "DictItemsType": type({}.items()), "OdictKeysType": type(x.keys()), "OdictValuesType": type(x.values()), "OdictItemsType": type(x.items()), }) if ExitType: _incedental_reverse_typemap['ExitType'] = ExitType if InputType: _incedental_reverse_typemap['InputType'] = InputType _incedental_reverse_typemap['OutputType'] = OutputType ''' try: import symtable _incedental_reverse_typemap["SymtableEntryType"] = type(symtable.symtable("", "string", "exec")._table) except: #FIXME: fails to pickle pass if sys.hexversion >= 0x30a00a0: _incedental_reverse_typemap['LineIteratorType'] = type(compile('3', '', 'eval').co_lines()) ''' if sys.hexversion >= 0x30b00b0 and not IS_PYPY: from types import GenericAlias _incedental_reverse_typemap["GenericAliasIteratorType"] = type(iter(GenericAlias(list, (int,)))) ''' _incedental_reverse_typemap['PositionsIteratorType'] = type(compile('3', '', 'eval').co_positions()) ''' try: import winreg _incedental_reverse_typemap["HKEYType"] = winreg.HKEYType except ImportError: pass _reverse_typemap.update(_incedental_reverse_typemap) _incedental_types = set(_incedental_reverse_typemap.values()) del x _typemap = dict((v, k) for k, v in _reverse_typemap.items()) def 
def _unmarshal(string):
    """Reconstruct a code object from its marshal serialization."""
    return marshal.loads(string)

def _load_type(name):
    """Look up a type object by its registered name in _reverse_typemap."""
    return _reverse_typemap[name]

def _create_type(typeobj, *args):
    """Reconstruct a type by calling the metaclass with (name, bases, dict)."""
    return typeobj(*args)

def _create_function(fcode, fglobals, fname=None, fdefaults=None,
                     fclosure=None, fdict=None, fkwdefaults=None):
    """Reconstruct a function from its parts (code, globals, name, ...).

    Same as FunctionType, but enable passing __dict__ to new function;
    __dict__ is the storehouse for attributes added after function creation.
    """
    func = FunctionType(fcode, fglobals or dict(), fname, fdefaults, fclosure)
    if fdict is not None:
        func.__dict__.update(fdict) #XXX: better copy? option to copy?
    if fkwdefaults is not None:
        func.__kwdefaults__ = fkwdefaults
    # 'recurse' only stores referenced modules/objects in fglobals,
    # thus we need to make sure that we have __builtins__ as well
    if "__builtins__" not in func.__globals__:
        func.__globals__["__builtins__"] = globals()["__builtins__"]
    # assert id(fglobals) == id(func.__globals__)
    return func

class match:
    """
    Make avaialable a limited structural pattern matching-like syntax for Python < 3.10

    Patterns can be only tuples (without types) currently.
    Inspired by the package pattern-matching-PEP634.

    Usage:
    >>> with match(args) as m:
    >>>     if m.case(('x', 'y')):
    >>>         # use m.x and m.y
    >>>     elif m.case(('x', 'y', 'z')):
    >>>         # use m.x, m.y and m.z

    Equivalent native code for Python >= 3.10:
    >>> match args:
    >>>     case (x, y):
    >>>         # use x and y
    >>>     case (x, y, z):
    >>>         # use x, y and z
    """
    def __init__(self, value):
        self.value = value
        self._fields = None
    def __enter__(self):
        return self
    def __exit__(self, *exc_info):
        return False
    def case(self, args): # *args, **kwargs):
        """just handles tuple patterns"""
        if len(self.value) != len(args): # + len(kwargs):
            return False
        #if not all(isinstance(arg, pat) for arg, pat in zip(self.value[len(args):], kwargs.values())):
        #    return False
        self.args = args # (*args, *kwargs)
        return True
    @property
    def fields(self):
        # Only bind names to values if necessary.
        if self._fields is None:
            self._fields = dict(zip(self.args, self.value))
        return self._fields
    def __getattr__(self, item):
        return self.fields[item]

# Table of (version marker, attribute that identifies the version, CodeType
# parameter names in constructor order). The first row whose attribute exists
# on this interpreter's CodeType determines CODE_VERSION / CODE_PARAMS.
ALL_CODE_PARAMS = [
    # Version     New attribute          CodeType parameters
    ((3,11,'a'), 'co_endlinetable',    'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name qualname firstlineno linetable endlinetable columntable exceptiontable freevars cellvars'),
    ((3,11),     'co_exceptiontable',  'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name qualname firstlineno linetable exceptiontable freevars cellvars'),
    ((3,11,'p'), 'co_qualname',        'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name qualname firstlineno linetable freevars cellvars'),
    ((3,10),     'co_linetable',       'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno linetable freevars cellvars'),
    ((3,8),      'co_posonlyargcount', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno lnotab freevars cellvars'),
    ((3,7),      'co_kwonlyargcount',  'argcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno lnotab freevars cellvars'),
    ]
for version, new_attr, params in ALL_CODE_PARAMS:
    if hasattr(CodeType, new_attr):
        CODE_VERSION = version
        CODE_PARAMS = params.split()
        break
# Parameters that are stored as str but must be bytes in the constructor.
ENCODE_PARAMS = set(CODE_PARAMS).intersection(
    ['code', 'lnotab', 'linetable', 'endlinetable', 'columntable', 'exceptiontable'])

def _create_code(*args):
    """Reconstruct a CodeType from args pickled on a (possibly different)
    Python version, translating between the per-version constructor layouts
    described by ALL_CODE_PARAMS."""
    if not isinstance(args[0], int): # co_lnotab stored from >= 3.10
        LNOTAB, *args = args
    else: # from < 3.10 (or pre-LNOTAB storage)
        LNOTAB = b''

    with match(args) as m:
        # Python 3.11/3.12a (18 members)
        if m.case((
            'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags',     # args[0:6]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'qualname', 'firstlineno', # args[6:14]
            'linetable', 'exceptiontable', 'freevars', 'cellvars'                                 # args[14:]
        )):
            if CODE_VERSION == (3,11):
                return CodeType(
                    *args[:6],
                    args[6].encode() if hasattr(args[6], 'encode') else args[6], # code
                    *args[7:14],
                    args[14].encode() if hasattr(args[14], 'encode') else args[14], # linetable
                    args[15].encode() if hasattr(args[15], 'encode') else args[15], # exceptiontable
                    args[16],
                    args[17],
                )
            fields = m.fields
        # PyPy 3.11 7.3.19+ (17 members)
        elif m.case((
            'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags',  # args[0:6]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'qualname',             # args[6:13]
            'firstlineno', 'linetable', 'freevars', 'cellvars'                                 # args[13:]
        )):
            if CODE_VERSION == (3,11,'p'):
                return CodeType(
                    *args[:6],
                    args[6].encode() if hasattr(args[6], 'encode') else args[6], # code
                    *args[7:14],
                    args[14].encode() if hasattr(args[14], 'encode') else args[14], # linetable
                    args[15],
                    args[16],
                )
            fields = m.fields
        # Python 3.10 or 3.8/3.9 (16 members)
        elif m.case((
            'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags',  # args[0:6]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'firstlineno',          # args[6:13]
            'LNOTAB_OR_LINETABLE', 'freevars', 'cellvars'                                      # args[13:]
        )):
            if CODE_VERSION == (3,10) or CODE_VERSION == (3,8):
                return CodeType(
                    *args[:6],
                    args[6].encode() if hasattr(args[6], 'encode') else args[6], # code
                    *args[7:13],
                    args[13].encode() if hasattr(args[13], 'encode') else args[13], # lnotab/linetable
                    args[14],
                    args[15],
                )
            fields = m.fields
            if CODE_VERSION >= (3,10):
                fields['linetable'] = m.LNOTAB_OR_LINETABLE
            else:
                fields['lnotab'] = LNOTAB if LNOTAB else m.LNOTAB_OR_LINETABLE
        # Python 3.7 (15 args)
        elif m.case((
            'argcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags',            # args[0:5]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'firstlineno', # args[5:12]
            'lnotab', 'freevars', 'cellvars'                                          # args[12:]
        )):
            if CODE_VERSION == (3,7):
                return CodeType(
                    *args[:5],
                    args[5].encode() if hasattr(args[5], 'encode') else args[5], # code
                    *args[6:12],
                    args[12].encode() if hasattr(args[12], 'encode') else args[12], # lnotab
                    args[13],
                    args[14],
                )
            fields = m.fields
        # Python 3.11a (20 members)
        elif m.case((
            'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags',     # args[0:6]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'qualname', 'firstlineno', # args[6:14]
            'linetable', 'endlinetable', 'columntable', 'exceptiontable', 'freevars', 'cellvars'  # args[14:]
        )):
            if CODE_VERSION == (3,11,'a'):
                return CodeType(
                    *args[:6],
                    args[6].encode() if hasattr(args[6], 'encode') else args[6], # code
                    *args[7:14],
                    *(a.encode() if hasattr(a, 'encode') else a for a in args[14:18]), # linetable-exceptiontable
                    args[18],
                    args[19],
                )
            fields = m.fields
        else:
            raise UnpicklingError("pattern match for code object failed")

    # The args format doesn't match this version: fill in fields that this
    # interpreter's CodeType requires but the pickling version didn't store.
    fields.setdefault('posonlyargcount', 0)         # from python <= 3.7
    fields.setdefault('lnotab', LNOTAB)             # from python >= 3.10
    fields.setdefault('linetable', b'')             # from python <= 3.9
    fields.setdefault('qualname', fields['name'])   # from python <= 3.10
    fields.setdefault('exceptiontable', b'')        # from python <= 3.10
    fields.setdefault('endlinetable', None)         # from python != 3.11a
    fields.setdefault('columntable', None)          # from python != 3.11a

    args = (fields[k].encode() if k in ENCODE_PARAMS and hasattr(fields[k], 'encode')
            else fields[k] for k in CODE_PARAMS)
    return CodeType(*args)

def _create_ftype(ftypeobj, func, args, kwds):
    """Reconstruct a functools-style wrapper (e.g. partial) from its parts."""
    if kwds is None:
        kwds = {}
    if args is None:
        args = ()
    return ftypeobj(func, *args, **kwds)

def _create_typing_tuple(argz, *args): #NOTE: workaround python/cpython#94245
    """Reconstruct a typing.Tuple alias, handling the empty-tuple cases."""
    if not argz:
        return typing.Tuple[()].copy_with(())
    if argz == ((),):
        return typing.Tuple[()]
    return typing.Tuple[argz]

if ThreadHandleType:
    def _create_thread_handle(ident, done, *args): #XXX: ignores 'blocking'
        """Reconstruct a threading thread-handle with the given ident/state."""
        from threading import _make_thread_handle
        handle = _make_thread_handle(ident)
        if done:
            handle._set_done()
        return handle

def _create_lock(locked, *args): #XXX: ignores 'blocking'
    """Reconstruct a threading.Lock, re-acquiring it if it was held."""
    from threading import Lock
    lock = Lock()
    if locked:
        if not lock.acquire(False):
            raise UnpicklingError("Cannot acquire lock")
    return lock

def _create_rlock(count, owner, *args): #XXX: ignores 'blocking'
    """Reconstruct an RLock, restoring its recursion count and owner."""
    lock = RLockType()
    if owner is not None:
        lock._acquire_restore((count, owner))
    if owner and not lock._is_owned():
        raise UnpicklingError("Cannot acquire lock")
    return lock

# thanks to matsjoyce for adding all the different file modes
def _create_filehandle(name, mode, position, closed, open, strictio, fmode, fdata): # buffering=0
    """Reconstruct a file handle by name/mode/position, honoring the
    HANDLE/CONTENTS/FILE fmode and the strictio flag."""
    # only pickles the handle, not the file contents... good? or StringIO(data)?
    # (for file contents see: http://effbot.org/librarybook/copy-reg.htm)
    # NOTE: handle special cases first (are there more special cases?)
    # FIX: the '<...>' sentinel names below were garbled to '' (which also
    # collapsed the dict to a single duplicate key); restored.
    names = {'<stdin>':sys.__stdin__, '<stdout>':sys.__stdout__,
             '<stderr>':sys.__stderr__} #XXX: better fileno=(0,1,2) ?
    if name in list(names.keys()):
        f = names[name] #XXX: safer "f=sys.stdin"
    elif name == '<tmpfile>':
        f = os.tmpfile()
    elif name == '<fdopen>':
        import tempfile
        f = tempfile.TemporaryFile(mode)
    else:
        try:
            exists = os.path.exists(name)
        except Exception:
            exists = False
        if not exists:
            if strictio:
                raise FileNotFoundError("[Errno 2] No such file or directory: '%s'" % name)
            elif "r" in mode and fmode != FILE_FMODE:
                name = '<fdopen>' # or os.devnull?
            current_size = 0 # or maintain position?
        else:
            current_size = os.path.getsize(name)

        if position > current_size:
            if strictio:
                raise ValueError("invalid buffer size")
            elif fmode == CONTENTS_FMODE:
                position = current_size
        # try to open the file by name
        # NOTE: has different fileno
        try:
            #FIXME: missing: *buffering*, encoding, softspace
            if fmode == FILE_FMODE:
                f = open(name, mode if "w" in mode else "w")
                f.write(fdata)
                if "w" not in mode:
                    f.close()
                    f = open(name, mode)
            elif name == '<fdopen>': # file did not exist
                import tempfile
                f = tempfile.TemporaryFile(mode)
            # treat x mode as w mode
            elif fmode == CONTENTS_FMODE \
               and ("w" in mode or "x" in mode):
                # stop truncation when opening
                flags = os.O_CREAT
                if "+" in mode:
                    flags |= os.O_RDWR
                else:
                    flags |= os.O_WRONLY
                f = os.fdopen(os.open(name, flags), mode)
                # set name to the correct value
                r = getattr(f, "buffer", f)
                r = getattr(r, "raw", r)
                r.name = name
                assert f.name == name
            else:
                f = open(name, mode)
        except (IOError, FileNotFoundError):
            err = sys.exc_info()[1]
            raise UnpicklingError(err)
    if closed:
        f.close()
    elif position >= 0 and fmode != HANDLE_FMODE:
        f.seek(position)
    return f

def _create_stringi(value, position, closed):
    """Reconstruct an input StringIO with contents, position, closed state."""
    f = StringIO(value)
    if closed: f.close()
    else: f.seek(position)
    return f

def _create_stringo(value, position, closed):
    """Reconstruct an output StringIO with contents, position, closed state."""
    f = StringIO()
    if closed: f.close()
    else:
        f.write(value)
        f.seek(position)
    return f

class _itemgetter_helper(object):
    """Records the items requested via __getitem__ (used to introspect
    operator.itemgetter instances)."""
    def __init__(self):
        self.items = []
    def __getitem__(self, item):
        self.items.append(item)
        return

class _attrgetter_helper(object):
    """Records dotted attribute paths requested via attribute access (used to
    introspect operator.attrgetter instances)."""
    def __init__(self, attrs, index=None):
        self.attrs = attrs
        self.index = index
    def __getattribute__(self, attr):
        attrs = object.__getattribute__(self, "attrs")
        index = object.__getattribute__(self, "index")
        if index is None:
            index = len(attrs)
            attrs.append(attr)
        else:
            attrs[index] = ".".join([attrs[index], attr])
        return type(self)(attrs, index)

class _dictproxy_helper(dict):
    # __ror__ returns the left operand unchanged, exposing the mapping that a
    # MappingProxyType wraps when used as `proxy | helper` (see trick below).
    def __ror__(self, a):
        return a

_dictproxy_helper_instance = _dictproxy_helper()

__d = {}
try:
    # In CPython 3.9 and later, this trick can be used to exploit the
    # implementation of the __or__ function of MappingProxyType to get the true
    # mapping referenced by the proxy. It may work for other implementations,
    # but is not guaranteed.
    MAPPING_PROXY_TRICK = __d is (DictProxyType(__d) | _dictproxy_helper_instance)
except Exception:
    MAPPING_PROXY_TRICK = False
del __d
this trick can be used to exploit the # implementation of the __or__ function of MappingProxyType to get the true # mapping referenced by the proxy. It may work for other implementations, # but is not guaranteed. MAPPING_PROXY_TRICK = __d is (DictProxyType(__d) | _dictproxy_helper_instance) except Exception: MAPPING_PROXY_TRICK = False del __d # _CELL_REF and _CELL_EMPTY are used to stay compatible with versions of dill # whose _create_cell functions do not have a default value. # _CELL_REF can be safely removed entirely (replaced by empty tuples for calls # to _create_cell) once breaking changes are allowed. _CELL_REF = None _CELL_EMPTY = Sentinel('_CELL_EMPTY') def _create_cell(contents=None): if contents is not _CELL_EMPTY: value = contents return (lambda: value).__closure__[0] def _create_weakref(obj, *args): from weakref import ref if obj is None: # it's dead from collections import UserDict return ref(UserDict(), *args) return ref(obj, *args) def _create_weakproxy(obj, callable=False, *args): from weakref import proxy if obj is None: # it's dead if callable: return proxy(lambda x:x, *args) from collections import UserDict return proxy(UserDict(), *args) return proxy(obj, *args) def _eval_repr(repr_str): return eval(repr_str) def _create_array(f, args, state, npdict=None): #array = numpy.core.multiarray._reconstruct(*args) array = f(*args) array.__setstate__(state) if npdict is not None: # we also have saved state in __dict__ array.__dict__.update(npdict) return array def _create_dtypemeta(scalar_type): if NumpyDType is True: __hook__() # a bit hacky I think if scalar_type is None: return NumpyDType return type(NumpyDType(scalar_type)) def _create_namedtuple(name, fieldnames, modulename, defaults=None): class_ = _import_module(modulename + '.' 
+ name, safe=True) if class_ is not None: return class_ import collections t = collections.namedtuple(name, fieldnames, defaults=defaults, module=modulename) return t def _create_capsule(pointer, name, context, destructor): attr_found = False try: # based on https://github.com/python/cpython/blob/f4095e53ab708d95e019c909d5928502775ba68f/Objects/capsule.c#L209-L231 uname = name.decode('utf8') for i in range(1, uname.count('.')+1): names = uname.rsplit('.', i) try: module = __import__(names[0]) except ImportError: pass obj = module for attr in names[1:]: obj = getattr(obj, attr) capsule = obj attr_found = True break except Exception: pass if attr_found: if _PyCapsule_IsValid(capsule, name): return capsule raise UnpicklingError("%s object exists at %s but a PyCapsule object was expected." % (type(capsule), name)) else: #warnings.warn('Creating a new PyCapsule %s for a C data structure that may not be present in memory. Segmentation faults or other memory errors are possible.' % (name,), UnpicklingWarning) capsule = _PyCapsule_New(pointer, name, destructor) _PyCapsule_SetContext(capsule, context) return capsule def _getattr(objclass, name, repr_str): # hack to grab the reference directly try: #XXX: works only for __builtin__ ? attr = repr_str.split("'")[3] return eval(attr+'.__dict__["'+name+'"]') except Exception: try: attr = objclass.__dict__ if type(attr) is DictProxyType: attr = attr[name] else: attr = getattr(objclass,name) except (AttributeError, KeyError): attr = getattr(objclass,name) return attr def _get_attr(self, name): # stop recursive pickling return getattr(self, name, None) or getattr(__builtin__, name) def _import_module(import_name, safe=False): try: if import_name.startswith('__runtime__.'): return sys.modules[import_name] elif '.' 
in import_name: items = import_name.split('.') module = '.'.join(items[:-1]) obj = items[-1] submodule = getattr(__import__(module, None, None, [obj]), obj) if isinstance(submodule, (ModuleType, type)): return submodule return __import__(import_name, None, None, [obj]) else: return __import__(import_name) except (ImportError, AttributeError, KeyError): if safe: return None raise # https://github.com/python/cpython/blob/a8912a0f8d9eba6d502c37d522221f9933e976db/Lib/pickle.py#L322-L333 def _getattribute(obj, name): for subpath in name.split('.'): if subpath == '': raise AttributeError("Can't get local attribute {!r} on {!r}" .format(name, obj)) try: parent = obj obj = getattr(obj, subpath) except AttributeError: raise AttributeError("Can't get attribute {!r} on {!r}" .format(name, obj)) return obj, parent def _locate_function(obj, pickler=None): module_name = getattr(obj, '__module__', None) if module_name in ['__main__', None] or \ pickler and is_dill(pickler, child=False) and pickler._session and module_name == pickler._main.__name__: return False if hasattr(obj, '__qualname__'): module = _import_module(module_name, safe=True) try: found, _ = _getattribute(module, obj.__qualname__) return found is obj except AttributeError: return False else: found = _import_module(module_name + '.' + obj.__name__, safe=True) return found is obj def _setitems(dest, source): for k, v in source.items(): dest[k] = v def _save_with_postproc(pickler, reduction, is_pickler_dill=None, obj=Getattr.NO_DEFAULT, postproc_list=None): if obj is Getattr.NO_DEFAULT: obj = Reduce(reduction) # pragma: no cover if is_pickler_dill is None: is_pickler_dill = is_dill(pickler, child=True) if is_pickler_dill: # assert id(obj) not in pickler._postproc, str(obj) + ' already pushed on stack!' # if not hasattr(pickler, 'x'): pickler.x = 0 # print(pickler.x*' ', 'push', obj, id(obj), pickler._recurse) # pickler.x += 1 if postproc_list is None: postproc_list = [] # Recursive object not supported. 
Default to a global instead. if id(obj) in pickler._postproc: name = '%s.%s ' % (obj.__module__, getattr(obj, '__qualname__', obj.__name__)) if hasattr(obj, '__module__') else '' warnings.warn('Cannot pickle %r: %shas recursive self-references that trigger a RecursionError.' % (obj, name), PicklingWarning) pickler.save_global(obj) return pickler._postproc[id(obj)] = postproc_list # TODO: Use state_setter in Python 3.8 to allow for faster cPickle implementations pickler.save_reduce(*reduction, obj=obj) if is_pickler_dill: # pickler.x -= 1 # print(pickler.x*' ', 'pop', obj, id(obj)) postproc = pickler._postproc.pop(id(obj)) # assert postproc_list == postproc, 'Stack tampered!' for reduction in reversed(postproc): if reduction[0] is _setitems: # use the internal machinery of pickle.py to speedup when # updating a dictionary in postproc dest, source = reduction[1] if source: pickler.write(pickler.get(pickler.memo[id(dest)][0])) if sys.hexversion < 0x30e00a1: pickler._batch_setitems(iter(source.items())) else: pickler._batch_setitems(iter(source.items()), obj=obj) else: # Updating with an empty dictionary. Same as doing nothing. continue else: pickler.save_reduce(*reduction) # pop None created by calling preprocessing step off stack pickler.write(POP) #@register(CodeType) #def save_code(pickler, obj): # logger.trace(pickler, "Co: %s", obj) # pickler.save_reduce(_unmarshal, (marshal.dumps(obj),), obj=obj) # logger.trace(pickler, "# Co") # return # The following function is based on 'save_codeobject' from 'cloudpickle' # Copyright (c) 2012, Regents of the University of California. # Copyright (c) 2009 `PiCloud, Inc. `_. 
# License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE @register(CodeType) def save_code(pickler, obj): logger.trace(pickler, "Co: %s", obj) if hasattr(obj, "co_endlinetable"): # python 3.11a (20 args) args = ( obj.co_lnotab, # for < python 3.10 [not counted in args] obj.co_argcount, obj.co_posonlyargcount, obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize, obj.co_flags, obj.co_code, obj.co_consts, obj.co_names, obj.co_varnames, obj.co_filename, obj.co_name, obj.co_qualname, obj.co_firstlineno, obj.co_linetable, obj.co_endlinetable, obj.co_columntable, obj.co_exceptiontable, obj.co_freevars, obj.co_cellvars ) elif hasattr(obj, "co_exceptiontable"): # python 3.11 (18 args) with warnings.catch_warnings(): if not OLD312a7: # issue 597 warnings.filterwarnings('ignore', category=DeprecationWarning) args = ( obj.co_lnotab, # for < python 3.10 [not counted in args] obj.co_argcount, obj.co_posonlyargcount, obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize, obj.co_flags, obj.co_code, obj.co_consts, obj.co_names, obj.co_varnames, obj.co_filename, obj.co_name, obj.co_qualname, obj.co_firstlineno, obj.co_linetable, obj.co_exceptiontable, obj.co_freevars, obj.co_cellvars ) elif hasattr(obj, "co_qualname"): # pypy 3.11 7.3.19+ (17 args) args = ( obj.co_lnotab, obj.co_argcount, obj.co_posonlyargcount, obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize, obj.co_flags, obj.co_code, obj.co_consts, obj.co_names, obj.co_varnames, obj.co_filename, obj.co_name, obj.co_qualname, obj.co_firstlineno, obj.co_linetable, obj.co_freevars, obj.co_cellvars ) elif hasattr(obj, "co_linetable"): # python 3.10 (16 args) args = ( obj.co_lnotab, # for < python 3.10 [not counted in args] obj.co_argcount, obj.co_posonlyargcount, obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize, obj.co_flags, obj.co_code, obj.co_consts, obj.co_names, obj.co_varnames, obj.co_filename, obj.co_name, obj.co_firstlineno, obj.co_linetable, obj.co_freevars, obj.co_cellvars ) elif 
hasattr(obj, "co_posonlyargcount"): # python 3.8 (16 args) args = ( obj.co_argcount, obj.co_posonlyargcount, obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize, obj.co_flags, obj.co_code, obj.co_consts, obj.co_names, obj.co_varnames, obj.co_filename, obj.co_name, obj.co_firstlineno, obj.co_lnotab, obj.co_freevars, obj.co_cellvars ) else: # python 3.7 (15 args) args = ( obj.co_argcount, obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize, obj.co_flags, obj.co_code, obj.co_consts, obj.co_names, obj.co_varnames, obj.co_filename, obj.co_name, obj.co_firstlineno, obj.co_lnotab, obj.co_freevars, obj.co_cellvars ) pickler.save_reduce(_create_code, args, obj=obj) logger.trace(pickler, "# Co") return def _repr_dict(obj): """Make a short string representation of a dictionary.""" return "<%s object at %#012x>" % (type(obj).__name__, id(obj)) @register(dict) def save_module_dict(pickler, obj): if is_dill(pickler, child=False) and obj == pickler._main.__dict__ and \ not (pickler._session and pickler._first_pass): logger.trace(pickler, "D1: %s", _repr_dict(obj)) # obj pickler.write(bytes('c__builtin__\n__main__\n', 'UTF-8')) logger.trace(pickler, "# D1") elif (not is_dill(pickler, child=False)) and (obj == _main_module.__dict__): logger.trace(pickler, "D3: %s", _repr_dict(obj)) # obj pickler.write(bytes('c__main__\n__dict__\n', 'UTF-8')) #XXX: works in general? 
# Register picklers for dict views (keys/values/items). When the mapping-proxy
# trick is available (CPython >= 3.10 path), recover the backing mapping via
# `obj.mapping | _dictproxy_helper_instance`; otherwise rebuild an equivalent
# dict from the view's contents (cloudpickle-derived fallback).
if not OLD310 and MAPPING_PROXY_TRICK:
    def save_dict_view(dicttype):
        # Returns [(funcname, save_function), ...] for keys/values/items.
        def save_dict_view_for_function(func):
            def _save_dict_view(pickler, obj):
                logger.trace(pickler, "Dkvi: <%s>", obj)
                mapping = obj.mapping | _dictproxy_helper_instance
                pickler.save_reduce(func, (mapping,), obj=obj)
                logger.trace(pickler, "# Dkvi")
            return _save_dict_view
        return [
            (funcname, save_dict_view_for_function(getattr(dicttype, funcname)))
            for funcname in ('keys', 'values', 'items')
        ]
else:
    # The following functions are based on 'cloudpickle'
    # https://github.com/cloudpipe/cloudpickle/blob/5d89947288a18029672596a4d719093cc6d5a412/cloudpickle/cloudpickle.py#L922-L940
    # Copyright (c) 2012, Regents of the University of California.
    # Copyright (c) 2009 `PiCloud, Inc.`_.
    # License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE
    def save_dict_view(dicttype):
        # Returns ((funcname, save_function), ...) for keys/values/items.
        def save_dict_keys(pickler, obj):
            logger.trace(pickler, "Dk: <%s>", obj)
            dict_constructor = _shims.Reduce(dicttype.fromkeys, (list(obj),))
            pickler.save_reduce(dicttype.keys, (dict_constructor,), obj=obj)
            logger.trace(pickler, "# Dk")
        def save_dict_values(pickler, obj):
            logger.trace(pickler, "Dv: <%s>", obj)
            dict_constructor = _shims.Reduce(dicttype, (enumerate(obj),))
            pickler.save_reduce(dicttype.values, (dict_constructor,), obj=obj)
            logger.trace(pickler, "# Dv")
        def save_dict_items(pickler, obj):
            logger.trace(pickler, "Di: <%s>", obj)
            pickler.save_reduce(dicttype.items, (dicttype(obj),), obj=obj)
            logger.trace(pickler, "# Di")
        return (
            ('keys', save_dict_keys),
            ('values', save_dict_values),
            ('items', save_dict_items)
        )

# Install the view picklers into the dispatch table for dict and OrderedDict,
# keyed by the concrete view types (which differ between the two).
for __dicttype in (
    dict,
    OrderedDict
):
    __obj = __dicttype()
    for __funcname, __savefunc in save_dict_view(__dicttype):
        __tview = type(getattr(__obj, __funcname)())
        if __tview not in Pickler.dispatch:
            Pickler.dispatch[__tview] = __savefunc
del __dicttype, __obj, __funcname, __tview, __savefunc

@register(ClassType)
def save_classobj(pickler, obj): #FIXME: enable pickler._byref
    # Pickle a class by value (name, bases, dict) when it cannot be located
    # by import; otherwise save it as a global reference.
    if not _locate_function(obj, pickler):
        logger.trace(pickler, "C1: %s", obj)
        pickler.save_reduce(ClassType, (obj.__name__, obj.__bases__,
                                        obj.__dict__), obj=obj)
                                       #XXX: or obj.__dict__.copy()), obj=obj) ?
        logger.trace(pickler, "# C1")
    else:
        logger.trace(pickler, "C2: %s", obj)
        name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
        StockPickler.save_global(pickler, obj, name=name)
        logger.trace(pickler, "# C2")
    return

@register(typing._GenericAlias)
def save_generic_alias(pickler, obj):
    # Pickle a typing generic alias: as a named global when its __reduce__
    # yields a string, specially for empty typing.Tuple (cpython#94245
    # workaround), otherwise via its standard reduction.
    args = obj.__args__
    if type(obj.__reduce__()) is str:
        logger.trace(pickler, "Ga0: %s", obj)
        StockPickler.save_global(pickler, obj, name=obj.__reduce__())
        logger.trace(pickler, "# Ga0")
    elif obj.__origin__ is tuple and (not args or args == ((),)):
        logger.trace(pickler, "Ga1: %s", obj)
        pickler.save_reduce(_create_typing_tuple, (args,), obj=obj)
        logger.trace(pickler, "# Ga1")
    else:
        logger.trace(pickler, "Ga2: %s", obj)
        StockPickler.save_reduce(pickler, *obj.__reduce__(), obj=obj)
        logger.trace(pickler, "# Ga2")
    return

if ThreadHandleType:
    @register(ThreadHandleType)
    def save_thread_handle(pickler, obj):
        # Pickle a thread handle by its ident and done-state only.
        logger.trace(pickler, "Th: %s", obj)
        pickler.save_reduce(_create_thread_handle, (obj.ident, obj.is_done()), obj=obj)
        logger.trace(pickler, "# Th")
        return

@register(LockType) #XXX: copied Thread will have new Event (due to new Lock)
def save_lock(pickler, obj):
    # Pickle a Lock by its locked/unlocked state only.
    logger.trace(pickler, "Lo: %s", obj)
    pickler.save_reduce(_create_lock, (obj.locked(),), obj=obj)
    logger.trace(pickler, "# Lo")
    return

@register(RLockType)
def save_rlock(pickler, obj):
    # Pickle an RLock by parsing count/owner out of its repr.
    logger.trace(pickler, "RL: %s", obj)
    r = obj.__repr__() # don't use _release_save as it unlocks the lock
    count = int(r.split('count=')[1].split()[0].rstrip('>'))
    owner = int(r.split('owner=')[1].split()[0])
    pickler.save_reduce(_create_rlock, (count,owner,), obj=obj)
    logger.trace(pickler, "# RL")
    return

#@register(SocketType) #FIXME: causes multiprocess test_pickling FAIL
def save_socket(pickler, obj):
    # Pickle a socket via multiprocessing's reduce_socket (not registered).
    logger.trace(pickler, "So: %s", obj)
    pickler.save_reduce(*reduce_socket(obj))
    logger.trace(pickler, "# So")
    return

def _save_file(pickler, obj, open_):
    # Common implementation for all file-handle picklers: record name, mode,
    # position and closed state (and contents, under FILE_FMODE); the handle
    # is rebuilt by _create_filehandle.
    if obj.closed:
        position = 0
    else:
        obj.flush()
        if obj in (sys.__stdout__, sys.__stderr__, sys.__stdin__):
            position = -1  # std streams: don't try to restore a position
        else:
            position = obj.tell()
    if is_dill(pickler, child=True) and pickler._fmode == FILE_FMODE:
        f = open_(obj.name, "r")
        fdata = f.read()
        f.close()
    else:
        fdata = ""
    if is_dill(pickler, child=True):
        strictio = pickler._strictio
        fmode = pickler._fmode
    else:
        strictio = False
        fmode = 0 # HANDLE_FMODE
    pickler.save_reduce(_create_filehandle, (obj.name, obj.mode, position,
                                             obj.closed, open_, strictio,
                                             fmode, fdata), obj=obj)
    return

@register(FileType) #XXX: in 3.x has buffer=0, needs different _create?
@register(BufferedReaderType)
@register(BufferedWriterType)
@register(TextWrapperType)
def save_file(pickler, obj):
    # C-implemented io types: rebuilt with the builtin open.
    logger.trace(pickler, "Fi: %s", obj)
    f = _save_file(pickler, obj, open)
    logger.trace(pickler, "# Fi")
    return f

if BufferedRandomType:
    @register(BufferedRandomType)
    def save_file(pickler, obj):
        logger.trace(pickler, "Fi: %s", obj)
        f = _save_file(pickler, obj, open)
        logger.trace(pickler, "# Fi")
        return f

if PyTextWrapperType:
    @register(PyBufferedReaderType)
    @register(PyBufferedWriterType)
    @register(PyTextWrapperType)
    def save_file(pickler, obj):
        # pure-Python io types: rebuilt with _open (the pure-Python open).
        logger.trace(pickler, "Fi: %s", obj)
        f = _save_file(pickler, obj, _open)
        logger.trace(pickler, "# Fi")
        return f

    if PyBufferedRandomType:
        @register(PyBufferedRandomType)
        def save_file(pickler, obj):
            logger.trace(pickler, "Fi: %s", obj)
            f = _save_file(pickler, obj, _open)
            logger.trace(pickler, "# Fi")
            return f

# The following two functions are based on 'saveCStringIoInput'
# and 'saveCStringIoOutput' from spickle
# Copyright (c) 2011 by science+computing ag
# License: http://www.apache.org/licenses/LICENSE-2.0
if InputType:
    @register(InputType)
    def save_stringi(pickler, obj):
        # Pickle an input StringIO by value, position and closed state.
        logger.trace(pickler, "Io: %s", obj)
        if obj.closed:
            value = ''; position = 0
        else:
            value = obj.getvalue(); position = obj.tell()
        pickler.save_reduce(_create_stringi, (value, position, \
                                              obj.closed), obj=obj)
        logger.trace(pickler, "# Io")
        return

    @register(OutputType)
    def save_stringo(pickler, obj):
        # Pickle an output StringIO by value, position and closed state.
        logger.trace(pickler, "Io: %s", obj)
        if obj.closed:
            value = ''; position = 0
        else:
            value = obj.getvalue(); position = obj.tell()
        pickler.save_reduce(_create_stringo, (value, position, \
                                              obj.closed), obj=obj)
        logger.trace(pickler, "# Io")
        return

if LRUCacheType is not None:
    from functools import lru_cache
    @register(LRUCacheType)
    def save_lru_cache(pickler, obj):
        # Pickle an lru_cache wrapper by re-wrapping its __wrapped__ function
        # with the same (maxsize, typed) parameters; cached entries are lost.
        logger.trace(pickler, "LRU: %s", obj)
        if OLD39:
            kwargs = obj.cache_info()
            args = (kwargs.maxsize,)
        else:
            kwargs = obj.cache_parameters()
            args = (kwargs['maxsize'], kwargs['typed'])
        if args != lru_cache.__defaults__:
            wrapper = Reduce(lru_cache, args, is_callable=True)
        else:
            wrapper = lru_cache
        pickler.save_reduce(wrapper, (obj.__wrapped__,), obj=obj)
        logger.trace(pickler, "# LRU")
        return

@register(SuperType)
def save_super(pickler, obj):
    # Pickle a bound super object from its class and instance.
    logger.trace(pickler, "Su: %s", obj)
    pickler.save_reduce(super, (obj.__thisclass__, obj.__self__), obj=obj)
    logger.trace(pickler, "# Su")
    return

if IS_PYPY:
    @register(MethodType)
    def save_instancemethod0(pickler, obj):
        # PyPy: builtin bound methods may lack a real code object; pickle
        # those as a getattr on the instance instead of as MethodType.
        code = getattr(obj.__func__, '__code__', None)
        if code is not None and type(code) is not CodeType \
                and getattr(obj.__self__, obj.__name__) == obj:
            # Some PyPy builtin functions have no module name
            logger.trace(pickler, "Me2: %s", obj)
            # TODO: verify that this works for all PyPy builtin methods
            pickler.save_reduce(getattr, (obj.__self__, obj.__name__), obj=obj)
            logger.trace(pickler, "# Me2")
            return
        logger.trace(pickler, "Me1: %s", obj)
        pickler.save_reduce(MethodType, (obj.__func__, obj.__self__), obj=obj)
        logger.trace(pickler, "# Me1")
        return
else:
    @register(MethodType)
    def save_instancemethod0(pickler, obj):
        # Pickle a bound method from its function and instance.
        logger.trace(pickler, "Me1: %s", obj)
        pickler.save_reduce(MethodType, (obj.__func__, obj.__self__), obj=obj)
        logger.trace(pickler, "# Me1")
        return

if not IS_PYPY:
    @register(MemberDescriptorType)
    @register(GetSetDescriptorType)
    @register(MethodDescriptorType)
    @register(WrapperDescriptorType)
    @register(ClassMethodDescriptorType)
    def save_wrapper_descriptor(pickler, obj):
        # Pickle descriptors by (owner class, name, repr); _getattr re-fetches.
        logger.trace(pickler, "Wr: %s", obj)
        pickler.save_reduce(_getattr, (obj.__objclass__, obj.__name__,
                                       obj.__repr__()), obj=obj)
        logger.trace(pickler, "# Wr")
        return
else:
    @register(MemberDescriptorType)
    @register(GetSetDescriptorType)
    def save_wrapper_descriptor(pickler, obj):
        # PyPy variant: fewer descriptor types exist to register.
        logger.trace(pickler, "Wr: %s", obj)
        pickler.save_reduce(_getattr, (obj.__objclass__, obj.__name__,
                                       obj.__repr__()), obj=obj)
        logger.trace(pickler, "# Wr")
        return

@register(CellType)
def save_cell(pickler, obj):
    # Pickle a closure cell. Empty cells and cells participating in reference
    # cycles need special handling via the postproc machinery.
    try:
        f = obj.cell_contents
    except ValueError: # cell is empty
        logger.trace(pickler, "Ce3: %s", obj)
        # _shims._CELL_EMPTY is defined in _shims.py to support PyPy 2.7.
        # It unpickles to a sentinel object _dill._CELL_EMPTY, also created in
        # _shims.py. This object is not present in Python 3 because the cell's
        # contents can be deleted in newer versions of Python. The reduce object
        # will instead unpickle to None if unpickled in Python 3.

        # When breaking changes are made to dill, (_shims._CELL_EMPTY,) can
        # be replaced by () OR the delattr function can be removed repending on
        # whichever is more convienient.
        pickler.save_reduce(_create_cell, (_shims._CELL_EMPTY,), obj=obj)
        # Call the function _delattr on the cell's cell_contents attribute
        # The result of this function call will be None
        pickler.save_reduce(_shims._delattr, (obj, 'cell_contents'))
        # pop None created by calling _delattr off stack
        pickler.write(POP)
        logger.trace(pickler, "# Ce3")
        return
    if is_dill(pickler, child=True):
        if id(f) in pickler._postproc:
            # Already seen. Add to its postprocessing.
            postproc = pickler._postproc[id(f)]
        else:
            # Haven't seen it. Add to the highest possible object and set its
            # value as late as possible to prevent cycle.
            postproc = next(iter(pickler._postproc.values()), None)
        if postproc is not None:
            logger.trace(pickler, "Ce2: %s", obj)
            # _CELL_REF is defined in _shims.py to support older versions of
            # dill. When breaking changes are made to dill, (_CELL_REF,) can
            # be replaced by ()
            pickler.save_reduce(_create_cell, (_CELL_REF,), obj=obj)
            postproc.append((_shims._setattr, (obj, 'cell_contents', f)))
            logger.trace(pickler, "# Ce2")
            return
    logger.trace(pickler, "Ce1: %s", obj)
    pickler.save_reduce(_create_cell, (f,), obj=obj)
    logger.trace(pickler, "# Ce1")
    return

if MAPPING_PROXY_TRICK:
    @register(DictProxyType)
    def save_dictproxy(pickler, obj):
        # Pickle a mappingproxy around its true backing mapping (preserves
        # identity relationships through the __or__ trick).
        logger.trace(pickler, "Mp: %s", _repr_dict(obj)) # obj
        mapping = obj | _dictproxy_helper_instance
        pickler.save_reduce(DictProxyType, (mapping,), obj=obj)
        logger.trace(pickler, "# Mp")
        return
else:
    @register(DictProxyType)
    def save_dictproxy(pickler, obj):
        # Fallback: pickle a mappingproxy around a shallow copy of its data.
        logger.trace(pickler, "Mp: %s", _repr_dict(obj)) # obj
        pickler.save_reduce(DictProxyType, (obj.copy(),), obj=obj)
        logger.trace(pickler, "# Mp")
        return

@register(SliceType)
def save_slice(pickler, obj):
    # Pickle a slice from its start/stop/step.
    logger.trace(pickler, "Sl: %s", obj)
    pickler.save_reduce(slice, (obj.start, obj.stop, obj.step), obj=obj)
    logger.trace(pickler, "# Sl")
    return

@register(XRangeType)
@register(EllipsisType)
@register(NotImplementedType)
def save_singleton(pickler, obj):
    # Pickle singletons/ranges by evaluating their repr on load.
    logger.trace(pickler, "Si: %s", obj)
    pickler.save_reduce(_eval_repr, (obj.__repr__(),), obj=obj)
    logger.trace(pickler, "# Si")
    return

def _proxy_helper(obj): # a dead proxy returns a reference to None
    """get memory address of proxy's reference object"""
    _repr = repr(obj)
    try:
        _str = str(obj)
    except ReferenceError: # it's a dead proxy
        return id(None)
    if _str == _repr:
        return id(obj) # it's a repr
    try: # either way, it's a proxy from here
        address = int(_str.rstrip('>').split(' at ')[-1], base=16)
    except ValueError: # special case: proxy of a 'type'
        if not IS_PYPY:
            address = int(_repr.rstrip('>').split(' at ')[-1], base=16)
        else:
            objects = iter(gc.get_objects())
            for _obj in objects:
                if repr(_obj) == _str: return id(_obj)
            # all bad below... nothing found so throw ReferenceError
            msg = "Cannot reference object for proxy at '%s'" % id(obj)
            raise ReferenceError(msg)
    return address

def _locate_object(address, module=None):
    """get object located at the given memory address (inverse of id(obj))"""
    special = [None, True, False] #XXX: more...?
    for obj in special:
        if address == id(obj): return obj
    if module:
        objects = iter(module.__dict__.values())
    else:
        objects = iter(gc.get_objects())
    for obj in objects:
        if address == id(obj): return obj
    # all bad below... nothing found so throw ReferenceError or TypeError
    try:
        address = hex(address)
    except TypeError:
        raise TypeError("'%s' is not a valid memory address" % str(address))
    raise ReferenceError("Cannot reference object at '%s'" % address)

@register(ReferenceType)
def save_weakref(pickler, obj):
    # Pickle a weakref around its referent (dead refs unpickle as dead).
    refobj = obj()
    logger.trace(pickler, "R1: %s", obj)
    #refobj = ctypes.pythonapi.PyWeakref_GetObject(obj) # dead returns "None"
    pickler.save_reduce(_create_weakref, (refobj,), obj=obj)
    logger.trace(pickler, "# R1")
    return

@register(ProxyType)
@register(CallableProxyType)
def save_weakproxy(pickler, obj):
    # Pickle a weak proxy by locating its referent in memory via its address.
    # Must do string substitution here and use %r to avoid ReferenceError.
    logger.trace(pickler, "R2: %r" % obj)
    refobj = _locate_object(_proxy_helper(obj))
    pickler.save_reduce(_create_weakproxy, (refobj, callable(obj)), obj=obj)
    logger.trace(pickler, "# R2")
    return

def _is_builtin_module(module):
    # Heuristic: True when the module should be pickled by reference (builtin,
    # stdlib, extension, or site-packages) rather than by value.
    if not hasattr(module, "__file__"): return True
    if module.__file__ is None: return False
    # If a module file name starts with prefix, it should be a builtin
    # module, so should always be pickled as a reference.
    names = ["base_prefix", "base_exec_prefix", "exec_prefix", "prefix", "real_prefix"]
    rp = os.path.realpath
    # See https://github.com/uqfoundation/dill/issues/566
    return (
        any(
            module.__file__.startswith(getattr(sys, name))
            or rp(module.__file__).startswith(rp(getattr(sys, name)))
            for name in names
            if hasattr(sys, name)
        )
        or module.__file__.endswith(EXTENSION_SUFFIXES)
        or 'site-packages' in module.__file__
    )
names = ["base_prefix", "base_exec_prefix", "exec_prefix", "prefix", "real_prefix"] rp = os.path.realpath # See https://github.com/uqfoundation/dill/issues/566 return ( any( module.__file__.startswith(getattr(sys, name)) or rp(module.__file__).startswith(rp(getattr(sys, name))) for name in names if hasattr(sys, name) ) or module.__file__.endswith(EXTENSION_SUFFIXES) or 'site-packages' in module.__file__ ) def _is_imported_module(module): return getattr(module, '__loader__', None) is not None or module in sys.modules.values() @register(ModuleType) def save_module(pickler, obj): if False: #_use_diff: if obj.__name__.split('.', 1)[0] != "dill": try: changed = diff.whats_changed(obj, seen=pickler._diff_cache)[0] except RuntimeError: # not memorised module, probably part of dill pass else: logger.trace(pickler, "M2: %s with diff", obj) logger.info("Diff: %s", changed.keys()) pickler.save_reduce(_import_module, (obj.__name__,), obj=obj, state=changed) logger.trace(pickler, "# M2") return logger.trace(pickler, "M1: %s", obj) pickler.save_reduce(_import_module, (obj.__name__,), obj=obj) logger.trace(pickler, "# M1") else: builtin_mod = _is_builtin_module(obj) is_session_main = is_dill(pickler, child=True) and obj is pickler._main if (obj.__name__ not in ("builtins", "dill", "dill._dill") and not builtin_mod or is_session_main): logger.trace(pickler, "M1: %s", obj) # Hack for handling module-type objects in load_module(). mod_name = obj.__name__ if _is_imported_module(obj) else '__runtime__.%s' % obj.__name__ # Second references are saved as __builtin__.__main__ in save_module_dict(). 
main_dict = obj.__dict__.copy() for item in ('__builtins__', '__loader__'): main_dict.pop(item, None) for item in IPYTHON_SINGLETONS: #pragma: no cover if getattr(main_dict.get(item), '__module__', '').startswith('IPython'): del main_dict[item] pickler.save_reduce(_import_module, (mod_name,), obj=obj, state=main_dict) logger.trace(pickler, "# M1") elif obj.__name__ == "dill._dill": logger.trace(pickler, "M2: %s", obj) pickler.save_global(obj, name="_dill") logger.trace(pickler, "# M2") else: logger.trace(pickler, "M2: %s", obj) pickler.save_reduce(_import_module, (obj.__name__,), obj=obj) logger.trace(pickler, "# M2") return # The following function is based on '_extract_class_dict' from 'cloudpickle' # Copyright (c) 2012, Regents of the University of California. # Copyright (c) 2009 `PiCloud, Inc. `_. # License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE def _get_typedict_type(cls, clsdict, attrs, postproc_list): """Retrieve a copy of the dict of a class without the inherited methods""" if len(cls.__bases__) == 1: inherited_dict = cls.__bases__[0].__dict__ else: inherited_dict = {} for base in reversed(cls.__bases__): inherited_dict.update(base.__dict__) to_remove = [] for name, value in dict.items(clsdict): try: base_value = inherited_dict[name] if value is base_value and hasattr(value, '__qualname__'): to_remove.append(name) except KeyError: pass for name in to_remove: dict.pop(clsdict, name) if issubclass(type(cls), type): clsdict.pop('__dict__', None) clsdict.pop('__weakref__', None) # clsdict.pop('__prepare__', None) return clsdict, attrs def _get_typedict_abc(obj, _dict, attrs, postproc_list): if hasattr(abc, '_get_dump'): (registry, _, _, _) = abc._get_dump(obj) register = obj.register postproc_list.extend((register, (reg(),)) for reg in registry) elif hasattr(obj, '_abc_registry'): registry = obj._abc_registry register = obj.register postproc_list.extend((register, (reg,)) for reg in registry) else: raise PicklingError("Cannot find 
registry of ABC %s", obj) if '_abc_registry' in _dict: _dict.pop('_abc_registry', None) _dict.pop('_abc_cache', None) _dict.pop('_abc_negative_cache', None) # _dict.pop('_abc_negative_cache_version', None) else: _dict.pop('_abc_impl', None) return _dict, attrs @register(TypeType) def save_type(pickler, obj, postproc_list=None): if obj in _typemap: logger.trace(pickler, "T1: %s", obj) # if obj in _incedental_types: # warnings.warn('Type %r may only exist on this implementation of Python and cannot be unpickled in other implementations.' % (obj,), PicklingWarning) pickler.save_reduce(_load_type, (_typemap[obj],), obj=obj) logger.trace(pickler, "# T1") elif obj.__bases__ == (tuple,) and all([hasattr(obj, attr) for attr in ('_fields','_asdict','_make','_replace')]): # special case: namedtuples logger.trace(pickler, "T6: %s", obj) obj_name = getattr(obj, '__qualname__', getattr(obj, '__name__', None)) if obj.__name__ != obj_name: if postproc_list is None: postproc_list = [] postproc_list.append((setattr, (obj, '__qualname__', obj_name))) if not obj._field_defaults: _save_with_postproc(pickler, (_create_namedtuple, (obj.__name__, obj._fields, obj.__module__)), obj=obj, postproc_list=postproc_list) else: defaults = [obj._field_defaults[field] for field in obj._fields if field in obj._field_defaults] _save_with_postproc(pickler, (_create_namedtuple, (obj.__name__, obj._fields, obj.__module__, defaults)), obj=obj, postproc_list=postproc_list) logger.trace(pickler, "# T6") return # special caes: NoneType, NotImplementedType, EllipsisType, EnumMeta, etc elif obj is type(None): logger.trace(pickler, "T7: %s", obj) #XXX: pickler.save_reduce(type, (None,), obj=obj) pickler.write(GLOBAL + b'__builtin__\nNoneType\n') logger.trace(pickler, "# T7") elif obj is NotImplementedType: logger.trace(pickler, "T7: %s", obj) pickler.save_reduce(type, (NotImplemented,), obj=obj) logger.trace(pickler, "# T7") elif obj is EllipsisType: logger.trace(pickler, "T7: %s", obj) 
pickler.save_reduce(type, (Ellipsis,), obj=obj) logger.trace(pickler, "# T7") elif obj is EnumMeta: logger.trace(pickler, "T7: %s", obj) pickler.write(GLOBAL + b'enum\nEnumMeta\n') logger.trace(pickler, "# T7") elif obj is ExceptHookArgsType: #NOTE: must be after NoneType for pypy logger.trace(pickler, "T7: %s", obj) pickler.write(GLOBAL + b'threading\nExceptHookArgs\n') logger.trace(pickler, "# T7") else: _byref = getattr(pickler, '_byref', None) obj_recursive = id(obj) in getattr(pickler, '_postproc', ()) incorrectly_named = not _locate_function(obj, pickler) if not _byref and not obj_recursive and incorrectly_named: # not a function, but the name was held over if postproc_list is None: postproc_list = [] # thanks to Tom Stepleton pointing out pickler._session unneeded logger.trace(pickler, "T2: %s", obj) _dict, attrs = _get_typedict_type(obj, obj.__dict__.copy(), None, postproc_list) # copy dict proxy to a dict #print (_dict) #print ("%s\n%s" % (type(obj), obj.__name__)) #print ("%s\n%s" % (obj.__bases__, obj.__dict__)) slots = _dict.get('__slots__', ()) if type(slots) == str: # __slots__ accepts a single string slots = (slots,) for name in slots: _dict.pop(name, None) if isinstance(obj, abc.ABCMeta): logger.trace(pickler, "ABC: %s", obj) _dict, attrs = _get_typedict_abc(obj, _dict, attrs, postproc_list) logger.trace(pickler, "# ABC") qualname = getattr(obj, '__qualname__', None) if attrs is not None: for k, v in attrs.items(): postproc_list.append((setattr, (obj, k, v))) # TODO: Consider using the state argument to save_reduce? if qualname is not None: postproc_list.append((setattr, (obj, '__qualname__', qualname))) if not hasattr(obj, '__orig_bases__'): _save_with_postproc(pickler, (_create_type, ( type(obj), obj.__name__, obj.__bases__, _dict )), obj=obj, postproc_list=postproc_list) else: # This case will always work, but might be overkill. 
_metadict = { 'metaclass': type(obj) } if _dict: _dict_update = PartialType(_setitems, source=_dict) else: _dict_update = None _save_with_postproc(pickler, (new_class, ( obj.__name__, obj.__orig_bases__, _metadict, _dict_update )), obj=obj, postproc_list=postproc_list) logger.trace(pickler, "# T2") else: obj_name = getattr(obj, '__qualname__', getattr(obj, '__name__', None)) logger.trace(pickler, "T4: %s", obj) if incorrectly_named: warnings.warn( "Cannot locate reference to %r." % (obj,), PicklingWarning, stacklevel=3, ) if obj_recursive: warnings.warn( "Cannot pickle %r: %s.%s has recursive self-references that " "trigger a RecursionError." % (obj, obj.__module__, obj_name), PicklingWarning, stacklevel=3, ) #print (obj.__dict__) #print ("%s\n%s" % (type(obj), obj.__name__)) #print ("%s\n%s" % (obj.__bases__, obj.__dict__)) StockPickler.save_global(pickler, obj, name=obj_name) logger.trace(pickler, "# T4") return @register(property) @register(abc.abstractproperty) def save_property(pickler, obj): logger.trace(pickler, "Pr: %s", obj) pickler.save_reduce(type(obj), (obj.fget, obj.fset, obj.fdel, obj.__doc__), obj=obj) logger.trace(pickler, "# Pr") @register(staticmethod) @register(classmethod) @register(abc.abstractstaticmethod) @register(abc.abstractclassmethod) def save_classmethod(pickler, obj): logger.trace(pickler, "Cm: %s", obj) orig_func = obj.__func__ # if type(obj.__dict__) is dict: # if obj.__dict__: # state = obj.__dict__ # else: # state = None # else: # state = (None, {'__dict__', obj.__dict__}) pickler.save_reduce(type(obj), (orig_func,), obj=obj) logger.trace(pickler, "# Cm") @register(FunctionType) def save_function(pickler, obj): if not _locate_function(obj, pickler): if type(obj.__code__) is not CodeType: # Some PyPy builtin functions have no module name, and thus are not # able to be located module_name = getattr(obj, '__module__', None) if module_name is None: module_name = __builtin__.__name__ module = _import_module(module_name, safe=True) 
_pypy_builtin = False try: found, _ = _getattribute(module, obj.__qualname__) if getattr(found, '__func__', None) is obj: _pypy_builtin = True except AttributeError: pass if _pypy_builtin: logger.trace(pickler, "F3: %s", obj) pickler.save_reduce(getattr, (found, '__func__'), obj=obj) logger.trace(pickler, "# F3") return logger.trace(pickler, "F1: %s", obj) _recurse = getattr(pickler, '_recurse', None) _postproc = getattr(pickler, '_postproc', None) _main_modified = getattr(pickler, '_main_modified', None) _original_main = getattr(pickler, '_original_main', __builtin__)#'None' postproc_list = [] if _recurse: # recurse to get all globals referred to by obj from .detect import globalvars globs_copy = globalvars(obj, recurse=True, builtin=True) # Add the name of the module to the globs dictionary to prevent # the duplication of the dictionary. Pickle the unpopulated # globals dictionary and set the remaining items after the function # is created to correctly handle recursion. globs = {'__name__': obj.__module__} else: globs_copy = obj.__globals__ # If the globals is the __dict__ from the module being saved as a # session, substitute it by the dictionary being actually saved. if _main_modified and globs_copy is _original_main.__dict__: globs_copy = getattr(pickler, '_main', _original_main).__dict__ globs = globs_copy # If the globals is a module __dict__, do not save it in the pickle. elif globs_copy is not None and obj.__module__ is not None and \ getattr(_import_module(obj.__module__, True), '__dict__', None) is globs_copy: globs = globs_copy else: globs = {'__name__': obj.__module__} if globs_copy is not None and globs is not globs_copy: # In the case that the globals are copied, we need to ensure that # the globals dictionary is updated when all objects in the # dictionary are already created. 
glob_ids = {id(g) for g in globs_copy.values()} for stack_element in _postproc: if stack_element in glob_ids: _postproc[stack_element].append((_setitems, (globs, globs_copy))) break else: postproc_list.append((_setitems, (globs, globs_copy))) closure = obj.__closure__ state_dict = {} for fattrname in ('__doc__', '__kwdefaults__', '__annotations__'): fattr = getattr(obj, fattrname, None) if fattr is not None: state_dict[fattrname] = fattr if obj.__qualname__ != obj.__name__: state_dict['__qualname__'] = obj.__qualname__ if '__name__' not in globs or obj.__module__ != globs['__name__']: state_dict['__module__'] = obj.__module__ state = obj.__dict__ if type(state) is not dict: state_dict['__dict__'] = state state = None if state_dict: state = state, state_dict _save_with_postproc(pickler, (_create_function, ( obj.__code__, globs, obj.__name__, obj.__defaults__, closure ), state), obj=obj, postproc_list=postproc_list) # Lift closure cell update to earliest function (#458) if _postproc: topmost_postproc = next(iter(_postproc.values()), None) if closure and topmost_postproc: for cell in closure: possible_postproc = (setattr, (cell, 'cell_contents', obj)) try: topmost_postproc.remove(possible_postproc) except ValueError: continue # Change the value of the cell pickler.save_reduce(*possible_postproc) # pop None created by calling preprocessing step off stack pickler.write(POP) logger.trace(pickler, "# F1") else: logger.trace(pickler, "F2: %s", obj) name = getattr(obj, '__qualname__', getattr(obj, '__name__', None)) StockPickler.save_global(pickler, obj, name=name) logger.trace(pickler, "# F2") return if HAS_CTYPES and hasattr(ctypes, 'pythonapi'): _PyCapsule_New = ctypes.pythonapi.PyCapsule_New _PyCapsule_New.argtypes = (ctypes.c_void_p, ctypes.c_char_p, ctypes.c_void_p) _PyCapsule_New.restype = ctypes.py_object _PyCapsule_GetPointer = ctypes.pythonapi.PyCapsule_GetPointer _PyCapsule_GetPointer.argtypes = (ctypes.py_object, ctypes.c_char_p) _PyCapsule_GetPointer.restype = 
ctypes.c_void_p _PyCapsule_GetDestructor = ctypes.pythonapi.PyCapsule_GetDestructor _PyCapsule_GetDestructor.argtypes = (ctypes.py_object,) _PyCapsule_GetDestructor.restype = ctypes.c_void_p _PyCapsule_GetContext = ctypes.pythonapi.PyCapsule_GetContext _PyCapsule_GetContext.argtypes = (ctypes.py_object,) _PyCapsule_GetContext.restype = ctypes.c_void_p _PyCapsule_GetName = ctypes.pythonapi.PyCapsule_GetName _PyCapsule_GetName.argtypes = (ctypes.py_object,) _PyCapsule_GetName.restype = ctypes.c_char_p _PyCapsule_IsValid = ctypes.pythonapi.PyCapsule_IsValid _PyCapsule_IsValid.argtypes = (ctypes.py_object, ctypes.c_char_p) _PyCapsule_IsValid.restype = ctypes.c_bool _PyCapsule_SetContext = ctypes.pythonapi.PyCapsule_SetContext _PyCapsule_SetContext.argtypes = (ctypes.py_object, ctypes.c_void_p) _PyCapsule_SetDestructor = ctypes.pythonapi.PyCapsule_SetDestructor _PyCapsule_SetDestructor.argtypes = (ctypes.py_object, ctypes.c_void_p) _PyCapsule_SetName = ctypes.pythonapi.PyCapsule_SetName _PyCapsule_SetName.argtypes = (ctypes.py_object, ctypes.c_char_p) _PyCapsule_SetPointer = ctypes.pythonapi.PyCapsule_SetPointer _PyCapsule_SetPointer.argtypes = (ctypes.py_object, ctypes.c_void_p) #from _socket import CAPI as _testcapsule _testcapsule_name = b'dill._dill._testcapsule' _testcapsule = _PyCapsule_New( ctypes.cast(_PyCapsule_New, ctypes.c_void_p), ctypes.c_char_p(_testcapsule_name), None ) PyCapsuleType = type(_testcapsule) @register(PyCapsuleType) def save_capsule(pickler, obj): logger.trace(pickler, "Cap: %s", obj) name = _PyCapsule_GetName(obj) #warnings.warn('Pickling a PyCapsule (%s) does not pickle any C data structures and could cause segmentation faults or other memory errors when unpickling.' 
def pickles(obj,exact=False,safe=False,**kwds):
    """
    Quick check if object pickles with dill.

    If *exact=True* then an equality test is done to check if the reconstructed
    object matches the original object.

    If *safe=True* then any exception raised in copy will signal that the
    object is not picklable, otherwise only pickling errors will be trapped.

    Additional keyword arguments are as :func:`dumps` and :func:`loads`.
    """
    if safe: exceptions = (Exception,) # RuntimeError, ValueError
    else:
        exceptions = (TypeError, AssertionError, NotImplementedError, PicklingError, UnpicklingError)
    try:
        # Round-trip through dill (dumps then loads) and compare.
        pik = copy(obj, **kwds)
        #FIXME: should check types match first, then check content if "exact"
        try:
            #FIXME: should be "(pik == obj).all()" for numpy comparison, though that'll fail if shapes differ
            result = bool(pik.all() == obj.all())
        except (AttributeError, TypeError):
            # Plain equality; suppress warnings some comparisons may emit.
            warnings.filterwarnings('ignore') #FIXME: be specific
            result = pik == obj
            if warnings.filters: del warnings.filters[0]
        if hasattr(result, 'toarray'): # for unusual types like sparse matrix
            result = result.toarray().all()
        if result: return True
        if not exact:
            # Fall back to a type-level comparison when values differ.
            result = type(pik) == type(obj)
            if result: return result
            # class instances might have been dumped with byref=False
            return repr(type(pik)) == repr(type(obj)) #XXX: InstanceType?
        return False
    except exceptions:
        return False
""" # == undocumented == # python -- the string path or executable name of the selected python # verbose -- if True, be verbose about printing warning messages # all other args and kwds are passed to dill.dumps #FIXME: ignore on load verbose = kwds.pop('verbose', False) python = kwds.pop('python', None) if python is None: import sys python = sys.executable # type check isinstance(python, str) import subprocess fail = True try: _obj = dumps(obj, *args, **kwds) fail = False finally: if fail and verbose: print("DUMP FAILED") #FIXME: fails if python interpreter path contains spaces # Use the following instead (which also processes the 'ignore' keyword): # ignore = kwds.pop('ignore', None) # unpickle = "dill.loads(%s, ignore=%s)"%(repr(_obj), repr(ignore)) # cmd = [python, "-c", "import dill; print(%s)"%unpickle] # msg = "SUCCESS" if not subprocess.call(cmd) else "LOAD FAILED" msg = "%s -c import dill; print(dill.loads(%s))" % (python, repr(_obj)) msg = "SUCCESS" if not subprocess.call(msg.split(None,2)) else "LOAD FAILED" if verbose: print(msg) return # use to protect against missing attributes def is_dill(pickler, child=None): "check the dill-ness of your pickler" if child is False or not hasattr(pickler.__class__, 'mro'): return 'dill' in pickler.__module__ return Pickler in pickler.__class__.mro() def _extend(): """extend pickle with all of dill's registered types""" # need to have pickle not choke on _main_module? 
use is_dill(pickler) for t,func in Pickler.dispatch.items(): try: StockPickler.dispatch[t] = func except Exception: #TypeError, PicklingError, UnpicklingError logger.trace(pickler, "skip: %s", t) return del diff, _use_diff, use_diff # EOF ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1739768803.0 dill-0.4.0/dill/_objects.py0000644000076500000240000004643414754541743015320 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2008-2016 California Institute of Technology. # Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE """ all Python Standard Library objects (currently: CH 1-15 @ 2.7) and some other common objects (i.e. numpy.ndarray) """ __all__ = ['registered','failures','succeeds'] # helper imports import warnings; warnings.filterwarnings("ignore", category=DeprecationWarning) import sys import queue as Queue #import dbm as anydbm #XXX: delete foo from io import BytesIO as StringIO import re import array import collections import codecs import struct import dataclasses import datetime import calendar import weakref import pprint import decimal import numbers import functools import itertools import operator import tempfile import shelve import zlib import gzip import zipfile import tarfile import csv import hashlib import hmac import os import logging import logging.handlers import optparse #import __hello__ import threading import socket import contextlib try: import bz2 import sqlite3 import dbm.ndbm as dbm HAS_ALL = True except ImportError: # Ubuntu HAS_ALL = False try: #import curses #from curses import textpad, panel HAS_CURSES = True except ImportError: # Windows HAS_CURSES = False try: import ctypes HAS_CTYPES = True # if using `pypy`, pythonapi is not found IS_PYPY = not hasattr(ctypes, 'pythonapi') except 
ImportError: # MacPorts HAS_CTYPES = False IS_PYPY = False IS_PYODIDE = sys.platform == 'emscripten' # helper objects class _class: def _method(self): pass # @classmethod # def _clsmethod(cls): #XXX: test me # pass # @staticmethod # def _static(self): #XXX: test me # pass class _class2: def __call__(self): pass _instance2 = _class2() class _newclass(object): def _method(self): pass # @classmethod # def _clsmethod(cls): #XXX: test me # pass # @staticmethod # def _static(self): #XXX: test me # pass class _newclass2(object): __slots__ = ['descriptor'] def _function(x): yield x def _function2(): try: raise except Exception: from sys import exc_info e, er, tb = exc_info() return er, tb if HAS_CTYPES: class _Struct(ctypes.Structure): pass _Struct._fields_ = [("_field", ctypes.c_int),("next", ctypes.POINTER(_Struct))] _filedescrip, _tempfile = tempfile.mkstemp('r') # deleted in cleanup if sys.hexversion < 0x30d00a1: _tmpf = tempfile.TemporaryFile('w') # emits OSError 9 in python 3.13 else: _tmpf = tempfile.NamedTemporaryFile('w').file # for > python 3.9 # objects used by dill for type declaration registered = d = {} # objects dill fails to pickle failures = x = {} # all other type objects succeeds = a = {} # types module (part of CH 8) a['BooleanType'] = bool(1) a['BuiltinFunctionType'] = len a['BuiltinMethodType'] = a['BuiltinFunctionType'] a['BytesType'] = _bytes = codecs.latin_1_encode('\x00')[0] # bytes(1) a['ClassType'] = _class a['ComplexType'] = complex(1) a['DictType'] = _dict = {} a['DictionaryType'] = a['DictType'] a['FloatType'] = float(1) a['FunctionType'] = _function a['InstanceType'] = _instance = _class() a['IntType'] = _int = int(1) a['ListType'] = _list = [] a['NoneType'] = None a['ObjectType'] = object() a['StringType'] = _str = str(1) a['TupleType'] = _tuple = () a['TypeType'] = type a['LongType'] = _int a['UnicodeType'] = _str # built-in constants (CH 4) a['CopyrightType'] = copyright # built-in types (CH 5) a['ClassObjectType'] = _newclass # 
a['ClassInstanceType'] = _newclass() # a['SetType'] = _set = set() a['FrozenSetType'] = frozenset() # built-in exceptions (CH 6) a['ExceptionType'] = _exception = _function2()[0] # string services (CH 7) a['SREPatternType'] = _srepattern = re.compile('') # data types (CH 8) a['ArrayType'] = array.array("f") a['DequeType'] = collections.deque([0]) a['DefaultDictType'] = collections.defaultdict(_function, _dict) a['TZInfoType'] = datetime.tzinfo() a['DateTimeType'] = datetime.datetime.today() a['CalendarType'] = calendar.Calendar() # numeric and mathematical types (CH 9) a['DecimalType'] = decimal.Decimal(1) # data compression and archiving (CH 12) a['TarInfoType'] = tarfile.TarInfo() # generic operating system services (CH 15) a['LoggerType'] = _logger = logging.getLogger() a['FormatterType'] = logging.Formatter() # pickle ok a['FilterType'] = logging.Filter() # pickle ok a['LogRecordType'] = logging.makeLogRecord(_dict) # pickle ok a['OptionParserType'] = _oparser = optparse.OptionParser() # pickle ok a['OptionGroupType'] = optparse.OptionGroup(_oparser,"foo") # pickle ok a['OptionType'] = optparse.Option('--foo') # pickle ok if HAS_CTYPES: z = x if IS_PYPY else a z['CCharType'] = _cchar = ctypes.c_char() z['CWCharType'] = ctypes.c_wchar() # fail == 2.6 z['CByteType'] = ctypes.c_byte() z['CUByteType'] = ctypes.c_ubyte() z['CShortType'] = ctypes.c_short() z['CUShortType'] = ctypes.c_ushort() z['CIntType'] = ctypes.c_int() z['CUIntType'] = ctypes.c_uint() z['CLongType'] = ctypes.c_long() z['CULongType'] = ctypes.c_ulong() z['CLongLongType'] = ctypes.c_longlong() z['CULongLongType'] = ctypes.c_ulonglong() z['CFloatType'] = ctypes.c_float() z['CDoubleType'] = ctypes.c_double() z['CSizeTType'] = ctypes.c_size_t() del z a['CLibraryLoaderType'] = ctypes.cdll a['StructureType'] = _Struct # if not IS_PYPY: # a['BigEndianStructureType'] = ctypes.BigEndianStructure() #NOTE: also LittleEndianStructureType and UnionType... 
abstract classes #NOTE: remember for ctypesobj.contents creates a new python object #NOTE: ctypes.c_int._objects is memberdescriptor for object's __dict__ #NOTE: base class of all ctypes data types is non-public _CData import fractions import io from io import StringIO as TextIO # built-in functions (CH 2) a['ByteArrayType'] = bytearray([1]) # numeric and mathematical types (CH 9) a['FractionType'] = fractions.Fraction() a['NumberType'] = numbers.Number() # generic operating system services (CH 15) a['IOBaseType'] = io.IOBase() a['RawIOBaseType'] = io.RawIOBase() a['TextIOBaseType'] = io.TextIOBase() a['BufferedIOBaseType'] = io.BufferedIOBase() a['UnicodeIOType'] = TextIO() # the new StringIO a['LoggerAdapterType'] = logging.LoggerAdapter(_logger,_dict) # pickle ok if HAS_CTYPES: z = x if IS_PYPY else a z['CBoolType'] = ctypes.c_bool(1) z['CLongDoubleType'] = ctypes.c_longdouble() del z import argparse # data types (CH 8) a['OrderedDictType'] = collections.OrderedDict(_dict) a['CounterType'] = collections.Counter(_dict) if HAS_CTYPES: z = x if IS_PYPY else a z['CSSizeTType'] = ctypes.c_ssize_t() del z # generic operating system services (CH 15) a['NullHandlerType'] = logging.NullHandler() # pickle ok # new 2.7 a['ArgParseFileType'] = argparse.FileType() # pickle ok # -- pickle fails on all below here ----------------------------------------- # types module (part of CH 8) a['CodeType'] = compile('','','exec') a['DictProxyType'] = type.__dict__ a['DictProxyType2'] = _newclass.__dict__ a['EllipsisType'] = Ellipsis a['ClosedFileType'] = open(os.devnull, 'wb', buffering=0).close() a['GetSetDescriptorType'] = array.array.typecode a['LambdaType'] = _lambda = lambda x: lambda y: x #XXX: works when not imported! a['MemberDescriptorType'] = _newclass2.descriptor if not IS_PYPY: a['MemberDescriptorType2'] = datetime.timedelta.days a['MethodType'] = _method = _class()._method #XXX: works when not imported! 
a['ModuleType'] = datetime a['NotImplementedType'] = NotImplemented a['SliceType'] = slice(1) a['UnboundMethodType'] = _class._method #XXX: works when not imported! d['TextWrapperType'] = open(os.devnull, 'r') # same as mode='w','w+','r+' if not IS_PYODIDE: d['BufferedRandomType'] = open(os.devnull, 'r+b') # same as mode='w+b' d['BufferedReaderType'] = open(os.devnull, 'rb') # (default: buffering=-1) d['BufferedWriterType'] = open(os.devnull, 'wb') try: # oddities: deprecated from _pyio import open as _open d['PyTextWrapperType'] = _open(os.devnull, 'r', buffering=-1) if not IS_PYODIDE: d['PyBufferedRandomType'] = _open(os.devnull, 'r+b', buffering=-1) d['PyBufferedReaderType'] = _open(os.devnull, 'rb', buffering=-1) d['PyBufferedWriterType'] = _open(os.devnull, 'wb', buffering=-1) except ImportError: pass # other (concrete) object types z = d if sys.hexversion < 0x30800a2 else a z['CellType'] = (_lambda)(0).__closure__[0] del z a['XRangeType'] = _xrange = range(1) a['MethodDescriptorType'] = type.__dict__['mro'] a['WrapperDescriptorType'] = type.__repr__ #a['WrapperDescriptorType2'] = type.__dict__['__module__']#XXX: GetSetDescriptor a['ClassMethodDescriptorType'] = type.__dict__['__prepare__'] # built-in functions (CH 2) _methodwrap = (1).__lt__ a['MethodWrapperType'] = _methodwrap a['StaticMethodType'] = staticmethod(_method) a['ClassMethodType'] = classmethod(_method) a['PropertyType'] = property() d['SuperType'] = super(Exception, _exception) # string services (CH 7) _in = _bytes a['InputType'] = _cstrI = StringIO(_in) a['OutputType'] = _cstrO = StringIO() # data types (CH 8) a['WeakKeyDictionaryType'] = weakref.WeakKeyDictionary() a['WeakValueDictionaryType'] = weakref.WeakValueDictionary() a['ReferenceType'] = weakref.ref(_instance) a['DeadReferenceType'] = weakref.ref(_class()) a['ProxyType'] = weakref.proxy(_instance) a['DeadProxyType'] = weakref.proxy(_class()) a['CallableProxyType'] = weakref.proxy(_instance2) a['DeadCallableProxyType'] = 
weakref.proxy(_class2()) a['QueueType'] = Queue.Queue() # numeric and mathematical types (CH 9) d['PartialType'] = functools.partial(int,base=2) a['IzipType'] = zip('0','1') d['ItemGetterType'] = operator.itemgetter(0) d['AttrGetterType'] = operator.attrgetter('__repr__') # file and directory access (CH 10) _fileW = _cstrO # data persistence (CH 11) if HAS_ALL: x['ConnectionType'] = _conn = sqlite3.connect(':memory:') x['CursorType'] = _conn.cursor() a['ShelveType'] = shelve.Shelf({}) # data compression and archiving (CH 12) if HAS_ALL: x['BZ2FileType'] = bz2.BZ2File(os.devnull) x['BZ2CompressorType'] = bz2.BZ2Compressor() x['BZ2DecompressorType'] = bz2.BZ2Decompressor() #x['ZipFileType'] = _zip = zipfile.ZipFile(os.devnull,'w') #_zip.write(_tempfile,'x') [causes annoying warning/error printed on import] #a['ZipInfoType'] = _zip.getinfo('x') a['TarFileType'] = tarfile.open(fileobj=_fileW,mode='w') # file formats (CH 13) x['DialectType'] = csv.get_dialect('excel') if sys.hexversion < 0x30d00a1: import xdrlib a['PackerType'] = xdrlib.Packer() # optional operating system services (CH 16) a['LockType'] = threading.Lock() a['RLockType'] = threading.RLock() # generic operating system services (CH 15) # also closed/open and r/w/etc... a['NamedLoggerType'] = _logger = logging.getLogger(__name__) #a['FrozenModuleType'] = __hello__ #FIXME: prints "Hello world..." # interprocess communication (CH 17) x['SocketType'] = _socket = socket.socket() x['SocketPairType'] = socket.socketpair()[0] # python runtime services (CH 27) a['GeneratorContextManagerType'] = contextlib.contextmanager(max)([1]) try: # ipython __IPYTHON__ is True # is ipython except NameError: # built-in constants (CH 4) a['QuitterType'] = quit d['ExitType'] = a['QuitterType'] try: # numpy #FIXME: slow... 
0.05 to 0.1 sec to import numpy from numpy import ufunc as _numpy_ufunc from numpy import array as _numpy_array from numpy import int32 as _numpy_int32 a['NumpyUfuncType'] = _numpy_ufunc a['NumpyArrayType'] = _numpy_array a['NumpyInt32Type'] = _numpy_int32 except ImportError: pass # generic operating system services (CH 15) a['FileHandlerType'] = logging.FileHandler(os.devnull) a['RotatingFileHandlerType'] = logging.handlers.RotatingFileHandler(os.devnull) a['SocketHandlerType'] = logging.handlers.SocketHandler('localhost',514) a['MemoryHandlerType'] = logging.handlers.MemoryHandler(1) # data types (CH 8) a['WeakSetType'] = weakref.WeakSet() # 2.7 # generic operating system services (CH 15) [errors when dill is imported] #a['ArgumentParserType'] = _parser = argparse.ArgumentParser('PROG') #a['NamespaceType'] = _parser.parse_args() # pickle ok #a['SubParsersActionType'] = _parser.add_subparsers() #a['MutuallyExclusiveGroupType'] = _parser.add_mutually_exclusive_group() #a['ArgumentGroupType'] = _parser.add_argument_group() # -- dill fails in some versions below here --------------------------------- # types module (part of CH 8) d['FileType'] = open(os.devnull, 'rb', buffering=0) # same 'wb','wb+','rb+' # built-in functions (CH 2) # Iterators: a['ListIteratorType'] = iter(_list) # empty vs non-empty a['SetIteratorType'] = iter(_set) #XXX: empty vs non-empty #FIXME: list_iterator a['TupleIteratorType']= iter(_tuple) # empty vs non-empty a['XRangeIteratorType'] = iter(_xrange) # empty vs non-empty a["BytesIteratorType"] = iter(b'') a["BytearrayIteratorType"] = iter(bytearray(b'')) z = x if IS_PYPY else a z["CallableIteratorType"] = iter(iter, None) del z x["MemoryIteratorType"] = iter(memoryview(b'')) a["ListReverseiteratorType"] = reversed([]) X = a['OrderedDictType'] d["OdictKeysType"] = X.keys() d["OdictValuesType"] = X.values() d["OdictItemsType"] = X.items() a["OdictIteratorType"] = iter(X.keys()) #FIXME: list_iterator del X #FIXME: list_iterator 
a['DictionaryItemIteratorType'] = iter(type.__dict__.items()) a['DictionaryKeyIteratorType'] = iter(type.__dict__.keys()) a['DictionaryValueIteratorType'] = iter(type.__dict__.values()) if sys.hexversion >= 0x30800a0: a["DictReversekeyiteratorType"] = reversed({}.keys()) a["DictReversevalueiteratorType"] = reversed({}.values()) a["DictReverseitemiteratorType"] = reversed({}.items()) try: import symtable #FIXME: fails to pickle x["SymtableEntryType"] = symtable.symtable("", "string", "exec")._table except ImportError: pass if sys.hexversion >= 0x30a00a0 and not IS_PYPY: x['LineIteratorType'] = compile('3', '', 'eval').co_lines() if sys.hexversion >= 0x30b00b0 and not IS_PYPY: from types import GenericAlias d["GenericAliasIteratorType"] = iter(GenericAlias(list, (int,))) x['PositionsIteratorType'] = compile('3', '', 'eval').co_positions() # data types (CH 8) a['PrettyPrinterType'] = pprint.PrettyPrinter() # file and directory access (CH 10) a['TemporaryFileType'] = _tmpf # data compression and archiving (CH 12) x['GzipFileType'] = gzip.GzipFile(fileobj=_fileW) # generic operating system services (CH 15) a['StreamHandlerType'] = logging.StreamHandler() # numeric and mathematical types (CH 9) z = a if sys.hexversion < 0x30e00a1 else x z['CountType'] = itertools.count(0) #FIXME: __reduce__ removed in 3.14.0a1 z['ChainType'] = itertools.chain('0','1') z['ProductType'] = itertools.product('0','1') z['CycleType'] = itertools.cycle('0') z['PermutationsType'] = itertools.permutations('0') z['CombinationsType'] = itertools.combinations('0',1) z['RepeatType'] = itertools.repeat(0) z['CompressType'] = itertools.compress('0',[1]) del z #XXX: ...and etc # -- dill fails on all below here ------------------------------------------- # types module (part of CH 8) x['GeneratorType'] = _generator = _function(1) #XXX: priority x['FrameType'] = _generator.gi_frame #XXX: inspect.currentframe() x['TracebackType'] = _function2()[1] #(see: inspect.getouterframes,getframeinfo) # other 
(concrete) object types # (also: Capsule / CObject ?) # built-in functions (CH 2) # built-in types (CH 5) # string services (CH 7) x['StructType'] = struct.Struct('c') x['CallableIteratorType'] = _srepattern.finditer('') x['SREMatchType'] = _srepattern.match('') x['SREScannerType'] = _srepattern.scanner('') x['StreamReader'] = codecs.StreamReader(_cstrI) #XXX: ... and etc # python object persistence (CH 11) # x['DbShelveType'] = shelve.open('foo','n')#,protocol=2) #XXX: delete foo if HAS_ALL: z = a if IS_PYPY else x z['DbmType'] = dbm.open(_tempfile,'n') del z # x['DbCursorType'] = _dbcursor = anydbm.open('foo','n') #XXX: delete foo # x['DbType'] = _dbcursor.db # data compression and archiving (CH 12) x['ZlibCompressType'] = zlib.compressobj() x['ZlibDecompressType'] = zlib.decompressobj() # file formats (CH 13) x['CSVReaderType'] = csv.reader(_cstrI) x['CSVWriterType'] = csv.writer(_cstrO) x['CSVDictReaderType'] = csv.DictReader(_cstrI) x['CSVDictWriterType'] = csv.DictWriter(_cstrO,{}) # cryptographic services (CH 14) x['HashType'] = hashlib.md5() if (sys.hexversion < 0x30800a1): x['HMACType'] = hmac.new(_in) else: x['HMACType'] = hmac.new(_in, digestmod='md5') # generic operating system services (CH 15) if HAS_CURSES: pass #x['CursesWindowType'] = _curwin = curses.initscr() #FIXME: messes up tty #x['CursesTextPadType'] = textpad.Textbox(_curwin) #x['CursesPanelType'] = panel.new_panel(_curwin) if HAS_CTYPES: x['CCharPType'] = ctypes.c_char_p() x['CWCharPType'] = ctypes.c_wchar_p() x['CVoidPType'] = ctypes.c_void_p() if sys.platform[:3] == 'win': x['CDLLType'] = _cdll = ctypes.cdll.msvcrt else: x['CDLLType'] = _cdll = ctypes.CDLL(None) if not IS_PYPY: x['PyDLLType'] = _pydll = ctypes.pythonapi x['FuncPtrType'] = _cdll._FuncPtr() x['CCharArrayType'] = ctypes.create_string_buffer(1) x['CWCharArrayType'] = ctypes.create_unicode_buffer(1) x['CParamType'] = ctypes.byref(_cchar) x['LPCCharType'] = ctypes.pointer(_cchar) x['LPCCharObjType'] = _lpchar = 
ctypes.POINTER(ctypes.c_char) x['NullPtrType'] = _lpchar() x['NullPyObjectType'] = ctypes.py_object() x['PyObjectType'] = ctypes.py_object(lambda :None) z = a if IS_PYPY else x z['FieldType'] = _field = _Struct._field z['CFUNCTYPEType'] = _cfunc = ctypes.CFUNCTYPE(ctypes.c_char) if sys.hexversion < 0x30c00b3: x['CFunctionType'] = _cfunc(str) del z # numeric and mathematical types (CH 9) a['MethodCallerType'] = operator.methodcaller('mro') # 2.6 # built-in types (CH 5) x['MemoryType'] = memoryview(_in) # 2.7 x['MemoryType2'] = memoryview(bytearray(_in)) # 2.7 d['DictItemsType'] = _dict.items() # 2.7 d['DictKeysType'] = _dict.keys() # 2.7 d['DictValuesType'] = _dict.values() # 2.7 # generic operating system services (CH 15) a['RawTextHelpFormatterType'] = argparse.RawTextHelpFormatter('PROG') a['RawDescriptionHelpFormatterType'] = argparse.RawDescriptionHelpFormatter('PROG') a['ArgDefaultsHelpFormatterType'] = argparse.ArgumentDefaultsHelpFormatter('PROG') z = a if IS_PYPY else x z['CmpKeyType'] = _cmpkey = functools.cmp_to_key(_methodwrap) # 2.7, >=3.2 z['CmpKeyObjType'] = _cmpkey('0') #2.7, >=3.2 del z # oddities: removed, etc x['BufferType'] = x['MemoryType'] from dill._dill import _testcapsule if _testcapsule is not None: d['PyCapsuleType'] = _testcapsule del _testcapsule if hasattr(dataclasses, '_HAS_DEFAULT_FACTORY'): a['DataclassesHasDefaultFactoryType'] = dataclasses._HAS_DEFAULT_FACTORY if hasattr(dataclasses, 'MISSING'): a['DataclassesMissingType'] = dataclasses.MISSING if hasattr(dataclasses, 'KW_ONLY'): a['DataclassesKWOnlyType'] = dataclasses.KW_ONLY if hasattr(dataclasses, '_FIELD_BASE'): a['DataclassesFieldBaseType'] = dataclasses._FIELD # -- cleanup ---------------------------------------------------------------- a.update(d) # registered also succeed if sys.platform[:3] == 'win': os.close(_filedescrip) # required on win32 os.remove(_tempfile) # EOF ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702238.0 
dill-0.4.0/dill/_shims.py0000644000076500000240000001475314735133336015003 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Author: Anirudh Vegesana (avegesan@cs.stanford.edu) # Copyright (c) 2021-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE """ Provides shims for compatibility between versions of dill and Python. Compatibility shims should be provided in this file. Here are two simple example use cases. Deprecation of constructor function: ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Assume that we were transitioning _import_module in _dill.py to the builtin function importlib.import_module when present. @move_to(_dill) def _import_module(import_name): ... # code already in _dill.py _import_module = Getattr(importlib, 'import_module', Getattr(_dill, '_import_module', None)) The code will attempt to find import_module in the importlib module. If not present, it will use the _import_module function in _dill. Emulate new Python behavior in older Python versions: ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ CellType.cell_contents behaves differently in Python 3.6 and 3.7. It is read-only in Python 3.6 and writable and deletable in 3.7. if _dill.OLD37 and _dill.HAS_CTYPES and ...: @move_to(_dill) def _setattr(object, name, value): if type(object) is _dill.CellType and name == 'cell_contents': _PyCell_Set.argtypes = (ctypes.py_object, ctypes.py_object) _PyCell_Set(object, value) else: setattr(object, name, value) ... # more cases below _setattr = Getattr(_dill, '_setattr', setattr) _dill._setattr will be used when present to emulate Python 3.7 functionality in older versions of Python while defaulting to the standard setattr in 3.7+. 
See this PR for the discussion that lead to this system: https://github.com/uqfoundation/dill/pull/443 """ import inspect import sys _dill = sys.modules['dill._dill'] class Reduce(object): """ Reduce objects are wrappers used for compatibility enforcement during unpickle-time. They should only be used in calls to pickler.save and other Reduce objects. They are only evaluated within unpickler.load. Pickling a Reduce object makes the two implementations equivalent: pickler.save(Reduce(*reduction)) pickler.save_reduce(*reduction, obj=reduction) """ __slots__ = ['reduction'] def __new__(cls, *reduction, **kwargs): """ Args: *reduction: a tuple that matches the format given here: https://docs.python.org/3/library/pickle.html#object.__reduce__ is_callable: a bool to indicate that the object created by unpickling `reduction` is callable. If true, the current Reduce is allowed to be used as the function in further save_reduce calls or Reduce objects. """ is_callable = kwargs.get('is_callable', False) # Pleases Py2. Can be removed later if is_callable: self = object.__new__(_CallableReduce) else: self = object.__new__(Reduce) self.reduction = reduction return self def __repr__(self): return 'Reduce%s' % (self.reduction,) def __copy__(self): return self # pragma: no cover def __deepcopy__(self, memo): return self # pragma: no cover def __reduce__(self): return self.reduction def __reduce_ex__(self, protocol): return self.__reduce__() class _CallableReduce(Reduce): # A version of Reduce for functions. Used to trick pickler.save_reduce into # thinking that Reduce objects of functions are themselves meaningful functions. def __call__(self, *args, **kwargs): reduction = self.__reduce__() func = reduction[0] f_args = reduction[1] obj = func(*f_args) return obj(*args, **kwargs) __NO_DEFAULT = _dill.Sentinel('Getattr.NO_DEFAULT') def Getattr(object, name, default=__NO_DEFAULT): """ A Reduce object that represents the getattr operation. 
When unpickled, the Getattr will access an attribute 'name' of 'object' and return the value stored there. If the attribute doesn't exist, the default value will be returned if present. The following statements are equivalent: Getattr(collections, 'OrderedDict') Getattr(collections, 'spam', None) Getattr(*args) Reduce(getattr, (collections, 'OrderedDict')) Reduce(getattr, (collections, 'spam', None)) Reduce(getattr, args) During unpickling, the first two will result in collections.OrderedDict and None respectively because the first attribute exists and the second one does not, forcing it to use the default value given in the third argument. """ if default is Getattr.NO_DEFAULT: reduction = (getattr, (object, name)) else: reduction = (getattr, (object, name, default)) return Reduce(*reduction, is_callable=callable(default)) Getattr.NO_DEFAULT = __NO_DEFAULT del __NO_DEFAULT def move_to(module, name=None): def decorator(func): if name is None: fname = func.__name__ else: fname = name module.__dict__[fname] = func func.__module__ = module.__name__ return func return decorator def register_shim(name, default): """ A easier to understand and more compact way of "softly" defining a function. These two pieces of code are equivalent: if _dill.OLD3X: def _create_class(): ... _create_class = register_shim('_create_class', types.new_class) if _dill.OLD3X: @move_to(_dill) def _create_class(): ... _create_class = Getattr(_dill, '_create_class', types.new_class) Intuitively, it creates a function or object in the versions of dill/python that require special reimplementations, and use a core library or default implementation if that function or object does not exist. 
""" func = globals().get(name) if func is not None: _dill.__dict__[name] = func func.__module__ = _dill.__name__ if default is Getattr.NO_DEFAULT: reduction = (getattr, (_dill, name)) else: reduction = (getattr, (_dill, name, default)) return Reduce(*reduction, is_callable=callable(default)) ###################### ## Compatibility Shims are defined below ###################### _CELL_EMPTY = register_shim('_CELL_EMPTY', None) _setattr = register_shim('_setattr', setattr) _delattr = register_shim('_delattr', delattr) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1739768803.0 dill-0.4.0/dill/detect.py0000644000076500000240000002570714754541743015000 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2008-2016 California Institute of Technology. # Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE """ Methods for detecting objects leading to pickling failures. """ import dis from inspect import ismethod, isfunction, istraceback, isframe, iscode from .pointers import parent, reference, at, parents, children from .logger import trace __all__ = ['baditems','badobjects','badtypes','code','errors','freevars', 'getmodule','globalvars','nestedcode','nestedglobals','outermost', 'referredglobals','referrednested','trace','varnames'] def getmodule(object, _filename=None, force=False): """get the module of the object""" from inspect import getmodule as getmod module = getmod(object, _filename) if module or not force: return module import builtins from .source import getname name = getname(object, force=True) return builtins if name in vars(builtins).keys() else None def outermost(func): # is analogous to getsource(func,enclosing=True) """get outermost enclosing object (i.e. 
the outer function in a closure) NOTE: this is the object-equivalent of getsource(func, enclosing=True) """ if ismethod(func): _globals = func.__func__.__globals__ or {} elif isfunction(func): _globals = func.__globals__ or {} else: return #XXX: or raise? no matches _globals = _globals.items() # get the enclosing source from .source import getsourcelines try: lines,lnum = getsourcelines(func, enclosing=True) except Exception: #TypeError, IOError lines,lnum = [],None code = ''.join(lines) # get all possible names,objects that are named in the enclosing source _locals = ((name,obj) for (name,obj) in _globals if name in code) # now only save the objects that generate the enclosing block for name,obj in _locals: #XXX: don't really need 'name' try: if getsourcelines(obj) == (lines,lnum): return obj except Exception: #TypeError, IOError pass return #XXX: or raise? no matches def nestedcode(func, recurse=True): #XXX: or return dict of {co_name: co} ? """get the code objects for any nested functions (e.g. in a closure)""" func = code(func) if not iscode(func): return [] #XXX: or raise? no matches nested = set() for co in func.co_consts: if co is None: continue co = code(co) if co: nested.add(co) if recurse: nested |= set(nestedcode(co, recurse=True)) return list(nested) def code(func): """get the code object for the given function or method NOTE: use dill.source.getsource(CODEOBJ) to get the source code """ if ismethod(func): func = func.__func__ if isfunction(func): func = func.__code__ if istraceback(func): func = func.tb_frame if isframe(func): func = func.f_code if iscode(func): return func return #XXX: ugly: parse dis.dis for name after " len(referrednested(func)), try calling func(). If possible, python builds code objects, but delays building functions until func() is called. 
""" import gc funcs = set() # get the code objects, and try to track down by referrence for co in nestedcode(func, recurse): # look for function objects that refer to the code object for obj in gc.get_referrers(co): # get methods _ = getattr(obj, '__func__', None) # ismethod if getattr(_, '__code__', None) is co: funcs.add(obj) # get functions elif getattr(obj, '__code__', None) is co: funcs.add(obj) # get frame objects elif getattr(obj, 'f_code', None) is co: funcs.add(obj) # get code objects elif hasattr(obj, 'co_code') and obj is co: funcs.add(obj) # frameobjs => func.__code__.co_varnames not in func.__code__.co_cellvars # funcobjs => func.__code__.co_cellvars not in func.__code__.co_varnames # frameobjs are not found, however funcobjs are... # (see: test_mixins.quad ... and test_mixins.wtf) # after execution, code objects get compiled, and then may be found by gc return list(funcs) def freevars(func): """get objects defined in enclosing code that are referred to by func returns a dict of {name:object}""" if ismethod(func): func = func.__func__ if isfunction(func): closures = func.__closure__ or () func = func.__code__.co_freevars # get freevars else: return {} def get_cell_contents(): for name, c in zip(func, closures): try: cell_contents = c.cell_contents except ValueError: # cell is empty continue yield name, c.cell_contents return dict(get_cell_contents()) # thanks to Davies Liu for recursion of globals def nestedglobals(func, recurse=True): """get the names of any globals found within func""" func = code(func) if func is None: return list() import sys from .temp import capture CAN_NULL = sys.hexversion >= 0x30b00a7 # NULL may be prepended >= 3.11a7 names = set() with capture('stdout') as out: try: dis.dis(func) #XXX: dis.dis(None) disassembles last traceback except IndexError: pass #FIXME: HACK for IS_PYPY (3.11) for line in out.getvalue().splitlines(): if '_GLOBAL' in line: name = line.split('(')[-1].split(')')[0] if CAN_NULL: names.add(name.replace('NULL 
+ ', '').replace(' + NULL', '')) else: names.add(name) for co in getattr(func, 'co_consts', tuple()): if co and recurse and iscode(co): names.update(nestedglobals(co, recurse=True)) return list(names) def referredglobals(func, recurse=True, builtin=False): """get the names of objects in the global scope referred to by func""" return globalvars(func, recurse, builtin).keys() def globalvars(func, recurse=True, builtin=False): """get objects defined in global scope that are referred to by func return a dict of {name:object}""" if ismethod(func): func = func.__func__ if isfunction(func): globs = vars(getmodule(sum)).copy() if builtin else {} # get references from within closure orig_func, func = func, set() for obj in orig_func.__closure__ or {}: try: cell_contents = obj.cell_contents except ValueError: # cell is empty pass else: _vars = globalvars(cell_contents, recurse, builtin) or {} func.update(_vars) #XXX: (above) be wary of infinte recursion? globs.update(_vars) # get globals globs.update(orig_func.__globals__ or {}) # get names of references if not recurse: func.update(orig_func.__code__.co_names) else: func.update(nestedglobals(orig_func.__code__)) # find globals for all entries of func for key in func.copy(): #XXX: unnecessary...? nested_func = globs.get(key) if nested_func is orig_func: #func.remove(key) if key in func else None continue #XXX: globalvars(func, False)? func.update(globalvars(nested_func, True, builtin)) elif iscode(func): globs = vars(getmodule(sum)).copy() if builtin else {} #globs.update(globals()) if not recurse: func = func.co_names # get names else: orig_func = func.co_name # to stop infinite recursion func = set(nestedglobals(func)) # find globals for all entries of func for key in func.copy(): #XXX: unnecessary...? if key is orig_func: #func.remove(key) if key in func else None continue #XXX: globalvars(func, False)? 
nested_func = globs.get(key) func.update(globalvars(nested_func, True, builtin)) else: return {} #NOTE: if name not in __globals__, then we skip it... return dict((name,globs[name]) for name in func if name in globs) def varnames(func): """get names of variables defined by func returns a tuple (local vars, local vars referrenced by nested functions)""" func = code(func) if not iscode(func): return () #XXX: better ((),())? or None? return func.co_varnames, func.co_cellvars def baditems(obj, exact=False, safe=False): #XXX: obj=globals() ? """get items in object that fail to pickle""" if not hasattr(obj,'__iter__'): # is not iterable return [j for j in (badobjects(obj,0,exact,safe),) if j is not None] obj = obj.values() if getattr(obj,'values',None) else obj _obj = [] # can't use a set, as items may be unhashable [_obj.append(badobjects(i,0,exact,safe)) for i in obj if i not in _obj] return [j for j in _obj if j is not None] def badobjects(obj, depth=0, exact=False, safe=False): """get objects that fail to pickle""" from dill import pickles if not depth: if pickles(obj,exact,safe): return None return obj return dict(((attr, badobjects(getattr(obj,attr),depth-1,exact,safe)) \ for attr in dir(obj) if not pickles(getattr(obj,attr),exact,safe))) def badtypes(obj, depth=0, exact=False, safe=False): """get types for objects that fail to pickle""" from dill import pickles if not depth: if pickles(obj,exact,safe): return None return type(obj) return dict(((attr, badtypes(getattr(obj,attr),depth-1,exact,safe)) \ for attr in dir(obj) if not pickles(getattr(obj,attr),exact,safe))) def errors(obj, depth=0, exact=False, safe=False): """get errors for objects that fail to pickle""" from dill import pickles, copy if not depth: try: pik = copy(obj) if exact: assert pik == obj, \ "Unpickling produces %s instead of %s" % (pik,obj) assert type(pik) == type(obj), \ "Unpickling produces %s instead of %s" % (type(pik),type(obj)) return None except Exception: import sys return 
sys.exc_info()[1] _dict = {} for attr in dir(obj): try: _attr = getattr(obj,attr) except Exception: import sys _dict[attr] = sys.exc_info()[1] continue if not pickles(_attr,exact,safe): _dict[attr] = errors(_attr,depth-1,exact,safe) return _dict # EOF ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702238.0 dill-0.4.0/dill/logger.py0000644000076500000240000002560714735133336015000 0ustar00mmckernsstaff#!/usr/bin/env python # -*- coding: utf-8 -*- # # Author: Leonardo Gama (@leogama) # Copyright (c) 2022-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE """ Logging utilities for dill. The 'logger' object is dill's top-level logger. The 'adapter' object wraps the logger and implements a 'trace()' method that generates a detailed tree-style trace for the pickling call at log level INFO. The 'trace()' function sets and resets dill's logger log level, enabling and disabling the pickling trace. The trace shows a tree structure depicting the depth of each object serialized *with dill save functions*, but not the ones that use save functions from 'pickle._Pickler.dispatch'. If the information is available, it also displays the size in bytes that the object contributed to the pickle stream (including its child objects). Sample trace output: >>> import dill, dill.tests >>> dill.detect.trace(True) >>> dill.dump_session(main=dill.tests) ┬ M1: ├┬ F2: │└ # F2 [32 B] ├┬ D2: │├┬ T4: ││└ # T4 [35 B] │├┬ D2: ││├┬ T4: │││└ # T4 [50 B] ││├┬ D2: │││└ # D2 [84 B] ││└ # D2 [413 B] │└ # D2 [763 B] └ # M1 [813 B] """ __all__ = ['adapter', 'logger', 'trace'] import codecs import contextlib import locale import logging import math import os from functools import partial from typing import TextIO, Union import dill # Tree drawing characters: Unicode to ASCII map. 
ASCII_MAP = str.maketrans({"│": "|", "├": "|", "┬": "+", "└": "`"}) ## Notes about the design choices ## # Here is some domumentation of the Standard Library's logging internals that # can't be found completely in the official documentation. dill's logger is # obtained by calling logging.getLogger('dill') and therefore is an instance of # logging.getLoggerClass() at the call time. As this is controlled by the user, # in order to add some functionality to it it's necessary to use a LoggerAdapter # to wrap it, overriding some of the adapter's methods and creating new ones. # # Basic calling sequence # ====================== # # Python's logging functionality can be conceptually divided into five steps: # 0. Check logging level -> abort if call level is greater than logger level # 1. Gather information -> construct a LogRecord from passed arguments and context # 2. Filter (optional) -> discard message if the record matches a filter # 3. Format -> format message with args, then format output string with message plus record # 4. Handle -> write the formatted string to output as defined in the handler # # dill.logging.logger.log -> # or logger.info, etc. # Logger.log -> \ # Logger._log -> }- accept 'extra' parameter for custom record entries # Logger.makeRecord -> / # LogRecord.__init__ # Logger.handle -> # Logger.callHandlers -> # Handler.handle -> # Filterer.filter -> # Filter.filter # StreamHandler.emit -> # Handler.format -> # Formatter.format -> # LogRecord.getMessage # does: record.message = msg % args # Formatter.formatMessage -> # PercentStyle.format # does: self._fmt % vars(record) # # NOTE: All methods from the second line on are from logging.__init__.py class TraceAdapter(logging.LoggerAdapter): """ Tracks object tree depth and calculates pickled object size. A single instance of this wraps the module's logger, as the logging API doesn't allow setting it directly with a custom Logger subclass. 
The added 'trace()' method receives a pickle instance as the first argument and creates extra values to be added in the LogRecord from it, then calls 'info()'. Usage of logger with 'trace()' method: >>> from dill.logger import adapter as logger #NOTE: not dill.logger.logger >>> ... >>> def save_atype(pickler, obj): >>> logger.trace(pickler, "Message with %s and %r etc. placeholders", 'text', obj) >>> ... """ def __init__(self, logger): self.logger = logger def addHandler(self, handler): formatter = TraceFormatter("%(prefix)s%(message)s%(suffix)s", handler=handler) handler.setFormatter(formatter) self.logger.addHandler(handler) def removeHandler(self, handler): self.logger.removeHandler(handler) def process(self, msg, kwargs): # A no-op override, as we don't have self.extra. return msg, kwargs def trace_setup(self, pickler): # Called by Pickler.dump(). if not dill._dill.is_dill(pickler, child=False): return if self.isEnabledFor(logging.INFO): pickler._trace_depth = 1 pickler._size_stack = [] else: pickler._trace_depth = None def trace(self, pickler, msg, *args, **kwargs): if not hasattr(pickler, '_trace_depth'): logger.info(msg, *args, **kwargs) return if pickler._trace_depth is None: return extra = kwargs.get('extra', {}) pushed_obj = msg.startswith('#') size = None try: # Streams are not required to be tellable. size = pickler._file.tell() frame = pickler.framer.current_frame try: size += frame.tell() except AttributeError: # PyPy may use a BytesBuilder as frame size += len(frame) except (AttributeError, TypeError): pass if size is not None: if not pushed_obj: pickler._size_stack.append(size) else: size -= pickler._size_stack.pop() extra['size'] = size if pushed_obj: pickler._trace_depth -= 1 extra['depth'] = pickler._trace_depth kwargs['extra'] = extra self.info(msg, *args, **kwargs) if not pushed_obj: pickler._trace_depth += 1 class TraceFormatter(logging.Formatter): """ Generates message prefix and suffix from record. 
This Formatter adds prefix and suffix strings to the log message in trace mode (an also provides empty string defaults for normal logs). """ def __init__(self, *args, handler=None, **kwargs): super().__init__(*args, **kwargs) try: encoding = handler.stream.encoding if encoding is None: raise AttributeError except AttributeError: encoding = locale.getpreferredencoding() try: encoding = codecs.lookup(encoding).name except LookupError: self.is_utf8 = False else: self.is_utf8 = (encoding == codecs.lookup('utf-8').name) def format(self, record): fields = {'prefix': "", 'suffix': ""} if getattr(record, 'depth', 0) > 0: if record.msg.startswith("#"): prefix = (record.depth - 1)*"│" + "└" elif record.depth == 1: prefix = "┬" else: prefix = (record.depth - 2)*"│" + "├┬" if not self.is_utf8: prefix = prefix.translate(ASCII_MAP) + "-" fields['prefix'] = prefix + " " if hasattr(record, 'size') and record.size is not None and record.size >= 1: # Show object size in human-readable form. power = int(math.log(record.size, 2)) // 10 size = record.size >> power*10 fields['suffix'] = " [%d %sB]" % (size, "KMGTP"[power] + "i" if power else "") vars(record).update(fields) return super().format(record) logger = logging.getLogger('dill') logger.propagate = False adapter = TraceAdapter(logger) stderr_handler = logging._StderrHandler() adapter.addHandler(stderr_handler) def trace(arg: Union[bool, TextIO, str, os.PathLike] = None, *, mode: str = 'a') -> None: """print a trace through the stack when pickling; useful for debugging With a single boolean argument, enable or disable the tracing. Example usage: >>> import dill >>> dill.detect.trace(True) >>> dill.dump_session() Alternatively, ``trace()`` can be used as a context manager. With no arguments, it just takes care of restoring the tracing state on exit. Either a file handle, or a file name and (optionally) a file mode may be specitfied to redirect the tracing output in the ``with`` block context. 
A log function is yielded by the manager so the user can write extra information to the file. Example usage: >>> from dill import detect >>> D = {'a': 42, 'b': {'x': None}} >>> with detect.trace(): >>> dumps(D) ┬ D2: ├┬ D2: │└ # D2 [8 B] └ # D2 [22 B] >>> squared = lambda x: x**2 >>> with detect.trace('output.txt', mode='w') as log: >>> log("> D = %r", D) >>> dumps(D) >>> log("> squared = %r", squared) >>> dumps(squared) Arguments: arg: a boolean value, or an optional file-like or path-like object for the context manager mode: mode string for ``open()`` if a file name is passed as the first argument """ if repr(arg) not in ('False', 'True'): return TraceManager(file=arg, mode=mode) logger.setLevel(logging.INFO if arg else logging.WARNING) class TraceManager(contextlib.AbstractContextManager): """context manager version of trace(); can redirect the trace to a file""" def __init__(self, file, mode): self.file = file self.mode = mode self.redirect = file is not None self.file_is_stream = hasattr(file, 'write') def __enter__(self): if self.redirect: stderr_handler.flush() if self.file_is_stream: self.handler = logging.StreamHandler(self.file) else: self.handler = logging.FileHandler(self.file, self.mode) adapter.removeHandler(stderr_handler) adapter.addHandler(self.handler) self.old_level = adapter.getEffectiveLevel() adapter.setLevel(logging.INFO) return adapter.info def __exit__(self, *exc_info): adapter.setLevel(self.old_level) if self.redirect: adapter.removeHandler(self.handler) adapter.addHandler(stderr_handler) if not self.file_is_stream: self.handler.close() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702238.0 dill-0.4.0/dill/objtypes.py0000644000076500000240000000134014735133336015344 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2008-2016 California Institute of Technology. # Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. 
# License: 3-clause BSD. The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE """ all Python Standard Library object types (currently: CH 1-15 @ 2.7) and some other common object types (i.e. numpy.ndarray) to load more objects and types, use dill.load_types() """ # non-local import of dill.objects from dill import objects for _type in objects.keys(): exec("%s = type(objects['%s'])" % (_type,_type)) del objects try: del _type except NameError: pass ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702238.0 dill-0.4.0/dill/pointers.py0000644000076500000240000001056314735133336015357 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2008-2016 California Institute of Technology. # Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE __all__ = ['parent', 'reference', 'at', 'parents', 'children'] import gc import sys from ._dill import _proxy_helper as reference from ._dill import _locate_object as at def parent(obj, objtype, ignore=()): """ >>> listiter = iter([4,5,6,7]) >>> obj = parent(listiter, list) >>> obj == [4,5,6,7] # actually 'is', but don't have handle any longer True NOTE: objtype can be a single type (e.g. int or list) or a tuple of types. WARNING: if obj is a sequence (e.g. list), may produce unexpected results. Parent finds *one* parent (e.g. the last member of the sequence). """ depth = 1 #XXX: always looking for the parent (only, right?) chain = parents(obj, objtype, depth, ignore) parent = chain.pop() if parent is obj: return None return parent def parents(obj, objtype, depth=1, ignore=()): #XXX: objtype=object ? """Find the chain of referents for obj. Chain will end with obj. objtype: an object type or tuple of types to search for depth: search depth (e.g. 
depth=2 is 'grandparents') ignore: an object or tuple of objects to ignore in the search """ edge_func = gc.get_referents # looking for refs, not back_refs predicate = lambda x: isinstance(x, objtype) # looking for parent type #if objtype is None: predicate = lambda x: True #XXX: in obj.mro() ? ignore = (ignore,) if not hasattr(ignore, '__len__') else ignore ignore = (id(obj) for obj in ignore) chain = find_chain(obj, predicate, edge_func, depth)[::-1] #XXX: should pop off obj... ? return chain def children(obj, objtype, depth=1, ignore=()): #XXX: objtype=object ? """Find the chain of referrers for obj. Chain will start with obj. objtype: an object type or tuple of types to search for depth: search depth (e.g. depth=2 is 'grandchildren') ignore: an object or tuple of objects to ignore in the search NOTE: a common thing to ignore is all globals, 'ignore=(globals(),)' NOTE: repeated calls may yield different results, as python stores the last value in the special variable '_'; thus, it is often good to execute something to replace '_' (e.g. >>> 1+1). """ edge_func = gc.get_referrers # looking for back_refs, not refs predicate = lambda x: isinstance(x, objtype) # looking for child type #if objtype is None: predicate = lambda x: True #XXX: in obj.mro() ? ignore = (ignore,) if not hasattr(ignore, '__len__') else ignore ignore = (id(obj) for obj in ignore) chain = find_chain(obj, predicate, edge_func, depth, ignore) #XXX: should pop off obj... ? 
return chain # more generic helper function (cut-n-paste from objgraph) # Source at http://mg.pov.lt/objgraph/ # Copyright (c) 2008-2010 Marius Gedminas # Copyright (c) 2010 Stefano Rivera # Released under the MIT licence (see objgraph/objgrah.py) def find_chain(obj, predicate, edge_func, max_depth=20, extra_ignore=()): queue = [obj] depth = {id(obj): 0} parent = {id(obj): None} ignore = set(extra_ignore) ignore.add(id(extra_ignore)) ignore.add(id(queue)) ignore.add(id(depth)) ignore.add(id(parent)) ignore.add(id(ignore)) ignore.add(id(sys._getframe())) # this function ignore.add(id(sys._getframe(1))) # find_chain/find_backref_chain, likely gc.collect() while queue: target = queue.pop(0) if predicate(target): chain = [target] while parent[id(target)] is not None: target = parent[id(target)] chain.append(target) return chain tdepth = depth[id(target)] if tdepth < max_depth: referrers = edge_func(target) ignore.add(id(referrers)) for source in referrers: if id(source) in ignore: continue if id(source) not in depth: depth[id(source)] = tdepth + 1 parent[id(source)] = target queue.append(source) return [obj] # not found # backward compatibility refobject = at # EOF ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702238.0 dill-0.4.0/dill/session.py0000644000076500000240000005576514735133336015214 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Author: Leonardo Gama (@leogama) # Copyright (c) 2008-2015 California Institute of Technology. # Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE """ Pickle and restore the intepreter session. 
""" __all__ = [ 'dump_module', 'load_module', 'load_module_asdict', 'dump_session', 'load_session' # backward compatibility ] import re import os import sys import warnings import pathlib import tempfile TEMPDIR = pathlib.PurePath(tempfile.gettempdir()) # Type hints. from typing import Optional, Union from dill import _dill, Pickler, Unpickler from ._dill import ( BuiltinMethodType, FunctionType, MethodType, ModuleType, TypeType, _import_module, _is_builtin_module, _is_imported_module, _main_module, _reverse_typemap, __builtin__, UnpicklingError, ) def _module_map(): """get map of imported modules""" from collections import defaultdict from types import SimpleNamespace modmap = SimpleNamespace( by_name=defaultdict(list), by_id=defaultdict(list), top_level={}, ) for modname, module in sys.modules.items(): if modname in ('__main__', '__mp_main__') or not isinstance(module, ModuleType): continue if '.' not in modname: modmap.top_level[id(module)] = modname for objname, modobj in module.__dict__.items(): modmap.by_name[objname].append((modobj, modname)) modmap.by_id[id(modobj)].append((modobj, objname, modname)) return modmap IMPORTED_AS_TYPES = (ModuleType, TypeType, FunctionType, MethodType, BuiltinMethodType) if 'PyCapsuleType' in _reverse_typemap: IMPORTED_AS_TYPES += (_reverse_typemap['PyCapsuleType'],) IMPORTED_AS_MODULES = ('ctypes', 'typing', 'subprocess', 'threading', r'concurrent\.futures(\.\w+)?', r'multiprocessing(\.\w+)?') IMPORTED_AS_MODULES = tuple(re.compile(x) for x in IMPORTED_AS_MODULES) def _lookup_module(modmap, name, obj, main_module): """lookup name or id of obj if module is imported""" for modobj, modname in modmap.by_name[name]: if modobj is obj and sys.modules[modname] is not main_module: return modname, name __module__ = getattr(obj, '__module__', None) if isinstance(obj, IMPORTED_AS_TYPES) or (__module__ is not None and any(regex.fullmatch(__module__) for regex in IMPORTED_AS_MODULES)): for modobj, objname, modname in modmap.by_id[id(obj)]: 
            if sys.modules[modname] is not main_module:
                return modname, objname
    return None, None

def _stash_modules(main_module):
    # Build a surrogate module whose namespace keeps only objects that must be
    # pickled by value; everything identified as imported is recorded in the
    # __dill_imported* lists so it can be re-imported on load (see
    # _restore_modules).  Returns main_module unchanged if nothing was stashed.
    modmap = _module_map()
    newmod = ModuleType(main_module.__name__)

    imported = []
    imported_as = []
    imported_top_level = [] # keep separated for backward compatibility
    original = {}
    for name, obj in main_module.__dict__.items():
        if obj is main_module:
            original[name] = newmod # self-reference
        elif obj is main_module.__dict__:
            original[name] = newmod.__dict__
        # Avoid incorrectly matching a singleton value in another package (ex.: __doc__).
        elif any(obj is singleton for singleton in (None, False, True)) \
                or isinstance(obj, ModuleType) and _is_builtin_module(obj): # always saved by ref
            original[name] = obj
        else:
            source_module, objname = _lookup_module(modmap, name, obj, main_module)
            if source_module is not None:
                if objname == name:
                    imported.append((source_module, name))
                else:
                    imported_as.append((source_module, objname, name))
            else:
                try:
                    # not imported from a module, but maybe a top-level module itself
                    imported_top_level.append((modmap.top_level[id(obj)], name))
                except KeyError:
                    original[name] = obj

    if len(original) < len(main_module.__dict__):
        newmod.__dict__.update(original)
        newmod.__dill_imported = imported
        newmod.__dill_imported_as = imported_as
        newmod.__dill_imported_top_level = imported_top_level
        if getattr(newmod, '__loader__', None) is None and _is_imported_module(main_module):
            # Trick _is_imported_module() to force saving as an imported module.
            newmod.__loader__ = True # will be discarded by save_module()
        return newmod
    else:
        return main_module

def _restore_modules(unpickler, main_module):
    # Re-import the objects recorded by _stash_modules() back into the
    # restored module's namespace; a missing key means nothing was stashed.
    try:
        for modname, name in main_module.__dict__.pop('__dill_imported'):
            main_module.__dict__[name] = unpickler.find_class(modname, name)
        for modname, objname, name in main_module.__dict__.pop('__dill_imported_as'):
            main_module.__dict__[name] = unpickler.find_class(modname, objname)
        for modname, name in main_module.__dict__.pop('__dill_imported_top_level'):
            main_module.__dict__[name] = __import__(modname)
    except KeyError:
        pass

#NOTE: 06/03/15 renamed main_module to main
def dump_module(
    filename: Union[str, os.PathLike] = None,
    module: Optional[Union[ModuleType, str]] = None,
    refimported: bool = False,
    **kwds
) -> None:
    """Pickle the current state of :py:mod:`__main__` or another module to a file.

    Save the contents of :py:mod:`__main__` (e.g. from an interactive
    interpreter session), an imported module, or a module-type object (e.g.
    built with :py:class:`~types.ModuleType`), to a file. The pickled
    module can then be restored with the function :py:func:`load_module`.

    Args:
        filename: a path-like object or a writable stream. If `None`
            (the default), write to a named file in a temporary directory.
        module: a module object or the name of an importable module. If `None`
            (the default), :py:mod:`__main__` is saved.
        refimported: if `True`, all objects identified as having been imported
            into the module's namespace are saved by reference. *Note:* this is
            similar but independent from ``dill.settings[`byref`]``, as
            ``refimported`` refers to virtually all imported objects, while
            ``byref`` only affects select objects.
        **kwds: extra keyword arguments passed to :py:class:`Pickler()`.

    Raises:
        :py:exc:`PicklingError`: if pickling fails.

    Examples:

        - Save current interpreter session state:

          >>> import dill
          >>> squared = lambda x: x*x
          >>> dill.dump_module() # save state of __main__ to /tmp/session.pkl

        - Save the state of an imported/importable module:

          >>> import dill
          >>> import pox
          >>> pox.plus_one = lambda x: x+1
          >>> dill.dump_module('pox_session.pkl', module=pox)

        - Save the state of a non-importable, module-type object:

          >>> import dill
          >>> from types import ModuleType
          >>> foo = ModuleType('foo')
          >>> foo.values = [1,2,3]
          >>> import math
          >>> foo.sin = math.sin
          >>> dill.dump_module('foo_session.pkl', module=foo, refimported=True)

        - Restore the state of the saved modules:

          >>> import dill
          >>> dill.load_module()
          >>> squared(2)
          4
          >>> pox = dill.load_module('pox_session.pkl')
          >>> pox.plus_one(1)
          2
          >>> foo = dill.load_module('foo_session.pkl')
          >>> [foo.sin(x) for x in foo.values]
          [0.8414709848078965, 0.9092974268256817, 0.1411200080598672]

        - Use `refimported` to save imported objects by reference:

          >>> import dill
          >>> from html.entities import html5
          >>> type(html5), len(html5)
          (dict, 2231)
          >>> import io
          >>> buf = io.BytesIO()
          >>> dill.dump_module(buf) # saves __main__, with html5 saved by value
          >>> len(buf.getvalue()) # pickle size in bytes
          71665
          >>> buf = io.BytesIO()
          >>> dill.dump_module(buf, refimported=True) # html5 saved by reference
          >>> len(buf.getvalue())
          438

    *Changed in version 0.3.6:* Function ``dump_session()`` was renamed to
    ``dump_module()``.  Parameters ``main`` and ``byref`` were renamed to
    ``module`` and ``refimported``, respectively.

    Note:
        Currently, ``dill.settings['byref']`` and ``dill.settings['recurse']``
        don't apply to this function.
    """
    # Accept (and warn about) the pre-0.3.6 parameter names.
    for old_par, par in [('main', 'module'), ('byref', 'refimported')]:
        if old_par in kwds:
            message = "The argument %r has been renamed %r" % (old_par, par)
            if old_par == 'byref':
                message += " to distinguish it from dill.settings['byref']"
            warnings.warn(message + ".", PendingDeprecationWarning)
            if locals()[par]: # the defaults are None and False
                raise TypeError("both %r and %r arguments were used" % (par, old_par))
    refimported = kwds.pop('byref', refimported)
    module = kwds.pop('main', module)

    from .settings import settings
    protocol = settings['protocol']
    main = module
    if main is None:
        main = _main_module
    elif isinstance(main, str):
        main = _import_module(main)
    if not isinstance(main, ModuleType):
        raise TypeError("%r is not a module" % main)
    # `filename` may be an open stream; only open (and later close) our own file.
    if hasattr(filename, 'write'):
        file = filename
    else:
        if filename is None:
            filename = str(TEMPDIR/'session.pkl')
        file = open(filename, 'wb')
    try:
        pickler = Pickler(file, protocol, **kwds)
        pickler._original_main = main
        if refimported:
            main = _stash_modules(main)
        pickler._main = main     #FIXME: dill.settings are disabled
        pickler._byref = False   # disable pickling by name reference
        pickler._recurse = False # disable pickling recursion for globals
        pickler._session = True  # is best indicator of when pickling a session
        pickler._first_pass = True
        pickler._main_modified = main is not pickler._original_main
        pickler.dump(main)
    finally:
        if file is not filename: # if newly opened file
            file.close()
    return

# Backward compatibility.
def dump_session(filename=None, main=None, byref=False, **kwds): warnings.warn("dump_session() has been renamed dump_module()", PendingDeprecationWarning) dump_module(filename, module=main, refimported=byref, **kwds) dump_session.__doc__ = dump_module.__doc__ class _PeekableReader: """lightweight stream wrapper that implements peek()""" def __init__(self, stream): self.stream = stream def read(self, n): return self.stream.read(n) def readline(self): return self.stream.readline() def tell(self): return self.stream.tell() def close(self): return self.stream.close() def peek(self, n): stream = self.stream try: if hasattr(stream, 'flush'): stream.flush() position = stream.tell() stream.seek(position) # assert seek() works before reading chunk = stream.read(n) stream.seek(position) return chunk except (AttributeError, OSError): raise NotImplementedError("stream is not peekable: %r", stream) from None def _make_peekable(stream): """return stream as an object with a peek() method""" import io if hasattr(stream, 'peek'): return stream if not (hasattr(stream, 'tell') and hasattr(stream, 'seek')): try: return io.BufferedReader(stream) except Exception: pass return _PeekableReader(stream) def _identify_module(file, main=None): """identify the name of the module stored in the given file-type object""" from pickletools import genops UNICODE = {'UNICODE', 'BINUNICODE', 'SHORT_BINUNICODE'} found_import = False try: for opcode, arg, pos in genops(file.peek(256)): if not found_import: if opcode.name in ('GLOBAL', 'SHORT_BINUNICODE') and \ arg.endswith('_import_module'): found_import = True else: if opcode.name in UNICODE: return arg else: raise UnpicklingError("reached STOP without finding main module") except (NotImplementedError, ValueError) as error: # ValueError occours when the end of the chunk is reached (without a STOP). if isinstance(error, NotImplementedError) and main is not None: # file is not peekable, but we have main. 
return None raise UnpicklingError("unable to identify main module") from error def load_module( filename: Union[str, os.PathLike] = None, module: Optional[Union[ModuleType, str]] = None, **kwds ) -> Optional[ModuleType]: """Update the selected module (default is :py:mod:`__main__`) with the state saved at ``filename``. Restore a module to the state saved with :py:func:`dump_module`. The saved module can be :py:mod:`__main__` (e.g. an interpreter session), an imported module, or a module-type object (e.g. created with :py:class:`~types.ModuleType`). When restoring the state of a non-importable module-type object, the current instance of this module may be passed as the argument ``main``. Otherwise, a new instance is created with :py:class:`~types.ModuleType` and returned. Args: filename: a path-like object or a readable stream. If `None` (the default), read from a named file in a temporary directory. module: a module object or the name of an importable module; the module name and kind (i.e. imported or non-imported) must match the name and kind of the module stored at ``filename``. **kwds: extra keyword arguments passed to :py:class:`Unpickler()`. Raises: :py:exc:`UnpicklingError`: if unpickling fails. :py:exc:`ValueError`: if the argument ``main`` and module saved at ``filename`` are incompatible. Returns: A module object, if the saved module is not :py:mod:`__main__` or a module instance wasn't provided with the argument ``main``. 
Examples: - Save the state of some modules: >>> import dill >>> squared = lambda x: x*x >>> dill.dump_module() # save state of __main__ to /tmp/session.pkl >>> >>> import pox # an imported module >>> pox.plus_one = lambda x: x+1 >>> dill.dump_module('pox_session.pkl', module=pox) >>> >>> from types import ModuleType >>> foo = ModuleType('foo') # a module-type object >>> foo.values = [1,2,3] >>> import math >>> foo.sin = math.sin >>> dill.dump_module('foo_session.pkl', module=foo, refimported=True) - Restore the state of the interpreter: >>> import dill >>> dill.load_module() # updates __main__ from /tmp/session.pkl >>> squared(2) 4 - Load the saved state of an importable module: >>> import dill >>> pox = dill.load_module('pox_session.pkl') >>> pox.plus_one(1) 2 >>> import sys >>> pox in sys.modules.values() True - Load the saved state of a non-importable module-type object: >>> import dill >>> foo = dill.load_module('foo_session.pkl') >>> [foo.sin(x) for x in foo.values] [0.8414709848078965, 0.9092974268256817, 0.1411200080598672] >>> import math >>> foo.sin is math.sin # foo.sin was saved by reference True >>> import sys >>> foo in sys.modules.values() False - Update the state of a non-importable module-type object: >>> import dill >>> from types import ModuleType >>> foo = ModuleType('foo') >>> foo.values = ['a','b'] >>> foo.sin = lambda x: x*x >>> dill.load_module('foo_session.pkl', module=foo) >>> [foo.sin(x) for x in foo.values] [0.8414709848078965, 0.9092974268256817, 0.1411200080598672] *Changed in version 0.3.6:* Function ``load_session()`` was renamed to ``load_module()``. Parameter ``main`` was renamed to ``module``. See also: :py:func:`load_module_asdict` to load the contents of module saved with :py:func:`dump_module` into a dictionary. 
""" if 'main' in kwds: warnings.warn( "The argument 'main' has been renamed 'module'.", PendingDeprecationWarning ) if module is not None: raise TypeError("both 'module' and 'main' arguments were used") module = kwds.pop('main') main = module if hasattr(filename, 'read'): file = filename else: if filename is None: filename = str(TEMPDIR/'session.pkl') file = open(filename, 'rb') try: file = _make_peekable(file) #FIXME: dill.settings are disabled unpickler = Unpickler(file, **kwds) unpickler._session = True # Resolve unpickler._main pickle_main = _identify_module(file, main) if main is None and pickle_main is not None: main = pickle_main if isinstance(main, str): if main.startswith('__runtime__.'): # Create runtime module to load the session into. main = ModuleType(main.partition('.')[-1]) else: main = _import_module(main) if main is not None: if not isinstance(main, ModuleType): raise TypeError("%r is not a module" % main) unpickler._main = main else: main = unpickler._main # Check against the pickle's main. is_main_imported = _is_imported_module(main) if pickle_main is not None: is_runtime_mod = pickle_main.startswith('__runtime__.') if is_runtime_mod: pickle_main = pickle_main.partition('.')[-1] error_msg = "can't update{} module{} %r with the saved state of{} module{} %r" if is_runtime_mod and is_main_imported: raise ValueError( error_msg.format(" imported", "", "", "-type object") % (main.__name__, pickle_main) ) if not is_runtime_mod and not is_main_imported: raise ValueError( error_msg.format("", "-type object", " imported", "") % (pickle_main, main.__name__) ) if main.__name__ != pickle_main: raise ValueError(error_msg.format("", "", "", "") % (main.__name__, pickle_main)) # This is for find_class() to be able to locate it. 
if not is_main_imported: runtime_main = '__runtime__.%s' % main.__name__ sys.modules[runtime_main] = main loaded = unpickler.load() finally: if not hasattr(filename, 'read'): # if newly opened file file.close() try: del sys.modules[runtime_main] except (KeyError, NameError): pass assert loaded is main _restore_modules(unpickler, main) if main is _main_module or main is module: return None else: return main # Backward compatibility. def load_session(filename=None, main=None, **kwds): warnings.warn("load_session() has been renamed load_module().", PendingDeprecationWarning) load_module(filename, module=main, **kwds) load_session.__doc__ = load_module.__doc__ def load_module_asdict( filename: Union[str, os.PathLike] = None, update: bool = False, **kwds ) -> dict: """ Load the contents of a saved module into a dictionary. ``load_module_asdict()`` is the near-equivalent of:: lambda filename: vars(dill.load_module(filename)).copy() however, does not alter the original module. Also, the path of the loaded module is stored in the ``__session__`` attribute. Args: filename: a path-like object or a readable stream. If `None` (the default), read from a named file in a temporary directory. update: if `True`, initialize the dictionary with the current state of the module prior to loading the state stored at filename. **kwds: extra keyword arguments passed to :py:class:`Unpickler()` Raises: :py:exc:`UnpicklingError`: if unpickling fails Returns: A copy of the restored module's dictionary. Note: If ``update`` is True, the corresponding module may first be imported into the current namespace before the saved state is loaded from filename to the dictionary. Note that any module that is imported into the current namespace as a side-effect of using ``update`` will not be modified by loading the saved module in filename to a dictionary. 
Example: >>> import dill >>> alist = [1, 2, 3] >>> anum = 42 >>> dill.dump_module() >>> anum = 0 >>> new_var = 'spam' >>> main = dill.load_module_asdict() >>> main['__name__'], main['__session__'] ('__main__', '/tmp/session.pkl') >>> main is globals() # loaded objects don't reference globals False >>> main['alist'] == alist True >>> main['alist'] is alist # was saved by value False >>> main['anum'] == anum # changed after the session was saved False >>> new_var in main # would be True if the option 'update' was set False """ if 'module' in kwds: raise TypeError("'module' is an invalid keyword argument for load_module_asdict()") if hasattr(filename, 'read'): file = filename else: if filename is None: filename = str(TEMPDIR/'session.pkl') file = open(filename, 'rb') try: file = _make_peekable(file) main_name = _identify_module(file) old_main = sys.modules.get(main_name) main = ModuleType(main_name) if update: if old_main is None: old_main = _import_module(main_name) main.__dict__.update(old_main.__dict__) else: main.__builtins__ = __builtin__ sys.modules[main_name] = main load_module(file, **kwds) finally: if not hasattr(filename, 'read'): # if newly opened file file.close() try: if old_main is None: del sys.modules[main_name] else: sys.modules[main_name] = old_main except NameError: # failed before setting old_main pass main.__session__ = str(filename) return main.__dict__ # Internal exports for backward compatibility with dill v0.3.5.1 # Can't be placed in dill._dill because of circular import problems. 
for name in ( '_lookup_module', '_module_map', '_restore_modules', '_stash_modules', 'dump_session', 'load_session' # backward compatibility functions ): setattr(_dill, name, globals()[name]) del name ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702238.0 dill-0.4.0/dill/settings.py0000644000076500000240000000116614735133336015353 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2008-2016 California Institute of Technology. # Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE """ global settings for Pickler """ from pickle import DEFAULT_PROTOCOL settings = { #'main' : None, 'protocol' : DEFAULT_PROTOCOL, 'byref' : False, #'strictio' : False, 'fmode' : 0, #HANDLE_FMODE 'recurse' : False, 'ignore' : False, } del DEFAULT_PROTOCOL ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702238.0 dill-0.4.0/dill/source.py0000644000076500000240000013070314735133336015013 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2008-2016 California Institute of Technology. # Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE # # inspired by inspect.py from Python-2.7.6 # inspect.py author: 'Ka-Ping Yee ' # inspect.py merged into original dill.source by Mike McKerns 4/13/14 """ Extensions to python's 'inspect' module, which can be used to retrieve information from live python objects. The methods defined in this module are augmented to facilitate access to source code of interactively defined functions and classes, as well as provide access to source code for objects defined in a file. 
""" __all__ = ['findsource', 'getsourcelines', 'getsource', 'indent', 'outdent', \ '_wrap', 'dumpsource', 'getname', '_namespace', 'getimport', \ '_importable', 'importable','isdynamic', 'isfrommain'] import linecache import re from inspect import (getblock, getfile, getmodule, getsourcefile, indentsize, isbuiltin, isclass, iscode, isframe, isfunction, ismethod, ismodule, istraceback) from tokenize import TokenError from ._dill import IS_IPYTHON def isfrommain(obj): "check if object was built in __main__" module = getmodule(obj) if module and module.__name__ == '__main__': return True return False def isdynamic(obj): "check if object was built in the interpreter" try: file = getfile(obj) except TypeError: file = None if file == '' and isfrommain(obj): return True return False def _matchlambda(func, line): """check if lambda object 'func' matches raw line of code 'line'""" from .detect import code as getcode from .detect import freevars, globalvars, varnames dummy = lambda : '__this_is_a_big_dummy_function__' # process the line (removing leading whitespace, etc) lhs,rhs = line.split('lambda ',1)[-1].split(":", 1) #FIXME: if !1 inputs try: #FIXME: unsafe _ = eval("lambda %s : %s" % (lhs,rhs), globals(),locals()) except Exception: _ = dummy # get code objects, for comparison _, code = getcode(_).co_code, getcode(func).co_code # check if func is in closure _f = [line.count(i) for i in freevars(func).keys()] if not _f: # not in closure # check if code matches if _ == code: return True return False # weak check on freevars if not all(_f): return False #XXX: VERY WEAK # weak check on varnames and globalvars _f = varnames(func) _f = [line.count(i) for i in _f[0]+_f[1]] if _f and not all(_f): return False #XXX: VERY WEAK _f = [line.count(i) for i in globalvars(func).keys()] if _f and not all(_f): return False #XXX: VERY WEAK # check if func is a double lambda if (line.count('lambda ') > 1) and (lhs in freevars(func).keys()): _lhs,_rhs = rhs.split('lambda 
',1)[-1].split(":",1) #FIXME: if !1 inputs try: #FIXME: unsafe _f = eval("lambda %s : %s" % (_lhs,_rhs), globals(),locals()) except Exception: _f = dummy # get code objects, for comparison _, code = getcode(_f).co_code, getcode(func).co_code if len(_) != len(code): return False #NOTE: should be same code same order, but except for 't' and '\x88' _ = set((i,j) for (i,j) in zip(_,code) if i != j) if len(_) != 1: return False #('t','\x88') return True # check indentsize if not indentsize(line): return False #FIXME: is this a good check??? # check if code 'pattern' matches #XXX: or pattern match against dis.dis(code)? (or use uncompyle2?) _ = _.split(_[0]) # 't' #XXX: remove matching values if starts the same? _f = code.split(code[0]) # '\x88' #NOTE: should be same code different order, with different first element _ = dict(re.match(r'([\W\D\S])(.*)', _[i]).groups() for i in range(1,len(_))) _f = dict(re.match(r'([\W\D\S])(.*)', _f[i]).groups() for i in range(1,len(_f))) if (_.keys() == _f.keys()) and (sorted(_.values()) == sorted(_f.values())): return True return False def findsource(object): """Return the entire source file and starting line number for an object. For interactively-defined objects, the 'file' is the interpreter's history. The argument may be a module, class, method, function, traceback, frame, or code object. The source code is returned as a list of all the lines in the file and the line number indexes a line in that list. An IOError is raised if the source code cannot be retrieved, while a TypeError is raised for objects where the source code is unavailable (e.g. 
builtins).""" module = getmodule(object) try: file = getfile(module) except TypeError: file = None is_module_main = (module and module.__name__ == '__main__' and not file) if IS_IPYTHON and is_module_main: #FIXME: quick fix for functions and classes in IPython interpreter try: file = getfile(object) sourcefile = getsourcefile(object) except TypeError: if isclass(object): for object_method in filter(isfunction, object.__dict__.values()): # look for a method of the class file_candidate = getfile(object_method) if not file_candidate.startswith('': pat1 = r'(.*(?': pat1 = r'(.*(?' if stdin: lnum = len(lines) - 1 # can't get lnum easily, so leverage pat if not pat1: pat1 = r'^(\s*def\s)|(.*(? 0: #XXX: won't find decorators in ? line = lines[lnum] if pat1.match(line): if not stdin: break # co_firstlineno does the job if name == '': # hackery needed to confirm a match if _matchlambda(obj, line): break else: # not a lambda, just look for the name if name in line: # need to check for decorator... hats = 0 for _lnum in range(lnum-1,-1,-1): if pat2.match(lines[_lnum]): hats += 1 else: break lnum = lnum - hats break lnum = lnum - 1 return lines, lnum try: # turn instances into classes if not isclass(object) and isclass(type(object)): # __class__ object = object.__class__ #XXX: sometimes type(class) is better? #XXX: we don't find how the instance was built except AttributeError: pass if isclass(object): name = object.__name__ pat = re.compile(r'^(\s*)class\s*' + name + r'\b') # make some effort to find the best matching class definition: # use the one with the least indentation, which is the one # that's most probably not inside a function definition. 
candidates = [] for i in range(len(lines)-1,-1,-1): match = pat.match(lines[i]) if match: # if it's at toplevel, it's already the best one if lines[i][0] == 'c': return lines, i # else add whitespace to candidate list candidates.append((match.group(1), i)) if candidates: # this will sort by whitespace, and by line number, # less whitespace first #XXX: should sort high lnum before low candidates.sort() return lines, candidates[0][1] else: raise IOError('could not find class definition') raise IOError('could not find code object') def getblocks(object, lstrip=False, enclosing=False, locate=False): """Return a list of source lines and starting line number for an object. Interactively-defined objects refer to lines in the interpreter's history. If enclosing=True, then also return any enclosing code. If lstrip=True, ensure there is no indentation in the first line of code. If locate=True, then also return the line number for the block of code. DEPRECATED: use 'getsourcelines' instead """ lines, lnum = findsource(object) if ismodule(object): if lstrip: lines = _outdent(lines) return ([lines], [0]) if locate is True else [lines] #XXX: 'enclosing' means: closures only? or classes and files? indent = indentsize(lines[lnum]) block = getblock(lines[lnum:]) #XXX: catch any TokenError here? if not enclosing or not indent: if lstrip: block = _outdent(block) return ([block], [lnum]) if locate is True else [block] pat1 = r'^(\s*def\s)|(.*(? indent: #XXX: should be >= ? 
line += len(code) - skip elif target in ''.join(code): blocks.append(code) # save code block as the potential winner _lnum.append(line - skip) # save the line number for the match line += len(code) - skip else: line += 1 skip = 0 # find skip: the number of consecutive decorators elif pat2.match(lines[line]): try: code = getblock(lines[line:]) except TokenError: code = [lines[line]] skip = 1 for _line in code[1:]: # skip lines that are decorators if not pat2.match(_line): break skip += 1 line += skip # no match: reset skip and go to the next line else: line +=1 skip = 0 if not blocks: blocks = [block] _lnum = [lnum] if lstrip: blocks = [_outdent(block) for block in blocks] # return last match return (blocks, _lnum) if locate is True else blocks def getsourcelines(object, lstrip=False, enclosing=False): """Return a list of source lines and starting line number for an object. Interactively-defined objects refer to lines in the interpreter's history. The argument may be a module, class, method, function, traceback, frame, or code object. The source code is returned as a list of the lines corresponding to the object and the line number indicates where in the original source file the first line of code was found. An IOError is raised if the source code cannot be retrieved, while a TypeError is raised for objects where the source code is unavailable (e.g. builtins). If lstrip=True, ensure there is no indentation in the first line of code. If enclosing=True, then also return any enclosing code.""" code, n = getblocks(object, lstrip=lstrip, enclosing=enclosing, locate=True) return code[-1], n[-1] #NOTE: broke backward compatibility 4/16/14 (was lstrip=True, force=True) def getsource(object, alias='', lstrip=False, enclosing=False, \ force=False, builtin=False): """Return the text of the source code for an object. The source code for interactively-defined objects are extracted from the interpreter's history. 
The argument may be a module, class, method, function, traceback, frame, or code object. The source code is returned as a single string. An IOError is raised if the source code cannot be retrieved, while a TypeError is raised for objects where the source code is unavailable (e.g. builtins). If alias is provided, then add a line of code that renames the object. If lstrip=True, ensure there is no indentation in the first line of code. If enclosing=True, then also return any enclosing code. If force=True, catch (TypeError,IOError) and try to use import hooks. If builtin=True, force an import for any builtins """ # hascode denotes a callable hascode = _hascode(object) # is a class instance type (and not in builtins) instance = _isinstance(object) # get source lines; if fail, try to 'force' an import try: # fails for builtins, and other assorted object types lines, lnum = getsourcelines(object, enclosing=enclosing) except (TypeError, IOError): # failed to get source, resort to import hooks if not force: # don't try to get types that findsource can't get raise if not getmodule(object): # get things like 'None' and '1' if not instance: return getimport(object, alias, builtin=builtin) # special handling (numpy arrays, ...) _import = getimport(object, builtin=builtin) name = getname(object, force=True) _alias = "%s = " % alias if alias else "" if alias == name: _alias = "" return _import+_alias+"%s\n" % name else: #FIXME: could use a good bit of cleanup, since using getimport... if not instance: return getimport(object, alias, builtin=builtin) # now we are dealing with an instance... name = object.__class__.__name__ module = object.__module__ if module in ['builtins','__builtin__']: return getimport(object, alias, builtin=builtin) else: #FIXME: leverage getimport? use 'from module import name'? 
lines, lnum = ["%s = __import__('%s', fromlist=['%s']).%s\n" % (name,module,name,name)], 0 obj = eval(lines[0].lstrip(name + ' = ')) lines, lnum = getsourcelines(obj, enclosing=enclosing) # strip leading indent (helps ensure can be imported) if lstrip or alias: lines = _outdent(lines) # instantiate, if there's a nice repr #XXX: BAD IDEA??? if instance: #and force: #XXX: move into findsource or getsourcelines ? if '(' in repr(object): lines.append('%r\n' % object) #else: #XXX: better to somehow to leverage __reduce__ ? # reconstructor,args = object.__reduce__() # _ = reconstructor(*args) else: # fall back to serialization #XXX: bad idea? #XXX: better not duplicate work? #XXX: better new/enclose=True? lines = dumpsource(object, alias='', new=force, enclose=False) lines, lnum = [line+'\n' for line in lines.split('\n')][:-1], 0 #else: object.__code__ # raise AttributeError # add an alias to the source code if alias: if hascode: skip = 0 for line in lines: # skip lines that are decorators if not line.startswith('@'): break skip += 1 #XXX: use regex from findsource / getsourcelines ? 
if lines[skip].lstrip().startswith('def '): # we have a function if alias != object.__name__: lines.append('\n%s = %s\n' % (alias, object.__name__)) elif 'lambda ' in lines[skip]: # we have a lambda if alias != lines[skip].split('=')[0].strip(): lines[skip] = '%s = %s' % (alias, lines[skip]) else: # ...try to use the object's name if alias != object.__name__: lines.append('\n%s = %s\n' % (alias, object.__name__)) else: # class or class instance if instance: if alias != lines[-1].split('=')[0].strip(): lines[-1] = ('%s = ' % alias) + lines[-1] else: name = getname(object, force=True) or object.__name__ if alias != name: lines.append('\n%s = %s\n' % (alias, name)) return ''.join(lines) def _hascode(object): '''True if object has an attribute that stores it's __code__''' return getattr(object,'__code__',None) or getattr(object,'func_code',None) def _isinstance(object): '''True if object is a class instance type (and is not a builtin)''' if _hascode(object) or isclass(object) or ismodule(object): return False if istraceback(object) or isframe(object) or iscode(object): return False # special handling (numpy arrays, ...) if not getmodule(object) and getmodule(type(object)).__name__ in ['numpy']: return True # # check if is instance of a builtin # if not getmodule(object) and getmodule(type(object)).__name__ in ['__builtin__','builtins']: # return False _types = ('") if not repr(type(object)).startswith(_types): #FIXME: weak hack return False if not getmodule(object) or object.__module__ in ['builtins','__builtin__'] or getname(object, force=True) in ['array']: return False return True # by process of elimination... 
it's what we want def _intypes(object): '''check if object is in the 'types' module''' import types # allow user to pass in object or object.__name__ if type(object) is not type(''): object = getname(object, force=True) if object == 'ellipsis': object = 'EllipsisType' return True if hasattr(types, object) else False def _isstring(object): #XXX: isstringlike better? '''check if object is a string-like type''' return isinstance(object, (str, bytes)) def indent(code, spaces=4): '''indent a block of code with whitespace (default is 4 spaces)''' indent = indentsize(code) from numbers import Integral if isinstance(spaces, Integral): spaces = ' '*spaces # if '\t' is provided, will indent with a tab nspaces = indentsize(spaces) # blank lines (etc) need to be ignored lines = code.split('\n') ## stq = "'''"; dtq = '"""' ## in_stq = in_dtq = False for i in range(len(lines)): #FIXME: works... but shouldn't indent 2nd+ lines of multiline doc _indent = indentsize(lines[i]) if indent > _indent: continue lines[i] = spaces+lines[i] ## #FIXME: may fail when stq and dtq in same line (depends on ordering) ## nstq, ndtq = lines[i].count(stq), lines[i].count(dtq) ## if not in_dtq and not in_stq: ## lines[i] = spaces+lines[i] # we indent ## # entering a comment block ## if nstq%2: in_stq = not in_stq ## if ndtq%2: in_dtq = not in_dtq ## # leaving a comment block ## elif in_dtq and ndtq%2: in_dtq = not in_dtq ## elif in_stq and nstq%2: in_stq = not in_stq ## else: pass if lines[-1].strip() == '': lines[-1] = '' return '\n'.join(lines) def _outdent(lines, spaces=None, all=True): '''outdent lines of code, accounting for docs and line continuations''' indent = indentsize(lines[0]) if spaces is None or spaces > indent or spaces < 0: spaces = indent for i in range(len(lines) if all else 1): #FIXME: works... 
but shouldn't outdent 2nd+ lines of multiline doc _indent = indentsize(lines[i]) if spaces > _indent: _spaces = _indent else: _spaces = spaces lines[i] = lines[i][_spaces:] return lines def outdent(code, spaces=None, all=True): '''outdent a block of code (default is to strip all leading whitespace)''' indent = indentsize(code) if spaces is None or spaces > indent or spaces < 0: spaces = indent #XXX: will this delete '\n' in some cases? if not all: return code[spaces:] return '\n'.join(_outdent(code.split('\n'), spaces=spaces, all=all)) # _wrap provides an wrapper to correctly exec and load into locals __globals__ = globals() __locals__ = locals() def _wrap(f): """ encapsulate a function and it's __import__ """ def func(*args, **kwds): try: # _ = eval(getsource(f, force=True)) #XXX: safer but less robust exec(getimportable(f, alias='_'), __globals__, __locals__) except Exception: raise ImportError('cannot import name ' + f.__name__) return _(*args, **kwds) func.__name__ = f.__name__ func.__doc__ = f.__doc__ return func def _enclose(object, alias=''): #FIXME: needs alias to hold returned object """create a function enclosure around the source of some object""" #XXX: dummy and stub should append a random string dummy = '__this_is_a_big_dummy_enclosing_function__' stub = '__this_is_a_stub_variable__' code = 'def %s():\n' % dummy code += indent(getsource(object, alias=stub, lstrip=True, force=True)) code += indent('return %s\n' % stub) if alias: code += '%s = ' % alias code += '%s(); del %s\n' % (dummy, dummy) #code += "globals().pop('%s',lambda :None)()\n" % dummy return code def dumpsource(object, alias='', new=False, enclose=True): """'dump to source', where the code includes a pickled object. If new=True and object is a class instance, then create a new instance using the unpacked class source code. If enclose, then create the object inside a function enclosure (thus minimizing any global namespace pollution). 
""" from dill import dumps pik = repr(dumps(object)) code = 'import dill\n' if enclose: stub = '__this_is_a_stub_variable__' #XXX: *must* be same _enclose.stub pre = '%s = ' % stub new = False #FIXME: new=True doesn't work with enclose=True else: stub = alias pre = '%s = ' % stub if alias else alias # if a 'new' instance is not needed, then just dump and load if not new or not _isinstance(object): code += pre + 'dill.loads(%s)\n' % pik else: #XXX: other cases where source code is needed??? code += getsource(object.__class__, alias='', lstrip=True, force=True) mod = repr(object.__module__) # should have a module (no builtins here) code += pre + 'dill.loads(%s.replace(b%s,bytes(__name__,"UTF-8")))\n' % (pik,mod) #code += 'del %s' % object.__class__.__name__ #NOTE: kills any existing! if enclose: # generation of the 'enclosure' dummy = '__this_is_a_big_dummy_object__' dummy = _enclose(dummy, alias=alias) # hack to replace the 'dummy' with the 'real' code dummy = dummy.split('\n') code = dummy[0]+'\n' + indent(code) + '\n'.join(dummy[-3:]) return code #XXX: better 'dumpsourcelines', returning list of lines? def getname(obj, force=False, fqn=False): #XXX: throw(?) to raise error on fail? """get the name of the object. for lambdas, get the name of the pointer """ if fqn: return '.'.join(_namespace(obj)) #NOTE: returns 'type' module = getmodule(obj) if not module: # things like "None" and "1" if not force: return None #NOTE: returns 'instance' NOT 'type' #FIXME? # handle some special cases if hasattr(obj, 'dtype') and not obj.shape: return getname(obj.__class__) + "(" + repr(obj.tolist()) + ")" return repr(obj) try: #XXX: 'wrong' for decorators and curried functions ? # if obj.func_closure: ...use logic from getimportable, etc ? 
name = obj.__name__ if name == '': return getsource(obj).split('=',1)[0].strip() # handle some special cases if module.__name__ in ['builtins','__builtin__']: if name == 'ellipsis': name = 'EllipsisType' return name except AttributeError: #XXX: better to just throw AttributeError ? if not force: return None name = repr(obj) if name.startswith('<'): # or name.split('('): return None return name def _namespace(obj): """_namespace(obj); return namespace hierarchy (as a list of names) for the given object. For an instance, find the class hierarchy. For example: >>> from functools import partial >>> p = partial(int, base=2) >>> _namespace(p) [\'functools\', \'partial\'] """ # mostly for functions and modules and such #FIXME: 'wrong' for decorators and curried functions try: #XXX: needs some work and testing on different types module = qual = str(getmodule(obj)).split()[1].strip('>').strip('"').strip("'") qual = qual.split('.') if ismodule(obj): return qual # get name of a lambda, function, etc name = getname(obj) or obj.__name__ # failing, raise AttributeError # check special cases (NoneType, ...) if module in ['builtins','__builtin__']: # BuiltinFunctionType if _intypes(name): return ['types'] + [name] return qual + [name] #XXX: can be wrong for some aliased objects except Exception: pass # special case: numpy.inf and numpy.nan (we don't want them as floats) if str(obj) in ['inf','nan','Inf','NaN']: # is more, but are they needed? return ['numpy'] + [str(obj)] # mostly for classes and class instances and such module = getattr(obj.__class__, '__module__', None) qual = str(obj.__class__) try: qual = qual[qual.index("'")+1:-2] except ValueError: pass # str(obj.__class__) made the 'try' unnecessary qual = qual.split(".") if module in ['builtins','__builtin__']: # check special cases (NoneType, Ellipsis, ...) 
if qual[-1] == 'ellipsis': qual[-1] = 'EllipsisType' if _intypes(qual[-1]): module = 'types' #XXX: BuiltinFunctionType qual = [module] + qual return qual #NOTE: 05/25/14 broke backward compatibility: added 'alias' as 3rd argument def _getimport(head, tail, alias='', verify=True, builtin=False): """helper to build a likely import string from head and tail of namespace. ('head','tail') are used in the following context: "from head import tail" If verify=True, then test the import string before returning it. If builtin=True, then force an import for builtins where possible. If alias is provided, then rename the object on import. """ # special handling for a few common types if tail in ['Ellipsis', 'NotImplemented'] and head in ['types']: head = len.__module__ elif tail in ['None'] and head in ['types']: _alias = '%s = ' % alias if alias else '' if alias == tail: _alias = '' return _alias+'%s\n' % tail # we don't need to import from builtins, so return '' # elif tail in ['NoneType','int','float','long','complex']: return '' #XXX: ? if head in ['builtins','__builtin__']: # special cases (NoneType, Ellipsis, ...) #XXX: BuiltinFunctionType if tail == 'ellipsis': tail = 'EllipsisType' if _intypes(tail): head = 'types' elif not builtin: _alias = '%s = ' % alias if alias else '' if alias == tail: _alias = '' return _alias+'%s\n' % tail else: pass # handle builtins below # get likely import string if not head: _str = "import %s" % tail else: _str = "from %s import %s" % (head, tail) _alias = " as %s\n" % alias if alias else "\n" if alias == tail: _alias = "\n" _str += _alias # FIXME: fails on most decorators, currying, and such... # (could look for magic __wrapped__ or __func__ attr) # (could fix in 'namespace' to check obj for closure) if verify and not head.startswith('dill.'):# weird behavior for dill #print(_str) try: exec(_str) #XXX: check if == obj? (name collision) except ImportError: #XXX: better top-down or bottom-up recursion? 
_head = head.rsplit(".",1)[0] #(or get all, then compare == obj?) if not _head: raise if _head != head: _str = _getimport(_head, tail, alias, verify) return _str #XXX: rename builtin to force? vice versa? verify to force? (as in getsource) #NOTE: 05/25/14 broke backward compatibility: added 'alias' as 2nd argument def getimport(obj, alias='', verify=True, builtin=False, enclosing=False): """get the likely import string for the given object obj is the object to inspect If verify=True, then test the import string before returning it. If builtin=True, then force an import for builtins where possible. If enclosing=True, get the import for the outermost enclosing callable. If alias is provided, then rename the object on import. """ if enclosing: from .detect import outermost _obj = outermost(obj) obj = _obj if _obj else obj # get the namespace qual = _namespace(obj) head = '.'.join(qual[:-1]) tail = qual[-1] # for named things... with a nice repr #XXX: move into _namespace? try: # look for '<...>' and be mindful it might be in lists, dicts, etc... name = repr(obj).split('<',1)[1].split('>',1)[1] name = None # we have a 'object'-style repr except Exception: # it's probably something 'importable' if head in ['builtins','__builtin__']: name = repr(obj) #XXX: catch [1,2], (1,2), set([1,2])... others? 
elif _isinstance(obj): name = getname(obj, force=True).split('(')[0] else: name = repr(obj).split('(')[0] #if not repr(obj).startswith('<'): name = repr(obj).split('(')[0] #else: name = None if name: # try using name instead of tail try: return _getimport(head, name, alias, verify, builtin) except ImportError: pass except SyntaxError: if head in ['builtins','__builtin__']: _alias = '%s = ' % alias if alias else '' if alias == name: _alias = '' return _alias+'%s\n' % name else: pass try: #if type(obj) is type(abs): _builtin = builtin # BuiltinFunctionType #else: _builtin = False return _getimport(head, tail, alias, verify, builtin) except ImportError: raise # could do some checking against obj except SyntaxError: if head in ['builtins','__builtin__']: _alias = '%s = ' % alias if alias else '' if alias == tail: _alias = '' return _alias+'%s\n' % tail raise # could do some checking against obj def _importable(obj, alias='', source=None, enclosing=False, force=True, \ builtin=True, lstrip=True): """get an import string (or the source code) for the given object This function will attempt to discover the name of the object, or the repr of the object, or the source code for the object. To attempt to force discovery of the source code, use source=True, to attempt to force the use of an import, use source=False; otherwise an import will be sought for objects not defined in __main__. The intent is to build a string that can be imported from a python file. obj is the object to inspect. If alias is provided, then rename the object with the given alias. If source=True, use these options: If enclosing=True, then also return any enclosing code. If force=True, catch (TypeError,IOError) and try to use import hooks. If lstrip=True, ensure there is no indentation in the first line of code. If source=False, use these options: If enclosing=True, get the import for the outermost enclosing callable. If force=True, then don't test the import string before returning it. 
If builtin=True, then force an import for builtins where possible. """ if source is None: source = True if isfrommain(obj) else False if source: # first try to get the source try: return getsource(obj, alias, enclosing=enclosing, \ force=force, lstrip=lstrip, builtin=builtin) except Exception: pass try: if not _isinstance(obj): return getimport(obj, alias, enclosing=enclosing, \ verify=(not force), builtin=builtin) # first 'get the import', then 'get the instance' _import = getimport(obj, enclosing=enclosing, \ verify=(not force), builtin=builtin) name = getname(obj, force=True) if not name: raise AttributeError("object has no atribute '__name__'") _alias = "%s = " % alias if alias else "" if alias == name: _alias = "" return _import+_alias+"%s\n" % name except Exception: pass if not source: # try getsource, only if it hasn't been tried yet try: return getsource(obj, alias, enclosing=enclosing, \ force=force, lstrip=lstrip, builtin=builtin) except Exception: pass # get the name (of functions, lambdas, and classes) # or hope that obj can be built from the __repr__ #XXX: what to do about class instances and such? obj = getname(obj, force=force) # we either have __repr__ or __name__ (or None) if not obj or obj.startswith('<'): raise AttributeError("object has no atribute '__name__'") _alias = '%s = ' % alias if alias else '' if alias == obj: _alias = '' return _alias+'%s\n' % obj #XXX: possible failsafe... 
(for example, for instances when source=False) # "import dill; result = dill.loads(); # repr()" def _closuredimport(func, alias='', builtin=False): """get import for closured objects; return a dict of 'name' and 'import'""" import re from .detect import freevars, outermost free_vars = freevars(func) func_vars = {} # split into 'funcs' and 'non-funcs' for name,obj in list(free_vars.items()): if not isfunction(obj): continue # get import for 'funcs' fobj = free_vars.pop(name) src = getsource(fobj) if src.lstrip().startswith('@'): # we have a decorator src = getimport(fobj, alias=alias, builtin=builtin) else: # we have to "hack" a bit... and maybe be lucky encl = outermost(func) # pattern: 'func = enclosing(fobj' pat = r'.*[\w\s]=\s*'+getname(encl)+r'\('+getname(fobj) mod = getname(getmodule(encl)) #HACK: get file containing 'outer' function; is func there? lines,_ = findsource(encl) candidate = [line for line in lines if getname(encl) in line and \ re.match(pat, line)] if not candidate: mod = getname(getmodule(fobj)) #HACK: get file containing 'inner' function; is func there? lines,_ = findsource(fobj) candidate = [line for line in lines \ if getname(fobj) in line and re.match(pat, line)] if not len(candidate): raise TypeError('import could not be found') candidate = candidate[-1] name = candidate.split('=',1)[0].split()[-1].strip() src = _getimport(mod, name, alias=alias, builtin=builtin) func_vars[name] = src if not func_vars: name = outermost(func) mod = getname(getmodule(name)) if not mod or name is func: # then it can be handled by getimport name = getname(func, force=True) #XXX: better key? 
src = getimport(func, alias=alias, builtin=builtin) else: lines,_ = findsource(name) # pattern: 'func = enclosing(' candidate = [line for line in lines if getname(name) in line and \ re.match(r'.*[\w\s]=\s*'+getname(name)+r'\(', line)] if not len(candidate): raise TypeError('import could not be found') candidate = candidate[-1] name = candidate.split('=',1)[0].split()[-1].strip() src = _getimport(mod, name, alias=alias, builtin=builtin) func_vars[name] = src return func_vars #XXX: should be able to use __qualname__ def _closuredsource(func, alias=''): """get source code for closured objects; return a dict of 'name' and 'code blocks'""" #FIXME: this entire function is a messy messy HACK # - pollutes global namespace # - fails if name of freevars are reused # - can unnecessarily duplicate function code from .detect import freevars free_vars = freevars(func) func_vars = {} # split into 'funcs' and 'non-funcs' for name,obj in list(free_vars.items()): if not isfunction(obj): # get source for 'non-funcs' free_vars[name] = getsource(obj, force=True, alias=name) continue # get source for 'funcs' fobj = free_vars.pop(name) src = getsource(fobj, alias) # DO NOT include dependencies # if source doesn't start with '@', use name as the alias if not src.lstrip().startswith('@'): #FIXME: 'enclose' in dummy; src = importable(fobj,alias=name)# wrong ref 'name' org = getsource(func, alias, enclosing=False, lstrip=True) src = (src, org) # undecorated first, then target else: #NOTE: reproduces the code! org = getsource(func, enclosing=True, lstrip=False) src = importable(fobj, alias, source=True) # include dependencies src = (org, src) # target first, then decorated func_vars[name] = src src = ''.join(free_vars.values()) if not func_vars: #FIXME: 'enclose' in dummy; wrong ref 'name' org = getsource(func, alias, force=True, enclosing=False, lstrip=True) src = (src, org) # variables first, then target else: src = (src, None) # just variables (better '' instead of None?) 
func_vars[None] = src # FIXME: remove duplicates (however, order is important...) return func_vars def importable(obj, alias='', source=None, builtin=True): """get an importable string (i.e. source code or the import string) for the given object, including any required objects from the enclosing and global scope This function will attempt to discover the name of the object, or the repr of the object, or the source code for the object. To attempt to force discovery of the source code, use source=True, to attempt to force the use of an import, use source=False; otherwise an import will be sought for objects not defined in __main__. The intent is to build a string that can be imported from a python file. obj is the object to inspect. If alias is provided, then rename the object with the given alias. If builtin=True, then force an import for builtins where possible. """ #NOTE: we always 'force', and 'lstrip' as necessary #NOTE: for 'enclosing', use importable(outermost(obj)) if source is None: source = True if isfrommain(obj) else False elif builtin and isbuiltin(obj): source = False tried_source = tried_import = False while True: if not source: # we want an import try: if _isinstance(obj): # for instances, punt to _importable return _importable(obj, alias, source=False, builtin=builtin) src = _closuredimport(obj, alias=alias, builtin=builtin) if len(src) == 0: raise NotImplementedError('not implemented') if len(src) > 1: raise NotImplementedError('not implemented') return list(src.values())[0] except Exception: if tried_source: raise tried_import = True # we want the source try: src = _closuredsource(obj, alias=alias) if len(src) == 0: raise NotImplementedError('not implemented') # groan... 
an inline code stitcher def _code_stitcher(block): "stitch together the strings in tuple 'block'" if block[0] and block[-1]: block = '\n'.join(block) elif block[0]: block = block[0] elif block[-1]: block = block[-1] else: block = '' return block # get free_vars first _src = _code_stitcher(src.pop(None)) _src = [_src] if _src else [] # get func_vars for xxx in src.values(): xxx = _code_stitcher(xxx) if xxx: _src.append(xxx) # make a single source string if not len(_src): src = '' elif len(_src) == 1: src = _src[0] else: src = '\n'.join(_src) # get source code of objects referred to by obj in global scope from .detect import globalvars obj = globalvars(obj) #XXX: don't worry about alias? recurse? etc? obj = list(getsource(_obj,name,force=True) for (name,_obj) in obj.items() if not isbuiltin(_obj)) obj = '\n'.join(obj) if obj else '' # combine all referred-to source (global then enclosing) if not obj: return src if not src: return obj return obj + src except Exception: if tried_import: raise tried_source = True source = not source # should never get here return # backward compatibility def getimportable(obj, alias='', byname=True, explicit=False): return importable(obj,alias,source=(not byname),builtin=explicit) #return outdent(_importable(obj,alias,source=(not byname),builtin=explicit)) def likely_import(obj, passive=False, explicit=False): return getimport(obj, verify=(not passive), builtin=explicit) def _likely_import(first, last, passive=False, explicit=True): return _getimport(first, last, verify=(not passive), builtin=explicit) _get_name = getname getblocks_from_history = getblocks # EOF ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702238.0 dill-0.4.0/dill/temp.py0000644000076500000240000001753314735133336014465 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2008-2016 California Institute of Technology. 
# Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE """ Methods for serialized objects (or source code) stored in temporary files and file-like objects. """ #XXX: better instead to have functions write to any given file-like object ? #XXX: currently, all file-like objects are created by the function... __all__ = ['dump_source', 'dump', 'dumpIO_source', 'dumpIO',\ 'load_source', 'load', 'loadIO_source', 'loadIO',\ 'capture'] import contextlib @contextlib.contextmanager def capture(stream='stdout'): """builds a context that temporarily replaces the given stream name >>> with capture('stdout') as out: ... print ("foo!") ... >>> print (out.getvalue()) foo! """ import sys from io import StringIO orig = getattr(sys, stream) setattr(sys, stream, StringIO()) try: yield getattr(sys, stream) finally: setattr(sys, stream, orig) def b(x): # deal with b'foo' versus 'foo' import codecs return codecs.latin_1_encode(x)[0] def load_source(file, **kwds): """load an object that was stored with dill.temp.dump_source file: filehandle alias: string name of stored object mode: mode to open the file, one of: {'r', 'rb'} >>> f = lambda x: x**2 >>> pyfile = dill.temp.dump_source(f, alias='_f') >>> _f = dill.temp.load_source(pyfile) >>> _f(4) 16 """ alias = kwds.pop('alias', None) mode = kwds.pop('mode', 'r') fname = getattr(file, 'name', file) # fname=file.name or fname=file (if str) source = open(fname, mode=mode, **kwds).read() if not alias: tag = source.strip().splitlines()[-1].split() if tag[0] != '#NAME:': stub = source.splitlines()[0] raise IOError("unknown name for code: %s" % stub) alias = tag[-1] local = {} exec(source, local) _ = eval("%s" % alias, local) return _ def dump_source(object, **kwds): """write object source to a NamedTemporaryFile (instead of dill.dump) Loads with "import" or "dill.temp.load_source". Returns the filehandle. 
>>> f = lambda x: x**2 >>> pyfile = dill.temp.dump_source(f, alias='_f') >>> _f = dill.temp.load_source(pyfile) >>> _f(4) 16 >>> f = lambda x: x**2 >>> pyfile = dill.temp.dump_source(f, dir='.') >>> modulename = os.path.basename(pyfile.name).split('.py')[0] >>> exec('from %s import f as _f' % modulename) >>> _f(4) 16 Optional kwds: If 'alias' is specified, the object will be renamed to the given string. If 'prefix' is specified, the file name will begin with that prefix, otherwise a default prefix is used. If 'dir' is specified, the file will be created in that directory, otherwise a default directory is used. If 'text' is specified and true, the file is opened in text mode. Else (the default) the file is opened in binary mode. On some operating systems, this makes no difference. NOTE: Keep the return value for as long as you want your file to exist ! """ #XXX: write a "load_source"? from .source import importable, getname import tempfile kwds.setdefault('delete', True) kwds.pop('suffix', '') # this is *always* '.py' alias = kwds.pop('alias', '') #XXX: include an alias so a name is known name = str(alias) or getname(object) name = "\n#NAME: %s\n" % name #XXX: assumes kwds['dir'] is writable and on $PYTHONPATH file = tempfile.NamedTemporaryFile(suffix='.py', **kwds) file.write(b(''.join([importable(object, alias=alias),name]))) file.flush() return file def load(file, **kwds): """load an object that was stored with dill.temp.dump file: filehandle mode: mode to open the file, one of: {'r', 'rb'} >>> dumpfile = dill.temp.dump([1, 2, 3, 4, 5]) >>> dill.temp.load(dumpfile) [1, 2, 3, 4, 5] """ import dill as pickle mode = kwds.pop('mode', 'rb') name = getattr(file, 'name', file) # name=file.name or name=file (if str) return pickle.load(open(name, mode=mode, **kwds)) def dump(object, **kwds): """dill.dump of object to a NamedTemporaryFile. Loads with "dill.temp.load". Returns the filehandle. 
>>> dumpfile = dill.temp.dump([1, 2, 3, 4, 5]) >>> dill.temp.load(dumpfile) [1, 2, 3, 4, 5] Optional kwds: If 'suffix' is specified, the file name will end with that suffix, otherwise there will be no suffix. If 'prefix' is specified, the file name will begin with that prefix, otherwise a default prefix is used. If 'dir' is specified, the file will be created in that directory, otherwise a default directory is used. If 'text' is specified and true, the file is opened in text mode. Else (the default) the file is opened in binary mode. On some operating systems, this makes no difference. NOTE: Keep the return value for as long as you want your file to exist ! """ import dill as pickle import tempfile kwds.setdefault('delete', True) file = tempfile.NamedTemporaryFile(**kwds) pickle.dump(object, file) file.flush() return file def loadIO(buffer, **kwds): """load an object that was stored with dill.temp.dumpIO buffer: buffer object >>> dumpfile = dill.temp.dumpIO([1, 2, 3, 4, 5]) >>> dill.temp.loadIO(dumpfile) [1, 2, 3, 4, 5] """ import dill as pickle from io import BytesIO as StringIO value = getattr(buffer, 'getvalue', buffer) # value or buffer.getvalue if value != buffer: value = value() # buffer.getvalue() return pickle.load(StringIO(value)) def dumpIO(object, **kwds): """dill.dump of object to a buffer. Loads with "dill.temp.loadIO". Returns the buffer object. 
def loadIO_source(buffer, **kwds):
    """load an object that was stored with dill.temp.dumpIO_source

    buffer: buffer object
    alias: string name of stored object

    >>> f = lambda x:x**2
    >>> pyfile = dill.temp.dumpIO_source(f, alias='_f')
    >>> _f = dill.temp.loadIO_source(pyfile)
    >>> _f(4)
    16
    """
    alias = kwds.pop('alias', None)
    source = getattr(buffer, 'getvalue', buffer) # source or buffer.getvalue
    # identity (not equality) decides whether we got a buffer or raw content
    if source is not buffer: source = source() # buffer.getvalue()
    # dumpIO_source writes bytes, but also accept an already-decoded string
    if isinstance(source, bytes): source = source.decode()
    if not alias:
        # the stored name is recorded on the trailing "#NAME: <alias>" line
        lines = source.strip().splitlines()
        tag = lines[-1].split() if lines else []
        if not tag or tag[0] != '#NAME:':
            # raise IOError (not IndexError) for empty/unnamed code
            stub = source.splitlines()[0] if source.splitlines() else source
            raise IOError("unknown name for code: %s" % stub)
        alias = tag[-1]
    local = {}
    exec(source, local)
    return eval(alias, local)
""" from .source import importable, getname from io import BytesIO as StringIO alias = kwds.pop('alias', '') #XXX: include an alias so a name is known name = str(alias) or getname(object) name = "\n#NAME: %s\n" % name #XXX: assumes kwds['dir'] is writable and on $PYTHONPATH file = StringIO() file.write(b(''.join([importable(object, alias=alias),name]))) file.flush() return file del contextlib # EOF ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1744763843.2461464 dill-0.4.0/dill/tests/0000755000076500000240000000000014777575703014315 5ustar00mmckernsstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702238.0 dill-0.4.0/dill/tests/__init__.py0000644000076500000240000000073714735133336016417 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2018-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE """ to run this test suite, first build and install `dill`. $ python -m pip install ../.. then run the tests with: $ python -m dill.tests or, if `nose` is installed: $ nosetests """ ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702238.0 dill-0.4.0/dill/tests/__main__.py0000644000076500000240000000160314735133336016371 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2018-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. 
import glob
import os
import subprocess
import sys

# prefer a pox-discovered python, when pox is installed; else the running interpreter
python = sys.executable
try:
    import pox
    python = pox.which_python(version=True) or python
except ImportError:
    pass

# on Windows, run the child process through the shell
shell = sys.platform[:3] == 'win'

# collect every test_*.py living beside this module
suite = os.path.dirname(__file__) or os.path.curdir
tests = glob.glob(suite + os.path.sep + 'test_*.py')

if __name__ == '__main__':
    failed = 0
    for script in tests:
        returncode = subprocess.Popen([python, script], shell=shell).wait()
        if returncode:
            failed = 1
            print('F', end='', flush=True)
        else:
            print('.', end='', flush=True)
    print('')
    exit(failed)
The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE """ test dill's ability to pickle abstract base class objects """ import dill import abc from abc import ABC import warnings from types import FunctionType dill.settings['recurse'] = True class OneTwoThree(ABC): @abc.abstractmethod def foo(self): """A method""" pass @property @abc.abstractmethod def bar(self): """Property getter""" pass @bar.setter @abc.abstractmethod def bar(self, value): """Property setter""" pass @classmethod @abc.abstractmethod def cfoo(cls): """Class method""" pass @staticmethod @abc.abstractmethod def sfoo(): """Static method""" pass class EasyAsAbc(OneTwoThree): def __init__(self): self._bar = None def foo(self): return "Instance Method FOO" @property def bar(self): return self._bar @bar.setter def bar(self, value): self._bar = value @classmethod def cfoo(cls): return "Class Method CFOO" @staticmethod def sfoo(): return "Static Method SFOO" def test_abc_non_local(): assert dill.copy(OneTwoThree) is not OneTwoThree assert dill.copy(EasyAsAbc) is not EasyAsAbc with warnings.catch_warnings(): warnings.simplefilter("ignore", dill.PicklingWarning) assert dill.copy(OneTwoThree, byref=True) is OneTwoThree assert dill.copy(EasyAsAbc, byref=True) is EasyAsAbc instance = EasyAsAbc() # Set a property that StockPickle can't preserve instance.bar = lambda x: x**2 depickled = dill.copy(instance) assert type(depickled) is type(instance) #NOTE: issue #612, test_abc_local #NOTE: dill.copy of local (or non-local) classes should (not) be the same? 
assert type(depickled.bar) is FunctionType assert depickled.bar(3) == 9 assert depickled.sfoo() == "Static Method SFOO" assert depickled.cfoo() == "Class Method CFOO" assert depickled.foo() == "Instance Method FOO" def test_abc_local(): """ Test using locally scoped ABC class """ class LocalABC(ABC): @abc.abstractmethod def foo(self): pass def baz(self): return repr(self) labc = dill.copy(LocalABC) assert labc is not LocalABC assert type(labc) is type(LocalABC) #NOTE: dill.copy of local (or non-local) classes should (not) be the same? # # .LocalABC'> class Real(labc): def foo(self): return "True!" def baz(self): return "My " + super(Real, self).baz() real = Real() assert real.foo() == "True!" try: labc() except TypeError as e: # Expected error pass else: print('Failed to raise type error') assert False labc2, pik = dill.copy((labc, Real())) assert 'Real' == type(pik).__name__ assert '.Real' in type(pik).__qualname__ assert type(pik) is not Real assert labc2 is not LocalABC assert labc2 is not labc assert isinstance(pik, labc2) assert not isinstance(pik, labc) assert not isinstance(pik, LocalABC) assert pik.baz() == "My " + repr(pik) def test_meta_local_no_cache(): """ Test calling metaclass and cache registration """ LocalMetaABC = abc.ABCMeta('LocalMetaABC', (), {}) class ClassyClass: pass class KlassyClass: pass LocalMetaABC.register(ClassyClass) assert not issubclass(KlassyClass, LocalMetaABC) assert issubclass(ClassyClass, LocalMetaABC) res = dill.dumps((LocalMetaABC, ClassyClass, KlassyClass)) lmabc, cc, kc = dill.loads(res) assert type(lmabc) == type(LocalMetaABC) assert not issubclass(kc, lmabc) assert issubclass(cc, lmabc) if __name__ == '__main__': test_abc_non_local() test_abc_local() test_meta_local_no_cache() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702238.0 dill-0.4.0/dill/tests/test_check.py0000644000076500000240000000256414735133336016774 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns 
#FIXME: this doesn't catch output... it's from the internal call
def raise_check(func, **kwds):
    """run dill.check on func; fail with AssertionError if it errors or prints a traceback"""
    try:
        with capture('stdout') as out:
            check(func, **kwds)
    except Exception:
        error = sys.exc_info()[1]
        raise AssertionError(str(error))
    else:
        assert 'Traceback' not in out.getvalue()
    finally:
        out.close()

f = lambda x:x**2

def test_simple(verbose=None):
    raise_check(f, verbose=verbose)

def test_recurse(verbose=None):
    raise_check(f, recurse=True, verbose=verbose)

def test_byref(verbose=None):
    raise_check(f, byref=True, verbose=verbose)

def test_protocol(verbose=None):
    raise_check(f, protocol=True, verbose=verbose)

def test_python(verbose=None):
    raise_check(f, python=None, verbose=verbose)

#TODO: test incompatible versions
#TODO: test dump failure
#TODO: test load failure
if __name__ == '__main__':
    for runner in (test_simple, test_recurse, test_byref,
                   test_protocol, test_python):
        runner()
The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE import dill from enum import EnumMeta import sys dill.settings['recurse'] = True # test classdefs class _class: def _method(self): pass def ok(self): return True class _class2: def __call__(self): pass def ok(self): return True class _newclass(object): def _method(self): pass def ok(self): return True class _newclass2(object): def __call__(self): pass def ok(self): return True class _meta(type): pass def __call__(self): pass def ok(self): return True _mclass = _meta("_mclass", (object,), {"__call__": __call__, "ok": ok}) del __call__ del ok o = _class() oc = _class2() n = _newclass() nc = _newclass2() m = _mclass() if sys.hexversion < 0x03090000: import typing class customIntList(typing.List[int]): pass else: class customIntList(list[int]): pass # test pickles for class instances def test_class_instances(): assert dill.pickles(o) assert dill.pickles(oc) assert dill.pickles(n) assert dill.pickles(nc) assert dill.pickles(m) def test_class_objects(): clslist = [_class,_class2,_newclass,_newclass2,_mclass] objlist = [o,oc,n,nc,m] _clslist = [dill.dumps(obj) for obj in clslist] _objlist = [dill.dumps(obj) for obj in objlist] for obj in clslist: globals().pop(obj.__name__) del clslist for obj in ['o','oc','n','nc']: globals().pop(obj) del objlist del obj for obj,cls in zip(_objlist,_clslist): _cls = dill.loads(cls) _obj = dill.loads(obj) assert _obj.ok() assert _cls.ok(_cls()) if _cls.__name__ == "_mclass": assert type(_cls).__name__ == "_meta" # test NoneType def test_specialtypes(): assert dill.pickles(type(None)) assert dill.pickles(type(NotImplemented)) assert dill.pickles(type(Ellipsis)) assert dill.pickles(type(EnumMeta)) from collections import namedtuple Z = namedtuple("Z", ['a','b']) Zi = Z(0,1) X = namedtuple("Y", ['a','b']) X.__name__ = "X" X.__qualname__ = "X" #XXX: name must 'match' or fails to pickle Xi = X(0,1) Bad = namedtuple("FakeName", ['a','b']) Badi = 
Bad(0,1) Defaults = namedtuple('Defaults', ['x', 'y'], defaults=[1]) Defaultsi = Defaults(2) # test namedtuple def test_namedtuple(): assert Z is dill.loads(dill.dumps(Z)) assert Zi == dill.loads(dill.dumps(Zi)) assert X is dill.loads(dill.dumps(X)) assert Xi == dill.loads(dill.dumps(Xi)) assert Defaults is dill.loads(dill.dumps(Defaults)) assert Defaultsi == dill.loads(dill.dumps(Defaultsi)) assert Bad is not dill.loads(dill.dumps(Bad)) assert Bad._fields == dill.loads(dill.dumps(Bad))._fields assert tuple(Badi) == tuple(dill.loads(dill.dumps(Badi))) class A: class B(namedtuple("C", ["one", "two"])): '''docstring''' B.__module__ = 'testing' a = A() assert dill.copy(a) assert dill.copy(A.B).__name__ == 'B' assert dill.copy(A.B).__qualname__.endswith('..A.B') assert dill.copy(A.B).__doc__ == 'docstring' assert dill.copy(A.B).__module__ == 'testing' from typing import NamedTuple def A(): class B(NamedTuple): x: int return B assert type(dill.copy(A()(8))).__qualname__ == type(A()(8)).__qualname__ def test_dtype(): try: import numpy as np dti = np.dtype('int') assert np.dtype == dill.copy(np.dtype) assert dti == dill.copy(dti) except ImportError: pass def test_array_nested(): try: import numpy as np x = np.array([1]) y = (x,) assert y == dill.copy(y) except ImportError: pass def test_array_subclass(): try: import numpy as np class TestArray(np.ndarray): def __new__(cls, input_array, color): obj = np.asarray(input_array).view(cls) obj.color = color return obj def __array_finalize__(self, obj): if obj is None: return if isinstance(obj, type(self)): self.color = obj.color def __getnewargs__(self): return np.asarray(self), self.color a1 = TestArray(np.zeros(100), color='green') if not dill._dill.IS_PYPY: assert dill.pickles(a1) assert a1.__dict__ == dill.copy(a1).__dict__ a2 = a1[0:9] if not dill._dill.IS_PYPY: assert dill.pickles(a2) assert a2.__dict__ == dill.copy(a2).__dict__ class TestArray2(np.ndarray): color = 'blue' a3 = TestArray2([1,2,3,4,5]) a3.color = 'green' if 
not dill._dill.IS_PYPY: assert dill.pickles(a3) assert a3.__dict__ == dill.copy(a3).__dict__ except ImportError: pass def test_method_decorator(): class A(object): @classmethod def test(cls): pass a = A() res = dill.dumps(a) new_obj = dill.loads(res) new_obj.__class__.test() # test slots class Y(object): __slots__ = ('y', '__weakref__') def __init__(self, y): self.y = y value = 123 y = Y(value) class Y2(object): __slots__ = 'y' def __init__(self, y): self.y = y def test_slots(): assert dill.pickles(Y) assert dill.pickles(y) assert dill.pickles(Y.y) assert dill.copy(y).y == value assert dill.copy(Y2(value)).y == value def test_origbases(): assert dill.copy(customIntList).__orig_bases__ == customIntList.__orig_bases__ def test_attr(): import attr @attr.s class A: a = attr.ib() v = A(1) assert dill.copy(v) == v def test_metaclass(): class metaclass_with_new(type): def __new__(mcls, name, bases, ns, **kwds): cls = super().__new__(mcls, name, bases, ns, **kwds) assert mcls is not None assert cls.method(mcls) return cls def method(cls, mcls): return isinstance(cls, mcls) l = locals() exec("""class subclass_with_new(metaclass=metaclass_with_new): def __new__(cls): self = super().__new__(cls) return self""", None, l) subclass_with_new = l['subclass_with_new'] assert dill.copy(subclass_with_new()) def test_enummeta(): from http import HTTPStatus import enum assert dill.copy(HTTPStatus.OK) is HTTPStatus.OK assert dill.copy(enum.EnumMeta) is enum.EnumMeta def test_inherit(): #NOTE: see issue #612 class Foo: w = 0 x = 1 y = 1.1 a = () b = (1,) n = None class Bar(Foo): w = 2 x = 1 y = 1.1 z = 0.2 a = () b = (1,) c = (2,) n = None Baz = dill.copy(Bar) import platform is_pypy = platform.python_implementation() == 'PyPy' assert Bar.__dict__ == Baz.__dict__ # ints assert 'w' in Bar.__dict__ and 'w' in Baz.__dict__ assert Bar.__dict__['w'] is Baz.__dict__['w'] assert 'x' in Bar.__dict__ and 'x' in Baz.__dict__ assert Bar.__dict__['x'] is Baz.__dict__['x'] # floats assert 'y' in 
Bar.__dict__ and 'y' in Baz.__dict__ same = Bar.__dict__['y'] is Baz.__dict__['y'] assert same if is_pypy else not same assert 'z' in Bar.__dict__ and 'z' in Baz.__dict__ same = Bar.__dict__['z'] is Baz.__dict__['z'] assert same if is_pypy else not same # tuples assert 'a' in Bar.__dict__ and 'a' in Baz.__dict__ assert Bar.__dict__['a'] is Baz.__dict__['a'] assert 'b' in Bar.__dict__ and 'b' in Baz.__dict__ assert Bar.__dict__['b'] is not Baz.__dict__['b'] assert 'c' in Bar.__dict__ and 'c' in Baz.__dict__ assert Bar.__dict__['c'] is not Baz.__dict__['c'] # None assert 'n' in Bar.__dict__ and 'n' in Baz.__dict__ assert Bar.__dict__['n'] is Baz.__dict__['n'] if __name__ == '__main__': test_class_instances() test_class_objects() test_specialtypes() test_namedtuple() test_dtype() test_array_nested() test_array_subclass() test_method_decorator() test_slots() test_origbases() test_metaclass() test_enummeta() test_inherit() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702238.0 dill-0.4.0/dill/tests/test_dataclasses.py0000644000076500000240000000157214735133336020204 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Author: Anirudh Vegesana (avegesan@cs.stanford.edu) # Copyright (c) 2022-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. 
def test_dataclasses():
    # Issue #500
    @dataclasses.dataclass
    class A:
        x: int
        y: str

    @dataclasses.dataclass
    class B:
        a: A

    original = B(A(1, "test"))
    restored = dill.loads(dill.dumps(original))
    # the locally-defined classes are re-created on load, so instances compare unequal...
    assert original != restored  # classes don't match
    # ...but rebuilding with the original classes recovers equality
    assert original == B(A(**dataclasses.asdict(restored.a)))
    assert dataclasses.asdict(original) == dataclasses.asdict(restored)

if __name__ == '__main__':
    test_dataclasses()
The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE from dill.detect import baditems, badobjects, badtypes, errors, parent, at, globalvars from dill import settings from dill._dill import IS_PYPY from pickle import PicklingError import inspect import sys import os def test_bad_things(): f = inspect.currentframe() assert baditems(f) == [f] #assert baditems(globals()) == [f] #XXX assert badobjects(f) is f assert badtypes(f) == type(f) assert type(errors(f)) is TypeError d = badtypes(f, 1) assert isinstance(d, dict) assert list(badobjects(f, 1).keys()) == list(d.keys()) assert list(errors(f, 1).keys()) == list(d.keys()) s = set([(err.__class__.__name__,err.args[0]) for err in list(errors(f, 1).values())]) a = dict(s) if not os.environ.get('COVERAGE'): #XXX: travis-ci proxy = 0 if type(f.f_locals) is dict else 1 assert len(s) == len(a) + proxy # TypeError (and possibly PicklingError) n = 2 assert len(a) is n if 'PicklingError' in a.keys() else n-1 def test_parent(): x = [4,5,6,7] listiter = iter(x) obj = parent(listiter, list) assert obj is x if IS_PYPY: assert parent(obj, int) is None else: assert parent(obj, int) is x[-1] # python oddly? 
finds last int assert at(id(at)) is at a, b, c = 1, 2, 3 def squared(x): return a+x**2 def foo(x): def bar(y): return squared(x)+y return bar class _class: def _method(self): pass def ok(self): return True def test_globals(): def f(): a def g(): b def h(): c assert globalvars(f) == dict(a=1, b=2, c=3) res = globalvars(foo, recurse=True) assert set(res) == set(['squared', 'a']) res = globalvars(foo, recurse=False) assert res == {} zap = foo(2) res = globalvars(zap, recurse=True) assert set(res) == set(['squared', 'a']) res = globalvars(zap, recurse=False) assert set(res) == set(['squared']) del zap res = globalvars(squared) assert set(res) == set(['a']) # FIXME: should find referenced __builtins__ #res = globalvars(_class, recurse=True) #assert set(res) == set(['True']) #res = globalvars(_class, recurse=False) #assert res == {} #res = globalvars(_class.ok, recurse=True) #assert set(res) == set(['True']) #res = globalvars(_class.ok, recurse=False) #assert set(res) == set(['True']) #98 dill ignores __getstate__ in interactive lambdas bar = [0] class Foo(object): def __init__(self): pass def __getstate__(self): bar[0] = bar[0]+1 return {} def __setstate__(self, data): pass f = Foo() def test_getstate(): from dill import dumps, loads dumps(f) b = bar[0] dumps(lambda: f, recurse=False) # doesn't call __getstate__ assert bar[0] == b dumps(lambda: f, recurse=True) # calls __getstate__ assert bar[0] == b + 1 #97 serialize lambdas in test files def test_deleted(): global sin from dill import dumps, loads from math import sin, pi def sinc(x): return sin(x)/x settings['recurse'] = True _sinc = dumps(sinc) sin = globals().pop('sin') sin = 1 del sin sinc_ = loads(_sinc) # no NameError... 
def test_dictproxy():
    # a mappingproxy should round-trip through dill.copy
    assert dill.copy(DictProxyType({'a': 2}))

def test_dictviews():
    source = {'a': 1}
    # each of the three live dict views must copy successfully
    for view in (source.keys(), source.values(), source.items()):
        assert dill.copy(view)

def test_dictproxy_trick():
    if OLD310 or not MAPPING_PROXY_TRICK:
        return
    source = {'a': 1}
    live_views = (source.values(), source.items(), source.keys(), source)
    copied_views = dill.copy(live_views)
    clone = copied_views[-1]
    clone['b'] = 2
    clone['c'] = 1
    # mutating the copy must not touch the original dict or its views
    assert len(clone) == 3 and len(source) == 1
    for idx in range(3):
        assert len(copied_views[idx]) == 3 and len(live_views[idx]) == 1
    assert dict(live_views[1]) == source
    assert dict(copied_views[1]) == clone

if __name__ == '__main__':
    test_dictproxy()
    test_dictviews()
    test_dictproxy_trick()
dill-0.4.0/dill/tests/test_diff.py0000644000076500000240000000515314735133336016624 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2008-2016 California Institute of Technology. # Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE from dill import __diff as diff import sys IS_PYPY = not hasattr(sys, 'getrefcount') class A: pass def test_diff(): a = A() b = A() c = A() a.a = b b.a = c diff.memorise(a) assert not diff.has_changed(a) c.a = 1 assert diff.has_changed(a) diff.memorise(c, force=True) assert not diff.has_changed(a) c.a = 2 assert diff.has_changed(a) changed = diff.whats_changed(a) assert list(changed[0].keys()) == ["a"] assert not changed[1] a2 = [] b2 = [a2] c2 = [b2] diff.memorise(c2) assert not diff.has_changed(c2) a2.append(1) assert diff.has_changed(c2) changed = diff.whats_changed(c2) assert changed[0] == {} assert changed[1] a3 = {} b3 = {1: a3} c3 = {1: b3} diff.memorise(c3) assert not diff.has_changed(c3) a3[1] = 1 assert diff.has_changed(c3) changed = diff.whats_changed(c3) assert changed[0] == {} assert changed[1] if not IS_PYPY: import abc # make sure the "_abc_invaldation_counter" doesn't make test fail diff.memorise(abc.ABCMeta, force=True) assert not diff.has_changed(abc) abc.ABCMeta.zzz = 1 assert diff.has_changed(abc) changed = diff.whats_changed(abc) assert list(changed[0].keys()) == ["ABCMeta"] assert not changed[1] ''' import Queue diff.memorise(Queue, force=True) assert not diff.has_changed(Queue) Queue.Queue.zzz = 1 assert diff.has_changed(Queue) changed = diff.whats_changed(Queue) assert list(changed[0].keys()) == ["Queue"] assert not changed[1] import math diff.memorise(math, force=True) assert not diff.has_changed(math) math.zzz = 1 assert diff.has_changed(math) changed = diff.whats_changed(math) assert 
list(changed[0].keys()) == ["zzz"] assert not changed[1] ''' a = A() b = A() c = A() a.a = b b.a = c diff.memorise(a) assert not diff.has_changed(a) c.a = 1 assert diff.has_changed(a) diff.memorise(c, force=True) assert not diff.has_changed(a) del c.a assert diff.has_changed(a) changed = diff.whats_changed(a) assert list(changed[0].keys()) == ["a"] assert not changed[1] if __name__ == '__main__': test_diff() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702238.0 dill-0.4.0/dill/tests/test_extendpickle.py0000644000076500000240000000244314735133336020372 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2008-2016 California Institute of Technology. # Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE import dill as pickle from io import BytesIO as StringIO def my_fn(x): return x * 17 def test_extend(): obj = lambda : my_fn(34) assert obj() == 578 obj_io = StringIO() pickler = pickle.Pickler(obj_io) pickler.dump(obj) obj_str = obj_io.getvalue() obj2_io = StringIO(obj_str) unpickler = pickle.Unpickler(obj2_io) obj2 = unpickler.load() assert obj2() == 578 def test_isdill(): obj_io = StringIO() pickler = pickle.Pickler(obj_io) assert pickle._dill.is_dill(pickler) is True pickler = pickle._dill.StockPickler(obj_io) assert pickle._dill.is_dill(pickler) is False try: import multiprocess as mp pickler = mp.reduction.ForkingPickler(obj_io) assert pickle._dill.is_dill(pickler, child=True) is True assert pickle._dill.is_dill(pickler, child=False) is False except Exception: pass if __name__ == '__main__': test_extend() test_isdill() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702238.0 dill-0.4.0/dill/tests/test_fglobals.py0000644000076500000240000000321414735133336017501 
0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2021-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE import dill dill.settings['recurse'] = True def get_fun_with_strftime(): def fun_with_strftime(): import datetime return datetime.datetime.strptime("04-01-1943", "%d-%m-%Y").strftime( "%Y-%m-%d %H:%M:%S" ) return fun_with_strftime def get_fun_with_strftime2(): import datetime return datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') def test_doc_dill_issue_219(): back_fn = dill.loads(dill.dumps(get_fun_with_strftime())) assert back_fn() == "1943-01-04 00:00:00" dupl = dill.loads(dill.dumps(get_fun_with_strftime2)) assert dupl() == get_fun_with_strftime2() def get_fun_with_internal_import(): def fun_with_import(): import re return re.compile("$") return fun_with_import def test_method_with_internal_import_should_work(): import re back_fn = dill.loads(dill.dumps(get_fun_with_internal_import())) import inspect if hasattr(inspect, 'getclosurevars'): vars = inspect.getclosurevars(back_fn) assert vars.globals == {} assert vars.nonlocals == {} assert back_fn() == re.compile("$") assert "__builtins__" in back_fn.__globals__ if __name__ == "__main__": import sys if (sys.version_info[:3] != (3,10,0) or sys.version_info[3] != 'alpha'): test_doc_dill_issue_219() test_method_with_internal_import_should_work() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702238.0 dill-0.4.0/dill/tests/test_file.py0000644000076500000240000003241214735133336016631 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2008-2016 California Institute of Technology. # Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. 
The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE import os import sys import string import random import dill dill.settings['recurse'] = True fname = "_test_file.txt" rand_chars = list(string.ascii_letters) + ["\n"] * 40 # bias newline buffer_error = ValueError("invalid buffer size") dne_error = FileNotFoundError("[Errno 2] No such file or directory: '%s'" % fname) def write_randomness(number=200): f = open(fname, "w") for i in range(number): f.write(random.choice(rand_chars)) f.close() f = open(fname, "r") contents = f.read() f.close() return contents def trunc_file(): open(fname, "w").close() def throws(op, args, exc): try: op(*args) except type(exc): return sys.exc_info()[1].args == exc.args else: return False def teardown_module(): if os.path.exists(fname): os.remove(fname) def bench(strictio, fmode, skippypy): import platform if skippypy and platform.python_implementation() == 'PyPy': # Skip for PyPy... return # file exists, with same contents # read write_randomness() f = open(fname, "r") _f = dill.loads(dill.dumps(f, fmode=fmode))#, strictio=strictio)) assert _f.mode == f.mode assert _f.tell() == f.tell() assert _f.read() == f.read() f.close() _f.close() # write f = open(fname, "w") f.write("hello") f_dumped = dill.dumps(f, fmode=fmode)#, strictio=strictio) f1mode = f.mode ftell = f.tell() f.close() f2 = dill.loads(f_dumped) #FIXME: fails due to pypy/issues/1233 # TypeError: expected py_object instance instead of str f2mode = f2.mode f2tell = f2.tell() f2name = f2.name f2.write(" world!") f2.close() if fmode == dill.HANDLE_FMODE: assert open(fname).read() == " world!" assert f2mode == f1mode assert f2tell == 0 elif fmode == dill.CONTENTS_FMODE: assert open(fname).read() == "hello world!" assert f2mode == f1mode assert f2tell == ftell assert f2name == fname elif fmode == dill.FILE_FMODE: assert open(fname).read() == "hello world!" 
assert f2mode == f1mode assert f2tell == ftell else: raise RuntimeError("Unknown file mode '%s'" % fmode) # append trunc_file() f = open(fname, "a") f.write("hello") f_dumped = dill.dumps(f, fmode=fmode)#, strictio=strictio) f1mode = f.mode ftell = f.tell() f.close() f2 = dill.loads(f_dumped) f2mode = f2.mode f2tell = f2.tell() f2.write(" world!") f2.close() assert f2mode == f1mode if fmode == dill.CONTENTS_FMODE: assert open(fname).read() == "hello world!" assert f2tell == ftell elif fmode == dill.HANDLE_FMODE: assert open(fname).read() == "hello world!" assert f2tell == ftell elif fmode == dill.FILE_FMODE: assert open(fname).read() == "hello world!" assert f2tell == ftell else: raise RuntimeError("Unknown file mode '%s'" % fmode) # file exists, with different contents (smaller size) # read write_randomness() f = open(fname, "r") fstr = f.read() f_dumped = dill.dumps(f, fmode=fmode)#, strictio=strictio) f1mode = f.mode ftell = f.tell() f.close() _flen = 150 _fstr = write_randomness(number=_flen) if strictio: # throw error if ftell > EOF assert throws(dill.loads, (f_dumped,), buffer_error) else: f2 = dill.loads(f_dumped) assert f2.mode == f1mode if fmode == dill.CONTENTS_FMODE: assert f2.tell() == _flen assert f2.read() == "" f2.seek(0) assert f2.read() == _fstr assert f2.tell() == _flen # 150 elif fmode == dill.HANDLE_FMODE: assert f2.tell() == 0 assert f2.read() == _fstr assert f2.tell() == _flen # 150 elif fmode == dill.FILE_FMODE: assert f2.tell() == ftell # 200 assert f2.read() == "" f2.seek(0) assert f2.read() == fstr assert f2.tell() == ftell # 200 else: raise RuntimeError("Unknown file mode '%s'" % fmode) f2.close() # write write_randomness() f = open(fname, "w") f.write("hello") f_dumped = dill.dumps(f, fmode=fmode)#, strictio=strictio) f1mode = f.mode ftell = f.tell() f.close() fstr = open(fname).read() f = open(fname, "w") f.write("h") _ftell = f.tell() f.close() if strictio: # throw error if ftell > EOF assert throws(dill.loads, (f_dumped,), 
buffer_error) else: f2 = dill.loads(f_dumped) f2mode = f2.mode f2tell = f2.tell() f2.write(" world!") f2.close() if fmode == dill.CONTENTS_FMODE: assert open(fname).read() == "h world!" assert f2mode == f1mode assert f2tell == _ftell elif fmode == dill.HANDLE_FMODE: assert open(fname).read() == " world!" assert f2mode == f1mode assert f2tell == 0 elif fmode == dill.FILE_FMODE: assert open(fname).read() == "hello world!" assert f2mode == f1mode assert f2tell == ftell else: raise RuntimeError("Unknown file mode '%s'" % fmode) f2.close() # append trunc_file() f = open(fname, "a") f.write("hello") f_dumped = dill.dumps(f, fmode=fmode)#, strictio=strictio) f1mode = f.mode ftell = f.tell() f.close() fstr = open(fname).read() f = open(fname, "w") f.write("h") _ftell = f.tell() f.close() if strictio: # throw error if ftell > EOF assert throws(dill.loads, (f_dumped,), buffer_error) else: f2 = dill.loads(f_dumped) f2mode = f2.mode f2tell = f2.tell() f2.write(" world!") f2.close() assert f2mode == f1mode if fmode == dill.CONTENTS_FMODE: # position of writes cannot be changed on some OSs assert open(fname).read() == "h world!" assert f2tell == _ftell elif fmode == dill.HANDLE_FMODE: assert open(fname).read() == "h world!" assert f2tell == _ftell elif fmode == dill.FILE_FMODE: assert open(fname).read() == "hello world!" 
assert f2tell == ftell else: raise RuntimeError("Unknown file mode '%s'" % fmode) f2.close() # file does not exist # read write_randomness() f = open(fname, "r") fstr = f.read() f_dumped = dill.dumps(f, fmode=fmode)#, strictio=strictio) f1mode = f.mode ftell = f.tell() f.close() os.remove(fname) if strictio: # throw error if file DNE assert throws(dill.loads, (f_dumped,), dne_error) else: f2 = dill.loads(f_dumped) assert f2.mode == f1mode if fmode == dill.CONTENTS_FMODE: # FIXME: this fails on systems where f2.tell() always returns 0 # assert f2.tell() == ftell # 200 assert f2.read() == "" f2.seek(0) assert f2.read() == "" assert f2.tell() == 0 elif fmode == dill.FILE_FMODE: assert f2.tell() == ftell # 200 assert f2.read() == "" f2.seek(0) assert f2.read() == fstr assert f2.tell() == ftell # 200 elif fmode == dill.HANDLE_FMODE: assert f2.tell() == 0 assert f2.read() == "" assert f2.tell() == 0 else: raise RuntimeError("Unknown file mode '%s'" % fmode) f2.close() # write write_randomness() f = open(fname, "w+") f.write("hello") f_dumped = dill.dumps(f, fmode=fmode)#, strictio=strictio) ftell = f.tell() f1mode = f.mode f.close() os.remove(fname) if strictio: # throw error if file DNE assert throws(dill.loads, (f_dumped,), dne_error) else: f2 = dill.loads(f_dumped) f2mode = f2.mode f2tell = f2.tell() f2.write(" world!") f2.close() if fmode == dill.CONTENTS_FMODE: assert open(fname).read() == " world!" assert f2mode == 'w+' assert f2tell == 0 elif fmode == dill.HANDLE_FMODE: assert open(fname).read() == " world!" assert f2mode == f1mode assert f2tell == 0 elif fmode == dill.FILE_FMODE: assert open(fname).read() == "hello world!" 
assert f2mode == f1mode assert f2tell == ftell else: raise RuntimeError("Unknown file mode '%s'" % fmode) # append trunc_file() f = open(fname, "a") f.write("hello") f_dumped = dill.dumps(f, fmode=fmode)#, strictio=strictio) ftell = f.tell() f1mode = f.mode f.close() os.remove(fname) if strictio: # throw error if file DNE assert throws(dill.loads, (f_dumped,), dne_error) else: f2 = dill.loads(f_dumped) f2mode = f2.mode f2tell = f2.tell() f2.write(" world!") f2.close() assert f2mode == f1mode if fmode == dill.CONTENTS_FMODE: assert open(fname).read() == " world!" assert f2tell == 0 elif fmode == dill.HANDLE_FMODE: assert open(fname).read() == " world!" assert f2tell == 0 elif fmode == dill.FILE_FMODE: assert open(fname).read() == "hello world!" assert f2tell == ftell else: raise RuntimeError("Unknown file mode '%s'" % fmode) # file exists, with different contents (larger size) # read write_randomness() f = open(fname, "r") fstr = f.read() f_dumped = dill.dumps(f, fmode=fmode)#, strictio=strictio) f1mode = f.mode ftell = f.tell() f.close() _flen = 250 _fstr = write_randomness(number=_flen) # XXX: no safe_file: no way to be 'safe'? f2 = dill.loads(f_dumped) assert f2.mode == f1mode if fmode == dill.CONTENTS_FMODE: assert f2.tell() == ftell # 200 assert f2.read() == _fstr[ftell:] f2.seek(0) assert f2.read() == _fstr assert f2.tell() == _flen # 250 elif fmode == dill.HANDLE_FMODE: assert f2.tell() == 0 assert f2.read() == _fstr assert f2.tell() == _flen # 250 elif fmode == dill.FILE_FMODE: assert f2.tell() == ftell # 200 assert f2.read() == "" f2.seek(0) assert f2.read() == fstr assert f2.tell() == ftell # 200 else: raise RuntimeError("Unknown file mode '%s'" % fmode) f2.close() # XXX: other alternatives? # write f = open(fname, "w") f.write("hello") f_dumped = dill.dumps(f, fmode=fmode)#, strictio=strictio) f1mode = f.mode ftell = f.tell() fstr = open(fname).read() f.write(" and goodbye!") _ftell = f.tell() f.close() # XXX: no safe_file: no way to be 'safe'? 
f2 = dill.loads(f_dumped) f2mode = f2.mode f2tell = f2.tell() f2.write(" world!") f2.close() if fmode == dill.CONTENTS_FMODE: assert open(fname).read() == "hello world!odbye!" assert f2mode == f1mode assert f2tell == ftell elif fmode == dill.HANDLE_FMODE: assert open(fname).read() == " world!" assert f2mode == f1mode assert f2tell == 0 elif fmode == dill.FILE_FMODE: assert open(fname).read() == "hello world!" assert f2mode == f1mode assert f2tell == ftell else: raise RuntimeError("Unknown file mode '%s'" % fmode) f2.close() # append trunc_file() f = open(fname, "a") f.write("hello") f_dumped = dill.dumps(f, fmode=fmode)#, strictio=strictio) f1mode = f.mode ftell = f.tell() fstr = open(fname).read() f.write(" and goodbye!") _ftell = f.tell() f.close() # XXX: no safe_file: no way to be 'safe'? f2 = dill.loads(f_dumped) f2mode = f2.mode f2tell = f2.tell() f2.write(" world!") f2.close() assert f2mode == f1mode if fmode == dill.CONTENTS_FMODE: assert open(fname).read() == "hello and goodbye! world!" assert f2tell == ftell elif fmode == dill.HANDLE_FMODE: assert open(fname).read() == "hello and goodbye! world!" assert f2tell == _ftell elif fmode == dill.FILE_FMODE: assert open(fname).read() == "hello world!" 
assert f2tell == ftell else: raise RuntimeError("Unknown file mode '%s'" % fmode) f2.close() def test_nostrictio_handlefmode(): bench(False, dill.HANDLE_FMODE, False) teardown_module() def test_nostrictio_filefmode(): bench(False, dill.FILE_FMODE, False) teardown_module() def test_nostrictio_contentsfmode(): bench(False, dill.CONTENTS_FMODE, True) teardown_module() #bench(True, dill.HANDLE_FMODE, False) #bench(True, dill.FILE_FMODE, False) #bench(True, dill.CONTENTS_FMODE, True) if __name__ == '__main__': test_nostrictio_handlefmode() test_nostrictio_filefmode() test_nostrictio_contentsfmode() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702238.0 dill-0.4.0/dill/tests/test_functions.py0000644000076500000240000001025314735133336017721 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2019-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE import functools import dill import sys dill.settings['recurse'] = True def function_a(a): return a def function_b(b, b1): return b + b1 def function_c(c, c1=1): return c + c1 def function_d(d, d1, d2=1): """doc string""" return d + d1 + d2 function_d.__module__ = 'a module' exec(''' def function_e(e, *e1, e2=1, e3=2): return e + sum(e1) + e2 + e3''') globalvar = 0 @functools.lru_cache(None) def function_with_cache(x): global globalvar globalvar += x return globalvar def function_with_unassigned_variable(): if False: value = None return (lambda: value) def test_issue_510(): # A very bizzare use of functions and methods that pickle doesn't get # correctly for odd reasons. 
class Foo: def __init__(self): def f2(self): return self self.f2 = f2.__get__(self) import dill, pickletools f = Foo() f1 = dill.copy(f) assert f1.f2() is f1 def test_functions(): dumped_func_a = dill.dumps(function_a) assert dill.loads(dumped_func_a)(0) == 0 dumped_func_b = dill.dumps(function_b) assert dill.loads(dumped_func_b)(1,2) == 3 dumped_func_c = dill.dumps(function_c) assert dill.loads(dumped_func_c)(1) == 2 assert dill.loads(dumped_func_c)(1, 2) == 3 dumped_func_d = dill.dumps(function_d) assert dill.loads(dumped_func_d).__doc__ == function_d.__doc__ assert dill.loads(dumped_func_d).__module__ == function_d.__module__ assert dill.loads(dumped_func_d)(1, 2) == 4 assert dill.loads(dumped_func_d)(1, 2, 3) == 6 assert dill.loads(dumped_func_d)(1, 2, d2=3) == 6 function_with_cache(1) globalvar = 0 dumped_func_cache = dill.dumps(function_with_cache) assert function_with_cache(2) == 3 assert function_with_cache(1) == 1 assert function_with_cache(3) == 6 assert function_with_cache(2) == 3 empty_cell = function_with_unassigned_variable() cell_copy = dill.loads(dill.dumps(empty_cell)) assert 'empty' in str(cell_copy.__closure__[0]) try: cell_copy() except Exception: # this is good pass else: raise AssertionError('cell_copy() did not read an empty cell') exec(''' dumped_func_e = dill.dumps(function_e) assert dill.loads(dumped_func_e)(1, 2) == 6 assert dill.loads(dumped_func_e)(1, 2, 3) == 9 assert dill.loads(dumped_func_e)(1, 2, e2=3) == 8 assert dill.loads(dumped_func_e)(1, 2, e2=3, e3=4) == 10 assert dill.loads(dumped_func_e)(1, 2, 3, e2=4) == 12 assert dill.loads(dumped_func_e)(1, 2, 3, e2=4, e3=5) == 15''') def test_code_object(): import warnings from dill._dill import ALL_CODE_PARAMS, CODE_PARAMS, CODE_VERSION, _create_code code = function_c.__code__ warnings.filterwarnings('ignore', category=DeprecationWarning) # issue 597 LNOTAB = getattr(code, 'co_lnotab', b'') if warnings.filters: del warnings.filters[0] fields = {f: getattr(code, 'co_'+f) for f in 
CODE_PARAMS} fields.setdefault('posonlyargcount', 0) # python >= 3.8 fields.setdefault('lnotab', LNOTAB) # python <= 3.9 fields.setdefault('linetable', b'') # python >= 3.10 fields.setdefault('qualname', fields['name']) # python >= 3.11 fields.setdefault('exceptiontable', b'') # python >= 3.11 fields.setdefault('endlinetable', None) # python == 3.11a fields.setdefault('columntable', None) # python == 3.11a for version, _, params in ALL_CODE_PARAMS: args = tuple(fields[p] for p in params.split()) try: _create_code(*args) if version >= (3,10): _create_code(fields['lnotab'], *args) except Exception as error: raise Exception("failed to construct code object with format version {}".format(version)) from error if __name__ == '__main__': test_functions() test_issue_510() test_code_object() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702238.0 dill-0.4.0/dill/tests/test_functors.py0000644000076500000240000000164214735133336017556 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2008-2016 California Institute of Technology. # Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. 
The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE import functools import dill dill.settings['recurse'] = True def f(a, b, c): # without keywords pass def g(a, b, c=2): # with keywords pass def h(a=1, b=2, c=3): # without args pass def test_functools(): fp = functools.partial(f, 1, 2) gp = functools.partial(g, 1, c=2) hp = functools.partial(h, 1, c=2) bp = functools.partial(int, base=2) assert dill.pickles(fp, safe=True) assert dill.pickles(gp, safe=True) assert dill.pickles(hp, safe=True) assert dill.pickles(bp, safe=True) if __name__ == '__main__': test_functools() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702238.0 dill-0.4.0/dill/tests/test_logger.py0000644000076500000240000000452114735133336017171 0ustar00mmckernsstaff#!/usr/bin/env python # Author: Leonardo Gama (@leogama) # Copyright (c) 2022-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE import logging import re import tempfile import dill from dill import detect from dill.logger import stderr_handler, adapter as logger try: from StringIO import StringIO except ImportError: from io import StringIO test_obj = {'a': (1, 2), 'b': object(), 'f': lambda x: x**2, 'big': list(range(10))} def test_logging(should_trace): buffer = StringIO() handler = logging.StreamHandler(buffer) logger.addHandler(handler) try: dill.dumps(test_obj) if should_trace: regex = re.compile(r'(\S*┬ \w.*[^)]' # begin pickling object r'|│*└ # \w.* \[\d+ (\wi)?B])' # object written (with size) ) for line in buffer.getvalue().splitlines(): assert regex.fullmatch(line) return buffer.getvalue() else: assert buffer.getvalue() == "" finally: logger.removeHandler(handler) buffer.close() def test_trace_to_file(stream_trace): file = tempfile.NamedTemporaryFile(mode='r') with detect.trace(file.name, mode='w'): dill.dumps(test_obj) 
file_trace = file.read() file.close() # Apparently, objects can change location in memory... reghex = re.compile(r'0x[0-9A-Za-z]+') file_trace, stream_trace = reghex.sub('0x', file_trace), reghex.sub('0x', stream_trace) # PyPy prints dictionary contents with repr(dict)... regdict = re.compile(r'(dict\.__repr__ of ).*') file_trace, stream_trace = regdict.sub(r'\1{}>', file_trace), regdict.sub(r'\1{}>', stream_trace) assert file_trace == stream_trace if __name__ == '__main__': logger.removeHandler(stderr_handler) test_logging(should_trace=False) detect.trace(True) test_logging(should_trace=True) detect.trace(False) test_logging(should_trace=False) loglevel = logging.ERROR logger.setLevel(loglevel) with detect.trace(): stream_trace = test_logging(should_trace=True) test_logging(should_trace=False) assert logger.getEffectiveLevel() == loglevel test_trace_to_file(stream_trace) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702238.0 dill-0.4.0/dill/tests/test_mixins.py0000644000076500000240000000764714735133336017235 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2008-2016 California Institute of Technology. # Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE import dill dill.settings['recurse'] = True def wtf(x,y,z): def zzz(): return x def yyy(): return y def xxx(): return z return zzz,yyy def quad(a=1, b=1, c=0): inverted = [False] def invert(): inverted[0] = not inverted[0] def dec(f): def func(*args, **kwds): x = f(*args, **kwds) if inverted[0]: x = -x return a*x**2 + b*x + c func.__wrapped__ = f func.invert = invert func.inverted = inverted return func return dec @quad(a=0,b=2) def double_add(*args): return sum(args) fx = sum([1,2,3]) ### to make it interesting... 
def quad_factory(a=1,b=1,c=0): def dec(f): def func(*args,**kwds): fx = f(*args,**kwds) return a*fx**2 + b*fx + c return func return dec @quad_factory(a=0,b=4,c=0) def quadish(x): return x+1 quadratic = quad_factory() def doubler(f): def inner(*args, **kwds): fx = f(*args, **kwds) return 2*fx return inner @doubler def quadruple(x): return 2*x def test_mixins(): # test mixins assert double_add(1,2,3) == 2*fx double_add.invert() assert double_add(1,2,3) == -2*fx _d = dill.copy(double_add) assert _d(1,2,3) == -2*fx #_d.invert() #FIXME: fails seemingly randomly #assert _d(1,2,3) == 2*fx assert _d.__wrapped__(1,2,3) == fx # XXX: issue or feature? in python3.4, inverted is linked through copy if not double_add.inverted[0]: double_add.invert() # test some stuff from source and pointers ds = dill.source dd = dill.detect assert ds.getsource(dd.freevars(quadish)['f']) == '@quad_factory(a=0,b=4,c=0)\ndef quadish(x):\n return x+1\n' assert ds.getsource(dd.freevars(quadruple)['f']) == '@doubler\ndef quadruple(x):\n return 2*x\n' assert ds.importable(quadish, source=False) == 'from %s import quadish\n' % __name__ assert ds.importable(quadruple, source=False) == 'from %s import quadruple\n' % __name__ assert ds.importable(quadratic, source=False) == 'from %s import quadratic\n' % __name__ assert ds.importable(double_add, source=False) == 'from %s import double_add\n' % __name__ assert ds.importable(quadruple, source=True) == 'def doubler(f):\n def inner(*args, **kwds):\n fx = f(*args, **kwds)\n return 2*fx\n return inner\n\n@doubler\ndef quadruple(x):\n return 2*x\n' #***** #FIXME: this needs work result = ds.importable(quadish, source=True) a,b,c,_,result = result.split('\n',4) assert result == 'def quad_factory(a=1,b=1,c=0):\n def dec(f):\n def func(*args,**kwds):\n fx = f(*args,**kwds)\n return a*fx**2 + b*fx + c\n return func\n return dec\n\n@quad_factory(a=0,b=4,c=0)\ndef quadish(x):\n return x+1\n' assert set([a,b,c]) == set(['a = 0', 'c = 0', 'b = 4']) result = 
ds.importable(quadratic, source=True) a,b,c,result = result.split('\n',3) assert result == '\ndef dec(f):\n def func(*args,**kwds):\n fx = f(*args,**kwds)\n return a*fx**2 + b*fx + c\n return func\n' assert set([a,b,c]) == set(['a = 1', 'c = 0', 'b = 1']) result = ds.importable(double_add, source=True) a,b,c,d,_,result = result.split('\n',5) assert result == 'def quad(a=1, b=1, c=0):\n inverted = [False]\n def invert():\n inverted[0] = not inverted[0]\n def dec(f):\n def func(*args, **kwds):\n x = f(*args, **kwds)\n if inverted[0]: x = -x\n return a*x**2 + b*x + c\n func.__wrapped__ = f\n func.invert = invert\n func.inverted = inverted\n return func\n return dec\n\n@quad(a=0,b=2)\ndef double_add(*args):\n return sum(args)\n' assert set([a,b,c,d]) == set(['a = 0', 'c = 0', 'b = 2', 'inverted = [True]']) #***** if __name__ == '__main__': test_mixins() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702238.0 dill-0.4.0/dill/tests/test_module.py0000644000076500000240000000362714735133336017205 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2008-2016 California Institute of Technology. # Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. 
The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE import sys import dill import test_mixins as module from importlib import reload dill.settings['recurse'] = True cached = (module.__cached__ if hasattr(module, "__cached__") else module.__file__.split(".", 1)[0] + ".pyc") module.a = 1234 pik_mod = dill.dumps(module) module.a = 0 # remove module del sys.modules[module.__name__] del module module = dill.loads(pik_mod) def test_attributes(): #assert hasattr(module, "a") and module.a == 1234 #FIXME: -m dill.tests assert module.double_add(1, 2, 3) == 2 * module.fx # Restart, and test use_diff reload(module) try: dill.use_diff() module.a = 1234 pik_mod = dill.dumps(module) module.a = 0 # remove module del sys.modules[module.__name__] del module module = dill.loads(pik_mod) def test_diff_attributes(): assert hasattr(module, "a") and module.a == 1234 assert module.double_add(1, 2, 3) == 2 * module.fx except AttributeError: def test_diff_attributes(): pass # clean up import os if os.path.exists(cached): os.remove(cached) pycache = os.path.join(os.path.dirname(module.__file__), "__pycache__") if os.path.exists(pycache) and not os.listdir(pycache): os.removedirs(pycache) # test when module is None import math def get_lambda(str, **kwarg): return eval(str, kwarg, None) obj = get_lambda('lambda x: math.exp(x)', math=math) def test_module_is_none(): assert obj.__module__ is None assert dill.copy(obj)(3) == obj(3) if __name__ == '__main__': test_attributes() test_diff_attributes() test_module_is_none() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702238.0 dill-0.4.0/dill/tests/test_moduledict.py0000644000076500000240000000223614735133336020044 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2008-2016 California Institute of Technology. # Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. 
# License: 3-clause BSD. The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE import dill dill.settings['recurse'] = True def f(func): def w(*args): return f(*args) return w @f def f2(): pass # check when __main__ and on import def test_decorated(): assert dill.pickles(f2) import doctest import logging logging.basicConfig(level=logging.DEBUG) class SomeUnreferencedUnpicklableClass(object): def __reduce__(self): raise Exception unpicklable = SomeUnreferencedUnpicklableClass() # This works fine outside of Doctest: def test_normal(): serialized = dill.dumps(lambda x: x) # should not try to pickle unpicklable object in __globals__ def tests(): """ >>> serialized = dill.dumps(lambda x: x) """ return #print("\n\nRunning Doctest:") def test_doctest(): doctest.testmod() if __name__ == '__main__': test_decorated() test_normal() test_doctest() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702238.0 dill-0.4.0/dill/tests/test_nested.py0000644000076500000240000000611214735133336017172 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2008-2016 California Institute of Technology. # Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE """ test dill's ability to handle nested functions """ import os import math import dill as pickle pickle.settings['recurse'] = True # the nested function: pickle should fail here, but dill is ok. def adder(augend): zero = [0] def inner(addend): return addend + augend + zero[0] return inner # rewrite the nested function using a class: standard pickle should work here. 
class cadder(object): def __init__(self, augend): self.augend = augend self.zero = [0] def __call__(self, addend): return addend + self.augend + self.zero[0] # rewrite again, but as an old-style class class c2adder: def __init__(self, augend): self.augend = augend self.zero = [0] def __call__(self, addend): return addend + self.augend + self.zero[0] # some basic class stuff class basic(object): pass class basic2: pass x = 5 y = 1 def test_basic(): a = [0, 1, 2] pa = pickle.dumps(a) pmath = pickle.dumps(math) #XXX: FAILS in pickle pmap = pickle.dumps(map) # ... la = pickle.loads(pa) lmath = pickle.loads(pmath) lmap = pickle.loads(pmap) assert list(map(math.sin, a)) == list(lmap(lmath.sin, la)) def test_basic_class(): pbasic2 = pickle.dumps(basic2) _pbasic2 = pickle.loads(pbasic2)() pbasic = pickle.dumps(basic) _pbasic = pickle.loads(pbasic)() def test_c2adder(): pc2adder = pickle.dumps(c2adder) pc2add5 = pickle.loads(pc2adder)(x) assert pc2add5(y) == x+y def test_pickled_cadder(): pcadder = pickle.dumps(cadder) pcadd5 = pickle.loads(pcadder)(x) assert pcadd5(y) == x+y def test_raw_adder_and_inner(): add5 = adder(x) assert add5(y) == x+y def test_pickled_adder(): padder = pickle.dumps(adder) padd5 = pickle.loads(padder)(x) assert padd5(y) == x+y def test_pickled_inner(): add5 = adder(x) pinner = pickle.dumps(add5) #XXX: FAILS in pickle p5add = pickle.loads(pinner) assert p5add(y) == x+y def test_moduledict_where_not_main(): try: from . 
import test_moduledict except ImportError: import test_moduledict name = 'test_moduledict.py' if os.path.exists(name) and os.path.exists(name+'c'): os.remove(name+'c') if os.path.exists(name) and hasattr(test_moduledict, "__cached__") \ and os.path.exists(test_moduledict.__cached__): os.remove(getattr(test_moduledict, "__cached__")) if os.path.exists("__pycache__") and not os.listdir("__pycache__"): os.removedirs("__pycache__") if __name__ == '__main__': test_basic() test_basic_class() test_c2adder() test_pickled_cadder() test_raw_adder_and_inner() test_pickled_adder() test_pickled_inner() test_moduledict_where_not_main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702238.0 dill-0.4.0/dill/tests/test_objects.py0000644000076500000240000000361314735133336017344 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2008-2016 California Institute of Technology. # Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. 
The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE """ demonstrate dill's ability to pickle different python types test pickling of all Python Standard Library objects (currently: CH 1-14 @ 2.7) """ import dill as pickle pickle.settings['recurse'] = True #pickle.detect.trace(True) #import pickle # get all objects for testing from dill import load_types, objects, extend load_types(pickleable=True,unpickleable=False) # uncomment the next two lines to test cloudpickle #extend(False) #import cloudpickle as pickle # helper objects class _class: def _method(self): pass # objects that *fail* if imported special = {} special['LambdaType'] = _lambda = lambda x: lambda y: x special['MethodType'] = _method = _class()._method special['UnboundMethodType'] = _class._method objects.update(special) def pickles(name, exact=False, verbose=True): """quick check if object pickles with dill""" obj = objects[name] try: pik = pickle.loads(pickle.dumps(obj)) if exact: try: assert pik == obj except AssertionError: assert type(obj) == type(pik) if verbose: print ("weak: %s %s" % (name, type(obj))) else: assert type(obj) == type(pik) except Exception: if verbose: print ("fails: %s %s" % (name, type(obj))) def test_objects(verbose=True): for member in objects.keys(): #pickles(member, exact=True, verbose=verbose) pickles(member, exact=False, verbose=verbose) if __name__ == '__main__': import warnings warnings.simplefilter('ignore') test_objects(verbose=False) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702238.0 dill-0.4.0/dill/tests/test_properties.py0000644000076500000240000000250214735133336020103 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2008-2016 California Institute of Technology. # Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. 
The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE import sys import dill dill.settings['recurse'] = True class Foo(object): def __init__(self): self._data = 1 def _get_data(self): return self._data def _set_data(self, x): self._data = x data = property(_get_data, _set_data) def test_data_not_none(): FooS = dill.copy(Foo) assert FooS.data.fget is not None assert FooS.data.fset is not None assert FooS.data.fdel is None def test_data_unchanged(): FooS = dill.copy(Foo) try: res = FooS().data except Exception: e = sys.exc_info()[1] raise AssertionError(str(e)) else: assert res == 1 def test_data_changed(): FooS = dill.copy(Foo) try: f = FooS() f.data = 1024 res = f.data except Exception: e = sys.exc_info()[1] raise AssertionError(str(e)) else: assert res == 1024 if __name__ == '__main__': test_data_not_none() test_data_unchanged() test_data_changed() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702239.0 dill-0.4.0/dill/tests/test_pycapsule.py0000644000076500000240000000261114735133337017716 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Author: Anirudh Vegesana (avegesan@cs.stanford.edu) # Copyright (c) 2022-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. 
The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE """ test pickling a PyCapsule object """ import dill import warnings test_pycapsule = None if dill._dill._testcapsule is not None: import ctypes def test_pycapsule(): name = ctypes.create_string_buffer(b'dill._testcapsule') capsule = dill._dill._PyCapsule_New( ctypes.cast(dill._dill._PyCapsule_New, ctypes.c_void_p), name, None ) with warnings.catch_warnings(): warnings.simplefilter("ignore") dill.copy(capsule) dill._testcapsule = capsule with warnings.catch_warnings(): warnings.simplefilter("ignore") dill.copy(capsule) dill._testcapsule = None try: with warnings.catch_warnings(): warnings.simplefilter("ignore", dill.PicklingWarning) dill.copy(capsule) except dill.UnpicklingError: pass else: raise AssertionError("Expected a different error") if __name__ == '__main__': if test_pycapsule is not None: test_pycapsule() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702239.0 dill-0.4.0/dill/tests/test_recursive.py0000644000076500000240000001012614735133337017720 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2019-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE import dill from functools import partial import warnings def copy(obj, byref=False, recurse=False): if byref: try: return dill.copy(obj, byref=byref, recurse=recurse) except Exception: pass else: raise AssertionError('Copy of %s with byref=True should have given a warning!' 
% (obj,)) warnings.simplefilter('ignore') val = dill.copy(obj, byref=byref, recurse=recurse) warnings.simplefilter('error') return val else: return dill.copy(obj, byref=byref, recurse=recurse) class obj1(object): def __init__(self): super(obj1, self).__init__() class obj2(object): def __init__(self): super(obj2, self).__init__() class obj3(object): super_ = super def __init__(self): obj3.super_(obj3, self).__init__() def test_super(): assert copy(obj1(), byref=True) assert copy(obj1(), byref=True, recurse=True) assert copy(obj1(), recurse=True) assert copy(obj1()) assert copy(obj2(), byref=True) assert copy(obj2(), byref=True, recurse=True) assert copy(obj2(), recurse=True) assert copy(obj2()) assert copy(obj3(), byref=True) assert copy(obj3(), byref=True, recurse=True) assert copy(obj3(), recurse=True) assert copy(obj3()) def get_trigger(model): pass class Machine(object): def __init__(self): self.child = Model() self.trigger = partial(get_trigger, self) self.child.trigger = partial(get_trigger, self.child) class Model(object): pass def test_partial(): assert copy(Machine(), byref=True) assert copy(Machine(), byref=True, recurse=True) assert copy(Machine(), recurse=True) assert copy(Machine()) class Machine2(object): def __init__(self): self.go = partial(self.member, self) def member(self, model): pass class SubMachine(Machine2): def __init__(self): super(SubMachine, self).__init__() def test_partials(): assert copy(SubMachine(), byref=True) assert copy(SubMachine(), byref=True, recurse=True) assert copy(SubMachine(), recurse=True) assert copy(SubMachine()) class obj4(object): def __init__(self): super(obj4, self).__init__() a = self class obj5(object): def __init__(self): super(obj5, self).__init__() self.a = a self.b = obj5() def test_circular_reference(): assert copy(obj4()) obj4_copy = dill.loads(dill.dumps(obj4())) assert type(obj4_copy) is type(obj4_copy).__init__.__closure__[0].cell_contents assert type(obj4_copy.b) is 
type(obj4_copy.b).__init__.__closure__[0].cell_contents def f(): def g(): return g return g def test_function_cells(): assert copy(f()) def fib(n): assert n >= 0 if n <= 1: return n else: return fib(n-1) + fib(n-2) def test_recursive_function(): global fib fib2 = copy(fib, recurse=True) fib3 = copy(fib) fib4 = fib del fib assert fib2(5) == 5 for _fib in (fib3, fib4): try: _fib(5) except Exception: # This is expected to fail because fib no longer exists pass else: raise AssertionError("Function fib shouldn't have been found") fib = fib4 def collection_function_recursion(): d = {} def g(): return d d['g'] = g return g def test_collection_function_recursion(): g = copy(collection_function_recursion()) assert g()['g'] is g if __name__ == '__main__': with warnings.catch_warnings(): warnings.simplefilter('error') test_super() test_partial() test_partials() test_circular_reference() test_function_cells() test_recursive_function() test_collection_function_recursion() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702239.0 dill-0.4.0/dill/tests/test_registered.py0000644000076500000240000000304514735133337020050 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2022-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. 
The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE """ test pickling registered objects """ import dill from dill._objects import failures, registered, succeeds import warnings warnings.filterwarnings('ignore') def check(d, ok=True): res = [] for k,v in d.items(): try: z = dill.copy(v) if ok: res.append(k) except: if not ok: res.append(k) return res fails = check(failures) try: assert not bool(fails) except AssertionError as e: print("FAILS: %s" % fails) raise e from None register = check(registered, ok=False) try: assert not bool(register) except AssertionError as e: print("REGISTER: %s" % register) raise e from None success = check(succeeds, ok=False) try: assert not bool(success) except AssertionError as e: print("SUCCESS: %s" % success) raise e from None import builtins import types q = dill._dill._reverse_typemap p = {k:v for k,v in q.items() if k not in vars(builtins) and k not in vars(types)} diff = set(p.keys()).difference(registered.keys()) try: assert not bool(diff) except AssertionError as e: print("DIFF: %s" % diff) raise e from None miss = set(registered.keys()).difference(p.keys()) try: assert not bool(miss) except AssertionError as e: print("MISS: %s" % miss) raise e from None ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702239.0 dill-0.4.0/dill/tests/test_restricted.py0000644000076500000240000000141714735133337020064 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Kirill Makhonin (@kirillmakhonin) # Copyright (c) 2008-2016 California Institute of Technology. # Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. 
The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE import dill class RestrictedType: def __bool__(*args, **kwargs): raise Exception('Restricted function') __eq__ = __lt__ = __le__ = __ne__ = __gt__ = __ge__ = __hash__ = __bool__ glob_obj = RestrictedType() def restricted_func(): a = glob_obj def test_function_with_restricted_object(): deserialized = dill.loads(dill.dumps(restricted_func, recurse=True)) if __name__ == '__main__': test_function_with_restricted_object() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702239.0 dill-0.4.0/dill/tests/test_selected.py0000644000076500000240000000627214735133337017510 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2008-2016 California Institute of Technology. # Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE """ testing some selected object types """ import dill dill.settings['recurse'] = True verbose = False def test_dict_contents(): c = type.__dict__ for i,j in c.items(): #try: ok = dill.pickles(j) #except Exception: # print ("FAIL: %s with %s" % (i, dill.detect.errors(j))) if verbose: print ("%s: %s, %s" % (ok, type(j), j)) assert ok if verbose: print ("") def _g(x): yield x; def _f(): try: raise except Exception: from sys import exc_info e, er, tb = exc_info() return er, tb class _d(object): def _method(self): pass from dill import objects from dill import load_types load_types(pickleable=True,unpickleable=False) _newclass = objects['ClassObjectType'] # some clean-up #FIXME: should happen internal to dill objects['TemporaryFileType'].close() objects['TextWrapperType'].close() if 'BufferedRandomType' in objects: objects['BufferedRandomType'].close() objects['BufferedReaderType'].close() 
objects['BufferedWriterType'].close() objects['FileType'].close() del objects # getset_descriptor for new-style classes (fails on '_method', if not __main__) def test_class_descriptors(): d = _d.__dict__ for i in d.values(): ok = dill.pickles(i) if verbose: print ("%s: %s, %s" % (ok, type(i), i)) assert ok if verbose: print ("") od = _newclass.__dict__ for i in od.values(): ok = dill.pickles(i) if verbose: print ("%s: %s, %s" % (ok, type(i), i)) assert ok if verbose: print ("") # (__main__) class instance for new-style classes def test_class(): o = _d() oo = _newclass() ok = dill.pickles(o) if verbose: print ("%s: %s, %s" % (ok, type(o), o)) assert ok ok = dill.pickles(oo) if verbose: print ("%s: %s, %s" % (ok, type(oo), oo)) assert ok if verbose: print ("") # frames, generators, and tracebacks (all depend on frame) def test_frame_related(): g = _g(1) f = g.gi_frame e,t = _f() _is = lambda ok: ok ok = dill.pickles(f) if verbose: print ("%s: %s, %s" % (ok, type(f), f)) assert not ok ok = dill.pickles(g) if verbose: print ("%s: %s, %s" % (ok, type(g), g)) assert _is(not ok) #XXX: dill fails ok = dill.pickles(t) if verbose: print ("%s: %s, %s" % (ok, type(t), t)) assert not ok #XXX: dill fails ok = dill.pickles(e) if verbose: print ("%s: %s, %s" % (ok, type(e), e)) assert ok if verbose: print ("") def test_typing(): import typing x = typing.Any assert x == dill.copy(x) x = typing.Dict[int, str] assert x == dill.copy(x) x = typing.List[int] assert x == dill.copy(x) x = typing.Tuple[int, str] assert x == dill.copy(x) x = typing.Tuple[int] assert x == dill.copy(x) x = typing.Tuple[()] assert x == dill.copy(x) x = typing.Tuple[()].copy_with(()) assert x == dill.copy(x) return if __name__ == '__main__': test_frame_related() test_dict_contents() test_class() test_class_descriptors() test_typing() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702239.0 dill-0.4.0/dill/tests/test_session.py0000644000076500000240000002366114735133337017404 
0ustar00mmckernsstaff#!/usr/bin/env python # Author: Leonardo Gama (@leogama) # Copyright (c) 2022-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE import atexit import os import sys import __main__ from contextlib import suppress from io import BytesIO import dill session_file = os.path.join(os.path.dirname(__file__), 'session-refimported-%s.pkl') ################### # Child process # ################### def _error_line(error, obj, refimported): import traceback line = traceback.format_exc().splitlines()[-2].replace('[obj]', '['+repr(obj)+']') return "while testing (with refimported=%s): %s" % (refimported, line.lstrip()) if __name__ == '__main__' and len(sys.argv) >= 3 and sys.argv[1] == '--child': # Test session loading in a fresh interpreter session. refimported = (sys.argv[2] == 'True') dill.load_module(session_file % refimported, module='__main__') def test_modules(refimported): # FIXME: In this test setting with CPython 3.7, 'calendar' is not included # in sys.modules, independent of the value of refimported. Tried to # run garbage collection just before loading the session with no luck. It # fails even when preceding them with 'import calendar'. Needed to run # these kinds of tests in a supbrocess. 
Failing test sample: # assert globals()['day_name'] is sys.modules['calendar'].__dict__['day_name'] try: for obj in ('json', 'url', 'local_mod', 'sax', 'dom'): assert globals()[obj].__name__ in sys.modules assert 'calendar' in sys.modules and 'cmath' in sys.modules import calendar, cmath for obj in ('Calendar', 'isleap'): assert globals()[obj] is sys.modules['calendar'].__dict__[obj] assert __main__.day_name.__module__ == 'calendar' if refimported: assert __main__.day_name is calendar.day_name assert __main__.complex_log is cmath.log except AssertionError as error: error.args = (_error_line(error, obj, refimported),) raise test_modules(refimported) sys.exit() #################### # Parent process # #################### # Create various kinds of objects to test different internal logics. ## Modules. import json # top-level module import urllib as url # top-level module under alias from xml import sax # submodule import xml.dom.minidom as dom # submodule under alias import test_dictviews as local_mod # non-builtin top-level module ## Imported objects. from calendar import Calendar, isleap, day_name # class, function, other object from cmath import log as complex_log # imported with alias ## Local objects. x = 17 empty = None names = ['Alice', 'Bob', 'Carol'] def squared(x): return x**2 cubed = lambda x: x**3 class Person: def __init__(self, name, age): self.name = name self.age = age person = Person(names[0], x) class CalendarSubclass(Calendar): def weekdays(self): return [day_name[i] for i in self.iterweekdays()] cal = CalendarSubclass() selfref = __main__ # Setup global namespace for session saving tests. 
class TestNamespace: test_globals = globals().copy() def __init__(self, **extra): self.extra = extra def __enter__(self): self.backup = globals().copy() globals().clear() globals().update(self.test_globals) globals().update(self.extra) return self def __exit__(self, *exc_info): globals().clear() globals().update(self.backup) def _clean_up_cache(module): cached = module.__file__.split('.', 1)[0] + '.pyc' cached = module.__cached__ if hasattr(module, '__cached__') else cached pycache = os.path.join(os.path.dirname(module.__file__), '__pycache__') for remove, file in [(os.remove, cached), (os.removedirs, pycache)]: with suppress(OSError): remove(file) atexit.register(_clean_up_cache, local_mod) def _test_objects(main, globals_copy, refimported): try: main_dict = __main__.__dict__ global Person, person, Calendar, CalendarSubclass, cal, selfref for obj in ('json', 'url', 'local_mod', 'sax', 'dom'): assert globals()[obj].__name__ == globals_copy[obj].__name__ for obj in ('x', 'empty', 'names'): assert main_dict[obj] == globals_copy[obj] for obj in ['squared', 'cubed']: assert main_dict[obj].__globals__ is main_dict assert main_dict[obj](3) == globals_copy[obj](3) assert Person.__module__ == __main__.__name__ assert isinstance(person, Person) assert person.age == globals_copy['person'].age assert issubclass(CalendarSubclass, Calendar) assert isinstance(cal, CalendarSubclass) assert cal.weekdays() == globals_copy['cal'].weekdays() assert selfref is __main__ except AssertionError as error: error.args = (_error_line(error, obj, refimported),) raise def test_session_main(refimported): """test dump/load_module() for __main__, both in this process and in a subprocess""" extra_objects = {} if refimported: # Test unpickleable imported object in main. from sys import flags extra_objects['flags'] = flags with TestNamespace(**extra_objects) as ns: try: # Test session loading in a new session. 
dill.dump_module(session_file % refimported, refimported=refimported) from dill.tests.__main__ import python, shell, sp error = sp.call([python, __file__, '--child', str(refimported)], shell=shell) if error: sys.exit(error) finally: with suppress(OSError): os.remove(session_file % refimported) # Test session loading in the same session. session_buffer = BytesIO() dill.dump_module(session_buffer, refimported=refimported) session_buffer.seek(0) dill.load_module(session_buffer, module='__main__') ns.backup['_test_objects'](__main__, ns.backup, refimported) def test_session_other(): """test dump/load_module() for a module other than __main__""" import test_classdef as module atexit.register(_clean_up_cache, module) module.selfref = module dict_objects = [obj for obj in module.__dict__.keys() if not obj.startswith('__')] session_buffer = BytesIO() dill.dump_module(session_buffer, module) for obj in dict_objects: del module.__dict__[obj] session_buffer.seek(0) dill.load_module(session_buffer, module) assert all(obj in module.__dict__ for obj in dict_objects) assert module.selfref is module def test_runtime_module(): from types import ModuleType modname = '__runtime__' runtime = ModuleType(modname) runtime.x = 42 mod = dill.session._stash_modules(runtime) if mod is not runtime: print("There are objects to save by referenece that shouldn't be:", mod.__dill_imported, mod.__dill_imported_as, mod.__dill_imported_top_level, file=sys.stderr) # This is also for code coverage, tests the use case of dump_module(refimported=True) # without imported objects in the namespace. It's a contrived example because # even dill can't be in it. This should work after fixing #462. session_buffer = BytesIO() dill.dump_module(session_buffer, module=runtime, refimported=True) session_dump = session_buffer.getvalue() # Pass a new runtime created module with the same name. 
runtime = ModuleType(modname) # empty return_val = dill.load_module(BytesIO(session_dump), module=runtime) assert return_val is None assert runtime.__name__ == modname assert runtime.x == 42 assert runtime not in sys.modules.values() # Pass nothing as main. load_module() must create it. session_buffer.seek(0) runtime = dill.load_module(BytesIO(session_dump)) assert runtime.__name__ == modname assert runtime.x == 42 assert runtime not in sys.modules.values() def test_refimported_imported_as(): import collections import concurrent.futures import types import typing mod = sys.modules['__test__'] = types.ModuleType('__test__') dill.executor = concurrent.futures.ThreadPoolExecutor(max_workers=1) mod.Dict = collections.UserDict # select by type mod.AsyncCM = typing.AsyncContextManager # select by __module__ mod.thread_exec = dill.executor # select by __module__ with regex session_buffer = BytesIO() dill.dump_module(session_buffer, mod, refimported=True) session_buffer.seek(0) mod = dill.load(session_buffer) del sys.modules['__test__'] assert set(mod.__dill_imported_as) == { ('collections', 'UserDict', 'Dict'), ('typing', 'AsyncContextManager', 'AsyncCM'), ('dill', 'executor', 'thread_exec'), } def test_load_module_asdict(): with TestNamespace(): session_buffer = BytesIO() dill.dump_module(session_buffer) global empty, names, x, y x = y = 0 # change x and create y del empty globals_state = globals().copy() session_buffer.seek(0) main_vars = dill.load_module_asdict(session_buffer) assert main_vars is not globals() assert globals() == globals_state assert main_vars['__name__'] == '__main__' assert main_vars['names'] == names assert main_vars['names'] is not names assert main_vars['x'] != x assert 'y' not in main_vars assert 'empty' in main_vars if __name__ == '__main__': test_session_main(refimported=False) test_session_main(refimported=True) test_session_other() test_runtime_module() test_refimported_imported_as() test_load_module_asdict() 
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702239.0 dill-0.4.0/dill/tests/test_source.py0000644000076500000240000001562314735133337017220 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2008-2016 California Institute of Technology. # Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE from dill.source import getsource, getname, _wrap, getimport from dill.source import importable from dill._dill import IS_PYPY import sys PY310b = 0x30a00b1 f = lambda x: x**2 def g(x): return f(x) - x def h(x): def g(x): return x return g(x) - x class Foo(object): def bar(self, x): return x*x+x _foo = Foo() def add(x,y): return x+y # yes, same as 'f', but things are tricky when it comes to pointers squared = lambda x:x**2 class Bar: pass _bar = Bar() # inspect.getsourcelines # dill.source.getblocks def test_getsource(): assert getsource(f) == 'f = lambda x: x**2\n' assert getsource(g) == 'def g(x): return f(x) - x\n' assert getsource(h) == 'def h(x):\n def g(x): return x\n return g(x) - x\n' assert getname(f) == 'f' assert getname(g) == 'g' assert getname(h) == 'h' assert _wrap(f)(4) == 16 assert _wrap(g)(4) == 12 assert _wrap(h)(4) == 0 assert getname(Foo) == 'Foo' assert getname(Bar) == 'Bar' assert getsource(Bar) == 'class Bar:\n pass\n' assert getsource(Foo) == 'class Foo(object):\n def bar(self, x):\n return x*x+x\n' #XXX: add getsource for _foo, _bar # test itself def test_itself(): assert getimport(getimport)=='from dill.source import getimport\n' # builtin functions and objects def test_builtin(): assert getimport(pow) == 'pow\n' assert getimport(100) == '100\n' assert getimport(True) == 'True\n' assert getimport(pow, builtin=True) == 'from builtins import pow\n' assert getimport(100, builtin=True) == '100\n' assert 
getimport(True, builtin=True) == 'True\n' # this is kinda BS... you can't import a None assert getimport(None) == 'None\n' assert getimport(None, builtin=True) == 'None\n' # other imported functions def test_imported(): from math import sin assert getimport(sin) == 'from math import sin\n' # interactively defined functions def test_dynamic(): assert getimport(add) == 'from %s import add\n' % __name__ # interactive lambdas assert getimport(squared) == 'from %s import squared\n' % __name__ # classes and class instances def test_classes(): from io import BytesIO as StringIO y = "from _io import BytesIO\n" x = y if (IS_PYPY or sys.hexversion >= PY310b) else "from io import BytesIO\n" s = StringIO() assert getimport(StringIO) == x assert getimport(s) == y # interactively defined classes and class instances assert getimport(Foo) == 'from %s import Foo\n' % __name__ assert getimport(_foo) == 'from %s import Foo\n' % __name__ # test importable def test_importable(): assert importable(add, source=False) == 'from %s import add\n' % __name__ assert importable(squared, source=False) == 'from %s import squared\n' % __name__ assert importable(Foo, source=False) == 'from %s import Foo\n' % __name__ assert importable(Foo.bar, source=False) == 'from %s import bar\n' % __name__ assert importable(_foo.bar, source=False) == 'from %s import bar\n' % __name__ assert importable(None, source=False) == 'None\n' assert importable(100, source=False) == '100\n' assert importable(add, source=True) == 'def add(x,y):\n return x+y\n' assert importable(squared, source=True) == 'squared = lambda x:x**2\n' assert importable(None, source=True) == 'None\n' assert importable(Bar, source=True) == 'class Bar:\n pass\n' assert importable(Foo, source=True) == 'class Foo(object):\n def bar(self, x):\n return x*x+x\n' assert importable(Foo.bar, source=True) == 'def bar(self, x):\n return x*x+x\n' assert importable(Foo.bar, source=False) == 'from %s import bar\n' % __name__ assert importable(Foo.bar, 
def test_numpy():
    """Check importable() on numpy arrays and scalars; skip if numpy is absent.

    Fix: the original wrapped the *entire* body in try/except ImportError,
    so an ImportError raised by the assertions themselves would silently
    skip the test. Only the import is guarded now.
    """
    try:
        import numpy as np
    except ImportError:
        return  # numpy not installed; nothing to test
    y = np.array
    x = y([1,2,3])
    assert importable(x, source=False) == 'from numpy import array\narray([1, 2, 3])\n'
    assert importable(y, source=False) == 'from %s import array\n' % y.__module__
    assert importable(x, source=True) == 'from numpy import array\narray([1, 2, 3])\n'
    assert importable(y, source=True) == 'from %s import array\n' % y.__module__
    y = np.int64
    x = y(0)
    assert importable(x, source=False) == 'from numpy import int64\nint64(0)\n'
    assert importable(y, source=False) == 'from %s import int64\n' % y.__module__
    assert importable(x, source=True) == 'from numpy import int64\nint64(0)\n'
    assert importable(y, source=True) == 'from %s import int64\n' % y.__module__
    y = np.bool_
    x = y(0)
    import warnings
    with warnings.catch_warnings():
        warnings.filterwarnings('ignore', category=FutureWarning)
        warnings.filterwarnings('ignore', category=DeprecationWarning)
        # np.bool was an alias for the builtin bool before it was removed
        if hasattr(np, 'bool'):
            b = 'bool_' if np.bool is bool else 'bool'
        else:
            b = 'bool_'
        assert importable(x, source=False) == 'from numpy import %s\n%s(False)\n' % (b,b)
        assert importable(y, source=False) == 'from %s import %s\n' % (y.__module__,b)
        assert importable(x, source=True) == 'from numpy import %s\n%s(False)\n' % (b,b)
        assert importable(y, source=True) == 'from %s import %s\n' % (y.__module__,b)
The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE """ check that dill.source performs as expected with changes to locals in 3.13.0b1 see: https://github.com/python/cpython/issues/118888 """ # repeat functions from test_source.py f = lambda x: x**2 def g(x): return f(x) - x def h(x): def g(x): return x return g(x) - x class Foo(object): def bar(self, x): return x*x+x _foo = Foo() def add(x,y): return x+y squared = lambda x:x**2 class Bar: pass _bar = Bar() # repeat, but from test_source.py import test_source as ts # test objects created in other test modules import test_mixins as tm import dill.source as ds def test_isfrommain(): assert ds.isfrommain(add) == True assert ds.isfrommain(squared) == True assert ds.isfrommain(Bar) == True assert ds.isfrommain(_bar) == True assert ds.isfrommain(ts.add) == False assert ds.isfrommain(ts.squared) == False assert ds.isfrommain(ts.Bar) == False assert ds.isfrommain(ts._bar) == False assert ds.isfrommain(tm.quad) == False assert ds.isfrommain(tm.double_add) == False assert ds.isfrommain(tm.quadratic) == False assert ds.isdynamic(add) == False assert ds.isdynamic(squared) == False assert ds.isdynamic(ts.add) == False assert ds.isdynamic(ts.squared) == False assert ds.isdynamic(tm.double_add) == False assert ds.isdynamic(tm.quadratic) == False def test_matchlambda(): assert ds._matchlambda(f, 'f = lambda x: x**2\n') assert ds._matchlambda(squared, 'squared = lambda x:x**2\n') assert ds._matchlambda(ts.f, 'f = lambda x: x**2\n') assert ds._matchlambda(ts.squared, 'squared = lambda x:x**2\n') def test_findsource(): lines, lineno = ds.findsource(add) assert lines[lineno] == 'def add(x,y):\n' lines, lineno = ds.findsource(ts.add) assert lines[lineno] == 'def add(x,y):\n' lines, lineno = ds.findsource(squared) assert lines[lineno] == 'squared = lambda x:x**2\n' lines, lineno = ds.findsource(ts.squared) assert lines[lineno] == 'squared = lambda x:x**2\n' lines, lineno = 
ds.findsource(Bar) assert lines[lineno] == 'class Bar:\n' lines, lineno = ds.findsource(ts.Bar) assert lines[lineno] == 'class Bar:\n' lines, lineno = ds.findsource(_bar) assert lines[lineno] == 'class Bar:\n' lines, lineno = ds.findsource(ts._bar) assert lines[lineno] == 'class Bar:\n' lines, lineno = ds.findsource(tm.quad) assert lines[lineno] == 'def quad(a=1, b=1, c=0):\n' lines, lineno = ds.findsource(tm.double_add) assert lines[lineno] == ' def func(*args, **kwds):\n' lines, lineno = ds.findsource(tm.quadratic) assert lines[lineno] == ' def dec(f):\n' def test_getsourcelines(): assert ''.join(ds.getsourcelines(add)[0]) == 'def add(x,y):\n return x+y\n' assert ''.join(ds.getsourcelines(ts.add)[0]) == 'def add(x,y):\n return x+y\n' assert ''.join(ds.getsourcelines(squared)[0]) == 'squared = lambda x:x**2\n' assert ''.join(ds.getsourcelines(ts.squared)[0]) == 'squared = lambda x:x**2\n' assert ''.join(ds.getsourcelines(Bar)[0]) == 'class Bar:\n pass\n' assert ''.join(ds.getsourcelines(ts.Bar)[0]) == 'class Bar:\n pass\n' assert ''.join(ds.getsourcelines(_bar)[0]) == 'class Bar:\n pass\n' #XXX: ? assert ''.join(ds.getsourcelines(ts._bar)[0]) == 'class Bar:\n pass\n' #XXX: ? 
assert ''.join(ds.getsourcelines(tm.quad)[0]) == 'def quad(a=1, b=1, c=0):\n inverted = [False]\n def invert():\n inverted[0] = not inverted[0]\n def dec(f):\n def func(*args, **kwds):\n x = f(*args, **kwds)\n if inverted[0]: x = -x\n return a*x**2 + b*x + c\n func.__wrapped__ = f\n func.invert = invert\n func.inverted = inverted\n return func\n return dec\n' assert ''.join(ds.getsourcelines(tm.quadratic)[0]) == ' def dec(f):\n def func(*args,**kwds):\n fx = f(*args,**kwds)\n return a*fx**2 + b*fx + c\n return func\n' assert ''.join(ds.getsourcelines(tm.quadratic, lstrip=True)[0]) == 'def dec(f):\n def func(*args,**kwds):\n fx = f(*args,**kwds)\n return a*fx**2 + b*fx + c\n return func\n' assert ''.join(ds.getsourcelines(tm.quadratic, enclosing=True)[0]) == 'def quad_factory(a=1,b=1,c=0):\n def dec(f):\n def func(*args,**kwds):\n fx = f(*args,**kwds)\n return a*fx**2 + b*fx + c\n return func\n return dec\n' assert ''.join(ds.getsourcelines(tm.double_add)[0]) == ' def func(*args, **kwds):\n x = f(*args, **kwds)\n if inverted[0]: x = -x\n return a*x**2 + b*x + c\n' assert ''.join(ds.getsourcelines(tm.double_add, enclosing=True)[0]) == 'def quad(a=1, b=1, c=0):\n inverted = [False]\n def invert():\n inverted[0] = not inverted[0]\n def dec(f):\n def func(*args, **kwds):\n x = f(*args, **kwds)\n if inverted[0]: x = -x\n return a*x**2 + b*x + c\n func.__wrapped__ = f\n func.invert = invert\n func.inverted = inverted\n return func\n return dec\n' def test_indent(): assert ds.outdent(''.join(ds.getsourcelines(tm.quadratic)[0])) == ''.join(ds.getsourcelines(tm.quadratic, lstrip=True)[0]) assert ds.indent(''.join(ds.getsourcelines(tm.quadratic, lstrip=True)[0]), 2) == ''.join(ds.getsourcelines(tm.quadratic)[0]) def test_dumpsource(): local = {} exec(ds.dumpsource(add, alias='raw'), {}, local) exec(ds.dumpsource(ts.add, alias='mod'), {}, local) assert local['raw'](1,2) == local['mod'](1,2) exec(ds.dumpsource(squared, alias='raw'), {}, local) exec(ds.dumpsource(ts.squared, 
alias='mod'), {}, local) assert local['raw'](3) == local['mod'](3) assert ds._wrap(add)(1,2) == ds._wrap(ts.add)(1,2) assert ds._wrap(squared)(3) == ds._wrap(ts.squared)(3) def test_name(): assert ds._namespace(add) == ds.getname(add, fqn=True).split('.') assert ds._namespace(ts.add) == ds.getname(ts.add, fqn=True).split('.') assert ds._namespace(squared) == ds.getname(squared, fqn=True).split('.') assert ds._namespace(ts.squared) == ds.getname(ts.squared, fqn=True).split('.') assert ds._namespace(Bar) == ds.getname(Bar, fqn=True).split('.') assert ds._namespace(ts.Bar) == ds.getname(ts.Bar, fqn=True).split('.') assert ds._namespace(tm.quad) == ds.getname(tm.quad, fqn=True).split('.') #XXX: the following also works, however behavior may be wrong for nested functions #assert ds._namespace(tm.double_add) == ds.getname(tm.double_add, fqn=True).split('.') #assert ds._namespace(tm.quadratic) == ds.getname(tm.quadratic, fqn=True).split('.') assert ds.getname(add) == 'add' assert ds.getname(ts.add) == 'add' assert ds.getname(squared) == 'squared' assert ds.getname(ts.squared) == 'squared' assert ds.getname(Bar) == 'Bar' assert ds.getname(ts.Bar) == 'Bar' assert ds.getname(tm.quad) == 'quad' assert ds.getname(tm.double_add) == 'func' #XXX: ? assert ds.getname(tm.quadratic) == 'dec' #XXX: ? def test_getimport(): local = {} exec(ds.getimport(add, alias='raw'), {}, local) exec(ds.getimport(ts.add, alias='mod'), {}, local) assert local['raw'](1,2) == local['mod'](1,2) exec(ds.getimport(squared, alias='raw'), {}, local) exec(ds.getimport(ts.squared, alias='mod'), {}, local) assert local['raw'](3) == local['mod'](3) exec(ds.getimport(Bar, alias='raw'), {}, local) exec(ds.getimport(ts.Bar, alias='mod'), {}, local) assert ds.getname(local['raw']) == ds.getname(local['mod']) exec(ds.getimport(tm.quad, alias='mod'), {}, local) assert local['mod']()(sum)([1,2,3]) == tm.quad()(sum)([1,2,3]) #FIXME: wrong results for nested functions (e.g. 
tm.double_add, tm.quadratic) def test_importable(): assert ds.importable(add, source=False) == ds.getimport(add) assert ds.importable(add) == ds.getsource(add) assert ds.importable(squared, source=False) == ds.getimport(squared) assert ds.importable(squared) == ds.getsource(squared) assert ds.importable(Bar, source=False) == ds.getimport(Bar) assert ds.importable(Bar) == ds.getsource(Bar) assert ds.importable(ts.add) == ds.getimport(ts.add) assert ds.importable(ts.add, source=True) == ds.getsource(ts.add) assert ds.importable(ts.squared) == ds.getimport(ts.squared) assert ds.importable(ts.squared, source=True) == ds.getsource(ts.squared) assert ds.importable(ts.Bar) == ds.getimport(ts.Bar) assert ds.importable(ts.Bar, source=True) == ds.getsource(ts.Bar) if __name__ == '__main__': test_isfrommain() test_matchlambda() test_findsource() test_getsourcelines() test_indent() test_dumpsource() test_name() test_getimport() test_importable() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702239.0 dill-0.4.0/dill/tests/test_temp.py0000644000076500000240000000507314735133337016663 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2008-2016 California Institute of Technology. # Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. 
The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE import sys from dill.temp import dump, dump_source, dumpIO, dumpIO_source from dill.temp import load, load_source, loadIO, loadIO_source WINDOWS = sys.platform[:3] == 'win' f = lambda x: x**2 x = [1,2,3,4,5] # source code to tempfile def test_code_to_tempfile(): if not WINDOWS: #see: https://bugs.python.org/issue14243 pyfile = dump_source(f, alias='_f') _f = load_source(pyfile) assert _f(4) == f(4) # source code to stream def test_code_to_stream(): pyfile = dumpIO_source(f, alias='_f') _f = loadIO_source(pyfile) assert _f(4) == f(4) # pickle to tempfile def test_pickle_to_tempfile(): if not WINDOWS: #see: https://bugs.python.org/issue14243 dumpfile = dump(x) _x = load(dumpfile) assert _x == x # pickle to stream def test_pickle_to_stream(): dumpfile = dumpIO(x) _x = loadIO(dumpfile) assert _x == x ### now testing the objects ### f = lambda x: x**2 def g(x): return f(x) - x def h(x): def g(x): return x return g(x) - x class Foo(object): def bar(self, x): return x*x+x _foo = Foo() def add(x,y): return x+y # yes, same as 'f', but things are tricky when it comes to pointers squared = lambda x:x**2 class Bar: pass _bar = Bar() # test function-type objects that take 2 args def test_two_arg_functions(): for obj in [add]: pyfile = dumpIO_source(obj, alias='_obj') _obj = loadIO_source(pyfile) assert _obj(4,2) == obj(4,2) # test function-type objects that take 1 arg def test_one_arg_functions(): for obj in [g, h, squared]: pyfile = dumpIO_source(obj, alias='_obj') _obj = loadIO_source(pyfile) assert _obj(4) == obj(4) # test instance-type objects #for obj in [_bar, _foo]: # pyfile = dumpIO_source(obj, alias='_obj') # _obj = loadIO_source(pyfile) # assert type(_obj) == type(obj) # test the rest of the objects def test_the_rest(): for obj in [Bar, Foo, Foo.bar, _foo.bar]: pyfile = dumpIO_source(obj, alias='_obj') _obj = loadIO_source(pyfile) assert _obj.__name__ == obj.__name__ if 
__name__ == '__main__': test_code_to_tempfile() test_code_to_stream() test_pickle_to_tempfile() test_pickle_to_stream() test_two_arg_functions() test_one_arg_functions() test_the_rest() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702239.0 dill-0.4.0/dill/tests/test_threads.py0000644000076500000240000000235114735133337017344 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2024-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE import dill dill.settings['recurse'] = True def test_new_thread(): import threading t = threading.Thread() t_ = dill.copy(t) assert t.is_alive() == t_.is_alive() for i in ['daemon','name','ident','native_id']: if hasattr(t, i): assert getattr(t, i) == getattr(t_, i) def test_run_thread(): import threading t = threading.Thread() t.start() t_ = dill.copy(t) assert t.is_alive() == t_.is_alive() for i in ['daemon','name','ident','native_id']: if hasattr(t, i): assert getattr(t, i) == getattr(t_, i) def test_join_thread(): import threading t = threading.Thread() t.start() t.join() t_ = dill.copy(t) assert t.is_alive() == t_.is_alive() for i in ['daemon','name','ident','native_id']: if hasattr(t, i): assert getattr(t, i) == getattr(t_, i) if __name__ == '__main__': test_new_thread() test_run_thread() test_join_thread() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702239.0 dill-0.4.0/dill/tests/test_weakref.py0000644000076500000240000000310214735133337017331 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2008-2016 California Institute of Technology. # Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. 
The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE import dill dill.settings['recurse'] = True import weakref class _class: def _method(self): pass class _callable_class: def __call__(self): pass def _function(): pass def test_weakref(): o = _class() oc = _callable_class() f = _function x = _class # ReferenceType r = weakref.ref(o) d_r = weakref.ref(_class()) fr = weakref.ref(f) xr = weakref.ref(x) # ProxyType p = weakref.proxy(o) d_p = weakref.proxy(_class()) # CallableProxyType cp = weakref.proxy(oc) d_cp = weakref.proxy(_callable_class()) fp = weakref.proxy(f) xp = weakref.proxy(x) objlist = [r,d_r,fr,xr, p,d_p, cp,d_cp,fp,xp] #dill.detect.trace(True) for obj in objlist: res = dill.detect.errors(obj) if res: print ("%r:\n %s" % (obj, res)) # else: # print ("PASS: %s" % obj) assert not res def test_dictproxy(): from dill._dill import DictProxyType try: m = DictProxyType({"foo": "bar"}) except Exception: m = type.__dict__ mp = dill.copy(m) assert mp.items() == m.items() if __name__ == '__main__': test_weakref() from dill._dill import IS_PYPY if not IS_PYPY: test_dictproxy() ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1744763843.2579973 dill-0.4.0/dill.egg-info/0000755000076500000240000000000014777575703014645 5ustar00mmckernsstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744763843.0 dill-0.4.0/dill.egg-info/PKG-INFO0000644000076500000240000002367214777575703015754 0ustar00mmckernsstaffMetadata-Version: 2.1 Name: dill Version: 0.4.0 Summary: serialize all of Python Home-page: https://github.com/uqfoundation/dill Download-URL: https://pypi.org/project/dill/#files Author: Mike McKerns Author-email: mmckerns@uqfoundation.org Maintainer: Mike McKerns Maintainer-email: mmckerns@uqfoundation.org License: BSD-3-Clause Project-URL: Documentation, http://dill.rtfd.io Project-URL: Source Code, https://github.com/uqfoundation/dill Project-URL: Bug 
Tracker, https://github.com/uqfoundation/dill/issues Platform: Linux Platform: Windows Platform: Mac Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers Classifier: Intended Audience :: Science/Research Classifier: License :: OSI Approved :: BSD License Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.8 Classifier: Programming Language :: Python :: 3.9 Classifier: Programming Language :: Python :: 3.10 Classifier: Programming Language :: Python :: 3.11 Classifier: Programming Language :: Python :: 3.12 Classifier: Programming Language :: Python :: 3.13 Classifier: Programming Language :: Python :: Implementation :: CPython Classifier: Programming Language :: Python :: Implementation :: PyPy Classifier: Topic :: Scientific/Engineering Classifier: Topic :: Software Development Requires-Python: >=3.8 License-File: LICENSE Provides-Extra: readline Provides-Extra: graph Requires-Dist: objgraph>=1.7.2; extra == "graph" Provides-Extra: profile Requires-Dist: gprof2dot>=2022.7.29; extra == "profile" ----------------------------- dill: serialize all of Python ----------------------------- About Dill ========== ``dill`` extends Python's ``pickle`` module for serializing and de-serializing Python objects to the majority of the built-in Python types. Serialization is the process of converting an object to a byte stream, and the inverse of which is converting a byte stream back to a Python object hierarchy. ``dill`` provides the user the same interface as the ``pickle`` module, and also includes some additional features. In addition to pickling Python objects, ``dill`` provides the ability to save the state of an interpreter session in a single command. 
Hence, it would be feasible to save an interpreter session, close the interpreter, ship the pickled file to another computer, open a new interpreter, unpickle the session and thus continue from the 'saved' state of the original interpreter session. ``dill`` can be used to store Python objects to a file, but the primary usage is to send Python objects across the network as a byte stream. ``dill`` is quite flexible, and allows arbitrary user defined classes and functions to be serialized. Thus ``dill`` is not intended to be secure against erroneously or maliciously constructed data. It is left to the user to decide whether the data they unpickle is from a trustworthy source. ``dill`` is part of ``pathos``, a Python framework for heterogeneous computing. ``dill`` is in active development, so any user feedback, bug reports, comments, or suggestions are highly appreciated. A list of issues is located at https://github.com/uqfoundation/dill/issues, with a legacy list maintained at https://uqfoundation.github.io/project/pathos/query. 
Major Features ============== ``dill`` can pickle the following standard types: - none, type, bool, int, float, complex, bytes, str, - tuple, list, dict, file, buffer, builtin, - Python classes, namedtuples, dataclasses, metaclasses, - instances of classes, - set, frozenset, array, functions, exceptions ``dill`` can also pickle more 'exotic' standard types: - functions with yields, nested functions, lambdas, - cell, method, unboundmethod, module, code, methodwrapper, - methoddescriptor, getsetdescriptor, memberdescriptor, wrapperdescriptor, - dictproxy, slice, notimplemented, ellipsis, quit ``dill`` cannot yet pickle these standard types: - frame, generator, traceback ``dill`` also provides the capability to: - save and load Python interpreter sessions - save and extract the source code from functions and classes - interactively diagnose pickling errors Current Release =============== The latest released version of ``dill`` is available from: https://pypi.org/project/dill ``dill`` is distributed under a 3-clause BSD license. Development Version =================== You can get the latest development version with all the shiny new features at: https://github.com/uqfoundation If you have a new contribution, please submit a pull request. Installation ============ ``dill`` can be installed with ``pip``:: $ pip install dill To optionally include the ``objgraph`` diagnostic tool in the install:: $ pip install dill[graph] To optionally include the ``gprof2dot`` diagnostic tool in the install:: $ pip install dill[profile] For windows users, to optionally install session history tools:: $ pip install dill[readline] Requirements ============ ``dill`` requires: - ``python`` (or ``pypy``), **>=3.8** - ``setuptools``, **>=42** Optional requirements: - ``objgraph``, **>=1.7.2** - ``gprof2dot``, **>=2022.7.29** - ``pyreadline``, **>=1.7.1** (on windows) Basic Usage =========== ``dill`` is a drop-in replacement for ``pickle``. 
Existing code can be updated to allow complete pickling using:: >>> import dill as pickle or:: >>> from dill import dumps, loads ``dumps`` converts the object to a unique byte string, and ``loads`` performs the inverse operation:: >>> squared = lambda x: x**2 >>> loads(dumps(squared))(3) 9 There are a number of options to control serialization which are provided as keyword arguments to several ``dill`` functions: * with *protocol*, the pickle protocol level can be set. This uses the same value as the ``pickle`` module, *DEFAULT_PROTOCOL*. * with *byref=True*, ``dill`` to behave a lot more like pickle with certain objects (like modules) pickled by reference as opposed to attempting to pickle the object itself. * with *recurse=True*, objects referred to in the global dictionary are recursively traced and pickled, instead of the default behavior of attempting to store the entire global dictionary. * with *fmode*, the contents of the file can be pickled along with the file handle, which is useful if the object is being sent over the wire to a remote system which does not have the original file on disk. Options are *HANDLE_FMODE* for just the handle, *CONTENTS_FMODE* for the file content and *FILE_FMODE* for content and handle. * with *ignore=False*, objects reconstructed with types defined in the top-level script environment use the existing type in the environment rather than a possibly different reconstructed type. The default serialization can also be set globally in *dill.settings*. 
Thus, we can modify how ``dill`` handles references to the global dictionary locally or globally:: >>> import dill.settings >>> dumps(absolute) == dumps(absolute, recurse=True) False >>> dill.settings['recurse'] = True >>> dumps(absolute) == dumps(absolute, recurse=True) True ``dill`` also includes source code inspection, as an alternate to pickling:: >>> import dill.source >>> print(dill.source.getsource(squared)) squared = lambda x:x**2 To aid in debugging pickling issues, use *dill.detect* which provides tools like pickle tracing:: >>> import dill.detect >>> with dill.detect.trace(): >>> dumps(squared) ┬ F1: at 0x7fe074f8c280> ├┬ F2: │└ # F2 [34 B] ├┬ Co: at 0x7fe07501eb30, file "", line 1> │├┬ F2: ││└ # F2 [19 B] │└ # Co [87 B] ├┬ D1: │└ # D1 [22 B] ├┬ D2: │└ # D2 [2 B] ├┬ D2: │├┬ D2: ││└ # D2 [2 B] │└ # D2 [23 B] └ # F1 [180 B] With trace, we see how ``dill`` stored the lambda (``F1``) by first storing ``_create_function``, the underlying code object (``Co``) and ``_create_code`` (which is used to handle code objects), then we handle the reference to the global dict (``D2``) plus other dictionaries (``D1`` and ``D2``) that save the lambda object's state. A ``#`` marks when the object is actually stored. More Information ================ Probably the best way to get started is to look at the documentation at http://dill.rtfd.io. Also see ``dill.tests`` for a set of scripts that demonstrate how ``dill`` can serialize different Python objects. You can run the test suite with ``python -m dill.tests``. The contents of any pickle file can be examined with ``undill``. As ``dill`` conforms to the ``pickle`` interface, the examples and documentation found at http://docs.python.org/library/pickle.html also apply to ``dill`` if one will ``import dill as pickle``. The source code is also generally well documented, so further questions may be resolved by inspecting the code itself. 
Please feel free to submit a ticket on github, or ask a question on stackoverflow (**@Mike McKerns**). If you would like to share how you use ``dill`` in your work, please send an email (to **mmckerns at uqfoundation dot org**). Citation ======== If you use ``dill`` to do research that leads to publication, we ask that you acknowledge use of ``dill`` by citing the following in your publication:: M.M. McKerns, L. Strand, T. Sullivan, A. Fang, M.A.G. Aivazis, "Building a framework for predictive science", Proceedings of the 10th Python in Science Conference, 2011; http://arxiv.org/pdf/1202.1056 Michael McKerns and Michael Aivazis, "pathos: a framework for heterogeneous computing", 2010- ; https://uqfoundation.github.io/project/pathos Please see https://uqfoundation.github.io/project/pathos or http://arxiv.org/pdf/1202.1056 for further information. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744763843.0 dill-0.4.0/dill.egg-info/SOURCES.txt0000644000076500000240000000314414777575703016533 0ustar00mmckernsstaff.codecov.yml .coveragerc .gitignore .readthedocs.yml .travis.yml LICENSE MANIFEST.in README.md pyproject.toml setup.cfg setup.py tox.ini version.py dill/__diff.py dill/__info__.py dill/__init__.py dill/_dill.py dill/_objects.py dill/_shims.py dill/detect.py dill/logger.py dill/objtypes.py dill/pointers.py dill/session.py dill/settings.py dill/source.py dill/temp.py dill.egg-info/PKG-INFO dill.egg-info/SOURCES.txt dill.egg-info/dependency_links.txt dill.egg-info/not-zip-safe dill.egg-info/requires.txt dill.egg-info/top_level.txt dill/tests/__init__.py dill/tests/__main__.py dill/tests/test_abc.py dill/tests/test_check.py dill/tests/test_classdef.py dill/tests/test_dataclasses.py dill/tests/test_detect.py dill/tests/test_dictviews.py dill/tests/test_diff.py dill/tests/test_extendpickle.py dill/tests/test_fglobals.py dill/tests/test_file.py dill/tests/test_functions.py dill/tests/test_functors.py dill/tests/test_logger.py 
dill/tests/test_mixins.py dill/tests/test_module.py dill/tests/test_moduledict.py dill/tests/test_nested.py dill/tests/test_objects.py dill/tests/test_properties.py dill/tests/test_pycapsule.py dill/tests/test_recursive.py dill/tests/test_registered.py dill/tests/test_restricted.py dill/tests/test_selected.py dill/tests/test_session.py dill/tests/test_source.py dill/tests/test_sources.py dill/tests/test_temp.py dill/tests/test_threads.py dill/tests/test_weakref.py docs/Makefile docs/requirements.txt docs/source/conf.py docs/source/dill.rst docs/source/index.rst docs/source/pathos.png docs/source/scripts.rst docs/source/_static/css/custom.css scripts/get_gprof scripts/get_objgraph scripts/undill././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744763843.0 dill-0.4.0/dill.egg-info/dependency_links.txt0000644000076500000240000000000114777575703020713 0ustar00mmckernsstaff ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744763842.0 dill-0.4.0/dill.egg-info/not-zip-safe0000644000076500000240000000000114777575702017072 0ustar00mmckernsstaff ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744763843.0 dill-0.4.0/dill.egg-info/requires.txt0000644000076500000240000000010514777575703017241 0ustar00mmckernsstaff [graph] objgraph>=1.7.2 [profile] gprof2dot>=2022.7.29 [readline] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744763843.0 dill-0.4.0/dill.egg-info/top_level.txt0000644000076500000240000000000514777575703017372 0ustar00mmckernsstaffdill ././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1744763843.247672 dill-0.4.0/docs/0000755000076500000240000000000014777575703013157 5ustar00mmckernsstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1674962506.0 dill-0.4.0/docs/Makefile0000644000076500000240000000123414365363112014575 0ustar00mmckernsstaff# Minimal makefile for Sphinx 
documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build SPHINXPROJ = dill SOURCEDIR = source BUILDDIR = build # Internal variables ALLSPHINXOPTS = $(SPHINXOPTS) $(SOURCEDIR) # Put it first so that "make" without argument is like "make help". help: @echo "Please use \`make html' to generate standalone HTML files" .PHONY: help clean html Makefile clean: -rm -rf $(BUILDDIR) html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR) -rm -f $(BUILDDIR)/../../scripts/_*py -rm -f $(BUILDDIR)/../../scripts/_*pyc -rm -rf $(BUILDDIR)/../../scripts/__pycache__ ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1741750643.0 dill-0.4.0/docs/requirements.txt0000644000076500000240000000227714764200563016434 0ustar00mmckernsstaff# Packages required to build docs # dependencies pinned as: # https://github.com/readthedocs/readthedocs.org/blob/543f389ddd184ff91dac6a7b808dd21697292fd5/requirements/docs.txt alabaster==1.0.0 anyio==4.8.0 babel==2.17.0 certifi==2025.1.31 charset-normalizer==3.4.1 click==8.1.8 colorama==0.4.6 docutils==0.21.2 exceptiongroup==1.2.2 h11==0.14.0 idna==3.10 imagesize==1.4.1 jinja2==3.1.6 markdown-it-py==3.0.0 markupsafe==3.0.2 mdit-py-plugins==0.4.2 mdurl==0.1.2 myst-parser==4.0.0 packaging==24.2 pygments==2.19.1 pyyaml==6.0.2 requests==2.32.3 six==1.17.0 sniffio==1.3.1 snowballstemmer==2.2.0 sphinx==8.1.3 sphinx-autobuild==2024.10.3 sphinx-copybutton==0.5.2 sphinx-design==0.6.1 sphinx-intl==2.3.1 sphinx-multiproject==1.0.0 sphinx-notfound-page==1.1.0 sphinx-prompt==1.9.0 sphinx-rtd-theme==3.0.2 sphinx-tabs==3.4.7 sphinxcontrib-applehelp==2.0.0 sphinxcontrib-devhelp==2.0.0 sphinxcontrib-htmlhelp==2.1.0 sphinxcontrib-httpdomain==1.8.1 sphinxcontrib-jquery==4.1 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==2.0.0 sphinxcontrib-serializinghtml==2.0.0 sphinxext-opengraph==0.9.1 starlette==0.45.3 tomli==2.2.1 typing-extensions==4.12.2 urllib3==2.3.0 uvicorn==0.34.0 
watchfiles==1.0.4 websockets==14.2 ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1744763843.2529318 dill-0.4.0/docs/source/0000755000076500000240000000000014777575703014457 5ustar00mmckernsstaff././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1744763843.1460202 dill-0.4.0/docs/source/_static/0000755000076500000240000000000014777575703016105 5ustar00mmckernsstaff././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1744763843.254341 dill-0.4.0/docs/source/_static/css/0000755000076500000240000000000014777575703016675 5ustar00mmckernsstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1688532017.0 dill-0.4.0/docs/source/_static/css/custom.css0000644000076500000240000000012514451172061020672 0ustar00mmckernsstaffdiv.sphinxsidebar { height: 100%; /* 100vh */ overflow: auto; /* overflow-y */ } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1727839031.0 dill-0.4.0/docs/source/conf.py0000644000076500000240000002013514677135467015755 0ustar00mmckernsstaff# -*- coding: utf-8 -*- # # dill documentation build configuration file, created by # sphinx-quickstart on Sun Aug 6 06:50:58 2017. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. 
# import os from datetime import datetime import sys sys.path.insert(0, os.path.abspath('../..')) scripts = os.path.abspath('../../scripts') sys.path.insert(0, scripts) try: os.symlink(scripts+os.sep+'undill', scripts+os.sep+'_undill.py') os.symlink(scripts+os.sep+'get_objgraph', scripts+os.sep+'_get_objgraph.py') os.symlink(scripts+os.sep+'get_gprof', scripts+os.sep+'_get_gprof.py') except: pass # Import the project import dill # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. # # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.imgmath', 'sphinx.ext.ifconfig', 'sphinx.ext.napoleon'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] source_suffix = '.rst' # The master toctree document. master_doc = 'index' # General information about the project. 
project = 'dill' year = datetime.now().year copyright = '%d, The Uncertainty Quantification Foundation' % year author = 'Mike McKerns' # extension config github_project_url = "https://github.com/uqfoundation/dill" autoclass_content = 'both' autodoc_default_options = { 'members': True, 'undoc-members': True, 'private-members': True, 'special-members': True, 'show-inheritance': True, 'imported-members': True, 'exclude-members': ( #NOTE: this is a single string concatenation '__dict__,' # may be verbose '__slots__,' '__weakref__,' '__module__,' '_abc_impl,' '__init__,' # redundant with class docstring by "autoclass_content=both" '__annotations__,' # redundant with signature documentation '__dataclass_fields__,' # redundant automatic attribute ) } autodoc_typehints = 'description' autodoc_typehints_format = 'short' napoleon_include_private_with_doc = False napoleon_include_special_with_doc = True napoleon_use_ivar = True napoleon_use_param = True # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = dill.__version__ # The full version, including alpha/beta/rc tags. release = version # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = 'en' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This patterns also effect to html_static_path and html_extra_path exclude_patterns = [] # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # If true, `todo` and `todoList` produce output, else they produce nothing. 
todo_include_todos = False # Configure how the modules, functions, etc names look add_module_names = False modindex_common_prefix = ['dill.'] # -- Options for HTML output ---------------------------------------------- # on_rtd is whether we are on readthedocs.io on_rtd = os.environ.get('READTHEDOCS', None) == 'True' # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # if not on_rtd: html_theme = 'alabaster' #'bizstyle' html_css_files = ['css/custom.css',] #import sphinx_rtd_theme #html_theme = 'sphinx_rtd_theme' #html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] else: html_theme = 'sphinx_rtd_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = { 'github_user': 'uqfoundation', 'github_repo': 'dill', 'github_button': False, 'github_banner': True, 'travis_button': True, 'codecov_button': True, 'donate_url': 'http://uqfoundation.org/pages/donate.html', 'gratipay_user': False, # username 'extra_nav_links': {'Module Index': 'py-modindex.html'}, # 'show_related': True, # 'globaltoc_collapse': True, 'globaltoc_maxdepth': 4, 'show_powered_by': False } # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Custom sidebar templates, must be a dictionary that maps document names # to template names. 
# # This is required for the alabaster theme # refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars if on_rtd: toc_style = 'localtoc.html', # display the toctree else: toc_style = 'globaltoc.html', # collapse the toctree html_sidebars = { '**': [ 'about.html', 'donate.html', 'searchbox.html', # 'navigation.html', toc_style, # defined above 'relations.html', # needs 'show_related':True option to display ] } # -- Options for HTMLHelp output ------------------------------------------ # Output file base name for HTML help builder. htmlhelp_basename = 'dilldoc' # Logo for sidebar html_logo = 'pathos.png' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # # 'preamble': '', # Latex figure (float) alignment # # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ (master_doc, 'dill.tex', 'dill Documentation', 'Mike McKerns', 'manual'), ] # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ (master_doc, 'dill', 'dill Documentation', [author], 1) ] # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'dill', 'dill Documentation', author, 'dill', 'Serialize all of python.', 'Miscellaneous'), ] # Example configuration for intersphinx: refer to the Python standard library. 
intersphinx_mapping = { 'python': ('https://docs.python.org/3', None), # 'mystic': ('https://mystic.readthedocs.io/en/latest/', None), # 'pathos': ('https://pathos.readthedocs.io/en/latest/', None), # 'pox': ('https://pox.readthedocs.io/en/latest/', None), # 'multiprocess': ('https://multiprocess.readthedocs.io/en/latest/', None), # 'ppft': ('https://ppft.readthedocs.io/en/latest/', None), # 'klepto': ('https://klepto.readthedocs.io/en/latest/', None), # 'pyina': ('https://pyina.readthedocs.io/en/latest/', None), } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1675828441.0 dill-0.4.0/docs/source/dill.rst0000644000076500000240000000151614370616331016116 0ustar00mmckernsstaffdill module documentation ========================= detect module ------------- .. automodule:: dill.detect .. :exclude-members: +ismethod, isfunction, istraceback, isframe, iscode, parent, reference, at, parents, children logger module ------------- .. automodule:: dill.logger :exclude-members: +trace objtypes module --------------- .. automodule:: dill.objtypes .. :exclude-members: + pointers module --------------- .. automodule:: dill.pointers .. :exclude-members: + session module -------------- .. automodule:: dill.session .. :exclude-members: +dump_session, load_session settings module --------------- .. automodule:: dill.settings .. :exclude-members: + source module ------------- .. automodule:: dill.source .. :exclude-members: + temp module ----------- .. automodule:: dill.temp .. :exclude-members: + ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1688536505.0 dill-0.4.0/docs/source/index.rst0000644000076500000240000000045614451202671016301 0ustar00mmckernsstaff.. dill documentation master file dill package documentation ========================== .. toctree:: :hidden: :maxdepth: 2 self dill scripts .. automodule:: dill .. 
:exclude-members: + Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1502478447.0 dill-0.4.0/docs/source/pathos.png0000644000076500000240000023146613143400157016447 0ustar00mmckernsstaffPNG  IHDRI("WCiCCPICC ProfilexXy8?L-0kf0'EٗDEDDHRHJ$zs>9>ss&A&?J6#>eii KYy `Ԯ)79rLGm8B@2>T)KX@ ޻vq;ཀྵC= @~==~N#AY_B鲮Au\*i@6\PIK? 8h[+wo ys064#ۋPlilnoomooD|p`Ȏ, }(9Pa.x<I#5(gbflU 49-$N? /꠿ؖD$T lU%r# j3BU-SlzeƬ&*!+zkc6wlGԏtvjqreuSte=f>\2*Dm?cs:<8H,"6<"?xJ%6j3vgbXb |b|l%vq WSSCӊ{fgfd^IͿ~/>`K⍭t%enIޖzW\G+ԫT+=~$Y'X~_OX +_mz\ђ,ܵM][gcW nް/* S_x}P[?]V!gDnc}tcX8ç ']jui*o=ӄLwͳvS?b5,a+W=~ot6ކ]|zۨ .%u+Ef [+g .(_B`DDl+KKݓޒ5;"7hܪ눔zF 斖Abp0֨ɈOs. iK=+'kʱK6ٶ۷;:>irCm{,G/0Xo}PycB T܏=)R q7CR2(YbrJ &Jf*jG0,_G uNӗ52DM4YZXYsa3dhW`pM']]/<4q\lOcmA >̈́ēD-p!185;r!X-GHqSKXs8%B,]d)쩹tmgRϺHl=wxQ=;|2%h1cEimbΖ5{uO@)p P@@4 vLبxp3_X {&# (`a0_%MX3#l.gEVpFG# H7d@#JHJӁBT-jV6vN. 0}}# ;C5#=;#4aaJcZg1hcfbcgKbcf_ q|&s/e;Wo?!%+Xz!eL>'aVEDD$ J z"Y"U(}_YܢLģWED4Ǖ5:b3Gi]ж}H?!Dkbaeiheep gCK/uxr3[{ {{{7A'| ROF0GFv1vLy FRg`=[_TyQ}]ʂj:|Z&;5zg+^]v8)Wo3Ms= 2W|dMo5`H 0 eî`e8/{F ыF"Ld (M$M:* DKGkE{3ygMOLLre,,y{X D˱?PЙRKUirPcUК"Ȉ"h3!z: M8hQ#/Bo30 0j3VLL9,,ͬlمٟrrlqr@k;Gg:?Z=={L18jTSWP=!Ґvɖ})VQ U@_uAM@P#[sYO|i96ٌ`*{XS[WW/WaDcWfm_;^< ,{f[FF6u[B$3K2S}yz 'B<3@1 a <@ ^@ x &A^ ؑ#e&-)pb^Ȏ/t1]q#{GvJן֎G?&P뿙]K'$Ѡz{p"$~)ΜH $ 0Hn$?B* UZHMԧ3S;w,cM8@Έ/}عC P-VbB P 9qݶaJh:`]NBSx]ƊπΚegt ^qYV" YbVTTl%ϲb=k`T"AryDl]nkok}!۳wu (% ***K 4$QV (C`۫Z`ih.{姴T@am>(-I@dqqMdg vq>S' >OdZY~`($q>&! CC0R7g{0_UH)('XIt y2ǒ2a!z:CͿ"m=)~h3};پ;oևZV>we ZsE~uz`"t$!b~^NֽiΜeEjz/}ť$=JPQcI3-TR$voG>wqkgv)^_( Gee;lw(D,).ur@R#1dٮ],jN>$@OAb IG _:\F &UU%e**Wm+<ě/Rd*+,IpќܖΆvu. αɯK+k AḀd(T^ٮQ"6ƒIV|U,_b__oy%CF{ 1q:52 hGKq9@͒4`z D4t)fpaƢ@RؑOpjZ: IvX!vɔMmIx7iuNUpT"VB^<7_ \3V,[n]JTCl$btڥKPRf:|~Q嫡΋7֯V:U_gկ|}%$MHE}JWb;8dڶkChx_oUͷ>݊WcEN4V2 2qD Gji$UVԛzFTJmzAġH NDŽ >> ]s`Rzd$emf :{(I2psI2$y1Q` rʖ)? 
pH.&#s̱E>z(zT+V~O%S E`e?|p6O6GNWmחc=1.ʧڜO|.|^' zcs\@fZ+T<581*dtR+ m߿ȦmV2V_Ҫy^ "xo/y1S6.#.m}گvM.LQj@]u9L"Qo驆 nw-tlCHE$Nܸ$ĚHÔ"iLmIJM"IP(IN3{Qb`Wc@2_L'AG%q^7&MlR++eT0%wJ58؃<_ֳ]߂߯,)γɦ^0%tNJ$7~m{A~Eыkl7fVqjttϊ4z4@>w|M[F'O6JT:i9hJŠt^G|"EH@5;$P_ZÈ4t< qKf# !j@KFީ Jwj+/PЅ(@i-6ٗ8/+p `BF#*&Eb'FzuW5ՁQa\-69ȌjD'Q `coFT4u`]_R IE;/^>.<H%( .Ê6M6os !cFXAKXaG^p4(29I;YQS+) Ss3yR9.E*3]oV%t"Iz@4D sUo͐Yc7m0ɜBw on}uNO6 ?/)^ilû>-̈%WGN=(3n &Ou$J_hS D3}1ɈZZ 8*&rڋWXKt@ƚ*S$NkUk8.嗆Ȁ c!‛ M&*P=$Ϩ+YpIn6ǟ"4,Pɫ<%j:L!BeR "OD m;^ߡHHn #e؛V5kMĉUV\ч hFb%?)/B,dV91_SLl>?wBgX'eji&GZ-5RB Hbs[]a-?RVnθ㿬x^#\Ɍ{p N&4ΖVN1[6oE䃴 HrQ5ht4@ƝlxO#Euܩm~mnmH0.#F0Lt3NP$0)MX% Cπ(!T^; PX ED"}S*# [?)Z`]ulY6Iӈ*4P6ҝǩ5:d ]pHyBÁȥ/}L BaoF3^HA}3)-A/uc 5HCMx}Ix,[ a >D@c#_/~~/Yb.1;HA)\M\@})M'udFMq:8p#KMة%OE:p  )3nh`&pcbvi&I.-q&0{Tm$Jm3''~ ^`/6s DCep(:ꖁXfRB1(k>d[ui^$T8pT=R_$pLػ68ºZ"VsMmOOp'j a=kCmni/~nb5:\`dEQfai=nko]lvij>htHPc>Ƣ=z8|/?h?$ȉ.0ū(.r =P1GHq Z3ءLw6o\>mMTɀQw'=&re\3umů.5u+,Uϻ\ U$La:nL@B3>t5_<c?*^] c.66\omR͛N N ]H7JLAv/5z}NH7*̣U' pt<OBTT)D AŒ%I XPi>q)fӆy2(Y z{n6}L3Z` y{=dwmtT&;Vly_ف_Ob&$Ɗϲ߿]U hY$Kb|O:& mWT5"ĉ)UmcڧBCv}B'􇣩0Hnme >Z`2H;vtɱYFYHEJ{?L-sd_3f:-A+FYt ad;v*\Q0"p^Nf|ZZt!p +ܮҳ9#S`CEh4ŸLFdbO0n߱v#nTp!6|C|b8<>8*\zPyyK_tɀOH? )DtVy='CѮ={_Ϸ)ZXQ?dL %^2&$zIwuVau4 }Aݱm%iE2F},GvW%i)ZFHCsiF.1.զ.U=ˬEFwivlH6U2g2J16h|%d -vp>Z8qD]dwPS{:۬UڑzkHwڷfSxhi#ؼzwbTe+:3̓y$[W|N(S= s [?𯾔mz+ʨѠ|HL(4L%6Q#Rv!iq:tkvɀLp.éC1ugz$)d+>Ξ))1NVpDDh肃 9{h?kG*#}94XQ6^V&0>e;Amٶ/lP57)^i{1 @Nv%^J!`_^t׳~5FBc"Şc^gQ9U:z"*lN??Ts*OF 4V Oڭ73.ihPA6/sӸCRFM.߹c7BVhbGҌwGrM5"5h0f℉N6|@0`mYHj2Ϥ_{{%-CLg:?:GsCnde 8lߊ. 
U&"qAhPAl& W_}]~+/~ͫoUUX'j¢Av뉜d?,?eZ LTf{ֵXG| -d!mcT6i ]7p P F[/ Z?U' m*Ӂ/'lӚ_6 <){PpCX.S~:v3q%,_q@x%i[8UHx%ݯu}2 C{6PްYrZ KkI~'3@@-d@@З6bϓᛃ?4WʀVƪ"@ƘQP4lμPg0TXbPo[͋_gE53Nzڛ 緿[nz]@]JF) )^i0ajԊbrg8_DtZ!tk5O`X=JGHr0U6óe]F:,cqu$\RCe!J_@H,tR4ͧCeyfga{寴e6glIe6$}RBd-vD+R7lѣy`i,܈F"8*֦RR jR9Tv3W2u(V\3E:U IDATUg[Aʇ>C?`{ ]3- .q]\S~[EwEIa7}{GmiIc S2av?䙼s xmF~T;tw`$qLj5F&MҞUDՒAl 6hw̌ 88͒ټM@٬@iV5I._ KyYTOW*OBB@Igf R"4n:EϋSQ}ֹF]ЪԨC0Mm\%UR>,tWqCXU@IJ5H@(|;:ԚU*wBRg(՜"^S֎Eo>2D#?T2w:,SXm-kKi0wT:С^$\-G\-mݲ֬]#Fd%#A$˗ 5gvf.QGW|w^vh$y<`f7$zwHL?Pq}xsIzxp֊ʌ>gQVaL:4AӧLHg(Lˣ/;/)nL efΜiO;,ڏnV\iv\ʖTו~]z-=y-[nW>.̀ L|hyH2U bŻaZ[l+='^_ъo@sߏe?|MX3Tdwtxz,*V~VM"wXFNN.j[p܈@AFI-Pmxs; SIE WtC ̃Qˎ [%*Fʪg^"b2  F>XV#׬1#[+[mڄv^]mD9͆l;5N <nzUP3gzÖq|>_h۷mtlA<_ҶKlض6Jn@nmyc tK;$0T6ve3fҀLOA Lw$ā wv؟x/O7,ZϝVzd :wz-D<\iP%M3g؜stI/xr\r_AAU]8P`lcׯ~JmܲYwZ:zҮOtx-+|LV\iz9cs.,=-8(y#!}$I:9qC)N<+PH$ǀ $+8h t(Sa xyioeXrt?#5%z~c؆G=0TX< ௕Y 7f}pTbi($QNzB0z'IXcW<7o?GWj>c 20T?ye sE5$QTe$\,u2dUetlEqpަmz͛6B4:TLfZv̪8 UI^I~O~mh5Z ηB{R#1Κ!ZG h~#M68B4xWrG+x d -wiFKJ]!d-nͶ 8Efz:S*Zus܄qkR|SDsRW@J]8 Fޜm:2Ө8Uga~:M:y5ѥz/Zb(AjW@y-R^]43Kdy_O6G W>[ο?7Mcv+4NL4yLw< S9V/CWK !o :HJd5{=OY 8#Of'5_P[K{@9V^`_5xH6]>~% 5?[%Բ/!R/Vn|8z`!~ze4%hP*ɘ&t:Y̻SR MA Bm2AdHg9쎀'I.UkT%pf _Ial.f:0H< u ^J65* .yHa?nhAߏ>\=@ą4'~rްM Wfs~Zj@IZg[#,{T55N|Pٜ:udفYl\XbA]f0 턲Ń .]̻w#)17s0He:Fz|PoevQT4wѳNR8ɀM˚5@nj;l;6(}Z/K ֮@ iHObϻ;Y3%~ߜ 'YybFAB~m{H DKFq>@d$rP^D]N\D쎯 4+tgj.}*Cc"p #nЫ |t޻&ȡQ:a % Z<;HRk9\1a(F5Ja|83WEBozF!p8ԽEvHԝ֪'ߩILׁy ˮd4?*IYKZPWo6wc?`"c%>@xi:yqtMRձYPBߡEA6^^/⻑^x Ri7(O"(/ bk2mUk5I~D?Wy[luPwS%c:eKH ;}:_o@)*.% $Hg/ux4] 4 }c/SK+z ҬZ%9?‹l%R/g1S08EQkAM ='Fr_)!K] Pzpx–㠱f\"UO:S@1L= p LJ=t3{n|+; 8U^H瀩 (wsu `1QuEtv$oKh/{W ~N!E4}b>F%m}__W~(i,^Pғ$ P"K3}0qå%Mt礨<ô0> K})ħr`yI#ti)Heۚ]=B-4?_H6mJ5]d }VO]v|PjgYh?}=c 3u1l\M`]{g.ٯ3 F朘DGR(X4B?䤔oDo "2 Q!-v>gwka$EkN+G`P960R!{rIǥH"O9M($aŶN^쿹f3ʠ[nQ-WxI ޒX(OWnl\״t @/PbN鹰Z5g)N+_4NI>nk >w9J ,t3 zu=Ț/OhP84xs W%OQ(P#x}V VٔWmoz.8KB E0sMtF$(G_ͽ{Rh=ˤ.a+?˴~ꗾjwFZz2S=՛P8#(<@( L#=::kʨlThA vh]AA?9q:,Ɏ&;} ?+: OrI09|iCd)#>/{(ט5u@Η:6/hܨ/|?"]([!~@ 6hFz[y=9 
֦4y `-'%ghMeAdxA$o|0 a @'x/~/ !E<ՒfHz)) : ;8Yň"r¼0HH/ z4P @KV <. jL|B<F= I [&}l[|)\aiN?L;Yѩ{#6/mspL.v tU*S݋B44Dc]yB0YRxe^B[ϓΟk -+PQ`ZB@8\]&K4ol{ֶ`(B;qfv7k5*?6`RH1(k4%8ũȌ0R I E  @{I}6ﴋ_ y#PBS@Bq @H# " \ vO';]Njd< )KLf?ۖ8ퟛJTZn^ pOS>[(= 7P z5A/h tWpBW#Qx&q>7Q9Qhf IDATH'߉k s‡O$yA6sG{&>uˌnvmחnz[J CKTCHA})J+-[3t2L|!cDH ;m|ں7j3=NU۟dlt[ |`VbkW9[z[DW"PHw/}(BS(jOz[j4?a>;a @prQLHaB<;i!wOd1i鴸9HRbt! J;h P`k QԶb⅋. 鲑Cڍ=:wn?@ =4=T'NiFptKM~"o'bqw?C'7Wt1.+aჴ]  zӍ$~D*Ųv5?|jIv\S  9Vul/EOYuŶ~[w)|t*&m~-]UJEߕ8M׽޷רd{mN0sp!G00bFH9 ;  G&jEQFIWV17ѓS2TKgA-7{ 6M?k Z|23R AԦ8f~ܨO@ug!N5غ"A ]d29@JgYsϴ o՗^aN=%@D󯟷m\,ǽ{eU^Cs09 2v{Ga4HWK",<U_ER!ON]ZW[-WZW79\qm>`4U5e/@\=<~"M=g|AA#hK;dUژ>-?}wtp~hgVo`7,]ciSfLk F^7j" ,3nV9`g0gˉt:umD`LI'.}R ɪYz۶^{k^k_lk}s|~y6s#Vqj9QIxГ(0|GKm_;P01H[H, cBA B[C:lG$>몗JL0`l>J*-#1A[R,UE`Y)._}Ze@Au{Es3~NB>ɗ.ЋqeAZ" қ0u)%A6?H5k%Cӈ4^YKL? ۶Tugh#,zQjܖ঩o֫܃m?ܘ|QQ1kJW(Ufo@QX>f*ajt<huϻ^_3/uZ*6gnɒ$+00_]̙rF<—FyThDPt_| 6)^5@v'zGΟ1I rb)Oci$Y$AVpU Ѻz6:]Tpu W |wꪛQn ]W[[߰O\kE3%E,'tftkeF a `s}ӰaQ}r\>7hhWE'>ܥC:h' Jz&q|jV4GS 'ghua#%yL#qEW3ۦ^<mX–S?lt(fG{tس%0]lLp5 Ґ vTmMP? 'ilƎyqrvlÚ>9LN<#f.]ZuPja,~i@R\wOu 摤}((KZm[<(9:z}0!?t#a)$d[Nr^_zH h92$ݟG,㪘es²'%ӵT}nwtΑb>U ڠgq㕩2Q Ơ(O(eE_lP)ly0H΃4١CtqꅎДp: EE9X5ІY'„#x KH35~ FHlČ5;;](9BO;p;@zz.BmGx)h~_:gw~ͪ.^lc:wǰ8$p5G#T=b(Jl aD*/T<hA& MC"Q*Jg-n.ŨR]n DFZm   O&Qbl-;n-U_QuGc.vhG`EMQOH+i(N%ٜu'כ54ѬUB\DPH@UGU QAFf{6ɮS13(>x|O#5L$`tr$Wu&.5/+龧qB#J6q_+)42Q8X1؈*Zo J=O3Z:r DUfh)iPgժilK-Pry#(&!iJ7U8:ߥJ:,ؠnF:u#Р ^ vg%ׯ ..SGlǙH؍#Prہ_}OtDtJcI@2Q\׉FGaV3ÜcQ8ŵMiUyڊϓV0dIy,s%cOy94:]pƕ̺vnҡM#U8"ٌFJܧ[+ 6>oI#GuKgx}!tMN%m ,#}pXa{6jg{pw Bo/o@M}Pu<>||wp*㡞2jRqԵ r w!?ܝ<2r6HI&eܗ{鑺/](@*ő4GsyfAp*!C@ t~-6IK"} eұv)aηNi EJB8+O2ogs?x_f WC 3`%kG/A%hN'Ji"pFk: > zS8$vttt Bs@lko ҈VDT&q9zхݧ=֭ŋ<+pQ}P91byhu)&HsuWNh 1?Pg^IMm$ӎBR?NW'((e* V+-Om+f(fۿ}jʦ^vM|Cr$ l$@Ndc"IVGDiBlO~bOr3Sa[]颾W`A J{J7I 0Bdo Ns+u # Q)``ʹH'}[X5."VۨHы@*S=5P|^\y_PI+L5XOeuP;ѮRc#BNuXHf;_F]:.N5o{6$@9ĄA!8Hg@n߳z5Fs٢xKTVtWvϛ2SghJ 5Y up`ڹ|DpQVh4,TkrG$HlG "pn? 
$*<}aֵ+o/+O^=!)4ű>g!q𴷿*/̚W*!x$`8GyPM`[e='TWpKfR&U 1zL\ Qj2~GYyݺuc-Z?-.a:+Zҍ\gE5aJUX X4u7ýCrGꅏr-nݺU_DqݴqNqRuy ͛6Xwv&)VB:bz7ZvxM}ӹs45w5); cM/"GT`gID]Ds˱t831HkbLn^t|_ I;Ux/4Lzb}Ǭ[\q!ÑKJ@NĔ ׽u b"Os)^.$>4bL8o?V$.a 0\x2S'(#mOUH*rȿ*{ o^n ;l6!rBwk@]b2t֭[>2?cm9;HdB.^C zzen2֍֩S: 9\J#u: :w&nrаmrx945.S:я'OY3+eLuƨ}ȼSFk]>m1t9A2-vl2TT~ԥrV)hb6?{,kCΕ恽:q瀨Jyx4@PP-4ng$hAG$-D TH88.5}+4fPԁ&@Zʗ۰ ZʅHOwg*@TR.Hރ-P9ʼnc8WWFyF;̳2tXՄ hL#ƫOo}ᇭJȳacAhP[_o.ڼeҚ&a ճx=&C%0BO$h5$MpHzd@l8Td^tX-_RcFu- Ifp\N@Ј$rlyT4= QÄHEr' ^L@T=;:uk$Lޑ̡e~YN !,@ٙeھ)peT HسBc|0NUIoR͞6Ac9iLVgeMsTtZW:@:$ ͇ ԰Imf۫k)_x'wujPiبNUʵC<3ub1sM|sΣrfN /'I [N<[/8ᬐ"8Rꓱcn suh|"2X)@$BMUpDb6'bxh4b ?/!i#aF2Dq1Iʞ@{Ψf.n+g3BǦA텲7WIJ8wU6Pӧ8$;M|tP^:ذztY~ tMHPOu ;wxD )RdF`]Tn1઴cGF1cgn+eI/-ieVT*:G`UJ3Pfv X_B-Wڨ'jт>͚t?xUNk*_Ӂf; ՚ev:,ή=fq XEڸzu֑`ӡARDfi&8J[!iKІi q T]met3 F@(wf`@26&W?iWeYQ0ZURQ0"q! `)ЀKXGA6)(ay%6OSb66X] 82yRWݯ:Ou f34_Nh f͞e[l f؀G򜉟Uևͱ͵uvFaէE" |eu$?m~kTF*j[3Jm`bÀ&S5󣥐s882q1۞]ivEv^a/[z%$_zU5<w[a $ڱSftZ- Iy|Z1<Tݚg@: htvY>gC80 $#`KP e_!BOΠ{$I/ٳg۶m[>74nIa7mm.=x)F2 ۲~@W0ɱSv.Pi핉DHEoCӰ.sI$-2 JN ^C/]jSj4KJxāNB'ɾlϣ8+}q5=đ9l˼-7kwR]S*J.9'.h[ڣtQ~M5O*JsbJ9 %9JB9Ȧ#1 +ei'yz4O>%.VlR71˘a]ĩ"01=E{􎟈]R/b;Np$ӎP,g7 22#YNjRI1Qtr)P4+J:IDE3jD+LP|m$P~<}TGh Z (swN<$߃~)9M<%,K4۶yBo0/z .s_<콌tz^t+lC^=[rt`uA z8$I:r1to"|-ޫQ{79X"=>U4>m͜9N? :}c؉NXdG"ⵯ+W`I`ι{|j7v"8Ryt ^tI,=|}J%4~H.јGɑ 8L(!Ѓ{]%(N#YI0#AۮALYd5Ȅ QAp$|!9SU|gO…3%˲R?x<,@ z޼btd 'ǎ :}v}_Uox&:30<)*Ɏsz(tL ob|<&ynLIs4Xsye2N)7zy$z^}啲I۫W=Iqh$.eڬ ?*_.EA0%>vJ~X앥-!QԡDdezO4 8!ٚQ}aVMfȷ^J{w4g~Js@QΤ./%bЛ{߄ȟ{ӎ$zmuѱTiI+NO~N_y9{^ 珪^o*wv $ͦK t 2&n5$ RҎF)F]: CEGbZChH@! G #>Qp5dp÷o!@ BRNJ=2W\hjwJ} z<޵flɤjONpbVz\-C0!?Fx D_!M1DfDzIܳ~$qa5έ^00} #y8 qFs zZ!;ƀTJI8G^eݱYku_J8F)>;SX"ۢ킋."ڜ*w ѝ ^ʲCHfےel=vZOsc;[Ѡnzopw{$DҨ&iyğ`taLLFH x9DO40Z|VOI>h_öS9b; - '_dݰv4`~P< (=!q/8R1zt`q iIG\+҂V%I%m ?.a8FVˈ$Ej䨖3f،Y<8:h$A줺dزܫ,Z8vuv/N|SܔߍV)mEZUIH ͞s6Ht{Ϟ=uo̬YC=COڑ!)ܼ[NS^$+< Wᆰrْ~X'2RcKm3 "p!!^R0 gvl%8}nv&c0"V*m5Zb! 
Ip%ZfO`8eEu#4Abڧ,u}Mm>]Vsj3H \8'&~W,#1pCusYE]qU>hɒeޞ8DF,e1, śA+/~7Q+И iI2%H/\x$c.a5w /TKw <#9>32c<Gތ} D+ =)I#UVj Xb<#[q('ŭ,LnZm՜'$E ꄷݟZy4qDl?X:1+iJs|9/PIucz#xoU_q8V4giwW/]IӠCȕtl/ThrHx_š0YG+7 1`,7͟#*Mwܡ"wH)5ҏ̝aAT&fw_qrWkm@i AO^/2G[ÒŋEyc˘:#G|<E{ر 2k^r,B#oXt.@)Ճ",#GCUF*C׬|w]>iq6JR[g]?5C[V$07p]סhϹ Ww1OR'O>D9{#|"aw%.̟R0٧\3;^-l>8,T}R/xD 8DbS:ixxw,HZNT"3DF4Q(z5a݆ QG'@ e(=F&p'Z(2y/̉+Fqx&qJ"@ n79` $ ,$l!8eY30fLT7J)~1i:M/{iXk+堻kw\J")|4c%ŋ^~ы$΋GGFQW:-$J><IW:!);4y)$_Ño$G:tLRgq:)rwFC]s󭷪ԟ܋WYdbir.(lظ!Y&d|F^WEI3@8ÜbFg=[ڊ_7I88]'p)$HKW!ǜCp9"Q_*8`wG"6PS"0&7H1_f8Kׯ7y٢ B>+i94w]S]t?~cO9pEUk΃0 |bDa,ba%?%WRٵ[hP~q?99b\`)6fČ<5H@>ůz*+$/_R& ^.D腡;޲R&i d(X=pwzJ(@xG4GHwdqH5uFU<ҠJ`w0p zau Fd'Ц>,UNI~c$eRý/ G}PTǽ}/$pUCfUڸ-u8OhF:86ִǙfǙ=Q#X3#ynq,O03ɴX;gIT׼޶E z a(ÈEI21 _\ $< _7>D"wģw.?=˜y3 I͠%RJ(cEj1'`ɔfkZ,rJʌ {HNHԐ\uGd[a5ڻ+H̨@ RʙUK x{K3ewG+gol25-ښخ}q1B<=CocgjI~hY`y"R|ox3,m=?(ЇlA')"ӦM_rIغu-Qtcè0 1-DO\PKqޏ9a_,ч!+$過'.~%^wS孒02 H" ͜lL䉹 $,务0Y;Q_6.j1Tt_b<={ Bc>5ZܘM JT(DwX+L/Y9A~I:T&>=ۭ̔?0i0ed[=zy~g0ɕ-Hn+8D_իpktH(8HVsL8yKD0FT d ^E@(.w1,UQ XdIΙC^Lh.LK8ՠ,ϝLUH^SY̚YiP%'iS_(wIRL܎2K;,@Padzﰒ&z.w;80 Qas $ب]55>?\un7a)Vp iv0?c~G?UVlb)iAHq42h z$bCsCvlkȨ 1 Hdz֢yb7L,ڥB}4.uetɒiP9V2ԥ-v({w 'Lޏg?Aa!! 3I6')Fk1#RsoVgy0;wNx>zc^KDdf5 AZE>p8xW]m;eW^ sB!ͼOңET@dc3[?Q:#}oɅx`J aa>M!Tk#z὚ +6Fow(ke1IZ`l:?E@YmEru㩋./:mH$&M a06u%L: A.wp7ì8j쬗$x> ̓? C80 E.*@^/2U3LO,CY<đ't~H- oi)s8ḧP=]zbEʚ>$jC$PmGTq ~̵3LSYI_ʍ;shKS6nfhY4 )XzamwMGbVm5d&4\^ nQ*%r%PdB\Y3&pܩ6Śv00%uS]XVv:d㒜ܪߧt!ü' om+BR#xأiڤq &8ڈh,A2Ql HťY?{y=k h /o}փ\0^KXts<@Z?`,3Rc>WW j(NA?EV"cta+Ze2lnp|d*APj%2Arܩp25V#mLj$o%]ZX` 8pMacPDfsG|<"씽ϭ2 DuXᨛhCvNaLDYv?3&tq;]P-hlͱ yxmw$a+d: :c{-w 5 1!ha?!}2A [M}j7hD#Ed)RV!*,P&Xw7"xUù:x'i 3\ ,3 Z3fR){HpgUxsDRp)߆;,Zt؉(.%vK3FW@~y:#:G:q.hFC0x J2J „ IDAT )_;H{pȾ4 Cg,I:m~ FH6 u"}PپBIP2 D\e :zAl6!G I/k%ך<>Oh,K0s\FxPQj+" [ I9 ;;QZ(1*+4UEg٘o>rD {Y"q\H;vlq(W\9_Voא|vQ!Ez1 pJbyޱD.HGc@, L6ئ=.le}Πpm䨻ڛ}kgШ?? 
/Zb@gdH] ʫs%}@0Q$xGLF7 K wޡ3`<[ygoT@M@*I'$oPs`B1 s11_؎:_9+ lSf QRlj ‹ݙC_AX =5\0R9,+;{,DUDxN]щDpYirxC10-@Mu[Ct(FXa#zD9aQfzC6,_ՖOp4 gց^t=>iC\>y޶}%IND6uFde}K[KS`gqMϷpW_v I*x-ᜳSd&eƊ @2خ)O>Zq8e;z#|SLG+\Bxh(4G֣a1g(G >\ùcBem!+Ia? `XtYظy1#Ifƺc:0>o/аPW-X_QPaJ wz:\C8:MEBA^@"̵ШrHuvR]XO;E CevFd_y[tW;]vqQ Ef i,A"&q0Cj"!d3"C] 8Dۿ_͟tKpIE!fxcy-Ñ$Cvޘ ?4H} 4 Qf 9+[%bE"PnEg3:uJ&hGWt|eJA2Sj\&(gh~5 U  )%tFDlbb$56p9 Z:W[NjڟVZ4Wv&;$]M25\wn҂>`mK5mʜ^=/YDGη.%{9C!uڞsq2iOw4$2=p0)􅂸%Hi?duZKڳ'צw W:+z/?t[@ ^Qkd:0S7K{vхZ͎'v7d!jbqF@te 4HjD}1LhR%: KUJwjns _ Zh)rEa`ĴJ|,,`fxg InĶe%fi$4da&63Wt Maًꨛ1G{kނaCپ_9DM&JUz*L7NqaŊᥗ_n.m;YFrx̢, mhl-Z(}iޜwa)Rl@l&itjFׄ1C pSG>0Z1O EC[aTRck#t[A:$-F1'ifw$L` w R! Z/xSaᒄa)RfX O@=4M^$ߍ4S\pQ@:!FԿkяjgz`X׶ƊUJ7Վ B -AH>RaHWfRiȃ4jƎ*_ 9[[ojS7i!{QfckԹhWK$H?ǟ۪clG Of2)#YӯD|G@ a=#)yzEiGbP  Dz3uJpΧ9)y"YBO-˯Fb>E?t&s@te+Id$Β¿+ 3ݹח- / ~Z9pmeX.vY`t0tu|&-0/8 Heڬ=sXU_x`1@4tew&:n$_h  X!@D&/Z0k@G'P)޴ichyeυTE)0PEr< z8bE;$d~&W8rZB@jXzp$[yȋ R/ʾ*;tQ\K8F)U3 7 NQӕFӕILǪ7"~ 8jTFLPgwsf&1a~-?)|XVPfl SIa%,DV:'q'=U1=AJ+O޲$H_@rN V*$qYʁ4/wGNS)@kwpUfFy4hu*CXQnW`.Nut`y@-\tf_5d;dAy%pEq8  0Պ+kaogJ;XA^ /}07>dW24͌IZᥜI5#Xbĸ%2@ |$Cq<<.wHH82 Kp f ɥɡ!sz?8i If ٳU螫U]#A"]5Qvm۵X2dixvGU(ϕu:x$ﭖ*)ti+yo@“O=B҆ m掼}hϠeA2I\Y-bCؖ +R TH ﳥh~Ż ӧG&58H@|-NցDGԚ4F_MGsc:DoAfB] mZ-e?OӓO V(A2?)Рx+:]@ Lm21I\js׮YC4Cnj HojSEqsX>">.NxF]{CZo1,  A;ky8gi9 /o|tthI3@Hɗz޹ɽ#DSξ. Ks<0R+-V$R߮Ϧwyk"=}ŒY/V)s\.DXYN@#O]G)Dg!J1v?#2 /l jdv]rKD|>ܬFL-(w v*4[/epJ6R|{_6|oXwkT4[[lǟqXxqS~s78E=\}d0]e%I *FcoETP,5dAʾ'GI$'~Pι$,_FcL%q#~"cNTgb=GM7Nk&ሑ}Rb _TCPf( +K. 
ښ*OZdF$ӾSlۺ5lCGNPZ64eZ:qᐤ}с!Eת~lAGRm7^13TlI&}} dwJȴ$L5:3NG& GVGU\ )#p&/iwYduvn0Qk 1R8g*X^1V0+C$ ݣkb@F=a-b||cŒ I+@2GDhšW=zv( Qqea]'%Ow=RapB{嗭` H<ʶQh0s{@wݥLᩎ}ա]!:XJ9M'-!bzHH.-FVߪX{7D,fQ.Q FA~'s6:;~I50\x .4-P'DDL$u}8^DTGK<߲IL:@0=(*!/w>\ib,'I=`yi{bd$̕+%$F\!u=w2vCٖ WN~| J9wF9#*P/lg۷wOh\kjKGg7jY ܳυ.\2\ygw¢EΝ;ıw타2Z# 3tJiB)<g7]U;̔H[7kBsNdϜ93'+͟Qaޒ`*FfzL"\̓bB_y8&pz-H+tZD* LmrO(b˳@ +f ["*7P4: RtnOFq Y"_޵j~CI:m2zѠ-{$-(qaqO%vkzI;|Qv$O=~Zi׾W_O?C=TʞzNWD!pdahBIwu7"T{{qO7tI=QJpF780:RH~])?L4E9_X,̉*U I"/X+߾5!}mb K',$A8OR"l=*|O{ /_T28+,q$3L4BA3 IDATΧ05Zw/%\r<ڽWxiq/L7spט1 @9iqd2x0L56na]Ev(zR[ciau@m-_+s^jr0 (|w^Kr<:šaYº;&8L("F5|iH". -* z ɼd`T9S:7s(Mѕ>IB7L<8#!։{M7C]ѹ++!dHcQ'IJIiu 0Zt gfa0DšP!4sLMr4z3V;ח, eeKo~]u }P Kd]/6op;Z`>*˝ڍ*w=iJ$ςHG:uF's`??̞3'KN֠_edi,UV:8ZaB%LW )&@ٳgG "1cN!6./24ĿCĤ35i?n_&Gm<أW dݒ<%-WjDXS;Tdtx.\fV&H ^9FZ[&.e|Rnm@_4 Z^e 4d)@2s5Z62Q`E2&2*a ˡ1) IH,aA0r4*Α(cC8"g,H:?t5566GX:ê5jVs4䝗x0D'5۵}[x񇽢và]:{ƮDI uɓåx|^ (IHmܴJ۶P}Ns|?UMX)(( oS}Q͞5;\-\`boy)$a(+.]mF}kRdцf_oY%|}jқ Y:&$(_3PCC0}V9%vT%bc(qW2Ѐ/tK5 < MHpir =!Q2j eg̚5v jL5ŹڢasO?!ZyFtQ=%pLYqqiJ$ML/ۏn:eqv$[!٧Wtի%aN]QoH;ZG9WB=31E9ړpjd (_oT[*T}3TQ M}{eHyԀ.Z:,[\ <7IZH0(Ȝ$՟47 ;up2eT\ztߪuhԼpdP$yۦ>Y pM(RD =aJ$<0GmI3τ)OHzוW/z/yƏ+pxtl<en6`b WUwiD>UpY. 
,R'h%Zj_ox^1F_4J>)wZ-ƀ=D]s] _B:_dE2=g/szǥ{=0veeϒn>~!Iq0!_l("9j8CHqp@úG?qlAVօ}4lܸ1;Be2z9[帞G9 ʲAWb1Ϧʾߒ~4g㏇ǟxBYZ>õ]tq]NaC$7mem:H7,Q(.J>v,zyR$sS' uuŅEV6^}G#Mh6n;Y'zU0JF5$$M(NJ#$/{t7V=i[tivcx\|.& @Dąމ"L}Eo nIqamڲ61NrND\=jH`jh;)e9sUaXfB]6nX6H`b-Q[#N`HxNhs{&&ؑ,^W\k葇Ȃw8u7hu+Ë jp@Ṗj:@q6m@qst-;O'iNEK.8ϩ k.?`ٲ2KwGm_R?gyVJ;LCظ₾+552\Y$ d"Zڢ|Dxi-},5)[6o <iOT> /{YG4K/3Lׯb=֓Hf4Iz nUFޣEweM QkȄ1b b  Yp`Q<[X ~~|d,Sg!Z?>򈆫˯2+Mk/2fxNNP~f4͔9ׇy01yJɽ;gpD׮Yl@7tz#H[:~ue Tyw Uñ4Ø_$zZ 0`8^t΍7,`8~!-\14RCbՆK٬=' b1QUG6z#WIig :e ]4$zM*:?px駬"sLiFeO=WPj=y`䙟=op$w]qU]Q}?ha]%ɒR[R5c:Jy:$O"%\>z.`k:H顃#5 RI[䢺c!7ChU X|[ ]#yavYʰDb'U2y<`(cȩ]sW5k7}塿~i8AY׉IL͒;4g6u=whg1,J/GE엢3ܓϞ5'lg^qpMp@a{;m A0M#&V2'o)jvtMd1KXnnG'?h9&=SX/vV,?XXlZd :0 8Q+u(]_C2U# ҜsZz%)$UzƀmnV͑"H[8q3ZN@utL],NO(!5P_Оsԣy+ <$zM˥tٲަ6C5ypPe5ք+VX<Y [(Cql.rvOֈQ*M勿꼦|A{#5=gHq(gi?k׵uǏl^5HvWZhPUKr-A1 =xD85 0}ƱXaq!T+O\C4|Ao;t|OUyTҕK.M%@obe ߾ǭ\"LXM\R(q j DxĎ7MH3d= ~Etq!>YtW,9t`K/jo.[󗳵1a:&Y:oqگ>sܰtabP/"4P-4+n l_u\(OE^NӋ3p.wE+!醾,zcht K55[\A8;ڭinIm:mjEm}3_)CKx?3RNɉ$M&Neg)i9gT$Iڐ[c=N~7£ X'X#rݸ.-7%^J]&xbBG.H&ProT.8YtWS-M\Whu\LOznfP>?=.s:dBD\&'@9LIv3*BM<(+䁸<:9OymqkL/ hֈҟ^!&FGʗ;{$$)n2JIr Y- c1g=RRcmMmX/{,s0=G e@E؇=s_8KCNMzn(Wtﴅ#EMzڳ6!~- :uy<-A Y[niOWL Fc^R >T‰4}Jr99Ο#d1cADT@04=@ٻ=䄾=F1 ^Eq;Ja}ɂ!q)m&N HO5E3I_2;"GV;$גV8:6F1癛R0yR2r=yC9hd,rUٙV9`(յu``7OMt#5b}jcRajw5*䃽k7nܡeuXh]Xbu^\1YSSI $Ie[1 ? != $(\E003Cvu# Vj:BqߨDYL;! ڸ?\sML)=kV@<$Hy%Yq'P2YAJ?/w]2'-[>EW6ԊcU0"A8].e@g>SP\wW^~D$#j(+59Cmfc@wNT9,&YL.a`! URy=9 9LCpˀ;^-t\It:R"L A>4ksfhQFSِ3Scprl٥HG w5@ޗFt"?3ԁe>HS6*#HIuɧ nt8ׅlkBՔZFz7cWN=q]tt|$sS8tޟl*>y5-,>o(@flrTFs_y"&gk5+ݨ{KRQIX0OwĴ(OoV#0 F$0kJdHKY#d@ Oq9ѿY<5prZ#4ۢ#Ie4p59UUhjoڼɋ*G|5腺9"euo3LW#twvA6A/4cmB !IWh 3ca$M3nI O~,? 
c/ oy:N)!w7KUszߩv F2 9lo #d[&?a')˳MA׷Pz QTk1*6%1Y&/Gy6kd,{e-f ĝj$ 49:fä6M[:ٶK囬-EqʮCP82y*ȩ#qG, 'GY+ Lg$,<ۗ<ӷ])cpnU(8g?^|9LewG󵿔zH>`sHhpwLѭnŨLEɡٷgD7 $0d4Y+ @f]+0Ž@ Q!9Ew66{ʾ=[%Zi_zvMטaOudIiUĽFIi3[+BW*,?3̈́E#trkp/C&2\ddksKYasݪ(= 屘"i">{dG(e&DwEH84}Heu%!7{,Y~/f86tM):T0VYt$4pp: Aa[7rZN) Fǥ+eY_y=>oٺ%zk }*]Jc=S WYBf2T#H(\E@C䵸8?՞-|٩{}ѡ42FpJ_ 3ĞɹS.fu`X["iRaMƳbt));7 imO`ULKb?6m^{%J|@_j[c֋dh=[p$}Ko( %*YfޒH[FwIP)HxDtƙL sՇ/7D; LLDBbO\|Pb%ag a $r \i 3M1O%p5c]<BUq-ݺ^H!Käb[H'189XC3F@S'HqN76rleY;sUz fJ#vBTy)Noph"$0+<'tFP/24Ć I!ͮ)nݺ%({'$GJ5q6!1jEr^%A7=NEAA 0\rIgU~0dA#)\aH~m v#x`X0GF P׈‹nj Z^f~2J̕G cb.hrTD5hN?utH@o57 A2F"VFjL?q)8G D`9^ E?|{:kr0qw&*̓e2td@GK άW1Xf%3T;6BU픨2( 3Oi;"]1ojfN S^”TYita?%R88 D+_7ӆ+$yW`h=/y"^0%JUʣ@wH52Q`Ii(Zp8"gΧ1=b;FvLĘh(`K=͇kK8n iHgZcgܸah_CwV_.a,H&4#؜/1Z"izLKz Hz&jjB₝:e{N_O?Lhޡ sELL-D nfLB۶mMXJZs\VH LJa:sRCeg9N›gHeXG4Dщj e@*٦XآX}ۮCS?c3C1Ydq;t|Ji=42hźBGll Z勒#a(aѡSQ^_[E$D vhsφIJ'),OW.ZD(M[66֠1QZ(c>]^LAݤM~tfÄ lC =Ӹ0ac" DxDs xp&Z1+5u5-lT~2zb2Yz1\:y _1s~3-Ny? /!*ۍP%j*RzSP4 W,iy#T;a>Vg#Oj%LYtuT_5\TR9|%dYy%pɑ=o9=RW: yϟ=Γ^6>`-ˡ1AYyAx3ǮWG}>,<4L!8hcaT)CEd.JSNbC](3u4FGr`[QG1Rpp 0;/=tHa R! ZI2 !&&Šgׁ/]Fljz> Q**x[EMdP=mVФ:ʖfӡ* aZN<^5/^o*8ҹIGA0\amr&'HH% ,5m4A%Wi9['_ghT&9cƏ Bމ @Va(,9a)x8a嚵>)ouC's&N ~V_@ dc 0Jfs5^5 S K3iZArL]E][Ш>7*l΃š埇;x]+[1EzSr])B]+p?xĠM%p& *jåNSdž*阶>Ε-cG\,FT,S~Da޵HvԓO#bĂqɒfئm(GōlK'Dc2y+9N6Hn#Co4?9MЋE0*V PXaRfŽ静Rb߮c\G[orr`p0$İ~ʲ-0 bo{C%-IGuˌH}uEEuxY+;eZIbsTջCua!cCs8$DD;>+# JN'#-aVjjES0T#S ZsI߲#u*v+6vK8T'TBd+P#OClSS2Qe$skEEOJ[emFTh ,)V3݀I*h3kCC}ysEur3kBq">ޘ  ,aH -F{:HI~CMl'qHgΚ-%a5sb~#(B)_g1 aFi~TIۜ ;ZJ6 HwRVR8'9֊jvҪC>ŲSTa/P*YY-̳M|L*Q5])nS=Tf< 5RTGY)wjXʢҝ {4w2\-xˬ;V\tmH{:-=7e: j ?J؊ܭCS_0axԷT&?Hؔ6e"W!I״Sq{Z}yORFjQm3诃B2"}Q|A&,WKce~vl ?TRKJ\s I$'Q.R;':VkRbJxIsg -MtEax.na<^ k`?/jtCן+RXao:J+&p .Fies|P!;>ܲYCSaJҡ.OX)Ʃ uhZa%G@Zx>XMx/;ΑzHPa_Hè`vNܧ͇ $G)u#YgϚ.)wb&Թ wHfV9⽏kw(l%ϤqꓺiohY^&Xg+IaU=UQ? l!hһɊeP#93' i&zMTo[-`Iu#.{s? $"h!-JTqF܋㝿q$5>,]$lBL&%@ oQ=gvE*0_X?y?UƩ{:?kppjѳO?c7nA@A1Sap06g3RYA6WkE%49E2܈܏ngoeqћXF. z pOțϳȬY$R>'KhMpZ! 
NbQRz]- 3/X> K7dj%N9n!`_z2 8qHݻ8 gC,sB>q];ث)I:!jMOҒȿe\Ϊ0T҉8Ey!8:(1cyQ;޼qX,_E~g &M߻g ul8⠈GB|-)uW/R[U❋وյh_=qssU. zXحtX HOc1bv2>Gv sfٯ.鳀7DWUՍ^۝~QGJB&?y|jM79SO>맫Pm5-vt$ks4[|vxeE Lq8`< C$h@U1R/[߶{G9 xVWORi+/}Hn9gGI(,Y У ]lj= LZYxB);@z*oJGRF=u촽*wMgS57ySva;a\ ױlWx}*S-Dj qȮ'-/ʅS^۸h#eY8>m<ӾUku''r_x@7Jsݓ'OqNMsQ\ IM -B|┯{Ǚ/bY^ Cy /bm$}U;^h34m șSbތ$Hll PgNuTXk/}w~?7L3A^:XzpUy$H.]fjuPxG9cA]@# Lj~k[LI?R82N(GE+Z!MsWa?0/ck9L 4<ɓ~FUvy>̈*lӁkik  IDAT'sIY&I ޫX]Cp%'dYaHN:0ze8ISm]uT8 @@7bATH1C.wR-'ϙ(F0b뇿7etTa2yWk]x{$Mm"Xz:K*-zighmvcS~fG!@= ;e$f8io:1Avd;DuP PFh P8~L?ϯz-[A?ɪ?/m;MsH3N/|@UXPB<NvǬQ!]N?"\zEq$伬Psql l@OCGw-VV0o F& DYke0UZ`L76Aʚߏ\I$H1b3F=ٷ|Xϋ? S{O& LeǂO?~wo7mQlu=]t 7jNitM{ aӛ < =~@| 3Njx īޏBi[P䈋̗1ZFY}6,.KM顇ķb~XO/":zaBiC٘!<?/mb2-1>03T.] F7-%\ ڤiv ,eOjpqS=ʆ'}߲7I|Ÿa06Wc*+W^LdygYqL7,Hz!Dt k?kɢ&}Ӫ2B*CM!=< Ã|_er8y*6GЌ7m˖/P2X:8R`Vhil}z܆dY߯9y왧 NCɬzwOݲ?.m_=$ɟkkYsE֫ P,\ߪK:^ Qm(_ s/  cQ,<1]<2|( ̏d_ʗmݺuVYk105M#@n銕n>*-oT%M 41Oܭ<]e7 =27tK ;:+k_ъ/9PTCE/_ָڵ6oH@xf,Wشi3 C驳gl"WFIAU2 6xB0 s (q([*pRJld2u˫ A@gAs~ vyI$OG(a+;Fz?G]fE"Nb1r&L)A^%!#>2~//P0Wҏ,)\픒fƍc`!ߐxZp4"ď@@7rHz @ kC&x9z k뮹td@9 ѭt#6{172Ԉc\vKnfa;h:Ycoy->n*6C$bntK." ٲr-p]6l+r"j7Ξ5 }w6>2H1@c Îvb*/DQ~y"sdRbF c!^crDc|%[v6hs6\FqxBO`"G6p|taHAF`DX>m}N@FwYjEFnj3̰Cxy^3*9u.\z  Հ;'=#@\@A<'|3Uڎ׬UcI%GE.k%vѠ\slaQLR8sy쑼~2/Ǔ `2{ǒƕSr;@D=xxr&l ܫ鉃׮7|kl㵲O{|c~]kɼ\oC| ['208RzG?@_#q6Luƀ0٬:UpU?dҖmzS)?P@ҡH:mߦA+!L|o6vlcľbMZق-g.AU$cF)?(h~٬FwD{%>޿@-MM[d-[ܗҿRH@^wfuÇiۮλ[N, vʎ&#I+r%i3[TlV/D'/+,(>[T3)ex9C1uD8XLru@Rm~Kd~WhG |o֞NVx7D8r:if[qs*۟2u-]Ė)#H]{,Obh+#4FɦlvSQo{qn{?`/dQ~.d=ϣ6p ׸Fŧ?){饗m:,p13wͣSqsf ^@k;95 Qp08t;B 6*|Gd;a @gπe쥋8Ԇdx'*g>V,9DI H'&QVGFp8ʹsOdxosp[! 
ÿ~ 9H8cE<0Uy6CT/qNrVg8_@7OOK伻=G@($0g$%nr+|b܌~;m8{ӱc'I7 xM?aoɓTC&v7Y]\9Sc<^Ư_RŖB ħpaH Ynqt`L0р-J8a#%~ϝ;ߞ{} #{at0 r6|9NnHo( S5jLJf[&'w2?y7’Rba:5kHIC<dScڵ ek[$x) i L?Q_| '>DA)$#xA9{ɯ V@aFe^~-{R|VFefh(.1xY}1:[@W>Eϱ<9᥽3jH4"Fklj۶3ND|x̵ڻמE[=؏9ptՑs#NﴱEK6Yy,HgJJ9 !jywnjeY?Ok&eGeЀbHnw1ARi5Am!W_Y>ao|ѯj [lZlh veYsP <{md.2e^̨uL Qq(ihrE^qx@֕V@PˠRFyxD 4uVZP!s1pnf˃T c[O}Ȝ&,Ss@%.MK#qSy1ʅu$1_Gz۹iy6ۺu(O=nW_xZIѾ!%3F[y|J4C{<S.stEѻFc|Y5@2dr` rZ@1 .!ef4p npm+rY j0 *ďxfKt圗PEf7J^_q .j7ߨ#%0%< 3}5*vQ1yr Sgp9--$vkC[{Hq1)P9k?z(?+Ivp(Ja̓bl0B<$/Nה O2E`U*/Nw8ب:cBc|6~4R&-efcݘ%K_llOo{*ҡ22 9A /j-/ăO؋gb&hW\N*.\ ܵ&b{B/8R\o$bw1A|ʮw+Ta%&MJs簫ltnpz'rpjqhwu[Ul۶ժV{peevUL73+) mG{#ƍ(:!Gډǝ|L{Μ:m{d3列{3@/@S^ =JisJԮo#5 P PQ$AǺ_-]۟J|[`8!'! "1}uu 4Nvrp^A>p28|bgcw7mf;"U?-`+c Щ[(v]m=bFFˬvE {̿lH@ʶ4omP7w,ɒ. $6b2h*MT+D;?_րSHx:v.yH)M޲J6@b3 y 7j&۩"M_4j<{!WgoUhh̝cӴf-NS2:%l]=}ݧDr|l*>+ԻpB"klmq9. @8LXG=Qua?(q{oتկ"(Ba,rtGWu<+g x,GHcB`Dq"DW|re:$ mAR,k@+Qk=D^iJ%2%\s̮={T*ZGα&Mt8Y}qMFm[;V.=^A, qs.|;"zeqxh٠ /f7(_"\x-irN)NIU"cc X2'A3͛6i/]ʏN\\i0@α .q~ljҪߩw =d Jr]G1q멪/jZ'CHh/i#*sL 9tFaÈ|QF%o8OHb>tp5!]`U'i)漢~mngnjr.=/_ܫ}9ɾRˁDAW&iOr6 z\ Gx-7w5X|qk"@@C4~=CyH0 / b _aMuv.DQFh 6gZ(G5Gɦb{cqgsƞ={n}Uj1skayh3M-\폢.28fƕZs 4jy7?KjI}v7 +mn'e:AKfG?{INТ> cޏL pq)8ɋK $eF96I_wP IDATh^ŁSX^%d;g5o1c2E~VhzEߙo}bE >HJc{jK_bCJ?d_OWq32i欰7_QjS1+pjUti͛Ƙ467ʼP!q $,3ќV'qx䨑r:^9{{F[3)RЌ\dƴ:XF@}$bs;JL@4Wƞׯ}RH˧RNbYK$be^06 o93=1b1, 4$Wᙩ(#4-d>_:tH yh_A1fq9ZhU{ߐ΂LySylN8~l.ʞ{Vyf)$[L$j"qҡ1{ő3grweyQ+|>O9J R Nkp<_gNuM}蒥`6ǎ\),qsk?>(S<Ìb9O҉c+ 1rJ 5MBYAjW27# (33%Rc>4M8wJƜs u<&| $/wQ%Cqgi3Y>wsmXbB~0$wkn& Yq:GqgexҞ<06xܹoR&EMbQbz}*iN5O_R#՞@<5sր3* 3CؒQa?:]XO},1&&׾7yH.8:JcO I"9Mmix|kg] p3x o(_;曛K)۲ȉCD<".ƵMFvm۱~;S2"`mN wmUʡZ*y\.xɋ^~zGYacb񢅋˛f} +ɼ0R>I8ה4OW?Q#KM݉SxsP K9 ZO'O#ؤnLx~G)v-{n8hFUy͘Q/p!DpIz zg>cq _ 1纃y1M?g짩Qhq)ªl7TϓqӦMu|r'k~xP>?{T%'TH2*0Q jgX{|-7< uܧ}b0KJ&;jǽqҀeqP`hsmIy2 v*ppR6sg*TW(@rX~F[|P'rR9=i'=qrQ;믻^{ߵm4jM&Drb+Yg&G3gEt@E i۴F~nK+3|GtS}@S 6ͷ|@=uJysLkoymٶELsypq,G.(N"AF|8a/m4,=w{c\z(@2{!nNLmoys<*eX}@"wP0u0#P14*H6$Dl*khmop9"&0X:I3HJ\fgDQ~ǻd7<&,1Wlds}]yț%K]Ͻ}|dԤɄ2 E@R 
ףF2[([gVFk9ub"H7oC-1}C O-)k͛J)@"C:IU HGfi|.]"mp8̚8 ұEp "VԴ[GaSiH5";p 5pi-(B)HR$]A5dpS#iȹB Q sB38G\ ۵6;sCfy)S\b:GdyC SlRu:& @G"A\9SbLWaUAt\y5 nDU/MӠ{/X93Cc[z}Cr1NJ&BsS ĥ(@2^) %|w)Wcy>y8d?J @v-v뭷~oo+PN$0(PLN!Q`@@L\b5k۶v:mk= ގ*}i Ѱa@[n@7ǧD(@'ʤQ[{][jmڻ{0 q0A&IAα׬;~oށ?KҔ8@.=N:!!mF@vΏ?a^hň#Y]!ͺhgæFq")ۈR92_ $/Rnxν5U/h[lv#GQqΩSH-eoZ4:T'[a1Fx X?Z̆{׻YS]9bb$u:&  $K,"0ƛ\Qcy֭[lͶl>~EquIKӕ= ]w<r̘6AwTm+% K}tL/HR)y(" a) pNNƖn#2*IuuP.(]r}KTJw(Vtޒ j:FA>,;hZMVw*(Q`(~f3O@ pѪ;9 =j8QH:rVfm:Vfm[[}nQRIY3&y.9rY@%(&g*ATG7 N3g9NMcdZ#G߰V(Y;m RY;SH$HVک.@z#vwV@7;#CǣOS 䠓8UPJ n8-w1tErH޺W|;5/Y(Pi $4S}Rz /FT ̤"AM?A~ݤ5s/D R d +gd7]#Fs&6(:e_|ҹKW,_ϐ~*@ rrlQ%j/êXo-@ҥ ơ "tr vbPN@)r۩b4DH我Njk뜜.2׸b8ew@ 5Z}ⰵUGB@sJ$EHʏ?hE bpKlmTCW̫dCԖW \% Rsx+ospl5B;h7@5l$ssG}\e\-vTWuL-IvCkīXrd"ǁNs}O'q07$fm؁{EmDNm@p^S=yI(6.)N>N~F/ {{:GAe3j#N?C $̫؃~O\C% wV XJMwPZ۞3^+ip@P$]r'r+i{L$o'"1N'">p~7Pnf@7[}4+Vى/ yLpZA|pl}=! ]I"8;K]-}nRiˎ>|+>h1y+b9mD"Hi2 tb._;a]h(.QDG\ Z;loRHEΑsDit5@T-vTJPZ8a>1Z~A3tvA(2VܬR3>ݿvJ54$H@Ii )yͪUK}xFgm8-wS\\&!Wܱ^#lPzu1)=M3NPs88EŋdҽW"2Ɋ}7VX]5k6Vr"mrGٞ~َ>ݹiq5|T@P rm/8HՊy6Cѷ׸t%l >v8(tss3G_f .:fXaI$kշda>79pRvWP:]~{vٱd@xԍ >hu#^WH 9@OJ][;' .cN$3܎.:lCzDd h)S7pf;-GPPu/jT*p]n`OpS+eUcFٰsk?q:myy3Fk liI N#GCU=[g[Oɹ#ZN G# t:xW ÜgZ[s\/SNEsY:.\2~kV5Q7Tť0(CӣFjk:%"ژYˎm?:'8TMt,9^~`HHVuUpG6sæ_wa%+dW~FaMR6DuzhY$SumMlYʹF8RFhSHS qyjsj](:WPh۽3Ose KsKIG^8?6sq<*Z6JvxyTSc(>\\Z݃%P63h#W\琧#=RH(Ce2ԣgv85U_\NdS t6=o2+1GLqyqkwO٩WwɦTVB A@[*̅t,cոZ{CM`nE6zIePRkó()@2R"QyI:8,>k?!-IDAT%?έ?+y `S:N H(ImĜi6&.s]Mf?5V%PO8gGݰ:ϝ֣Z9J5WиvU6zNQ7K ąbPn;𜒸io 9 $# 08U9>pEN Dn2Jɝ-^$9|BlJ\T]^G^geУ ʶ,>˧j"p^u~ ioX gRJ3ysO!(DR leõu8A9ȁ^2ܖ$N&,VV8RH]Y4:uzb"<ˁG`!:k}G~^/7Y-6YȖN0=|!=zw $Ai3Bk G͖vO-J)N0E+[2XD.H 2p|W28N[,Q# RuV&U p9W8&E^O!Q,!H P! 
dӋiIA^'En1J"@q\|x98%̹Ev:'$B@@Ht,P0 J^Ep8lc7`>l X@ ١yL4.blLxzJ/[r9B"?!\0B@@z@8V@@@z@^n% $ $ $L} Q Q Q eIENDB`././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1690201758.0 dill-0.4.0/docs/source/scripts.rst0000644000076500000240000000050414457467236016673 0ustar00mmckernsstaffdill scripts documentation ========================== get_objgraph script ------------------- .. automodule:: _get_objgraph .. :exclude-members: + get_gprof script ------------------- .. automodule:: _get_gprof .. :exclude-members: + undill script ------------------- .. automodule:: _undill .. :exclude-members: + ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1674962506.0 dill-0.4.0/pyproject.toml0000644000076500000240000000024614365363112015123 0ustar00mmckernsstaff[build-system] # Further build requirements come from setup.py via the PEP 517 interface requires = [ "setuptools>=42", ] build-backend = "setuptools.build_meta" ././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1744763843.256817 dill-0.4.0/scripts/0000755000076500000240000000000014777575703013716 5ustar00mmckernsstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702239.0 dill-0.4.0/scripts/get_gprof0000644000076500000240000000463414735133337015607 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2008-2016 California Institute of Technology. # Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE ''' build profile graph for the given instance running: $ get_gprof executes: gprof2dot -f pstats .prof | dot -Tpng -o .call.png where: are arguments for gprof2dot, such as "-n 5 -e 5" is code to create the instance to profile is the class of the instance (i.e. 
type(instance)) For example: $ get_gprof -n 5 -e 1 "import numpy; numpy.array([1,2])" will create 'ndarray.call.png' with the profile graph for numpy.array([1,2]), where '-n 5' eliminates nodes below 5% threshold, similarly '-e 1' eliminates edges below 1% threshold ''' if __name__ == "__main__": import sys if len(sys.argv) < 2: print ("Please provide an object instance (e.g. 'import math; math.pi')") sys.exit() # grab args for gprof2dot args = sys.argv[1:-1] args = ' '.join(args) # last arg builds the object obj = sys.argv[-1] obj = obj.split(';') # multi-line prep for generating an instance for line in obj[:-1]: exec(line) # one-line generation of an instance try: obj = eval(obj[-1]) except Exception: print ("Error processing object instance") sys.exit() # get object 'name' objtype = type(obj) name = getattr(objtype, '__name__', getattr(objtype, '__class__', objtype)) # profile dumping an object import dill import os import cProfile #name = os.path.splitext(os.path.basename(__file__))[0] cProfile.run("dill.dumps(obj)", filename="%s.prof" % name) msg = "gprof2dot -f pstats %s %s.prof | dot -Tpng -o %s.call.png" % (args, name, name) try: res = os.system(msg) except Exception: print ("Please verify install of 'gprof2dot' to view profile graphs") if res: print ("Please verify install of 'gprof2dot' to view profile graphs") # get stats f_prof = "%s.prof" % name import pstats stats = pstats.Stats(f_prof, stream=sys.stdout) stats.strip_dirs().sort_stats('cumtime') stats.print_stats(20) #XXX: save to file instead of print top 20? os.remove(f_prof) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702239.0 dill-0.4.0/scripts/get_objgraph0000644000076500000240000000316614735133337016265 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2008-2016 California Institute of Technology. # Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. 
# License: 3-clause BSD. The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE """ display the reference paths for objects in ``dill.types`` or a .pkl file Notes: the generated image is useful in showing the pointer references in objects that are or can be pickled. Any object in ``dill.objects`` listed in ``dill.load_types(picklable=True, unpicklable=True)`` works. Examples:: $ get_objgraph ArrayType Image generated as ArrayType.png """ import dill as pickle #pickle.debug.trace(True) #import pickle # get all objects for testing from dill import load_types load_types(pickleable=True,unpickleable=True) from dill import objects if __name__ == "__main__": import sys if len(sys.argv) != 2: print ("Please provide exactly one file or type name (e.g. 'IntType')") msg = "\n" for objtype in list(objects.keys())[:40]: msg += objtype + ', ' print (msg + "...") else: objtype = str(sys.argv[-1]) try: obj = objects[objtype] except KeyError: obj = pickle.load(open(objtype,'rb')) import os objtype = os.path.splitext(objtype)[0] try: import objgraph objgraph.show_refs(obj, filename=objtype+'.png') except ImportError: print ("Please install 'objgraph' to view object graphs") # EOF ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702239.0 dill-0.4.0/scripts/undill0000644000076500000240000000111614735133337015112 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2008-2016 California Institute of Technology. # Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. 
The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE """ unpickle the contents of a pickled object file Examples:: $ undill hello.pkl ['hello', 'world'] """ if __name__ == '__main__': import sys import dill for file in sys.argv[1:]: print (dill.load(open(file,'rb'))) ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1744763843.2621682 dill-0.4.0/setup.cfg0000644000076500000240000000007614777575703014053 0ustar00mmckernsstaff[egg_info] tag_build = tag_date = 0 [bdist_wheel] [sdist] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1735702239.0 dill-0.4.0/setup.py0000644000076500000240000001075014735133337013727 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2008-2016 California Institute of Technology. # Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE import os import sys # drop support for older python if sys.version_info < (3, 8): unsupported = 'Versions of Python before 3.8 are not supported' raise ValueError(unsupported) # get distribution meta info here = os.path.abspath(os.path.dirname(__file__)) sys.path.append(here) from version import (__version__, __author__, __contact__ as AUTHOR_EMAIL, get_license_text, get_readme_as_rst, write_info_file) LICENSE = get_license_text(os.path.join(here, 'LICENSE')) README = get_readme_as_rst(os.path.join(here, 'README.md')) # write meta info file write_info_file(here, 'dill', doc=README, license=LICENSE, version=__version__, author=__author__) del here, get_license_text, get_readme_as_rst, write_info_file # check if setuptools is available try: from setuptools import setup from setuptools.dist import Distribution has_setuptools = True except ImportError: from distutils.core import setup Distribution = 
object has_setuptools = False # build the 'setup' call setup_kwds = dict( name='dill', version=__version__, description='serialize all of Python', long_description = README.strip(), author = __author__, author_email = AUTHOR_EMAIL, maintainer = __author__, maintainer_email = AUTHOR_EMAIL, license = 'BSD-3-Clause', platforms = ['Linux', 'Windows', 'Mac'], url = 'https://github.com/uqfoundation/dill', download_url = 'https://pypi.org/project/dill/#files', project_urls = { 'Documentation':'http://dill.rtfd.io', 'Source Code':'https://github.com/uqfoundation/dill', 'Bug Tracker':'https://github.com/uqfoundation/dill/issues', }, python_requires = '>=3.8', classifiers = [ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', 'Programming Language :: Python :: 3.11', 'Programming Language :: Python :: 3.12', 'Programming Language :: Python :: 3.13', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', 'Topic :: Scientific/Engineering', 'Topic :: Software Development', ], packages = ['dill','dill.tests'], package_dir = {'dill':'dill', 'dill.tests':'dill/tests'}, scripts=['scripts/undill','scripts/get_objgraph','scripts/get_gprof'], ) # force python-, abi-, and platform-specific naming of bdist_wheel class BinaryDistribution(Distribution): """Distribution which forces a binary package with platform name""" def has_ext_modules(foo): return True # define dependencies ctypes_version = 'ctypes>=1.0.1' objgraph_version = 'objgraph>=1.7.2' gprof2dot_version = 'gprof2dot>=2022.7.29' pyreadline_version = 'pyreadline>=1.7.1' # add dependencies depend = [ctypes_version] if sys.platform[:3] == 'win': extras = {'readline': [pyreadline_version], 
'graph': [objgraph_version], 'profile': [gprof2dot_version]} else: extras = {'readline': [], 'graph': [objgraph_version], 'profile': [gprof2dot_version]} # update setup kwds if has_setuptools: setup_kwds.update( zip_safe=False, # distclass=BinaryDistribution, # install_requires=depend, extras_require=extras, ) # call setup setup(**setup_kwds) # if dependencies are missing, print a warning try: pass #import ctypes #import objgraph #import gprof2dot #import readline except ImportError: print ("\n***********************************************************") print ("WARNING: One of the following dependencies is unresolved:") # print (" %s" % ctypes_version) print (" %s (optional)" % objgraph_version) print (" %s (optional)" % gprof2dot_version) if sys.platform[:3] == 'win': print (" %s (optional)" % pyreadline_version) print ("***********************************************************\n") ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1730163643.0 dill-0.4.0/tox.ini0000644000076500000240000000045014710031673013515 0ustar00mmckernsstaff[tox] skip_missing_interpreters= True envlist = py38 py39 py310 py311 py312 py313 py314 pypy38 pypy39 pypy310 [testenv] deps = # numpy whitelist_externals = # bash commands = {envpython} -m pip install . {envpython} dill/tests/__main__.py ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744763711.0 dill-0.4.0/version.py0000644000076500000240000000630614777575477014303 0ustar00mmckernsstaff#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2022-2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. 
The full license text is available at: # - https://github.com/uqfoundation/dill/blob/master/LICENSE __version__ = '0.4.0'#.dev0' __author__ = 'Mike McKerns' __contact__ = 'mmckerns@uqfoundation.org' def get_license_text(filepath): "open the LICENSE file and read the contents" try: LICENSE = open(filepath).read() except: LICENSE = '' return LICENSE def get_readme_as_rst(filepath): "open the README file and read the markdown as rst" try: fh = open(filepath) name, null = fh.readline().rstrip(), fh.readline() tag, null = fh.readline(), fh.readline() tag = "%s: %s" % (name, tag) split = '-'*(len(tag)-1)+'\n' README = ''.join((null,split,tag,split,'\n')) skip = False for line in fh: if line.startswith('['): continue elif skip and line.startswith(' http'): README += '\n' + line elif line.startswith('* with'): #XXX: don't indent README += line elif line.startswith('* '): README += line.replace('* ',' - ',1) elif line.startswith('-'): README += line.replace('-','=') + '\n' elif line.startswith('!['): # image alt,img = line.split('](',1) if img.startswith('docs'): # relative path img = img.split('docs/source/',1)[-1] # make is in docs README += '.. image:: ' + img.replace(')','') README += ' :alt: ' + alt.replace('![','') + '\n' #elif ')[http' in line: # alt text link (`text `_) else: README += line skip = line.endswith(':\n') fh.close() except: README = '' return README def write_info_file(dirpath, modulename, **info): """write the given info to 'modulename/__info__.py' info expects: doc: the module's long_description version: the module's version string author: the module's author string license: the module's license contents """ import os infofile = os.path.join(dirpath, '%s/__info__.py' % modulename) header = '''#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2025 The Uncertainty Quantification Foundation. # License: 3-clause BSD. 
The full license text is available at: # - https://github.com/uqfoundation/%s/blob/master/LICENSE ''' % modulename #XXX: author and email are hardwired in the header doc = info.get('doc', None) version = info.get('version', None) author = info.get('author', None) license = info.get('license', None) with open(infofile, 'w') as fh: fh.write(header) if doc is not None: fh.write("'''%s'''\n\n" % doc) if version is not None: fh.write("__version__ = %r\n" % version) if author is not None: fh.write("__author__ = %r\n\n" % author) if license is not None: fh.write("__license__ = '''\n%s'''\n" % license) return