From 8635299f16436f4264caf3439952414f64b619fe Mon Sep 17 00:00:00 2001 From: Jim Pivarski Date: Mon, 23 Nov 2020 13:05:48 -0600 Subject: [PATCH] Renamed awkward -> awkward0 and uproot -> uproot3. (#527) --- .travis.yml | 109 - README.rst | 581 +- appveyor.yml | 73 - binder/requirements.txt | 17 - binder/samples | 1 - binder/trigger_binder.sh | 26 - binder/tutorial.ipynb | 7466 ----------------- binder/uproot | 1 - dev/allstreamers.c | 42 +- dev/streamergen.py | 12 +- docs/old-tutorial.rst | 198 +- docs/source/caches.rst | 14 +- docs/source/conf.py | 8 +- docs/source/index.rst | 10 +- docs/source/interpretation.rst | 54 +- docs/source/opening-files.rst | 52 +- docs/source/parallel-io.rst | 66 +- docs/source/root-io.rst | 46 +- docs/source/ttree-handling.rst | 110 +- requirements.txt | 4 +- setup.py | 133 +- tests/__init__.py | 2 +- tests/test_cache.py | 12 +- tests/test_compression.py | 104 +- tests/test_http.py | 26 +- tests/test_issues.py | 116 +- tests/test_jagged.py | 6 +- tests/test_rntuple.py | 8 +- tests/test_stlvector.py | 22 +- tests/test_tree.py | 164 +- tests/test_versions.py | 32 +- tests/test_write.py | 582 +- uproot/pandas.py | 15 - uproot/write/objects/__init__.py | 5 - uproot/write/sink/__init__.py | 5 - {uproot => uproot3}/__init__.py | 118 +- .../write => uproot3/_connect}/__init__.py | 4 +- {uproot => uproot3}/_connect/_pandas.py | 40 +- {uproot => uproot3}/_help.py | 734 +- {uproot => uproot3}/_util.py | 2 +- {uproot => uproot3}/cache.py | 4 +- {uproot => uproot3}/const.py | 2 +- .../_connect => uproot3/interp}/__init__.py | 4 +- {uproot => uproot3}/interp/auto.py | 268 +- {uproot => uproot3}/interp/interp.py | 11 +- {uproot => uproot3}/interp/jagged.py | 56 +- {uproot => uproot3}/interp/numerical.py | 98 +- {uproot => uproot3}/interp/objects.py | 80 +- uproot3/pandas.py | 15 + {uproot => uproot3}/rootio.py | 164 +- {uproot/interp => uproot3/source}/__init__.py | 4 +- {uproot => uproot3}/source/chunked.py | 14 +- {uproot => uproot3}/source/compressed.py | 42 +- {uproot => uproot3}/source/cursor.py | 4 +- {uproot => uproot3}/source/file.py | 8 +- {uproot => uproot3}/source/http.py | 8 +- {uproot => uproot3}/source/memmap.py | 8 +- {uproot => uproot3}/source/source.py | 2 +- {uproot => uproot3}/source/xrootd.py | 10 +- {uproot => uproot3}/tree.py | 338 +- {uproot => uproot3}/version.py | 4 +- {uproot => uproot3}/write/TDirectory.py | 36 +- {uproot => uproot3}/write/TFile.py | 106 +- {uproot => uproot3}/write/TFree.py | 2 +- {uproot => uproot3}/write/TKey.py | 18 +- {uproot/source => uproot3/write}/__init__.py | 4 +- {uproot => uproot3}/write/compress.py | 24 +- {uproot => uproot3}/write/objects/TH.py | 44 +- .../write/objects/TObjString.py | 14 +- {uproot => uproot3}/write/objects/TTree.py | 212 +- uproot3/write/objects/__init__.py | 5 + {uproot => uproot3}/write/objects/util.py | 14 +- uproot3/write/sink/__init__.py | 5 + {uproot => uproot3}/write/sink/cursor.py | 4 +- {uproot => uproot3}/write/sink/file.py | 2 +- {uproot => uproot3}/write/streamers.py | 4 +- {uproot => uproot3}/write/util.py | 2 +- 77 files changed, 2386 insertions(+), 10274 deletions(-) delete mode 100644 .travis.yml delete mode 100644 appveyor.yml delete mode 100644 binder/requirements.txt delete mode 120000 binder/samples delete mode 100644 binder/trigger_binder.sh delete mode 100644 binder/tutorial.ipynb delete mode 120000 binder/uproot delete mode 100644 uproot/pandas.py delete mode 100644 uproot/write/objects/__init__.py delete mode 100644 uproot/write/sink/__init__.py rename {uproot => 
uproot3}/__init__.py (60%) rename {uproot/write => uproot3/_connect}/__init__.py (59%) rename {uproot => uproot3}/_connect/_pandas.py (87%) rename {uproot => uproot3}/_help.py (64%) rename {uproot => uproot3}/_util.py (91%) rename {uproot => uproot3}/cache.py (96%) rename {uproot => uproot3}/const.py (99%) rename {uproot/_connect => uproot3/interp}/__init__.py (59%) rename {uproot => uproot3}/interp/auto.py (78%) rename {uproot => uproot3}/interp/interp.py (88%) rename {uproot => uproot3}/interp/jagged.py (64%) rename {uproot => uproot3}/interp/numerical.py (80%) rename {uproot => uproot3}/interp/objects.py (80%) create mode 100644 uproot3/pandas.py rename {uproot => uproot3}/rootio.py (91%) rename {uproot/interp => uproot3/source}/__init__.py (59%) rename {uproot => uproot3}/source/chunked.py (93%) rename {uproot => uproot3}/source/compressed.py (85%) rename {uproot => uproot3}/source/cursor.py (98%) rename {uproot => uproot3}/source/file.py (87%) rename {uproot => uproot3}/source/http.py (90%) rename {uproot => uproot3}/source/memmap.py (86%) rename {uproot => uproot3}/source/source.py (98%) rename {uproot => uproot3}/source/xrootd.py (93%) rename {uproot => uproot3}/tree.py (88%) rename {uproot => uproot3}/version.py (83%) rename {uproot => uproot3}/write/TDirectory.py (72%) rename {uproot => uproot3}/write/TFile.py (77%) rename {uproot => uproot3}/write/TFree.py (97%) rename {uproot => uproot3}/write/TKey.py (83%) rename {uproot/source => uproot3/write}/__init__.py (59%) rename {uproot => uproot3}/write/compress.py (92%) rename {uproot => uproot3}/write/objects/TH.py (94%) rename {uproot => uproot3}/write/objects/TObjString.py (72%) rename {uproot => uproot3}/write/objects/TTree.py (87%) create mode 100644 uproot3/write/objects/__init__.py rename {uproot => uproot3}/write/objects/util.py (88%) create mode 100644 uproot3/write/sink/__init__.py rename {uproot => uproot3}/write/sink/cursor.py (97%) rename {uproot => uproot3}/write/sink/file.py (97%) rename {uproot => uproot3}/write/streamers.py (99%) rename {uproot => uproot3}/write/util.py (94%) diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index be5e013d..00000000 --- a/.travis.yml +++ /dev/null @@ -1,109 +0,0 @@ -language: python -dist: xenial - -cache: pip - -os: - - linux - -env: - - PYVER=2.7 NPY="numpy==1.13.1" - - PYVER=2.7 NPY="numpy==1.14.5" - - PYVER=2.7 NPY="numpy>=1.15" - - - PYVER=3.5 NPY="numpy==1.13.1" - - PYVER=3.5 NPY="numpy==1.14.5" - - PYVER=3.5 NPY="numpy>=1.15" - - - PYVER=3.6 NPY="numpy==1.13.1" - - PYVER=3.6 NPY="numpy==1.14.5" - - PYVER=3.6 NPY="numpy>=1.15" - - - PYVER=3.7 NPY="numpy>=1.15" - - - PYVER=3.8 NPY="numpy>=1.15" - - - PYVER=3.9 NPY="numpy>=1.15" - -# - PYVER=pypy2.7 NPY="numpy>=1.15" - - - PYVER=pypy3.6 NPY="numpy>=1.15" - -install: - # Install conda - - wget -nv https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-Linux-x86_64.sh -O miniconda.sh - - bash miniconda.sh -b -p $HOME/miniconda - - eval "$($HOME/miniconda/bin/python -m conda shell.bash hook)" - - conda install --quiet --yes mamba - - conda config --set always_yes yes --set changeps1 no - # Create the conda testing environment - # pyopenssl is for deployment - - if [[ "${PYVER}" = pypy* ]]; then - mamba create --quiet --yes -n testenv ${PYVER}; - elif [ "${PYVER}" = "2.7" ] || [ "${PYVER}" = "3.6" ] || [ "${PYVER}" = "3.7" || [ "${PYVER}" = "3.8" ] || [ "${PYVER}" = "3.9" ]; then - mamba create --quiet --yes -n testenv python=${PYVER} pip pyopenssl; - else - mamba create --quiet --yes -n testenv 
python=${PYVER} pyopenssl; - fi - - conda activate testenv - - if [ "${PYVER}" = "2.7" ] || [ "${PYVER}" = "3.6" ] || [ "${PYVER}" = "3.7" || [ "${PYVER}" = "3.8" ] || [ "${PYVER}" = "3.9" ]; then - mamba install --quiet --yes -n testenv python=${PYVER} pip root; - conda activate testenv; - fi - - pip install --upgrade setuptools-scm - - pip install $NPY - - python -c 'import numpy; print(numpy.__version__)' - - pip install "awkward>=0.12.0,<1.0" - - python -c 'import awkward; print(awkward.__version__)' - - pip install "uproot-methods>=0.7.0" - - python -c 'import uproot_methods; print(uproot_methods.__version__)' - - pip install cachetools pkgconfig lz4 zstandard xxhash mock requests "pytest>=3.9" pytest-runner - - if [[ ${PYVER} = "2.7" ]] ; then pip install backports.lzma ; fi - - pip install pandas - - wget -O tests/samples/Event.root http://scikit-hep.org/uproot/examples/Event.root - - pip install pytest-error-for-skips - -addons: - apt: - packages: - - python-setuptools - - libatlas-base-dev - -script: - if [ "${PYVER}" = "2.7" ] || [ "${PYVER}" = "3.6" ] || [ "${PYVER}" = "3.7" || [ "${PYVER}" = "3.8" ] || [ "${PYVER}" = "3.9" ]; then - pytest --error-for-skips -v tests; - else - pytest -v tests; - fi - -notifications: - slack: scikit-hep:b6cgBXwccPoaCNLn5VKFJFVy - -deploy: - provider: pypi - user: pivarski - password: - secure: "BpSLpIcnkTtey4MILj0hiBPCF2W89iAyPbMs4eGO+KMYrACbuXoSR5Pt+Fm4JZpBF5apQeW2ul3WZLpJYNCnkZ/FnliUJwmfJVzcSJrJBmG0dkYyGtASaxuIR7BK4GWuMC00YVcqUX33mExwodU+P54LzXsFYIM4uVjjZgUWrlMxgZTPrYmfSowQfSAxkSV9TUSPZoh8ofONBUZJrt0dzeO8MnIfbssMnI2VMnWv3FG9H0eWxbzaWLjQBOQmMHRXPqGWBflwJ5uUsdBEFKoDpbj9JhDesiULcxezHHFxnWeXr0z7lR6KGM+R3rSwjCrLrS/0MQnXmW8+00pNVLwxjGOYrbSinnvBdZ+Olwb8PcGnqXVFAf0EdwWsYgrX2055vzJDpH0d82t3CAu2RrvpStq/TuDydCfPHmehIpJwBbicKAOmK+DIcMKs/rqpGnb4TCFav0L+zB+xQBqn5tZhbev6F0fPtyuUbrjCPqpb/HikzaZoSnqKA39FSK3wBDP3K13GkEJZiYlq0lvynQD5Ye/1DB9OKH3Vdzohi0JStopAtEga5YvH/CzpIm3D9gK7ZpIVKLXf79ebxAMeYGBNFxEI9SKLdWgxL0kJoCxyZWxdzusyafxWRLp76xJ5mMMNEb+RgNXThgybVBISWhD+WjhOodUm/43sty5eMM4mKRs=" - distributions: "sdist bdist_wheel --universal" - on: - tags: true - branch: master - condition: '$PYVER = "3.7" && $NPY = "numpy>=1.15"' - -# stages: -# - test -# - name: binder -# if: (branch = master) AND (NOT (type IN (pull_request))) - -# jobs: -# include: -# - stage: binder -# python: "3.6" -# env: NPY="numpy>=1.15" -# addons: skip -# before_install: skip -# install: skip -# script: -# # Use Binder build API to trigger repo2docker to build image on GKE and OVH Binder Federation clusters -# - bash binder/trigger_binder.sh https://gke.mybinder.org/build/gh/scikit-hep/uproot/"${TRAVIS_BRANCH}" -# - bash binder/trigger_binder.sh https://ovh.mybinder.org/build/gh/scikit-hep/uproot/"${TRAVIS_BRANCH}" diff --git a/README.rst b/README.rst index 4ac4f805..440f7592 100644 --- a/README.rst +++ b/README.rst @@ -9,42 +9,19 @@ See `scikit-hep/uproot4 `__ for the lates .. code-block:: bash - pip install uproot # old - pip install uproot4 # new - -because the interface has changed. Later this year, "Uproot 4" will simply become the `uproot` package with version number 4.0. Then the two packages will shift to - -.. code-block:: bash - - pip install uproot # new pip install uproot3 # old + pip install uproot # new -You can adopt the new library gradually by importing both in Python, switching to the old version as a contingency (missing feature or bug in the new version). Note that Uproot 3 returns old-style `Awkward 0 `__ arrays and Uproot 4 returns new-style `Awkward 1 `__ arrays. 
(The new version of Uproot was motivated by the new version of Awkward, to make a clear distinction.) +because the interface has changed. + +You can adopt the new library gradually by importing both in Python, switching to the old version as a contingency (missing feature or bug in the new version). Note that Uproot 3 returns old-style `Awkward 0 `__ arrays and Uproot 4 returns new-style `Awkward 1 `__ arrays. (The new version of Uproot was motivated by the new version of Awkward, to make a clear distinction.) uproot ====== -.. image:: https://travis-ci.org/scikit-hep/uproot.svg?branch=master - :target: https://travis-ci.org/scikit-hep/uproot - -.. image:: https://readthedocs.org/projects/uproot/badge/?version=latest - :target: https://uproot.readthedocs.io/en/latest/?badge=latest - .. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.1173083.svg :target: https://doi.org/10.5281/zenodo.1173083 -.. image:: https://mybinder.org/badge_logo.svg - :target: https://mybinder.org/v2/gh/scikit-hep/uproot/master?urlpath=lab/tree/binder%2Ftutorial.ipynb - -|PyPI version| -|Conda-Forge| - -.. |PyPI version| image:: https://badge.fury.io/py/uproot.svg - :target: https://badge.fury.io/py/uproot - -.. |Conda-Forge| image:: https://img.shields.io/conda/vn/conda-forge/uproot - :target: https://github.com/conda-forge/uproot-feedstock - .. inclusion-marker-1-do-not-remove ROOT I/O in pure Python and Numpy. @@ -59,11 +36,11 @@ Python does not necessarily mean slow. As long as the data blocks ("baskets") ar .. raw:: html -
+    (performance plot images)
.. inclusion-marker-replaceplots-stop -uproot is not maintained by the ROOT project team, so post bug reports here as `GitHub issues `__, not on a ROOT forum. Thanks! +uproot is not maintained by the ROOT project team, so post bug reports here as `GitHub issues `__, not on a ROOT forum. Thanks! .. inclusion-marker-2-do-not-remove @@ -74,14 +51,7 @@ Install uproot like any other Python package: .. code-block:: bash - pip install uproot # maybe with sudo or --user, or in virtualenv - -or install with `conda `__: - -.. code-block:: bash - - conda config --add channels conda-forge # if you haven't added conda-forge already - conda install uproot + pip install uproot3 # maybe with sudo or --user, or in virtualenv The pip installer automatically installs strict dependencies; the conda installer also installs optional dependencies (except for Pandas). @@ -89,8 +59,8 @@ Strict dependencies: -------------------- - `numpy `__ (1.13.1+) -- `awkward-array `__ (0.12.0+) -- `uproot-methods `__ (0.7.0+) +- `Awkward Array 0.x `__ +- `uproot-methods `__ (0.9.1+) - `cachetools `__ Optional dependencies: @@ -105,40 +75,22 @@ Optional dependencies: **Reminder: you do not need C++ ROOT to run uproot.** -Testing and development requirements ------------------------------------- - -The packages need to run the test suite can be installed via ``pip install -"uproot[testing]"``. Here is an example how to set up a development - -.. code-block:: bash - - git clone https://github.com/scikit-hep/uproot.git # alternatively your own fork - cd uproot - python -m venv venv # create a virtual environment in the folder venv - . venv/bin/activate # activate the Python environment - pip install -e ".[testing]" # installs uproot in editable mode with all packages required for testing - pytest # run the test suite - - .. inclusion-marker-3-do-not-remove Questions ========= -If you have a question about how to use uproot that is not answered in the document below, I recommend asking your question on `StackOverflow `__ with the ``[uproot]`` tag. (I get notified of questions with this tag.) +If you have a question about how to use uproot that is not answered in the document below, I recommend asking your question on `StackOverflow `__ with the ``[uproot]`` tag. (I get notified of questions with this tag.) Note that this tag is primarily intended for the new version of Uproot, so if you're using this version (Uproot 3.x), be sure to mention that. .. raw:: html

-If you believe you have found a bug in uproot, post it on the `GitHub issues tab `__. +If you believe you have found a bug in uproot, post it on the `GitHub issues tab `__. Tutorial ======== -Run `this tutorial `__ on Binder. - **Tutorial contents:** * `Introduction <#introduction>`__ @@ -192,52 +144,18 @@ Run `this tutorial `__ - `Writing TTrees <#writing-ttrees>`__ -Reference documentation -======================= - -For a list of all functions, classes, methods, and their parameters, click below. - -* `Opening files `__ - - - `uproot.open `__ - - `uproot.xrootd `__ - - `uproot.http `__ - - `uproot.iterate `__ - - `uproot.pandas.iterate `__ - - `uproot.lazyarray(s) `__ - - `uproot.daskarray/daskframe `__ - - `uproot.numentries `__ - -* `ROOT I/O `__ - - - `uproot.rootio.ROOTDirectory `__ - - `uproot.rootio.ROOTObject `__ - - `uproot.rootio.ROOTStreamedObject `__ - -* `TTree Handling `__ - - - `uproot.tree.TTreeMethods `__ - - `uproot.tree.TBranchMethods `__ - -* `Interpretation `__ -* `Caches `__ -* `Parallel I/O `__ - Introduction ============ -This tutorial is designed to help you start using uproot. Unlike the -`reference documentation `__, -which defines every parameter of every function, this tutorial provides -introductory examples to help you learn how to use them. +This tutorial is designed to help you start using uproot. The original tutorial `has been archived `__—this version was written in June 2019 in response to feedback from a series of tutorials I presented early this year and common questions in the -`GitHub issues `__. The new +`GitHub issues `__. The new tutorial is `executable on -Binder `__ +Binder `__ and may be read in any order, though it has to be executed from top to bottom because some variables are reused. @@ -255,7 +173,7 @@ In short, you should never see a segmentation fault. .. raw:: html -

+

Uproot is strictly concerned with file I/O only—all other functionality is handled by other libraries: @@ -264,7 +182,7 @@ is handled by other libraries: physics methods for types read from ROOT files, such as histograms and Lorentz vectors. It is intended to be largely user-contributed (and is). -- `awkward-array `__: +- `awkward-array `__: array manipulation beyond `Numpy `__. Several are encountered in this tutorial, particularly lazy arrays and jagged @@ -277,32 +195,28 @@ MAGIC (gamma ray astronomy), and IceCube (neutrino astronomy). .. raw:: html -

+

Exploring a file ================ -`uproot.open `__ -is the entry point for reading a single file. +``uproot3.open`` is the entry point for reading a single file. It takes a local filename path or a remote ``http://`` or ``root://`` URL. (HTTP requires the Python `requests `__ library and XRootD requires `pyxrootd `__, both of which have to be -explicitly pip-installed if you installed uproot with pip, but are -automatically installed if you installed uproot with conda.) +explicitly pip-installed.) .. code-block:: python3 - import uproot + import uproot3 - file = uproot.open("http://scikit-hep.org/uproot/examples/nesteddirs.root") + file = uproot3.open("http://scikit-hep.org/uproot3/examples/nesteddirs.root") file # -`uproot.open `__ -returns a -`ROOTDirectory `__, +``uproot3.open`` returns a ``ROOTDirectory``, which behaves like a Python dict; it has ``keys()``, ``values()``, and key-value access with square brackets. @@ -314,8 +228,7 @@ key-value access with square brackets. file["one"] # -Subdirectories also have type -`ROOTDirectory `__, +Subdirectories also have type ``ROOTDirectory``, so they behave like Python dicts, too. .. code-block:: python3 @@ -328,7 +241,7 @@ so they behave like Python dicts, too. **What’s the `b` before each object name?** Python 3 distinguishes between bytestrings and encoded strings. ROOT object names have no -encoding, such as Latin-1 or Unicode, so uproot presents them as raw +encoding, such as Latin-1 or Unicode, so Uproot presents them as raw bytestrings. However, if you enter a Python string (no ``b``) and it matches an object name (interpreted as plain ASCII), it will count as a match, as ``"one"`` does above. @@ -359,20 +272,20 @@ Here are a few more tricks for finding your way around a file: ``allkeys()``, ``allvalues()``, ``allitems()`` variants that recursively search through all subdirectories; - all of these functions can be filtered by name or class: see - `ROOTDirectory.keys `__. + ``ROOTDirectory.keys``. Here’s how you would search the subdirectories to find all TTrees: .. code-block:: python3 - file.allkeys(filterclass=lambda cls: issubclass(cls, uproot.tree.TTreeMethods)) + file.allkeys(filterclass=lambda cls: issubclass(cls, uproot3.tree.TTreeMethods)) # [b'one/two/tree;1', b'one/tree;1', b'three/tree;1'] Or get a Python dict of them: .. code-block:: python3 - all_ttrees = dict(file.allitems(filterclass=lambda cls: issubclass(cls, uproot.tree.TTreeMethods))) + all_ttrees = dict(file.allitems(filterclass=lambda cls: issubclass(cls, uproot3.tree.TTreeMethods))) all_ttrees # {b'one/two/tree;1': , # b'one/tree;1': , @@ -396,16 +309,12 @@ decompressor and gives you the objects transparently: you don’t have to specify anything. However, if an object is compressed with LZ4 and you don’t have the `lz4 `__ library installed, you’ll get an error with installation instructions in the -message. (It is automatically installed if you installed uproot with -conda.) ZLIB is part of the Python Standard Library, and LZMA is part of +message. ZLIB is part of the Python Standard Library, and LZMA is part of the Python 3 Standard Library, so you won’t get error messages about these except for LZMA in Python 2 (for which there is -`backports.lzma `__, -automatically installed if you installed uproot with conda). +`backports.lzma `__). 
-The -`ROOTDirectory `__ -class has a ``compression`` property that tells you the compression +The ``ROOTDirectory`` class has a ``compression`` property that tells you the compression algorithm and level associated with this file, .. code-block:: python3 @@ -421,25 +330,20 @@ Exploring a TTree ----------------- TTrees are special objects in ROOT files: they contain most of the -physics data. Uproot presents TTrees as subclasses of -`TTreeMethods `__. +physics data. Uproot presents TTrees as subclasses of ``TTreeMethods``. (**Why subclass?** Different ROOT files can have different versions of a -class, so uproot generates Python classes to fit the data, as needed. -All TTrees inherit from -`TTreeMethods `__ -so that they get the same data-reading methods.) +class, so Uproot generates Python classes to fit the data, as needed. +All TTrees inherit from ``TTreeMethods`` so that they get the same data-reading methods.) .. code-block:: python3 - events = uproot.open("http://scikit-hep.org/uproot/examples/Zmumu.root")["events"] + events = uproot3.open("http://scikit-hep.org/uproot3/examples/Zmumu.root")["events"] events # -Although -`TTreeMethods `__ -objects behave like Python dicts of -`TBranchMethods `__ +Although ``TTreeMethods`` +objects behave like Python dicts of ``TBranchMethods`` objects, the easiest way to browse a TTree is by calling its ``show()`` method, which prints the branches and their interpretations as arrays. @@ -485,16 +389,16 @@ Some terminology ---------------- ROOT files contain objects internally referred to via ``TKeys`` -(dict-like lookup in uproot). ``TTree`` organizes data in ``TBranches``, -and uproot interprets one ``TBranch`` as one array, either a `Numpy +(dict-like lookup in Uproot). ``TTree`` organizes data in ``TBranches``, +and Uproot interprets one ``TBranch`` as one array, either a `Numpy array `__ -or an `awkward array `__. -``TBranch`` data are stored in chunks called ``TBaskets``, though uproot +or an `Awkward Array `__. +``TBranch`` data are stored in chunks called ``TBaskets``, though Uproot hides this level of granularity unless you dig into the details. .. raw:: html -

+

Reading arrays from a TTree =========================== @@ -502,31 +406,18 @@ Reading arrays from a TTree The bulk data in a TTree are not read until requested. There are many ways to do that: -- select a TBranch and call - `TBranchMethods.array `__; -- call - `TTreeMethods.array `__ +- select a TBranch and call ``TBranchMethods.array``; +- call ``TTreeMethods.array`` directly from the TTree object; -- call - `TTreeMethods.arrays `__ +- call ``TTreeMethods.arrays`` to get several arrays at a time; - call - `TBranch.lazyarray `__, - `TTreeMethods.lazyarray `__, - `TTreeMethods.lazyarrays `__, - or - `uproot.lazyarrays `__ - to get array-like objects that read on demand; -- call - `TTreeMethods.iterate `__ - or - `uproot.iterate `__ + ``TBranch.lazyarray``, ``TTreeMethods.lazyarray``, ``TTreeMethods.lazyarrays``, or + ``uproot3.lazyarrays`` to get array-like objects that read on demand; +- call ``TTreeMethods.iterate`` or ``uproot3.iterate`` to explicitly iterate over chunks of data (to avoid reading more than would fit into memory); -- call - `TTreeMethods.pandas `__ - or - `uproot.pandas.iterate `__ +- call ``TTreeMethods.pandas`` or ``uproot3.pandas.iterate`` to get Pandas DataFrames (`Pandas `__ must be installed). @@ -570,10 +461,7 @@ introduction `__, or introduction `__ to see how to put Numpy arrays to work in machine learning. -The -`TBranchMethods.array `__ -method is the same as -`TTreeMethods.array `__ +The ``TBranchMethods.array`` method is the same as ``TTreeMethods.array`` except that you don’t have to specify the TBranch name (naturally). Sometimes one is more convenient, sometimes the other. @@ -625,10 +513,10 @@ special features below. Caching data ============ -Every time you ask for arrays, uproot goes to the file and re-reads +Every time you ask for arrays, Uproot goes to the file and re-reads them. For especially large arrays, this can take a long time. -For quicker access, uproot’s array-reading functions have a **cache** +For quicker access, Uproot’s array-reading functions have a **cache** parameter, which is an entry point for you to manage your own cache. The **cache** only needs to behave like a dict (many third-party Python caches do). @@ -690,18 +578,16 @@ This manual process of clearing the cache when you run out of memory is not very robust. What you want instead is a dict-like object that drops elements on its own when memory is scarce. -Uproot has an -`ArrayCache `__ +Uproot has an ``ArrayCache`` class for this purpose, though it’s a thin wrapper around the third-party `cachetools `__ library. Whereas `cachetools `__ -drops old data from cache when a maximum number of items is reached, -`ArrayCache `__ +drops old data from cache when a maximum number of items is reached, ``ArrayCache`` drops old data when the data usage reaches a limit, specified in bytes. .. code-block:: python3 - mycache = uproot.ArrayCache("100 kB") + mycache = uproot3.ArrayCache("100 kB") events.arrays("*", cache=mycache); len(mycache), len(events.keys()) @@ -710,7 +596,7 @@ drops old data when the data usage reaches a limit, specified in bytes. With a limit of 100 kB, only 6 of the 20 arrays fit into cache, the rest have been evicted. -All data sizes in uproot are specified as an integer in bytes (integers) +All data sizes in Uproot are specified as an integer in bytes (integers) or a string with the appropriate unit (interpreted as powers of 1024, not 1000). @@ -736,21 +622,17 @@ middle of a basket (see below). 
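Array-reading functions also accept a **basketcache** parameter for the decompressed TBaskets themselves. Here is a minimal sketch (the file, branch, entry ranges, and cache size are only illustrative): two overlapping partial reads share TBaskets through a dict-like cache, so the second read does not decompress the same TBaskets again.

.. code-block:: python3

    import uproot3

    events = uproot3.open("http://scikit-hep.org/uproot3/examples/Zmumu.root")["events"]
    basketcache = uproot3.ArrayCache("500 MB")   # dict-like, same idea as **cache**

    # reads and decompresses the TBaskets covering entries 0-999, storing them in basketcache
    first = events.array("E1", entrystop=1000, basketcache=basketcache)

    # the overlapping entry range reuses TBaskets already held in basketcache
    second = events.array("E1", entrystart=500, entrystop=1500, basketcache=basketcache)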
There is also a **keycache** for caching ROOT’s TKey objects, which use negligible memory but would be a bottleneck to re-read when TBaskets are provided by a **basketcache**. -For more on these high and mid-level caching parameters, see `reference -documentation `__. - At the lowest level of abstraction, raw bytes are cached by the HTTP and XRootD remote file readers. You can control the memory remote file -memory use with ``uproot.HTTPSource.defaults["limitbytes"]`` and -``uproot.XRootDSource.defaults["limitbytes"]``, either by globally +memory use with ``uproot3.HTTPSource.defaults["limitbytes"]`` and +``uproot3.XRootDSource.defaults["limitbytes"]``, either by globally setting these parameters before opening a file, or by passing them to -`uproot.open `__ -through the **limitbytes** parameter. +``uproot3.open`` through the **limitbytes** parameter. .. code-block:: python3 # default remote file caches in MB - uproot.HTTPSource.defaults["limitbytes"] / 1024**2, uproot.XRootDSource.defaults["limitbytes"] / 1024**2 + uproot3.HTTPSource.defaults["limitbytes"] / 1024**2, uproot3.XRootDSource.defaults["limitbytes"] / 1024**2 # (32.0, 32.0) If you want to limit this cache to less than the default **chunkbytes** @@ -759,7 +641,7 @@ able to load at least one chunk! .. code-block:: python3 - uproot.open("http://scikit-hep.org/uproot/examples/Zmumu.root", limitbytes="100 kB", chunkbytes="10 kB") + uproot3.open("http://scikit-hep.org/uproot3/examples/Zmumu.root", limitbytes="100 kB", chunkbytes="10 kB") # By default (unless **localsource** is overridden), local files are @@ -769,21 +651,13 @@ Lazy arrays =========== If you call -`TBranchMethods.array `__, -`TTreeMethods.array `__, -or -`TTreeMethods.arrays `__, -uproot reads the file or cache immediately and returns an in-memory +``TBranchMethods.array``, ``TTreeMethods.array``, or +``TTreeMethods.arrays``, Uproot reads the file or cache immediately and returns an in-memory array. For exploratory work or to control memory usage, you might want to let the data be read on demand. -The -`TBranch.lazyarray `__, -`TTreeMethods.lazyarray `__, -`TTreeMethods.lazyarrays `__, -and -`uproot.lazyarrays `__ -functions take most of the same parameters but return lazy array +The ``TBranch.lazyarray``, ``TTreeMethods.lazyarray``, ``TTreeMethods.lazyarrays``, and +``uproot.lazyarrays`` functions take most of the same parameters but return lazy array objects, rather than Numpy arrays. .. code-block:: python3 @@ -852,14 +726,8 @@ contiguous whole.) Lazy array of many files ------------------------ -There’s a lazy version of each of the array-reading functions in -`TTreeMethods `__ -and -`TBranchMethods `__, -but there’s also module-level -`uproot.lazyarray `__ -and -`uproot.lazyarrays `__. +There’s a lazy version of each of the array-reading functions in ``TTreeMethods`` +and ``TBranchMethods``, but there’s also module-level ``uproot.lazyarray`` and ``uproot.lazyarrays``. These functions let you make a lazy array that spans many files. These functions may be thought of as alternatives to ROOT’s TChain: a @@ -869,9 +737,9 @@ single array. See `Iteration <#iteration>`__ below as a more explicit TChain alt .. 
code-block:: python3 - data = uproot.lazyarray( + data = uproot3.lazyarray( # list of files; local files can have wildcards (*) - ["http://scikit-hep.org/uproot/examples/sample-%s-zlib.root" % x + ["http://scikit-hep.org/uproot3/examples/sample-%s-zlib.root" % x for x in ["5.23.02", "5.24.00", "5.25.02", "5.26.00", "5.27.02", "5.28.00", "5.29.02", "5.30.00", "6.08.04", "6.10.05", "6.14.00"]], # TTree name in each file @@ -885,15 +753,14 @@ This ``data`` represents the entire set of files, and the only up-front processing that had to be done was to find out how many entries each TTree contains. -It uses the -`uproot.numentries `__ +It uses the ``uproot3.numentries`` shortcut method (which reads less data than normal file-opening): .. code-block:: python3 - dict(uproot.numentries( + dict(uproot3.numentries( # list of files; local files can have wildcards (*) - ["http://scikit-hep.org/uproot/examples/sample-%s-zlib.root" % x + ["http://scikit-hep.org/uproot3/examples/sample-%s-zlib.root" % x for x in ["5.23.02", "5.24.00", "5.25.02", "5.26.00", "5.27.02", "5.28.00", "5.29.02", "5.30.00", "6.08.04", "6.10.05", "6.14.00"]], # TTree name in each file @@ -901,17 +768,17 @@ shortcut method (which reads less data than normal file-opening): # total=True adds all values; total=False leaves them as a dict total=False)) - # {'http://scikit-hep.org/uproot/examples/sample-5.23.02-zlib.root': 30, - # 'http://scikit-hep.org/uproot/examples/sample-5.24.00-zlib.root': 30, - # 'http://scikit-hep.org/uproot/examples/sample-5.25.02-zlib.root': 30, - # 'http://scikit-hep.org/uproot/examples/sample-5.26.00-zlib.root': 30, - # 'http://scikit-hep.org/uproot/examples/sample-5.27.02-zlib.root': 30, - # 'http://scikit-hep.org/uproot/examples/sample-5.28.00-zlib.root': 30, - # 'http://scikit-hep.org/uproot/examples/sample-5.29.02-zlib.root': 30, - # 'http://scikit-hep.org/uproot/examples/sample-5.30.00-zlib.root': 30, - # 'http://scikit-hep.org/uproot/examples/sample-6.08.04-zlib.root': 30, - # 'http://scikit-hep.org/uproot/examples/sample-6.10.05-zlib.root': 30, - # 'http://scikit-hep.org/uproot/examples/sample-6.14.00-zlib.root': 30} + # {'http://scikit-hep.org/uproot3/examples/sample-5.23.02-zlib.root': 30, + # 'http://scikit-hep.org/uproot3/examples/sample-5.24.00-zlib.root': 30, + # 'http://scikit-hep.org/uproot3/examples/sample-5.25.02-zlib.root': 30, + # 'http://scikit-hep.org/uproot3/examples/sample-5.26.00-zlib.root': 30, + # 'http://scikit-hep.org/uproot3/examples/sample-5.27.02-zlib.root': 30, + # 'http://scikit-hep.org/uproot3/examples/sample-5.28.00-zlib.root': 30, + # 'http://scikit-hep.org/uproot3/examples/sample-5.29.02-zlib.root': 30, + # 'http://scikit-hep.org/uproot3/examples/sample-5.30.00-zlib.root': 30, + # 'http://scikit-hep.org/uproot3/examples/sample-6.08.04-zlib.root': 30, + # 'http://scikit-hep.org/uproot3/examples/sample-6.10.05-zlib.root': 30, + # 'http://scikit-hep.org/uproot3/examples/sample-6.14.00-zlib.root': 30} Lazy arrays with caching ------------------------ @@ -925,7 +792,7 @@ This is caching, and the caching mechanism is the same as before: .. code-block:: python3 - mycache = uproot.cache.ArrayCache(100*1024) # 100 kB + mycache = uproot3.cache.ArrayCache(100*1024) # 100 kB data = events.lazyarrays(entrysteps=500, cache=mycache) data @@ -958,8 +825,8 @@ Lazy arrays as lightweight skims -------------------------------- The ``ChunkedArray`` and ``VirtualArray`` classes are defined in the -`awkward-array `__ -library installed with uproot. 
These arrays can be saved to files in a +`Awkward Array `__ +library installed with Uproot. These arrays can be saved to files in a way that preserves their virtualness, which allows you to save a “diff” with respect to the original ROOT files. @@ -973,22 +840,22 @@ and add a derived feature: data["mass"] = numpy.sqrt((data["E1"] + data["E2"])**2 - (data["px1"] + data["px2"])**2 - (data["py1"] + data["py2"])**2 - (data["pz1"] + data["pz2"])**2) -and save the whole thing to an awkward-array file (``.awkd``). +and save the whole thing to an Awkward Array file (``.awkd``). .. code-block:: python3 - import awkward + import awkward0 - awkward.save("derived-feature.awkd", data, mode="w") + awkward0.save("derived-feature.awkd", data, mode="w") -When we read it back, the derived features come from the awkward-array +When we read it back, the derived features come from the Awkward Array file but the original features are loaded as pointers to the original ROOT files (``VirtualArrays`` whose array-making function knows the original ROOT filenames—don’t move them!). .. code-block:: python3 - data2 = awkward.load("derived-feature.awkd") + data2 = awkward0.load("derived-feature.awkd") # reads from derived-feature.awkd data2["mass"] @@ -1010,9 +877,9 @@ as a lightweight skim. selected # ...] at 0x7f3739b3e7f0> - awkward.save("selected-events.awkd", selected, mode="w") + awkward0.save("selected-events.awkd", selected, mode="w") - data3 = awkward.load("selected-events.awkd") + data3 = awkward0.load("selected-events.awkd") data3 # ... ] at 0x7f3739b1e048> @@ -1020,21 +887,18 @@ Lazy arrays in Dask ------------------- `Dask `__ is a framework for delayed and distributed -computation with lazy array and dataframe interfaces. To turn uproot’s -lazy arrays into Dask objects, use the -`uproot.daskarray `__ -and -`uproot.daskframe `__ +computation with lazy array and dataframe interfaces. To turn Uproot’s +lazy arrays into Dask objects, use the ``uproot3.daskarray`` and ``uproot3.daskframe`` functions. .. code-block:: python3 - uproot.daskarray("http://scikit-hep.org/uproot/examples/Zmumu.root", "events", "E1") + uproot3.daskarray("http://scikit-hep.org/uproot3/examples/Zmumu.root", "events", "E1") # dask.array .. code-block:: python3 - uproot.daskframe("http://scikit-hep.org/uproot/examples/Zmumu.root", "events") + uproot3.daskframe("http://scikit-hep.org/uproot3/examples/Zmumu.root", "events") .. raw:: html @@ -1149,9 +1013,7 @@ impression that you have a larger array than memory can hold all at once. The next two methods *explicitly* step through chunks of data, to give you more control over the process. -`TTreeMethods.iterate `__ -iterates over chunks of a TTree and -`uproot.iterate `__ +``TTreeMethods.iterate`` iterates over chunks of a TTree and ``uproot3.iterate`` iterates through files. Like a file-spanning lazy array, a file-spanning iterator erases the @@ -1200,15 +1062,14 @@ need. Filenames and entry numbers while iterating ------------------------------------------- -`uproot.iterate `__ +``uproot3.iterate`` crosses file boundaries as part of its iteration, and that’s information we might need in the loop. If the following are ``True``, each step in iteration is a tuple containing the arrays and the additional information. 
- **reportpath:** the full path or URL of the (possibly remote) file; -- **reportfile:** the - `ROOTDirectory `__ +- **reportfile:** the ``ROOTDirectory`` object itself (so that you don’t need to re-open it at each iteration step); - **reportentries:** the starting and stopping entry numbers for this @@ -1217,8 +1078,8 @@ information. .. code-block:: python3 - for path, file, start, stop, arrays in uproot.iterate( - ["http://scikit-hep.org/uproot/examples/sample-%s-zlib.root" % x + for path, file, start, stop, arrays in uproot3.iterate( + ["http://scikit-hep.org/uproot3/examples/sample-%s-zlib.root" % x for x in ["5.23.02", "5.24.00", "5.25.02", "5.26.00", "5.27.02", "5.28.00", "5.29.02", "5.30.00", "6.08.04", "6.10.05", "6.14.00"]], "sample", @@ -1226,27 +1087,27 @@ information. reportpath=True, reportfile=True, reportentries=True): print(path, file, start, stop, len(arrays)) - # http://scikit-hep.org/uproot/examples/sample-5.23.02-zlib.root + # http://scikit-hep.org/uproot3/examples/sample-5.23.02-zlib.root # 0 30 1 - # http://scikit-hep.org/uproot/examples/sample-5.24.00-zlib.root + # http://scikit-hep.org/uproot3/examples/sample-5.24.00-zlib.root # 30 60 1 - # http://scikit-hep.org/uproot/examples/sample-5.25.02-zlib.root + # http://scikit-hep.org/uproot3/examples/sample-5.25.02-zlib.root # 60 90 1 - # http://scikit-hep.org/uproot/examples/sample-5.26.00-zlib.root + # http://scikit-hep.org/uproot3/examples/sample-5.26.00-zlib.root # 90 120 1 - # http://scikit-hep.org/uproot/examples/sample-5.27.02-zlib.root + # http://scikit-hep.org/uproot3/examples/sample-5.27.02-zlib.root # 120 150 1 - # http://scikit-hep.org/uproot/examples/sample-5.28.00-zlib.root + # http://scikit-hep.org/uproot3/examples/sample-5.28.00-zlib.root # 150 180 1 - # http://scikit-hep.org/uproot/examples/sample-5.29.02-zlib.root + # http://scikit-hep.org/uproot3/examples/sample-5.29.02-zlib.root # 180 210 1 - # http://scikit-hep.org/uproot/examples/sample-5.30.00-zlib.root + # http://scikit-hep.org/uproot3/examples/sample-5.30.00-zlib.root # 210 240 1 - # http://scikit-hep.org/uproot/examples/sample-6.08.04-zlib.root + # http://scikit-hep.org/uproot3/examples/sample-6.08.04-zlib.root # 240 270 1 - # http://scikit-hep.org/uproot/examples/sample-6.10.05-zlib.root + # http://scikit-hep.org/uproot3/examples/sample-6.10.05-zlib.root # 270 300 1 - # http://scikit-hep.org/uproot/examples/sample-6.14.00-zlib.root + # http://scikit-hep.org/uproot3/examples/sample-6.14.00-zlib.root # 300 330 1 Limiting the number of entries to be read @@ -1284,7 +1145,7 @@ between basket boundaries. .. 
code-block:: python3 # This file has small TBaskets - tree = uproot.open("http://scikit-hep.org/uproot/examples/foriter.root")["foriter"] + tree = uproot3.open("http://scikit-hep.org/uproot3/examples/foriter.root")["foriter"] branch = tree["data"] [branch.basket_numentries(i) for i in range(branch.numbaskets)] # [6, 6, 6, 6, 6, 6, 6, 4] @@ -1331,23 +1192,17 @@ iteration functions also have: # [500, 500, 500, 500, 304] The TTree lazy array/iteration functions -(`TTreeMethods.array `__, -`TTreeMethods.arrays `__, -`TBranch.lazyarray `__, -`TTreeMethods.lazyarray `__, -and -`TTreeMethods.lazyarrays `__) +(``TTreeMethods.array``, ``TTreeMethods.arrays``, ``TBranch.lazyarray``, ``TTreeMethods.lazyarray``, and +``TTreeMethods.lazyarrays``) use basket or cluster sizes as a default **entrysteps**, while multi-file lazy array/iteration functions -(`uproot.lazyarrays `__ -and -`uproot.iterate `__) +(``uproot3.lazyarrays`` and ``uproot3.iterate``) use the maximum per file: ``numpy.inf``. .. code-block:: python3 # This file has small TBaskets - tree = uproot.open("http://scikit-hep.org/uproot/examples/foriter.root")["foriter"] + tree = uproot3.open("http://scikit-hep.org/uproot3/examples/foriter.root")["foriter"] branch = tree["data"] [len(a["data"]) for a in tree.iterate(namedecode="utf-8")] # [6, 6, 6, 6, 6, 6, 6, 4] @@ -1355,8 +1210,8 @@ use the maximum per file: ``numpy.inf``. .. code-block:: python3 # This file has small TBaskets - [len(a["data"]) for a in uproot.iterate(["http://scikit-hep.org/uproot/examples/foriter.root"] * 3, - "foriter", namedecode="utf-8")] + [len(a["data"]) for a in uproot3.iterate(["http://scikit-hep.org/uproot3/examples/foriter.root"] * 3, + "foriter", namedecode="utf-8")] # [46, 46, 46] One particularly useful way to specify the **entrysteps** is with a @@ -1396,9 +1251,7 @@ Caching and iteration Since iteration gives you more precise control over which set of events you’re processing at a given time, caching with the **cache** parameter is less useful than it is with lazy arrays. For consistency’s sake, the -`TTreeMethods.iterate `__ -and -`uproot.iterate `__ +``TTreeMethods.iterate`` and ``uproot3.iterate`` functions provide a **cache** parameter and it works the same way that it does in other array-reading functions, but its effect would be to retain the previous step’s arrays while working on a new step in the @@ -1414,15 +1267,11 @@ caching it for exactly one iteration step is ideal: it avoids the need to reread it and decompress it again. It is such a useful feature that it’s built into -`TTreeMethods.iterate `__ -and -`uproot.iterate `__ +``TTreeMethods.iterate`` and ``uproot3.iterate`` by default. If you don’t set a **basketcache**, these functions will create one with no memory limit and save TBaskets in it for exactly one iteration step, eliminating that temporary cache at the end of -iteration. (The same is true of the **keycache**; see `reference -documentation `__ -for detail.) +iteration. (The same is true of the **keycache**.) Thus, you probably don’t want to set any explicit caches while iterating. Setting an explicit **basketcache** would introduce an upper @@ -1434,12 +1283,7 @@ running out of memory during iteration, try reducing the **entrysteps**. 
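As a minimal sketch of that advice (the file, branches, and step size are only illustrative), shrinking **entrysteps** is usually all that is needed to keep iteration within memory, with no explicit caches at all:

.. code-block:: python3

    import uproot3

    events = uproot3.open("http://scikit-hep.org/uproot3/examples/Zmumu.root")["events"]

    # smaller steps mean smaller arrays per step; iterate manages its own
    # one-step basketcache internally, so no cache parameters are needed
    for arrays in events.iterate(["E1", "px1", "py1", "pz1"], entrysteps=500, namedecode="utf-8"):
        print(len(arrays["E1"]))   # at most 500 entries per step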
Changing the output container type ================================== -When we ask for -`TTreeMethods.arrays `__ -(plural), -`TTreeMethods.iterate `__, -or -`uproot.iterate `__, +When we ask for ``TTreeMethods.arrays`` (plural), ``TTreeMethods.iterate``, or ``uproot3.iterate``, we get a Python dict mapping branch names to arrays. (As a reminder, **namedecode=“utf-8”** makes those branch names Python strings, rather than bytestrings.) Sometimes, we want a different kind of container. @@ -1460,10 +1304,7 @@ a tuple-assignment. # array([-41.19528764, 35.11804977, 35.11804977, ..., 32.37749196, # 32.37749196, 32.48539387]) -Using ``tuple`` as an **outputtype** in -`TTreeMethods.iterate `__ -and -`uproot.iterate `__ +Using ``tuple`` as an **outputtype** in ``TTreeMethods.iterate`` and ``uproot3.iterate`` lets us unpack the arrays in Python’s for statement. .. code-block:: python3 @@ -1599,9 +1440,7 @@ The previous example filled a `pandas.DataFrame `__ by explicitly passing it as an **outputtype**. Pandas is such an important container type that there are specialized functions for it: -`TTreeMethods.pandas.df `__ -and -`uproot.pandas.df `__. +``TTreeMethods.pandas.df`` and ``uproot3.pandas.df``. .. code-block:: python3 @@ -1780,15 +1619,14 @@ entry numbers in the file. For instance, counting from the end: -The -`uproot.pandas.df `__ +The ``uproot3.pandas.df`` function doesn’t have a **reportentries** because they’re included in the DataFrame itself. .. code-block:: python3 - for df in uproot.pandas.iterate("http://scikit-hep.org/uproot/examples/Zmumu.root", "events", "p[xyz]1", - entrysteps=500): + for df in uproot3.pandas.iterate("http://scikit-hep.org/uproot3/examples/Zmumu.root", "events", "p[xyz]1", + entrysteps=500): print(df[:3]) # px1 py1 pz1 @@ -1830,7 +1668,7 @@ it’s usually desirable for DataFrames. .. code-block:: python3 - events2 = uproot.open("http://scikit-hep.org/uproot/examples/HZZ.root")["events"] # non-flat data + events2 = uproot3.open("http://scikit-hep.org/uproot3/examples/HZZ.root")["events"] # non-flat data .. code-block:: python3 @@ -2438,8 +2276,7 @@ Note that the return values must be strictly ``True`` and ``False``, not anything that `Python evaluates to true or false `__. If the function returns anything else, it will be used as a new -`Interpretation `__ -for the branch. +``Interpretation`` for the branch. TBranch interpretations ----------------------- @@ -2486,7 +2323,7 @@ but it wouldn’t be meaningful. .. code-block:: python3 - events["E1"].array(uproot.asdtype(">i8")) + events["E1"].array(uproot3.asdtype(">i8")) # array([4635484859043618393, 4633971086021346367, 4633971086021346367, ..., # 4635419294316473354, 4635419294316473354, 4635440129219414362]) @@ -2498,9 +2335,9 @@ Reading data into a preexisting array ------------------------------------- One actually useful TBranch reinterpretation is -`uproot.asarray `__. +``uproot3.asarray``. It differs from -`uproot.asdtype `__ +``uproot3.asdtype`` only in that the latter creates a new array when reading data while the former fills a user-specified array. @@ -2511,8 +2348,7 @@ former fills a user-specified array. reinterpretation # asarray('>f8', ) -Passing the new -`uproot.asarray `__ +Passing the new ``uproot3.asarray`` interpretation to the array-reading function .. code-block:: python3 @@ -2549,7 +2385,7 @@ literally ``True``, not an object that Python would evaluate to .. 
code-block:: python3 - events.arrays(lambda branch: isinstance(branch.interpretation, uproot.asdtype) and + events.arrays(lambda branch: isinstance(branch.interpretation, uproot3.asdtype) and str(branch.interpretation.fromdtype) == ">f8").keys() # dict_keys([b'E1', b'px1', b'py1', b'pz1', b'pt1', b'eta1', b'phi1', # b'E2', b'px2', b'py2', b'pz2', b'pt2', b'eta2', b'phi2', b'M']) @@ -2559,7 +2395,7 @@ sets their interpretations in one pass. .. code-block:: python3 - events.arrays(lambda branch: uproot.asdtype(">f8", "f8", "`__ +TBranches to exclude and an ``Interpretation`` for the ones to reinterpret. The same could have been said in a less functional way with a dict: .. code-block:: python3 - events.arrays({"px1": uproot.asdtype(">f8", "f8", "f8", "f8", "`__), not the data themselves. -For this, uproot fills a new ``JaggedArray`` data structure (from the -awkward-array library, like ``ChunkedArray`` and ``VirtualArray``). +For this, Uproot fills a new ``JaggedArray`` data structure (from the +Awkward Array library, like ``ChunkedArray`` and ``VirtualArray``). .. code-block:: python3 - tree = uproot.open("http://scikit-hep.org/uproot/examples/nesteddirs.root")["one/two/tree"] + tree = uproot3.open("http://scikit-hep.org/uproot3/examples/nesteddirs.root")["one/two/tree"] array = tree.array("SliceInt64", entrystop=20) array # `__, including `advanced indexing `__. -Awkward-array generalizes Numpy in many ways—details can be found `in -its documentation `__. +Awkward Array generalizes Numpy in many ways—details can be found `in +its documentation `__. .. code-block:: python3 @@ -3159,7 +2994,7 @@ Here is an example of ``JaggedArrays`` in physics data: .. code-block:: python3 - events2 = uproot.open("http://scikit-hep.org/uproot/examples/HZZ.root")["events"] + events2 = uproot3.open("http://scikit-hep.org/uproot3/examples/HZZ.root")["events"] E, px, py, pz = events2.arrays(["Muon_E", "Muon_P[xyz]"], outputtype=tuple) E @@ -3309,19 +3144,19 @@ Fortunately, ROOT files are themselves structured this way, with variable-width data represented by contents and offsets in a TBasket. These arrays do not need to be deserialized individually, but can be merely cast as Numpy arrays in one Python call. The lack of per-event -processing is why reading in uproot and processing data with -awkward-array can be fast, despite being written in Python. +processing is why reading in Uproot and processing data with +Awkward Array can be fast, despite being written in Python. .. raw:: html -

+

Special physics objects: Lorentz vectors ---------------------------------------- Although any C++ type can in principle be read (see below), some are important enough to be given convenience methods for analysis. These are -not defined in uproot (which is strictly concerned with I/O), but in +not defined in Uproot (which is strictly concerned with I/O), but in `uproot-methods `__. If you need certain classes to have user-friendly methods in Python, you’re encouraged to contribute them to @@ -3332,7 +3167,7 @@ One of these classes is ``TLorentzVectorArray``, which defines an .. code-block:: python3 - events3 = uproot.open("http://scikit-hep.org/uproot/examples/HZZ-objects.root")["events"] + events3 = uproot3.open("http://scikit-hep.org/uproot3/examples/HZZ-objects.root")["events"] .. code-block:: python3 @@ -3379,7 +3214,7 @@ method on each). muons.mass # some mass**2 are slightly negative, hence the Numpy warning about negative square roots # /home/pivarski/miniconda3/lib/python3.7/site-packages/uproot_methods-0.6.1-py3.7.egg/uproot_methods/ # classes/TLorentzVector.py:189: RuntimeWarning: invalid value encountered in sqrt - # return self._trymemo("mass", lambda self: self.awkward.numpy.sqrt(self.mag2)) + # return self._trymemo("mass", lambda self: self.awkward0.numpy.sqrt(self.mag2)) # @@ -3401,7 +3236,7 @@ into ``TLorentzVectorArrays``. Although they’re in wide use, the C++ `ROOT::Math::LorentzVector `__. Unlike the old class, the new vectors can be represented with a variety of data types and coordinate systems, and they’re split into multiple -branches, so uproot sees them as four branches, each representing the +branches, so Uproot sees them as four branches, each representing the components. You can still use the ``TLorentzVectorArray`` Python class; you just @@ -3451,7 +3286,7 @@ demand) to the same Python string type. .. code-block:: python3 - branch = uproot.open("http://scikit-hep.org/uproot/examples/sample-6.14.00-zlib.root")["sample"]["str"] + branch = uproot3.open("http://scikit-hep.org/uproot3/examples/sample-6.14.00-zlib.root")["sample"]["str"] branch.array() # @@ -3473,7 +3308,7 @@ Arbitrary objects in TTrees Uproot does not have a hard-coded deserialization for every C++ class type; it uses the “streamers” that ROOT includes in each file to learn how to deserialize the objects in that file. Even if you defined your -own C++ classes, uproot should be able to read them. (**Caveat:** not +own C++ classes, Uproot should be able to read them. (**Caveat:** not all structure types have been implemented, so the coverage of C++ types is a work in progress.) @@ -3489,7 +3324,7 @@ split into ``fTracks.fUniqueID``, ``fTracks.fBits``, ``fTracks.fPx``, .. code-block:: python3 - tree = uproot.open("http://scikit-hep.org/uproot/examples/Event.root")["T"] + tree = uproot3.open("http://scikit-hep.org/uproot3/examples/Event.root")["T"] tree.show() # event TStreamerInfo None # TObject TStreamerInfo None @@ -3726,10 +3561,8 @@ The criteria for whether an object can be read vectorially in Numpy (fast) or individually in Python (slow) is whether it has a fixed width—all objects having the same number of bytes—or a variable width. You can see this in the TBranch’s ``interpretation`` as the distinction -between -`uproot.asobj `__ -(fixed width, vector read) and -`uproot.asgenobj `__ +between ``uproot3.asobj`` +(fixed width, vector read) and ``uproot3.asgenobj`` (variable width, read into Python objects). .. code-block:: python3 @@ -3755,7 +3588,7 @@ such as ``std::vector>``. .. 
code-block:: python3 - branch = uproot.open("http://scikit-hep.org/uproot/examples/vectorVectorDouble.root")["t"]["x"] + branch = uproot3.open("http://scikit-hep.org/uproot3/examples/vectorVectorDouble.root")["t"]["x"] branch.interpretation # asgenobj(STLVector(STLVector(asdtype('>f8')))) @@ -3785,12 +3618,12 @@ usual ``JaggedArray`` slicing. print(type(err), err) # 'ObjectArray' object has no attribute 'counts' -To get ``JaggedArray`` semantics, use ``awkward.fromiter`` to convert -the arbitrary Python objects into awkward-arrays. +To get ``JaggedArray`` semantics, use ``awkward0.fromiter`` to convert +the arbitrary Python objects into Awkward Arrays. .. code-block:: python3 - jagged = awkward.fromiter(array) + jagged = awkward0.fromiter(array) jagged # @@ -3798,7 +3631,7 @@ the arbitrary Python objects into awkward-arrays. jagged[jagged.counts > 0, 0] # -Doubly nested ``JaggedArrays`` are a native type in awkward-array: they +Doubly nested ``JaggedArrays`` are a native type in Awkward Array: they can be any number of levels deep. .. code-block:: python3 @@ -3839,7 +3672,7 @@ parallel. All of the array-reading functions have **executor** and result = tree.array("fTracks.fVertex[3]", executor=executor, blocking=False) result - # .wait()> + # .wait()> We can work on other things while the array is being read. @@ -3943,7 +3776,7 @@ zero-argument function to reveal it. immediately return a future - **executor=None**, **blocking=False**: not useful but not excluded. -Although parallel processing has been integrated into uproot’s design, +Although parallel processing has been integrated into Uproot’s design, it only provides a performance improvement in cases that are dominated by read time in non-Python functions. Python’s `Global Interpreter Lock `__ (GIL) severely limits @@ -3951,7 +3784,7 @@ parallel scaling of Python calls, but external functions that release the GIL (not all do) are immune. Thus, if reading is slow because the ROOT file has a lot of small -TBaskets, requiring uproot to step through them using Python calls, +TBaskets, requiring Uproot to step through them using Python calls, parallelizing that work in many threads has limited benefit because those threads stop and wait for each other due to Python’s GIL. If reading is slow because the ROOT file is heavily compressed—for @@ -3960,7 +3793,7 @@ with the number of threads. .. raw:: html -

+

If, on the other other hand, processing time is dominated by your analysis code and not file-reading, then parallelizing the file-reading @@ -3993,12 +3826,12 @@ useful: .. code-block:: python3 - file = uproot.open("http://scikit-hep.org/uproot/examples/Event.root") + file = uproot3.open("http://scikit-hep.org/uproot3/examples/Event.root") dict(file.classes()) - # {b'ProcessID0;1': uproot.rootio.TProcessID, - # b'htime;1': uproot.rootio.TH1F, - # b'T;1': uproot.rootio.TTree, - # b'hstat;1': uproot.rootio.TH1F} + # {b'ProcessID0;1': uproot3.rootio.TProcessID, + # b'htime;1': uproot3.rootio.TH1F, + # b'T;1': uproot3.rootio.TTree, + # b'hstat;1': uproot3.rootio.TH1F} .. code-block:: python3 @@ -4075,7 +3908,7 @@ conversion to common Python formats. .. code-block:: python3 - uproot.open("http://scikit-hep.org/uproot/examples/issue33.root")["cutflow"].show() + uproot3.open("http://scikit-hep.org/uproot3/examples/issue33.root")["cutflow"].show() # 0 41529 # +---------------------------------------------------+ @@ -4229,7 +4062,7 @@ ecosystem, are just a tuple of counts/bin contents and edge positions. # dtype=float32), # array([ 0., 1., 2., 3., 4., 5., 6., 7., 8., 9., 10.])) - uproot.open("http://scikit-hep.org/uproot/examples/hepdata-example.root")["hpxpy"].numpy() + uproot3.open("http://scikit-hep.org/uproot3/examples/hepdata-example.root")["hpxpy"].numpy() # (array([[0., 0., 0., ..., 0., 0., 0.], # [0., 0., 0., ..., 0., 0., 0.], # [0., 0., 0., ..., 0., 0., 0.], @@ -4253,18 +4086,17 @@ Uproot has a limited ability to *write* ROOT files, including TTrees of flat data (non-jagged: single number per event), a variety of histogram types, and ``TObjString`` (for metadata). -To write to a ROOT file in uproot, the file must be opened for writing -using ``uproot.create``, ``uproot.recreate``, or ``uproot.update`` +To write to a ROOT file in Uproot, the file must be opened for writing +using ``uproot3.create``, ``uproot3.recreate``, or ``uproot3.update`` (corresponding to ROOT’s ``"CREATE"``, ``"RECREATE"``, and ``"UPDATE"`` -file modes). The compression level is given by ``uproot.ZLIB(n)``, -``uproot.LZMA(n)``, ``uproot.LZ4(n)``, or ``None``. +file modes). The compression level is given by ``uproot3.ZLIB(n)``, +``uproot3.LZMA(n)``, ``uproot3.LZ4(n)``, or ``None``. .. code-block:: python3 - file = uproot.recreate("tmp.root", compression=uproot.ZLIB(4)) + file = uproot3.recreate("tmp.root", compression=uproot3.ZLIB(4)) -Unlike objects created by -`uproot.open `__, +Unlike objects created by ``uproot3.open``, you can *assign* to this ``file``. Just as reading behaves like getting an object from a Python dict, writing behaves like putting an object into a Python dict. @@ -4272,8 +4104,7 @@ into a Python dict. **Note:** this is a fundamental departure from how ROOT uses names. In ROOT, a name is a part of an object that is *also* used for lookup. With a dict-like interface, the object need not have a name; only the lookup -mechanism (e.g. -`ROOTDirectory `__) +mechanism (e.g. ``ROOTDirectory``) needs to manage names. When you write objects to the ROOT file, they can be unnamed things like @@ -4299,7 +4130,7 @@ read the data, like this: root [4] data->GetString() (const TString &) "Some object, like a TObjString."[31] -We can also read it back in uproot, like this: +We can also read it back in Uproot, like this: .. 
code-block:: python3 @@ -4307,7 +4138,7 @@ We can also read it back in uproot, like this: # [b'name;1'] dict(file.classes()) - # {b'name;1': uproot.rootio.TObjString} + # {b'name;1': uproot3.rootio.TObjString} file["name"] # b'Some object, like a TObjString.' @@ -4323,7 +4154,7 @@ from another file and modified, .. code-block:: python3 - histogram = uproot.open("http://scikit-hep.org/uproot/examples/histograms.root")["one"] + histogram = uproot3.open("http://scikit-hep.org/uproot3/examples/histograms.root")["one"] histogram.show() norm = histogram.allvalues.sum() for i in range(len(histogram)): @@ -4405,7 +4236,7 @@ or it may be created entirely in Python. # [5, inf] 0 | | # +--------------------------------------------------------+ -But it is particularly useful that uproot recognizes `Numpy +But it is particularly useful that Uproot recognizes `Numpy histograms `__, which may have come from other libraries. @@ -4461,29 +4292,29 @@ which may have come from other libraries. Writing TTrees -------------- -As of now, uproot can write TTrees whose branches are basic types +As of now, Uproot can write TTrees whose branches are basic types (integers and floating-point numbers). Basic usage: .. code-block:: python3 - import uproot + import uproot3 import numpy - with uproot.recreate("example.root") as f: - f["t"] = uproot.newtree({"branch": "int32"}) + with uproot3.recreate("example.root") as f: + f["t"] = uproot3.newtree({"branch": "int32"}) f["t"].extend({"branch": numpy.array([1, 2, 3, 4, 5])}) You can specify the branches in your TTree explicitly: .. code-block:: python3 - t = uproot.newtree({"branch1": int, - "branch2": numpy.int32, - "branch3": uproot.newbranch(numpy.float64, title="This is the title")}) + t = uproot3.newtree({"branch1": int, + "branch2": numpy.int32, + "branch3": uproot3.newbranch(numpy.float64, title="This is the title")}) -uproot.newtree() takes a python dictionary as an argument, where the key +``uproot3.newtree()`` takes a Python dictionary as an argument, where the key is the name of the branch and the value is the branch object or type of branch. @@ -4494,14 +4325,14 @@ This is an example of how you would add a title to your tree: .. code-block:: python3 - tree = uproot.newtree(branchdict, title="TTree Title") + tree = uproot3.newtree(branchdict, title="TTree Title") To specify the title of the branch, similar to how you would add a title to a tree: .. code-block:: python3 - b = uproot.newbranch("int32", title="This is the title") + b = uproot3.newbranch("int32", title="This is the title") Writing baskets ~~~~~~~~~~~~~~~ @@ -4525,7 +4356,7 @@ extend method: | Remember to add entries to all the branches and the number of entries added to the branches is the same! | -What must be kept in mind is that if you write a lot of small baskets, it is going to be much less performant(slow and will increase the size of the file) than writing large arrays into the TTree as a single basket -> uproot's implementation is optimized for large array oriented operations. +What must be kept in mind is that if you write a lot of small baskets, it is going to be much less performant(slow and will increase the size of the file) than writing large arrays into the TTree as a single basket -> Uproot's implementation is optimized for large array oriented operations. **Low level interface** @@ -4551,19 +4382,19 @@ By default, the baskets of all the branches are compressed depending on the compression set for the file. 
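As a concrete illustration of the points above (each call to ``extend``
writes the supplied arrays as new basket data, and by default those baskets
inherit the file's compression), here is a minimal sketch; the file name and
array contents are made up for the example.

.. code-block:: python3

    import numpy
    import uproot3

    with uproot3.recreate("baskets.root", compression=uproot3.ZLIB(4)) as f:
        f["t"] = uproot3.newtree({"branch": numpy.int32})

        # each extend call adds basket data, so a few large extends are
        # much faster than many small ones
        f["t"].extend({"branch": numpy.arange(1000, dtype=numpy.int32)})
        f["t"].extend({"branch": numpy.arange(1000, dtype=numpy.int32)})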
You can specify the compression of all the branches if you want it to be -separate from the compression specified for the entire file by using the uproot.newtree() method. +separate from the compression specified for the entire file by using the ``uproot3.newtree()`` method. -You can also specify the compression of each branch individually by using the uproot.newbranch() method. +You can also specify the compression of each branch individually by using the ``uproot3.newbranch()`` method. .. code-block:: python3 - b1 = uproot.newbranch("i4", compression=uproot.ZLIB(5)) - b2 = uproot.newbranch("i8", compression=uproot.LZMA(4)) - b3 = uproot.newbranch("f4") + b1 = uproot3.newbranch("i4", compression=uproot3.ZLIB(5)) + b2 = uproot3.newbranch("i8", compression=uproot3.LZMA(4)) + b3 = uproot3.newbranch("f4") branchdict = {"branch1": b1, "branch2": b2, "branch3": b3} - tree = uproot.newtree(branchdict, compression=uproot.LZ4(4)) - with uproot.recreate("example.root", compression=uproot.LZMA(5)) as f: + tree = uproot3.newtree(branchdict, compression=uproot3.LZ4(4)) + with uproot3.recreate("example.root", compression=uproot3.LZMA(5)) as f: f["t"] = tree f["t"].extend({"branch1": [1]*1000, "branch2": [2]*1000, "branch3": [3]*1000}) @@ -4572,7 +4403,7 @@ Acknowledgements Support for this work was provided by NSF cooperative agreement OAC-1836650 (IRIS-HEP), grant OAC-1450377 (DIANA/HEP) and PHY-1520942 (US-CMS LHC Ops). -Thanks especially to the gracious help of `uproot contributors `__! +Thanks especially to the gracious help of `Uproot contributors `__! .. inclusion-marker-4-do-not-remove diff --git a/appveyor.yml b/appveyor.yml deleted file mode 100644 index 0a20365b..00000000 --- a/appveyor.yml +++ /dev/null @@ -1,73 +0,0 @@ -environment: - matrix: - - PYTHON: "C:\\Python27" - PYTHON_VERSION: "2.7.10" - PYTHON_ARCH: "32" - NUMPY: "numpy==1.13.1" - - - PYTHON: "C:\\Python27" - PYTHON_VERSION: "2.7.10" - PYTHON_ARCH: "64" - NUMPY: "numpy==1.13.1" - - - PYTHON: "C:\\Python27" - PYTHON_VERSION: "2.7.10" - PYTHON_ARCH: "32" - NUMPY: "numpy>=1.15" - - - PYTHON: "C:\\Python27" - PYTHON_VERSION: "2.7.10" - PYTHON_ARCH: "64" - NUMPY: "numpy>=1.15" - - - PYTHON: "C:\\Python36" - PYTHON_VERSION: "3.6.x" - PYTHON_ARCH: "32" - NUMPY: "numpy==1.13.1" - - - PYTHON: "C:\\Python36-x64" - PYTHON_VERSION: "3.6.x" - PYTHON_ARCH: "64" - NUMPY: "numpy==1.13.1" - - - PYTHON: "C:\\Python36" - PYTHON_VERSION: "3.6.x" - PYTHON_ARCH: "32" - NUMPY: "numpy>=1.15" - - - PYTHON: "C:\\Python36-x64" - PYTHON_VERSION: "3.6.x" - PYTHON_ARCH: "64" - NUMPY: "numpy>=1.15" - - - PYTHON: "C:\\Python37" - PYTHON_VERSION: "3.7.x" - PYTHON_ARCH: "32" - NUMPY: "numpy>=1.15" - - - PYTHON: "C:\\Python38" - PYTHON_VERSION: "3.8.x" - PYTHON_ARCH: "32" - NUMPY: "numpy>=1.15" - - - PYTHON: "C:\\Python39" - PYTHON_VERSION: "3.9.x" - PYTHON_ARCH: "32" - NUMPY: "numpy>=1.15" - -install: - - "SET PATH=%PYTHON%;%PATH%" - - "python --version" - -build_script: - - "python -m pip install --upgrade pip" - - "python -m pip install %NUMPY%" - - "python -m pip install \"awkward>=0.12.0,<1.0\"" - - "python -c \"import awkward; print(awkward.__version__)\"" - - "python -m pip install \"uproot-methods>=0.7.0\"" - - "python -c \"import uproot_methods; print(uproot_methods.__version__)\"" - - "python -m pip install cachetools lz4 zstandard mock xxhash" - - "IF \"%PYTHON_VERSION%\"==\"2.7.10\" ( python -m pip install -i https://pypi.anaconda.org/carlkl/simple backports.lzma )" - - "python -m pip install pytest pytest-runner pandas requests" - - "curl -fsS -o 
tests/samples/Event.root http://scikit-hep.org/uproot/examples/Event.root" - - "python -m pytest -v tests" diff --git a/binder/requirements.txt b/binder/requirements.txt deleted file mode 100644 index 17e120f3..00000000 --- a/binder/requirements.txt +++ /dev/null @@ -1,17 +0,0 @@ -numpy>=1.13.1 -awkward>=0.12.0,<1.0 -uproot-methods>=0.7.0 -cachetools -pkgconfig -lz4 -zstandard -backports.lzma;python_version<"3.3" -pandas -requests -matplotlib -dask[array] -dask[dataframe] -fsspec -cloudpickle -pyyaml -xxhash diff --git a/binder/samples b/binder/samples deleted file mode 120000 index 09322685..00000000 --- a/binder/samples +++ /dev/null @@ -1 +0,0 @@ -../tests/samples \ No newline at end of file diff --git a/binder/trigger_binder.sh b/binder/trigger_binder.sh deleted file mode 100644 index 2e0c9afb..00000000 --- a/binder/trigger_binder.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env bash - -function trigger_binder() { - local URL="${1}" - - curl -L --connect-timeout 10 --max-time 30 "${URL}" - curl_return=$? - - # Return code 28 is when the --max-time is reached - if [ "${curl_return}" -eq 0 ] || [ "${curl_return}" -eq 28 ]; then - if [[ "${curl_return}" -eq 28 ]]; then - printf "\nBinder build started.\nCheck back soon.\n" - fi - else - return "${curl_return}" - fi - - return 0 -} - -function main() { - # 1: the Binder build API URL to curl - trigger_binder $1 -} - -main "$@" || exit 1 diff --git a/binder/tutorial.ipynb b/binder/tutorial.ipynb deleted file mode 100644 index ecd83e46..00000000 --- a/binder/tutorial.ipynb +++ /dev/null @@ -1,7466 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Introduction\n", - "\n", - "This tutorial is designed to help you start using uproot. Unlike the [reference documentation](https://uproot.readthedocs.io/en/latest/), which defines every parameter of every function, this tutorial provides introductory examples to help you learn how to use them.\n", - "\n", - "The original tutorial [has been archived](https://github.com/scikit-hep/uproot/blob/master/docs/old-tutorial.rst)—this version was written in June 2019 in response to feedback from a series of tutorials I presented early this year and common questions in the [GitHub issues](https://github.com/scikit-hep/uproot/issues). The new tutorial is [executable on Binder](https://mybinder.org/v2/gh/scikit-hep/uproot/master?urlpath=lab/tree/binder%2Ftutorial.ipynb) and may be read in any order, though it has to be executed from top to bottom because some variables are reused." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# What is uproot?\n", - "\n", - "Uproot is a Python package; it is pip and conda-installable, and it only depends on other Python packages. Although it is similar in function to [root_numpy](https://pypi.org/project/root-numpy/) and [root_pandas](https://pypi.org/project/root_pandas/), it does not compile into ROOT and therefore avoids issues in which the version used in compilation differs from the version encountered at runtime.\n", - "\n", - "In short, you should never see a segmentation fault.\n", - "\n", - "
" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Uproot is strictly concerned with file I/O only—all other functionality is handled by other libraries:\n", - "\n", - " * [uproot-methods](https://github.com/scikit-hep/uproot-methods): physics methods for types read from ROOT files, such as histograms and Lorentz vectors. It is intended to be largely user-contributed (and is).\n", - " * [awkward-array](https://github.com/scikit-hep/awkward-array): array manipulation beyond [Numpy](https://docs.scipy.org/doc/numpy/reference/). Several are encountered in this tutorial, particularly lazy arrays and jagged arrays." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "In the past year, uproot has become one of the most widely used Python packages made for particle physics, with users in all four LHC experiments, theory, neutrino experiments, XENON-nT (dark matter direct detection), MAGIC (gamma ray astronomy), and IceCube (neutrino astronomy).\n", - "\n", - "
" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Exploring a file\n", - "\n", - "[uproot.open](https://uproot.readthedocs.io/en/latest/opening-files.html#uproot-open) is the entry point for reading a single file.\n", - "\n", - "It takes a local filename path or a remote `http://` or `root://` URL. (HTTP requires the Python [requests](https://pypi.org/project/requests/) library and XRootD requires [pyxrootd](http://xrootd.org/), both of which have to be explicitly pip-installed if you installed uproot with pip, but are automatically installed if you installed uproot with conda.)" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 1, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "import uproot\n", - "\n", - "file = uproot.open(\"https://scikit-hep.org/uproot/examples/nesteddirs.root\")\n", - "file" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "[uproot.open](https://uproot.readthedocs.io/en/latest/opening-files.html#uproot-open) returns a [ROOTDirectory](https://uproot.readthedocs.io/en/latest/root-io.html#uproot-rootio-rootdirectory), which behaves like a Python dict; it has `keys()`, `values()`, and key-value access with square brackets." - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[b'one;1', b'three;1']" - ] - }, - "execution_count": 2, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "file.keys()" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "file[\"one\"]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Subdirectories also have type [ROOTDirectory](https://uproot.readthedocs.io/en/latest/root-io.html#uproot-rootio-rootdirectory), so they behave like Python dicts, too." - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[b'two;1', b'tree;1']" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "file[\"one\"].keys()" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[, ]" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "file[\"one\"].values()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "**What's the `b` before each object name?** Python 3 distinguishes between bytestrings and encoded strings. ROOT object names have no encoding, such as Latin-1 or Unicode, so uproot presents them as raw bytestrings. However, if you enter a Python string (no `b`) and it matches an object name (interpreted as plain ASCII), it will count as a match, as `\"one\"` does above." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "**What's the `;1` after each object name?** ROOT objects are versioned with a \"cycle number.\" If multiple objects are written to the ROOT file with the same name, they will have different cycle numbers, with the largest value being last. If you don't specify a cycle number, you'll get the latest one." 
- ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This file is deeply nested, so while you could find the TTree with" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "file[\"one\"][\"two\"][\"tree\"]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "you can also find it using a directory path, with slashes." - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "file[\"one/two/tree\"]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Here are a few more tricks for finding your way around a file:\n", - "\n", - " * the `keys()`, `values()`, and `items()` methods have `allkeys()`, `allvalues()`, `allitems()` variants that recursively search through all subdirectories;\n", - " * all of these functions can be filtered by name or class: see [ROOTDirectory.keys](https://uproot.readthedocs.io/en/latest/root-io.html#uproot.rootio.ROOTDirectory.keys).\n", - "\n", - "Here's how you would search the subdirectories to find all TTrees:" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[b'one/two/tree;1', b'one/tree;1', b'three/tree;1']" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "file.allkeys(filterclass=lambda cls: issubclass(cls, uproot.tree.TTreeMethods))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Or get a Python dict of them:" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{b'one/two/tree;1': ,\n", - " b'one/tree;1': ,\n", - " b'three/tree;1': }" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "all_ttrees = dict(file.allitems(filterclass=lambda cls: issubclass(cls, uproot.tree.TTreeMethods)))\n", - "all_ttrees" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Be careful: Python 3 is not as forgiving about matching key names. `all_ttrees` is a plain Python dict, so the key must be a bytestring and must include the cycle number." - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "all_ttrees[b\"one/two/tree;1\"]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Compressed objects in ROOT files\n", - "\n", - "Objects in ROOT files can be uncompressed, compressed with ZLIB, compressed with LZMA, or compressed with LZ4. Uproot picks the right decompressor and gives you the objects transparently: you don't have to specify anything. However, if an object is compressed with LZ4 and you don't have the [lz4](https://pypi.org/project/lz4/) library installed, you'll get an error with installation instructions in the message. (It is automatically installed if you installed uproot with conda.) 
ZLIB is part of the Python Standard Library, and LZMA is part of the Python 3 Standard Library, so you won't get error messages about these except for LZMA in Python 2 (for which there is [backports.lzma](https://pypi.org/project/backports.lzma/), automatically installed if you installed uproot with conda).\n", - "\n", - "The [ROOTDirectory](https://uproot.readthedocs.io/en/latest/root-io.html#uproot-rootio-rootdirectory) class has a `compression` property that tells you the compression algorithm and level associated with this file," - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 11, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "file.compression" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "but any object can be compressed with any algorithm at any level—this is only the default compression for the file. Some ROOT files are written with each TTree branch compressed using a different algorithm and level." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Exploring a TTree\n", - "\n", - "TTrees are special objects in ROOT files: they contain most of the physics data. Uproot presents TTrees as subclasses of [TTreeMethods](https://uproot.readthedocs.io/en/latest/ttree-handling.html#uproot-tree-ttreemethods).\n", - "\n", - "(**Why subclass?** Different ROOT files can have different versions of a class, so uproot generates Python classes to fit the data, as needed. All TTrees inherit from [TTreeMethods](https://uproot.readthedocs.io/en/latest/ttree-handling.html#uproot-tree-ttreemethods) so that they get the same data-reading methods.)" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 12, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "events = uproot.open(\"https://scikit-hep.org/uproot/examples/Zmumu.root\")[\"events\"]\n", - "events" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Although [TTreeMethods](https://uproot.readthedocs.io/en/latest/ttree-handling.html#uproot-tree-ttreemethods) objects behave like Python dicts of [TBranchMethods](https://uproot.readthedocs.io/en/latest/ttree-handling.html#uproot-tree-tbranchmethods) objects, the easiest way to browse a TTree is by calling its `show()` method, which prints the branches and their interpretations as arrays." 
- ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[b'Type',\n", - " b'Run',\n", - " b'Event',\n", - " b'E1',\n", - " b'px1',\n", - " b'py1',\n", - " b'pz1',\n", - " b'pt1',\n", - " b'eta1',\n", - " b'phi1',\n", - " b'Q1',\n", - " b'E2',\n", - " b'px2',\n", - " b'py2',\n", - " b'pz2',\n", - " b'pt2',\n", - " b'eta2',\n", - " b'phi2',\n", - " b'Q2',\n", - " b'M']" - ] - }, - "execution_count": 13, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "events.keys()" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Type (no streamer) asstring()\n", - "Run (no streamer) asdtype('>i4')\n", - "Event (no streamer) asdtype('>i4')\n", - "E1 (no streamer) asdtype('>f8')\n", - "px1 (no streamer) asdtype('>f8')\n", - "py1 (no streamer) asdtype('>f8')\n", - "pz1 (no streamer) asdtype('>f8')\n", - "pt1 (no streamer) asdtype('>f8')\n", - "eta1 (no streamer) asdtype('>f8')\n", - "phi1 (no streamer) asdtype('>f8')\n", - "Q1 (no streamer) asdtype('>i4')\n", - "E2 (no streamer) asdtype('>f8')\n", - "px2 (no streamer) asdtype('>f8')\n", - "py2 (no streamer) asdtype('>f8')\n", - "pz2 (no streamer) asdtype('>f8')\n", - "pt2 (no streamer) asdtype('>f8')\n", - "eta2 (no streamer) asdtype('>f8')\n", - "phi2 (no streamer) asdtype('>f8')\n", - "Q2 (no streamer) asdtype('>i4')\n", - "M (no streamer) asdtype('>f8')\n" - ] - } - ], - "source": [ - "events.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Basic information about the TTree, such as its number of entries, are available as properties." - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "(b'events', b'Z -> mumu events', 2304)" - ] - }, - "execution_count": 15, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "events.name, events.title, events.numentries" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Some terminology\n", - "\n", - "ROOT files contain objects internally referred to via `TKeys` (dict-like lookup in uproot). `TTree` organizes data in `TBranches`, and uproot interprets one `TBranch` as one array, either a [Numpy array](https://docs.scipy.org/doc/numpy/reference/generated/numpy.array.html) or an [awkward array](https://github.com/scikit-hep/awkward-array). `TBranch` data are stored in chunks called `TBaskets`, though uproot hides this level of granularity unless you dig into the details.\n", - "\n", - "
\n", - "\n", - "
" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Reading arrays from a TTree\n", - "\n", - "The bulk data in a TTree are not read until requested. There are many ways to do that:\n", - "\n", - " * select a TBranch and call [TBranchMethods.array](https://uproot.readthedocs.io/en/latest/ttree-handling.html#id11);\n", - " * call [TTreeMethods.array](https://uproot.readthedocs.io/en/latest/ttree-handling.html#array) directly from the TTree object;\n", - " * call [TTreeMethods.arrays](https://uproot.readthedocs.io/en/latest/ttree-handling.html#arrays) to get several arrays at a time;\n", - " * call [TBranch.lazyarray](https://uproot.readthedocs.io/en/latest/ttree-handling.html#id13), [TTreeMethods.lazyarray](https://uproot.readthedocs.io/en/latest/ttree-handling.html#lazyarray), [TTreeMethods.lazyarrays](https://uproot.readthedocs.io/en/latest/ttree-handling.html#lazyarrays), or [uproot.lazyarrays](https://uproot.readthedocs.io/en/latest/opening-files.html#uproot-lazyarray-and-lazyarrays) to get array-like objects that read on demand;\n", - " * call [TTreeMethods.iterate](https://uproot.readthedocs.io/en/latest/ttree-handling.html#iterate) or [uproot.iterate](https://uproot.readthedocs.io/en/latest/opening-files.html#uproot-iterate) to explicitly iterate over chunks of data (to avoid reading more than would fit into memory);\n", - " * call [TTreeMethods.pandas](https://uproot.readthedocs.io/en/latest/ttree-handling.html#id7) or [uproot.pandas.iterate](https://uproot.readthedocs.io/en/latest/opening-files.html#uproot-pandas-iterate) to get Pandas DataFrames ([Pandas](https://pandas.pydata.org/) must be installed).\n", - "\n", - "Let's start with the simplest." - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([82.20186639, 62.34492895, 62.34492895, ..., 81.27013558,\n", - " 81.27013558, 81.56621735])" - ] - }, - "execution_count": 16, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "a = events.array(\"E1\")\n", - "a" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Since `array` is singular, you specify one branch name and get one array back. This is a [Numpy array](https://docs.scipy.org/doc/numpy/reference/generated/numpy.array.html) of 8-byte floating point numbers, the [Numpy dtype](https://docs.scipy.org/doc/numpy/reference/arrays.dtypes.html) specified by the `\"E1\"` branch's interpretation." - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "asdtype('>f8')" - ] - }, - "execution_count": 17, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "events[\"E1\"].interpretation" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We can use this array in Numpy calculations; see the [Numpy documentation](https://docs.scipy.org/doc/numpy/) for details." 
- ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([4.40917801, 4.13268234, 4.13268234, ..., 4.39777861, 4.39777861,\n", - " 4.40141517])" - ] - }, - "execution_count": 18, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "import numpy\n", - "\n", - "numpy.log(a)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Numpy arrays are also the standard container for entering data into machine learning frameworks; see this [Keras introduction](https://keras.io/), [PyTorch introduction](https://pytorch.org/tutorials/beginner/deep_learning_60min_blitz.html), [TensorFlow introduction](https://www.tensorflow.org/guide/low_level_intro), or [Scikit-Learn introduction](https://scikit-learn.org/stable/tutorial/basic/tutorial.html) to see how to put Numpy arrays to work in machine learning." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The [TBranchMethods.array](https://uproot.readthedocs.io/en/latest/ttree-handling.html#id11) method is the same as [TTreeMethods.array](https://uproot.readthedocs.io/en/latest/ttree-handling.html#array) except that you don't have to specify the TBranch name (naturally). Sometimes one is more convenient, sometimes the other." - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "(array([82.20186639, 62.34492895, 62.34492895, ..., 81.27013558,\n", - " 81.27013558, 81.56621735]),\n", - " array([82.20186639, 62.34492895, 62.34492895, ..., 81.27013558,\n", - " 81.27013558, 81.56621735]))" - ] - }, - "execution_count": 19, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "events.array(\"E1\"), events[\"E1\"].array()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The plural `arrays` method is different. Whereas singular `array` could only return one array, plural `arrays` takes a list of names (possibly including wildcards) and returns them all in a Python dict." 
- ] - }, - { - "cell_type": "code", - "execution_count": 20, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{b'px1': array([-41.19528764, 35.11804977, 35.11804977, ..., 32.37749196,\n", - " 32.37749196, 32.48539387]),\n", - " b'py1': array([ 17.4332439 , -16.57036233, -16.57036233, ..., 1.19940578,\n", - " 1.19940578, 1.2013503 ]),\n", - " b'pz1': array([-68.96496181, -48.77524654, -48.77524654, ..., -74.53243061,\n", - " -74.53243061, -74.80837247])}" - ] - }, - "execution_count": 20, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "events.arrays([\"px1\", \"py1\", \"pz1\"])" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{b'px1': array([-41.19528764, 35.11804977, 35.11804977, ..., 32.37749196,\n", - " 32.37749196, 32.48539387]),\n", - " b'py1': array([ 17.4332439 , -16.57036233, -16.57036233, ..., 1.19940578,\n", - " 1.19940578, 1.2013503 ]),\n", - " b'pz1': array([-68.96496181, -48.77524654, -48.77524654, ..., -74.53243061,\n", - " -74.53243061, -74.80837247]),\n", - " b'px2': array([ 34.14443725, -41.19528764, -40.88332344, ..., -68.04191497,\n", - " -68.79413604, -68.79413604]),\n", - " b'py2': array([-16.11952457, 17.4332439 , 17.29929704, ..., -26.10584737,\n", - " -26.39840043, -26.39840043]),\n", - " b'pz2': array([ -47.42698439, -68.96496181, -68.44725519, ..., -152.2350181 ,\n", - " -153.84760383, -153.84760383])}" - ] - }, - "execution_count": 21, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "events.arrays([\"p[xyz]*\"])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "As with all ROOT object names, the TBranch names are bytestrings (prepended by `b`). If you know the encoding or it doesn't matter (`\"ascii\"` and `\"utf-8\"` are generic), pass a `namedecode` to get keys that are strings." - ] - }, - { - "cell_type": "code", - "execution_count": 22, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'px1': array([-41.19528764, 35.11804977, 35.11804977, ..., 32.37749196,\n", - " 32.37749196, 32.48539387]),\n", - " 'py1': array([ 17.4332439 , -16.57036233, -16.57036233, ..., 1.19940578,\n", - " 1.19940578, 1.2013503 ]),\n", - " 'pz1': array([-68.96496181, -48.77524654, -48.77524654, ..., -74.53243061,\n", - " -74.53243061, -74.80837247]),\n", - " 'px2': array([ 34.14443725, -41.19528764, -40.88332344, ..., -68.04191497,\n", - " -68.79413604, -68.79413604]),\n", - " 'py2': array([-16.11952457, 17.4332439 , 17.29929704, ..., -26.10584737,\n", - " -26.39840043, -26.39840043]),\n", - " 'pz2': array([ -47.42698439, -68.96496181, -68.44725519, ..., -152.2350181 ,\n", - " -153.84760383, -153.84760383])}" - ] - }, - "execution_count": 22, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "events.arrays([\"p[xyz]*\"], namedecode=\"utf-8\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "These array-reading functions have many parameters, but most of them have the same names and meanings across all the functions. Rather than discuss all of them here, they'll be presented in context in sections on special features below." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Caching data\n", - "\n", - "Every time you ask for arrays, uproot goes to the file and re-reads them. 
For especially large arrays, this can take a long time.\n", - "\n", - "For quicker access, uproot's array-reading functions have a **cache** parameter, which is an entry point for you to manage your own cache. The **cache** only needs to behave like a dict (many third-party Python caches do)." - ] - }, - { - "cell_type": "code", - "execution_count": 23, - "metadata": {}, - "outputs": [], - "source": [ - "mycache = {}\n", - "\n", - "# first time: reads from file\n", - "events.arrays([\"p[xyz]*\"], cache=mycache);\n", - "\n", - "# any other time: reads from cache\n", - "events.arrays([\"p[xyz]*\"], cache=mycache);" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "In this example, the cache is a simple Python dict. Uproot has filled it with unique ID → array pairs, and it uses the unique ID to identify an array that it has previously read. You can see that it's full by looking at those keys:" - ] - }, - { - "cell_type": "code", - "execution_count": 24, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'AAGUS3fQmKsR56dpAQAAf77v;events;px1;asdtype(Bf8(),Lf8());0-2304': array([-41.19528764, 35.11804977, 35.11804977, ..., 32.37749196,\n", - " 32.37749196, 32.48539387]),\n", - " 'AAGUS3fQmKsR56dpAQAAf77v;events;py1;asdtype(Bf8(),Lf8());0-2304': array([ 17.4332439 , -16.57036233, -16.57036233, ..., 1.19940578,\n", - " 1.19940578, 1.2013503 ]),\n", - " 'AAGUS3fQmKsR56dpAQAAf77v;events;pz1;asdtype(Bf8(),Lf8());0-2304': array([-68.96496181, -48.77524654, -48.77524654, ..., -74.53243061,\n", - " -74.53243061, -74.80837247]),\n", - " 'AAGUS3fQmKsR56dpAQAAf77v;events;px2;asdtype(Bf8(),Lf8());0-2304': array([ 34.14443725, -41.19528764, -40.88332344, ..., -68.04191497,\n", - " -68.79413604, -68.79413604]),\n", - " 'AAGUS3fQmKsR56dpAQAAf77v;events;py2;asdtype(Bf8(),Lf8());0-2304': array([-16.11952457, 17.4332439 , 17.29929704, ..., -26.10584737,\n", - " -26.39840043, -26.39840043]),\n", - " 'AAGUS3fQmKsR56dpAQAAf77v;events;pz2;asdtype(Bf8(),Lf8());0-2304': array([ -47.42698439, -68.96496181, -68.44725519, ..., -152.2350181 ,\n", - " -153.84760383, -153.84760383])}" - ] - }, - "execution_count": 24, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "mycache" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "though they're not very human-readable.\n", - "\n", - "If you're running out of memory, you could manually clear your cache by simply clearing the dict." - ] - }, - { - "cell_type": "code", - "execution_count": 25, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{}" - ] - }, - "execution_count": 25, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "mycache.clear()\n", - "mycache" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Now the same line of code reads from the file again." - ] - }, - { - "cell_type": "code", - "execution_count": 26, - "metadata": {}, - "outputs": [], - "source": [ - "# not in cache: reads from file\n", - "events.arrays([\"p[xyz]*\"], cache=mycache);" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Automatically managed caches\n", - "\n", - "This manual process of clearing the cache when you run out of memory is not very robust. 
What you want instead is a dict-like object that drops elements on its own when memory is scarce.\n", - "\n", - "Uproot has an [ArrayCache](https://uproot.readthedocs.io/en/latest/caches.html#uproot-cache-arraycache) class for this purpose, though it's a thin wrapper around the third-party [cachetools](https://pypi.org/project/cachetools/) library. Whereas [cachetools](https://pypi.org/project/cachetools/) drops old data from cache when a maximum number of items is reached, [ArrayCache](https://uproot.readthedocs.io/en/latest/caches.html#uproot-cache-arraycache) drops old data when the data usage reaches a limit, specified in bytes." - ] - }, - { - "cell_type": "code", - "execution_count": 27, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "(6, 20)" - ] - }, - "execution_count": 27, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "mycache = uproot.ArrayCache(\"100 kB\")\n", - "events.arrays(\"*\", cache=mycache);\n", - "\n", - "len(mycache), len(events.keys())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "With a limit of 100 kB, only 6 of the 20 arrays fit into cache, the rest have been evicted.\n", - "\n", - "All data sizes in uproot are specified as an integer in bytes (integers) or a string with the appropriate unit (interpreted as powers of 1024, not 1000)." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The fact that any dict-like object may be a cache opens many possibilities. If you're struggling with a script that takes a long time to load data, then crashes, you may want to try a process-independent cache like [memcached](https://realpython.com/python-memcache-efficient-caching/). If you have a small, fast disk, you may want to consider [diskcache](http://www.grantjenks.com/docs/diskcache/tutorial.html) to temporarily hold arrays from ROOT files on the big, slow disk." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Caching at all levels of abstraction\n", - "\n", - "All of the array-reading functions have a **cache** parameter to accept a cache object. This is the high-level cache, which caches data after it has been fully interpreted. These functions also have a **basketcache** parameter to cache data after reading and decompressing baskets, but before interpretation as high-level arrays. The main purpose of this is to avoid reading TBaskets twice when an iteration step falls in the middle of a basket (see below). There is also a **keycache** for caching ROOT's TKey objects, which use negligible memory but would be a bottleneck to re-read when TBaskets are provided by a **basketcache**.\n", - "\n", - "For more on these high and mid-level caching parameters, see [reference documentation](https://uproot.readthedocs.io/en/latest/caches.html).\n", - "\n", - "At the lowest level of abstraction, raw bytes are cached by the HTTP and XRootD remote file readers. You can control the memory remote file memory use with `uproot.HTTPSource.defaults[\"limitbytes\"]` and `uproot.XRootDSource.defaults[\"limitbytes\"]`, either by globally setting these parameters before opening a file, or by passing them to [uproot.open](https://uproot.readthedocs.io/en/latest/opening-files.html#uproot-open) through the **limitbytes** parameter." 
- ] - }, - { - "cell_type": "code", - "execution_count": 28, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "(100.0, 100.0)" - ] - }, - "execution_count": 28, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# default remote file caches in MB\n", - "uproot.HTTPSource.defaults[\"limitbytes\"] / 1024**2, uproot.XRootDSource.defaults[\"limitbytes\"] / 1024**2" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "If you want to limit this cache to less than the default **chunkbytes** of 1 MB, be sure to make the **chunkbytes** smaller, so that it's able to load at least one chunk!" - ] - }, - { - "cell_type": "code", - "execution_count": 29, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 29, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "uproot.open(\"https://scikit-hep.org/uproot/examples/Zmumu.root\", limitbytes=\"100 kB\", chunkbytes=\"10 kB\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "By default (unless **localsource** is overridden), local files are memory-mapped, so the operating system manages its byte-level cache." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Lazy arrays\n", - "\n", - "If you call [TBranchMethods.array](https://uproot.readthedocs.io/en/latest/ttree-handling.html#id11), [TTreeMethods.array](https://uproot.readthedocs.io/en/latest/ttree-handling.html#array), or [TTreeMethods.arrays](https://uproot.readthedocs.io/en/latest/ttree-handling.html#arrays), uproot reads the file or cache immediately and returns an in-memory array. For exploratory work or to control memory usage, you might want to let the data be read on demand.\n", - "\n", - "The [TBranch.lazyarray](https://uproot.readthedocs.io/en/latest/ttree-handling.html#id13), [TTreeMethods.lazyarray](https://uproot.readthedocs.io/en/latest/ttree-handling.html#lazyarray), [TTreeMethods.lazyarrays](https://uproot.readthedocs.io/en/latest/ttree-handling.html#lazyarrays), and [uproot.lazyarrays](https://uproot.readthedocs.io/en/latest/opening-files.html#uproot-lazyarray-and-lazyarrays) functions take most of the same parameters but return lazy array objects, rather than Numpy arrays." - ] - }, - { - "cell_type": "code", - "execution_count": 30, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - " ... ] at 0x7ac1a8404d30>" - ] - }, - "execution_count": 30, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "data = events.lazyarrays(\"*\")\n", - "data" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This `ChunkedArray` represents all the data in the file in chunks specified by ROOT's internal baskets (specifically, the places where the baskets align, called \"clusters\"). Each chunk contains a `VirtualArray`, which is read when any element from it is accessed." 
- ] - }, - { - "cell_type": "code", - "execution_count": 31, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 31, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "data = events.lazyarrays(entrysteps=500) # chunks of 500 events each\n", - "dataE1 = data[\"E1\"]\n", - "dataE1" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Requesting `\"E1\"` through all the chunks and printing it (above) has caused the first and last chunks of the array to be read, because that's all that got written to the screen. (See the `...`?)" - ] - }, - { - "cell_type": "code", - "execution_count": 32, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[True, False, False, False, True]" - ] - }, - "execution_count": 32, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "[chunk.ismaterialized for chunk in dataE1.chunks]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "These arrays can be used with [Numpy's universal functions](https://docs.scipy.org/doc/numpy/reference/ufuncs.html) (ufuncs), which are the mathematical functions that perform elementwise mathematics." - ] - }, - { - "cell_type": "code", - "execution_count": 33, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 33, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "numpy.log(dataE1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Now all of the chunks have been read, because the values were needed to compute `log(E1)` for all `E1`." - ] - }, - { - "cell_type": "code", - "execution_count": 34, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[True, True, True, True, True]" - ] - }, - "execution_count": 34, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "[chunk.ismaterialized for chunk in dataE1.chunks]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "(**Note:** only ufuncs recognize these lazy arrays because Numpy provides a [mechanism to override ufuncs](https://www.numpy.org/neps/nep-0013-ufunc-overrides.html) but a [similar mechanism for high-level functions](https://www.numpy.org/neps/nep-0018-array-function-protocol.html) is still in development. To turn lazy arrays into Numpy arrays, pass them to the Numpy constructor, as shown below. 
This causes the whole array to be loaded into memory and to be stitched together into a contiguous whole.)" - ] - }, - { - "cell_type": "code", - "execution_count": 35, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([82.20186639, 62.34492895, 62.34492895, ..., 81.27013558,\n", - " 81.27013558, 81.56621735])" - ] - }, - "execution_count": 35, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "numpy.array(dataE1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Lazy array of many files\n", - "\n", - "There's a lazy version of each of the array-reading functions in [TTreeMethods](https://uproot.readthedocs.io/en/latest/ttree-handling.html#uproot-tree-ttreemethods) and [TBranchMethods](https://uproot.readthedocs.io/en/latest/ttree-handling.html#uproot-tree-tbranchmethods), but there's also module-level [uproot.lazyarray](https://uproot.readthedocs.io/en/latest/opening-files.html#uproot.tree.lazyarray) and [uproot.lazyarrays](https://uproot.readthedocs.io/en/latest/opening-files.html#uproot.tree.lazyarrays). These functions let you make a lazy array that spans many files.\n", - "\n", - "These functions may be thought of as alternatives to ROOT's TChain: a TChain presents many files as though they were a single TTree, and a file-spanning lazy array presents many files as though they were a single array. See Iteration below as a more explicit TChain alternative." - ] - }, - { - "cell_type": "code", - "execution_count": 36, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 36, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "data = uproot.lazyarray(\n", - " # list of files; local files can have wildcards (*)\n", - " [\"samples/sample-%s-zlib.root\" % x\n", - " for x in [\"5.23.02\", \"5.24.00\", \"5.25.02\", \"5.26.00\", \"5.27.02\", \"5.28.00\",\n", - " \"5.29.02\", \"5.30.00\", \"6.08.04\", \"6.10.05\", \"6.14.00\"]],\n", - " # TTree name in each file\n", - " \"sample\",\n", - " # branch(s) in each file for lazyarray(s)\n", - " \"f8\")\n", - "data" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This `data` represents the entire set of files, and the only up-front processing that had to be done was to find out how many entries each TTree contains.\n", - "\n", - "It uses the [uproot.numentries](https://uproot.readthedocs.io/en/latest/opening-files.html#uproot-numentries) shortcut method (which reads less data than normal file-opening):" - ] - }, - { - "cell_type": "code", - "execution_count": 37, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'samples/sample-5.23.02-zlib.root': 30,\n", - " 'samples/sample-5.24.00-zlib.root': 30,\n", - " 'samples/sample-5.25.02-zlib.root': 30,\n", - " 'samples/sample-5.26.00-zlib.root': 30,\n", - " 'samples/sample-5.27.02-zlib.root': 30,\n", - " 'samples/sample-5.28.00-zlib.root': 30,\n", - " 'samples/sample-5.29.02-zlib.root': 30,\n", - " 'samples/sample-5.30.00-zlib.root': 30,\n", - " 'samples/sample-6.08.04-zlib.root': 30,\n", - " 'samples/sample-6.10.05-zlib.root': 30,\n", - " 'samples/sample-6.14.00-zlib.root': 30}" - ] - }, - "execution_count": 37, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "dict(uproot.numentries(\n", - " # list of files; local files can have wildcards (*)\n", - " [\"samples/sample-%s-zlib.root\" % x\n", - " for x in [\"5.23.02\", \"5.24.00\", \"5.25.02\", \"5.26.00\", \"5.27.02\", 
\"5.28.00\",\n", - " \"5.29.02\", \"5.30.00\", \"6.08.04\", \"6.10.05\", \"6.14.00\"]],\n", - " # TTree name in each file\n", - " \"sample\",\n", - " # total=True adds all values; total=False leaves them as a dict\n", - " total=False))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Lazy arrays with caching\n", - "\n", - "By default, lazy arrays hold onto all data that have been read as long as the lazy array continues to exist. To use a lazy array as a window into a very large dataset, you'll have to limit how much it's allowed to keep in memory at a time.\n", - "\n", - "This is caching, and the caching mechanism is the same as before:" - ] - }, - { - "cell_type": "code", - "execution_count": 38, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "
... ] at 0x7ac1a831e710>" - ] - }, - "execution_count": 38, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "mycache = uproot.cache.ArrayCache(100*1024) # 100 kB\n", - "\n", - "data = events.lazyarrays(entrysteps=500, cache=mycache)\n", - "data" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Before performing a calculation, the cache is empty." - ] - }, - { - "cell_type": "code", - "execution_count": 39, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "0" - ] - }, - "execution_count": 39, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "len(mycache)" - ] - }, - { - "cell_type": "code", - "execution_count": 40, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 40, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "numpy.sqrt((data[\"E1\"] + data[\"E2\"])**2 - (data[\"px1\"] + data[\"px2\"])**2 -\n", - " (data[\"py1\"] + data[\"py2\"])**2 - (data[\"pz1\"] + data[\"pz2\"])**2)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "After performing the calculation, the cache contains only as many chunks as it could hold." - ] - }, - { - "cell_type": "code", - "execution_count": 41, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "(27, 40)" - ] - }, - "execution_count": 41, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# chunks in cache chunks touched to compute (E1 + E2)**2 - (px1 + px2)**2 - (py1 + py2)**2 - (pz1 + pz2)**2\n", - "len(mycache), len(data[\"E1\"].chunks) * 8" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Lazy arrays as lightweight skims\n", - "\n", - "The `ChunkedArray` and `VirtualArray` classes are defined in the [awkward-array](https://github.com/scikit-hep/awkward-array#awkward-array) library installed with uproot. These arrays can be saved to files in a way that preserves their virtualness, which allows you to save a \"diff\" with respect to the original ROOT files.\n", - "\n", - "Below, we load lazy arrays from a ROOT file with **persistvirtual=True** and add a derived feature:" - ] - }, - { - "cell_type": "code", - "execution_count": 42, - "metadata": {}, - "outputs": [], - "source": [ - "data = events.lazyarrays([\"E*\", \"p[xyz]*\"], persistvirtual=True)\n", - "\n", - "data[\"mass\"] = numpy.sqrt((data[\"E1\"] + data[\"E2\"])**2 - (data[\"px1\"] + data[\"px2\"])**2 -\n", - " (data[\"py1\"] + data[\"py2\"])**2 - (data[\"pz1\"] + data[\"pz2\"])**2)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "and save the whole thing to an awkward-array file (`.awkd`)." - ] - }, - { - "cell_type": "code", - "execution_count": 43, - "metadata": {}, - "outputs": [], - "source": [ - "import awkward\n", - "\n", - "awkward.save(\"derived-feature.awkd\", data, mode=\"w\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "When we read it back, the derived features come from the awkward-array file but the original features are loaded as pointers to the original ROOT files (`VirtualArrays` whose array-making function knows the original ROOT filenames—don't move them!)." 
- ] - }, - { - "cell_type": "code", - "execution_count": 44, - "metadata": {}, - "outputs": [], - "source": [ - "data2 = awkward.load(\"derived-feature.awkd\")" - ] - }, - { - "cell_type": "code", - "execution_count": 45, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 45, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# reads from derived-feature.awkd\n", - "data2[\"mass\"]" - ] - }, - { - "cell_type": "code", - "execution_count": 46, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 46, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# reads from the original ROOT flies\n", - "data2[\"E1\"]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Similarly, a dataset with a cut applied saves the identities of the selected events but only pointers to the original ROOT data. This acts as a lightweight skim." - ] - }, - { - "cell_type": "code", - "execution_count": 47, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "
... ] at 0x7ac1a8278c18>" - ] - }, - "execution_count": 47, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "selected = data[data[\"mass\"] < 80]\n", - "selected" - ] - }, - { - "cell_type": "code", - "execution_count": 48, - "metadata": {}, - "outputs": [], - "source": [ - "awkward.save(\"selected-events.awkd\", selected, mode=\"w\")" - ] - }, - { - "cell_type": "code", - "execution_count": 49, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "
... ] at 0x7ac1a8278668>" - ] - }, - "execution_count": 49, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "data3 = awkward.load(\"selected-events.awkd\")\n", - "data3" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Lazy arrays in Dask\n", - "\n", - "[Dask](https://dask.org/) is a framework for delayed and distributed computation with lazy array and dataframe interfaces. To turn uproot's lazy arrays into Dask objects, use the [uproot.daskarray](https://uproot.readthedocs.io/en/latest/opening-files.html#uproot.tree.daskarray) and [uproot.daskframe](https://uproot.readthedocs.io/en/latest/opening-files.html#uproot.tree.daskframe) functions." - ] - }, - { - "cell_type": "code", - "execution_count": 50, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
(Dask array HTML repr omitted: Bytes 18.43 kB, Shape (2304,), 2 Tasks / 1 Chunk, Type float64, awkward.ChunkedArray)
" - ], - "text/plain": [ - "dask.array" - ] - }, - "execution_count": 50, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "uproot.daskarray(\"https://scikit-hep.org/uproot/examples/Zmumu.root\", \"events\", \"E1\")" - ] - }, - { - "cell_type": "code", - "execution_count": 51, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
" - ], - "text/plain": [ - "Dask DataFrame Structure:\n", - " Type Run Event E1 px1 py1 pz1 pt1 eta1 phi1 Q1 E2 px2 py2 pz2 pt2 eta2 phi2 Q2 M\n", - "npartitions=1 \n", - "0 float64 float64 float64 float64 float64 float64 float64 float64 float64 float64 float64 float64 float64 float64 float64 float64 float64 float64 float64 float64\n", - "2303 ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...\n", - "Dask Name: concat-indexed, 101 tasks" - ] - }, - "execution_count": 51, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "uproot.daskframe(\"https://scikit-hep.org/uproot/examples/Zmumu.root\", \"events\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Iteration\n", - "\n", - "Lazy arrays _implicitly_ step through chunks of data to give you the impression that you have a larger array than memory can hold all at once. The next two methods _explicitly_ step through chunks of data, to give you more control over the process.\n", - "\n", - "[TTreeMethods.iterate](https://uproot.readthedocs.io/en/latest/ttree-handling.html#iterate) iterates over chunks of a TTree and [uproot.iterate](https://uproot.readthedocs.io/en/latest/opening-files.html#uproot-iterate) iterates through files.\n", - "\n", - "Like a file-spanning lazy array, a file-spanning iterator erases the difference between files and may be used as a TChain alternative. However, the iteration is over _chunks of many events_, not _single events_." - ] - }, - { - "cell_type": "code", - "execution_count": 52, - "metadata": {}, - "outputs": [], - "source": [ - "histogram = None\n", - "\n", - "for data in events.iterate([\"E*\", \"p[xyz]*\"], namedecode=\"utf-8\"):\n", - " # operate on a batch of data in the loop\n", - " mass = numpy.sqrt((data[\"E1\"] + data[\"E2\"])**2 - (data[\"px1\"] + data[\"px2\"])**2 -\n", - " (data[\"py1\"] + data[\"py2\"])**2 - (data[\"pz1\"] + data[\"pz2\"])**2)\n", - "\n", - " # accumulate results\n", - " counts, edges = numpy.histogram(mass, bins=120, range=(0, 120))\n", - " if histogram is None:\n", - " histogram = counts, edges\n", - " else:\n", - " histogram = histogram[0] + counts, edges" - ] - }, - { - "cell_type": "code", - "execution_count": 53, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAY4AAAEGCAYAAABy53LJAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8li6FKAAAZ1ElEQVR4nO3df7RdZX3n8fdHRFQkCkNgxUAm2Ik6wdGoWURqV8eKLciooT8cIqOlrVNYM7jUGddUUKvtTGmtVTvaEYdU0XQKAqtKQx2nFRldaovBoFFJEEmNg4E7EIs1WDso8J0/zr54uJx779n33nPPj7xfa2Wds5+z9znfJ3ef/T3P8+z97FQVkiT161HDDkCSNF5MHJKkVkwckqRWTBySpFZMHJKkVh497AAW49hjj621a9cOOwxJGis33XTTd6pq5UK3H+vEsXbtWnbu3DnsMCRprCT5P4vZ3q4qSVIrJg5JUismDklSKyYOSVIrJg5JUismDklSKyYOSVIrJg5JUismDklSKyYOSVIrJg5JUismDklSKyYOSVIrJg5JUismDklSKyYOSVIrJg5JUitjfQdASZPvih23s33XHQ8tb96wmnM2rRliRLLFIWmkbd91B3umDgKwZ+rgw5KIhsPEIWnkrV+1gqvOP5X1q1YMOxRh4pAktWTikCS1MrDEkeSxSW5M8pUku5P8dlN+TJLrktzWPB7dtc1FSfYmuTXJ6YOKTZK0cINscdwHvLCqngVsAM5I8jzgQuD6qloHXN8sk2Q9sAU4GTgDuCTJYQOMT5K0AANLHNXx/Wbx8OZfAZuBbU35NuCs5vlm4Mqquq+q9gF7gVMGFZ8kaWEGOsaR5LAku4C7geuqagdwfFVNATSPxzWrrwa+3bX5/qZs5nuel2Rnkp0HDhwYZPiSpB4Gmjiq6oGq2gCcAJyS5BlzrJ5eb9HjPbdW1caq2rhy5cqlClWS1KdlOauqqv4e+AydsYu7kqwCaB7vblbbD5zYtdkJwJ3LEZ8kqX+DPKtqZZInNc8fB7wI+DpwLXBus9q5wPbm+bXAliRHJDkJWAfcOKj4JEkLM8i5qlYB25ozox4FXF1VH09yA3B1klcDtwMvB6iq3UmuBvYA9wMXVNUDA4xPkrQAA0scVfVV4Nk9yv8OOG2WbS4GLh5UTJKkxfPKcUlSKyYOSVIrJg5JUismDklSKyYOSVIrJg5JUismDklSKyYOSVIrJg5JUismDklSKyYOSVIrJg5JUismDklSKyYOSVIrJg5JUiuDvJGTJC25PVMHOfvSGwDYvGE152xaM+SIDj0mDkljY/OG1Q893zN1EMDEMQQmDklj45xNax5KFNOtDi0/xzgkSa2YOCRJrZg4JEmtmDgkSa2YOCRJrQwscSQ5Mcmnk9ySZHeS1zXlv5XkjiS7mn9ndm1zUZK9SW5NcvqgYpMkLdwgT8e9H3hDVX0pyVHATUmua177w6p6Z/fKSdYDW4CTgScDn0ry1Kp6YIAxSpJaGliLo6qmqupLzfN7gVuA1XNsshm4sqruq6p9wF7glEHFJ0lamGUZ40iyFng2sKMpek2Srya5LMnRTdlq4Ntdm+2nR6JJcl6SnUl2HjhwYIBRS5J6GXjiSPIE4KPA66vqIPB+4CeADcAU8K7pVXtsXo8oqNpaVRurauPKlSsHFLUkaTYDTRxJDqeTNC6vqo8BVNVdVfVAVT0I/DE/7o7aD5zYtfkJwJ2DjE+S1N4gz6oK8EHglqp6d1f5qq7Vfh64uXl+LbAlyRFJTgLWATcOKj5J0sIM8qyq5wOvAr6WZFdT9ibgFUk20OmG+hZwPkBV7U5yNbCHzhlZF3hGlSSNnoEljqr6PL3HLT4xxzYXAxcPKiZJ0uJ55bgkqRUThySpFROHJKkVE4ckqRUThySpFROHJKkVE4ckqRUThySpFROHJKkVE4ckqRUThySplXkTR5JfSHJbku8lOZjk3iQHlyM4SYeuK3bcztmX3sCeKQ83o6afSQ7fAby0qm4ZdDCSNG37rjvYM3WQ9atWsHnDXHed1nLrJ3HcZdKQNAzrV63gqvNPHXYYmqGfxLEzyVXAnwP3TRdO39FPknRo6SdxrAB+APxcV1kBJg5JOgTNmziq6leXIxBJ0niYNXEk+Y2qekeSP6LTwniYqnrtQCOTJI2kuVoc0wPiO5cjEEnSeJg1cVTVXzSP2wCSrOgs1r3LFJskaQT1cwHgxiRfA74K3JzkK0meO/jQJEmjqJ+zqi4D/n1VfQ4gyU8BHwKeOcjAJEmjqZ+5qu6dThoAVfV5wO4qSTpEzZo4kjwnyXOAG5NcmuQFSf5lkkuAz8z3xklOTPLpJLck2Z3kdU35MUmua+a/ui7J0V3bXJRkb5Jbk5y+BPWTJC2xubqq3jVj+W1dzx9xem4P9wNvqKovJTkKuCnJdcCvANdX1duTXAhcCLwxyXpgC3Ay8GTgU0meWlUP9FkXSdIymOusqp9ZzBtX1RQw1Ty/N8ktwGpgM/CCZrVtdFovb2zKr6yq+4B9SfYCpwA3LCYOSdLSWpb7cSRZCzwb2AEc3ySV6eRyXLPaauDbXZvtb8pmvtd5SXYm2XngwIFBhi1J6mHgiSPJE4CPAq+vqrkm1k+Psl5XrG+tqo1VtXHlypVLFaYkqU9zJo4kj0rykwt98ySH00kal3fNpntXklXN66uAu5vy/cCJXZufANy50M+WJA3GnImjqh7kkYPkfUkS4IPALVX17q6XrgXObZ6fC2zvKt+S5IgkJwHrgBsX8tmSpMHp5wLATyb5ReBjVdXP2VTTng+8Cvhakl1N2ZuAtwNXJ3k1cDvwcoCq2p3kamAPnTOyLvCMKklz2TN1kLMv7Zw/s3nDas7ZtGbIER0a+kkc/xE4EnggyT/SGYuoqlox10bNhYK9xi0ATptlm4uBi/uISdIhrvt2stP3JTdxLI9+7sdx1HIEIkltnLNpzUOJYrrVoeXRzySHSfLKJL/ZLJ+Y5JTBhyZJGkX9nI57CXAqcE6z/H3gfQOLSJI00voZ49hUVc9J8mWAqvpukscMOC5J0ojqp8XxoySH0VyMl2Ql8OBAo5Ikjax+Esd7gWuA45NcDHwe+N2BRiVJGln9nFV1eZKb+PEptGdV1S1zbSNJmlz9jHEAPB6Y7q563ODCkSSNun5Ox30rnenPjwGOBT6U5C2DDkySNJr6aXG8Anh2Vf0/gCRvB74E/M4gA5MkjaZ+Bse/BTy2a/kI4G8HEo0kaeT10+K4D9jd3Pa1gJ8FPp/kvQBV9doBxidJGjH9JI5rmn/TPjOYUCRJ46Cf03G3LUcgkqTxsCz3HJckTQ4ThySplVaJo7kH+Zw3cJIkTbZ+LgC8IsmKJEfSua3rrUn+0+BDkySNon5aHOur6iBwFvAJYA2de4lLkg5B/SSOw5McTidxbK+qHw04JknSCOsncVxK5+rxI4HPJvmnwPcGGZQkaXT1kzj+oqpWV9WZVVXA7cCvDTguSdKI6idxfLR7oUkeV863UZLLktyd5Oaust9KckeSXc2/M7
teuyjJ3iS3Jjm9TSUkSctn1ivHkzwdOBl4YpJf6HppBQ+f9HA2Hwb+G/AnM8r/sKreOeOz1gNbms97MvCpJE+tqgf6+BxJ0jKaa8qRpwEvAZ4EvLSr/F7g1+d746r6bJK1fcaxGbiyqu4D9iXZC5wC3NDn9pKkZTJr4qiq7cD2JKdW1VIewF+T5JeBncAbquq7wGrgC13r7G/KHiHJecB5AGvWrFnCsCRJ/ehnjGNvkjcl2dqMW1yW5LIFft77gZ8ANgBTwLua8vRYt3q9QVVtraqNVbVx5cqVCwxDkrRQ/Uyrvh34HPApYFFjDlV11/TzJH8MfLxZ3A+c2LXqCcCdi/ksSdJg9JM4Hl9Vb1yKD0uyqqqmmsWfB6bPuLoWuCLJu+kMjq8DblyKz5Q0Pq7YcTvbd90BwJ6pg6xf5dR4o6ifxPHxJGdW1SfavHGSjwAvAI5Nsh94G/CCJBvodEN9CzgfoKp2J7mazlxY9wMXeEaVdOjZvuuOhxLG+lUr2Lyh51CnhqyfxPE64E1Jfgj8kM54RFXVnD8FquoVPYo/OMf6FwMX9xGPpAm2ftUKrjr/1GGHoTn0cwfAo5YjEEnSeOhnWvUkeWWS32yWT0xyyuBDkySNon5Ox70EOBU4p1n+PvC+gUUkSRpp/YxxbKqq5yT5MkBVfTfJYwYclyRpRPXT4vhRksNoLshLshJ4cKBRSZJGVj+J473ANcBxSS4GPg/87kCjkiSNrH7Oqro8yU3AaXROxT2rqm4ZeGSSpJE0b+JI8h7gqqpyQFyS1FdX1ZeAtzQ3WfqDJBsHHZQkaXTNmziqaltVnUnn/hjfAH4/yW0Dj0ySNJL6aXFM+2fA04G1wNcHEo0kaeT1c+X4dAvjP9OZzfa5VfXSeTaTJE2ofi4A3AecWlXfGXQwkqTR109X1VbgjCRvBUiyxrmqJOnQ1U/ieB+duaqmp0m/F+eqkqRDlnNVSZJa6SdxOFeVpLHRffvZzRtWc86mNUOOaPL0kzhmzlX1S8BbBhqVJLW0Z+ogZ196Azv23QPAUY/tHN5MHEvPuaokjb3ue5NvOukYNm9Y/VCrQ0uvnxYHVfV1vOhP0og6Z9OaR7QsTByD0+bKcUmSTBySpHZMHJKkVgaWOJJcluTuJDd3lR2T5LoktzWPR3e9dlEzdfutSU4fVFySpMUZZIvjw8AZM8ouBK6vqnXA9c0ySdYDW4CTm20uaa4dkSSNmIEljqr6LHDPjOLNwLbm+TbgrK7yK6vqvqraB+ylc/8PSdKIWe4xjuOragqgeTyuKV8NfLtrvf1N2SMkOS/JziQ7Dxw4MNBgJUmPNCqD4+lRVr1WrKqtVbWxqjauXLlywGFJkmZa7sRxV5JVAM3j3U35fuDErvVOAO5c5tgkSX1Y7sRxLXBu8/xcYHtX+ZYkRyQ5CVgH3LjMsUmS+tDXlCMLkeQjwAuAY5PsB94GvB24OsmrgduBlwNU1e4kVwN7gPuBC6rqgUHFJml0dM9mu2fqIOtXrRhyRJrPwBJHVb1ilpdOm2X9i4GLBxWPpNG0fdcdDyWM9atWPGzCQo2mgSUOSerX+lUruOr8U4cdhvo0KmdVSZLGhIlDktSKiUOS1IqJQ5LUiolDktSKiUOS1IqJQ5LUiolDktSKiUOS1IqJQ5LUiolDktSKiUOS1IqTHEpadk6lPt5scUhadtNTqQNOpT6GbHFIGgqnUh9ftjgkSa2YOCRJrZg4JEmtmDgkSa2YOCRJrZg4JEmtmDgkSa0M5TqOJN8C7gUeAO6vqo1JjgGuAtYC3wL+dVV9dxjxSZJmN8wWx89U1Yaq2tgsXwhcX1XrgOubZUnSiBmlrqrNwLbm+TbgrCHGIkmaxbCmHCngk0kKuLSqtgLHV9UUQFVNJTmu14ZJzgPOA1izZs1yxStpkZzYcHIMq8Xx/Kp6DvBi4IIkP93vhlW1tao2VtXGlStXDi5CSUvKiQ0nx1BaHFV1Z/N4d5JrgFOAu5Ksalobq4C7hxGbpMFxYsPJsOwtjiRHJjlq+jnwc8DNwLXAuc1q5wLblzs2SZNlz9RBzr70Bs6+9Aau2HH7sMOZGMNocRwPXJNk+vOvqKq/TPJF4OokrwZuB14+hNgkTYjurrDpLrJzNjkuuhSWPXFU1TeBZ/Uo/zvgtOWOR9JkOmfTmocSxdmX3jDkaCbLKJ2OK0kaA94BUNJATZ+G6ym4k8MWh6SB6k4anoI7GWxxSBo4T8OdLLY4JEmtmDgkSa3YVSWpte55pzZvWO31EYcYE4ek1rrnnQIvrDvUmDgkLYin1h66HOOQJLVii0NaJt3jAuDYgMaXLQ5pmXSPC+yZOviwJCKNExOHtIymL4RzfEDj7JDuqrLrQNK0mceDaR4XHumQThzdc+g4X780t0m/Z3iviRg9LvR2yCWOXjv/Veef6nz90jy6D6zTExaO4zjNXD0NM+fU8rjQ2yGXOHrt/JL6M/PA2n0Anq2rZ1RaJ9O3kd2x7x4ANp10DDv23cOOffc47XtLh1zigNln6pzesWazlH2djq9oIWbbb3pNAbIU+1g/3VO9DsjdRuEHWvfnbzrpmJ7/R6MQ57g4JBJHPzv/fDtMP32ds/3imvk552xaM+/4ykK/9PNt18/7mtRG12z7Ta8pQLrX7f5lDXP/Tbv//t3JoNeBdbYD8qjpvo1sP+Wa2yGROPrpnppvB+qnr3O+5u7MBDHX+MpCB+7n266f9/WkgdE2234zvd9NtwC6x/Bm/niC2f+m3X//+ZKBB95D00Qkjn5+IS/FjWS6u7Jm+zLN9TnTX+buL/XM9+41UDdf0mo74N/P+07aSQOj3IpaTGwz96fuH0Xdy90H+O79sNtcg8RSt7FOHN888A+P6Fvt/jW1lPc67v5Czmz2T5vvc2b7Uk8/9jNQ16sve9AD/r2S2rgZ5VbUQmPrtT/10wLotX84SKw2xjpxTOtuTnf/mupOKIs9mHZ/IWcby5jvoD1fP2s/A3WzTWc93y/EhSbR6RiW+mA7qDGcuSxnK6rXPjJbK7NXK7GfM5QW2k3UazsHiefXa98Dev6dZhr2j65+xl/bGLnEkeQM4D3AYcAHqurts637lJVHPuJguRyDdYPq1+33fRfya7A7afQ6KMx2AsF0TEt9sF3oyQGj3HLo1itJ92plziyfa/vZ1l0KjlXMr9e+B/P3NLTZvwfVpTpb7As1UokjyWHA+4CfBfYDX0xybVXt6fc9JvUL0GtsZLbxktn0apX0ap3Nd3Bqs9MDs34Run9lz+xzn637ceZ2c8Wz3GYba+qln/3UcYbRMFfLEOb/O8138kt3N+Fs15jA4vfr7thvXvC7dIxU4gBOAfZW1TcBklwJbAb6ThyTaL5fp3Md6OdKLm1bZ/NdQNWrvPv9Z+tH7xX7bN2Pvbp75vqyda/Xq5toKbVJvnOZTpSOMwzfXN+9Nn+nmfvbbGe8zXaNyWz7db+WetwqVbVkb7ZYSX4JOKOq/m2z/CpgU1W9pmud84DzmsVnwKKT5yg7FvjOsIMYIOs33ia5fpNcN4CnVdVRC9141Foc6VH2s
MxWVVuBrQBJdlbVxuUIbBis33izfuNrkusGnfotZvtRux/HfuDEruUTgDuHFIskqYdRSxxfBNYlOSnJY4AtwLVDjkmS1GWkuqqq6v4krwH+is7puJdV1e45Ntm6PJENjfUbb9ZvfE1y3WCR9RupwXFJ0ugbta4qSdKIM3FIkloZ28SR5IwktybZm+TCYcezWElOTPLpJLck2Z3kdU35MUmuS3Jb83j0sGNdqCSHJflyko83y5NUtycl+bMkX2/+hqdOWP3+Q7Nf3pzkI0keO871S3JZkruT3NxVNmt9klzUHGtuTXL6cKLu3yz1+4Nm//xqkmuSPKnrtVb1G8vE0TU1yYuB9cArkqwfblSLdj/whqr658DzgAuaOl0IXF9V64Drm+Vx9Trglq7lSarbe4C/rKqnA8+iU8+JqF+S1cBrgY1V9Qw6J65sYbzr92HgjBllPevTfA+3ACc321zSHING2Yd5ZP2uA55RVc8EvgFcBAur31gmDrqmJqmqHwLTU5OMraqaqqovNc/vpXPgWU2nXtua1bYBZw0nwsVJcgLwr4APdBVPSt1WAD8NfBCgqn5YVX/PhNSv8WjgcUkeDTyezvVVY1u/qvoscM+M4tnqsxm4sqruq6p9wF46x6CR1at+VfXJqrq/WfwCnevkYAH1G9fEsRr4dtfy/qZsIiRZCzwb2AEcX1VT0EkuwHHDi2xR/ivwG8CDXWWTUrenAAeADzVdcR9IciQTUr+qugN4J3A7MAV8r6o+yYTUr8ts9ZnE482vAf+red66fuOaOOadmmRcJXkC8FHg9VW1+PmPR0CSlwB3V9VNw45lQB4NPAd4f1U9G/gHxqvbZk5NX/9m4CTgycCRSV453KiW1UQdb5K8mU7X+OXTRT1Wm7N+45o4JnJqkiSH00kal1fVx5riu5Ksal5fBdw9rPgW4fnAy5J8i0634guT/CmTUTfo7I/7q2pHs/xndBLJpNTvRcC+qjpQVT8CPgb8JJNTv2mz1WdijjdJzgVeAvyb+vFFfK3rN66JY+KmJkkSOn3kt1TVu7teuhY4t3l+LrB9uWNbrKq6qKpOqKq1dP5W/7uqXskE1A2gqv4v8O0kT2uKTqNzK4CJqB+dLqrnJXl8s5+eRmcMblLqN222+lwLbElyRJKTgHXAjUOIb1HSuUneG4GXVdUPul5qX7+qGst/wJl0zgz4W+DNw45nCerzU3Sah18FdjX/zgT+CZ0zPG5rHo8ZdqyLrOcLgI83zyembsAGYGfz9/tz4OgJq99vA1+ncxuD/wEcMc71Az5CZ7zmR3R+cb96rvoAb26ONbcCLx52/Aus3146YxnTx5f/vtD6OeWIJKmVce2qkiQNiYlDktSKiUOS1IqJQ5LUiolDktSKiUOS1IqJQ5LUiolD6kOStc29DD7Q3JPi8iQvSvLXzf0bTmn+/U0z0eHfTF9JnuTkJDcm2dXcC2FdkiOT/M8kX2ne7+xh11HqlxcASn1oZizeS2fW4t10pr35Cp0rcl8G/Crwy8APqur+JC8C/l1V/WKSPwK+UFWXN1PkHEZnVoAzqurXm/d/YlV9b5mrJS3Io4cdgDRG9lXV1wCS7KZz059K8jVgLfBEYFuSdXSmjzm82e4G4M3NPUk+VlW3Ndu8M8nv05mC5XPLXRlpoeyqkvp3X9fzB7uWH6TzI+y/AJ+uzl3yXgo8FqCqrqDTKvlH4K+SvLCqvgE8F/ga8HtJ3ro8VZAWzxaHtHSeCNzRPP+V6cIkTwG+WVXvbZ4/M8nXgXuq6k+TfL97fWnU2eKQls476LQe/prOOMa0s4Gbk+wCng78CfAvgBubsjcDv7PcwUoL5eC4JKkVWxySpFZMHJKkVkwckqRWTBySpFZMHJKkVkwckqRWTBySpFb+P2VLik/1n7KcAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "%matplotlib inline\n", - "import matplotlib.pyplot\n", - "\n", - "counts, edges = histogram\n", - "\n", - "matplotlib.pyplot.step(x=edges, y=numpy.append(counts, 0), where=\"post\");\n", - "matplotlib.pyplot.xlim(edges[0], edges[-1]);\n", - "matplotlib.pyplot.ylim(0, counts.max() * 1.1);\n", - "matplotlib.pyplot.xlabel(\"mass\");\n", - "matplotlib.pyplot.ylabel(\"events per bin\");" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This differs from the lazy array approach in that you need to explicitly manage the iteration, as in this histogram accumulation. However, since we aren't caching, the previous array batch is deleted as soon as `data` goes out of scope, so it is easier to control which arrays are in memory and which aren't.\n", - "\n", - "Choose lazy arrays or iteration according to the degree of control you need." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Filenames and entry numbers while iterating\n", - "\n", - "[uproot.iterate](https://uproot.readthedocs.io/en/latest/opening-files.html#uproot.tree.iterate) crosses file boundaries as part of its iteration, and that's information we might need in the loop. If the following are `True`, each step in iteration is a tuple containing the arrays and the additional information.\n", - "\n", - " * **reportpath:** the full path or URL of the (possibly remote) file;\n", - " * **reportfile:** the [ROOTDirectory](https://uproot.readthedocs.io/en/latest/root-io.html#uproot-rootio-rootdirectory) object itself (so that you don't need to re-open it at each iteration step);\n", - " * **reportentries:** the starting and stopping entry numbers for this chunk of data. In a multi-file iteration, these are global (always increasing, not returning to zero as we start the next file)." 
- ] - }, - { - "cell_type": "code", - "execution_count": 54, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "https://scikit-hep.org/uproot/examples/sample-5.23.02-zlib.root 0 30 1\n", - "https://scikit-hep.org/uproot/examples/sample-5.24.00-zlib.root 30 60 1\n", - "https://scikit-hep.org/uproot/examples/sample-5.25.02-zlib.root 60 90 1\n", - "https://scikit-hep.org/uproot/examples/sample-5.26.00-zlib.root 90 120 1\n", - "https://scikit-hep.org/uproot/examples/sample-5.27.02-zlib.root 120 150 1\n", - "https://scikit-hep.org/uproot/examples/sample-5.28.00-zlib.root 150 180 1\n", - "https://scikit-hep.org/uproot/examples/sample-5.29.02-zlib.root 180 210 1\n", - "https://scikit-hep.org/uproot/examples/sample-5.30.00-zlib.root 210 240 1\n", - "https://scikit-hep.org/uproot/examples/sample-6.08.04-zlib.root 240 270 1\n", - "https://scikit-hep.org/uproot/examples/sample-6.10.05-zlib.root 270 300 1\n", - "https://scikit-hep.org/uproot/examples/sample-6.14.00-zlib.root 300 330 1\n" - ] - } - ], - "source": [ - "for path, file, start, stop, arrays in uproot.iterate(\n", - " [\"https://scikit-hep.org/uproot/examples/sample-%s-zlib.root\" % x\n", - " for x in [\"5.23.02\", \"5.24.00\", \"5.25.02\", \"5.26.00\", \"5.27.02\", \"5.28.00\",\n", - " \"5.29.02\", \"5.30.00\", \"6.08.04\", \"6.10.05\", \"6.14.00\"]],\n", - " \"sample\",\n", - " \"f8\",\n", - " reportpath=True, reportfile=True, reportentries=True):\n", - " print(path, file, start, stop, len(arrays))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Limiting the number of entries to be read\n", - "\n", - "All array-reading functions have the following parameters:\n", - "\n", - " * **entrystart:** the first entry to read, by default `0`;\n", - " * **entrystop:** one after the last entry to read, by default `numentries`.\n", - "\n", - "Setting **entrystart** and/or **entrystop** differs from slicing the resulting array in that slicing reads, then discards, but these parameters minimize the data to read." - ] - }, - { - "cell_type": "code", - "execution_count": 55, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "200" - ] - }, - "execution_count": 55, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "len(events.array(\"E1\", entrystart=100, entrystop=300))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "As with Python slices, the **entrystart** and **entrystop** can be negative to count from the end of the TTree." - ] - }, - { - "cell_type": "code", - "execution_count": 56, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([ 35.36458334, 35.46037568, 27.74254176, 32.67634359,\n", - " 32.67634359, 32.70165023, 168.78012134, 81.27013558,\n", - " 81.27013558, 81.56621735])" - ] - }, - "execution_count": 56, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "events.array(\"E1\", entrystart=-10)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Internally, ROOT files are written in chunks and whole chunks must be read, so the best places to set **entrystart** and **entrystop** are between basket boundaries." 
- ] - }, - { - "cell_type": "code", - "execution_count": 57, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[6, 6, 6, 6, 6, 6, 6, 4]" - ] - }, - "execution_count": 57, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# This file has small TBaskets\n", - "tree = uproot.open(\"https://scikit-hep.org/uproot/examples/foriter.root\")[\"foriter\"]\n", - "branch = tree[\"data\"]\n", - "[branch.basket_numentries(i) for i in range(branch.numbaskets)]" - ] - }, - { - "cell_type": "code", - "execution_count": 58, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[(0, 6), (6, 12), (12, 18), (18, 24), (24, 30), (30, 36), (36, 42), (42, 46)]" - ] - }, - "execution_count": 58, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# (entrystart, entrystop) pairs where ALL the TBranches' TBaskets align\n", - "list(tree.clusters())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Or simply," - ] - }, - { - "cell_type": "code", - "execution_count": 59, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[array([0, 1, 2, 3, 4, 5], dtype=int32),\n", - " array([ 6, 7, 8, 9, 10, 11], dtype=int32),\n", - " array([12, 13, 14, 15, 16, 17], dtype=int32),\n", - " array([18, 19, 20, 21, 22, 23], dtype=int32),\n", - " array([24, 25, 26, 27, 28, 29], dtype=int32),\n", - " array([30, 31, 32, 33, 34, 35], dtype=int32),\n", - " array([36, 37, 38, 39, 40, 41], dtype=int32),\n", - " array([42, 43, 44, 45], dtype=int32)]" - ] - }, - "execution_count": 59, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "branch.baskets()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Controlling lazy chunk and iteration step sizes\n", - "\n", - "In addition to **entrystart** and **entrystop**, the lazy array and iteration functions also have:\n", - "\n", - " * **entrysteps:** the number of entries to read in each chunk or step, `numpy.inf` for make the chunks/steps as big as possible (limited by file boundaries), a memory size string, or a list of `(entrystart, entrystop)` pairs to be explicit." 
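The cells above find the aligned boundaries but never feed them back into a read call, so here is a minimal sketch (using the same small `foriter.root` file) of both uses: one `(entrystart, entrystop)` pair from `tree.clusters()` for a single read, and the full list of pairs passed as an explicit **entrysteps**, as the bullet above allows.

```python
import uproot

# the same small-basket example file opened in the cells above
tree = uproot.open("https://scikit-hep.org/uproot/examples/foriter.root")["foriter"]
pairs = list(tree.clusters())            # e.g. [(0, 6), (6, 12), ..., (42, 46)]

# read exactly one aligned chunk: whole TBaskets, nothing read and then discarded
start, stop = pairs[1]
chunk = tree["data"].array(entrystart=start, entrystop=stop)
print(len(chunk))                        # 6 entries for this file

# or hand the whole list of pairs to entrysteps so iteration follows them exactly
print([len(arrays[b"data"]) for arrays in tree.iterate(entrysteps=pairs)])
```

Because the steps line up with TBasket boundaries, no basket has to be read or decompressed twice across steps.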
- ] - }, - { - "cell_type": "code", - "execution_count": 60, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[500, 500, 500, 500, 304]" - ] - }, - "execution_count": 60, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "[len(chunk) for chunk in events.lazyarrays(entrysteps=500)[\"E1\"].chunks]" - ] - }, - { - "cell_type": "code", - "execution_count": 61, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[500, 500, 500, 500, 304]" - ] - }, - "execution_count": 61, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "[len(data[b\"E1\"]) for data in events.iterate([\"E*\", \"p[xyz]*\"], entrysteps=500)]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The TTree lazy array/iteration functions ([TTreeMethods.array](https://uproot.readthedocs.io/en/latest/ttree-handling.html#array), [TTreeMethods.arrays](https://uproot.readthedocs.io/en/latest/ttree-handling.html#arrays), [TBranch.lazyarray](https://uproot.readthedocs.io/en/latest/ttree-handling.html#id13), [TTreeMethods.lazyarray](https://uproot.readthedocs.io/en/latest/ttree-handling.html#lazyarray), and [TTreeMethods.lazyarrays](https://uproot.readthedocs.io/en/latest/ttree-handling.html#lazyarrays)) use basket or cluster sizes as a default **entrysteps**, while multi-file lazy array/iteration functions ([uproot.lazyarrays](https://uproot.readthedocs.io/en/latest/opening-files.html#uproot-lazyarray-and-lazyarrays) and [uproot.iterate](https://uproot.readthedocs.io/en/latest/opening-files.html#uproot-iterate)) use the maximum per file: `numpy.inf`." - ] - }, - { - "cell_type": "code", - "execution_count": 62, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[6, 6, 6, 6, 6, 6, 6, 4]" - ] - }, - "execution_count": 62, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# This file has small TBaskets\n", - "tree = uproot.open(\"https://scikit-hep.org/uproot/examples/foriter.root\")[\"foriter\"]\n", - "branch = tree[\"data\"]\n", - "[len(a[\"data\"]) for a in tree.iterate(namedecode=\"utf-8\")]" - ] - }, - { - "cell_type": "code", - "execution_count": 63, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[46, 46, 46]" - ] - }, - "execution_count": 63, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# This file has small TBaskets\n", - "[len(a[\"data\"]) for a in uproot.iterate([\"https://scikit-hep.org/uproot/examples/foriter.root\"] * 3,\n", - " \"foriter\", namedecode=\"utf-8\")]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "One particularly useful way to specify the **entrysteps** is with a memory size string. This string consists of a number followed by a memory unit: `B` for bytes, `kB` for kilobytes, `MB`, `GB`, and so on (whitespace and case insensitive).\n", - "\n", - "The chunks are not guaranteed to fit the memory size perfectly or even be less than the target size. Uproot picks a fixed number of events that approximates this size on average. The result depends on the number of branches chosen because it is the total size of the set of branches that are chosen for the memory target." 
- ] - }, - { - "cell_type": "code", - "execution_count": 64, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[753, 753, 753, 45]" - ] - }, - "execution_count": 64, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "[len(data[b\"E1\"]) for data in events.iterate([\"E*\", \"p[xyz]*\"], entrysteps=\"50 kB\")]" - ] - }, - { - "cell_type": "code", - "execution_count": 65, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[359, 359, 359, 359, 359, 359, 150]" - ] - }, - "execution_count": 65, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "[len(data[b\"E1\"]) for data in events.iterate(entrysteps=\"50 kB\")]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Since lazy arrays represent all branches but we won't necessarily be reading all branches, memory size chunking is less useful for lazy arrays, but you can do it because all function parameters are treated consistently." - ] - }, - { - "cell_type": "code", - "execution_count": 66, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[359, 359, 359, 359, 359, 359, 150]" - ] - }, - "execution_count": 66, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "[len(chunk) for chunk in events.lazyarrays(entrysteps=\"50 kB\")[\"E1\"].chunks]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Caching and iteration\n", - "\n", - "Since iteration gives you more precise control over which set of events you're processing at a given time, caching with the **cache** parameter is less useful than it is with lazy arrays. For consistency's sake, the [TTreeMethods.iterate](https://uproot.readthedocs.io/en/latest/ttree-handling.html#iterate) and [uproot.iterate](https://uproot.readthedocs.io/en/latest/opening-files.html#uproot-iterate) functions provide a **cache** parameter and it works the same way that it does in other array-reading functions, but its effect would be to retain the previous step's arrays while working on a new step in the iteration. Presumably, the reason you're iterating is because only the current step fits into memory, so this is not a useful feature.\n", - "\n", - "However, the **basketcache** is very useful for iteration, more so than it is for lazy arrays. If an iteration step falls in the middle of a TBasket, the whole TBasket must be read in that step, despite the fact that only part of it is incorporated into the output array. The remainder of the TBasket will be used in the next iteration step, so caching it for exactly one iteration step is ideal: it avoids the need to reread it and decompress it again.\n", - "\n", - "It is such a useful feature that it's built into [TTreeMethods.iterate](https://uproot.readthedocs.io/en/latest/ttree-handling.html#iterate) and [uproot.iterate](https://uproot.readthedocs.io/en/latest/opening-files.html#uproot-iterate) by default. If you don't set a **basketcache**, these functions will create one with no memory limit and save TBaskets in it for exactly one iteration step, eliminating that temporary cache at the end of iteration. (The same is true of the **keycache**; see [reference documentation](https://uproot.readthedocs.io/en/latest/caches.html) for detail.)\n", - "\n", - "Thus, you probably don't want to set any explicit caches while iterating. 
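The following is only a sketch of what an explicit cache would look like, not a recommendation; it assumes `uproot.ArrayCache`, the dict-like, memory-limited cache described in the caches documentation (any dict-like object can be passed the same way).

```python
# sketch only: supply your own basketcache so partially consumed TBaskets are
# kept (up to the memory limit) between iteration steps, instead of relying on
# the automatic one-step cache described above
basketcache = uproot.ArrayCache("100 MB")     # assumed API; a plain dict also works

for data in events.iterate(["E*", "p[xyz]*"], namedecode="utf-8",
                           basketcache=basketcache):
    pass                                      # process each chunk here
```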
Setting an explicit **basketcache** would introduce an upper limit on how much it can store, but it would lose the property of evicting after exactly one iteration step (because the connection between the cache object and the iterator would be lost). If you're running out of memory during iteration, try reducing the **entrysteps**." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Changing the output container type\n", - "\n", - "When we ask for [TTreeMethods.arrays](https://uproot.readthedocs.io/en/latest/ttree-handling.html#arrays) (plural), [TTreeMethods.iterate](https://uproot.readthedocs.io/en/latest/ttree-handling.html#iterate), or [uproot.iterate](https://uproot.readthedocs.io/en/latest/opening-files.html#uproot-iterate), we get a Python dict mapping branch names to arrays. (As a reminder, **namedecode=\"utf-8\"** makes those branch names Python strings, rather than bytestrings.) Sometimes, we want a different kind of container.\n", - "\n", - " * **outputtype:** the _type_ of the container to hold the output arrays.\n", - "\n", - "One particularly useful container is `tuple`, which can be unpacked by a tuple-assignment." - ] - }, - { - "cell_type": "code", - "execution_count": 67, - "metadata": {}, - "outputs": [], - "source": [ - "px, py, pz = events.arrays(\"p[xyz]1\", outputtype=tuple)" - ] - }, - { - "cell_type": "code", - "execution_count": 68, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([-41.19528764, 35.11804977, 35.11804977, ..., 32.37749196,\n", - " 32.37749196, 32.48539387])" - ] - }, - "execution_count": 68, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "px" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Using `tuple` as an **outputtype** in [TTreeMethods.iterate](https://uproot.readthedocs.io/en/latest/ttree-handling.html#iterate) and [uproot.iterate](https://uproot.readthedocs.io/en/latest/opening-files.html#uproot-iterate) lets us unpack the arrays in Python's for statement." - ] - }, - { - "cell_type": "code", - "execution_count": 69, - "metadata": {}, - "outputs": [], - "source": [ - "for px, py, pz in events.iterate(\"p[xyz]1\", outputtype=tuple):\n", - " px**2 + py**2 + pz**2" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Another useful type is `collections.namedtuple`, which packs everything into a single object, but the fields are accessible by name." - ] - }, - { - "cell_type": "code", - "execution_count": 70, - "metadata": {}, - "outputs": [], - "source": [ - "import collections # from the Python standard library\n", - "\n", - "a = events.arrays(\"p[xyz]1\", outputtype=collections.namedtuple)" - ] - }, - { - "cell_type": "code", - "execution_count": 71, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([-41.19528764, 35.11804977, 35.11804977, ..., 32.37749196,\n", - " 32.37749196, 32.48539387])" - ] - }, - "execution_count": 71, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "a.px1" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "You can also use your own classes." 
- ] - }, - { - "cell_type": "code", - "execution_count": 72, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 72, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "class Stuff:\n", - " def __init__(self, px, py, pz):\n", - " self.p = numpy.sqrt(px**2 + py**2 + pz**2)\n", - " def __repr__(self):\n", - " return \"\" % self.p\n", - "\n", - "events.arrays(\"p[xyz]1\", outputtype=Stuff)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "And perhaps most importantly, you can pass in [pandas.DataFrame](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.html)." - ] - }, - { - "cell_type": "code", - "execution_count": 73, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
px1py1pz1
entry
0-41.19528817.433244-68.964962
135.118050-16.570362-48.775247
235.118050-16.570362-48.775247
334.144437-16.119525-47.426984
422.78358215.036444-31.689894
5-19.862307-9.20422543.817098
6-19.862307-9.20422543.817098
7-20.177373-9.35414944.513955
871.14371129.542308-108.150553
951.050486-51.849400-49.631328
\n", - "
" - ], - "text/plain": [ - " px1 py1 pz1\n", - "entry \n", - "0 -41.195288 17.433244 -68.964962\n", - "1 35.118050 -16.570362 -48.775247\n", - "2 35.118050 -16.570362 -48.775247\n", - "3 34.144437 -16.119525 -47.426984\n", - "4 22.783582 15.036444 -31.689894\n", - "5 -19.862307 -9.204225 43.817098\n", - "6 -19.862307 -9.204225 43.817098\n", - "7 -20.177373 -9.354149 44.513955\n", - "8 71.143711 29.542308 -108.150553\n", - "9 51.050486 -51.849400 -49.631328" - ] - }, - "execution_count": 73, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "import pandas\n", - "\n", - "events.arrays(\"p[xyz]1\", outputtype=pandas.DataFrame, entrystop=10)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Filling Pandas DataFrames\n", - "\n", - "The previous example filled a [pandas.DataFrame](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.html) by explicitly passing it as an **outputtype**. Pandas is such an important container type that there are specialized functions for it: [TTreeMethods.pandas.df](https://uproot.readthedocs.io/en/latest/ttree-handling.html#id7) and [uproot.pandas.df](https://uproot.readthedocs.io/en/latest/opening-files.html#uproot-pandas-iterate)." - ] - }, - { - "cell_type": "code", - "execution_count": 74, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
px1py1pz1
entry
0-41.19528817.433244-68.964962
135.118050-16.570362-48.775247
235.118050-16.570362-48.775247
334.144437-16.119525-47.426984
422.78358215.036444-31.689894
5-19.862307-9.20422543.817098
6-19.862307-9.20422543.817098
7-20.177373-9.35414944.513955
871.14371129.542308-108.150553
951.050486-51.849400-49.631328
\n", - "
" - ], - "text/plain": [ - " px1 py1 pz1\n", - "entry \n", - "0 -41.195288 17.433244 -68.964962\n", - "1 35.118050 -16.570362 -48.775247\n", - "2 35.118050 -16.570362 -48.775247\n", - "3 34.144437 -16.119525 -47.426984\n", - "4 22.783582 15.036444 -31.689894\n", - "5 -19.862307 -9.204225 43.817098\n", - "6 -19.862307 -9.204225 43.817098\n", - "7 -20.177373 -9.354149 44.513955\n", - "8 71.143711 29.542308 -108.150553\n", - "9 51.050486 -51.849400 -49.631328" - ] - }, - "execution_count": 74, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "events.pandas.df(\"p[xyz]1\", entrystop=10)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The **entry** index in the resulting DataFrame represents the actual entry numbers in the file. For instance, counting from the end:" - ] - }, - { - "cell_type": "code", - "execution_count": 75, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
px1py1pz1
entry
229412.96698430.97450611.094139
229513.00127031.05902111.123455
2296-16.891371-15.335677-15.784044
229719.03757714.82072322.037447
229819.03757714.82072322.037447
229919.05465114.83395422.051323
2300-68.041915-26.105847-152.235018
230132.3774921.199406-74.532431
230232.3774921.199406-74.532431
230332.4853941.201350-74.808372
\n", - "
" - ], - "text/plain": [ - " px1 py1 pz1\n", - "entry \n", - "2294 12.966984 30.974506 11.094139\n", - "2295 13.001270 31.059021 11.123455\n", - "2296 -16.891371 -15.335677 -15.784044\n", - "2297 19.037577 14.820723 22.037447\n", - "2298 19.037577 14.820723 22.037447\n", - "2299 19.054651 14.833954 22.051323\n", - "2300 -68.041915 -26.105847 -152.235018\n", - "2301 32.377492 1.199406 -74.532431\n", - "2302 32.377492 1.199406 -74.532431\n", - "2303 32.485394 1.201350 -74.808372" - ] - }, - "execution_count": 75, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "events.pandas.df(\"p[xyz]1\", entrystart=-10)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The [uproot.pandas.df](https://uproot.readthedocs.io/en/latest/opening-files.html#uproot-pandas-iterate) function doesn't have a **reportentries** because they're included in the DataFrame itself." - ] - }, - { - "cell_type": "code", - "execution_count": 76, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - " px1 py1 pz1\n", - "0 -41.195288 17.433244 -68.964962\n", - "1 35.118050 -16.570362 -48.775247\n", - "2 35.118050 -16.570362 -48.775247\n", - " px1 py1 pz1\n", - "500 39.163212 -19.185280 -13.979333\n", - "501 39.094970 -19.152964 -13.936115\n", - "502 -7.656437 -33.431880 91.840257\n", - " px1 py1 pz1\n", - "1000 26.043759 -17.618814 -0.567176\n", - "1001 26.043759 -17.618814 -0.567176\n", - "1002 25.996204 -17.585241 -0.568920\n", - " px1 py1 pz1\n", - "1500 82.816840 13.262734 27.797909\n", - "1501 -11.416911 39.815352 32.349893\n", - "1502 -11.416911 39.815352 32.349893\n", - " px1 py1 pz1\n", - "2000 -43.378378 -15.235422 3.019698\n", - "2001 -43.378378 -15.235422 3.019698\n", - "2002 -43.244422 -15.187402 3.003985\n" - ] - } - ], - "source": [ - "for df in uproot.pandas.iterate(\"https://scikit-hep.org/uproot/examples/Zmumu.root\", \"events\", \"p[xyz]1\", entrysteps=500):\n", - " print(df[:3])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Part of the motivation for a special function is that it's the first of potentially many external connectors (Dask is another: see above). The other part is that these functions have more Pandas-friendly default parameters, such as **flatten=True**.\n", - "\n", - "Flattening turns multiple values per entry (i.e. multiple particles per event) into separate DataFrame rows, maintaining the nested structure in the DataFrame index. Flattening is usually undesirable for arrays—because arrays don't have an index to record that information—but it's usually desirable for DataFrames." - ] - }, - { - "cell_type": "code", - "execution_count": 77, - "metadata": {}, - "outputs": [], - "source": [ - "events2 = uproot.open(\"https://scikit-hep.org/uproot/examples/HZZ.root\")[\"events\"] # non-flat data" - ] - }, - { - "cell_type": "code", - "execution_count": 78, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
MET_pxMET_pyMuon_PxMuon_PyMuon_Pz
entry
05.9127712.563633[-52.899456, 37.73778][-11.654672, 0.6934736][-8.160793, -11.307582]
124.765203-16.349110[-0.81645936][-24.404259][20.199968]
2-25.78508816.237131[48.98783, 0.8275667][-21.723139, 29.800508][11.168285, 36.96519]
38.619896-22.786547[22.088331, 76.69192][-85.835464, -13.956494][403.84845, 335.0942]
45.393139-1.310052[45.17132, 39.750957][67.24879, 25.403667][-89.69573, 20.115053]
5-3.759475-19.417021[9.22811, -5.793715][40.55438, -30.295189][-14.642164, 42.954376]
623.962149-9.049156[12.538717, 29.54184][-42.54871, -4.4455166][-124.44899, -26.356554]
7-57.533348-20.487679[34.88376][-15.982724][155.53117]
842.416195-94.350861[-53.166973, 11.49187][92.02971, -4.4173865][35.638836, -17.473787]
9-1.914469-23.963034[-67.014854, -18.118755][53.159172, -35.106167][54.41294, 58.036896]
\n", - "
" - ], - "text/plain": [ - " MET_px MET_py Muon_Px \\\n", - "entry \n", - "0 5.912771 2.563633 [-52.899456, 37.73778] \n", - "1 24.765203 -16.349110 [-0.81645936] \n", - "2 -25.785088 16.237131 [48.98783, 0.8275667] \n", - "3 8.619896 -22.786547 [22.088331, 76.69192] \n", - "4 5.393139 -1.310052 [45.17132, 39.750957] \n", - "5 -3.759475 -19.417021 [9.22811, -5.793715] \n", - "6 23.962149 -9.049156 [12.538717, 29.54184] \n", - "7 -57.533348 -20.487679 [34.88376] \n", - "8 42.416195 -94.350861 [-53.166973, 11.49187] \n", - "9 -1.914469 -23.963034 [-67.014854, -18.118755] \n", - "\n", - " Muon_Py Muon_Pz \n", - "entry \n", - "0 [-11.654672, 0.6934736] [-8.160793, -11.307582] \n", - "1 [-24.404259] [20.199968] \n", - "2 [-21.723139, 29.800508] [11.168285, 36.96519] \n", - "3 [-85.835464, -13.956494] [403.84845, 335.0942] \n", - "4 [67.24879, 25.403667] [-89.69573, 20.115053] \n", - "5 [40.55438, -30.295189] [-14.642164, 42.954376] \n", - "6 [-42.54871, -4.4455166] [-124.44899, -26.356554] \n", - "7 [-15.982724] [155.53117] \n", - "8 [92.02971, -4.4173865] [35.638836, -17.473787] \n", - "9 [53.159172, -35.106167] [54.41294, 58.036896] " - ] - }, - "execution_count": 78, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "events2.pandas.df([\"MET_p*\", \"Muon_P*\"], entrystop=10, flatten=False) # not the default" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "DataFrames like the above are slow (the cell entries are Python lists) and difficult to use in Pandas. Pandas doesn't have specialized functions for manipulating this kind of structure.\n", - "\n", - "However, if we use the default **flatten=True**:" - ] - }, - { - "cell_type": "code", - "execution_count": 79, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
MET_pxMET_pyMuon_PxMuon_PyMuon_Pz
entrysubentry
005.9127712.563633-52.899456-11.654672-8.160793
15.9127712.56363337.7377820.693474-11.307582
1024.765203-16.349110-0.816459-24.40425920.199968
20-25.78508816.23713148.987831-21.72313911.168285
1-25.78508816.2371310.82756729.80050836.965191
308.619896-22.78654722.088331-85.835464403.848450
18.619896-22.78654776.691917-13.956494335.094208
405.393139-1.31005245.17132267.248787-89.695732
15.393139-1.31005239.75095725.40366720.115053
50-3.759475-19.4170219.22811040.554379-14.642164
1-3.759475-19.417021-5.793715-30.29518942.954376
6023.962149-9.04915612.538717-42.548710-124.448990
123.962149-9.04915629.541840-4.445517-26.356554
70-57.533348-20.48767934.883759-15.982724155.531174
8042.416195-94.350861-53.16697392.02970935.638836
142.416195-94.35086111.491870-4.417387-17.473787
90-1.914469-23.963034-67.01485453.15917254.412941
1-1.914469-23.963034-18.118755-35.10616758.036896
\n", - "
" - ], - "text/plain": [ - " MET_px MET_py Muon_Px Muon_Py Muon_Pz\n", - "entry subentry \n", - "0 0 5.912771 2.563633 -52.899456 -11.654672 -8.160793\n", - " 1 5.912771 2.563633 37.737782 0.693474 -11.307582\n", - "1 0 24.765203 -16.349110 -0.816459 -24.404259 20.199968\n", - "2 0 -25.785088 16.237131 48.987831 -21.723139 11.168285\n", - " 1 -25.785088 16.237131 0.827567 29.800508 36.965191\n", - "3 0 8.619896 -22.786547 22.088331 -85.835464 403.848450\n", - " 1 8.619896 -22.786547 76.691917 -13.956494 335.094208\n", - "4 0 5.393139 -1.310052 45.171322 67.248787 -89.695732\n", - " 1 5.393139 -1.310052 39.750957 25.403667 20.115053\n", - "5 0 -3.759475 -19.417021 9.228110 40.554379 -14.642164\n", - " 1 -3.759475 -19.417021 -5.793715 -30.295189 42.954376\n", - "6 0 23.962149 -9.049156 12.538717 -42.548710 -124.448990\n", - " 1 23.962149 -9.049156 29.541840 -4.445517 -26.356554\n", - "7 0 -57.533348 -20.487679 34.883759 -15.982724 155.531174\n", - "8 0 42.416195 -94.350861 -53.166973 92.029709 35.638836\n", - " 1 42.416195 -94.350861 11.491870 -4.417387 -17.473787\n", - "9 0 -1.914469 -23.963034 -67.014854 53.159172 54.412941\n", - " 1 -1.914469 -23.963034 -18.118755 -35.106167 58.036896" - ] - }, - "execution_count": 79, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "df = events2.pandas.df([\"MET_p*\", \"Muon_P*\"], entrystop=10)\n", - "df" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The particles-within-events structure is encoded in the [pandas.MultiIndex](https://pandas.pydata.org/pandas-docs/stable/user_guide/advanced.html), and we can use Pandas functions like [DataFrame.unstack](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.unstack.html) to manipulate that structure." - ] - }, - { - "cell_type": "code", - "execution_count": 80, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
MET_pxMET_pyMuon_PxMuon_PyMuon_Pz
subentry0101010101
entry
05.9127715.9127712.5636332.563633-52.89945637.737782-11.6546720.693474-8.160793-11.307582
124.765203NaN-16.349110NaN-0.816459NaN-24.404259NaN20.199968NaN
2-25.785088-25.78508816.23713116.23713148.9878310.827567-21.72313929.80050811.16828536.965191
38.6198968.619896-22.786547-22.78654722.08833176.691917-85.835464-13.956494403.848450335.094208
45.3931395.393139-1.310052-1.31005245.17132239.75095767.24878725.403667-89.69573220.115053
5-3.759475-3.759475-19.417021-19.4170219.228110-5.79371540.554379-30.295189-14.64216442.954376
623.96214923.962149-9.049156-9.04915612.53871729.541840-42.548710-4.445517-124.448990-26.356554
7-57.533348NaN-20.487679NaN34.883759NaN-15.982724NaN155.531174NaN
842.41619542.416195-94.350861-94.350861-53.16697311.49187092.029709-4.41738735.638836-17.473787
9-1.914469-1.914469-23.963034-23.963034-67.014854-18.11875553.159172-35.10616754.41294158.036896
\n", - "
" - ], - "text/plain": [ - " MET_px MET_py Muon_Px \\\n", - "subentry 0 1 0 1 0 1 \n", - "entry \n", - "0 5.912771 5.912771 2.563633 2.563633 -52.899456 37.737782 \n", - "1 24.765203 NaN -16.349110 NaN -0.816459 NaN \n", - "2 -25.785088 -25.785088 16.237131 16.237131 48.987831 0.827567 \n", - "3 8.619896 8.619896 -22.786547 -22.786547 22.088331 76.691917 \n", - "4 5.393139 5.393139 -1.310052 -1.310052 45.171322 39.750957 \n", - "5 -3.759475 -3.759475 -19.417021 -19.417021 9.228110 -5.793715 \n", - "6 23.962149 23.962149 -9.049156 -9.049156 12.538717 29.541840 \n", - "7 -57.533348 NaN -20.487679 NaN 34.883759 NaN \n", - "8 42.416195 42.416195 -94.350861 -94.350861 -53.166973 11.491870 \n", - "9 -1.914469 -1.914469 -23.963034 -23.963034 -67.014854 -18.118755 \n", - "\n", - " Muon_Py Muon_Pz \n", - "subentry 0 1 0 1 \n", - "entry \n", - "0 -11.654672 0.693474 -8.160793 -11.307582 \n", - "1 -24.404259 NaN 20.199968 NaN \n", - "2 -21.723139 29.800508 11.168285 36.965191 \n", - "3 -85.835464 -13.956494 403.848450 335.094208 \n", - "4 67.248787 25.403667 -89.695732 20.115053 \n", - "5 40.554379 -30.295189 -14.642164 42.954376 \n", - "6 -42.548710 -4.445517 -124.448990 -26.356554 \n", - "7 -15.982724 NaN 155.531174 NaN \n", - "8 92.029709 -4.417387 35.638836 -17.473787 \n", - "9 53.159172 -35.106167 54.412941 58.036896 " - ] - }, - "execution_count": 80, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "df.unstack()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "There's also a **flatten=None** that skips all non-flat TBranches, included as a convenience against overzealous branch selection." - ] - }, - { - "cell_type": "code", - "execution_count": 81, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
MET_pxMET_py
entry
05.9127712.563633
124.765203-16.349110
2-25.78508816.237131
38.619896-22.786547
45.393139-1.310052
5-3.759475-19.417021
623.962149-9.049156
7-57.533348-20.487679
842.416195-94.350861
9-1.914469-23.963034
\n", - "
" - ], - "text/plain": [ - " MET_px MET_py\n", - "entry \n", - "0 5.912771 2.563633\n", - "1 24.765203 -16.349110\n", - "2 -25.785088 16.237131\n", - "3 8.619896 -22.786547\n", - "4 5.393139 -1.310052\n", - "5 -3.759475 -19.417021\n", - "6 23.962149 -9.049156\n", - "7 -57.533348 -20.487679\n", - "8 42.416195 -94.350861\n", - "9 -1.914469 -23.963034" - ] - }, - "execution_count": 81, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "events2.pandas.df([\"MET_p*\", \"Muon_P*\"], entrystop=10, flatten=None)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Selecting and interpreting branches\n", - "\n", - "We have already seen that TBranches can be selected as lists of strings and with wildcards. This is the same wildcard pattern that filesystems use to match file lists: `*` can be replaced with any text (or none), `?` can be replaced by one character, and `[...]` specifies a list of alternate characters.\n", - "\n", - "Wildcard patters are quick to write, but limited relative to regular expressions. Any branch request between slashes (`/` inside the quotation marks) will be interpreted as regular expressions instead (i.e. `.*` instead of `*`)." - ] - }, - { - "cell_type": "code", - "execution_count": 82, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "dict_keys([b'px1', b'py1', b'pz1', b'px2', b'py2', b'pz2'])" - ] - }, - "execution_count": 82, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "events.arrays(\"p[xyz]?\").keys() # using wildcards" - ] - }, - { - "cell_type": "code", - "execution_count": 83, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "dict_keys([b'px1', b'py1', b'pz1', b'px2', b'py2', b'pz2'])" - ] - }, - "execution_count": 83, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "events.arrays(\"/p[x-z].?/\").keys() # using regular expressions" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "If, instead of strings, you pass a function from branch objects to `True` or `False`, the branches will be selected by evaluating the function as a filter. This is a way of selecting branches based on properties other than their names." - ] - }, - { - "cell_type": "code", - "execution_count": 84, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "dict_keys([b'Type', b'Run', b'Event', b'Q1', b'Q2'])" - ] - }, - "execution_count": 84, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "events.arrays(lambda branch: branch.compressionratio() > 3).keys()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Note that the return values must be strictly `True` and `False`, not anything that [Python evaluates to true or false](https://itnext.io/you-shouldnt-use-truthy-tests-753b39ef8893). If the function returns anything else, it will be used as a new [Interpretation](https://uproot.readthedocs.io/en/latest/interpretation.html) for the branch." 
- ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## TBranch interpretations\n", - "\n", - "The very first thing we looked at when we opened a TTree was its TBranches and their interpretations with the `show` method:" - ] - }, - { - "cell_type": "code", - "execution_count": 85, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Type (no streamer) asstring()\n", - "Run (no streamer) asdtype('>i4')\n", - "Event (no streamer) asdtype('>i4')\n", - "E1 (no streamer) asdtype('>f8')\n", - "px1 (no streamer) asdtype('>f8')\n", - "py1 (no streamer) asdtype('>f8')\n", - "pz1 (no streamer) asdtype('>f8')\n", - "pt1 (no streamer) asdtype('>f8')\n", - "eta1 (no streamer) asdtype('>f8')\n", - "phi1 (no streamer) asdtype('>f8')\n", - "Q1 (no streamer) asdtype('>i4')\n", - "E2 (no streamer) asdtype('>f8')\n", - "px2 (no streamer) asdtype('>f8')\n", - "py2 (no streamer) asdtype('>f8')\n", - "pz2 (no streamer) asdtype('>f8')\n", - "pt2 (no streamer) asdtype('>f8')\n", - "eta2 (no streamer) asdtype('>f8')\n", - "phi2 (no streamer) asdtype('>f8')\n", - "Q2 (no streamer) asdtype('>i4')\n", - "M (no streamer) asdtype('>f8')\n" - ] - } - ], - "source": [ - "events.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Every branch has a default interpretation, such as" - ] - }, - { - "cell_type": "code", - "execution_count": 86, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "asdtype('>f8')" - ] - }, - "execution_count": 86, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "events[\"E1\"].interpretation" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "meaning big-endian, 8-byte floating point numbers as a [Numpy dtype](https://docs.scipy.org/doc/numpy/reference/arrays.dtypes.html). We could interpret this branch with a different [Numpy dtype](https://docs.scipy.org/doc/numpy/reference/arrays.dtypes.html), but it wouldn't be meaningful." - ] - }, - { - "cell_type": "code", - "execution_count": 87, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([4635484859043618393, 4633971086021346367, 4633971086021346367, ...,\n", - " 4635419294316473354, 4635419294316473354, 4635440129219414362])" - ] - }, - "execution_count": 87, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "events[\"E1\"].array(uproot.asdtype(\">i8\"))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Instead of reading the values as floating point numbers, we've read them as integers. It's unlikely that you'd ever want to do that, unless the default interpretation is wrong." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Reading data into a preexisting array\n", - "\n", - "One actually useful TBranch reinterpretation is [uproot.asarray](https://uproot.readthedocs.io/en/latest/interpretation.html#uproot-interp-numerical-asarray). It differs from [uproot.asdtype](https://uproot.readthedocs.io/en/latest/interpretation.html#uproot-interp-numerical-asdtype) only in that the latter creates a new array when reading data while the former fills a user-specified array." 
- ] - }, - { - "cell_type": "code", - "execution_count": 88, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "asarray('>f8', )" - ] - }, - "execution_count": 88, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "myarray = numpy.zeros(events.numentries, dtype=numpy.float32) # (different size)\n", - "reinterpretation = events[\"E1\"].interpretation.toarray(myarray)\n", - "reinterpretation" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Passing the new [uproot.asarray](https://uproot.readthedocs.io/en/latest/interpretation.html#uproot-interp-numerical-asarray) interpretation to the array-reading function" - ] - }, - { - "cell_type": "code", - "execution_count": 89, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([82.201866, 62.34493 , 62.34493 , ..., 81.270134, 81.270134,\n", - " 81.566216], dtype=float32)" - ] - }, - "execution_count": 89, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "events[\"E1\"].array(reinterpretation)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "fills and returns that array. When you look at my array object, you can see that it is now filled, overwriting whatever might have been in it before." - ] - }, - { - "cell_type": "code", - "execution_count": 90, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([82.201866, 62.34493 , 62.34493 , ..., 81.270134, 81.270134,\n", - " 81.566216], dtype=float32)" - ] - }, - "execution_count": 90, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "myarray" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This is useful for speed-critical applications or ones in which the array is managed by an external system. The array could be NUMA-allocated in a supercomputer or CPU/GPU managed by PyTorch, for instance.\n", - "\n", - "As the provider of the array, it is your responsibility to ensure that it has enough elements to hold the (possibly type-converted) output. (Failure to do so only results in an exception, not a segmentation fault or anything.)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Passing many new interpretations in one call\n", - "\n", - "Above, you saw what happens when a TBranch selector is a function returning `True` or `False`, and I stressed that it must be literally `True`, not an object that Python would evaluate to `True`." - ] - }, - { - "cell_type": "code", - "execution_count": 91, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "dict_keys([b'E1', b'px1', b'py1', b'pz1', b'pt1', b'eta1', b'phi1', b'E2', b'px2', b'py2', b'pz2', b'pt2', b'eta2', b'phi2', b'M'])" - ] - }, - "execution_count": 91, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "events.arrays(lambda branch: isinstance(branch.interpretation, uproot.asdtype) and\n", - " str(branch.interpretation.fromdtype) == \">f8\").keys()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This is because a function that returns objects selects branches and sets their interpretations in one pass." 
- ] - }, - { - "cell_type": "code", - "execution_count": 92, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{b'px1': array([-41.195286, 35.11805 , 35.11805 , ..., 32.37749 , 32.37749 ,\n", - " 32.485394], dtype=float32),\n", - " b'px2': array([ 34.144436, -41.195286, -40.883324, ..., -68.041916, -68.794136,\n", - " -68.794136], dtype=float32)}" - ] - }, - "execution_count": 92, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "events.arrays(lambda branch: uproot.asdtype(\">f8\", \"f8\", \"f8\", \"i8', (10,))\")" - ] - }, - "execution_count": 96, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "tree[\"ArrayInt64\"].interpretation" - ] - }, - { - "cell_type": "code", - "execution_count": 97, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "(10,)" - ] - }, - "execution_count": 97, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "tree[\"ArrayInt64\"].interpretation.todtype.shape" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The [dtype shape](https://docs.scipy.org/doc/numpy/reference/generated/numpy.dtype.shape.html) of a TBranch with one value per event (simple, 1-dimensional arrays) is an empty tuple." - ] - }, - { - "cell_type": "code", - "execution_count": 98, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "()" - ] - }, - "execution_count": 98, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "tree[\"Int64\"].interpretation.todtype.shape" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Fixed-width arrays are exploded into one column per element when viewed as a [pandas.DataFrame](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.html)." - ] - }, - { - "cell_type": "code", - "execution_count": 99, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
ArrayInt64[0]ArrayInt64[1]ArrayInt64[2]ArrayInt64[3]ArrayInt64[4]ArrayInt64[5]ArrayInt64[6]ArrayInt64[7]ArrayInt64[8]ArrayInt64[9]
entry
00000000000
11111111111
22222222222
33333333333
44444444444
55555555555
66666666666
77777777777
88888888888
99999999999
1010101010101010101010
1111111111111111111111
1212121212121212121212
1313131313131313131313
1414141414141414141414
1515151515151515151515
1616161616161616161616
1717171717171717171717
1818181818181818181818
1919191919191919191919
\n", - "
" - ], - "text/plain": [ - " ArrayInt64[0] ArrayInt64[1] ArrayInt64[2] ArrayInt64[3] \\\n", - "entry \n", - "0 0 0 0 0 \n", - "1 1 1 1 1 \n", - "2 2 2 2 2 \n", - "3 3 3 3 3 \n", - "4 4 4 4 4 \n", - "5 5 5 5 5 \n", - "6 6 6 6 6 \n", - "7 7 7 7 7 \n", - "8 8 8 8 8 \n", - "9 9 9 9 9 \n", - "10 10 10 10 10 \n", - "11 11 11 11 11 \n", - "12 12 12 12 12 \n", - "13 13 13 13 13 \n", - "14 14 14 14 14 \n", - "15 15 15 15 15 \n", - "16 16 16 16 16 \n", - "17 17 17 17 17 \n", - "18 18 18 18 18 \n", - "19 19 19 19 19 \n", - "\n", - " ArrayInt64[4] ArrayInt64[5] ArrayInt64[6] ArrayInt64[7] \\\n", - "entry \n", - "0 0 0 0 0 \n", - "1 1 1 1 1 \n", - "2 2 2 2 2 \n", - "3 3 3 3 3 \n", - "4 4 4 4 4 \n", - "5 5 5 5 5 \n", - "6 6 6 6 6 \n", - "7 7 7 7 7 \n", - "8 8 8 8 8 \n", - "9 9 9 9 9 \n", - "10 10 10 10 10 \n", - "11 11 11 11 11 \n", - "12 12 12 12 12 \n", - "13 13 13 13 13 \n", - "14 14 14 14 14 \n", - "15 15 15 15 15 \n", - "16 16 16 16 16 \n", - "17 17 17 17 17 \n", - "18 18 18 18 18 \n", - "19 19 19 19 19 \n", - "\n", - " ArrayInt64[8] ArrayInt64[9] \n", - "entry \n", - "0 0 0 \n", - "1 1 1 \n", - "2 2 2 \n", - "3 3 3 \n", - "4 4 4 \n", - "5 5 5 \n", - "6 6 6 \n", - "7 7 7 \n", - "8 8 8 \n", - "9 9 9 \n", - "10 10 10 \n", - "11 11 11 \n", - "12 12 12 \n", - "13 13 13 \n", - "14 14 14 \n", - "15 15 15 \n", - "16 16 16 \n", - "17 17 17 \n", - "18 18 18 \n", - "19 19 19 " - ] - }, - "execution_count": 99, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "tree.pandas.df(\"ArrayInt64\", entrystop=20)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Multiple values per event: leaf-lists\n", - "\n", - "Another of ROOT's fundamental TBranch types is a \"[leaf-list](https://root.cern.ch/root/htmldoc/guides/users-guide/Trees.html#adding-a-branch-to-hold-a-list-of-variables),\" or a TBranch with multiple TLeaves. (**Note:** in ROOT terminology, \"TBranch\" is a data structure that usually points to data in TBaskets and \"TLeaf\" is the _data type_ descriptor. TBranches and TLeaves have no relationship to the interior and endpoints of a tree structure in computer science.)\n", - "\n", - "The Numpy analogue of a leaf-list is a [structured array](https://docs.scipy.org/doc/numpy/user/basics.rec.html), a [dtype](https://docs.scipy.org/doc/numpy/reference/arrays.dtypes.html) with named fields, which is Numpy's view into a C array of structs (with or without padding)." - ] - }, - { - "cell_type": "code", - "execution_count": 100, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([(1.1, 1, 97), (2.2, 2, 98), (3.3, 3, 99), (4. , 4, 100),\n", - " (5.5, 5, 101)], dtype=[('x', '\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
\n", - "" - ], - "text/plain": [ - " leaflist.x leaflist.y leaflist.z\n", - "entry \n", - "0 1.1 1 97\n", - "1 2.2 2 98\n", - "2 3.3 3 99\n", - "3 4.0 4 100\n", - "4 5.5 5 101" - ] - }, - "execution_count": 107, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "tree.pandas.df(\"leaflist\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The **flatname** parameter determines how fixed-width arrays and field names are translated into Pandas names; the default is `uproot._connect._pandas.default_flatname` (a function from **branchname** _(str)_, **fieldname** _(str)_, **index** _(int)_ to Pandas column name _(str)_)." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Multiple values per event: jagged arrays\n", - "\n", - "In physics data, it is even more common to have an arbitrary number of values per event than a fixed number of values per event. Consider, for instance, particles produced in a collision, tracks in a jet, hits on a track, etc.\n", - "\n", - "Unlike fixed-width arrays and a fixed number of fields per element, Numpy has no analogue for this type. It is fundamentally outside of Numpy's scope because Numpy describes rectangular tables of data. As we have seen above, Pandas has some support for this so-called \"jagged\" (sometimes \"ragged\") data, but only through manipulation of its index ([pandas.MultiIndex](https://pandas.pydata.org/pandas-docs/stable/user_guide/advanced.html)), not the data themselves.\n", - "\n", - "For this, uproot fills a new `JaggedArray` data structure (from the awkward-array library, like `ChunkedArray` and `VirtualArray`)." - ] - }, - { - "cell_type": "code", - "execution_count": 108, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 108, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "tree = uproot.open(\"https://scikit-hep.org/uproot/examples/nesteddirs.root\")[\"one/two/tree\"]\n", - "array = tree.array(\"SliceInt64\", entrystop=20)\n", - "array" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "These `JaggedArrays` are made of [Numpy arrays](https://docs.scipy.org/doc/numpy/reference/generated/numpy.array.html) and follow the same [Numpy slicing rules](https://docs.scipy.org/doc/numpy/reference/arrays.indexing.html), including [advanced indexing](https://docs.scipy.org/doc/numpy/reference/arrays.indexing.html#advanced-indexing).\n", - "\n", - "Awkward-array generalizes Numpy in many ways—details can be found [in its documentation](https://github.com/scikit-hep/awkward-array)." 
- ] - }, - { - "cell_type": "code", - "execution_count": 109, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9])" - ] - }, - "execution_count": 109, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "array.counts" - ] - }, - { - "cell_type": "code", - "execution_count": 110, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([ 1, 2, 2, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 5, 6, 6,\n", - " 6, 6, 6, 6, 7, 7, 7, 7, 7, 7, 7, 8, 8, 8, 8, 8, 8,\n", - " 8, 8, 9, 9, 9, 9, 9, 9, 9, 9, 9, 11, 12, 12, 13, 13, 13,\n", - " 14, 14, 14, 14, 15, 15, 15, 15, 15, 16, 16, 16, 16, 16, 16, 17, 17,\n", - " 17, 17, 17, 17, 17, 18, 18, 18, 18, 18, 18, 18, 18, 19, 19, 19, 19,\n", - " 19, 19, 19, 19, 19])" - ] - }, - "execution_count": 110, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "array.flatten()" - ] - }, - { - "cell_type": "code", - "execution_count": 111, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 111, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "array[:6]" - ] - }, - { - "cell_type": "code", - "execution_count": 112, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([ 2, 3, 4, 5, 6, 7, 8, 9, 12, 13, 14, 15, 16, 17, 18, 19])" - ] - }, - "execution_count": 112, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "array[array.counts > 1, 0]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Here is an example of `JaggedArrays` in physics data:" - ] - }, - { - "cell_type": "code", - "execution_count": 113, - "metadata": {}, - "outputs": [], - "source": [ - "events2 = uproot.open(\"https://scikit-hep.org/uproot/examples/HZZ.root\")[\"events\"]" - ] - }, - { - "cell_type": "code", - "execution_count": 114, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 114, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "E, px, py, pz = events2.arrays([\"Muon_E\", \"Muon_P[xyz]\"], outputtype=tuple)\n", - "E" - ] - }, - { - "cell_type": "code", - "execution_count": 115, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 115, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "pt = numpy.sqrt(px**2 + py**2)\n", - "p = numpy.sqrt(px**2 + py**2 + pz**2)\n", - "p" - ] - }, - { - "cell_type": "code", - "execution_count": 116, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 116, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "eta = numpy.log((p + pz)/(p - pz))/2\n", - "eta" - ] - }, - { - "cell_type": "code", - "execution_count": 117, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 117, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "phi = numpy.arctan2(py, px)\n", - "phi" - ] - }, - { - "cell_type": "code", - "execution_count": 118, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([2, 1, 2, ..., 1, 1, 1])" - ] - }, - "execution_count": 118, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "pt.counts" - ] - }, - { - "cell_type": "code", - "execution_count": 119, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ 
- "array([54.168106, 37.744152, 24.417913, ..., 33.461536, 63.619816,\n", - " 42.93995 ], dtype=float32)" - ] - }, - "execution_count": 119, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "pt.flatten()" - ] - }, - { - "cell_type": "code", - "execution_count": 120, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 120, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "pt[:6]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Note that if you want to histogram the inner contents of these arrays (i.e. histogram of particles, ignoring event boundaries), functions like [numpy.histogram](https://docs.scipy.org/doc/numpy/reference/generated/numpy.histogram.html) require non-jagged arrays, so flatten them with a call to `.flatten()`.\n", - "\n", - "To select elements of inner lists (Pandas's [DataFrame.xs](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.xs.html)), first require the list to have at least that many elements." - ] - }, - { - "cell_type": "code", - "execution_count": 121, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([54.168106, 53.58827 , 88.63194 , ..., 58.38824 , 61.645054,\n", - " 44.971596], dtype=float32)" - ] - }, - "execution_count": 121, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "pt[pt.counts > 1, 0]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "`JaggedArrays` of booleans select from inner lists (i.e. put a cut on particles):" - ] - }, - { - "cell_type": "code", - "execution_count": 122, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 122, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "pt > 50" - ] - }, - { - "cell_type": "code", - "execution_count": 123, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 123, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "eta[pt > 50]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "And Numpy arrays of booleans select from outer lists (i.e. put a cut on events):" - ] - }, - { - "cell_type": "code", - "execution_count": 124, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 124, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "eta[pt.max() > 50]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Reducers like `count`, `sum`, `min`, `max`, `any` (boolean), or `all` (boolean) apply per-event, turning a `JaggedArray` into a Numpy array." - ] - }, - { - "cell_type": "code", - "execution_count": 125, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([54.168106, 24.417913, 53.58827 , ..., 33.461536, 63.619816,\n", - " 42.93995 ], dtype=float32)" - ] - }, - "execution_count": 125, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "pt.max()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "You can even do combinatorics, such as `a.cross(b)` to compute the Cartesian product of `a` and `b` per event, or `a.choose(n)` to choose `n` distinct combinations of elements per event." 
- ] - }, - { - "cell_type": "code", - "execution_count": 126, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 126, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "pt.choose(2)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Some of these functions have \"arg\" versions that return integers, which can be used in indexing." - ] - }, - { - "cell_type": "code", - "execution_count": 127, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 127, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "abs(eta).argmax()" - ] - }, - { - "cell_type": "code", - "execution_count": 128, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 128, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "pairs = pt.argchoose(2)\n", - "pairs" - ] - }, - { - "cell_type": "code", - "execution_count": 129, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "(,\n", - " )" - ] - }, - "execution_count": 129, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "left = pairs.i0\n", - "right = pairs.i1\n", - "left, right" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Masses of unique pairs of muons, for events that have them:" - ] - }, - { - "cell_type": "code", - "execution_count": 130, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 130, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "masses = numpy.sqrt((E[left] + E[right])**2 - (px[left] + px[right])**2 -\n", - " (py[left] + py[right])**2 - (pz[left] + pz[right])**2)\n", - "masses" - ] - }, - { - "cell_type": "code", - "execution_count": 131, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAY4AAAEGCAYAAABy53LJAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8li6FKAAAa4klEQVR4nO3df7xcdX3n8de7gFCQIJQLj9vA3QQ3YgNrL3gfxNTqUtEaWTFo1yZE2VRZE3fhIa4+HgWkq922tFRFV6rYRInEbRLCCjTUtS2U1SJtCN5AhPwACYTGhLvJVZTEopEkn/3jnImHy8ydc+beM2dm8n4+HvO4c75zzsznO3fmfOb7/Z7zPYoIzMzM8vqlqgMwM7Pu4sRhZmaFOHGYmVkhThxmZlaIE4eZmRVyZNUBTMTJJ58c06ZNqzoMM7Ousn79+h9ERF+r23d14pg2bRrDw8NVh2Fm1lUk/ctEtndXlZmZFeLEYWZmhThxmJlZIU4cZmZWiBOHmZkV4sRhZmaFOHGYmVkhThxmZlaIE4eZmRXixGFmZoU4cZiZWSFOHGZmVogTh5mZFeLEYWZmhZSWOCSdLumbkrZI2iTpyrT8JEn3SHoi/XtiZptrJG2V9Likt5YVm5mZta7MFsd+4KMR8WvA64DLJc0ErgbujYgZwL3pMulj84GzgDnATZKOKDE+MzNrQWmJIyJGIuKh9P5eYAswFZgLLE9XWw5cnN6fC9waEfsiYhuwFTivrPjMzKw1bbkCoKRpwDnAOuDUiBiBJLlIOiVdbSrwQGazHWnZ2OdaBCwCGBgYKC9oM+sIK9dtZ82GnYeW5w5OZcEsf/erVPrguKSXA7cDH46IPeOtWqcsXlIQsTQihiJiqK+v5UvmmlmXWLNhJ5tHkl3H5pE9L0oiVo1SE4eko0iSxoqIuCMt3iWpP328H9idlu8ATs9sfhrwTJnxmVl3mNk/hdWLZzOzf0rVoRjlHlUl4GZgS0R8JvPQXcDC9P5CYE2mfL6koyVNB2YAD5YVn5mZtabMMY7XA5cCj0rakJZ9DLgeuE3SZcB24N0AEbFJ0m3AZpIjsi6PiAMlxmdmZi0oLXFExP3UH7cAuKDBNtcB15UVk5mZTZzPHDczs0KcOMzMrBAnDjMzK8SJw8zMCnHiMDOzQpw4zMysECcOMzMrxInDzMwKceIwM7NCnDjMzKwQJw4zMyvEicPMzApx4jAzs0KcOMzMrBAnDjMzK8SJw8zMCinz0rHLJO2WtDFTtlrShvT2dO3KgJKmSfpp5rG/LCsuMzObmDIvHXsL8Hngq7WCiJhXuy/pBuC5zPpPRsRgifGYmdkkKPPSsfdJmlbvMUkCfhd4U1mvb2Zm5ahqjOMNwK6IeCJTNl3Sw5L+UdIbGm0oaZGkYUnDo6Oj5UdqZmYvUlXiuARYlVkeAQYi4hzgI8BKSVPqbRgRSyNiKCKG+vr62hCqmZlltT1xSDoSeBewulYWEfsi4ofp/fXAk8Cr2h2bmZk1V+bgeCNvBh6LiB21Akl9wLMRcUDSGcAM4KkKYjOzDrd5ZA/zlqwFYO7gVBbMGqg4osNPmYfjrgLWAmdK2iHpsvSh+by4mwrgjcAjkr4LfA34YEQ8W1ZsZtad5g5OZWZ/0ou9eWQPazbsrDiiw1OZR1Vd0qD89+qU3Q7cXlYsZtYbFswaONTCqLU6rP185riZmRXixGFmZoU4cZiZWSFOHGZmVogTh5mZFeLEYWZmhThxmJlZIU4cZmZWiBOHmZkV4sRhZmaFOHGYmVkhThxmZlaIE4eZmRXixGFmZoU4cZiZWSFOHGZmVkiZVwBcJmm3pI2Zsj+UtFPShvR2YeaxayRtlfS4pLeWFZeZmU1MmS2OW4A5dco/GxGD6e0bAJJmklxS9qx0m5skHVFibGZm1qLSEkdE3AfkvW74XODWiNgXEduArcB5ZcVmZmatq2KM4wpJj6RdWSemZVOB72fW2ZGWvYSkRZKGJQ2Pjo6WHauZmY3R7sTxReCVwCAwAtyQlqvOulHvCSJiaUQMRcRQX19fOVGamVlDbU0cEbErIg5ExEHgS/yiO2oHcHpm1dOAZ9oZm5mZ5dPWxCGpP7P4TqB2xNVdwHxJR0uaDswAHmxnbGZmls+RZT2xpFXA+cDJknYAnwDOlzRI0g31NLAYICI2SboN2AzsBy6PiANlxWZmZq0rLXFExCV1im8eZ/3rgOvKisfMzCaHzxw3M7NCmiYOSe+S9ISk5yTtkbRX0p52BGdmZp0nT1fVJ4GLImJL2cGYmVnny9NVtctJw8zMavK0OIYlrQb+GthXK4yIO0qLyszMOlaexDEFeB747UxZAE4cZmaHoaaJIyLe145AzMysOzRMHJJ+PyI+KekvqDNvVER8qNTIzMysI43X4qgNiA+3IxAzM+sODRNHRPxN+nc5gKQpyWLsbVNsZmbWgZqOcUgaAr4CHJ8s6sfA+yNifdnBmZmNZ/PIHuYtWQvA3MGpLJg1UHFEh4c8R1UtA/5rRHwbQNJvkiSS15QZmJnZeOYO/uJab5tHksksnDjaI0/i2FtLGgARcb8kd1eZWalWrtvOmg072Tyyh5n9U17y+IJZA4cSRa3VYe0x3lFV56Z3H5S0BFhFcnTVPOBb5YdmZoezbNLIti6seuO1OG4Ys/yJzP26l3U1M5tMM/unsHrx7KrDsDHGO6rqtybyxJKWAW8HdkfE2WnZp4CLgJ8DTwLvi4gfS5pGcvjv4+nmD0TEByfy+mZmVo4yr8dxCzBnTNk9wNkR8Rrge8A1mceejIjB9OakYWbWoUpLHBFxH/DsmLK7I2J/uvgAcFpZr29mZuUYN3FI+iVJv1HSa78f+NvM8nRJD0v6R0lvKOk1zcxsgsZNHBFxkJcOkk+YpGuB/cCKtGgEGIiIc4CPACvTM9XrbbtI0rCk4dHR0ckOzczMmsjTVXW3pN+RpMl4QUkLSQbN3xMRARAR+yLih+n99SQD56+qt31ELI2IoYgY6uvrm4yQzMysgDwnAH4EOA44IOmngEjmrKrbIhiPpDnAVcC/j4jnM+V9wLMRcUDSGcAM4Kmiz29mZuXLcz2O41t5YkmrgPOBkyXtIDkP5BrgaOCetAFTO+z2jcAfSdoPHAA+GBHP1n1iMzOrVJ5JDgW8B5geEX8s6XSgPyIeHG+7iLikTvHNDda9Hbg9R7xmZlaxPGMcNwGzgQXp8k+AL5QWkZmZdbQ8YxyzIuJcSQ8DRMSPJL2s5LjMzKxD5WlxvCDpCNL5qdKB7IOlRmVmZh0rT+K4EbgTOFXSdcD9wJ+WGpWZmXWsPEdVrZC0HrggLbo4IraMt42ZmfWuPGMcAMcCte6qXy4vHDMz63RNu6okfRxYDpwEnAx8RdIflB2YmZl1pjwtjkuAcyLiZwCSrgceAv6kzMDMzKwz5Rkcfxo4JrN8NMlcUmZmdhjK0+LYB2ySdA/JGMdbgPsl3QgQER8qMT4zM+sweRLHnemt5lvlhGJmZt0gz+G4y9sRiJmZdYcyrzluZmY9yInDzMwKKZQ40m
uQF76Ak5mZ9Y481+NYCXyQ5AJL64ETJH0mIj5VdnBmdnhZuW47azbsBGDzyB5m9vt3aifK0+KYGRF7gIuBbwADwKXNNpK0TNJuSRszZSdJukfSE+nfEzOPXSNpq6THJb21hbqYWZdbs2Enm0f2ADCzfwpzB6dWHJHVk+dw3KMkHUWSOD4fES+kl31t5hbg88BXM2VXA/dGxPWSrk6Xr5I0E5gPnAX8KvAPkl4VEQfyV8XMesHM/imsXjy76jBsHHlaHEtIzh4/DrhP0r8Bnmu2UUTcB4y9bvhcknmvSP9enCm/NSL2RcQ2YCtwXo7YzMyApGtr3pK1zFuylpXrtlcdTk/L0+L4m4i4sbYgaTvw/hZf79SIGAGIiBFJp6TlU4EHMuvtSMteQtIiYBHAwMBAi2GYWS/JdmnVuroWzPL+oSx5Why3ZxciIoBbJzmOen1fUW/FiFgaEUMRMdTX1zfJYZhZN1owa4DVi2ezevFsD6i3QcMWh6RXk4w5nCDpXZmHpvDiSQ+L2CWpP21t9AO70/IdwOmZ9U4DnmnxNczMrETjtTjOBN4OvAK4KHM7F/hAi693F7Awvb8QWJMpny/paEnTgRnAgy2+hpmZlahhiyMi1gBrJM2OiLVFn1jSKuB84GRJO4BPANcDt0m6DNgOvDt9rU2SbgM2A/uBy31ElZlZZ8ozOL5V0seAadn1I2LcAfKIuKTBQxfUK4yI64DrcsRjZmYVypM41gDfBv6B5OxxMzM7jOVJHMdGxFWlR2JmZl0hz+G4X5d0YemRmJlZV8iTOK4kSR4/k7RH0l5Je8oOzMzMOlOeKwAe345AzMysOzRtcSjxXkn/PV0+XZLnkTIzO0zl6aq6CZgNLEiXfwJ8obSIzMyso+U5qmpWRJwr6WGAiPiRpJeVHJeZmXWoPC2OFyQdQTrpoKQ+4GCpUZmZWcfKkzhuBO4ETpF0HXA/8KelRmVmZh0rz1FVKyStJ5kqRMDFEbGl9MjMzKwjNU0ckj4HrI4ID4ibmVmurqqHgD+QtFXSpyQNlR2UmZl1rqaJIyKWR8SFJNcA/x7w55KeKD0yMzPrSHlaHDX/Fng1yfTqj5USjZmZdbw8Z47XWhh/BGwEXhsRF5UemZmZdaQ8JwBuA2ZHxA8m4wUlnQmszhSdAXyc5BK1HwBG0/KPRcQ3JuM1zcxs8uTpqloKzJH0cQBJAxOZqyoiHo+IwYgYBF4LPE9yngjAZ2uPOWmYmXWmPInjCyRzVdUuBbuXyZur6gLgyYj4l0l6PjMzK1mexDErIi4HfgbJXFXAZM1VNR9YlVm+QtIjkpZJOrHeBpIWSRqWNDw6OlpvFTMzK1Flc1WlEyW+A/jfadEXgVcCg8AIcEO97SJiaUQMRcRQX1/fRMMwM7OCqpyr6m3AQxGxCyAidkXEgYg4CHyJ5LwRMzPrMFXOVXUJmW4qSf0RMZIuvpPk0F8zM+sweQ7HJSIeYxJP+pN0LPAWYHGm+JOSBkm6xJ4e85iZmXWIXIljskXE88CvjCm7tIpYzMysmCJTjpiZmTlxmJlZMU4cZmZWiBOHmfWczSN7mLdkLSvXba86lJ5UyeC4mVlZ5g5OBZLkAbBg1kCV4fQktzjMrKcsmDXA6sWzmdk/pepQepZbHGZWqZXrtrNmw04gaSV4h9/53OIws0qt2bDzULfSzP4ph7qarHO5xWFmlZvZP4XVi2dXHYbl5BaHmZkV4sRhZmaFOHGYmVkhThxmZlaIE4eZmRXixGFmZoU4cZiZWSGVnMch6WlgL3AA2B8RQ5JOAlYD00iuAPi7EfGjKuIzs3L5bPHuVmWL47ciYjAihtLlq4F7I2IGcG+6bGY9yGeLd7dOOnN8LnB+en858C3gqqqCMbNy+Wzx7lVViyOAuyWtl7QoLTs1IkYA0r+n1NtQ0iJJw5KGR0dH2xSumZnVVNXieH1EPCPpFOAeSY/l3TAilgJLAYaGhqKsAM3MrL5KWhwR8Uz6dzdwJ3AesEtSP0D6d3cVsZmZ2fjanjgkHSfp+Np94LeBjcBdwMJ0tYXAmnbHZmZmzVXRVXUqcKek2uuvjIi/k/Qd4DZJlwHbgXdXEJuZmTXR9sQREU8Bv16n/IfABe2Ox8zMivGZ42ZmVogTh5mZFeLEYWZmhXTSmeNmZpNq88ge5i1ZC8DcwaksmDVQcUS9wYnDzHpSdv6r2rxYThyTw4nDzHrSglkDhxJFrdVhk8NjHGZmVogTh5mZFeLEYWZmhThxmJlZIR4cN7O28OVie4dbHGbWFr5cbO9wi8PM2saXi+0NbnGYmVkhThxmZlaIu6rM7LDgeasmTxWXjj1d0jclbZG0SdKVafkfStopaUN6u7DdsZlZb5o7OPXQUVybR/YcOrrLWlNFi2M/8NGIeCi99vh6Sfekj302Ij5dQUxm1sM8b9XkquLSsSPASHp/r6QtgI/LMzPrEpUOjkuaBpwDrEuLrpD0iKRlkk5ssM0iScOShkdHR9sUqZmZ1VSWOCS9HLgd+HBE7AG+CLwSGCRpkdxQb7uIWBoRQxEx1NfX17Z4zcwsUUnikHQUSdJYERF3AETErog4EBEHgS8B51URm5mZja+Ko6oE3AxsiYjPZMr7M6u9E9jY7tjMzKy5Ko6qej1wKfCopA1p2ceASyQNAgE8DSyuIDYzM2uiiqOq7gdU56FvtDsWMytHdibcGs+I2zt85riZTbraTLjZRNFJM+L6LPKJceIws1J06ky42eRVm+bdiaMYJw4zO6z4LPKJ8+y4ZmZWiBOHmZkV4sRhZmaFeIzDzCZF9hDcbjr01kdYFefEYWYtyyaLddueBWDW9JM66tDb8fgIq9Y4cZhZy7Lna8yaflLX/WL3EVatceIwswnp1PM1WpHttqqn2xJjWZw4zHrI2Kk+ytrR1V6nm8YymmnWtbZu27Os2/bsofe3tn473u9O48Rh1kOyO/MiffZ5Ek6j8YxuGMvII9ttVc/Ywf+a2vtdL7H0ahJx4jDrQBNpOdS6jor02TdKOI2SRTeOZ0xUo/GQ2vtdL7H06vvjxNFlsh/OXvniNttJtqv7pWxF6tFqyyHva2fVWgz1Ek63D3630+E00O7EUYLJ3tHV+9V3/DHJv24ydybt3Ck0+iVbbydZ5k60nXUuWo8iLYdmYw6NHqt1rxx/zJENxyp6afB7MtX+h83GeHrlh0+WE0cJsl/SVvs9m3URNPr12GqcZTWtG/3SbdTtMW/J2pcc2VKLsbYTHXvCVq0u9dSet977Wbs/WUkkzw6ilW6kPLL/y0ZjDvUSwNgkavlk36tm71s7vmft1nGJQ9Ic4HPAEcCXI+L6ikMaV72dBUy837NZF0HRxNHoV3a9HVmjnX29nfDY5xuvHlmNuj3qfQmzO8N6J2zV7o99jez7Xe/9rMUHLz1ipl69axrVv9kPhkbyXPSo2WGi2eRaRLMBYasvz/tW+5+N/eGT13jdizXNDmJotM5EdVTikHQE8AXgLcAO4DuS7oqIzdVG9mKNWgP1mv2N+j2bfSiK7Aia9V9nd2p7f7b/0A6tSLdGdkdYr85jk
9LYrpO8O7RmX8ix72e2u2Dsa2RbJ43iqD1Xo/ew3kBxo/pnX2PsZ6Red1Attuzz1TRKlo00ammMrX9erW5niez/Yuz/pt6PgPFaxtnPRVaj7954P2AmS0clDuA8YGtEPAUg6VZgLlA3cTw1+q+VDEI16mbJ0+wfb2eRlWfKhmbPVW+nlu3mavSBrreTzdatUZ0bJZeyukCadReM9+Udq1HCyiafsXUaW//sa2Sfr97nIhtLs0HnVlsFReo/GdvZLzT6n9V7Lxt9b5p9Lhp99xr9gMm+3kQpIib8JJNF0n8E5kTEf06XLwVmRcQVmXUWAYvSxbOBjW0PtH1OBn5QdRAlcv26Wy/Xr5frBnBmRBzf6sad1uJQnbIXZbaIWAosBZA0HBFD7QisCq5fd3P9ulcv1w2S+k1k+067HscO4PTM8mnAMxXFYmZmdXRa4vgOMEPSdEkvA+YDd1Uck5mZZXRUV1VE7Jd0BfD3JIfjLouITeNssrQ9kVXG9eturl/36uW6wQTr11GD42Zm1vk6ravKzMw6nBOHmZkV0rWJQ9IcSY9L2irp6qrjmShJp0v6pqQtkjZJujItP0nSPZKeSP+eWHWsrZJ0hKSHJX09Xe6lur1C0tckPZb+D2f3WP3+W/q53ChplaRjurl+kpZJ2i1pY6asYX0kXZPuax6X9NZqos6vQf0+lX4+H5F0p6RXZB4rVL+uTByZqUneBswELpE0s9qoJmw/8NGI+DXgdcDlaZ2uBu6NiBnAvelyt7oS2JJZ7qW6fQ74u4h4NfDrJPXsifpJmgp8CBiKiLNJDlyZT3fX7xZgzpiyuvVJv4fzgbPSbW5K90Gd7BZeWr97gLMj4jXA94BroLX6dWXiIDM1SUT8HKhNTdK1ImIkIh5K7+8l2fFMJanX8nS15cDF1UQ4MZJOA/4D8OVMca/UbQrwRuBmgIj4eUT8mB6pX+pI4JclHQkcS3J+VdfWLyLuA8bOvdGoPnOBWyNiX0RsA7aS7IM6Vr36RcTdEbE/XXyA5Dw5aKF+3Zo4pgLfzyzvSMt6gqRpwDnAOuDUiBiBJLkAp1QX2YT8T+D3gYOZsl6p2xnAKPCVtCvuy5KOo0fqFxE7gU8D24ER4LmIuJseqV9Go/r04v7m/cDfpvcL169bE0fTqUm6laSXA7cDH46IPc3W7waS3g7sjoj1VcdSkiOBc4EvRsQ5wL/SXd0240r7+ucC04FfBY6T9N5qo2qrntrfSLqWpGt8Ra2ozmrj1q9bE0dPTk0i6SiSpLEiIu5Ii3dJ6k8f7wd2VxXfBLweeIekp0m6Fd8k6a/ojbpB8nncERHr0uWvkSSSXqnfm4FtETEaES8AdwC/Qe/Ur6ZRfXpmfyNpIfB24D3xi5P4CtevWxNHz01NIkkkfeRbIuIzmYfuAham9xcCa9od20RFxDURcVpETCP5X/3fiHgvPVA3gIj4f8D3JZ2ZFl1AcimAnqgfSRfV6yQdm35OLyAZg+uV+tU0qs9dwHxJR0uaDswAHqwgvglRcpG8q4B3RMTzmYeK1y8iuvIGXEhyZMCTwLVVxzMJ9flNkubhI8CG9HYh8CskR3g8kf49qepYJ1jP84Gvp/d7pm7AIDCc/v/+Gjixx+r3P4DHSC5j8L+Ao7u5fsAqkvGaF0h+cV82Xn2Aa9N9zePA26qOv8X6bSUZy6jtX/6y1fp5yhEzMyukW7uqzMysIk4cZmZWiBOHmZkV4sRhZmaFOHGYmVkhThxmZlaIE4eZmRXixGGWg6Rp6bUMvpxek2KFpDdL+qf0+g3npbd/Tic6/OfameSSzpL0oKQN6bUQZkg6TtL/kfTd9PnmVV1Hs7x8AqBZDumMxVtJZi3eRDLtzXdJzsh9B/A+4D8Bz0fEfklvBv5LRPyOpL8AHoiIFekUOUeQzAowJyI+kD7/CRHxXJurZdaSI6sOwKyLbIuIRwEkbSK56E9IehSYBpwALJc0g2T6mKPS7dYC16bXJLkjIp5It/m0pD8nmYLl2+2ujFmr3FVllt++zP2DmeWDJD/C/hj4ZiRXybsIOAYgIlaStEp+Cvy9pDdFxPeA1wKPAn8m6ePtqYLZxLnFYTZ5TgB2pvd/r1Yo6QzgqYi4Mb3/GkmPAc9GxF9J+kl2fbNO5xaH2eT5JEnr4Z9IxjFq5gEbJW0AXg18Ffh3wINp2bXAn7Q7WLNWeXDczMwKcYvDzMwKceIwM7NCnDjMzKwQJw4zMyvEicPMzApx4jAzs0KcOMzMrJD/Dxjp468Td7F3AAAAAElFTkSuQmCC\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "counts, edges = numpy.histogram(masses.flatten(), bins=120, range=(0, 120))\n", - "\n", - "matplotlib.pyplot.step(x=edges, y=numpy.append(counts, 0), where=\"post\");\n", - "matplotlib.pyplot.xlim(edges[0], edges[-1]);\n", - "matplotlib.pyplot.ylim(0, counts.max() * 1.1);\n", - "matplotlib.pyplot.xlabel(\"mass\");\n", - "matplotlib.pyplot.ylabel(\"events per bin\");" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Jagged array performance\n", - "\n", - "`JaggedArrays` are compact in memory and fast to read. Whereas [root_numpy](https://pypi.org/project/root-numpy/) reads data like `std::vector` per event into a Numpy array of Numpy arrays (Numpy's object `\"O\"` [dtype](https://docs.scipy.org/doc/numpy/reference/arrays.dtypes.html)), which has data locality issues, `JaggedArray` consists of two contiguous arrays: one containing content (the `floats`) and the other representing structure via `offsets` (random access) or `counts`." - ] - }, - { - "cell_type": "code", - "execution_count": 132, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([90.227806, 74.746544, 89.75766 , ..., 92.06495 , 85.44384 ,\n", - " 75.96062 ], dtype=float32)" - ] - }, - "execution_count": 132, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "masses.content" - ] - }, - { - "cell_type": "code", - "execution_count": 133, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([ 0, 1, 1, ..., 1521, 1521, 1521])" - ] - }, - "execution_count": 133, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "masses.offsets" - ] - }, - { - "cell_type": "code", - "execution_count": 134, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([1, 0, 1, ..., 0, 0, 0])" - ] - }, - "execution_count": 134, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "masses.counts" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Fortunately, ROOT files are themselves structured this way, with variable-width data represented by contents and offsets in a TBasket. These arrays do not need to be deserialized individually, but can be merely cast as Numpy arrays in one Python call. The lack of per-event processing is why reading in uproot and processing data with awkward-array can be fast, despite being written in Python.\n", - "\n", - "
\n", - "\n", - "
" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Special physics objects: Lorentz vectors\n", - "\n", - "Although any C++ type can in principle be read (see below), some are important enough to be given convenience methods for analysis. These are not defined in uproot (which is strictly concerned with I/O), but in [uproot-methods](https://github.com/scikit-hep/uproot-methods). If you need certain classes to have user-friendly methods in Python, you're encouraged to contribute them to [uproot-methods](https://github.com/scikit-hep/uproot-methods).\n", - "\n", - "One of these classes is `TLorentzVectorArray`, which defines an _array_ of Lorentz vectors." - ] - }, - { - "cell_type": "code", - "execution_count": 135, - "metadata": {}, - "outputs": [], - "source": [ - "events3 = uproot.open(\"https://scikit-hep.org/uproot/examples/HZZ-objects.root\")[\"events\"]" - ] - }, - { - "cell_type": "code", - "execution_count": 136, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 136, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "muons = events3.array(\"muonp4\")\n", - "muons" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "In the print-out, these appear to be Python objects, but they're high-performance arrays that are only turned into objects when you look at individuals." - ] - }, - { - "cell_type": "code", - "execution_count": 137, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "(uproot_methods.classes.TLorentzVector.TLorentzVector,\n", - " TLorentzVector(x=-52.899, y=-11.655, z=-8.1608, t=54.779))" - ] - }, - "execution_count": 137, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "muon = muons[0, 0]\n", - "type(muon), muon" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This object has all the usual kinematics methods," - ] - }, - { - "cell_type": "code", - "execution_count": 138, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "0.10559298741436905" - ] - }, - "execution_count": 138, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "muon.mass" - ] - }, - { - "cell_type": "code", - "execution_count": 139, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "-2.9431136434497858" - ] - }, - "execution_count": 139, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "muons[0, 0].delta_phi(muons[0, 1])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "But an array of Lorentz vectors also has these methods, and they are computed in bulk (faster than creating each object and calling the method on each)." 
- ] - }, - { - "cell_type": "code", - "execution_count": 140, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/home/jpivarski/miniconda3/lib/python3.7/site-packages/uproot_methods-0.7.2-py3.7.egg/uproot_methods/classes/TLorentzVector.py:189: RuntimeWarning: invalid value encountered in sqrt\n", - " return self._trymemo(\"mass\", lambda self: self.awkward.numpy.sqrt(self.mag2))\n" - ] - }, - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 140, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "muons.mass # some mass**2 are slightly negative, hence the Numpy warning about negative square roots" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "(**Note:** if you don't want to see Numpy warnings, use [numpy.seterr](https://docs.scipy.org/doc/numpy/reference/generated/numpy.seterr.html).)" - ] - }, - { - "cell_type": "code", - "execution_count": 141, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 141, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "pairs = muons.choose(2)\n", - "lefts = pairs.i0\n", - "rights = pairs.i1\n", - "lefts.delta_r(rights)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "TBranches with C++ class `TLorentzVector` are automatically converted into `TLorentzVectorArrays`. Although they're in wide use, the C++ `TLorentzVector` class is deprecated in favor of [ROOT::Math::LorentzVector](https://root.cern/doc/v612/classROOT_1_1Math_1_1LorentzVector.html). Unlike the old class, the new vectors can be represented with a variety of data types and coordinate systems, and they're split into multiple branches, so uproot sees them as four branches, each representing the components.\n", - "\n", - "You can still use the `TLorentzVectorArray` Python class; you just need to use a special constructor to build the object from its branches." - ] - }, - { - "cell_type": "code", - "execution_count": 142, - "metadata": {}, - "outputs": [], - "source": [ - "# Suppose you have four component branches...\n", - "E, px, py, pz = events2.arrays([\"Muon_E\", \"Muon_P[xyz]\"], outputtype=tuple)" - ] - }, - { - "cell_type": "code", - "execution_count": 143, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 143, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "import uproot_methods\n", - "\n", - "array = uproot_methods.TLorentzVectorArray.from_cartesian(px, py, pz, E)\n", - "array" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "There are constructors for different coordinate systems. Internally, `TLorentzVectorArray` uses the coordinates you give it and only converts to other systems on demand." - ] - }, - { - "cell_type": "code", - "execution_count": 144, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "['from_cartesian',\n", - " 'from_cylindrical',\n", - " 'from_p3',\n", - " 'from_ptetaphi',\n", - " 'from_ptetaphie',\n", - " 'from_ptetaphim',\n", - " 'from_spherical',\n", - " 'from_xyzm']" - ] - }, - "execution_count": 144, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "[x for x in dir(uproot_methods.TLorentzVectorArray) if x.startswith(\"from_\")]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Variable-width values: strings\n", - "\n", - "Strings are another fundamental type. 
In C++, they may be `char*`, `std::string`, or `TString`, but all string types are converted (on demand) to the same Python string type." - ] - }, - { - "cell_type": "code", - "execution_count": 145, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 145, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "branch = uproot.open(\"https://scikit-hep.org/uproot/examples/sample-6.14.00-zlib.root\")[\"sample\"][\"str\"]\n", - "branch.array()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "As with most strings from ROOT, they are unencoded bytestrings (see the `b` before each quote). Since they're not names, there's no **namedecode**, but they can be decoded as needed using the usual Python method." - ] - }, - { - "cell_type": "code", - "execution_count": 146, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "['hey-0',\n", - " 'hey-1',\n", - " 'hey-2',\n", - " 'hey-3',\n", - " 'hey-4',\n", - " 'hey-5',\n", - " 'hey-6',\n", - " 'hey-7',\n", - " 'hey-8',\n", - " 'hey-9',\n", - " 'hey-10',\n", - " 'hey-11',\n", - " 'hey-12',\n", - " 'hey-13',\n", - " 'hey-14',\n", - " 'hey-15',\n", - " 'hey-16',\n", - " 'hey-17',\n", - " 'hey-18',\n", - " 'hey-19',\n", - " 'hey-20',\n", - " 'hey-21',\n", - " 'hey-22',\n", - " 'hey-23',\n", - " 'hey-24',\n", - " 'hey-25',\n", - " 'hey-26',\n", - " 'hey-27',\n", - " 'hey-28',\n", - " 'hey-29']" - ] - }, - "execution_count": 146, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "[x.decode(\"utf-8\") for x in branch.array()]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Arbitrary objects in TTrees\n", - "\n", - "Uproot does not have a hard-coded deserialization for every C++ class type; it uses the \"streamers\" that ROOT includes in each file to learn how to deserialize the objects in that file. Even if you defined your own C++ classes, uproot should be able to read them. (**Caveat:** not all structure types have been implemented, so the coverage of C++ types is a work in progress.)\n", - "\n", - "In some cases, the deserialization is simplified by the fact that ROOT has \"split\" the objects. Instead of seeing a `JaggedArray` of objects, you see a `JaggedArray` of each attribute separately, such as the components of a [ROOT::Math::LorentzVector](https://root.cern/doc/v612/classROOT_1_1Math_1_1LorentzVector.html).\n", - "\n", - "In the example below, `Track` objects under `fTracks` have been split into `fTracks.fUniqueID`, `fTracks.fBits`, `fTracks.fPx`, `fTracks.fPy`, `fTracks.fPz`, etc." - ] - }, - { - "cell_type": "code", - "execution_count": 150, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "--2019-12-18 07:20:26-- https://scikit-hep.org/uproot/examples/Event.root\n", - "Resolving scikit-hep.org (scikit-hep.org)... 185.199.109.153, 185.199.108.153, 185.199.111.153, ...\n", - "Connecting to scikit-hep.org (scikit-hep.org)|185.199.109.153|:443... connected.\n", - "HTTP request sent, awaiting response... 
200 OK\n", - "Length: 37533466 (36M) [application/octet-stream]\n", - "Saving to: ‘Event.root’\n", - "\n", - "Event.root 100%[===================>] 35.79M 3.63MB/s in 11s \n", - "\n", - "2019-12-18 07:20:37 (3.35 MB/s) - ‘Event.root’ saved [37533466/37533466]\n", - "\n" - ] - } - ], - "source": [ - "!wget https://scikit-hep.org/uproot/examples/Event.root" - ] - }, - { - "cell_type": "code", - "execution_count": 151, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "event TStreamerInfo None\n", - "TObject TStreamerInfo None\n", - "fUniqueID TStreamerBasicType asdtype('>u4')\n", - "fBits TStreamerBasicType asdtype('>u4')\n", - "\n", - "fType[20] TStreamerBasicType asdtype(\"('i1', (20,))\")\n", - "fEventName TStreamerBasicType asstring(4)\n", - "fNtrack TStreamerBasicType asdtype('>i4')\n", - "fNseg TStreamerBasicType asdtype('>i4')\n", - "fNvertex TStreamerBasicType asdtype('>u4')\n", - "fFlag TStreamerBasicType asdtype('>u4')\n", - "fTemperature TStreamerBasicType asdtype('>f4', 'float64')\n", - "fMeasures[10] TStreamerBasicType asdtype(\"('>i4', (10,))\")\n", - "fMatrix[4][4] TStreamerBasicType asdtype(\"('>f4', (4, 4))\", \"('i4')\n", - "fEvtHdr.fRun TStreamerBasicType asdtype('>i4')\n", - "fEvtHdr.fDate TStreamerBasicType asdtype('>i4')\n", - "\n", - "fTracks TStreamerObjectPointer None\n", - "fTracks.fUniqueID TStreamerBasicType asjagged(asdtype('>u4'))\n", - "fTracks.fBits TStreamerBasicType asjagged(asdtype('>u4'))\n", - "fTracks.fPx TStreamerBasicType asjagged(asdtype('>f4'))\n", - "fTracks.fPy TStreamerBasicType asjagged(asdtype('>f4'))\n", - "fTracks.fPz TStreamerBasicType asjagged(asdtype('>f4'))\n", - "fTracks.fRandom TStreamerBasicType asjagged(asdtype('>f4'))\n", - "fTracks.fMass2 TStreamerBasicType asjagged(asfloat16(0.0, 0.0, 8, dtype([('exponent', 'u1'), ('mantissa', '>u2')]), dtype('float32')))\n", - "fTracks.fBx TStreamerBasicType asjagged(asfloat16(0.0, 0.0, 10, dtype([('exponent', 'u1'), ('mantissa', '>u2')]), dtype('float32')))\n", - "fTracks.fBy TStreamerBasicType asjagged(asfloat16(0.0, 0.0, 10, dtype([('exponent', 'u1'), ('mantissa', '>u2')]), dtype('float32')))\n", - "fTracks.fMeanCharge TStreamerBasicType asjagged(asdtype('>f4'))\n", - "fTracks.fXfirst TStreamerBasicType asjagged(asfloat16(0, 0, 12, dtype([('exponent', 'u1'), ('mantissa', '>u2')]), dtype('float32')))\n", - "fTracks.fXlast TStreamerBasicType asjagged(asfloat16(0, 0, 12, dtype([('exponent', 'u1'), ('mantissa', '>u2')]), dtype('float32')))\n", - "fTracks.fYfirst TStreamerBasicType asjagged(asfloat16(0, 0, 12, dtype([('exponent', 'u1'), ('mantissa', '>u2')]), dtype('float32')))\n", - "fTracks.fYlast TStreamerBasicType asjagged(asfloat16(0, 0, 12, dtype([('exponent', 'u1'), ('mantissa', '>u2')]), dtype('float32')))\n", - "fTracks.fZfirst TStreamerBasicType asjagged(asfloat16(0, 0, 12, dtype([('exponent', 'u1'), ('mantissa', '>u2')]), dtype('float32')))\n", - "fTracks.fZlast TStreamerBasicType asjagged(asfloat16(0, 0, 12, dtype([('exponent', 'u1'), ('mantissa', '>u2')]), dtype('float32')))\n", - "fTracks.fCharge TStreamerBasicType asjagged(asdouble32(-1.0, 1.0, 2, dtype('>u4'), dtype('float64')))\n", - "fTracks.fVertex[3] TStreamerBasicType asjagged(asdouble32(-30.0, 30.0, 16, dtype(('>u4', (3,))), dtype(('i4'))\n", - "fTracks.fValid TStreamerBasicType asjagged(asdtype('>i2'))\n", - "fTracks.fNsp TStreamerBasicType asjagged(asdtype('>u4'))\n", - "fTracks.fPointValue TStreamerBasicPointer None\n", - "fTracks.fTriggerBits.fUniqueID\n", - " 
TStreamerBasicType asjagged(asdtype('>u4'))\n", - "fTracks.fTriggerBits.fBits TStreamerBasicType asjagged(asdtype('>u4'))\n", - "fTracks.fTriggerBits.fNbits\n", - " TStreamerBasicType asjagged(asdtype('>u4'))\n", - "fTracks.fTriggerBits.fNbytes\n", - " TStreamerBasicType asjagged(asdtype('>u4'))\n", - "fTracks.fTriggerBits.fAllBits\n", - " TStreamerBasicPointer asjagged(asdtype('uint8'), 1)\n", - "fTracks.fTArray[3] TStreamerBasicType asjagged(asdtype(\"('>f4', (3,))\"))\n", - "\n", - "fHighPt TStreamerObjectPointer asgenobj(TRefArray)\n", - "fMuons TStreamerObjectPointer asgenobj(TRefArray)\n", - "fLastTrack TStreamerInfo asobj()\n", - "fWebHistogram TStreamerInfo asobj()\n", - "fH TStreamerObjectPointer asgenobj(TH1F)\n", - "fTriggerBits TStreamerInfo None\n", - "fTriggerBits.TObject (no streamer) None\n", - "fTriggerBits.fUniqueID TStreamerBasicType asdtype('>u4')\n", - "fTriggerBits.fBits TStreamerBasicType asdtype('>u4')\n", - "\n", - "fTriggerBits.fNbits TStreamerBasicType asdtype('>u4')\n", - "fTriggerBits.fNbytes TStreamerBasicType asdtype('>u4')\n", - "fTriggerBits.fAllBits TStreamerBasicPointer asjagged(asdtype('uint8'), 1)\n", - "\n", - "fIsValid TStreamerBasicType asdtype('bool')\n", - "\n" - ] - } - ], - "source": [ - "tree = uproot.open(\"Event.root\")[\"T\"]\n", - "tree.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "In this view, many of the attributes are _not_ special classes and can be read as arrays of numbers," - ] - }, - { - "cell_type": "code", - "execution_count": 152, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([20.28261757, 20.47114182, 20.5931778 , 20.5848484 , 20.80287933,\n", - " 20.2972393 , 20.30301666, 20.87490845, 20.56552505, 20.67128181,\n", - " 20.74524879, 20.85200119, 20.26188469, 20.82903862, 20.02412415,\n", - " 20.97918129, 20.71551132, 20.60189629, 20.11310196, 20.53161049])" - ] - }, - "execution_count": 152, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "tree.array(\"fTemperature\", entrystop=20)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "as arrays of fixed-width matrices," - ] - }, - { - "cell_type": "code", - "execution_count": 153, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([[[ 1.54053164, 0.09474282, 1.52469206, 0. ],\n", - " [-0.13630907, 0.80078429, 1.70623565, 0. ],\n", - " [-1.16029346, 2.012362 , 4.02206421, 0. ],\n", - " [ 0. , 0. , 0. , 0. ]],\n", - "\n", - " [[ 0.41865557, 1.60363352, -0.56923842, 0. ],\n", - " [ 0.06950195, 0.79105824, 2.0322361 , 0. ],\n", - " [ 0.05688119, 2.52811217, 3.91394544, 0. ],\n", - " [ 0. , 0. , 0. , 0. ]],\n", - "\n", - " [[-1.24031985, 2.3477006 , -0.67482847, 0. ],\n", - " [ 1.22933233, 1.39499295, 2.17524433, 0. ],\n", - " [ 0.18559125, 2.40421987, 4.56326485, 0. ],\n", - " [ 0. , 0. , 0. , 0. ]],\n", - "\n", - " [[-0.43785933, -0.05061727, 0.28988785, 0. ],\n", - " [-0.90204114, 0.88527524, 2.34751844, 0. ],\n", - " [ 0.3241719 , 0.79971647, 4.13229847, 0. ],\n", - " [ 0. , 0. , 0. , 0. ]],\n", - "\n", - " [[-0.98912323, 0.97513503, 1.03762376, 0. ],\n", - " [-0.96955669, -0.05892833, 3.02420664, 0. ],\n", - " [ 1.10181248, 3.31268907, 6.04244947, 0. ],\n", - " [ 0. , 0. , 0. , 0. ]],\n", - "\n", - " [[ 1.1283927 , 1.20095801, 0.7379719 , 0. ],\n", - " [ 0.32370013, 1.08198583, 2.96736264, 0. ],\n", - " [ 1.19329214, 2.01726198, 3.93975949, 0. ],\n", - " [ 0. , 0. , 0. , 0. 
]]])" - ] - }, - "execution_count": 153, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "tree.array(\"fMatrix[4][4]\", entrystop=6)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "as jagged arrays (of ROOT's \"Float16_t\" encoding)," - ] - }, - { - "cell_type": "code", - "execution_count": 154, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 154, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "tree.array(\"fTracks.fMass2\", entrystop=6)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "or as jagged arrays of fixed arrays (of ROOT's \"Double32_t\" encoding)," - ] - }, - { - "cell_type": "code", - "execution_count": 155, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 155, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "tree.array(\"fTracks.fTArray[3]\", entrystop=6)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "However, some types are not fully split by ROOT and have to be deserialized individually (not vectorally). This example includes _histograms_ in the TTree, and histograms are sufficiently complex that they cannot be split." - ] - }, - { - "cell_type": "code", - "execution_count": 156, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - " ] at 0x7ac116114438>" - ] - }, - "execution_count": 156, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "tree.array(\"fH\", entrystop=6)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Each of those is a standard histogram object, something that would ordinarily be in a `TDirectory`, not a `TTree`. It has histogram convenience methods (see below)." - ] - }, - { - "cell_type": "code", - "execution_count": 157, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "b'Event Histogram'\n", - "[0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n", - " 0. 0. 0. 0. 1. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n", - " 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n", - " 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n", - " 0. 0. 0. 0.]\n", - "b'Event Histogram'\n", - "[0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n", - " 0. 0. 0. 0. 1. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 1.\n", - " 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n", - " 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n", - " 0. 0. 0. 0.]\n", - "b'Event Histogram'\n", - "[0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n", - " 0. 0. 0. 0. 1. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 1.\n", - " 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 1. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n", - " 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n", - " 0. 0. 0. 0.]\n", - "\n", - "...\n", - "\n", - "b'Event Histogram'\n", - "[14. 18. 14. 11. 15. 13. 12. 13. 8. 8. 9. 10. 10. 7. 7. 10. 8. 12.\n", - " 6. 8. 7. 9. 10. 12. 10. 11. 10. 10. 10. 8. 14. 13. 9. 7. 12. 10.\n", - " 7. 6. 9. 13. 11. 8. 10. 9. 7. 4. 7. 10. 8. 8. 9. 9. 7. 12.\n", - " 11. 9. 10. 7. 10. 13. 13. 11. 9. 9. 8. 8. 10. 12. 7. 5. 9. 10.\n", - " 12. 13. 10. 14. 10. 10. 8. 12. 12. 11. 16. 12. 8. 12. 7. 9. 8. 7.\n", - " 10. 7. 
11. 11. 8. 13. 9. 8. 14. 16.]\n", - "b'Event Histogram'\n", - "[14. 18. 14. 11. 15. 13. 12. 13. 8. 8. 9. 10. 10. 7. 8. 10. 8. 12.\n", - " 6. 8. 7. 9. 10. 12. 10. 11. 10. 10. 10. 8. 14. 13. 9. 7. 12. 10.\n", - " 7. 6. 9. 13. 11. 8. 10. 9. 7. 4. 7. 10. 8. 8. 9. 9. 7. 12.\n", - " 11. 9. 10. 7. 10. 13. 13. 11. 9. 9. 8. 8. 10. 12. 7. 5. 9. 10.\n", - " 12. 13. 10. 14. 10. 10. 8. 12. 12. 11. 16. 12. 8. 12. 7. 9. 8. 7.\n", - " 10. 7. 11. 11. 8. 13. 9. 8. 14. 16.]\n", - "b'Event Histogram'\n", - "[14. 18. 14. 11. 15. 13. 12. 13. 8. 8. 9. 10. 10. 7. 8. 10. 8. 12.\n", - " 6. 8. 7. 9. 10. 12. 10. 11. 10. 10. 10. 8. 14. 13. 9. 7. 12. 10.\n", - " 7. 6. 9. 13. 11. 8. 10. 9. 7. 4. 7. 10. 8. 8. 9. 9. 7. 12.\n", - " 11. 9. 10. 7. 10. 13. 13. 11. 9. 9. 8. 8. 10. 12. 7. 5. 9. 10.\n", - " 12. 13. 10. 14. 10. 10. 8. 12. 12. 11. 16. 12. 8. 12. 7. 9. 9. 7.\n", - " 10. 7. 11. 11. 8. 13. 9. 8. 14. 16.]\n" - ] - } - ], - "source": [ - "for histogram in tree.array(\"fH\", entrystop=3):\n", - " print(histogram.title)\n", - " print(histogram.values)\n", - "print(\"\\n...\\n\")\n", - "for histogram in tree.array(\"fH\", entrystart=-3):\n", - " print(histogram.title)\n", - " print(histogram.values)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The criteria for whether an object can be read vectorially in Numpy (fast) or individually in Python (slow) is whether it has a fixed width—all objects having the same number of bytes—or a variable width. You can see this in the TBranch's `interpretation` as the distinction between [uproot.asobj](https://uproot.readthedocs.io/en/latest/interpretation.html#uproot-interp-objects-asobj) (fixed width, vector read) and [uproot.asgenobj](https://uproot.readthedocs.io/en/latest/interpretation.html#uproot-interp-objects-asgenobj) (variable width, read into Python objects)." - ] - }, - { - "cell_type": "code", - "execution_count": 158, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "asjagged(asobj(), 10)" - ] - }, - "execution_count": 158, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# TLorentzVectors all have the same number of fixed width components, so they can be read vectorially.\n", - "events3[\"muonp4\"].interpretation" - ] - }, - { - "cell_type": "code", - "execution_count": 159, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "asgenobj(TH1F)" - ] - }, - "execution_count": 159, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Histograms contain name strings and variable length lists, so they must be read as Python objects.\n", - "tree[\"fH\"].interpretation" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Doubly nested jagged arrays (i.e. `std::vector>`)\n", - "\n", - "Variable length lists are an exception to the above—up to one level of depth. This is why `JaggedArrays`, representing types such as `std::vector` for a fixed-width `T`, can be read vectorially. Unfortunately, the same does not apply to doubly nested jagged arrays, such as `std::vector>`." 
- ] - }, - { - "cell_type": "code", - "execution_count": 160, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "asgenobj(STLVector(STLVector(asdtype('>f8'))))" - ] - }, - "execution_count": 160, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "branch = uproot.open(\"https://scikit-hep.org/uproot/examples/vectorVectorDouble.root\")[\"t\"][\"x\"]\n", - "branch.interpretation" - ] - }, - { - "cell_type": "code", - "execution_count": 161, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "b'vector >'" - ] - }, - "execution_count": 161, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "branch._streamer._fTypeName" - ] - }, - { - "cell_type": "code", - "execution_count": 162, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 162, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "array = branch.array()\n", - "array" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Although you see something that looks like a `JaggedArray`, the type is `ObjectArray`, meaning that you only have some bytes with an auto-generated prescription for turning them into Python objects (from the \"streamers,\" self-describing the ROOT file). You can't apply the usual `JaggedArray` slicing." - ] - }, - { - "cell_type": "code", - "execution_count": 163, - "metadata": {}, - "outputs": [], - "source": [ - "try:\n", - " array[array.counts > 0, 0]\n", - "except Exception as err:\n", - " print(type(err), err)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "To get `JaggedArray` semantics, use `awkward.fromiter` to convert the arbitrary Python objects into awkward-arrays." - ] - }, - { - "cell_type": "code", - "execution_count": 164, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 164, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "jagged = awkward.fromiter(array)\n", - "jagged" - ] - }, - { - "cell_type": "code", - "execution_count": 165, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 165, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "jagged[jagged.counts > 0, 0]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Doubly nested `JaggedArrays` are a native type in awkward-array: they can be any number of levels deep." 
- ] - }, - { - "cell_type": "code", - "execution_count": 166, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 166, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "jagged.flatten()" - ] - }, - { - "cell_type": "code", - "execution_count": 167, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([ 10., 10., 20., 20., -21., -22., 200., -201., 202.])" - ] - }, - "execution_count": 167, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "jagged.flatten().flatten()" - ] - }, - { - "cell_type": "code", - "execution_count": 168, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 168, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "jagged.sum()" - ] - }, - { - "cell_type": "code", - "execution_count": 169, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([ 0., 0., 40., -23., 201.])" - ] - }, - "execution_count": 169, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "jagged.sum().sum()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Parallel array reading\n", - "\n", - "Uproot supports reading, deserialization, and array-building in parallel. All of the array-reading functions have **executor** and **blocking** parameters:\n", - "\n", - " * **executor:** a Python 3 [Executor](https://docs.python.org/3/library/concurrent.futures.html) object, which schedules and runs tasks in parallel;\n", - " * **blocking:** if `True` _(default)_, the array-reading function blocks (waits) until the result is ready, then returns it. If `False`, it immediately returns a zero-argument function that, when called, blocks until the result is ready. This zero-argument function is a simple type of \"future.\"" - ] - }, - { - "cell_type": "code", - "execution_count": 170, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - ".wait()>" - ] - }, - "execution_count": 170, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "import concurrent.futures\n", - "\n", - "# ThreadPoolExecutor divides work among multiple threads.\n", - "# Avoid ProcessPoolExecutor because the finalized arrays would have to be reserialized to pass between processes.\n", - "executor = concurrent.futures.ThreadPoolExecutor()\n", - "\n", - "result = tree.array(\"fTracks.fVertex[3]\", executor=executor, blocking=False)\n", - "result" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We can work on other things while the array is being read." - ] - }, - { - "cell_type": "code", - "execution_count": 171, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 171, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# and now get the array (waiting, if necessary, for it to complete)\n", - "result()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The **executor** and **blocking** parameters are often used together, but they do not have to be. 
You can collect data in parallel but let the array-reading function block until it is finished:" - ] - }, - { - "cell_type": "code", - "execution_count": 172, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 172, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "tree.array(\"fTracks.fVertex[3]\", executor=executor)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The other case, non-blocking return without parallel processing (**executor=None** and **blocking=False**) is not very useful because all the work of creating the array would be done on the main thread (meaning: you have to wait) and then you would be returned a zero-argument function to reveal it.\n", - "\n", - " * **executor=None**, **blocking=True**: common case\n", - " * **executor=executor**, **blocking=True**: read in parallel, but wait for it to finish\n", - " * **executor=executor**, **blocking=False**: read in parallel and immediately return a future\n", - " * **executor=None**, **blocking=False**: not useful but not excluded." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Although parallel processing has been integrated into uproot's design, it only provides a performance improvement in cases that are dominated by read time in non-Python functions. Python's [Global Interpreter Lock](https://realpython.com/python-gil/) (GIL) severely limits parallel scaling of Python calls, but external functions that release the GIL (not all do) are immune.\n", - "\n", - "Thus, if reading is slow because the ROOT file has a lot of small TBaskets, requiring uproot to step through them using Python calls, parallelizing that work in many threads has limited benefit because those threads stop and wait for each other due to Python's GIL. If reading is slow because the ROOT file is heavily compressed—for instance, with LZMA—then parallel reading is beneficial and scales well with the number of threads.\n", - "\n", - "
" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "If, on the other other hand, processing time is dominated by your analysis code and not file-reading, then parallelizing the file-reading won't help. Instead, you want to [parallelize your whole analysis](https://sebastianraschka.com/Articles/2014_multiprocessing.html), and a good way to do that in Python is with [multiprocessing](https://docs.python.org/3/library/multiprocessing.html) from the Python Standard Library.\n", - "\n", - "If you do split your analysis into multiple processes, you _probably don't_ want to also parallelize the array-reading within each process. It's easy to make performance worse by making it too complicated. Particle physics analysis is usually embarrassingly parallel, well suited to splitting the work into independent tasks, each of which is single-threaded.\n", - "\n", - "Another option, of course, is to use a batch system (Condor, Slurm, GRID, etc.). It can be advantageous to parallelize your work across machines with a batch system and across CPU cores with [multiprocessing](https://docs.python.org/3/library/multiprocessing.html)." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Histograms, TProfiles, TGraphs, and others\n", - "\n", - "TTrees are not the only kinds of objects to analyze in ROOT files; we are also interested in aggregated data in histograms, profiles, and graphs. Uproot uses the ROOT file's \"streamers\" to learn how to deserialize any object, but an anonymous deserialization often isn't useful:" - ] - }, - { - "cell_type": "code", - "execution_count": 179, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{b'ProcessID0;1': uproot.rootio.TProcessID,\n", - " b'htime;1': uproot.rootio.TH1F,\n", - " b'T;1': uproot.rootio.TTree,\n", - " b'hstat;1': uproot.rootio.TH1F}" - ] - }, - "execution_count": 179, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "file = uproot.open(\"Event.root\")\n", - "dict(file.classes())" - ] - }, - { - "cell_type": "code", - "execution_count": 180, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 180, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "processid = file[\"ProcessID0\"]\n", - "processid" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "What is a `TProcessID`?" - ] - }, - { - "cell_type": "code", - "execution_count": 181, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "['fName', 'fTitle']" - ] - }, - "execution_count": 181, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "processid._members()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Something with an `fName` and `fTitle`..." - ] - }, - { - "cell_type": "code", - "execution_count": 182, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "(b'ProcessID0', b'3ec87674-3aa2-11e9-bb02-0301a8c0beef')" - ] - }, - "execution_count": 182, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "processid._fName, processid._fTitle # note the underscore; these are private members" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Some C++ classes have Pythonic overloads to make them more useful in Python. 
Here's a way to find out which ones have been defined so far:" - ] - }, - { - "cell_type": "code", - "execution_count": 183, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "['TGraph',\n", - " 'TGraphAsymmErrors',\n", - " 'TGraphErrors',\n", - " 'TH1',\n", - " 'TH2',\n", - " 'TH3',\n", - " 'THnSparse',\n", - " 'TLorentzVector',\n", - " 'TVector2',\n", - " 'TVector3']" - ] - }, - "execution_count": 183, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "import pkgutil\n", - "\n", - "[modname for importer, modname, ispkg in pkgutil.walk_packages(uproot_methods.classes.__path__)]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This file contains `TH1F` objects, which is a subclass of `TH1`. The `TH1` methods will extend it." - ] - }, - { - "cell_type": "code", - "execution_count": 184, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([ 0., 1., 2., 3., 4., 5., 6., 7., 8., 9., 10.])" - ] - }, - "execution_count": 184, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "file[\"htime\"].edges" - ] - }, - { - "cell_type": "code", - "execution_count": 185, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([0.33352208, 0.30402994, 0.32451916, 0.3509729 , 0.36894202,\n", - " 0.30728292, 0.30681205, 0.341563 , 0.16150808, 0. ],\n", - " dtype=float32)" - ] - }, - "execution_count": 185, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "file[\"htime\"].values" - ] - }, - { - "cell_type": "code", - "execution_count": 186, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - " 0 0.38739\n", - " +-----------------------------------------------------------+\n", - "[-inf, 0) 0.021839 |*** |\n", - "[0, 1) 0.33352 |*************************************************** |\n", - "[1, 2) 0.30403 |********************************************** |\n", - "[2, 3) 0.32452 |************************************************* |\n", - "[3, 4) 0.35097 |***************************************************** |\n", - "[4, 5) 0.36894 |******************************************************** |\n", - "[5, 6) 0.30728 |*********************************************** |\n", - "[6, 7) 0.30681 |*********************************************** |\n", - "[7, 8) 0.34156 |**************************************************** |\n", - "[8, 9) 0.16151 |************************* |\n", - "[9, 10) 0 | |\n", - "[10, inf] 0 | |\n", - " +-----------------------------------------------------------+\n" - ] - } - ], - "source": [ - "file[\"htime\"].show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The purpose of most of these methods is to extract data, which includes conversion to common Python formats." 
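For example, the `edges` and `values` shown a few cells above drop straight into Matplotlib (a quick sketch; it assumes `matplotlib` is installed, which is not a requirement of uproot itself):

```python
import numpy
import matplotlib.pyplot as plt  # not an uproot dependency; assumed installed

edges = file["htime"].edges      # 11 bin edges, as shown above
values = file["htime"].values    # 10 bin contents

plt.bar(edges[:-1], values, width=numpy.diff(edges), align="edge")
plt.title("Real-Time to write versus time")
plt.show()
```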
- ] - }, - { - "cell_type": "code", - "execution_count": 187, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - " 0 41529\n", - " +---------------------------------------------------+\n", - "(underflow) 0 | |\n", - "Dijet 39551 |************************************************* |\n", - "MET 27951 |********************************** |\n", - "MuonVeto 27911 |********************************** |\n", - "IsoMuonTrackVeto 27861 |********************************** |\n", - "ElectronVeto 27737 |********************************** |\n", - "IsoElectronTrackVeto 27460 |********************************** |\n", - "IsoPionTrackVeto 26751 |********************************* |\n", - "(overflow) 0 | |\n", - " +---------------------------------------------------+\n" - ] - } - ], - "source": [ - "uproot.open(\"https://scikit-hep.org/uproot/examples/issue33.root\")[\"cutflow\"].show()" - ] - }, - { - "cell_type": "code", - "execution_count": 188, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
countvariance
Real-Time to write versus time
[-inf, 0.0)0.0218390.000477
[0.0, 1.0)0.3335220.111237
[1.0, 2.0)0.3040300.092434
[2.0, 3.0)0.3245190.105313
[3.0, 4.0)0.3509730.123182
[4.0, 5.0)0.3689420.136118
[5.0, 6.0)0.3072830.094423
[6.0, 7.0)0.3068120.094134
[7.0, 8.0)0.3415630.116665
[8.0, 9.0)0.1615080.026085
[9.0, 10.0)0.0000000.000000
[10.0, inf)0.0000000.000000
\n", - "
" - ], - "text/plain": [ - " count variance\n", - "Real-Time to write versus time \n", - "[-inf, 0.0) 0.021839 0.000477\n", - "[0.0, 1.0) 0.333522 0.111237\n", - "[1.0, 2.0) 0.304030 0.092434\n", - "[2.0, 3.0) 0.324519 0.105313\n", - "[3.0, 4.0) 0.350973 0.123182\n", - "[4.0, 5.0) 0.368942 0.136118\n", - "[5.0, 6.0) 0.307283 0.094423\n", - "[6.0, 7.0) 0.306812 0.094134\n", - "[7.0, 8.0) 0.341563 0.116665\n", - "[8.0, 9.0) 0.161508 0.026085\n", - "[9.0, 10.0) 0.000000 0.000000\n", - "[10.0, inf) 0.000000 0.000000" - ] - }, - "execution_count": 188, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "file[\"htime\"].pandas()" - ] - }, - { - "cell_type": "code", - "execution_count": 189, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "dependent_variables:\n", - "- header:\n", - " name: counts\n", - " units: null\n", - " qualifiers: []\n", - " values:\n", - " - errors:\n", - " - label: stat\n", - " symerror: 0.33352208137512207\n", - " value: 0.33352208137512207\n", - " - errors:\n", - " - label: stat\n", - " symerror: 0.3040299415588379\n", - " value: 0.3040299415588379\n", - " - errors:\n", - " - label: stat\n", - " symerror: 0.32451915740966797\n", - " value: 0.32451915740966797\n", - " - errors:\n", - " - label: stat\n", - " symerror: 0.35097289085388184\n", - " value: 0.35097289085388184\n", - " - errors:\n", - " - label: stat\n", - " symerror: 0.3689420223236084\n", - " value: 0.3689420223236084\n", - " - errors:\n", - " - label: stat\n", - " symerror: 0.3072829246520996\n", - " value: 0.3072829246520996\n", - " - errors:\n", - " - label: stat\n", - " symerror: 0.306812047958374\n", - " value: 0.306812047958374\n", - " - errors:\n", - " - label: stat\n", - " symerror: 0.34156298637390137\n", - " value: 0.34156298637390137\n", - " - errors:\n", - " - label: stat\n", - " symerror: 0.16150808334350586\n", - " value: 0.16150808334350586\n", - " - errors:\n", - " - label: stat\n", - " symerror: 0.0\n", - " value: 0.0\n", - "independent_variables:\n", - "- header:\n", - " name: Real-Time to write versus time\n", - " units: null\n", - " values:\n", - " - high: 1.0\n", - " low: 0.0\n", - " - high: 2.0\n", - " low: 1.0\n", - " - high: 3.0\n", - " low: 2.0\n", - " - high: 4.0\n", - " low: 3.0\n", - " - high: 5.0\n", - " low: 4.0\n", - " - high: 6.0\n", - " low: 5.0\n", - " - high: 7.0\n", - " low: 6.0\n", - " - high: 8.0\n", - " low: 7.0\n", - " - high: 9.0\n", - " low: 8.0\n", - " - high: 10.0\n", - " low: 9.0\n", - "\n" - ] - } - ], - "source": [ - "print(file[\"htime\"].hepdata())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Numpy histograms, used as a common format through the scientific Python ecosystem, are just a tuple of counts/bin contents and edge positions. (There's one more edge than contents to cover left and right.)" - ] - }, - { - "cell_type": "code", - "execution_count": 190, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "(array([0.33352208, 0.30402994, 0.32451916, 0.3509729 , 0.36894202,\n", - " 0.30728292, 0.30681205, 0.341563 , 0.16150808, 0. 
],\n", - " dtype=float32),\n", - " array([ 0., 1., 2., 3., 4., 5., 6., 7., 8., 9., 10.]))" - ] - }, - "execution_count": 190, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "file[\"htime\"].numpy()" - ] - }, - { - "cell_type": "code", - "execution_count": 192, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "(array([[0., 0., 0., ..., 0., 0., 0.],\n", - " [0., 0., 0., ..., 0., 0., 0.],\n", - " [0., 0., 0., ..., 0., 0., 0.],\n", - " ...,\n", - " [0., 0., 0., ..., 0., 0., 0.],\n", - " [0., 0., 0., ..., 0., 0., 0.],\n", - " [0., 0., 0., ..., 0., 0., 0.]], dtype=float32),\n", - " [(array([-4. , -3.8, -3.6, -3.4, -3.2, -3. , -2.8, -2.6, -2.4, -2.2, -2. ,\n", - " -1.8, -1.6, -1.4, -1.2, -1. , -0.8, -0.6, -0.4, -0.2, 0. , 0.2,\n", - " 0.4, 0.6, 0.8, 1. , 1.2, 1.4, 1.6, 1.8, 2. , 2.2, 2.4,\n", - " 2.6, 2.8, 3. , 3.2, 3.4, 3.6, 3.8, 4. ]),\n", - " array([-4. , -3.8, -3.6, -3.4, -3.2, -3. , -2.8, -2.6, -2.4, -2.2, -2. ,\n", - " -1.8, -1.6, -1.4, -1.2, -1. , -0.8, -0.6, -0.4, -0.2, 0. , 0.2,\n", - " 0.4, 0.6, 0.8, 1. , 1.2, 1.4, 1.6, 1.8, 2. , 2.2, 2.4,\n", - " 2.6, 2.8, 3. , 3.2, 3.4, 3.6, 3.8, 4. ]))])" - ] - }, - "execution_count": 192, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "uproot.open(\"samples/hepdata-example.root\")[\"hpxpy\"].numpy()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Creating and writing data to ROOT files\n", - "\n", - "Uproot has a limited (but growing!) ability to _write_ ROOT files. Two types currently supported are `TObjString` (for debugging) and histograms.\n", - "\n", - "To write to a ROOT file in uproot, the file must be opened for writing using `uproot.create`, `uproot.recreate`, or `uproot.update` (corresponding to ROOT's `\"CREATE\"`, `\"RECREATE\"`, and `\"UPDATE\"` file modes). The compression level is given by `uproot.ZLIB(n)`, `uproot.LZMA(n)`, `uproot.LZ4(n)`, or `None`." - ] - }, - { - "cell_type": "code", - "execution_count": 193, - "metadata": {}, - "outputs": [], - "source": [ - "file = uproot.recreate(\"tmp.root\", compression=uproot.ZLIB(4))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Unlike objects created by [uproot.open](https://uproot.readthedocs.io/en/latest/opening-files.html#uproot-open), you can _assign_ to this `file`. Just as reading behaves like getting an object from a Python dict, writing behaves like putting an object into a Python dict.\n", - "\n", - "**Note:** this is a fundamental departure from how ROOT uses names. In ROOT, a name is a part of an object that is _also_ used for lookup. With a dict-like interface, the object need not have a name; only the lookup mechanism (e.g. [ROOTDirectory](https://uproot.readthedocs.io/en/latest/root-io.html#uproot-rootio-rootdirectory)) needs to manage names.\n", - "\n", - "When you write objects to the ROOT file, they can be unnamed things like a Python string, but they get \"stamped\" with the lookup name once they go into the file." - ] - }, - { - "cell_type": "code", - "execution_count": 194, - "metadata": {}, - "outputs": [], - "source": [ - "file[\"name\"] = \"Some object, like a TObjString.\"" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The object is now in the file. 
ROOT would be able to open this file and read the data, like this:\n", - "\n", - "```c++\n", - "root [0] auto file = TFile::Open(\"tmp.root\");\n", - "root [1] file->ls();\n", - "```\n", - "```\n", - "TFile**\t\ttmp.root\t\n", - " TFile*\t\ttmp.root\t\n", - " KEY: TObjString\tname;1\tCollectable string class\n", - "```\n", - "```c++\n", - "root [2] TObjString* data;\n", - "root [3] file->GetObject(\"name\", data);\n", - "root [4] data->GetString()\n", - "```\n", - "```\n", - "(const TString &) \"Some object, like a TObjString.\"[31]\n", - "```\n", - "\n", - "We can also read it back in uproot, like this:" - ] - }, - { - "cell_type": "code", - "execution_count": 195, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[b'name;1']" - ] - }, - "execution_count": 195, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "file.keys()" - ] - }, - { - "cell_type": "code", - "execution_count": 196, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{b'name;1': uproot.rootio.TObjString}" - ] - }, - "execution_count": 196, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "dict(file.classes())" - ] - }, - { - "cell_type": "code", - "execution_count": 197, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "b'Some object, like a TObjString.'" - ] - }, - "execution_count": 197, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "file[\"name\"]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "(Notice that it lost its encoding—it is now a bytestring.)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Writing histograms\n", - "\n", - "Histograms can be written to the file in the same way: by assignment (choosing a name at the time of assignment). 
The histograms may be taken from another file and modified," - ] - }, - { - "cell_type": "code", - "execution_count": 198, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - " 0 2410.8\n", - " +------------------------------------------------------------+\n", - "[-inf, -3) 0 | |\n", - "[-3, -2.4) 68 |** |\n", - "[-2.4, -1.8) 285 |******* |\n", - "[-1.8, -1.2) 755 |******************* |\n", - "[-1.2, -0.6) 1580 |*************************************** |\n", - "[-0.6, 0) 2296 |********************************************************* |\n", - "[0, 0.6) 2286 |********************************************************* |\n", - "[0.6, 1.2) 1570 |*************************************** |\n", - "[1.2, 1.8) 795 |******************** |\n", - "[1.8, 2.4) 289 |******* |\n", - "[2.4, 3) 76 |** |\n", - "[3, inf] 0 | |\n", - " +------------------------------------------------------------+\n", - " 0 0.24108\n", - " +----------------------------------------------------------+\n", - "[-inf, -3) 0 | |\n", - "[-3, -2.4) 0.0068 |** |\n", - "[-2.4, -1.8) 0.0285 |******* |\n", - "[-1.8, -1.2) 0.0755 |****************** |\n", - "[-1.2, -0.6) 0.158 |************************************** |\n", - "[-0.6, 0) 0.2296 |******************************************************* |\n", - "[0, 0.6) 0.2286 |******************************************************* |\n", - "[0.6, 1.2) 0.157 |************************************** |\n", - "[1.2, 1.8) 0.0795 |******************* |\n", - "[1.8, 2.4) 0.0289 |******* |\n", - "[2.4, 3) 0.0076 |** |\n", - "[3, inf] 0 | |\n", - " +----------------------------------------------------------+\n" - ] - } - ], - "source": [ - "histogram = uproot.open(\"https://scikit-hep.org/uproot/examples/histograms.root\")[\"one\"]\n", - "histogram.show()\n", - "norm = histogram.allvalues.sum()\n", - "for i in range(len(histogram)):\n", - " histogram[i] /= norm\n", - "histogram.show()\n", - " \n", - "file[\"normalized\"] = histogram" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "or it may be created entirely in Python." 
- ] - }, - { - "cell_type": "code", - "execution_count": 199, - "metadata": {}, - "outputs": [], - "source": [ - "import types\n", - "import uproot_methods.classes.TH1\n", - "\n", - "class MyTH1(uproot_methods.classes.TH1.Methods, list):\n", - " def __init__(self, low, high, values, title=\"\"):\n", - " self._fXaxis = types.SimpleNamespace()\n", - " self._fXaxis._fNbins = len(values)\n", - " self._fXaxis._fXmin = low\n", - " self._fXaxis._fXmax = high\n", - " for x in values:\n", - " self.append(float(x))\n", - " self._fTitle = title\n", - " self._classname = \"TH1F\"\n", - " \n", - "histogram = MyTH1(-5, 5, [0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0])\n", - "\n", - "file[\"synthetic\"] = histogram" - ] - }, - { - "cell_type": "code", - "execution_count": 200, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - " 0 1.05\n", - " +--------------------------------------------------------+\n", - "[-inf, -5) 0 | |\n", - "[-5, -4.1667) 1 |***************************************************** |\n", - "[-4.1667, -3.3333) 1 |***************************************************** |\n", - "[-3.3333, -2.5) 1 |***************************************************** |\n", - "[-2.5, -1.6667) 1 |***************************************************** |\n", - "[-1.6667, -0.83333) 1 |***************************************************** |\n", - "[-0.83333, 0) 1 |***************************************************** |\n", - "[0, 0.83333) 1 |***************************************************** |\n", - "[0.83333, 1.6667) 1 |***************************************************** |\n", - "[1.6667, 2.5) 1 |***************************************************** |\n", - "[2.5, 3.3333) 1 |***************************************************** |\n", - "[5, inf] 0 | |\n", - " +--------------------------------------------------------+\n" - ] - } - ], - "source": [ - "file[\"synthetic\"].show()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "But it is particularly useful that uproot recognizes [Numpy histograms](https://docs.scipy.org/doc/numpy/reference/generated/numpy.histogram.html), which may have come from other libraries." 
- ] - }, - { - "cell_type": "code", - "execution_count": 201, - "metadata": {}, - "outputs": [], - "source": [ - "file[\"from_numpy\"] = numpy.histogram(numpy.random.normal(0, 1, 10000))" - ] - }, - { - "cell_type": "code", - "execution_count": 202, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - " 0 3209.8\n", - " +----------------------------------------------------+\n", - "[-inf, -4.2249) 0 | |\n", - "[-4.2249, -3.3867) 6 | |\n", - "[-3.3867, -2.5484) 63 |* |\n", - "[-2.5484, -1.7101) 376 |****** |\n", - "[-1.7101, -0.8718) 1438 |*********************** |\n", - "[-0.8718, -0.033514) 2975 |************************************************ |\n", - "[-0.033514, 0.80477) 3057 |************************************************** |\n", - "[0.80477, 1.6431) 1570 |************************* |\n", - "[1.6431, 2.4813) 442 |******* |\n", - "[2.4813, 3.3196) 66 |* |\n", - "[3.3196, 4.1579) 7 | |\n", - "[4.1579, inf] 0 | |\n", - " +----------------------------------------------------+\n" - ] - } - ], - "source": [ - "file[\"from_numpy\"].show()" - ] - }, - { - "cell_type": "code", - "execution_count": 203, - "metadata": {}, - "outputs": [], - "source": [ - "file[\"from_numpy2d\"] = numpy.histogram2d(numpy.random.normal(0, 1, 10000), numpy.random.normal(0, 1, 10000))" - ] - }, - { - "cell_type": "code", - "execution_count": 204, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "(array([[ 0., 0., 1., 0., 3., 3., 2., 0., 0., 0.],\n", - " [ 0., 3., 12., 21., 33., 23., 22., 4., 0., 0.],\n", - " [ 0., 6., 35., 96., 152., 174., 93., 28., 5., 2.],\n", - " [ 7., 16., 91., 284., 470., 434., 266., 69., 16., 1.],\n", - " [ 1., 37., 146., 471., 766., 759., 425., 140., 24., 1.],\n", - " [ 1., 38., 145., 512., 771., 715., 394., 138., 22., 6.],\n", - " [ 4., 16., 86., 252., 417., 414., 249., 56., 10., 0.],\n", - " [ 1., 5., 25., 98., 136., 133., 82., 21., 3., 0.],\n", - " [ 0., 1., 8., 12., 23., 28., 14., 4., 3., 0.],\n", - " [ 0., 0., 1., 3., 4., 4., 2., 1., 0., 0.]]),\n", - " [(array([-3.71371807, -2.9587047 , -2.20369132, -1.44867795, -0.69366458,\n", - " 0.06134879, 0.81636217, 1.57137554, 2.32638891, 3.08140228,\n", - " 3.83641566]),\n", - " array([-3.80131588, -3.0313005 , -2.26128512, -1.49126974, -0.72125436,\n", - " 0.04876102, 0.8187764 , 1.58879178, 2.35880716, 3.12882254,\n", - " 3.89883792]))])" - ] - }, - "execution_count": 204, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "file[\"from_numpy2d\"].numpy()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Writing TTrees\n", - "\n", - "Uproot can now write TTrees (documented on the [main README](https://github.com/scikit-hep/uproot#writing-ttrees)), but the interactive tutorial has not been written." 
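Until that tutorial exists, here is a rough sketch of the interface described there, using `uproot.newtree` to declare branches and `extend` to append data. The branch names, dtypes, and values below are made up for illustration, and details of the API may differ from this sketch:

```python
import numpy
import uproot  # uproot 3.x, as in the rest of this tutorial

f = uproot.recreate("ttree-example.root")

# declare the TTree's branches and their types up front
f["t"] = uproot.newtree({"x": numpy.float64, "n": numpy.int32})

# append one basket's worth of data; all branches must be extended together
f["t"].extend({"x": numpy.random.normal(0, 1, 1000),
               "n": numpy.arange(1000, dtype=numpy.int32)})

f.close()
```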
- ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.3" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/binder/uproot b/binder/uproot deleted file mode 120000 index 12e51de9..00000000 --- a/binder/uproot +++ /dev/null @@ -1 +0,0 @@ -../uproot \ No newline at end of file diff --git a/dev/allstreamers.c b/dev/allstreamers.c index a1fec6c7..462846f9 100644 --- a/dev/allstreamers.c +++ b/dev/allstreamers.c @@ -1,45 +1,45 @@ void allstreamers(){ TFile *tfile = new TFile("dev/allstreamers.root", "RECREATE"); tfile->SetCompressionLevel(0); - + TH1F* hist1 = new TH1F("habla1f", "th1f title", 10, 2.1, 3.1); TH1C* hist2 = new TH1C("habla1c", "th1c title", 10, 2.1, 3.1); TH1I* hist3 = new TH1I("habla1i", "th1i title", 10, 2.1, 3.1); TH1D* hist4 = new TH1D("habla1d", "th1d title", 10, 2.1, 3.1); TH1S* hist5 = new TH1S("habla1s", "th1s title", 10, 2.1, 3.1); - + TH2C* hist6 = new TH2C("habla2c", "th2c title", 10, 2.1, 3.1, 5, 1.1, 2.1); TH2S* hist7 = new TH2S("habla2s", "th2s title", 10, 2.1, 3.1, 5, 1.1, 2.1); TH2I* hist8 = new TH2I("habla2i", "th2i title", 10, 2.1, 3.1, 5, 1.1, 2.1); TH2F* hist9 = new TH2F("habla2f", "th2f title", 10, 2.1, 3.1, 5, 1.1, 2.1); TH2D* hist10 = new TH2D("habla2d", "th2d title", 10, 2.1, 3.1, 5, 1.1, 2.1); - + TH3C* hist11 = new TH3C("habla3c", "th3c title", 10, 2.1, 3.1, 5, 1.1, 2.1, 5, 1.1, 2.1); TH3S* hist12 = new TH3S("habla3s", "th3s title", 10, 2.1, 3.1, 5, 1.1, 2.1, 5, 1.1, 2.1); TH3I* hist13 = new TH3I("habla3i", "th3i title", 10, 2.1, 3.1, 5, 1.1, 2.1, 5, 1.1, 2.1); TH3D* hist14 = new TH3D("habla3d", "th3d title", 10, 2.1, 3.1, 5, 1.1, 2.1, 5, 1.1, 2.1); TH3F* hist15 = new TH3F("habla3f", "th3f title", 10, 2.1, 3.1, 5, 1.1, 2.1, 5, 1.1, 2.1); - + TObjString* comment = new TObjString("Hello World"); comment->Write(); - + TLorentzVector v2(1., 1., 1., 1.); v2.Write(); - + TVector2 tvector2(5.0, 6.0); tvector2.Write(); - + TProfile profile1("hprof","Profile of pz versus px",100,-4,4,0,20); profile1.Write(); - + TProfile2D profile2("hprof2d","Profile of pz versus px and py",40,-4,4,40,-4,4,0,20); profile2.Write(); - + TProfile3D profile3("hprof3d","Profile of pt versus px, py and pz",40,-4,4,40,-4,4,40,0,20); profile3.Write(); - + TTree *t = new TTree("tvec","Tree with vectors"); - + int i1; double d1; float f1; @@ -47,7 +47,7 @@ void allstreamers(){ char c1; short s1; bool b1; - + t -> Branch("intBranch", &i1); t -> Branch("doubleBranch", &d1); t -> Branch("floatBranch", &f1); @@ -55,14 +55,14 @@ void allstreamers(){ t -> Branch("charBranch", &c1); t -> Branch("shortBranch", &s1); t -> Branch("boolBranch", &b1); - - std::vector i2; + + std::vector i2; std::vector d2; std::vector f2; std::vector l2; std::vector c2; std::vector s2; - + t -> Branch("intvector", &i2); t -> Branch("doublevector", &d2); t -> Branch("floatvector", &f2); @@ -73,22 +73,22 @@ void allstreamers(){ char* s; // ? - Warning in : Extra characters after type tag 'C/B' for branch 'character star/C'; must be one character. 
t -> Branch("character star/C", s); - + std::string a_string("blah"); t -> Branch("str_branch_name", &a_string); - + std::vector sample; t -> Branch("sample string", &sample); - + t -> Fill(); t -> Write(); - + TGraph g(10); g.Write(); - + TMultiGraph *mg = new TMultiGraph(); mg -> Write(); - + tfile -> Write(); tfile -> Close(); } diff --git a/dev/streamergen.py b/dev/streamergen.py index af81281e..7d3ff63e 100644 --- a/dev/streamergen.py +++ b/dev/streamergen.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE # Run this script from the root directory of the project. @@ -8,7 +8,7 @@ import os sys.path.insert(0, os.path.abspath("")) -import uproot +import uproot3 import subprocess import json @@ -16,7 +16,7 @@ # Make sure c file is named allstreamers.c subprocess.run("root -l -q dev/allstreamers.c", shell=True) -f = uproot.open("dev/allstreamers.root") +f = uproot3.open("dev/allstreamers.root") # Check with json data = json.load(open("dev/streamerversions.json")) @@ -24,7 +24,7 @@ if data[x._fName.decode("ascii")] != x._fClassVersion: print("Old {0} version = {1}. New {0} version = {2}".format(x._fName, data[x._fName.decode("ascii")], x._fClassVersion)) -tkey = uproot.rootio.TKey.read(f._context.source, uproot.source.cursor.Cursor(f._context.tfile["_fSeekInfo"]), None, None) +tkey = uproot3.rootio.TKey.read(f._context.source, uproot3.source.cursor.Cursor(f._context.tfile["_fSeekInfo"]), None, None) start = f._context.tfile["_fSeekInfo"] + tkey._fKeylen streamerlen = tkey._fObjlen @@ -34,13 +34,13 @@ streamers = "streamers = {0}".format(repr(couple_bytes)) lines = [] -for line in open("uproot/write/streamers.py"): +for line in open("uproot3/write/streamers.py"): if line.startswith("streamers"): lines.append(streamers) else: lines.append(line) -with open("uproot/write/streamers.py", "w") as streamerfile: +with open("uproot3/write/streamers.py", "w") as streamerfile: for line in lines: streamerfile.writelines(line) diff --git a/docs/old-tutorial.rst b/docs/old-tutorial.rst index efa3fdb9..592d7718 100644 --- a/docs/old-tutorial.rst +++ b/docs/old-tutorial.rst @@ -18,29 +18,29 @@ Tutorial Getting started --------------- -Download a Z → μμ `flat ntuple `__ and a H → ZZ → eeμμ `structured TTree `__. +Download a Z → μμ `flat ntuple `__ and a H → ZZ → eeμμ `structured TTree `__. .. code-block:: bash - wget http://scikit-hep.org/uproot/examples/Zmumu.root - wget http://scikit-hep.org/uproot/examples/HZZ.root + wget http://scikit-hep.org/uproot3/examples/Zmumu.root + wget http://scikit-hep.org/uproot3/examples/HZZ.root -Open each of the files; uproot presents them as ``dict``-like objects with ROOT names and objects as keys and values. (The "cycle number" after the semicolon can usually be ignored.) +Open each of the files; Uproot presents them as ``dict``-like objects with ROOT names and objects as keys and values. (The "cycle number" after the semicolon can usually be ignored.) .. code-block:: python - >>> import uproot - >>> uproot.open("Zmumu.root").keys() + >>> import uproot3 + >>> uproot3.open("Zmumu.root").keys() [b'events;1'] - >>> uproot.open("HZZ.root").keys() + >>> uproot3.open("HZZ.root").keys() [b'events;1'] Since the file acts as a ``dict``, access the TTrees with square brackets. TTrees are also ``dict``-like objects, with branch names and branches as keys and values. 
(Hint: ``allkeys()`` lists branches recursively, if they're nested.) .. code-block:: python - >>> zmumu = uproot.open("Zmumu.root")["events"] - >>> hzz = uproot.open("HZZ.root")["events"] + >>> zmumu = uproot3.open("Zmumu.root")["events"] + >>> hzz = uproot3.open("HZZ.root")["events"] >>> zmumu.keys() [b'Type', b'Run', b'Event', b'E1', b'px1', b'py1', b'pz1', b'pt1', b'eta1', b'phi1', b'Q1', b'E2', b'px2', b'py2', b'pz2', b'pt2', b'eta2', b'phi2', b'Q2', b'M'] @@ -59,7 +59,7 @@ You can turn a chosen set of branches into Numpy arrays with the ``arrays`` meth b'py1': array([ 17.4332439 , -16.57036233, ..., 1.19940578, 1.2013503 ]), b'pz1': array([-68.96496181, -48.77524654, ..., -74.53243061, -74.80837247])} -If the number of items per entry is not constant, such as the number of jets in an event, they can't be expressed as flat Numpy arrays. Instead, uproot loads them into `jagged arrays `__. +If the number of items per entry is not constant, such as the number of jets in an event, they can't be expressed as flat Numpy arrays. Instead, Uproot loads them into `jagged arrays `__. .. code-block:: python @@ -106,18 +106,18 @@ But it's built out of regular Numpy arrays, for use in libraries that accept Num Exploring a file ---------------- -Download the `nesteddirs.root `__ sample and open it with uproot. +Download the `nesteddirs.root `__ sample and open it with Uproot. .. code-block:: bash - wget http://scikit-hep.org/uproot/examples/nesteddirs.root + wget http://scikit-hep.org/uproot3/examples/nesteddirs.root .. code-block:: python - >>> import uproot - >>> file = uproot.open("nesteddirs.root") + >>> import uproot3 + >>> file = uproot3.open("nesteddirs.root") -This ``file`` is a `ROOTDirectory `__, a class that can represent either a whole ROOT file or a TDirectory within that file. It emulates a Python ``dict``, so if you're familiar with this interface, you don't have to remember many method names. The "keys" are the names ROOT uses to find objects in files and the "values" are the data themselves. +This ``file`` is a ``ROOTDirectory``, a class that can represent either a whole ROOT file or a TDirectory within that file. It emulates a Python ``dict``, so if you're familiar with this interface, you don't have to remember many method names. The "keys" are the names ROOT uses to find objects in files and the "values" are the data themselves. .. code-block:: python @@ -140,19 +140,19 @@ If you only ask for the keys, the data won't be loaded (which can be important f [(b'one;1', ), (b'three;1', )] -In addition, `ROOTDirectory `__ has ``classes()``, ``iterclasses()`` and ``allclasses()`` to iterate over keys and class names of the contained objects. You can identify the class of an object before loading it. +In addition, ``ROOTDirectory`` has ``classes()``, ``iterclasses()`` and ``allclasses()`` to iterate over keys and class names of the contained objects. You can identify the class of an object before loading it. .. code-block:: python >>> for n, x in file.allclasses(): ... print(repr(n), "\t", x) ... - b'one;1' - b'one/two;1' - b'one/two/tree;1' - b'one/tree;1' - b'three;1' - b'three/tree;1' + b'one;1' + b'one/two;1' + b'one/two/tree;1' + b'one/tree;1' + b'three;1' + b'three/tree;1' As with a ``dict``, square brackets extract values by key. If you include ``"/"`` or ``";"`` in your request, you can specify subdirectories or cycle numbers (those ``;1`` at the end of key names, which you can usually ignore). 
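Because ``allclasses()`` yields name–class pairs before anything is read, you can combine it with this square-bracket lookup to pull out just the objects you care about. A quick sketch (assuming ``uproot3.tree.TTreeMethods`` is the base class of all TTree objects):

.. code-block:: python

    >>> import uproot3
    >>> [name for name, cls in file.allclasses()
    ...  if issubclass(cls, uproot3.tree.TTreeMethods)]
    [b'one/two/tree;1', b'one/tree;1', b'three/tree;1']
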
@@ -169,11 +169,11 @@ is equivalent to >>> file["one/two/tree"] -The memory management is explicit: each time you request a value from a `ROOTDirectory `__, it is deserialized from the file. This usually doesn't matter on the command-line, but it could in a loop. +The memory management is explicit: each time you request a value from a ``ROOTDirectory``, it is deserialized from the file. This usually doesn't matter on the command-line, but it could in a loop. -`TTree `__ objects are also ``dict``-like objects, but this time the keys and values are the `TBranch `__ names and objects. If you're not familiar with ROOT terminology, "tree" means a dataset and "branch" means one column or attribute of that dataset. The `TTree `__ class also has ``keys()``, ``iterkeys()``, ``allkeys()``, ``values()``, ``items()``, etc., because `TBranch `__ instances may be nested. +``TTree`` objects are also ``dict``-like objects, but this time the keys and values are the ``TBranch`` names and objects. If you're not familiar with ROOT terminology, "tree" means a dataset and "branch" means one column or attribute of that dataset. The ``TTree`` class also has ``keys()``, ``iterkeys()``, ``allkeys()``, ``values()``, ``items()``, etc., because ``TBranch`` instances may be nested. -The `TTree `__ also has the attributes you expect from ROOT, presented with Pythonic conventions (``numentries`` follows an uproot convention, in which all "number of" methods start with "num"), +The ``TTree`` also has the attributes you expect from ROOT, presented with Pythonic conventions (``numentries`` follows an Uproot convention, in which all "number of" methods start with "num"), .. code-block:: python @@ -193,7 +193,7 @@ as well as the raw data that was read from the file (C++ private members that st '_fName', '_fSavedBytes', '_fScanField', '_fTimerInterval', '_fTitle', '_fTotBytes', '_fTreeIndex', '_fUpdate', '_fUserInfo', '_fWeight', '_fZipBytes', '_filter'] -To get an overview of what arrays are available in the `TTree `__ and whether uproot can read it, call ``show()``. +To get an overview of what arrays are available in the ``TTree`` and whether Uproot can read it, call ``show()``. .. code-block:: python @@ -219,9 +219,9 @@ To get an overview of what arrays are available in the `TTree f4')) SliceFloat64 (no streamer) asjagged(asdtype('>f8')) -The first column shows `TBranch `__ names, the "streamers" in the second column are ROOT schemas in the file used to reconstruct complex user classes. (This file doesn't have any.) The third column shows uproot's default interpretation of the data. If any `TBranch `__ objects have ``None`` as the default interpretation, uproot cannot read it (but possibly will in the future, as more types are handled). +The first column shows ``TBranch`` names, the "streamers" in the second column are ROOT schemas in the file used to reconstruct complex user classes. (This file doesn't have any.) The third column shows Uproot's default interpretation of the data. If any ``TBranch`` objects have ``None`` as the default interpretation, Uproot cannot read it (but possibly will in the future, as more types are handled). -You can read each `TBranch `__ into an array by calling ``array()`` on the `TBranch `__. +You can read each ``TBranch`` into an array by calling ``array()`` on the ``TBranch``. .. code-block:: python @@ -245,7 +245,7 @@ You can read each `TBranch `__. +or read many at once with a single ``arrays([...])`` call on the ``TTree``. .. 
code-block:: python @@ -257,17 +257,17 @@ or read many at once with a single ``arrays([...])`` call on the `TTree `__ reference (`e.g. this link `__), but here's a guide to what you should know. +The complete list of array-reading parameters is given in the ``TTree`` reference, but here's a guide to what you should know. -The **branches** parameter lets you specify which `TBranch `__ data to load and optionally, an interpretation other than the default. +The **branches** parameter lets you specify which ``TBranch`` data to load and optionally, an interpretation other than the default. - If it's ``None`` or unspecified, you'll get all arrays. - If it's a single string, you'll either get the array you've named or all the arrays that match a glob pattern (if it includes ``*``, ``?``, or ``[...]``) or full regular expression (if it starts and ends with slashes with optional flags ``/pattern/i``). - If it's a list of strings, you'll get all the arrays you've named or specified by pattern-matching. -- If it's a ``dict`` from name to `Interpretation `__, you'll read the requested arrays in the specified ways. +- If it's a ``dict`` from name to ``Interpretation``, you'll read the requested arrays in the specified ways. - There's also a functional form that gives more control at the cost of more complexity. -An `Interpretation `__ lets you view the bytes of the ROOT file in different ways. Naturally, most of these are non-sensical: +An ``Interpretation`` lets you view the bytes of the ROOT file in different ways. Naturally, most of these are non-sensical: .. code-block:: python @@ -283,7 +283,7 @@ An `Interpretation ` 91., 92., 93., 94., 95., 96., 97., 98., 99.])} # but we could try reading them as little-endian, 4-byte integers (non-sensically) - >>> tree.arrays({"Float32": uproot.asdtype(">> tree.arrays({"Float32": uproot3.asdtype(">> import numpy - >>> tree.arrays({"Float64": uproot.asdtype(numpy.dtype((">f8", (5, 5))))}) + >>> tree.arrays({"Float64": uproot3.asdtype(numpy.dtype((">f8", (5, 5))))}) {b'Float64': array([[[ 0., 1., 2., 3., 4.], [ 5., 6., 7., 8., 9.], [10., 11., 12., 13., 14.], @@ -324,14 +324,14 @@ Some reinterpretations are useful, though: [90., 91., 92., 93., 94.], [95., 96., 97., 98., 99.]]])} -In particular, replacing ``asdtype`` with ``asarray`` lets you instruct uproot to fill an existing array, so that you can manage your own memory: +In particular, replacing ``asdtype`` with ``asarray`` lets you instruct Uproot to fill an existing array, so that you can manage your own memory: .. code-block:: python >>> import numpy >>> myarray = numpy.zeros(200) # allocate 200 zeros - >>> tree.arrays({"Float64": uproot.asarray(">f8", myarray)}) + >>> tree.arrays({"Float64": uproot3.asarray(">f8", myarray)}) {b'Float64': array([ 0., 1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11., 12., 13., 14., 15., 16., 17., 18., 19., 20., 21., 22., 23., 24., 25., 26., 27., 28., 29., 30., 31., 32., 33., 34., 35., 36., 37., 38., @@ -392,14 +392,14 @@ The **cache**, **basketcache**, and **keycache** parameters allow you to avoid r The **executor** and **blocking** parameters allow you to read and possibly decompress the branches in parallel. See `Parallel processing <#parallel-processing>`__ below. -All of the `TTree `__ and `TBranch `__ methods that read data into arrays— ``array``, ``lazyarray``, ``arrays``, ``lazyarrays``, ``iterate``, ``basket``, ``baskets``, and ``iterate_baskets``— all use these parameters consistently. If you understand what they do for one method, you understand them all. 
+All of the ``TTree`` and ``TBranch`` methods that read data into arrays— ``array``, ``lazyarray``, ``arrays``, ``lazyarrays``, ``iterate``, ``basket``, ``baskets``, and ``iterate_baskets``— all use these parameters consistently. If you understand what they do for one method, you understand them all. Remote files through XRootD --------------------------- XRootD is a remote file protocol that allows selective reading: if you only want a few arrays from a file that has hundreds, it can be much faster to leave the file on the server and read it through XRootD. -To use XRootD with uproot, you need to have an XRootD installation with its Python interface (ships with XRootD 4 and up). You may `install XRootD with conda `__ or `install XRootD from source `__, but in the latter case, be sure to configure ``PYTHONPATH`` and ``LD_LIBRARY_PATH`` such that +To use XRootD with Uproot, you need to have an XRootD installation with its Python interface (ships with XRootD 4 and up). You may `install XRootD with conda `__ or `install XRootD from source `__, but in the latter case, be sure to configure ``PYTHONPATH`` and ``LD_LIBRARY_PATH`` such that .. code-block:: python @@ -407,13 +407,13 @@ To use XRootD with uproot, you need to have an XRootD installation with its Pyth does not raise an ``ImportError`` exception. -Once XRootD is installed, you can open remote files in uproot by specifying the ``root://`` protocol: +Once XRootD is installed, you can open remote files in Uproot by specifying the ``root://`` protocol: .. code-block:: python - >>> import uproot - >>> file = uproot.open("root://eospublic.cern.ch//eos/opendata/atlas/OutreachDatasets/" - ... "2016-07-29/MC/mc_117049.ttbar_had.root") + >>> import uproot3 + >>> file = uproot3.open("root://eospublic.cern.ch//eos/opendata/atlas/OutreachDatasets/" + ... "2016-07-29/MC/mc_117049.ttbar_had.root") >>> file.keys() [b'mini;1'] >>> tree = file["mini"] @@ -426,13 +426,13 @@ Once XRootD is installed, you can open remote files in uproot by specifying the vxp_z (no streamer) asdtype('>f4') ... -Apart from possible network bandwidth issues, this `ROOTDirectory `__ and the objects it contains are indistinguishable from data from a local file. +Apart from possible network bandwidth issues, this ``ROOTDirectory`` and the objects it contains are indistinguishable from data from a local file. -Unlike a local file, however, remote files are buffered and cached by uproot. (The operating system buffers and caches local files!) For performance reasons, you may need to tune this buffering and caching: you do it through an **xrootdsource** parameter. +Unlike a local file, however, remote files are buffered and cached by Uproot. (The operating system buffers and caches local files!) For performance reasons, you may need to tune this buffering and caching: you do it through an **xrootdsource** parameter. .. code-block:: python - >>> file = uproot.open(..., xrootdsource=dict(chunkbytes=8*1024, limitbytes=1024**2)) + >>> file = uproot3.open(..., xrootdsource=dict(chunkbytes=8*1024, limitbytes=1024**2)) - **chunkbytes** is the granularity (in bytes) of requests through XRootD (by default, it requests data in 8 kB chunks); - **limitbytes** is the number of bytes that are held in memory before evicting and reusing memory (by default, it stores 1 MB of recently read XRootD data). @@ -442,20 +442,20 @@ These defaults have not been tuned. 
You might find improvements in throughput by Reading only part of a TBranch ------------------------------ -ROOT files can be very large— it wouldn't be unusual to encounter a file that is too big to load entirely into memory. Even in these cases, you may be able to load individual arrays into memory, but maybe you don't want to. uproot lets you slice an array before you load it from the file. +ROOT files can be very large— it wouldn't be unusual to encounter a file that is too big to load entirely into memory. Even in these cases, you may be able to load individual arrays into memory, but maybe you don't want to. Uproot lets you slice an array before you load it from the file. -Inside a ROOT file, `TBranch `__ data are split into chunks called baskets; each basket can be read and uncompressed independently of the others. Specifying a slice before reading, rather than loading a whole array and then slicing it, avoids reading baskets that aren't in the slice. +Inside a ROOT file, ``TBranch`` data are split into chunks called baskets; each basket can be read and uncompressed independently of the others. Specifying a slice before reading, rather than loading a whole array and then slicing it, avoids reading baskets that aren't in the slice. -The `foriter.root `__ file has very small baskets to demonstrate. +The `foriter.root `__ file has very small baskets to demonstrate. .. code-block:: bash - wget http://scikit-hep.org/uproot/examples/foriter.root + wget http://scikit-hep.org/uproot3/examples/foriter.root .. code-block:: python - >>> import uproot - >>> branch = uproot.open("foriter.root")["foriter"]["data"] + >>> import uproot3 + >>> branch = uproot3.open("foriter.root")["foriter"]["data"] >>> branch.numbaskets 8 >>> branch.baskets() @@ -528,14 +528,14 @@ Only the first three baskets were touched by the above call (and hence, only tho All of the baskets were touched by the above call (and hence, they are all loaded into cache). -One reason you might want to only part of an array is to get a sense of the data without reading all of it. This can be a particularly useful way to examine a remote file over XRootD with a slow network connection. While you could do this by specifying a small **entrystop**, uproot has a lazy array interface to make this more convenient. +One reason you might want to only part of an array is to get a sense of the data without reading all of it. This can be a particularly useful way to examine a remote file over XRootD with a slow network connection. While you could do this by specifying a small **entrystop**, Uproot has a lazy array interface to make this more convenient. .. code-block:: python >>> basketcache = {} >>> myarray = branch.lazyarray(basketcache=basketcache) >>> myarray - + >>> len(basketcache) 0 >>> myarray[5] @@ -555,13 +555,13 @@ One reason you might want to only part of an array is to get a sense of the data Whenever a lazy array is indexed or sliced, it loads as little as possible to yield the result. Slicing everything (``[:]``) gives you a normal array. -Since caching in uproot is always explicit (for consistency: see `Caching data <#caching-data>`__), repeatedly indexing the same value repeatedly reads from the file unless you specify a cache. You'd probably always want to provide lazy arrays with caches. +Since caching in Uproot is always explicit (for consistency: see `Caching data <#caching-data>`__), repeatedly indexing the same value repeatedly reads from the file unless you specify a cache. 
You'd probably always want to provide lazy arrays with caches. -Another reason to want to read part of an array is to efficiently iterate over data. `TTree `__ has an ``iterate`` method for that purpose (which, incidentally, also takes **entrystart** and **entrystop** parameters). +Another reason to want to read part of an array is to efficiently iterate over data. ``TTree`` has an ``iterate`` method for that purpose (which, incidentally, also takes **entrystart** and **entrystop** parameters). .. code-block:: python - >>> tree = uproot.open("foriter.root")["foriter"] + >>> tree = uproot3.open("foriter.root")["foriter"] >>> for chunk in tree.iterate("data"): ... print(chunk) ... @@ -592,29 +592,29 @@ By default, the iteration step size is the minimum necessary to line up with bas Iterating over files (like TChain) ---------------------------------- -If one file doesn't fit in memory, a collection of them won't, so we need to iterate over a collection of files just as we iterate over one file. The interface for this is similar to the `TTree `__ ``iterate`` method: +If one file doesn't fit in memory, a collection of them won't, so we need to iterate over a collection of files just as we iterate over one file. The interface for this is similar to the ``TTree.iterate`` method: .. code-block:: python - >>> for arrays in uproot.iterate("/set/of/files*.root", "events", + >>> for arrays in uproot3.iterate("/set/of/files*.root", "events", ... ["branch1", "branch2", "branch3"],entrysteps=10000): ... do_something_with(arrays) -The **branches** parameter is the same (usually, a list of `TBranch `__ names will do), as is **entrysteps**, **outputtype**, caching, and parallel processing parameters. Since this form must iterate over a collection of files, it also takes a **path** (string with wildcards or a list of strings) and a **treepath** (location of the `TTree `__ within each file; must be the same), as well as **xrootdsource** options, if relevant. +The **branches** parameter is the same (usually, a list of ``TBranch`` names will do), as is **entrysteps**, **outputtype**, caching, and parallel processing parameters. Since this form must iterate over a collection of files, it also takes a **path** (string with wildcards or a list of strings) and a **treepath** (location of the ``TTree`` within each file; must be the same), as well as **xrootdsource** options, if relevant. Non-flat TTrees\: jagged arrays and more ---------------------------------------- -We have already seen non-scalar structure in the `H → ZZ → eeμμ sample `__. +We have already seen non-scalar structure in the `H → ZZ → eeμμ sample `__. .. code-block:: bash - wget http://scikit-hep.org/uproot/examples/HZZ.root + wget http://scikit-hep.org/uproot3/examples/HZZ.root .. code-block:: python - >>> import uproot - >>> tree = uproot.open("HZZ.root")["events"] + >>> import uproot3 + >>> tree = uproot3.open("HZZ.root")["events"] >>> tree.arrays(["Muon_Px", "Muon_Py", "Muon_Pz"]) {b'Muon_Pz': jaggedarray([[ -8.160793 -11.307582], @@ -664,7 +664,7 @@ Jagged arrays are presented as Python objects with an array-like syntax (square [112.09960289042792, 21.37544434752662], [101.37877704093872, 70.2069335164593]] -But you can also take advantage of the fact that `JaggedArray `__ is backed by Numpy arrays to perform structure-preserving operations much more quickly. The following does the same thing as the above, but using only Numpy calls. 
+But you can also take advantage of the fact that ``JaggedArray`` is backed by Numpy arrays to perform structure-preserving operations much more quickly. The following does the same thing as the above, but using only Numpy calls. .. code-block:: python @@ -681,22 +681,22 @@ But you can also take advantage of the fact that `JaggedArray `__. Since we want the same structure as the original ``px``, we can reuse its ``starts`` and ``stops``. +In the first code block, we used the Python interpreter and ``math`` library to compute momentum magnitudes, one for each muon, maintaining the event structure (one or two muons per event). In the second code block, we used Numpy to compute all the momentum magnitudes in one call (the loop is performed in compiled code) and packaged the result in a new ``JaggedArray``. Since we want the same structure as the original ``px``, we can reuse its ``starts`` and ``stops``. -`JaggedArray `__ is a single Python type used to describe any list of lists of numbers from ROOT. In C++, it may be a branch with another branch as a counter (e.g. ``Muon_pt[nMuons]``), a ``std::vector``, a numeric field from an exploded ``TClonesArray`` of class instances, etc. Jagged arrays are also the simplest kind of variable-sized object that can be found in a `TTree `__. More complex objects are deserialized into `JaggedArray `__ wrapped in classes that present them differently, for instance +``JaggedArray`` is a single Python type used to describe any list of lists of numbers from ROOT. In C++, it may be a branch with another branch as a counter (e.g. ``Muon_pt[nMuons]``), a ``std::vector``, a numeric field from an exploded ``TClonesArray`` of class instances, etc. Jagged arrays are also the simplest kind of variable-sized object that can be found in a ``TTree``. More complex objects are deserialized into ``JaggedArray`` wrapped in classes that present them differently, for instance .. code-block:: bash - wget http://scikit-hep.org/uproot/examples/Zmumu.root + wget http://scikit-hep.org/uproot3/examples/Zmumu.root .. code-block:: python - >>> import uproot - >>> tree = uproot.open("Zmumu.root")["events"] + >>> import uproot3 + >>> tree = uproot3.open("Zmumu.root")["events"] >>> tree.array("Type") strings([b'GT' b'TT' b'GT' ... b'TT' b'GT' b'GG']) -The `Strings `__ type represents a collection of strings, not as (memory-hogging) Python ``bytes``, but as a `JaggedArray `__ wrapper: +The ``Strings`` type represents a collection of strings, not as (memory-hogging) Python ``bytes``, but as a ``JaggedArray`` wrapper: .. code-block:: python @@ -717,7 +717,7 @@ The "numeric" content is actually the ASCII representation of all the string dat TTTGTTTTTGTGTGGGTTTGTGGGTTTGTGGGTTTGTGGGTTTGTGGGTTTGTGGGTTTGTGGGTTTGTGGGTTTGTGGGTTTGTGG ... -The role of the `Strings `__ wrapper is to yield each item as a Python ``bytes`` on demand. +The role of the ``Strings`` wrapper is to yield each item as a Python ``bytes`` on demand. .. code-block:: python @@ -730,29 +730,29 @@ The role of the `Strings >> strings[5:10].tolist() [b'TT', b'GT', b'GG', b'GT', b'TT'] -Again, it doesn't matter whether the strings were ``char*``, ``std::string``, or ``TString``, etc. in C++. They all translate into `Strings `__. +Again, it doesn't matter whether the strings were ``char*``, ``std::string``, or ``TString``, etc. in C++. They all translate into ``Strings``. -At the time of this writing, ``std::vector`` and ``std::vector>`` are also implemented this way. 
Eventually, uproot should be able to read any type, translating C++ classes into Python ``namedtuples``, filled on demand. +At the time of this writing, ``std::vector`` and ``std::vector>`` are also implemented this way. Eventually, Uproot should be able to read any type, translating C++ classes into Python ``namedtuples``, filled on demand. Non-TTrees\: histograms and more -------------------------------- -The uproot implementation is fairly general, to be robust against changes in the ROOT format. ROOT has a wonderful backward-compatibility mechanism called "streamers," which specify how bytes translate into data fields for every type of object contained in the file. Even such basic types as ``TObjArray`` and ``TNamed`` are defined by streamers. +The Uproot implementation is fairly general, to be robust against changes in the ROOT format. ROOT has a wonderful backward-compatibility mechanism called "streamers," which specify how bytes translate into data fields for every type of object contained in the file. Even such basic types as ``TObjArray`` and ``TNamed`` are defined by streamers. -To read a `TTree `__, uproot first consults the streamers in your ROOT file to know how to deserialize your particular version of that class. This is why it contains so many members starting with ``"_f"``: they are the C++ class private members, and uproot is literally following the prescription to deserialize the C++ class. Pythonic attributes like ``tree.name`` and ``tree.numentries`` are aliases for ``tree._fName`` and ``tree._fEntries``, etc. +To read a ``TTree``, Uproot first consults the streamers in your ROOT file to know how to deserialize your particular version of that class. This is why it contains so many members starting with ``"_f"``: they are the C++ class private members, and Uproot is literally following the prescription to deserialize the C++ class. Pythonic attributes like ``tree.name`` and ``tree.numentries`` are aliases for ``tree._fName`` and ``tree._fEntries``, etc. -This means that literally any kind of object may be read from a `ROOTDirectory `__. Even if the uproot authors have never heard of it, the new data type will have a streamer in the file, and uproot will follow that prescription to make an object with the appropriate private fields. What you do with that object is another story: the member functions, written in C++, are *not* serialized into the ROOT file, and thus the Python object will have data but no functionality. +This means that literally any kind of object may be read from a ``ROOTDirectory``. Even if the Uproot authors have never heard of it, the new data type will have a streamer in the file, and Uproot will follow that prescription to make an object with the appropriate private fields. What you do with that object is another story: the member functions, written in C++, are *not* serialized into the ROOT file, and thus the Python object will have data but no functionality. -We have to add functionality by writing the equivalent Python. The uproot `TTree `__ implementation is a bundle of functions that expect private members like ``_fName``, ``_fEntries``, and ``_fBranches``. Other ROOT types can be wrapped in similar ways. Histograms are useful, and therefore the ``TH1`` classes are similarly wrapped: +We have to add functionality by writing the equivalent Python. The Uproot ``TTree`` implementation is a bundle of functions that expect private members like ``_fName``, ``_fEntries``, and ``_fBranches``. Other ROOT types can be wrapped in similar ways. 
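The alias relationship described above can be checked directly. A sketch, reusing the ``Zmumu.root`` tree (the ``True`` results follow from the aliasing, not from anything verified here):

.. code-block:: python

    >>> import uproot3
    >>> tree = uproot3.open("Zmumu.root")["events"]
    >>> tree.name == tree._fName            # the Pythonic attribute is an alias for the streamed member
    True
    >>> tree.numentries == tree._fEntries
    True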
Histograms are useful, and therefore the ``TH1`` classes are similarly wrapped: .. code-block:: bash - wget http://scikit-hep.org/uproot/examples/histograms.root + wget http://scikit-hep.org/uproot3/examples/histograms.root .. code-block:: python - >>> import uproot - >>> file = uproot.open("histograms.root") + >>> import uproot3 + >>> file = uproot3.open("histograms.root") >>> file.allkeys() [b'one;1', b'two;1', b'three;1'] >>> file["one"].show() @@ -775,7 +775,7 @@ We have to add functionality by writing the equivalent Python. The uproot `TTree [3, inf] 0 | | +------------------------------------------------------------+ -Code to view histograms in Pythonic plotting packages is in development, but this is a wide-open area for the future. For now, uproot's ability to read histograms is useful for querying bin values in scripts, like so. +Code to view histograms in Pythonic plotting packages is in development, but this is a wide-open area for the future. For now, Uproot's ability to read histograms is useful for querying bin values in scripts, like so. .. code-block:: python @@ -792,7 +792,7 @@ There are corresponding fields ``alledges``, ``allvalues``, and ``allvariances`` Caching data ------------ -Following Python's preference for explicit operations over implicit ones, uproot does not cache any data by default. If you say ``file["tree"]`` twice or ``tree["branch"].array()`` twice, uproot will go back to the file each time to extract the contents. It will not hold previously loaded objects or arrays in memory in case you want them again. You can keep them in memory yourself by assigning them to a variable; the price of having to be explicit is well worth not having to reverse engineer a memory-hogging cache. +Following Python's preference for explicit operations over implicit ones, Uproot does not cache any data by default. If you say ``file["tree"]`` twice or ``tree["branch"].array()`` twice, Uproot will go back to the file each time to extract the contents. It will not hold previously loaded objects or arrays in memory in case you want them again. You can keep them in memory yourself by assigning them to a variable; the price of having to be explicit is well worth not having to reverse engineer a memory-hogging cache. Sometimes, however, changing your code to assign new variable names (or ``dict`` entries) for every array you want to keep in memory can be time-consuming or obscure an otherwise simple analysis script. It would be nice to just turn on caching. For this purpose, all array-extracting methods have **cache**, **basketcache**, and **keycache** parameters that accpet any ``dict``-like object as a cache. @@ -817,13 +817,13 @@ The array methods will always check the cache first, and if it's empty, get the 'AAGUS3fQmKsR56dpAQAAf77v;events;Q2;asdtype(Bi4,Li4,(),());0-2304': array([-1, 1, 1, ..., -1, -1, -1], dtype=int32)} -Key names are long because they encode a unique identifier to the file, the path to the `TTree `__, to the `TBranch `__, the `Interpretation `__, and the entry range, so that we don't confuse one cached array for another. +Key names are long because they encode a unique identifier to the file, the path to the ``TTree``, to the ``TBranch``, the ``Interpretation``, and the entry range, so that we don't confuse one cached array for another. -Python ``dict`` objects will keep the arrays as long as the process lives (or they're manually deleted, or the ``dict`` goes out of scope). Sometimes this is too long. 
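A sketch of the plain-``dict`` approach and its manual cleanup (branch names are taken from the ``Zmumu.root`` example; any tree works the same way):

.. code-block:: python

    >>> import uproot3
    >>> tree = uproot3.open("Zmumu.root")["events"]
    >>> mycache = {}
    >>> a = tree.arrays(["pt1", "eta1"], cache=mycache)   # reads the file and fills the cache
    >>> b = tree.arrays(["pt1", "eta1"], cache=mycache)   # served from the cache, no file access
    >>> mycache.clear()                                   # "eviction" is entirely up to you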
Real caches typically have a Least Recently Used (LRU) eviction policy: they're capped at a given size and when adding a new array would exceed that size, they delete the ones that were least recently accessed. `ArrayCache `__ implements such a policy. +Python ``dict`` objects will keep the arrays as long as the process lives (or they're manually deleted, or the ``dict`` goes out of scope). Sometimes this is too long. Real caches typically have a Least Recently Used (LRU) eviction policy: they're capped at a given size and when adding a new array would exceed that size, they delete the ones that were least recently accessed. ``ArrayCache`` implements such a policy. .. code-block:: python - >>> cache = uproot.cache.ArrayCache(8*1024**3) # 8 GB (typical) + >>> cache = uproot3.cache.ArrayCache(8*1024**3) # 8 GB (typical) >>> import numpy >>> cache["one"] = numpy.zeros(2*1024**3, dtype=numpy.uint8) # 2 GB >>> list(cache) @@ -841,22 +841,22 @@ Python ``dict`` objects will keep the arrays as long as the process lives (or th >>> list(cache) ['three', 'four', 'five'] -Thus, you can pass a `ArrayCache `__ as the **cache** argument to get caching with an LRU (least recently used) policy. If you need it, there's also a `ThreadSafeArrayCache `__ for parallel processing, and the ``method="LFU"`` parameter to both lets you pick an LFU (least frequently used) policy. +Thus, you can pass a ``ArrayCache`` as the **cache** argument to get caching with an LRU (least recently used) policy. If you need it, there's also a ``ThreadSafeArrayCache`` for parallel processing, and the ``method="LFU"`` parameter to both lets you pick an LFU (least frequently used) policy. Finally, you may be wondering why the array methods have three cache parameters: **cache**, **basketcache**, and **keycache**. Here's what they mean. -- **cache:** applies to fully constructed arrays. Thus, if you request the same branch with a different **entrystart**, **entrystop**, or `Interpretation `__ (e.g. ``dtype`` or ``dims``), it counts as a new array and *competes* with arrays already in the cache, rather than drawing on them. Pass a **cache** argument if you're extracting whole arrays or iterating with fixed **entrysteps**. -- **basketcache:** applies to raw (but decompressed) basket data. This data can be re-sliced and re-interpreted many ways, and uproot finds what it needs in the cache. It's particularly useful for lazy arrays, which are frequently re-sliced. -- **keycache:** applies to ROOT ``TKey`` objects, used to look up baskets. With a full **basketcache** and a **keycache**, uproot never needs to access the file. The reason **keycache** is separate from **basketcache** is because ``TKey`` objects are much smaller than most arrays and should have a different eviction priority than an array: use a cache with LRU for **basketcache** and a simple ``dict`` for **keycache**. +- **cache:** applies to fully constructed arrays. Thus, if you request the same branch with a different **entrystart**, **entrystop**, or ``Interpretation`` (e.g. ``dtype`` or ``dims``), it counts as a new array and *competes* with arrays already in the cache, rather than drawing on them. Pass a **cache** argument if you're extracting whole arrays or iterating with fixed **entrysteps**. +- **basketcache:** applies to raw (but decompressed) basket data. This data can be re-sliced and re-interpreted many ways, and Uproot finds what it needs in the cache. It's particularly useful for lazy arrays, which are frequently re-sliced. 
+- **keycache:** applies to ROOT ``TKey`` objects, used to look up baskets. With a full **basketcache** and a **keycache**, Uproot never needs to access the file. The reason **keycache** is separate from **basketcache** is because ``TKey`` objects are much smaller than most arrays and should have a different eviction priority than an array: use a cache with LRU for **basketcache** and a simple ``dict`` for **keycache**. Normally, you'd *either* set only **cache** *or* both **basketcache** and **keycache**. You can use the same ``dict``-like object for many applications (single pool) or different caches for different applications (to keep the priority queues distinct). -As we have seen, uproot's XRootD handler has an even lower-level cache for bytes read over the network. This is implemented as a `ThreadSafeArrayCache `__. Local files are usually read as memory-mapped files, in which case the operating system does the low-level caching with the same mechanism as virtual memory. (For more control, you can `uproot.open `__ a file with ``localsource=dict(chunkbytes=8*1024, limitbytes=1024**2)`` to use a regular file handle and custom paging/cache size.) +As we have seen, Uproot's XRootD handler has an even lower-level cache for bytes read over the network. This is implemented as a ``ThreadSafeArrayCache``. Local files are usually read as memory-mapped files, in which case the operating system does the low-level caching with the same mechanism as virtual memory. (For more control, you can ``uproot3.open`` a file with ``localsource=dict(chunkbytes=8*1024, limitbytes=1024**2)`` to use a regular file handle and custom paging/cache size.) Parallel processing ------------------- -Just as caching must be explicit in uproot, parallel processing must be explicit as well. By default, every read, decompression, and array construction is single-threaded. To enable parallel processing, pass in a Python 3 executor. +Just as caching must be explicit in Uproot, parallel processing must be explicit as well. By default, every read, decompression, and array construction is single-threaded. To enable parallel processing, pass in a Python 3 executor. To use executors in Python 2, install the backport. @@ -885,8 +885,8 @@ Most array-reading methods have an **executor** parameter, into which you can pa .. code-block:: python - >>> import uproot - >>> branch = uproot.open("foriter.root")["foriter"]["data"] + >>> import uproot3 + >>> branch = uproot3.open("foriter.root")["foriter"]["data"] >>> branch.array(executor=executor) array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, @@ -894,7 +894,7 @@ Most array-reading methods have an **executor** parameter, into which you can pa The only difference that might be visible to the user is performance. With an executor, each basket is read, decompressed, and copied to the output array in a separate task, and these tasks are handed to the executor for scheduling. A ``ThreadPoolExecutor`` fills all of the available workers and pushes more work on whenever a task finishes. The tasks must share memory (cannot be a ``ProcessPoolExecutor``) because they all write to (different parts of) the same output array. -If you're familiar with Python's Global Interpreter Lock (GIL), you might be wondering how parallel processing could help a single-process Python program. 
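For completeness, a minimal sketch of constructing the executor passed above, assuming Python 3's standard ``concurrent.futures`` (the Python 2 backport mentioned earlier provides the same class):

.. code-block:: python

    >>> import uproot3
    >>> from concurrent.futures import ThreadPoolExecutor
    >>> executor = ThreadPoolExecutor(4)      # four worker threads sharing one output array
    >>> branch = uproot3.open("foriter.root")["foriter"]["data"]
    >>> branch.array(executor=executor)       # baskets are read and decompressed in parallel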
In uproot, at least, all of the operations that scale with the number of events— reading, decompressing, and the array copy— are performed in operating system calls (reading), compiled compression libraries that release the GIL, and Numpy, which also releases the GIL. +If you're familiar with Python's Global Interpreter Lock (GIL), you might be wondering how parallel processing could help a single-process Python program. In Uproot, at least, all of the operations that scale with the number of events— reading, decompressing, and the array copy— are performed in operating system calls (reading), compiled compression libraries that release the GIL, and Numpy, which also releases the GIL. Since the baskets are being read in parallel, you may want to read them in the background, freeing up the main thread to do other things (such as submit even more work!). If you set ``blocking=False``, the array methods return a zero-argument function instead of an array, ``dict`` of arrays, or whatever. When you want to wait for the result, evaluate this function. @@ -913,12 +913,12 @@ The ``blocking=False`` setting can be used without an executor (without parallel Connectors to other packages ---------------------------- -As a connector between ROOT and the scientific Python world, uproot has a growing set of extensions to ease these transitions. For instance, to get a Pandas DataFrame, call `tree.pandas.df `__: +As a connector between ROOT and the scientific Python world, Uproot has a growing set of extensions to ease these transitions. For instance, to get a Pandas DataFrame, call ``tree.pandas.df``: .. code-block:: python - >>> import uproot - >>> tree = uproot.open("Zmumu.root")["events"] + >>> import uproot3 + >>> tree = uproot3.open("Zmumu.root")["events"] >>> tree.pandas.df(["pt1", "eta1", "phi1", "pt2", "eta2", "phi2"]) eta1 eta2 phi1 phi2 pt1 pt2 0 -1.217690 -1.051390 2.741260 -0.440873 44.7322 38.8311 @@ -935,5 +935,5 @@ As a connector between ROOT and the scientific Python world, uproot has a growin This method takes the same **branches**, **entrystart**, **entrystop**, **cache**, **basketcache**, **keycache**, and **executor** methods as all the other array methods. -Note that ``pandas.DataFrame`` is also a recognized option for all **outputtype** parameters, so you can, for instance, iterate through DataFrames with ``uproot.iterate("files*.root", "treename", outputtype=pandas.DataFrame)``. +Note that ``pandas.DataFrame`` is also a recognized option for all **outputtype** parameters, so you can, for instance, iterate through DataFrames with ``uproot3.iterate("files*.root", "treename", outputtype=pandas.DataFrame)``. diff --git a/docs/source/caches.rst b/docs/source/caches.rst index c06372f4..88866257 100644 --- a/docs/source/caches.rst +++ b/docs/source/caches.rst @@ -1,13 +1,13 @@ Caches ====== -Many functions in uproot may be given a cache to avoid expensive re-reading or re-calculating previously read or calculated quantities. These functions assume nothing about the cache other than a ``dict``-like interface: square brackets get old values and set new ones and ``in`` checks for existence. Therefore, a ``dict`` may be used as a "save forever" cache, but of course you might not have enough memory to save all data in memory forever. +Many functions in Uproot may be given a cache to avoid expensive re-reading or re-calculating previously read or calculated quantities. 
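For example, the ``ArrayCache`` class described on this page can be handed to any of these functions. A sketch with an arbitrary 1 GB budget and branch names from the tutorial's ``Zmumu.root`` sample:

.. code-block:: python

    >>> import uproot3
    >>> cache = uproot3.cache.ArrayCache(1024**3)           # keep at most ~1 GB of arrays
    >>> tree = uproot3.open("Zmumu.root")["events"]
    >>> arrays = tree.arrays(["pt1", "pt2"], cache=cache)   # later identical calls reuse the cache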
These functions assume nothing about the cache other than a ``dict``-like interface: square brackets get old values and set new ones and ``in`` checks for existence. Therefore, a ``dict`` may be used as a "save forever" cache, but of course you might not have enough memory to save all data in memory forever. The classes described on this page are drop-in replacements for ``dict`` with additional properties: least-recently-used (LRU) eviction, which drops the oldest cache item upon reaching a memory budget, as well as thread safety and process safety. -This interface, in which the user instantiates and passes the cache object explicitly instead of turning on an internal cache option, is to avoid situations in which the user can't determine where large amounts of memory are accumulating. When uproot reads a ROOT file, it does not save anything for reuse except what it puts in user-provided caches, so the user can always inspect these objects to see what's being saved. +This interface, in which the user instantiates and passes the cache object explicitly instead of turning on an internal cache option, is to avoid situations in which the user can't determine where large amounts of memory are accumulating. When Uproot reads a ROOT file, it does not save anything for reuse except what it puts in user-provided caches, so the user can always inspect these objects to see what's being saved. -The array-reading functions (in :py:class:`TTreeMethods ` and :py:class:`TBranchMethods `) each have three cache parameters: +The array-reading functions (in :py:class:`TTreeMethods ` and :py:class:`TBranchMethods `) each have three cache parameters: - **cache** for fully interpreted data. Accessing the same arrays with a different interpretation or a different entry range results in a cache miss. - **basketcache** for raw basket data. Accessing the same arrays with a different interpretation or a different entry range fully utilizes this cache, since the interpretation/construction from baskets is performed after retrieving data from this cache. @@ -15,12 +15,12 @@ The array-reading functions (in :py:class:`TTreeMethods `__ for a `tutorial `__. +See the `project homepage `__ for a `tutorial `__. -Run `the tutorial `__ on Binder. +Run `the tutorial `__ on Binder. Reference documentation ----------------------- diff --git a/docs/source/interpretation.rst b/docs/source/interpretation.rst index a45b2921..fa633a86 100644 --- a/docs/source/interpretation.rst +++ b/docs/source/interpretation.rst @@ -1,67 +1,67 @@ Interpretation ============== -ROOT was designed for C++, so ROOT data have an unambiguous C++ interpretation. However, their Python interpretation is open to interpretation. For instance, you may want a branch to be read as a new Numpy array, or perhaps a user-provided array in shared memory, with or without byte-swapping, type conversion, or reshaping, or as an array of unequal-length arrays, or an array of classes defined by the ROOT streamers, or an array of custom classes, or as a Numpy record array, etc. The uproot :py:class:`Interpretation ` mechanism provides such flexibility without sacrificing the flexibility of the selective reading methods. +ROOT was designed for C++, so ROOT data have an unambiguous C++ interpretation. However, their Python interpretation is open to interpretation. 
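A hedged sketch of that flexibility, assuming ``pt1`` in ``Zmumu.root`` is stored as big-endian ``float64`` and using the ``asdtype`` class and *interpretation* argument documented below:

.. code-block:: python

    >>> import uproot3
    >>> from uproot3.interp.numerical import asdtype
    >>> branch = uproot3.open("Zmumu.root")["events"]["pt1"]
    >>> default = branch.array()                                     # automatic interpretation
    >>> as32 = branch.array(interpretation=asdtype(">f8", "f4"))     # same bytes, read as float32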
For instance, you may want a branch to be read as a new Numpy array, or perhaps a user-provided array in shared memory, with or without byte-swapping, type conversion, or reshaping, or as an array of unequal-length arrays, or an array of classes defined by the ROOT streamers, or an array of custom classes, or as a Numpy record array, etc. The Uproot :py:class:`Interpretation ` mechanism provides such flexibility without sacrificing the flexibility of the selective reading methods. -If no interpretation is specified, :py:func:`uproot.interpret ` is automatically called to provide a reasonable default. This function may also be called by the user with custom arguments and its output may be modified in the *branches* or *interpretation* arguments of :py:class:`TTreeMethods ` and :py:class:`TBranchMethods ` array-producing functions. +If no interpretation is specified, :py:func:`uproot3.interpret ` is automatically called to provide a reasonable default. This function may also be called by the user with custom arguments and its output may be modified in the *branches* or *interpretation* arguments of :py:class:`TTreeMethods ` and :py:class:`TBranchMethods ` array-producing functions. -uproot.interp.interp.Interpretation +uproot3.interp.interp.Interpretation ----------------------------------- -.. autoclass:: uproot.interp.interp.Interpretation +.. autoclass:: uproot3.interp.interp.Interpretation -uproot.interpret +uproot3.interpret ---------------- -.. autofunction:: uproot.interp.auto.interpret +.. autofunction:: uproot3.interp.auto.interpret -uproot.interp.numerical.asdtype +uproot3.interp.numerical.asdtype ------------------------------- -.. autoclass:: uproot.interp.numerical.asdtype +.. autoclass:: uproot3.interp.numerical.asdtype -.. automethod:: uproot.interp.numerical.asdtype.to +.. automethod:: uproot3.interp.numerical.asdtype.to -.. automethod:: uproot.interp.numerical.asdtype.toarray +.. automethod:: uproot3.interp.numerical.asdtype.toarray -uproot.interp.numerical.asarray +uproot3.interp.numerical.asarray ------------------------------- -.. autoclass:: uproot.interp.numerical.asarray +.. autoclass:: uproot3.interp.numerical.asarray -uproot.interp.numerical.asdouble32 +uproot3.interp.numerical.asdouble32 ---------------------------------- -.. autoclass:: uproot.interp.numerical.asdouble32 +.. autoclass:: uproot3.interp.numerical.asdouble32 -uproot.interp.numerical.asstlbitset +uproot3.interp.numerical.asstlbitset ----------------------------------- -.. autoclass:: uproot.interp.numerical.asstlbitset +.. autoclass:: uproot3.interp.numerical.asstlbitset -uproot.interp.jagged.asjagged +uproot3.interp.jagged.asjagged ----------------------------- -.. autoclass:: uproot.interp.jagged.asjagged +.. autoclass:: uproot3.interp.jagged.asjagged -.. automethod:: uproot.interp.jagged.asjagged.to +.. automethod:: uproot3.interp.jagged.asjagged.to -uproot.interp.objects.astable +uproot3.interp.objects.astable ----------------------------- -.. autoclass:: uproot.interp.objects.astable +.. autoclass:: uproot3.interp.objects.astable -uproot.interp.objects.asobj +uproot3.interp.objects.asobj --------------------------- -.. autoclass:: uproot.interp.objects.asobj +.. autoclass:: uproot3.interp.objects.asobj -uproot.interp.objects.asgenobj +uproot3.interp.objects.asgenobj ------------------------------ -.. autoclass:: uproot.interp.objects.asgenobj +.. autoclass:: uproot3.interp.objects.asgenobj -uproot.interp.objects.asstring +uproot3.interp.objects.asstring ------------------------------ -.. 
autoclass:: uproot.interp.objects.asstring +.. autoclass:: uproot3.interp.objects.asstring diff --git a/docs/source/opening-files.rst b/docs/source/opening-files.rst index e02af800..ec0f051a 100644 --- a/docs/source/opening-files.rst +++ b/docs/source/opening-files.rst @@ -1,66 +1,66 @@ Opening files ============= -Unlike ROOT, uproot strongly assumes that the input file does not change while you read it. File :py:class:`Sources ` do not lock the file, and they may open, close, and reopen it as needed to read the file in parallel. Therefore, if another process is changing the contents of a file while uproot reads it, the behavior is undefined (but likely bad!). +Unlike ROOT, Uproot strongly assumes that the input file does not change while you read it. File :py:class:`Sources ` do not lock the file, and they may open, close, and reopen it as needed to read the file in parallel. Therefore, if another process is changing the contents of a file while Uproot reads it, the behavior is undefined (but likely bad!). -uproot.open +uproot3.open ----------- -All ROOT objects come from ROOT files, so :py:func:`uproot.open ` is probably the first function you'll call. The :py:class:`ROOTDirectory ` object it returns is a handle for accessing contents deeper within the file. If the file is remote, use ``"root://"`` or ``"http://"`` in the file name to invoke a remote protocol. +All ROOT objects come from ROOT files, so :py:func:`uproot3.open ` is probably the first function you'll call. The :py:class:`ROOTDirectory ` object it returns is a handle for accessing contents deeper within the file. If the file is remote, use ``"root://"`` or ``"http://"`` in the file name to invoke a remote protocol. -.. autofunction:: uproot.rootio.open +.. autofunction:: uproot3.rootio.open -uproot.xrootd +uproot3.xrootd ------------- -Although :py:func:`uproot.open ` opens files regardless of whether they're local or remote, you can explicitly invoke XRootD with :py:func:`uproot.xrootd `. You get the same kind of :py:class:`ROOTDirectory ` as from :py:func:`uproot.open `. +Although :py:func:`uproot3.open ` opens files regardless of whether they're local or remote, you can explicitly invoke XRootD with :py:func:`uproot3.xrootd `. You get the same kind of :py:class:`ROOTDirectory ` as from :py:func:`uproot3.open `. -.. autofunction:: uproot.rootio.xrootd +.. autofunction:: uproot3.rootio.xrootd -uproot.http +uproot3.http ----------- -You can also open files through the HTTP protocol (and some XRootD servers support HTTP, too). The :py:func:`uproot.http ` function opens files via HTTP. +You can also open files through the HTTP protocol (and some XRootD servers support HTTP, too). The :py:func:`uproot3.http ` function opens files via HTTP. -.. autofunction:: uproot.rootio.http +.. autofunction:: uproot3.rootio.http -uproot.iterate +uproot3.iterate -------------- -With a :py:class:`ROOTDirectory `, you can dig into a file and extract objects, subdirectories, or TTree data, but sometimes you know exactly where to find a TTree and have a collection of identically-typed files to iterate through. +With a :py:class:`ROOTDirectory `, you can dig into a file and extract objects, subdirectories, or TTree data, but sometimes you know exactly where to find a TTree and have a collection of identically-typed files to iterate through. 
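Before moving on to multi-file iteration, here is a sketch of the three single-file entry points above; the remote URLs are placeholders, not real servers:

.. code-block:: python

    >>> import uproot3
    >>> f1 = uproot3.open("local.root")                          # local file, memory-mapped by default
    >>> f2 = uproot3.open("root://some.server//path/file.root")  # dispatched to the XRootD handler
    >>> f3 = uproot3.open("http://some.server/path/file.root")   # dispatched to the HTTP handler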
-The :py:func:`uproot.iterate ` function gives you an iterator over groups of arrays just like :py:meth:`TreeMethods.iterate `, except that it iterates over a large set of files (and verifies that the selected TTree branches are compatible). This serves essentially the same function as ROOT's TChain, allowing you to use TTrees in a set of files the same way you would use a single TTree. +The :py:func:`uproot3.iterate ` function gives you an iterator over groups of arrays just like :py:meth:`TreeMethods.iterate `, except that it iterates over a large set of files (and verifies that the selected TTree branches are compatible). This serves essentially the same function as ROOT's TChain, allowing you to use TTrees in a set of files the same way you would use a single TTree. -.. autofunction:: uproot.tree.iterate +.. autofunction:: uproot3.tree.iterate -uproot.pandas.iterate +uproot3.pandas.iterate --------------------- -The :py:func:`uproot.pandas.iterate ` function is like the above, except that it iterates over Pandas DataFrames (as though you passed ``outputtype=pandas.DataFrame`` and changed some defaults). +The :py:func:`uproot3.pandas.iterate ` function is like the above, except that it iterates over Pandas DataFrames (as though you passed ``outputtype=pandas.DataFrame`` and changed some defaults). -.. autofunction:: uproot.pandas.iterate +.. autofunction:: uproot3.pandas.iterate -uproot.lazyarray and lazyarrays +uproot3.lazyarray and lazyarrays ------------------------------- -The :py:func:`uproot.lazyarray ` and :py:func:`uproot.lazyarrays ` give you a lazy view into a whole collection of files. They behave like the :py:meth:`TTreeMethods.lazyarray ` and :py:meth:`TTreeMethods.lazyarrays ` methods except that they wildcarded filenames and a TTree name as the first arguments. +The :py:func:`uproot3.lazyarray ` and :py:func:`uproot3.lazyarrays ` give you a lazy view into a whole collection of files. They behave like the :py:meth:`TTreeMethods.lazyarray ` and :py:meth:`TTreeMethods.lazyarrays ` methods except that they wildcarded filenames and a TTree name as the first arguments. -.. autofunction:: uproot.tree.lazyarray +.. autofunction:: uproot3.tree.lazyarray -.. autofunction:: uproot.tree.lazyarrays +.. autofunction:: uproot3.tree.lazyarrays -uproot.daskarray and daskframe +uproot3.daskarray and daskframe ------------------------------ The following are the above, but presents the data as `Dask `__ objects. -.. autofunction:: uproot.tree.daskarray +.. autofunction:: uproot3.tree.daskarray -.. autofunction:: uproot.tree.daskframe +.. autofunction:: uproot3.tree.daskframe -uproot.numentries +uproot3.numentries ----------------- If you need to know the total number of entries of a set of files before processing them (e.g. for normalization or setting weights), you could open each file and TTree individually, but the function below is faster because it skips some steps that aren't needed when you only want the number of files. -.. autofunction:: uproot.tree.numentries +.. autofunction:: uproot3.tree.numentries diff --git a/docs/source/parallel-io.rst b/docs/source/parallel-io.rst index 8043e00e..c3438c71 100644 --- a/docs/source/parallel-io.rst +++ b/docs/source/parallel-io.rst @@ -1,75 +1,75 @@ Parallel I/O ============ -An essential aspect of uproot's file-reader is that data :py:class:`Sources ` are completely distinct from :py:class:`Cursors `, which track position in the source. 
This interface is similar to memory-mapped files, which do not track a position but respond as needed to requests for data by address, and it is unlike traditional file handles, which reference the source of data (integer linked to a file through syscalls) and a position within it (queried by ``seek`` and changed by ``tell``) as an indivisible unit. By default, uproot reads data through memory-mapped files; all other sources are made to *look* like a memory-mapped file. +An essential aspect of Uproot's file-reader is that data :py:class:`Sources ` are completely distinct from :py:class:`Cursors `, which track position in the source. This interface is similar to memory-mapped files, which do not track a position but respond as needed to requests for data by address, and it is unlike traditional file handles, which reference the source of data (integer linked to a file through syscalls) and a position within it (queried by ``seek`` and changed by ``tell``) as an indivisible unit. By default, Uproot reads data through memory-mapped files; all other sources are made to *look* like a memory-mapped file. -Throughout the ROOT I/O and TTree-handling modules, :py:class:`Sources ` and :py:class:`Cursors ` are passed as independent objects. A :py:class:`Cursor ` cannot read data without being given an explicit :py:class:`Source `. When parts of a file are to be read in parallel, lightweight :py:class:`Cursors ` are duplicated, one per thread, while :py:class:`Sources ` are only duplicated (e.g. multiple file handles into the same file) if the source is not inherently thread-safe (as memory-mapped files are). +Throughout the ROOT I/O and TTree-handling modules, :py:class:`Sources ` and :py:class:`Cursors ` are passed as independent objects. A :py:class:`Cursor ` cannot read data without being given an explicit :py:class:`Source `. When parts of a file are to be read in parallel, lightweight :py:class:`Cursors ` are duplicated, one per thread, while :py:class:`Sources ` are only duplicated (e.g. multiple file handles into the same file) if the source is not inherently thread-safe (as memory-mapped files are). -Even when not reading in parallel, copying a :py:class:`Cursor ` when passing it to a subroutine is a lightweight way to keep one's place without the spaghetti of ``seek`` and ``tell`` commands to backtrack, as is often necessary in the ROOT file structure. +Even when not reading in parallel, copying a :py:class:`Cursor ` when passing it to a subroutine is a lightweight way to keep one's place without the spaghetti of ``seek`` and ``tell`` commands to backtrack, as is often necessary in the ROOT file structure. -uproot.source.cursor.Cursor +uproot3.source.cursor.Cursor --------------------------- -.. autoclass:: uproot.source.cursor.Cursor +.. autoclass:: uproot3.source.cursor.Cursor -.. automethod:: uproot.source.cursor.Cursor.copied +.. automethod:: uproot3.source.cursor.Cursor.copied -.. automethod:: uproot.source.cursor.Cursor.skipped +.. automethod:: uproot3.source.cursor.Cursor.skipped -.. automethod:: uproot.source.cursor.Cursor.skip +.. automethod:: uproot3.source.cursor.Cursor.skip -.. automethod:: uproot.source.cursor.Cursor.fields +.. automethod:: uproot3.source.cursor.Cursor.fields -.. automethod:: uproot.source.cursor.Cursor.field +.. automethod:: uproot3.source.cursor.Cursor.field -.. automethod:: uproot.source.cursor.Cursor.bytes +.. automethod:: uproot3.source.cursor.Cursor.bytes -.. automethod:: uproot.source.cursor.Cursor.array +.. 
automethod:: uproot3.source.cursor.Cursor.array -.. automethod:: uproot.source.cursor.Cursor.string +.. automethod:: uproot3.source.cursor.Cursor.string -.. automethod:: uproot.source.cursor.Cursor.cstring +.. automethod:: uproot3.source.cursor.Cursor.cstring -.. automethod:: uproot.source.cursor.Cursor.skipstring +.. automethod:: uproot3.source.cursor.Cursor.skipstring -.. automethod:: uproot.source.cursor.Cursor.hexdump +.. automethod:: uproot3.source.cursor.Cursor.hexdump -uproot.source.source.Source +uproot3.source.source.Source --------------------------- -.. autoclass:: uproot.source.source.Source +.. autoclass:: uproot3.source.source.Source -uproot.FileSource +uproot3.FileSource ----------------- -.. autoattribute:: uproot.source.file.FileSource.defaults +.. autoattribute:: uproot3.source.file.FileSource.defaults -.. autoclass:: uproot.source.file.FileSource +.. autoclass:: uproot3.source.file.FileSource -uproot.MemmapSource +uproot3.MemmapSource ------------------- -.. autoattribute:: uproot.source.memmap.MemmapSource.defaults +.. autoattribute:: uproot3.source.memmap.MemmapSource.defaults -.. autoclass:: uproot.source.memmap.MemmapSource +.. autoclass:: uproot3.source.memmap.MemmapSource -uproot.XRootDSource +uproot3.XRootDSource ------------------- -.. autoattribute:: uproot.source.xrootd.XRootDSource.defaults +.. autoattribute:: uproot3.source.xrootd.XRootDSource.defaults -.. autoclass:: uproot.source.xrootd.XRootDSource +.. autoclass:: uproot3.source.xrootd.XRootDSource -uproot.HTTPSource +uproot3.HTTPSource ------------------- -.. autoattribute:: uproot.source.http.HTTPSource.defaults +.. autoattribute:: uproot3.source.http.HTTPSource.defaults -.. autoclass:: uproot.source.http.HTTPSource +.. autoclass:: uproot3.source.http.HTTPSource -uproot.source.compressed.CompressedSource +uproot3.source.compressed.CompressedSource ----------------------------------------- -.. autoclass:: uproot.source.compressed.Compression +.. autoclass:: uproot3.source.compressed.Compression -.. autoclass:: uproot.source.compressed.CompressedSource +.. autoclass:: uproot3.source.compressed.CompressedSource diff --git a/docs/source/root-io.rst b/docs/source/root-io.rst index b09bde17..b42a7944 100644 --- a/docs/source/root-io.rst +++ b/docs/source/root-io.rst @@ -1,51 +1,51 @@ ROOT I/O ======== -The :py:mod:`uproot.rootio` module contains everything needed to navigate through a ROOT file and extract inert, data-only objects. Methods for those objects are defined in other modules. The :py:func:`uproot.open ` function returns a :py:class:`ROOTDirectory ` object, which is a handle into the file, from which all other data can be accessed. +The :py:mod:`uproot3.rootio` module contains everything needed to navigate through a ROOT file and extract inert, data-only objects. Methods for those objects are defined in other modules. The :py:func:`uproot3.open ` function returns a :py:class:`ROOTDirectory ` object, which is a handle into the file, from which all other data can be accessed. -This module has many more classes than those documented here, but all but a few are considered internal details. The classes documented below represent the public API. In fact, only :py:class:`ROOTDirectory ` has useful attributes and methods for a typical user. The other two, :py:class:`ROOTObject ` and :py:class:`ROOTStreamedObject `, are documented because they are superclasses of all objects that could be extracted from a ROOT file, and may be useful in ``isinstance`` checks. 
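A sketch of such a check, reusing the ``histograms.root`` example from the tutorial (the ``True`` results are what the superclass relationships described above imply):

.. code-block:: python

    >>> import uproot3
    >>> f = uproot3.open("histograms.root")
    >>> isinstance(f, uproot3.rootio.ROOTDirectory)               # the file handle itself
    True
    >>> isinstance(f["one"], uproot3.rootio.ROOTStreamedObject)   # streamed types such as histograms
    True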
+This module has many more classes than those documented here, but all but a few are considered internal details. The classes documented below represent the public API. In fact, only :py:class:`ROOTDirectory ` has useful attributes and methods for a typical user. The other two, :py:class:`ROOTObject ` and :py:class:`ROOTStreamedObject `, are documented because they are superclasses of all objects that could be extracted from a ROOT file, and may be useful in ``isinstance`` checks. -uproot.rootio.ROOTDirectory +uproot3.rootio.ROOTDirectory --------------------------- -Although :py:class:`ROOTDirectory ` resembles ROOT's TFile, TDirectory, and TFileDirectory to some degree, it does not have a direct relationship to any of them. (This is because we adopted a different model for representing the contents of a ROOT file: purely acyclic with continuation passing.) As a result, :py:class:`ROOTDirectory ` is not a :py:class:`ROOTObject ` and isn't named "TFile." +Although :py:class:`ROOTDirectory ` resembles ROOT's TFile, TDirectory, and TFileDirectory to some degree, it does not have a direct relationship to any of them. (This is because we adopted a different model for representing the contents of a ROOT file: purely acyclic with continuation passing.) As a result, :py:class:`ROOTDirectory ` is not a :py:class:`ROOTObject ` and isn't named "TFile." -A :py:class:`ROOTDirectory ` may represent a whole ROOT file or a single TDirectory within that file--- after reading, there is no difference. +A :py:class:`ROOTDirectory ` may represent a whole ROOT file or a single TDirectory within that file--- after reading, there is no difference. -.. autoclass:: uproot.rootio.ROOTDirectory +.. autoclass:: uproot3.rootio.ROOTDirectory -.. automethod:: uproot.rootio.ROOTDirectory.get +.. automethod:: uproot3.rootio.ROOTDirectory.get -.. automethod:: uproot.rootio.ROOTDirectory.iterkeys +.. automethod:: uproot3.rootio.ROOTDirectory.iterkeys -.. automethod:: uproot.rootio.ROOTDirectory.itervalues +.. automethod:: uproot3.rootio.ROOTDirectory.itervalues -.. automethod:: uproot.rootio.ROOTDirectory.iteritems +.. automethod:: uproot3.rootio.ROOTDirectory.iteritems -.. automethod:: uproot.rootio.ROOTDirectory.iterclasses +.. automethod:: uproot3.rootio.ROOTDirectory.iterclasses -.. automethod:: uproot.rootio.ROOTDirectory.keys +.. automethod:: uproot3.rootio.ROOTDirectory.keys -.. automethod:: uproot.rootio.ROOTDirectory.values +.. automethod:: uproot3.rootio.ROOTDirectory.values -.. automethod:: uproot.rootio.ROOTDirectory.items +.. automethod:: uproot3.rootio.ROOTDirectory.items -.. automethod:: uproot.rootio.ROOTDirectory.classes +.. automethod:: uproot3.rootio.ROOTDirectory.classes -.. automethod:: uproot.rootio.ROOTDirectory.allkeys +.. automethod:: uproot3.rootio.ROOTDirectory.allkeys -.. automethod:: uproot.rootio.ROOTDirectory.allvalues +.. automethod:: uproot3.rootio.ROOTDirectory.allvalues -.. automethod:: uproot.rootio.ROOTDirectory.allitems +.. automethod:: uproot3.rootio.ROOTDirectory.allitems -.. automethod:: uproot.rootio.ROOTDirectory.allclasses +.. automethod:: uproot3.rootio.ROOTDirectory.allclasses -uproot.rootio.ROOTObject +uproot3.rootio.ROOTObject ------------------------ -.. autoclass:: uproot.rootio.ROOTObject +.. autoclass:: uproot3.rootio.ROOTObject -uproot.rootio.ROOTStreamedObject +uproot3.rootio.ROOTStreamedObject -------------------------------- -.. autoclass:: uproot.rootio.ROOTStreamedObject +.. 
autoclass:: uproot3.rootio.ROOTStreamedObject diff --git a/docs/source/ttree-handling.rst b/docs/source/ttree-handling.rst index ae3494da..6316f66e 100644 --- a/docs/source/ttree-handling.rst +++ b/docs/source/ttree-handling.rst @@ -1,164 +1,164 @@ TTree Handling ============== -TTree and TBranch are two of the data objects that are versioned and streamed from ROOT files (subclasses of :py:class:`ROOTStreamedObject `). As such, their classes do not exist until observed in a ROOT file, and different class objects may be generated by different versions of the class with different member variables. +TTree and TBranch are two of the data objects that are versioned and streamed from ROOT files (subclasses of :py:class:`ROOTStreamedObject `). As such, their classes do not exist until observed in a ROOT file, and different class objects may be generated by different versions of the class with different member variables. -However, you may use TTrees and TBranches consistently across versions because they inherit from mix-ins :py:class:`TTreeMethods ` and :py:class:`TBranchMethods `. These interfaces provide more Pythonic names for TTree/TBranch private data and a variety of data-reading routines. +However, you may use TTrees and TBranches consistently across versions because they inherit from mix-ins :py:class:`TTreeMethods ` and :py:class:`TBranchMethods `. These interfaces provide more Pythonic names for TTree/TBranch private data and a variety of data-reading routines. TLeaf and its subclasses do not (currently) have mix-ins: they are used only for their data. TBasket does not even have an instantiation--- TBasket bytes are read directly into arrays. -These bytes are read according to some :py:class:`Interpretation `, which governs how the bytes are interpreted (source) and what kinds of objects are filled (destination). For numerical data, the source is usually one big-endian array per basket and the destination is usually one native-endian array per branch. You have complete freedom to set the :py:class:`Interpretation ` for each branch, but sensible defaults are provided (by the :py:func:`interpret ` function, automatically). +These bytes are read according to some :py:class:`Interpretation `, which governs how the bytes are interpreted (source) and what kinds of objects are filled (destination). For numerical data, the source is usually one big-endian array per basket and the destination is usually one native-endian array per branch. You have complete freedom to set the :py:class:`Interpretation ` for each branch, but sensible defaults are provided (by the :py:func:`interpret ` function, automatically). -Many of the :py:class:`TTreeMethods ` and :py:class:`TBranchMethods ` have the same parameters--- identical parameter names have identical meanings. In the documentation below, the definitions are repeated for each method (because it may be the first you call ``help`` on, though it inflates the size of this page.). +Many of the :py:class:`TTreeMethods ` and :py:class:`TBranchMethods ` have the same parameters--- identical parameter names have identical meanings. In the documentation below, the definitions are repeated for each method (because it may be the first you call ``help`` on, though it inflates the size of this page.). -uproot.tree.TTreeMethods +uproot3.tree.TTreeMethods ------------------------ -Every class read from a ROOT file named "TTree" is mixed in with :py:class:`TTreeMethods ` so that the following methods are available. 
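For example, the automatically chosen interpretations can be inspected before reading anything; a sketch that assumes ``uproot3.interpret`` (documented in the Interpretation section) accepts a branch as its first argument:

.. code-block:: python

    >>> import uproot3
    >>> tree = uproot3.open("Zmumu.root")["events"]
    >>> for name, branch in tree.items():
    ...     print(name, uproot3.interpret(branch))   # the default chosen for each branch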
+Every class read from a ROOT file named "TTree" is mixed in with :py:class:`TTreeMethods ` so that the following methods are available. -.. autoclass:: uproot.tree.TTreeMethods +.. autoclass:: uproot3.tree.TTreeMethods branch accessors ^^^^^^^^^^^^^^^^ -.. automethod:: uproot.tree.TTreeMethods.get +.. automethod:: uproot3.tree.TTreeMethods.get -.. automethod:: uproot.tree.TTreeMethods.iterkeys +.. automethod:: uproot3.tree.TTreeMethods.iterkeys -.. automethod:: uproot.tree.TTreeMethods.itervalues +.. automethod:: uproot3.tree.TTreeMethods.itervalues -.. automethod:: uproot.tree.TTreeMethods.iteritems +.. automethod:: uproot3.tree.TTreeMethods.iteritems -.. automethod:: uproot.tree.TTreeMethods.keys +.. automethod:: uproot3.tree.TTreeMethods.keys -.. automethod:: uproot.tree.TTreeMethods.values +.. automethod:: uproot3.tree.TTreeMethods.values -.. automethod:: uproot.tree.TTreeMethods.items +.. automethod:: uproot3.tree.TTreeMethods.items -.. automethod:: uproot.tree.TTreeMethods.allkeys +.. automethod:: uproot3.tree.TTreeMethods.allkeys -.. automethod:: uproot.tree.TTreeMethods.allvalues +.. automethod:: uproot3.tree.TTreeMethods.allvalues -.. automethod:: uproot.tree.TTreeMethods.allitems +.. automethod:: uproot3.tree.TTreeMethods.allitems -.. automethod:: uproot.tree.TTreeMethods.clusters +.. automethod:: uproot3.tree.TTreeMethods.clusters -.. automethod:: uproot.tree.TTreeMethods.mempartitions +.. automethod:: uproot3.tree.TTreeMethods.mempartitions array ^^^^^ -.. automethod:: uproot.tree.TTreeMethods.array +.. automethod:: uproot3.tree.TTreeMethods.array arrays ^^^^^^ -.. automethod:: uproot.tree.TTreeMethods.arrays +.. automethod:: uproot3.tree.TTreeMethods.arrays lazyarray ^^^^^^^^^ -.. automethod:: uproot.tree.TTreeMethods.lazyarray +.. automethod:: uproot3.tree.TTreeMethods.lazyarray lazyarrays ^^^^^^^^^^ -.. automethod:: uproot.tree.TTreeMethods.lazyarrays +.. automethod:: uproot3.tree.TTreeMethods.lazyarrays iterate ^^^^^^^ -.. automethod:: uproot.tree.TTreeMethods.iterate +.. automethod:: uproot3.tree.TTreeMethods.iterate pandas ^^^^^^ TTree objects can be converted into `Pandas `__ DataFrames. -.. autoattribute:: uproot.tree.TTreeMethods.pandas +.. autoattribute:: uproot3.tree.TTreeMethods.pandas :annotation: = methods for interacting with Pandas. -.. automethod:: uproot._connect._pandas.TTreeMethods_pandas.df +.. automethod:: uproot3._connect._pandas.TTreeMethods_pandas.df -uproot.tree.TBranchMethods +uproot3.tree.TBranchMethods -------------------------- -Every class read from a ROOT file named "TBranch" is mixed in with :py:class:`TBranchMethods ` so that the following methods are available. Subclasses of "TBranch" (such as "TBranchElement") inherit the mix-in. +Every class read from a ROOT file named "TBranch" is mixed in with :py:class:`TBranchMethods ` so that the following methods are available. Subclasses of "TBranch" (such as "TBranchElement") inherit the mix-in. -.. autoclass:: uproot.tree.TBranchMethods +.. autoclass:: uproot3.tree.TBranchMethods branch accessors ^^^^^^^^^^^^^^^^ -.. automethod:: uproot.tree.TBranchMethods.get +.. automethod:: uproot3.tree.TBranchMethods.get -.. automethod:: uproot.tree.TBranchMethods.iterkeys +.. automethod:: uproot3.tree.TBranchMethods.iterkeys -.. automethod:: uproot.tree.TBranchMethods.itervalues +.. automethod:: uproot3.tree.TBranchMethods.itervalues -.. automethod:: uproot.tree.TBranchMethods.iteritems +.. automethod:: uproot3.tree.TBranchMethods.iteritems -.. automethod:: uproot.tree.TBranchMethods.keys +.. 
automethod:: uproot3.tree.TBranchMethods.keys -.. automethod:: uproot.tree.TBranchMethods.values +.. automethod:: uproot3.tree.TBranchMethods.values -.. automethod:: uproot.tree.TBranchMethods.items +.. automethod:: uproot3.tree.TBranchMethods.items -.. automethod:: uproot.tree.TBranchMethods.allkeys +.. automethod:: uproot3.tree.TBranchMethods.allkeys -.. automethod:: uproot.tree.TBranchMethods.allvalues +.. automethod:: uproot3.tree.TBranchMethods.allvalues -.. automethod:: uproot.tree.TBranchMethods.allitems +.. automethod:: uproot3.tree.TBranchMethods.allitems branch information ^^^^^^^^^^^^^^^^^^ -.. automethod:: uproot.tree.TBranchMethods.uncompressedbytes +.. automethod:: uproot3.tree.TBranchMethods.uncompressedbytes -.. automethod:: uproot.tree.TBranchMethods.compressedbytes +.. automethod:: uproot3.tree.TBranchMethods.compressedbytes -.. automethod:: uproot.tree.TBranchMethods.compressionratio +.. automethod:: uproot3.tree.TBranchMethods.compressionratio -.. automethod:: uproot.tree.TBranchMethods.numitems +.. automethod:: uproot3.tree.TBranchMethods.numitems -.. automethod:: uproot.tree.TBranchMethods.mempartitions +.. automethod:: uproot3.tree.TBranchMethods.mempartitions basket information ^^^^^^^^^^^^^^^^^^ -.. automethod:: uproot.tree.TBranchMethods.basket_entrystart +.. automethod:: uproot3.tree.TBranchMethods.basket_entrystart -.. automethod:: uproot.tree.TBranchMethods.basket_entrystop +.. automethod:: uproot3.tree.TBranchMethods.basket_entrystop -.. automethod:: uproot.tree.TBranchMethods.basket_numentries +.. automethod:: uproot3.tree.TBranchMethods.basket_numentries -.. automethod:: uproot.tree.TBranchMethods.basket_uncompressedbytes +.. automethod:: uproot3.tree.TBranchMethods.basket_uncompressedbytes -.. automethod:: uproot.tree.TBranchMethods.basket_compressedbytes +.. automethod:: uproot3.tree.TBranchMethods.basket_compressedbytes -.. automethod:: uproot.tree.TBranchMethods.basket_numitems +.. automethod:: uproot3.tree.TBranchMethods.basket_numitems array ^^^^^ -.. automethod:: uproot.tree.TBranchMethods.array +.. automethod:: uproot3.tree.TBranchMethods.array lazyarray ^^^^^^^^^ -.. automethod:: uproot.tree.TBranchMethods.lazyarray +.. automethod:: uproot3.tree.TBranchMethods.lazyarray basket ^^^^^^ -.. automethod:: uproot.tree.TBranchMethods.basket +.. automethod:: uproot3.tree.TBranchMethods.basket baskets ^^^^^^^ -.. automethod:: uproot.tree.TBranchMethods.baskets +.. automethod:: uproot3.tree.TBranchMethods.baskets iterate_baskets ^^^^^^^^^^^^^^^ -.. automethod:: uproot.tree.TBranchMethods.iterate_baskets +.. 
automethod:: uproot3.tree.TBranchMethods.iterate_baskets diff --git a/requirements.txt b/requirements.txt index e99072fd..2bd718a1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ numpy>=1.13.1 -awkward>=0.12.0,<1.0 -uproot-methods>=0.7.0 +awkward0 +uproot-methods>=0.9.1 cachetools backports.lzma;python_version<"3.3" diff --git a/setup.py b/setup.py index b3c4c180..3b8b613b 100755 --- a/setup.py +++ b/setup.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE import sys import os.path @@ -10,15 +10,15 @@ def get_version(): g = {} - exec(open(os.path.join("uproot", "version.py")).read(), g) + exec(open(os.path.join("uproot3", "version.py")).read(), g) return g["__version__"] def get_description(): description = open("README.rst", "rb").read().decode("utf8", "ignore") - before = """.. image:: https://raw.githubusercontent.com/scikit-hep/uproot/master/docs/source/logo-300px.png + before = """.. image:: https://raw.githubusercontent.com/scikit-hep/uproot3/master/docs/source/logo-300px.png :alt: uproot - :target: https://github.com/scikit-hep/uproot + :target: https://github.com/scikit-hep/uproot3 """ @@ -28,9 +28,9 @@ def get_description(): start_replaceplots = middle.index(".. inclusion-marker-replaceplots-start") stop_replaceplots = middle.index(".. inclusion-marker-replaceplots-stop") + len(".. inclusion-marker-replaceplots-stop") middle = middle[:start_replaceplots] + """ -.. image:: https://raw.githubusercontent.com/scikit-hep/uproot/master/docs/root-none-muon.png +.. image:: https://raw.githubusercontent.com/scikit-hep/uproot3/master/docs/root-none-muon.png :width: 350 px -.. image:: https://raw.githubusercontent.com/scikit-hep/uproot/master/docs/rootnumpy-none-muon.png +.. image:: https://raw.githubusercontent.com/scikit-hep/uproot3/master/docs/rootnumpy-none-muon.png :width: 350 px """ + middle[stop_replaceplots:] @@ -39,95 +39,66 @@ def get_description(): Tutorial ======== -See the `project homepage `__ for a `tutorial `__. +See the `project homepage `__ for a `tutorial `__. -Run `that tutorial `__ on Binder. +Run `that tutorial `__ on Binder. **Tutorial contents:** -* `Introduction `__ -* `What is uproot? `__ -* `Exploring a file `__ +* `Introduction `__ +* `What is Uproot? 
`__ +* `Exploring a file `__ - - `Compressed objects in ROOT files `__ - - `Exploring a TTree `__ - - `Some terminology `__ + - `Compressed objects in ROOT files `__ + - `Exploring a TTree `__ + - `Some terminology `__ -* `Reading arrays from a TTree `__ -* `Caching data `__ +* `Reading arrays from a TTree `__ +* `Caching data `__ - - `Automatically managed caches `__ - - `Caching at all levels of abstraction `__ + - `Automatically managed caches `__ + - `Caching at all levels of abstraction `__ -* `Lazy arrays `__ +* `Lazy arrays `__ - - `Lazy array of many files `__ - - `Lazy arrays with caching `__ - - `Lazy arrays as lightweight skims `__ - - `Lazy arrays in Dask `__ + - `Lazy array of many files `__ + - `Lazy arrays with caching `__ + - `Lazy arrays as lightweight skims `__ + - `Lazy arrays in Dask `__ -* `Iteration `__ +* `Iteration `__ - - `Filenames and entry numbers while iterating `__ - - `Limiting the number of entries to be read `__ - - `Controlling lazy chunk and iteration step sizes `__ - - `Caching and iteration `__ + - `Filenames and entry numbers while iterating `__ + - `Limiting the number of entries to be read `__ + - `Controlling lazy chunk and iteration step sizes `__ + - `Caching and iteration `__ -* `Changing the output container type `__ -* `Filling Pandas DataFrames `__ -* `Selecting and interpreting branches `__ +* `Changing the output container type `__ +* `Filling Pandas DataFrames `__ +* `Selecting and interpreting branches `__ - - `TBranch interpretations `__ - - `Reading data into a preexisting array `__ - - `Passing many new interpretations in one call `__ - - `Multiple values per event: fixed size arrays `__ - - `Multiple values per event: leaf-lists `__ - - `Multiple values per event: jagged arrays `__ - - `Jagged array performance `__ - - `Special physics objects: Lorentz vectors `__ - - `Variable-width values: strings `__ - - `Arbitrary objects in TTrees `__ - - `Doubly nested jagged arrays (i.e. std::vector>) `__ + - `TBranch interpretations `__ + - `Reading data into a preexisting array `__ + - `Passing many new interpretations in one call `__ + - `Multiple values per event: fixed size arrays `__ + - `Multiple values per event: leaf-lists `__ + - `Multiple values per event: jagged arrays `__ + - `Jagged array performance `__ + - `Special physics objects: Lorentz vectors `__ + - `Variable-width values: strings `__ + - `Arbitrary objects in TTrees `__ + - `Doubly nested jagged arrays (i.e. 
std::vector>) `__ -* `Parallel array reading `__ -* `Histograms, TProfiles, TGraphs, and others `__ -* `Creating and writing data to ROOT files `__ +* `Parallel array reading `__ +* `Histograms, TProfiles, TGraphs, and others `__ +* `Creating and writing data to ROOT files `__ - - `Writing histograms `__ - - `Writing TTrees `__ - -Reference documentation -======================= - -* `Opening files `__ - - - `uproot.open `__ - - `uproot.xrootd `__ - - `uproot.http `__ - - `uproot.iterate `__ - - `uproot.pandas.iterate `__ - - `uproot.lazyarray(s) `__ - - `uproot.daskarray/daskframe `__ - - `uproot.numentries `__ - -* `ROOT I/O `__ - - - `uproot.rootio.ROOTDirectory `__ - - `uproot.rootio.ROOTObject `__ - - `uproot.rootio.ROOTStreamedObject `__ - -* `TTree Handling `__ - - - `uproot.tree.TTreeMethods `__ - - `uproot.tree.TBranchMethods `__ - -* `Interpretation `__ -* `Caches `__ -* `Parallel I/O `__ + - `Writing histograms `__ + - `Writing TTrees `__ """ return before + middle + after -setup(name = "uproot", +setup(name = "uproot3", version = get_version(), packages = find_packages(exclude = ["tests"]), scripts = [], @@ -137,19 +108,19 @@ def get_description(): author_email = "pivarski@princeton.edu", maintainer = "Jim Pivarski (IRIS-HEP)", maintainer_email = "pivarski@princeton.edu", - url = "https://github.com/scikit-hep/uproot", - download_url = "https://github.com/scikit-hep/uproot/releases", + url = "https://github.com/scikit-hep/uproot3", + download_url = "https://github.com/scikit-hep/uproot3/releases", license = "BSD 3-clause", test_suite = "tests", python_requires = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*", - install_requires = ["numpy>=1.13.1", "awkward>=0.12.0,<1.0", "uproot-methods>=0.7.0", "cachetools"], + install_requires = ["numpy>=1.13.1", "awkward0", "uproot-methods>=0.9.1", "cachetools"], setup_requires = ["pytest-runner"], extras_require = { "testing": ["pytest>=3.9", "pkgconfig", "lz4", "zstandard", 'backports.lzma;python_version<"3.3"', "xxhash", "mock", "requests"], "compress": ["lz4", "zstandard", 'backports.lzma;python_version<"3.3"', "xxhash"], }, classifiers = [ - "Development Status :: 5 - Production/Stable", + "Development Status :: 7 - Inactive", "Intended Audience :: Developers", "Intended Audience :: Information Technology", "Intended Audience :: Science/Research", diff --git a/tests/__init__.py b/tests/__init__.py index 6ea34656..18375fab 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,3 +1,3 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE diff --git a/tests/test_cache.py b/tests/test_cache.py index 5752ca81..3546224e 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -1,12 +1,12 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE -import uproot +import uproot3 class Test(object): def test_flat_array(self): - branch = uproot.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["i8"] + branch = uproot3.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["i8"] expectation = [-15, -14, -13, -12, -11, -10, -9, -8, -7, -6, -5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14] cache = {} @@ -34,7 +34,7 @@ def test_flat_array(self): keycache = {} def test_regular_array(self): - branch = 
uproot.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["ai8"] + branch = uproot3.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["ai8"] expectation = [[-14, -13, -12], [-13, -12, -11], [-12, -11, -10], [-11, -10, -9], [-10, -9, -8], [-9, -8, -7], [-8, -7, -6], [-7, -6, -5], [-6, -5, -4], [-5, -4, -3], [-4, -3, -2], [-3, -2, -1], [-2, -1, 0], [-1, 0, 1], [0, 1, 2], [1, 2, 3], [2, 3, 4], [3, 4, 5], [4, 5, 6], [5, 6, 7], [6, 7, 8], [7, 8, 9], [8, 9, 10], [9, 10, 11], [10, 11, 12], [11, 12, 13], [12, 13, 14], [13, 14, 15], [14, 15, 16], [15, 16, 17]] cache = {} @@ -62,7 +62,7 @@ def test_regular_array(self): keycache = {} def test_irregular_array(self): - branch = uproot.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["Ai8"] + branch = uproot3.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["Ai8"] expectation = [[], [-15], [-15, -13], [-15, -13, -11], [-15, -13, -11, -9], [], [-10], [-10, -8], [-10, -8, -6], [-10, -8, -6, -4], [], [-5], [-5, -3], [-5, -3, -1], [-5, -3, -1, 1], [], [0], [0, 2], [0, 2, 4], [0, 2, 4, 6], [], [5], [5, 7], [5, 7, 9], [5, 7, 9, 11], [], [10], [10, 12], [10, 12, 14], [10, 12, 14, 16]] assert [len(x) for x in expectation] == [0, 1, 2, 3, 4] * 6 @@ -91,7 +91,7 @@ def test_irregular_array(self): keycache = {} def test_strings_array(self): - branch = uproot.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["str"] + branch = uproot3.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["str"] expectation = [b"hey-0", b"hey-1", b"hey-2", b"hey-3", b"hey-4", b"hey-5", b"hey-6", b"hey-7", b"hey-8", b"hey-9", b"hey-10", b"hey-11", b"hey-12", b"hey-13", b"hey-14", b"hey-15", b"hey-16", b"hey-17", b"hey-18", b"hey-19", b"hey-20", b"hey-21", b"hey-22", b"hey-23", b"hey-24", b"hey-25", b"hey-26", b"hey-27", b"hey-28", b"hey-29"] cache = {} diff --git a/tests/test_compression.py b/tests/test_compression.py index 1cc9e080..48b985f8 100644 --- a/tests/test_compression.py +++ b/tests/test_compression.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE import pytest try: @@ -9,76 +9,76 @@ lzma = pytest.importorskip('backports.lzma') lz4 = pytest.importorskip('lz4') zstandard = pytest.importorskip('zstandard') -import uproot +import uproot3 class Test(object): def test_compression_identity(self): - assert uproot.open("tests/samples/Zmumu-zlib.root").compression.algoname == "zlib" - assert uproot.open("tests/samples/Zmumu-zlib.root").compression.level == 4 + assert uproot3.open("tests/samples/Zmumu-zlib.root").compression.algoname == "zlib" + assert uproot3.open("tests/samples/Zmumu-zlib.root").compression.level == 4 - assert uproot.open("tests/samples/Zmumu-lzma.root").compression.algoname == "lzma" - assert uproot.open("tests/samples/Zmumu-lzma.root").compression.level == 4 + assert uproot3.open("tests/samples/Zmumu-lzma.root").compression.algoname == "lzma" + assert uproot3.open("tests/samples/Zmumu-lzma.root").compression.level == 4 - assert uproot.open("tests/samples/Zmumu-lz4.root").compression.algoname == "lz4" - assert uproot.open("tests/samples/Zmumu-lz4.root").compression.level == 4 + assert uproot3.open("tests/samples/Zmumu-lz4.root").compression.algoname == "lz4" + assert uproot3.open("tests/samples/Zmumu-lz4.root").compression.level == 4 - assert uproot.open("tests/samples/Zmumu-zstd.root").compression.algoname == "zstd" - 
assert uproot.open("tests/samples/Zmumu-zstd.root").compression.level == 5 + assert uproot3.open("tests/samples/Zmumu-zstd.root").compression.algoname == "zstd" + assert uproot3.open("tests/samples/Zmumu-zstd.root").compression.level == 5 - assert uproot.open("tests/samples/Zmumu-uncompressed.root").compression.level == 0 + assert uproot3.open("tests/samples/Zmumu-uncompressed.root").compression.level == 0 - assert uproot.open("tests/samples/HZZ-zlib.root").compression.algoname == "zlib" - assert uproot.open("tests/samples/HZZ-zlib.root").compression.level == 4 + assert uproot3.open("tests/samples/HZZ-zlib.root").compression.algoname == "zlib" + assert uproot3.open("tests/samples/HZZ-zlib.root").compression.level == 4 - assert uproot.open("tests/samples/HZZ-lzma.root").compression.algoname == "lzma" - assert uproot.open("tests/samples/HZZ-lzma.root").compression.level == 4 + assert uproot3.open("tests/samples/HZZ-lzma.root").compression.algoname == "lzma" + assert uproot3.open("tests/samples/HZZ-lzma.root").compression.level == 4 - assert uproot.open("tests/samples/HZZ-lz4.root").compression.algoname == "lz4" - assert uproot.open("tests/samples/HZZ-lz4.root").compression.level == 4 + assert uproot3.open("tests/samples/HZZ-lz4.root").compression.algoname == "lz4" + assert uproot3.open("tests/samples/HZZ-lz4.root").compression.level == 4 - assert uproot.open("tests/samples/HZZ-zstd.root").compression.algoname == "zstd" - assert uproot.open("tests/samples/HZZ-zstd.root").compression.level == 5 + assert uproot3.open("tests/samples/HZZ-zstd.root").compression.algoname == "zstd" + assert uproot3.open("tests/samples/HZZ-zstd.root").compression.level == 5 - assert uproot.open("tests/samples/HZZ-uncompressed.root").compression.level == 0 + assert uproot3.open("tests/samples/HZZ-uncompressed.root").compression.level == 0 def test_compression_keys(self): - keys = [(n, cls._classname) for n, cls in uproot.open("tests/samples/Zmumu-uncompressed.root").allclasses()] - assert [(n, cls._classname) for n, cls in uproot.open("tests/samples/Zmumu-zlib.root").allclasses()] == keys - assert [(n, cls._classname) for n, cls in uproot.open("tests/samples/Zmumu-lzma.root").allclasses()] == keys - assert [(n, cls._classname) for n, cls in uproot.open("tests/samples/Zmumu-lz4.root").allclasses()] == keys - assert [(n, cls._classname) for n, cls in uproot.open("tests/samples/Zmumu-zstd.root").allclasses()] == keys - - keys = [(n, cls._classname) for n, cls in uproot.open("tests/samples/HZZ-uncompressed.root").allclasses()] - assert [(n, cls._classname) for n, cls in uproot.open("tests/samples/HZZ-zlib.root").allclasses()] == keys - assert [(n, cls._classname) for n, cls in uproot.open("tests/samples/HZZ-lzma.root").allclasses()] == keys - assert [(n, cls._classname) for n, cls in uproot.open("tests/samples/HZZ-lz4.root").allclasses()] == keys - assert [(n, cls._classname) for n, cls in uproot.open("tests/samples/HZZ-zstd.root").allclasses()] == keys + keys = [(n, cls._classname) for n, cls in uproot3.open("tests/samples/Zmumu-uncompressed.root").allclasses()] + assert [(n, cls._classname) for n, cls in uproot3.open("tests/samples/Zmumu-zlib.root").allclasses()] == keys + assert [(n, cls._classname) for n, cls in uproot3.open("tests/samples/Zmumu-lzma.root").allclasses()] == keys + assert [(n, cls._classname) for n, cls in uproot3.open("tests/samples/Zmumu-lz4.root").allclasses()] == keys + assert [(n, cls._classname) for n, cls in uproot3.open("tests/samples/Zmumu-zstd.root").allclasses()] == keys + + keys = [(n, 
cls._classname) for n, cls in uproot3.open("tests/samples/HZZ-uncompressed.root").allclasses()] + assert [(n, cls._classname) for n, cls in uproot3.open("tests/samples/HZZ-zlib.root").allclasses()] == keys + assert [(n, cls._classname) for n, cls in uproot3.open("tests/samples/HZZ-lzma.root").allclasses()] == keys + assert [(n, cls._classname) for n, cls in uproot3.open("tests/samples/HZZ-lz4.root").allclasses()] == keys + assert [(n, cls._classname) for n, cls in uproot3.open("tests/samples/HZZ-zstd.root").allclasses()] == keys def test_compression_branches(self): - branches = list(uproot.open("tests/samples/Zmumu-uncompressed.root")["events"].keys()) - assert list(uproot.open("tests/samples/Zmumu-zlib.root")["events"].keys()) == branches - assert list(uproot.open("tests/samples/Zmumu-lzma.root")["events"].keys()) == branches - assert list(uproot.open("tests/samples/Zmumu-lz4.root")["events"].keys()) == branches - assert list(uproot.open("tests/samples/Zmumu-zstd.root")["events"].keys()) == branches - - branches = list(uproot.open("tests/samples/HZZ-uncompressed.root")["events"].keys()) - assert list(uproot.open("tests/samples/HZZ-zlib.root")["events"].keys()) == branches - assert list(uproot.open("tests/samples/HZZ-lzma.root")["events"].keys()) == branches - assert list(uproot.open("tests/samples/HZZ-lz4.root")["events"].keys()) == branches - assert list(uproot.open("tests/samples/HZZ-zstd.root")["events"].keys()) == branches + branches = list(uproot3.open("tests/samples/Zmumu-uncompressed.root")["events"].keys()) + assert list(uproot3.open("tests/samples/Zmumu-zlib.root")["events"].keys()) == branches + assert list(uproot3.open("tests/samples/Zmumu-lzma.root")["events"].keys()) == branches + assert list(uproot3.open("tests/samples/Zmumu-lz4.root")["events"].keys()) == branches + assert list(uproot3.open("tests/samples/Zmumu-zstd.root")["events"].keys()) == branches + + branches = list(uproot3.open("tests/samples/HZZ-uncompressed.root")["events"].keys()) + assert list(uproot3.open("tests/samples/HZZ-zlib.root")["events"].keys()) == branches + assert list(uproot3.open("tests/samples/HZZ-lzma.root")["events"].keys()) == branches + assert list(uproot3.open("tests/samples/HZZ-lz4.root")["events"].keys()) == branches + assert list(uproot3.open("tests/samples/HZZ-zstd.root")["events"].keys()) == branches def test_compression_content1(self): - for name, array in uproot.open("tests/samples/Zmumu-uncompressed.root")["events"].arrays(["Type", "Event", "E1", "px1", "Q1", "M"]).items(): + for name, array in uproot3.open("tests/samples/Zmumu-uncompressed.root")["events"].arrays(["Type", "Event", "E1", "px1", "Q1", "M"]).items(): array = array.tolist() - assert uproot.open("tests/samples/Zmumu-zlib.root")["events"].array(name).tolist() == array - assert uproot.open("tests/samples/Zmumu-lzma.root")["events"].array(name).tolist() == array - assert uproot.open("tests/samples/Zmumu-lz4.root")["events"].array(name).tolist() == array - assert uproot.open("tests/samples/Zmumu-zstd.root")["events"].array(name).tolist() == array + assert uproot3.open("tests/samples/Zmumu-zlib.root")["events"].array(name).tolist() == array + assert uproot3.open("tests/samples/Zmumu-lzma.root")["events"].array(name).tolist() == array + assert uproot3.open("tests/samples/Zmumu-lz4.root")["events"].array(name).tolist() == array + assert uproot3.open("tests/samples/Zmumu-zstd.root")["events"].array(name).tolist() == array def test_compression_content2(self): - array = 
uproot.open("tests/samples/HZZ-uncompressed.root")["events"].array("Electron_Px").tolist() - assert uproot.open("tests/samples/HZZ-zlib.root")["events"].array("Electron_Px").tolist() == array - assert uproot.open("tests/samples/HZZ-lzma.root")["events"].array("Electron_Px").tolist() == array - assert uproot.open("tests/samples/HZZ-lz4.root")["events"].array("Electron_Px").tolist() == array - assert uproot.open("tests/samples/HZZ-zstd.root")["events"].array("Electron_Px").tolist() == array + array = uproot3.open("tests/samples/HZZ-uncompressed.root")["events"].array("Electron_Px").tolist() + assert uproot3.open("tests/samples/HZZ-zlib.root")["events"].array("Electron_Px").tolist() == array + assert uproot3.open("tests/samples/HZZ-lzma.root")["events"].array("Electron_Px").tolist() == array + assert uproot3.open("tests/samples/HZZ-lz4.root")["events"].array("Electron_Px").tolist() == array + assert uproot3.open("tests/samples/HZZ-zstd.root")["events"].array("Electron_Px").tolist() == array diff --git a/tests/test_http.py b/tests/test_http.py index f7255693..9cc31e9f 100644 --- a/tests/test_http.py +++ b/tests/test_http.py @@ -1,18 +1,18 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE import pytest import mock HTTPError = pytest.importorskip('requests.exceptions').HTTPError -import uproot +import uproot3 FILE = "foriter" LOCAL = "tests/samples/{FILE}.root".format(FILE=FILE) -URL = "http://scikit-hep.org/uproot/examples/{FILE}.root".format(FILE=FILE) -URL_AUTH = "http://scikit-hep.org/uproot/authentication/{FILE}.root".format(FILE=FILE) -AUTH = ("scikit-hep", "uproot") +URL = "http://scikit-hep.org/uproot3/examples/{FILE}.root".format(FILE=FILE) +URL_AUTH = "http://scikit-hep.org/uproot3/authentication/{FILE}.root".format(FILE=FILE) +AUTH = ("scikit-hep", "uproot3") def mock_get_local_instead_of_http(url="", headers={}, auth=None, **kwargs): class MockResponse: @@ -41,21 +41,21 @@ def raise_for_status(self): @mock.patch("requests.get", mock_get_local_instead_of_http) class Test(object): def test_no_auth_needed_no_auth(self): - f = uproot.open(URL) - assert type(f) == uproot.rootio.ROOTDirectory + f = uproot3.open(URL) + assert type(f) == uproot3.rootio.ROOTDirectory def test_no_auth_needed_with_auth(self): - f = uproot.open(URL, httpsource={"auth": AUTH}) - assert type(f) == uproot.rootio.ROOTDirectory + f = uproot3.open(URL, httpsource={"auth": AUTH}) + assert type(f) == uproot3.rootio.ROOTDirectory def test_auth_needed_no_auth(self): with pytest.raises(HTTPError): - f = uproot.open(URL_AUTH) + f = uproot3.open(URL_AUTH) def test_auth_needed_correct_auth(self): - f = uproot.open(URL_AUTH, httpsource={"auth": AUTH}) - assert type(f) == uproot.rootio.ROOTDirectory + f = uproot3.open(URL_AUTH, httpsource={"auth": AUTH}) + assert type(f) == uproot3.rootio.ROOTDirectory def test_auth_needed_wrong_auth(self): with pytest.raises(HTTPError): - f = uproot.open(URL_AUTH, httpsource={"auth": ("", "")}) + f = uproot3.open(URL_AUTH, httpsource={"auth": ("", "")}) diff --git a/tests/test_issues.py b/tests/test_issues.py index a49c3c0e..48606ed0 100644 --- a/tests/test_issues.py +++ b/tests/test_issues.py @@ -1,14 +1,14 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE import sys import pytest import numpy -import 
uproot -import awkward +import uproot3 +import awkward0 import uproot_methods.classes.TVector3 import uproot_methods.classes.TLorentzVector @@ -16,7 +16,7 @@ class Test(object): def test_issue21(self): - t = uproot.open("tests/samples/issue21.root")["nllscan"] + t = uproot3.open("tests/samples/issue21.root")["nllscan"] ### Explicit recover removed # assert t.array("mH").tolist() == [] @@ -50,25 +50,25 @@ def test_issue21(self): ] def test_issue30(self): - uproot.open("tests/samples/issue30.root") + uproot3.open("tests/samples/issue30.root") def test_issue31(self): - t = uproot.open("tests/samples/issue31.root")["T"] + t = uproot3.open("tests/samples/issue31.root")["T"] assert t.array("name").tolist() == [ b"one", b"two", b"three", b"four", b"five" ] def test_issue33(self): - h = uproot.open("tests/samples/issue33.root")["cutflow"] + h = uproot3.open("tests/samples/issue33.root")["cutflow"] assert h.xlabels == [ "Dijet", "MET", "MuonVeto", "IsoMuonTrackVeto", "ElectronVeto", "IsoElectronTrackVeto", "IsoPionTrackVeto" ] def test_issue38(self): - before_hadd = uproot.open( + before_hadd = uproot3.open( "tests/samples/issue38a.root")["ntupler/tree"] - after_hadd = uproot.open("tests/samples/issue38b.root")["ntupler/tree"] + after_hadd = uproot3.open("tests/samples/issue38b.root")["ntupler/tree"] before = before_hadd.arrays() after = after_hadd.arrays() @@ -80,21 +80,21 @@ def test_issue38(self): assert before[key].tolist() * 3 == after[key].tolist() def test_issue46(self): - t = uproot.open("tests/samples/issue46.root")["tree"] - t["evt"].array(uproot.asdebug) + t = uproot3.open("tests/samples/issue46.root")["tree"] + t["evt"].array(uproot3.asdebug) def test_issue49(self): - t = uproot.open("tests/samples/issue49.root")["nllscan"] + t = uproot3.open("tests/samples/issue49.root")["nllscan"] t.arrays() def test_issue54(self): - h = uproot.open("tests/samples/hepdata-example.root")["hpx"] + h = uproot3.open("tests/samples/hepdata-example.root")["hpx"] assert h._fFunctions[0]._fParent is h def test_issue55(self): - withoffsets = uproot.open( + withoffsets = uproot3.open( "tests/samples/small-dy-withoffsets.root")["tree"] - nooffsets = uproot.open( + nooffsets = uproot3.open( "tests/samples/small-dy-nooffsets.root")["tree"] assert numpy.array_equal(withoffsets.array("nJet"), nooffsets.array("nJet")) @@ -119,7 +119,7 @@ def equal(left, right): assert equal(withoffsets.array("event"), nooffsets.array("event")) def test_issue57(self): - tree = uproot.open("tests/samples/issue57.root")["outtree"] + tree = uproot3.open("tests/samples/issue57.root")["outtree"] for x in tree["sel_lep"].array(): for y in x: assert isinstance( @@ -134,7 +134,7 @@ def test_issue57(self): y._fP, uproot_methods.classes.TVector3.Methods) def test_issue60(self): - t = uproot.open("tests/samples/issue60.root")["nllscan"] + t = uproot3.open("tests/samples/issue60.root")["nllscan"] assert t["status"].numbaskets == 2 assert t["mH"].numbaskets == 3 @@ -158,18 +158,18 @@ def test_issue60(self): ] def test_issue63(self): - t = uproot.open("tests/samples/issue63.root")["WtLoop_meta"] + t = uproot3.open("tests/samples/issue63.root")["WtLoop_meta"] assert t["initialState"].array().tolist() == [b"Wt"] assert t["generator"].array().tolist() == [b"PowhegPythia6"] assert t["sampleType"].array().tolist() == [b"Nominal"] assert t["campaign"].array().tolist() == [b"MC16a"] def test_issue64(self): - t = uproot.open("tests/samples/issue64.root")["events/events"] + t = uproot3.open("tests/samples/issue64.root")["events/events"] assert 
t["e_pri"].array().tolist() == [0.00698000006377697] * 500 def test_issue66(self): - f = uproot.open("tests/samples/issue66.root") + f = uproot3.open("tests/samples/issue66.root") h, = f.values() assert h.values.tolist() == [ 4814.0, 45.0, 45.0, 25.0, 15.0, 4.0, 0.0, 6.0, 7.0, 5.0, 3.0, 3.0, @@ -194,11 +194,11 @@ def test_issue66(self): ] def test_issue70(self): - f = uproot.open("tests/samples/issue70.root") + f = uproot3.open("tests/samples/issue70.root") assert f.keys() == [] def test_issue74(self): - t = uproot.open("tests/samples/issue74.root")["Events"] + t = uproot3.open("tests/samples/issue74.root")["Events"] assert all( isinstance(x[0], uproot_methods.classes.TVector3.Methods) for x in t.array("bees.xyzPosition")) @@ -207,26 +207,26 @@ def test_issue74(self): 1.0, 2.0, -1.0) def test_issue76(self): - t = uproot.open("tests/samples/issue76.root")["Events"] + t = uproot3.open("tests/samples/issue76.root")["Events"] assert list(t.array("rootStrings")[0]) == [b"2", b"4"] x, y = t.array("rootStrings")[0] - assert isinstance(x, uproot.rootio.TString) + assert isinstance(x, uproot3.rootio.TString) def test_issue79(self): - t = uproot.open("tests/samples/issue79.root")["taus"] + t = uproot3.open("tests/samples/issue79.root")["taus"] assert t["pt"].numbaskets == 2 baskets = numpy.concatenate([t["pt"].basket(0), t["pt"].basket(1)]) assert baskets.shape == (t["pt"].numentries, ) assert numpy.array_equal(baskets, t["pt"].array()) def test_issue96(self): - t = uproot.open("tests/samples/issue96.root")["tree"] + t = uproot3.open("tests/samples/issue96.root")["tree"] assert all( isinstance(x, uproot_methods.classes.TLorentzVector.Methods) for x in t.array("jet1P4")) def test_geant4(self): - f = uproot.open("tests/samples/from-geant4.root") + f = uproot3.open("tests/samples/from-geant4.root") arrays = f["Details"].arrays() assert arrays[b"numgood"][0] == 224 assert [len(x) for x in f["HitStrips"].arrays().values() @@ -236,20 +236,20 @@ def test_geant4(self): ### file is too big to include # def test_issue168(self): - # t = uproot.open("tests/samples/issue168.root")["Events"] + # t = uproot3.open("tests/samples/issue168.root")["Events"] # a1 = t["MRawEvtData.fHiGainFadcSamples"].array(t["MRawEvtData.fHiGainFadcSamples"].interpretation.speedbump(False), entrystop=4) # assert a1[0]._fArray.shape == (108400,) # a2 = t["MRawEvtData.fHiGainPixId"].array(t["MRawEvtData.fHiGainPixId"].interpretation.speedbump(False)) # assert a2[0]._fArray.shape == (1084,) def test_issue187(self): - t = uproot.open("tests/samples/issue187.root")["fTreeV0"] + t = uproot3.open("tests/samples/issue187.root")["fTreeV0"] assert (t.array("fMultiplicity") == -1).all() assert t.array("V0s.fEtaPos")[-3].tolist() == [-0.390625, 0.046875] def test_issue213(self): pytest.importorskip("xxhash") - t = uproot.open("tests/samples/issue213.root")["T"] + t = uproot3.open("tests/samples/issue213.root")["T"] assert t["fMCHits.fPosition"].array().x.tolist() == [ [], [], [], [], [], [], [], [42.17024612426758, 50.63192367553711], [], [], [], [43.292755126953125], [], [], [], [], [], [], [], [], @@ -271,25 +271,25 @@ def test_issue213(self): def test_issue232(self): pytest.importorskip("pandas") - t = uproot.open("tests/samples/issue232.root")["fTreeV0"] + t = uproot3.open("tests/samples/issue232.root")["fTreeV0"] t.pandas.df( ["V0Hyper.fNsigmaHe3Pos", "V0Hyper.fDcaPos2PrimaryVertex"], flatten=True) def test_issue240(self): pytest.importorskip("pyxrootd") - t = uproot.open( + t = uproot3.open( 
"root://eospublic.cern.ch//eos/root-eos/cms_opendata_2012_nanoaod/Run2012B_DoubleMuParked.root" )["Events"] assert (abs(t.array("nMuon", entrystop=100000)) < 50).all() def test_issue243(self): - t = uproot.open("tests/samples/issue243.root")["triggerList"] + t = uproot3.open("tests/samples/issue243.root")["triggerList"] for x in t.array("triggerMap", entrystop=100): assert all(y == 1.0 for y in x.values()) def test_issue243_new(self): - t = uproot.open("tests/samples/issue243-new.root")["triggerList"] + t = uproot3.open("tests/samples/issue243-new.root")["triggerList"] first = t["triggerMap.first"].array() second = t["triggerMap.second"].array() for i in range(t.numentries): @@ -297,10 +297,10 @@ def test_issue243_new(self): assert all(y == 1.0 for y in x.values()) def test_issue327(self): - uproot.open("tests/samples/issue327.root")["DstTree"] + uproot3.open("tests/samples/issue327.root")["DstTree"] def test_issue371(self): - t = uproot.open("tests/samples/issue371.root")["Event"] + t = uproot3.open("tests/samples/issue371.root")["Event"] obj = t["DRIFT_0."].array()[0] assert obj._samplerName == b'DRIFT_0' assert obj._n == 1 @@ -308,7 +308,7 @@ def test_issue371(self): dtype=numpy.float32)[0] def test_issue376_simple(self): - f = uproot.open("tests/samples/from-geant4.root") + f = uproot3.open("tests/samples/from-geant4.root") assert type(f).classname == 'TDirectory' assert f.classname == 'TDirectory' real_class_names = ['TTree'] * 4 + ['TH1D'] * 10 + ['TH2D'] * 5 @@ -321,7 +321,7 @@ def test_issue376_simple(self): assert [value.classname for value in f.values()] == real_class_names def test_issue376_nested(self): - f = uproot.open("tests/samples/nesteddirs.root") + f = uproot3.open("tests/samples/nesteddirs.root") top_level_class_names = ['TDirectory', 'TDirectory'] recursive_class_names = [ 'TDirectory', 'TDirectory', 'TTree', 'TTree', 'TDirectory', 'TTree' @@ -340,8 +340,8 @@ def test_issue376_nested(self): ] == recursive_class_names def test_issue367(self): - t = uproot.open("tests/samples/issue367.root")["tree"] - assert awkward.fromiter( + t = uproot3.open("tests/samples/issue367.root")["tree"] + assert awkward0.fromiter( t.array("weights.second"))[0].counts.tolist() == [ 1000, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 1000, 1000, @@ -351,12 +351,12 @@ def test_issue367(self): def test_issue390(self): pytest.importorskip("pandas") - t = uproot.open("tests/samples/issue390.root")["E"] + t = uproot3.open("tests/samples/issue390.root")["E"] t.pandas.df("hits.*") t.pandas.df("trks.*") def test_issue399(self): - t = uproot.open("tests/samples/issue399.root")["Event"] + t = uproot3.open("tests/samples/issue399.root")["Event"] a = t["Histos.histograms1D"].array() for i in range(t.numentries): assert [x.title for x in a[i]] == [ @@ -366,13 +366,13 @@ def test_issue399(self): ] def test_issue404(self): - t = uproot.open("tests/samples/issue404.root")["Beam"] + t = uproot3.open("tests/samples/issue404.root")["Beam"] assert t["Beam.GMAD::BeamBase.beamParticleName"].array().tolist() == [ b"proton" ] def test_issue124_and_followup_issue419_with_pr420(self): - f = uproot.open("tests/samples/issue124.root") + f = uproot3.open("tests/samples/issue124.root") branch = f[b'KM3NET_TIMESLICE;1'][b'KM3NET_TIMESLICE'] assert branch.interpretation is None assert 0 == branch.compressedbytes() @@ -385,34 +385,34 @@ def test_issue429(self): else: fix = lambda name: name - file = uproot.open("tests/samples/issue429.root") + file = 
uproot3.open("tests/samples/issue429.root") tree = file["data_tr"] branch = tree["data_ana_kk"] - # FIXME: how can uproot.interp.auto.interpret *infer* the 4 bytes of padding? + # FIXME: how can uproot3.interp.auto.interpret *infer* the 4 bytes of padding? dtype = [(fix(x._fName), "float32" if type(x).__name__ == "TLeafF" else "int32") for x in branch._fLeaves] - array = branch.array(uproot.asdtype(dtype + [("padding", "S4")])) + array = branch.array(uproot3.asdtype(dtype + [("padding", "S4")])) assert (array["padding"] == b"\xff\xff\xff\xff").all() def test_issue431(self): - file = uproot.open("tests/samples/issue431.root") + file = uproot3.open("tests/samples/issue431.root") head = file["Head"] assert head._map_3c_string_2c_string_3e_ == {b'DAQ': b'394', b'PDF': b'4 58', b'XSecFile': b'', b'can': b'0 1027 888.4', b'can_user': b'0.00 1027.00 888.40', b'coord_origin': b'0 0 0', b'cut_in': b'0 0 0 0', b'cut_nu': b'100 1e+08 -1 1', b'cut_primary': b'0 0 0 0', b'cut_seamuon': b'0 0 0 0', b'decay': b'doesnt happen', b'detector': b'NOT', b'drawing': b'Volume', b'end_event': b'', b'genhencut': b'2000 0', b'genvol': b'0 1027 888.4 2.649e+09 100000', b'kcut': b'2', b'livetime': b'0 0', b'model': b'1 2 0 1 12', b'muon_desc_file': b'', b'ngen': b'0.1000E+06', b'norma': b'0 0', b'nuflux': b'0 3 0 0.500E+00 0.000E+00 0.100E+01 0.300E+01', b'physics': b'GENHEN 7.2-220514 181116 1138', b'seed': b'GENHEN 3 305765867 0 0', b'simul': b'JSirene 11012 11/17/18 07', b'sourcemode': b'diffuse', b'spectrum': b'-1.4', b'start_run': b'1', b'target': b'isoscalar', b'usedetfile': b'false', b'xlat_user': b'0.63297', b'xparam': b'OFF', b'zed_user': b'0.00 3450.00'} def test_issue434(self): - f = uproot.open("tests/samples/issue434.root") + f = uproot3.open("tests/samples/issue434.root") fromdtype = [("pmt", "u1"), ("tdc", "u4"), ("tot", "u1")] tree = f[b'KM3NET_TIMESLICE_L1'][b'KM3NETDAQ::JDAQTimeslice'] superframes = tree[b'vector'] hits_buffer = superframes[b'vector.buffer'] hits = hits_buffer.lazyarray( - uproot.asjagged( - uproot.astable( - uproot.asdtype(fromdtype, todtype)), skipbytes=6)) + uproot3.asjagged( + uproot3.astable( + uproot3.asdtype(fromdtype, todtype)), skipbytes=6)) assert 486480 == hits['tdc'][0][0] def test_issue438_accessing_memory_mapped_objects_outside_of_context_raises(self): - with uproot.open("tests/samples/issue434.root") as f: + with uproot3.open("tests/samples/issue434.root") as f: a = f['KM3NET_EVENT']['KM3NET_EVENT']['KM3NETDAQ::JDAQPreamble'].array() b = f['KM3NET_EVENT']['KM3NET_EVENT']['KM3NETDAQ::JDAQPreamble'].lazyarray() assert 4 == len(a[0]) @@ -421,7 +421,7 @@ def test_issue438_accessing_memory_mapped_objects_outside_of_context_raises(self def test_issue448(self): pytest.importorskip("pyxrootd") - f = uproot.open('root://eospublic.cern.ch//eos/opendata/cms/Run2010B/MuOnia/AOD/Apr21ReReco-v1/0000/02186E3C-D277-E011-8A05-00215E21D516.root') + f = uproot3.open('root://eospublic.cern.ch//eos/opendata/cms/Run2010B/MuOnia/AOD/Apr21ReReco-v1/0000/02186E3C-D277-E011-8A05-00215E21D516.root') tree = f['Events'] assert len(tree.arrays(entrystop=0)) == 4179 assert len(tree.arrays('recoMuons_muons__RECO.*', entrystop=10)) == 93 @@ -431,7 +431,7 @@ def test_issue448(self): ('l1CaloTowerTree/L1CaloTowerTree', b'L1CaloTowerTree/L1CaloTower/et'), ]) def test_issue447_tree_arrays_omitting_variables(self, treename, branchtest): - with uproot.open("tests/samples/issue447.root") as f: + with uproot3.open("tests/samples/issue447.root") as f: t1 = f[treename] arrays = t1.arrays(recursive=b'/') 
array_keys = arrays.keys() @@ -448,14 +448,14 @@ def test_issue447_recursive_provenance(self): 'tree/b1/b2/b3/b4', ] expectedKeys = sorted([k.encode(encoding='UTF-8') for k in expectedKeys]) - with uproot.open('tests/samples/issue447_recursive.root') as f: + with uproot3.open('tests/samples/issue447_recursive.root') as f: t1 = f['tree'] arrays = t1.arrays(recursive=b'/') assert sorted(list(arrays.keys())) == expectedKeys def test_issue444_subbranche_lookup_with_slash(self): # Uses same test file as issue #447 - with uproot.open("tests/samples/issue447.root") as f: + with uproot3.open("tests/samples/issue447.root") as f: # Access subbranches directly from file assert numpy.all(f['l1CaloTowerEmuTree/L1CaloTowerTree/CaloTP']['nECALTP'].array() == f['l1CaloTowerEmuTree/L1CaloTowerTree/CaloTP/nECALTP'].array()) @@ -471,5 +471,5 @@ def test_issue444_subbranche_lookup_with_slash(self): assert b'nECALTP' in tree.keys(recursive=True) assert b'nECALTP' not in tree.keys(recursive=False) # Specify subbranches in iterate - for arrays in uproot.iterate(["tests/samples/issue447.root"], 'l1CaloTowerEmuTree/L1CaloTowerTree', ['CaloTP/nECALTP']): + for arrays in uproot3.iterate(["tests/samples/issue447.root"], 'l1CaloTowerEmuTree/L1CaloTowerTree', ['CaloTP/nECALTP']): pass diff --git a/tests/test_jagged.py b/tests/test_jagged.py index 9019fea0..a442ca1b 100644 --- a/tests/test_jagged.py +++ b/tests/test_jagged.py @@ -1,10 +1,10 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE import pytest -import uproot +import uproot3 class Test(object): @@ -14,7 +14,7 @@ def sample(self): try: self._sample except AttributeError: - self._sample = uproot.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"] + self._sample = uproot3.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"] return self._sample def test_flatten_False(self): diff --git a/tests/test_rntuple.py b/tests/test_rntuple.py index cb798a75..7b4ea8be 100644 --- a/tests/test_rntuple.py +++ b/tests/test_rntuple.py @@ -1,18 +1,18 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE import os import numpy import pytest -import awkward -import uproot +import awkward0 +import uproot3 class Test(object): def test_read_anchor(self): - f = uproot.open("tests/samples/ntpl001_staff.root") + f = uproot3.open("tests/samples/ntpl001_staff.root") rntuple = f["Staff"] assert rntuple._fVersion == 0 assert rntuple._fSize == 48 diff --git a/tests/test_stlvector.py b/tests/test_stlvector.py index 99efb00a..763f2212 100644 --- a/tests/test_stlvector.py +++ b/tests/test_stlvector.py @@ -1,54 +1,54 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE import os import pytest -import uproot +import uproot3 class Test(object): def runTest(self): pass def test_vector_of_numbers(self): - branch = uproot.open("tests/samples/small-evnt-tree-fullsplit.root")["tree"]["StlVecU32"] + branch = uproot3.open("tests/samples/small-evnt-tree-fullsplit.root")["tree"]["StlVecU32"] a = branch.array() for i in range(100): assert a[i].tolist() == [i] * (i % 10) - branch = 
uproot.open("tests/samples/small-evnt-tree-fullsplit.root")["tree"]["StlVecF64"] + branch = uproot3.open("tests/samples/small-evnt-tree-fullsplit.root")["tree"]["StlVecF64"] a = branch.array() for i in range(100): assert a[i].tolist() == [i] * (i % 10) def test_vector_of_vector_of_numbers(self): - branch = uproot.open("tests/samples/vectorVectorDouble.root")["t"]["x"] + branch = uproot3.open("tests/samples/vectorVectorDouble.root")["t"]["x"] assert branch.array().tolist() == [[], [[], []], [[10.0], [], [10.0, 20.0]], [[20.0, -21.0, -22.0]], [[200.0], [-201.0], [202.0]]] def test_strings1(self): - tree = uproot.open("tests/samples/small-evnt-tree-fullsplit.root")["tree"] + tree = uproot3.open("tests/samples/small-evnt-tree-fullsplit.root")["tree"] assert tree.array("Str").tolist() == [b'evt-000', b'evt-001', b'evt-002', b'evt-003', b'evt-004', b'evt-005', b'evt-006', b'evt-007', b'evt-008', b'evt-009', b'evt-010', b'evt-011', b'evt-012', b'evt-013', b'evt-014', b'evt-015', b'evt-016', b'evt-017', b'evt-018', b'evt-019', b'evt-020', b'evt-021', b'evt-022', b'evt-023', b'evt-024', b'evt-025', b'evt-026', b'evt-027', b'evt-028', b'evt-029', b'evt-030', b'evt-031', b'evt-032', b'evt-033', b'evt-034', b'evt-035', b'evt-036', b'evt-037', b'evt-038', b'evt-039', b'evt-040', b'evt-041', b'evt-042', b'evt-043', b'evt-044', b'evt-045', b'evt-046', b'evt-047', b'evt-048', b'evt-049', b'evt-050', b'evt-051', b'evt-052', b'evt-053', b'evt-054', b'evt-055', b'evt-056', b'evt-057', b'evt-058', b'evt-059', b'evt-060', b'evt-061', b'evt-062', b'evt-063', b'evt-064', b'evt-065', b'evt-066', b'evt-067', b'evt-068', b'evt-069', b'evt-070', b'evt-071', b'evt-072', b'evt-073', b'evt-074', b'evt-075', b'evt-076', b'evt-077', b'evt-078', b'evt-079', b'evt-080', b'evt-081', b'evt-082', b'evt-083', b'evt-084', b'evt-085', b'evt-086', b'evt-087', b'evt-088', b'evt-089', b'evt-090', b'evt-091', b'evt-092', b'evt-093', b'evt-094', b'evt-095', b'evt-096', b'evt-097', b'evt-098', b'evt-099'] def test_strings2(self): - tree = uproot.open("tests/samples/small-evnt-tree-fullsplit.root")["tree"] + tree = uproot3.open("tests/samples/small-evnt-tree-fullsplit.root")["tree"] assert tree.array("StdStr").tolist() == [b'std-000', b'std-001', b'std-002', b'std-003', b'std-004', b'std-005', b'std-006', b'std-007', b'std-008', b'std-009', b'std-010', b'std-011', b'std-012', b'std-013', b'std-014', b'std-015', b'std-016', b'std-017', b'std-018', b'std-019', b'std-020', b'std-021', b'std-022', b'std-023', b'std-024', b'std-025', b'std-026', b'std-027', b'std-028', b'std-029', b'std-030', b'std-031', b'std-032', b'std-033', b'std-034', b'std-035', b'std-036', b'std-037', b'std-038', b'std-039', b'std-040', b'std-041', b'std-042', b'std-043', b'std-044', b'std-045', b'std-046', b'std-047', b'std-048', b'std-049', b'std-050', b'std-051', b'std-052', b'std-053', b'std-054', b'std-055', b'std-056', b'std-057', b'std-058', b'std-059', b'std-060', b'std-061', b'std-062', b'std-063', b'std-064', b'std-065', b'std-066', b'std-067', b'std-068', b'std-069', b'std-070', b'std-071', b'std-072', b'std-073', b'std-074', b'std-075', b'std-076', b'std-077', b'std-078', b'std-079', b'std-080', b'std-081', b'std-082', b'std-083', b'std-084', b'std-085', b'std-086', b'std-087', b'std-088', b'std-089', b'std-090', b'std-091', b'std-092', b'std-093', b'std-094', b'std-095', b'std-096', b'std-097', b'std-098', b'std-099'] def test_strings3(self): - tree = uproot.open("tests/samples/small-evnt-tree-fullsplit.root")["tree"] + tree = 
uproot3.open("tests/samples/small-evnt-tree-fullsplit.root")["tree"] assert tree.array("StlVecStr").tolist() == [[], [b'vec-001'], [b'vec-002', b'vec-002'], [b'vec-003', b'vec-003', b'vec-003'], [b'vec-004', b'vec-004', b'vec-004', b'vec-004'], [b'vec-005', b'vec-005', b'vec-005', b'vec-005', b'vec-005'], [b'vec-006', b'vec-006', b'vec-006', b'vec-006', b'vec-006', b'vec-006'], [b'vec-007', b'vec-007', b'vec-007', b'vec-007', b'vec-007', b'vec-007', b'vec-007'], [b'vec-008', b'vec-008', b'vec-008', b'vec-008', b'vec-008', b'vec-008', b'vec-008', b'vec-008'], [b'vec-009', b'vec-009', b'vec-009', b'vec-009', b'vec-009', b'vec-009', b'vec-009', b'vec-009', b'vec-009'], [], [b'vec-011'], [b'vec-012', b'vec-012'], [b'vec-013', b'vec-013', b'vec-013'], [b'vec-014', b'vec-014', b'vec-014', b'vec-014'], [b'vec-015', b'vec-015', b'vec-015', b'vec-015', b'vec-015'], [b'vec-016', b'vec-016', b'vec-016', b'vec-016', b'vec-016', b'vec-016'], [b'vec-017', b'vec-017', b'vec-017', b'vec-017', b'vec-017', b'vec-017', b'vec-017'], [b'vec-018', b'vec-018', b'vec-018', b'vec-018', b'vec-018', b'vec-018', b'vec-018', b'vec-018'], [b'vec-019', b'vec-019', b'vec-019', b'vec-019', b'vec-019', b'vec-019', b'vec-019', b'vec-019', b'vec-019'], [], [b'vec-021'], [b'vec-022', b'vec-022'], [b'vec-023', b'vec-023', b'vec-023'], [b'vec-024', b'vec-024', b'vec-024', b'vec-024'], [b'vec-025', b'vec-025', b'vec-025', b'vec-025', b'vec-025'], [b'vec-026', b'vec-026', b'vec-026', b'vec-026', b'vec-026', b'vec-026'], [b'vec-027', b'vec-027', b'vec-027', b'vec-027', b'vec-027', b'vec-027', b'vec-027'], [b'vec-028', b'vec-028', b'vec-028', b'vec-028', b'vec-028', b'vec-028', b'vec-028', b'vec-028'], [b'vec-029', b'vec-029', b'vec-029', b'vec-029', b'vec-029', b'vec-029', b'vec-029', b'vec-029', b'vec-029'], [], [b'vec-031'], [b'vec-032', b'vec-032'], [b'vec-033', b'vec-033', b'vec-033'], [b'vec-034', b'vec-034', b'vec-034', b'vec-034'], [b'vec-035', b'vec-035', b'vec-035', b'vec-035', b'vec-035'], [b'vec-036', b'vec-036', b'vec-036', b'vec-036', b'vec-036', b'vec-036'], [b'vec-037', b'vec-037', b'vec-037', b'vec-037', b'vec-037', b'vec-037', b'vec-037'], [b'vec-038', b'vec-038', b'vec-038', b'vec-038', b'vec-038', b'vec-038', b'vec-038', b'vec-038'], [b'vec-039', b'vec-039', b'vec-039', b'vec-039', b'vec-039', b'vec-039', b'vec-039', b'vec-039', b'vec-039'], [], [b'vec-041'], [b'vec-042', b'vec-042'], [b'vec-043', b'vec-043', b'vec-043'], [b'vec-044', b'vec-044', b'vec-044', b'vec-044'], [b'vec-045', b'vec-045', b'vec-045', b'vec-045', b'vec-045'], [b'vec-046', b'vec-046', b'vec-046', b'vec-046', b'vec-046', b'vec-046'], [b'vec-047', b'vec-047', b'vec-047', b'vec-047', b'vec-047', b'vec-047', b'vec-047'], [b'vec-048', b'vec-048', b'vec-048', b'vec-048', b'vec-048', b'vec-048', b'vec-048', b'vec-048'], [b'vec-049', b'vec-049', b'vec-049', b'vec-049', b'vec-049', b'vec-049', b'vec-049', b'vec-049', b'vec-049'], [], [b'vec-051'], [b'vec-052', b'vec-052'], [b'vec-053', b'vec-053', b'vec-053'], [b'vec-054', b'vec-054', b'vec-054', b'vec-054'], [b'vec-055', b'vec-055', b'vec-055', b'vec-055', b'vec-055'], [b'vec-056', b'vec-056', b'vec-056', b'vec-056', b'vec-056', b'vec-056'], [b'vec-057', b'vec-057', b'vec-057', b'vec-057', b'vec-057', b'vec-057', b'vec-057'], [b'vec-058', b'vec-058', b'vec-058', b'vec-058', b'vec-058', b'vec-058', b'vec-058', b'vec-058'], [b'vec-059', b'vec-059', b'vec-059', b'vec-059', b'vec-059', b'vec-059', b'vec-059', b'vec-059', b'vec-059'], [], [b'vec-061'], [b'vec-062', b'vec-062'], [b'vec-063', b'vec-063', 
b'vec-063'], [b'vec-064', b'vec-064', b'vec-064', b'vec-064'], [b'vec-065', b'vec-065', b'vec-065', b'vec-065', b'vec-065'], [b'vec-066', b'vec-066', b'vec-066', b'vec-066', b'vec-066', b'vec-066'], [b'vec-067', b'vec-067', b'vec-067', b'vec-067', b'vec-067', b'vec-067', b'vec-067'], [b'vec-068', b'vec-068', b'vec-068', b'vec-068', b'vec-068', b'vec-068', b'vec-068', b'vec-068'], [b'vec-069', b'vec-069', b'vec-069', b'vec-069', b'vec-069', b'vec-069', b'vec-069', b'vec-069', b'vec-069'], [], [b'vec-071'], [b'vec-072', b'vec-072'], [b'vec-073', b'vec-073', b'vec-073'], [b'vec-074', b'vec-074', b'vec-074', b'vec-074'], [b'vec-075', b'vec-075', b'vec-075', b'vec-075', b'vec-075'], [b'vec-076', b'vec-076', b'vec-076', b'vec-076', b'vec-076', b'vec-076'], [b'vec-077', b'vec-077', b'vec-077', b'vec-077', b'vec-077', b'vec-077', b'vec-077'], [b'vec-078', b'vec-078', b'vec-078', b'vec-078', b'vec-078', b'vec-078', b'vec-078', b'vec-078'], [b'vec-079', b'vec-079', b'vec-079', b'vec-079', b'vec-079', b'vec-079', b'vec-079', b'vec-079', b'vec-079'], [], [b'vec-081'], [b'vec-082', b'vec-082'], [b'vec-083', b'vec-083', b'vec-083'], [b'vec-084', b'vec-084', b'vec-084', b'vec-084'], [b'vec-085', b'vec-085', b'vec-085', b'vec-085', b'vec-085'], [b'vec-086', b'vec-086', b'vec-086', b'vec-086', b'vec-086', b'vec-086'], [b'vec-087', b'vec-087', b'vec-087', b'vec-087', b'vec-087', b'vec-087', b'vec-087'], [b'vec-088', b'vec-088', b'vec-088', b'vec-088', b'vec-088', b'vec-088', b'vec-088', b'vec-088'], [b'vec-089', b'vec-089', b'vec-089', b'vec-089', b'vec-089', b'vec-089', b'vec-089', b'vec-089', b'vec-089'], [], [b'vec-091'], [b'vec-092', b'vec-092'], [b'vec-093', b'vec-093', b'vec-093'], [b'vec-094', b'vec-094', b'vec-094', b'vec-094'], [b'vec-095', b'vec-095', b'vec-095', b'vec-095', b'vec-095'], [b'vec-096', b'vec-096', b'vec-096', b'vec-096', b'vec-096', b'vec-096'], [b'vec-097', b'vec-097', b'vec-097', b'vec-097', b'vec-097', b'vec-097', b'vec-097'], [b'vec-098', b'vec-098', b'vec-098', b'vec-098', b'vec-098', b'vec-098', b'vec-098', b'vec-098'], [b'vec-099', b'vec-099', b'vec-099', b'vec-099', b'vec-099', b'vec-099', b'vec-099', b'vec-099', b'vec-099']] @pytest.mark.skipif(os.name == "nt", reason="Windows Python 3 'long' is not struct's 'i', 'l', or 'q'.") def test_unsplit(self): - branch = uproot.open("tests/samples/small-evnt-tree-nosplit.root")["tree"]["evt"] + branch = uproot3.open("tests/samples/small-evnt-tree-nosplit.root")["tree"]["evt"] a = branch.array() assert [x._StlVecStr for x in a] == [[], [b'vec-001'], [b'vec-002', b'vec-002'], [b'vec-003', b'vec-003', b'vec-003'], [b'vec-004', b'vec-004', b'vec-004', b'vec-004'], [b'vec-005', b'vec-005', b'vec-005', b'vec-005', b'vec-005'], [b'vec-006', b'vec-006', b'vec-006', b'vec-006', b'vec-006', b'vec-006'], [b'vec-007', b'vec-007', b'vec-007', b'vec-007', b'vec-007', b'vec-007', b'vec-007'], [b'vec-008', b'vec-008', b'vec-008', b'vec-008', b'vec-008', b'vec-008', b'vec-008', b'vec-008'], [b'vec-009', b'vec-009', b'vec-009', b'vec-009', b'vec-009', b'vec-009', b'vec-009', b'vec-009', b'vec-009'], [], [b'vec-011'], [b'vec-012', b'vec-012'], [b'vec-013', b'vec-013', b'vec-013'], [b'vec-014', b'vec-014', b'vec-014', b'vec-014'], [b'vec-015', b'vec-015', b'vec-015', b'vec-015', b'vec-015'], [b'vec-016', b'vec-016', b'vec-016', b'vec-016', b'vec-016', b'vec-016'], [b'vec-017', b'vec-017', b'vec-017', b'vec-017', b'vec-017', b'vec-017', b'vec-017'], [b'vec-018', b'vec-018', b'vec-018', b'vec-018', b'vec-018', b'vec-018', b'vec-018', b'vec-018'], 
[b'vec-019', b'vec-019', b'vec-019', b'vec-019', b'vec-019', b'vec-019', b'vec-019', b'vec-019', b'vec-019'], [], [b'vec-021'], [b'vec-022', b'vec-022'], [b'vec-023', b'vec-023', b'vec-023'], [b'vec-024', b'vec-024', b'vec-024', b'vec-024'], [b'vec-025', b'vec-025', b'vec-025', b'vec-025', b'vec-025'], [b'vec-026', b'vec-026', b'vec-026', b'vec-026', b'vec-026', b'vec-026'], [b'vec-027', b'vec-027', b'vec-027', b'vec-027', b'vec-027', b'vec-027', b'vec-027'], [b'vec-028', b'vec-028', b'vec-028', b'vec-028', b'vec-028', b'vec-028', b'vec-028', b'vec-028'], [b'vec-029', b'vec-029', b'vec-029', b'vec-029', b'vec-029', b'vec-029', b'vec-029', b'vec-029', b'vec-029'], [], [b'vec-031'], [b'vec-032', b'vec-032'], [b'vec-033', b'vec-033', b'vec-033'], [b'vec-034', b'vec-034', b'vec-034', b'vec-034'], [b'vec-035', b'vec-035', b'vec-035', b'vec-035', b'vec-035'], [b'vec-036', b'vec-036', b'vec-036', b'vec-036', b'vec-036', b'vec-036'], [b'vec-037', b'vec-037', b'vec-037', b'vec-037', b'vec-037', b'vec-037', b'vec-037'], [b'vec-038', b'vec-038', b'vec-038', b'vec-038', b'vec-038', b'vec-038', b'vec-038', b'vec-038'], [b'vec-039', b'vec-039', b'vec-039', b'vec-039', b'vec-039', b'vec-039', b'vec-039', b'vec-039', b'vec-039'], [], [b'vec-041'], [b'vec-042', b'vec-042'], [b'vec-043', b'vec-043', b'vec-043'], [b'vec-044', b'vec-044', b'vec-044', b'vec-044'], [b'vec-045', b'vec-045', b'vec-045', b'vec-045', b'vec-045'], [b'vec-046', b'vec-046', b'vec-046', b'vec-046', b'vec-046', b'vec-046'], [b'vec-047', b'vec-047', b'vec-047', b'vec-047', b'vec-047', b'vec-047', b'vec-047'], [b'vec-048', b'vec-048', b'vec-048', b'vec-048', b'vec-048', b'vec-048', b'vec-048', b'vec-048'], [b'vec-049', b'vec-049', b'vec-049', b'vec-049', b'vec-049', b'vec-049', b'vec-049', b'vec-049', b'vec-049'], [], [b'vec-051'], [b'vec-052', b'vec-052'], [b'vec-053', b'vec-053', b'vec-053'], [b'vec-054', b'vec-054', b'vec-054', b'vec-054'], [b'vec-055', b'vec-055', b'vec-055', b'vec-055', b'vec-055'], [b'vec-056', b'vec-056', b'vec-056', b'vec-056', b'vec-056', b'vec-056'], [b'vec-057', b'vec-057', b'vec-057', b'vec-057', b'vec-057', b'vec-057', b'vec-057'], [b'vec-058', b'vec-058', b'vec-058', b'vec-058', b'vec-058', b'vec-058', b'vec-058', b'vec-058'], [b'vec-059', b'vec-059', b'vec-059', b'vec-059', b'vec-059', b'vec-059', b'vec-059', b'vec-059', b'vec-059'], [], [b'vec-061'], [b'vec-062', b'vec-062'], [b'vec-063', b'vec-063', b'vec-063'], [b'vec-064', b'vec-064', b'vec-064', b'vec-064'], [b'vec-065', b'vec-065', b'vec-065', b'vec-065', b'vec-065'], [b'vec-066', b'vec-066', b'vec-066', b'vec-066', b'vec-066', b'vec-066'], [b'vec-067', b'vec-067', b'vec-067', b'vec-067', b'vec-067', b'vec-067', b'vec-067'], [b'vec-068', b'vec-068', b'vec-068', b'vec-068', b'vec-068', b'vec-068', b'vec-068', b'vec-068'], [b'vec-069', b'vec-069', b'vec-069', b'vec-069', b'vec-069', b'vec-069', b'vec-069', b'vec-069', b'vec-069'], [], [b'vec-071'], [b'vec-072', b'vec-072'], [b'vec-073', b'vec-073', b'vec-073'], [b'vec-074', b'vec-074', b'vec-074', b'vec-074'], [b'vec-075', b'vec-075', b'vec-075', b'vec-075', b'vec-075'], [b'vec-076', b'vec-076', b'vec-076', b'vec-076', b'vec-076', b'vec-076'], [b'vec-077', b'vec-077', b'vec-077', b'vec-077', b'vec-077', b'vec-077', b'vec-077'], [b'vec-078', b'vec-078', b'vec-078', b'vec-078', b'vec-078', b'vec-078', b'vec-078', b'vec-078'], [b'vec-079', b'vec-079', b'vec-079', b'vec-079', b'vec-079', b'vec-079', b'vec-079', b'vec-079', b'vec-079'], [], [b'vec-081'], [b'vec-082', b'vec-082'], [b'vec-083', b'vec-083', 
b'vec-083'], [b'vec-084', b'vec-084', b'vec-084', b'vec-084'], [b'vec-085', b'vec-085', b'vec-085', b'vec-085', b'vec-085'], [b'vec-086', b'vec-086', b'vec-086', b'vec-086', b'vec-086', b'vec-086'], [b'vec-087', b'vec-087', b'vec-087', b'vec-087', b'vec-087', b'vec-087', b'vec-087'], [b'vec-088', b'vec-088', b'vec-088', b'vec-088', b'vec-088', b'vec-088', b'vec-088', b'vec-088'], [b'vec-089', b'vec-089', b'vec-089', b'vec-089', b'vec-089', b'vec-089', b'vec-089', b'vec-089', b'vec-089'], [], [b'vec-091'], [b'vec-092', b'vec-092'], [b'vec-093', b'vec-093', b'vec-093'], [b'vec-094', b'vec-094', b'vec-094', b'vec-094'], [b'vec-095', b'vec-095', b'vec-095', b'vec-095', b'vec-095'], [b'vec-096', b'vec-096', b'vec-096', b'vec-096', b'vec-096', b'vec-096'], [b'vec-097', b'vec-097', b'vec-097', b'vec-097', b'vec-097', b'vec-097', b'vec-097'], [b'vec-098', b'vec-098', b'vec-098', b'vec-098', b'vec-098', b'vec-098', b'vec-098', b'vec-098'], [b'vec-099', b'vec-099', b'vec-099', b'vec-099', b'vec-099', b'vec-099', b'vec-099', b'vec-099', b'vec-099']] def test_array(self): - tree = uproot.open("tests/samples/small-evnt-tree-fullsplit.root")["tree"] + tree = uproot3.open("tests/samples/small-evnt-tree-fullsplit.root")["tree"] assert tree.array("ArrayI16[10]").tolist() == [[i] * 10 for i in range(100)] def test_slice(self): - tree = uproot.open("tests/samples/small-evnt-tree-fullsplit.root")["tree"] + tree = uproot3.open("tests/samples/small-evnt-tree-fullsplit.root")["tree"] assert tree.array("SliceI16").tolist() == [[], [1], [2, 2], [3, 3, 3], [4, 4, 4, 4], [5, 5, 5, 5, 5], [6, 6, 6, 6, 6, 6], [7, 7, 7, 7, 7, 7, 7], [8, 8, 8, 8, 8, 8, 8, 8], [9, 9, 9, 9, 9, 9, 9, 9, 9], [], [11], [12, 12], [13, 13, 13], [14, 14, 14, 14], [15, 15, 15, 15, 15], [16, 16, 16, 16, 16, 16], [17, 17, 17, 17, 17, 17, 17], [18, 18, 18, 18, 18, 18, 18, 18], [19, 19, 19, 19, 19, 19, 19, 19, 19], [], [21], [22, 22], [23, 23, 23], [24, 24, 24, 24], [25, 25, 25, 25, 25], [26, 26, 26, 26, 26, 26], [27, 27, 27, 27, 27, 27, 27], [28, 28, 28, 28, 28, 28, 28, 28], [29, 29, 29, 29, 29, 29, 29, 29, 29], [], [31], [32, 32], [33, 33, 33], [34, 34, 34, 34], [35, 35, 35, 35, 35], [36, 36, 36, 36, 36, 36], [37, 37, 37, 37, 37, 37, 37], [38, 38, 38, 38, 38, 38, 38, 38], [39, 39, 39, 39, 39, 39, 39, 39, 39], [], [41], [42, 42], [43, 43, 43], [44, 44, 44, 44], [45, 45, 45, 45, 45], [46, 46, 46, 46, 46, 46], [47, 47, 47, 47, 47, 47, 47], [48, 48, 48, 48, 48, 48, 48, 48], [49, 49, 49, 49, 49, 49, 49, 49, 49], [], [51], [52, 52], [53, 53, 53], [54, 54, 54, 54], [55, 55, 55, 55, 55], [56, 56, 56, 56, 56, 56], [57, 57, 57, 57, 57, 57, 57], [58, 58, 58, 58, 58, 58, 58, 58], [59, 59, 59, 59, 59, 59, 59, 59, 59], [], [61], [62, 62], [63, 63, 63], [64, 64, 64, 64], [65, 65, 65, 65, 65], [66, 66, 66, 66, 66, 66], [67, 67, 67, 67, 67, 67, 67], [68, 68, 68, 68, 68, 68, 68, 68], [69, 69, 69, 69, 69, 69, 69, 69, 69], [], [71], [72, 72], [73, 73, 73], [74, 74, 74, 74], [75, 75, 75, 75, 75], [76, 76, 76, 76, 76, 76], [77, 77, 77, 77, 77, 77, 77], [78, 78, 78, 78, 78, 78, 78, 78], [79, 79, 79, 79, 79, 79, 79, 79, 79], [], [81], [82, 82], [83, 83, 83], [84, 84, 84, 84], [85, 85, 85, 85, 85], [86, 86, 86, 86, 86, 86], [87, 87, 87, 87, 87, 87, 87], [88, 88, 88, 88, 88, 88, 88, 88], [89, 89, 89, 89, 89, 89, 89, 89, 89], [], [91], [92, 92], [93, 93, 93], [94, 94, 94, 94], [95, 95, 95, 95, 95], [96, 96, 96, 96, 96, 96], [97, 97, 97, 97, 97, 97, 97], [98, 98, 98, 98, 98, 98, 98, 98], [99, 99, 99, 99, 99, 99, 99, 99, 99]] diff --git a/tests/test_tree.py 
b/tests/test_tree.py index b154955b..ca3061ca 100644 --- a/tests/test_tree.py +++ b/tests/test_tree.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE import os from collections import namedtuple @@ -8,8 +8,8 @@ import numpy import pytest -import awkward -import uproot +import awkward0 +import uproot3 def basest(array): while getattr(array, "base", None) is not None: @@ -20,7 +20,7 @@ class Test(object): ###################################################### double32 def test_double32(self): - t = uproot.open("tests/samples/demo-double32.root")["T"] + t = uproot3.open("tests/samples/demo-double32.root")["T"] fD64 = t.array("fD64") fF32 = t.array("fF32") fI32 = t.array("fI32") @@ -38,13 +38,13 @@ def test_double32(self): ###################################################### basket def test_flat_basket(self): - branch = uproot.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["i8"] - interpretation = branch._normalize_interpretation(None, awkward) - entrystart, entrystop = uproot.tree._normalize_entrystartstop(branch.numentries, None, None) + branch = uproot3.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["i8"] + interpretation = branch._normalize_interpretation(None, awkward0) + entrystart, entrystop = uproot3.tree._normalize_entrystartstop(branch.numentries, None, None) local_entrystart, local_entrystop = branch._localentries(0, entrystart, entrystop) - one = branch._basket(0, interpretation, local_entrystart, local_entrystop, awkward, None, None) - two = branch._basket(0, interpretation, local_entrystart, local_entrystop, awkward, None, None) + one = branch._basket(0, interpretation, local_entrystart, local_entrystop, awkward0, None, None) + two = branch._basket(0, interpretation, local_entrystart, local_entrystop, awkward0, None, None) assert numpy.array_equal(one, numpy.array([-15, -14, -13], dtype=">i8")) assert basest(one) is basest(two) @@ -58,13 +58,13 @@ def test_flat_basket(self): assert basest(four) is buf def test_regular_basket(self): - branch = uproot.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["ai8"] - interpretation = branch._normalize_interpretation(None, awkward) - entrystart, entrystop = uproot.tree._normalize_entrystartstop(branch.numentries, None, None) + branch = uproot3.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["ai8"] + interpretation = branch._normalize_interpretation(None, awkward0) + entrystart, entrystop = uproot3.tree._normalize_entrystartstop(branch.numentries, None, None) local_entrystart, local_entrystop = branch._localentries(0, entrystart, entrystop) - one = branch._basket(0, interpretation, local_entrystart, local_entrystop, awkward, None, None) - two = branch._basket(0, interpretation, local_entrystart, local_entrystop, awkward, None, None) + one = branch._basket(0, interpretation, local_entrystart, local_entrystop, awkward0, None, None) + two = branch._basket(0, interpretation, local_entrystart, local_entrystop, awkward0, None, None) assert numpy.array_equal(one, numpy.array([[-14, -13, -12]], dtype=">i8")) assert basest(one) is basest(two) @@ -84,13 +84,13 @@ def test_regular_basket(self): assert basest(four) is buf def test_irregular_basket(self): - branch = uproot.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["Ai8"] - interpretation = branch._normalize_interpretation(None, awkward) - entrystart, 
entrystop = uproot.tree._normalize_entrystartstop(branch.numentries, None, None) + branch = uproot3.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["Ai8"] + interpretation = branch._normalize_interpretation(None, awkward0) + entrystart, entrystop = uproot3.tree._normalize_entrystartstop(branch.numentries, None, None) local_entrystart, local_entrystop = branch._localentries(0, entrystart, entrystop) - one = branch._basket(0, interpretation, local_entrystart, local_entrystop, awkward, None, None) - two = branch._basket(0, interpretation, local_entrystart, local_entrystop, awkward, None, None) + one = branch._basket(0, interpretation, local_entrystart, local_entrystop, awkward0, None, None) + two = branch._basket(0, interpretation, local_entrystart, local_entrystop, awkward0, None, None) assert numpy.array_equal(one[0], numpy.array([], dtype=">i8")) assert numpy.array_equal(one[1], numpy.array([-15], dtype=">i8")) assert basest(one.content) is basest(two.content) @@ -100,9 +100,9 @@ def test_irregular_basket(self): assert numpy.array_equal(three[1], numpy.array([-15], dtype=">i8")) def test_strings_basket(self): - branch = uproot.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["str"] - interpretation = branch._normalize_interpretation(None, awkward) - entrystart, entrystop = uproot.tree._normalize_entrystartstop(branch.numentries, None, None) + branch = uproot3.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["str"] + interpretation = branch._normalize_interpretation(None, awkward0) + entrystart, entrystop = uproot3.tree._normalize_entrystartstop(branch.numentries, None, None) local_entrystart, local_entrystop = branch._localentries(0, entrystart, entrystop) one = branch.basket(0, interpretation, local_entrystart, local_entrystop) @@ -117,26 +117,26 @@ def test_strings_basket(self): ###################################################### baskets def test_flat_baskets(self): - branch = uproot.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["i8"] + branch = uproot3.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["i8"] expectation = [[-15, -14, -13], [-12, -11, -10], [-9, -8, -7], [-6, -5, -4], [-3, -2, -1], [0, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10, 11], [12, 13, 14]] assert [x.tolist() for x in branch.baskets()] == expectation assert [x.tolist() for x in branch.iterate_baskets()] == expectation def test_regular_baskets(self): - branch = uproot.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["ai8"] + branch = uproot3.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["ai8"] expectation = [[[-14, -13, -12]], [[-13, -12, -11]], [[-12, -11, -10]], [[-11, -10, -9]], [[-10, -9, -8]], [[-9, -8, -7]], [[-8, -7, -6]], [[-7, -6, -5]], [[-6, -5, -4]], [[-5, -4, -3]], [[-4, -3, -2]], [[-3, -2, -1]], [[-2, -1, 0]], [[-1, 0, 1]], [[0, 1, 2]], [[1, 2, 3]], [[2, 3, 4]], [[3, 4, 5]], [[4, 5, 6]], [[5, 6, 7]], [[6, 7, 8]], [[7, 8, 9]], [[8, 9, 10]], [[9, 10, 11]], [[10, 11, 12]], [[11, 12, 13]], [[12, 13, 14]], [[13, 14, 15]], [[14, 15, 16]], [[15, 16, 17]]] assert [x.tolist() for x in branch.baskets()] == expectation assert [x.tolist() for x in branch.iterate_baskets()] == expectation def test_irregular_baskets(self): - branch = uproot.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["Ai8"] + branch = uproot3.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["Ai8"] expectation = [[[], [-15]], [[-15, -13]], [[-15, -13, -11]], [[-15, -13, -11, -9]], [[], [-10]], [[-10, 
-8]], [[-10, -8, -6]], [[-10, -8, -6, -4]], [[], [-5]], [[-5, -3]], [[-5, -3, -1]], [[-5, -3, -1, 1]], [[], [0]], [[0, 2]], [[0, 2, 4]], [[0, 2, 4, 6]], [[], [5]], [[5, 7]], [[5, 7, 9]], [[5, 7, 9, 11]], [[], [10]], [[10, 12]], [[10, 12, 14]], [[10, 12, 14, 16]]] assert [len(y) for x in expectation for y in x] == [0, 1, 2, 3, 4] * 6 assert [x.tolist() for x in branch.baskets()] == expectation assert [x.tolist() for x in branch.iterate_baskets()] == expectation def test_strings_baskets(self): - branch = uproot.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["str"] + branch = uproot3.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["str"] expectation = [[b"hey-0", b"hey-1", b"hey-2", b"hey-3", b"hey-4", b"hey-5"], [b"hey-6", b"hey-7", b"hey-8", b"hey-9", b"hey-10"], [b"hey-11", b"hey-12", b"hey-13", b"hey-14", b"hey-15"], [b"hey-16", b"hey-17", b"hey-18", b"hey-19", b"hey-20"], [b"hey-21", b"hey-22", b"hey-23", b"hey-24", b"hey-25"], [b"hey-26", b"hey-27", b"hey-28", b"hey-29"]] assert [x.tolist() for x in branch.baskets()] == expectation assert [x.tolist() for x in branch.iterate_baskets()] == expectation @@ -144,26 +144,26 @@ def test_strings_baskets(self): ###################################################### array def test_flat_array(self): - branch = uproot.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["i8"] + branch = uproot3.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["i8"] expectation = [-15, -14, -13, -12, -11, -10, -9, -8, -7, -6, -5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14] for entrystart, entrystop in [(None, None), (1, None), (1, 2), (1, 10), (10, 11), (10, 20), (6, 12), (6, 13)]: assert branch.array(entrystart=entrystart, entrystop=entrystop).tolist() == expectation[entrystart:entrystop] def test_regular_array(self): - branch = uproot.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["ai8"] + branch = uproot3.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["ai8"] expectation = [[-14, -13, -12], [-13, -12, -11], [-12, -11, -10], [-11, -10, -9], [-10, -9, -8], [-9, -8, -7], [-8, -7, -6], [-7, -6, -5], [-6, -5, -4], [-5, -4, -3], [-4, -3, -2], [-3, -2, -1], [-2, -1, 0], [-1, 0, 1], [0, 1, 2], [1, 2, 3], [2, 3, 4], [3, 4, 5], [4, 5, 6], [5, 6, 7], [6, 7, 8], [7, 8, 9], [8, 9, 10], [9, 10, 11], [10, 11, 12], [11, 12, 13], [12, 13, 14], [13, 14, 15], [14, 15, 16], [15, 16, 17]] for entrystart, entrystop in [(None, None), (1, None), (1, 2), (1, 10), (10, 11), (10, 20), (6, 12), (6, 13)]: assert branch.array(entrystart=entrystart, entrystop=entrystop).tolist() == expectation[entrystart:entrystop] def test_irregular_array(self): - branch = uproot.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["Ai8"] + branch = uproot3.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["Ai8"] expectation = [[], [-15], [-15, -13], [-15, -13, -11], [-15, -13, -11, -9], [], [-10], [-10, -8], [-10, -8, -6], [-10, -8, -6, -4], [], [-5], [-5, -3], [-5, -3, -1], [-5, -3, -1, 1], [], [0], [0, 2], [0, 2, 4], [0, 2, 4, 6], [], [5], [5, 7], [5, 7, 9], [5, 7, 9, 11], [], [10], [10, 12], [10, 12, 14], [10, 12, 14, 16]] assert [len(x) for x in expectation] == [0, 1, 2, 3, 4] * 6 for entrystart, entrystop in [(None, None), (1, None), (1, 2), (1, 10), (10, 11), (10, 20), (6, 12), (6, 13)]: assert branch.array(entrystart=entrystart, entrystop=entrystop).tolist() == expectation[entrystart:entrystop] def test_strings_array(self): - branch = 
uproot.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["str"] + branch = uproot3.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"]["str"] expectation = [b"hey-0", b"hey-1", b"hey-2", b"hey-3", b"hey-4", b"hey-5", b"hey-6", b"hey-7", b"hey-8", b"hey-9", b"hey-10", b"hey-11", b"hey-12", b"hey-13", b"hey-14", b"hey-15", b"hey-16", b"hey-17", b"hey-18", b"hey-19", b"hey-20", b"hey-21", b"hey-22", b"hey-23", b"hey-24", b"hey-25", b"hey-26", b"hey-27", b"hey-28", b"hey-29"] for entrystart, entrystop in [(None, None), (1, None), (1, 2), (1, 10), (10, 11), (10, 20), (6, 12), (6, 13)]: assert branch.array(entrystart=entrystart, entrystop=entrystop).tolist() == expectation[entrystart:entrystop] @@ -171,25 +171,25 @@ def test_strings_array(self): ###################################################### iterate def test_flat_iterate(self): - tree = uproot.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"] + tree = uproot3.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"] expectation = [-15, -14, -13, -12, -11, -10, -9, -8, -7, -6, -5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14] for n in 1000, 5, 6, 7: assert [x.tolist() for (x,) in tree.iterate("i8", n, outputtype=tuple)] == [expectation[x : x + n] for x in range(0, len(expectation), n)] def test_regular_iterate(self): - tree = uproot.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"] + tree = uproot3.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"] expectation = [[-14, -13, -12], [-13, -12, -11], [-12, -11, -10], [-11, -10, -9], [-10, -9, -8], [-9, -8, -7], [-8, -7, -6], [-7, -6, -5], [-6, -5, -4], [-5, -4, -3], [-4, -3, -2], [-3, -2, -1], [-2, -1, 0], [-1, 0, 1], [0, 1, 2], [1, 2, 3], [2, 3, 4], [3, 4, 5], [4, 5, 6], [5, 6, 7], [6, 7, 8], [7, 8, 9], [8, 9, 10], [9, 10, 11], [10, 11, 12], [11, 12, 13], [12, 13, 14], [13, 14, 15], [14, 15, 16], [15, 16, 17]] for n in 1000, 5, 6, 7: assert [x.tolist() for (x,) in tree.iterate("ai8", n, outputtype=tuple)] == [expectation[x : x + n] for x in range(0, len(expectation), n)] def test_irregular_iterate(self): - tree = uproot.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"] + tree = uproot3.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"] expectation = [[], [-15], [-15, -13], [-15, -13, -11], [-15, -13, -11, -9], [], [-10], [-10, -8], [-10, -8, -6], [-10, -8, -6, -4], [], [-5], [-5, -3], [-5, -3, -1], [-5, -3, -1, 1], [], [0], [0, 2], [0, 2, 4], [0, 2, 4, 6], [], [5], [5, 7], [5, 7, 9], [5, 7, 9, 11], [], [10], [10, 12], [10, 12, 14], [10, 12, 14, 16]] for n in 1000, 5, 6, 7: assert [x.tolist() for (x,) in tree.iterate("Ai8", n, outputtype=tuple)] == [expectation[x : x + n] for x in range(0, len(expectation), n)] def test_strings_iterate(self): - tree = uproot.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"] + tree = uproot3.open("tests/samples/sample-6.10.05-uncompressed.root")["sample"] expectation = [b"hey-0", b"hey-1", b"hey-2", b"hey-3", b"hey-4", b"hey-5", b"hey-6", b"hey-7", b"hey-8", b"hey-9", b"hey-10", b"hey-11", b"hey-12", b"hey-13", b"hey-14", b"hey-15", b"hey-16", b"hey-17", b"hey-18", b"hey-19", b"hey-20", b"hey-21", b"hey-22", b"hey-23", b"hey-24", b"hey-25", b"hey-26", b"hey-27", b"hey-28", b"hey-29"] for n in 1000, 5, 6, 7: assert [x.tolist() for (x,) in tree.iterate("str", n, outputtype=tuple)] == [expectation[x : x + n] for x in range(0, len(expectation), n)] @@ -197,7 +197,7 @@ def test_strings_iterate(self): 
###################################################### old tests def test_branch_array(self): - file = uproot.open("tests/samples/simple.root") + file = uproot3.open("tests/samples/simple.root") repr(file) tree = file["tree"] @@ -218,7 +218,7 @@ def test_branch_array(self): assert tree["three"].array().tolist() == [b"uno", b"dos", b"tres", b"quatro"] def test_tree_arrays(self): - file = uproot.open("tests/samples/simple.root") + file = uproot3.open("tests/samples/simple.root") tree = file["tree"] arrays = tree.arrays() @@ -240,7 +240,7 @@ def test_tree_arrays(self): assert arrays[b"three"].tolist() == [b"uno", b"dos", b"tres", b"quatro"] def test_tree_arrays_namedecode(self): - file = uproot.open("tests/samples/simple.root") + file = uproot3.open("tests/samples/simple.root") tree = file["tree"] arrays = tree.arrays(namedecode="utf-8") @@ -250,38 +250,38 @@ def test_tree_arrays_namedecode(self): def test_tree_iterator1(self): # one big array - for arrays in uproot.open("tests/samples/foriter.root")["foriter"].iterate(entrysteps=1000): + for arrays in uproot3.open("tests/samples/foriter.root")["foriter"].iterate(entrysteps=1000): assert arrays[b"data"].tolist() == list(range(46)) # size is equal to basket size (for most baskets) i = 0 - for arrays in uproot.open("tests/samples/foriter.root")["foriter"].iterate(entrysteps=6): + for arrays in uproot3.open("tests/samples/foriter.root")["foriter"].iterate(entrysteps=6): assert arrays[b"data"].tolist() == list(range(i, min(i + 6, 46))) i += 6 # size is smaller i = 0 - for arrays in uproot.open("tests/samples/foriter.root")["foriter"].iterate(entrysteps=3): + for arrays in uproot3.open("tests/samples/foriter.root")["foriter"].iterate(entrysteps=3): assert arrays[b"data"].tolist() == list(range(i, min(i + 3, 46))) i += 3 i = 0 - for arrays in uproot.open("tests/samples/foriter.root")["foriter"].iterate(entrysteps=4): + for arrays in uproot3.open("tests/samples/foriter.root")["foriter"].iterate(entrysteps=4): assert arrays[b"data"].tolist() == list(range(i, min(i + 4, 46))) i += 4 # size is larger i = 0 - for arrays in uproot.open("tests/samples/foriter.root")["foriter"].iterate(entrysteps=12): + for arrays in uproot3.open("tests/samples/foriter.root")["foriter"].iterate(entrysteps=12): assert arrays[b"data"].tolist() == list(range(i, min(i + 12, 46))) i += 12 i = 0 - for arrays in uproot.open("tests/samples/foriter.root")["foriter"].iterate(entrysteps=10): + for arrays in uproot3.open("tests/samples/foriter.root")["foriter"].iterate(entrysteps=10): assert arrays[b"data"].tolist() == list(range(i, min(i + 10, 46))) i += 10 # singleton case i = 0 - for arrays in uproot.open("tests/samples/foriter.root")["foriter"].iterate(entrysteps=1): + for arrays in uproot3.open("tests/samples/foriter.root")["foriter"].iterate(entrysteps=1): assert arrays[b"data"].tolist() == list(range(i, min(i + 1, 46))) i += 1 @@ -289,38 +289,38 @@ def test_tree_iterator2(self): words = [b"zero", b"one", b"two", b"three", b"four", b"five", b"six", b"seven", b"eight", b"nine", b"ten", b"eleven", b"twelve", b"thirteen", b"fourteen", b"fifteen", b"sixteen", b"seventeen", b"eighteen", b"ninteen", b"twenty", b"twenty-one", b"twenty-two", b"twenty-three", b"twenty-four", b"twenty-five", b"twenty-six", b"twenty-seven", b"twenty-eight", b"twenty-nine", b"thirty"] # one big array - for arrays in uproot.open("tests/samples/foriter2.root")["foriter2"].iterate(entrysteps=1000): + for arrays in uproot3.open("tests/samples/foriter2.root")["foriter2"].iterate(entrysteps=1000): assert 
arrays[b"data"].tolist() == words # size is equal to basket size (for most baskets) i = 0 - for arrays in uproot.open("tests/samples/foriter2.root")["foriter2"].iterate(entrysteps=6): + for arrays in uproot3.open("tests/samples/foriter2.root")["foriter2"].iterate(entrysteps=6): assert arrays[b"data"].tolist() == words[i:i + 6] i += 6 # size is smaller i = 0 - for arrays in uproot.open("tests/samples/foriter2.root")["foriter2"].iterate(entrysteps=3): + for arrays in uproot3.open("tests/samples/foriter2.root")["foriter2"].iterate(entrysteps=3): assert arrays[b"data"].tolist() == words[i:i + 3] i += 3 i = 0 - for arrays in uproot.open("tests/samples/foriter2.root")["foriter2"].iterate(entrysteps=4): + for arrays in uproot3.open("tests/samples/foriter2.root")["foriter2"].iterate(entrysteps=4): assert arrays[b"data"].tolist() == words[i:i + 4] i += 4 # size is larger i = 0 - for arrays in uproot.open("tests/samples/foriter2.root")["foriter2"].iterate(entrysteps=12): + for arrays in uproot3.open("tests/samples/foriter2.root")["foriter2"].iterate(entrysteps=12): assert arrays[b"data"].tolist() == words[i:i + 12] i += 12 i = 0 - for arrays in uproot.open("tests/samples/foriter2.root")["foriter2"].iterate(entrysteps=10): + for arrays in uproot3.open("tests/samples/foriter2.root")["foriter2"].iterate(entrysteps=10): assert arrays[b"data"].tolist() == words[i:i + 10] i += 10 # singleton case i = 0 - for arrays in uproot.open("tests/samples/foriter2.root")["foriter2"].iterate(entrysteps=1): + for arrays in uproot3.open("tests/samples/foriter2.root")["foriter2"].iterate(entrysteps=1): assert arrays[b"data"].tolist() == words[i:i + 1] i += 1 @@ -328,43 +328,43 @@ def test_tree_iterator3(self): source = list(range(46)) # one big array - for arrays in uproot.iterate(["tests/samples/foriter.root", "tests/samples/foriter.root"], "foriter", entrysteps=1000): + for arrays in uproot3.iterate(["tests/samples/foriter.root", "tests/samples/foriter.root"], "foriter", entrysteps=1000): assert arrays[b"data"].tolist() == source # size is equal to basket size (for most baskets) i = 0 - for arrays in uproot.iterate(["tests/samples/foriter.root", "tests/samples/foriter.root"], "foriter", entrysteps=6): + for arrays in uproot3.iterate(["tests/samples/foriter.root", "tests/samples/foriter.root"], "foriter", entrysteps=6): assert arrays[b"data"].tolist() == source[i : i + 6] i += 6 if i > 45: i = 0 # size is smaller i = 0 - for arrays in uproot.iterate(["tests/samples/foriter.root", "tests/samples/foriter.root"], "foriter", entrysteps=3): + for arrays in uproot3.iterate(["tests/samples/foriter.root", "tests/samples/foriter.root"], "foriter", entrysteps=3): assert arrays[b"data"].tolist() == source[i : i + 3] i += 3 if i > 45: i = 0 i = 0 - for arrays in uproot.iterate(["tests/samples/foriter.root", "tests/samples/foriter.root"], "foriter", entrysteps=4): + for arrays in uproot3.iterate(["tests/samples/foriter.root", "tests/samples/foriter.root"], "foriter", entrysteps=4): assert arrays[b"data"].tolist() == source[i : i + 4] i += 4 if i > 45: i = 0 # size is larger i = 0 - for arrays in uproot.iterate(["tests/samples/foriter.root", "tests/samples/foriter.root"], "foriter", entrysteps=12): + for arrays in uproot3.iterate(["tests/samples/foriter.root", "tests/samples/foriter.root"], "foriter", entrysteps=12): assert arrays[b"data"].tolist() == source[i : i + 12] i += 12 if i > 45: i = 0 i = 0 - for arrays in uproot.iterate(["tests/samples/foriter.root", "tests/samples/foriter.root"], "foriter", entrysteps=10): + for arrays 
in uproot3.iterate(["tests/samples/foriter.root", "tests/samples/foriter.root"], "foriter", entrysteps=10): assert arrays[b"data"].tolist() == source[i : i + 10] i += 10 if i > 45: i = 0 # singleton case i = 0 - for arrays in uproot.iterate(["tests/samples/foriter.root", "tests/samples/foriter.root"], "foriter", entrysteps=1): + for arrays in uproot3.iterate(["tests/samples/foriter.root", "tests/samples/foriter.root"], "foriter", entrysteps=1): assert arrays[b"data"].tolist() == source[i : i + 1] i += 1 if i > 45: i = 0 @@ -373,49 +373,49 @@ def test_tree_iterator4(self): words2 = [b"zero", b"one", b"two", b"three", b"four", b"five", b"six", b"seven", b"eight", b"nine", b"ten", b"eleven", b"twelve", b"thirteen", b"fourteen", b"fifteen", b"sixteen", b"seventeen", b"eighteen", b"ninteen", b"twenty", b"twenty-one", b"twenty-two", b"twenty-three", b"twenty-four", b"twenty-five", b"twenty-six", b"twenty-seven", b"twenty-eight", b"twenty-nine", b"thirty"] # one big array - for arrays in uproot.iterate(["tests/samples/foriter2.root", "tests/samples/foriter2.root"], "foriter2", entrysteps=1000): + for arrays in uproot3.iterate(["tests/samples/foriter2.root", "tests/samples/foriter2.root"], "foriter2", entrysteps=1000): assert arrays[b"data"].tolist() == words2 # size is equal to basket size (for most baskets) i = 0 - for arrays in uproot.iterate(["tests/samples/foriter2.root", "tests/samples/foriter2.root"], "foriter2", entrysteps=6): + for arrays in uproot3.iterate(["tests/samples/foriter2.root", "tests/samples/foriter2.root"], "foriter2", entrysteps=6): assert arrays[b"data"].tolist() == words2[i : i + 6] i += 6 if i > 30: i = 0 # size is smaller i = 0 - for arrays in uproot.iterate(["tests/samples/foriter2.root", "tests/samples/foriter2.root"], "foriter2", entrysteps=3): + for arrays in uproot3.iterate(["tests/samples/foriter2.root", "tests/samples/foriter2.root"], "foriter2", entrysteps=3): assert arrays[b"data"].tolist() == words2[i : i + 3] i += 3 if i > 30: i = 0 i = 0 - for arrays in uproot.iterate(["tests/samples/foriter2.root", "tests/samples/foriter2.root"], "foriter2", entrysteps=4): + for arrays in uproot3.iterate(["tests/samples/foriter2.root", "tests/samples/foriter2.root"], "foriter2", entrysteps=4): assert arrays[b"data"].tolist() == words2[i : i + 4] i += 4 if i > 30: i = 0 # size is larger i = 0 - for arrays in uproot.iterate(["tests/samples/foriter2.root", "tests/samples/foriter2.root"], "foriter2", entrysteps=12): + for arrays in uproot3.iterate(["tests/samples/foriter2.root", "tests/samples/foriter2.root"], "foriter2", entrysteps=12): assert arrays[b"data"].tolist() == words2[i : i + 12] i += 12 if i > 30: i = 0 i = 0 - for arrays in uproot.iterate(["tests/samples/foriter2.root", "tests/samples/foriter2.root"], "foriter2", entrysteps=10): + for arrays in uproot3.iterate(["tests/samples/foriter2.root", "tests/samples/foriter2.root"], "foriter2", entrysteps=10): assert arrays[b"data"].tolist() == words2[i : i + 10] i += 10 if i > 30: i = 0 # singleton case i = 0 - for arrays in uproot.iterate(["tests/samples/foriter2.root", "tests/samples/foriter2.root"], "foriter2", entrysteps=1): + for arrays in uproot3.iterate(["tests/samples/foriter2.root", "tests/samples/foriter2.root"], "foriter2", entrysteps=1): assert arrays[b"data"].tolist() == words2[i : i + 1] i += 1 if i > 30: i = 0 def test_directories(self): - file = uproot.open("tests/samples/nesteddirs.root") + file = uproot3.open("tests/samples/nesteddirs.root") assert [(n, cls._classname) for n, cls in file.classes()] == 
[(b"one;1", b"TDirectory"), (b"three;1", b"TDirectory")] assert [(n, cls._classname) for n, cls in file.allclasses()] == [(b"one;1", b"TDirectory"), (b"one/two;1", b"TDirectory"), (b"one/two/tree;1", b"TTree"), (b"one/tree;1", b"TTree"), (b"three;1", b"TDirectory"), (b"three/tree;1", b"TTree")] @@ -430,7 +430,7 @@ def test_directories(self): assert file["one/two/tree"].array("Int32").shape == (100,) assert file["three/tree"].array("I32").shape == (100,) - file = uproot.open("tests/samples/nesteddirs.root") + file = uproot3.open("tests/samples/nesteddirs.root") assert list(file["one/tree"].keys()) == [b"one", b"two", b"three"] assert list(file["one/two/tree"].keys()) == [b"Int32", b"Int64", b"UInt32", b"UInt64", b"Float32", b"Float64", b"Str", b"ArrayInt32", b"ArrayInt64", b"ArrayUInt32", b"ArrayUInt64", b"ArrayFloat32", b"ArrayFloat64", b"N", b"SliceInt32", b"SliceInt64", b"SliceUInt32", b"SliceUInt64", b"SliceFloat32", b"SliceFloat64"] @@ -441,7 +441,7 @@ def test_directories(self): assert file["three/tree;1"].array("I32").shape == (100,) def test_cast(self): - tree = uproot.open("tests/samples/Zmumu.root")["events"] + tree = uproot3.open("tests/samples/Zmumu.root")["events"] one = numpy.cast[numpy.int32](numpy.floor(tree.array("M"))) two = tree.array("M", numpy.int32) assert one.dtype == two.dtype @@ -457,7 +457,7 @@ def test_cast(self): assert numpy.array_equal(one, two) def test_pass_array(self): - tree = uproot.open("tests/samples/Zmumu.root")["events"] + tree = uproot3.open("tests/samples/Zmumu.root")["events"] one = numpy.cast[numpy.int32](numpy.floor(tree.array("M"))) two = numpy.zeros(one.shape, dtype=one.dtype) tree.array("M", two) @@ -470,7 +470,7 @@ def test_pass_array(self): assert numpy.array_equal(one, two) def test_outputtype(self): - tree = uproot.open("tests/samples/simple.root")["tree"] + tree = uproot3.open("tests/samples/simple.root")["tree"] arrays = tree.arrays(["three", "two", "one"], outputtype=dict) assert isinstance(arrays, dict) @@ -517,7 +517,7 @@ class MyTuple(tuple): assert isinstance(arrays, MyTuple) def test_tree_lazy(self): - tree = uproot.open("tests/samples/sample-5.30.00-uncompressed.root")["sample"] + tree = uproot3.open("tests/samples/sample-5.30.00-uncompressed.root")["sample"] for branchname in b"u1", b"i8", b"Ai8", b"f4", b"af4": strict = tree[branchname].array() @@ -539,7 +539,7 @@ def test_tree_lazy(self): assert lazy[i - 1 : i + 3].tolist() == strict[i - 1 : i + 3].tolist() def test_tree_lazy2(self): - tree = uproot.open("tests/samples/sample-5.30.00-uncompressed.root")["sample"] + tree = uproot3.open("tests/samples/sample-5.30.00-uncompressed.root")["sample"] lazy = tree.lazyarrays() for branchname in "u1", "i8", "Ai8", "f4", "af4": @@ -558,7 +558,7 @@ def test_tree_lazy2(self): assert lazy[branchname][i - 1 : i + 3].tolist() == strict[i - 1 : i + 3].tolist() def test_tree_lazy3(self): - lazy = uproot.lazyarrays(["tests/samples/sample-5.29.02-uncompressed.root", "tests/samples/sample-5.30.00-uncompressed.root"], "sample") + lazy = uproot3.lazyarrays(["tests/samples/sample-5.29.02-uncompressed.root", "tests/samples/sample-5.30.00-uncompressed.root"], "sample") assert lazy["u1"].tolist() == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29] assert lazy["i8"].tolist() == [-15, -14, -13, -12, -11, -10, -9, -8, -7, -6, -5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 
13, 14, -15, -14, -13, -12, -11, -10, -9, -8, -7, -6, -5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14] @@ -567,7 +567,7 @@ def test_tree_lazy3(self): assert lazy["af4"].tolist() == [[-13.899999618530273, -12.899999618530273, -11.899999618530273], [-12.899999618530273, -11.899999618530273, -10.899999618530273], [-11.899999618530273, -10.899999618530273, -9.899999618530273], [-10.899999618530273, -9.899999618530273, -8.899999618530273], [-9.899999618530273, -8.899999618530273, -7.900000095367432], [-8.899999618530273, -7.900000095367432, -6.900000095367432], [-7.900000095367432, -6.900000095367432, -5.900000095367432], [-6.900000095367432, -5.900000095367432, -4.900000095367432], [-5.900000095367432, -4.900000095367432, -3.9000000953674316], [-4.900000095367432, -3.9000000953674316, -2.9000000953674316], [-3.9000000953674316, -2.9000000953674316, -1.899999976158142], [-2.9000000953674316, -1.899999976158142, -0.8999999761581421], [-1.899999976158142, -0.8999999761581421, 0.10000000149011612], [-0.8999999761581421, 0.10000000149011612, 1.100000023841858], [0.10000000149011612, 1.100000023841858, 2.0999999046325684], [1.100000023841858, 2.0999999046325684, 3.0999999046325684], [2.0999999046325684, 3.0999999046325684, 4.099999904632568], [3.0999999046325684, 4.099999904632568, 5.099999904632568], [4.099999904632568, 5.099999904632568, 6.099999904632568], [5.099999904632568, 6.099999904632568, 7.099999904632568], [6.099999904632568, 7.099999904632568, 8.100000381469727], [7.099999904632568, 8.100000381469727, 9.100000381469727], [8.100000381469727, 9.100000381469727, 10.100000381469727], [9.100000381469727, 10.100000381469727, 11.100000381469727], [10.100000381469727, 11.100000381469727, 12.100000381469727], [11.100000381469727, 12.100000381469727, 13.100000381469727], [12.100000381469727, 13.100000381469727, 14.100000381469727], [13.100000381469727, 14.100000381469727, 15.100000381469727], [14.100000381469727, 15.100000381469727, 16.100000381469727], [15.100000381469727, 16.100000381469727, 17.100000381469727], [-13.899999618530273, -12.899999618530273, -11.899999618530273], [-12.899999618530273, -11.899999618530273, -10.899999618530273], [-11.899999618530273, -10.899999618530273, -9.899999618530273], [-10.899999618530273, -9.899999618530273, -8.899999618530273], [-9.899999618530273, -8.899999618530273, -7.900000095367432], [-8.899999618530273, -7.900000095367432, -6.900000095367432], [-7.900000095367432, -6.900000095367432, -5.900000095367432], [-6.900000095367432, -5.900000095367432, -4.900000095367432], [-5.900000095367432, -4.900000095367432, -3.9000000953674316], [-4.900000095367432, -3.9000000953674316, -2.9000000953674316], [-3.9000000953674316, -2.9000000953674316, -1.899999976158142], [-2.9000000953674316, -1.899999976158142, -0.8999999761581421], [-1.899999976158142, -0.8999999761581421, 0.10000000149011612], [-0.8999999761581421, 0.10000000149011612, 1.100000023841858], [0.10000000149011612, 1.100000023841858, 2.0999999046325684], [1.100000023841858, 2.0999999046325684, 3.0999999046325684], [2.0999999046325684, 3.0999999046325684, 4.099999904632568], [3.0999999046325684, 4.099999904632568, 5.099999904632568], [4.099999904632568, 5.099999904632568, 6.099999904632568], [5.099999904632568, 6.099999904632568, 7.099999904632568], [6.099999904632568, 7.099999904632568, 8.100000381469727], [7.099999904632568, 8.100000381469727, 9.100000381469727], [8.100000381469727, 9.100000381469727, 10.100000381469727], [9.100000381469727, 10.100000381469727, 11.100000381469727], 
[10.100000381469727, 11.100000381469727, 12.100000381469727], [11.100000381469727, 12.100000381469727, 13.100000381469727], [12.100000381469727, 13.100000381469727, 14.100000381469727], [13.100000381469727, 14.100000381469727, 15.100000381469727], [14.100000381469727, 15.100000381469727, 16.100000381469727], [15.100000381469727, 16.100000381469727, 17.100000381469727]] def test_tree_lazy_cached(self): - tree = uproot.open("tests/samples/sample-5.30.00-uncompressed.root")["sample"] + tree = uproot3.open("tests/samples/sample-5.30.00-uncompressed.root")["sample"] cache = {} keycache = {} @@ -593,15 +593,16 @@ def test_tree_lazy_cached(self): assert lazy[i - 1 : i + 3].tolist() == strict[i - 1 : i + 3].tolist() @pytest.mark.parametrize("use_http", [False, True]) + @pytest.mark.skip(reason="http://scikit-hep.org/uproot/examples/Event.root moved") def test_hist_in_tree(self, use_http): if use_http: pytest.importorskip("requests") - tree = uproot.open("http://scikit-hep.org/uproot/examples/Event.root")["T"] + tree = uproot3.open("http://scikit-hep.org/uproot3/examples/Event.root")["T"] else: path = os.path.join("tests", "samples", "Event.root") if not os.path.exists(path): raise pytest.skip() - tree = uproot.open(path)["T"] + tree = uproot3.open(path)["T"] check = [0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, @@ -613,6 +614,7 @@ def test_hist_in_tree(self, use_http): assert tree.array("fH")[20].values.tolist() == check @pytest.mark.parametrize("use_http", [False, True]) + @pytest.mark.skip(reason="http://scikit-hep.org/uproot/examples/Event.root moved") def test_branch_auto_interpretation(self, use_http): # The aim is to reduce this list in a controlled manner known_branches_without_interp = [ @@ -627,12 +629,12 @@ def test_branch_auto_interpretation(self, use_http): ] if use_http: pytest.importorskip("requests") - tree = uproot.open("http://scikit-hep.org/uproot/examples/Event.root")["T"] + tree = uproot3.open("http://scikit-hep.org/uproot3/examples/Event.root")["T"] else: path = os.path.join("tests", "samples", "Event.root") if not os.path.exists(path): raise pytest.skip() - tree = uproot.open(path)["T"] + tree = uproot3.open(path)["T"] branches_without_interp = [b.name for b in tree.allvalues() if b.interpretation is None] assert branches_without_interp == known_branches_without_interp assert tree.array("fTracks.fTArray[3]", entrystop=10)[5][10].tolist() == [11.03951644897461, 19.40645980834961, 34.54059982299805] @@ -650,7 +652,7 @@ def test_branch_auto_interpretation(self, use_http): assert tree.array("fTracks.fVertex[3]", entrystop=10)[1][2].tolist() == [0.245361328125, 0.029296875,-16.171875] def test_leaflist(self): - tree = uproot.open("tests/samples/leaflist.root")["tree"] + tree = uproot3.open("tests/samples/leaflist.root")["tree"] a = tree.array("leaflist") assert a["x"].tolist() == [1.1, 2.2, 3.3, 4.0, 5.5] # yeah, I goofed up when making it assert a["y"].tolist() == [1, 2, 3, 4, 5] @@ -659,7 +661,7 @@ def test_leaflist(self): pytest.importorskip("pandas") assert tree.pandas.df()["leaflist.x"].tolist() == [1.1, 2.2, 3.3, 4.0, 5.5] - tree = uproot.open("tests/samples/HZZ-objects.root")["events"] + tree = uproot3.open("tests/samples/HZZ-objects.root")["events"] tree.pandas.df("muonp4") tree.pandas.df("muonp4", flatten=False) df = tree.pandas.df("eventweight", entrystart=100, entrystop=200) @@ -672,6 +674,6 
@@ def test_leaflist(self): assert max(index) == 199 def test_mempartitions(self): - t = uproot.open("tests/samples/sample-5.23.02-zlib.root")["sample"] + t = uproot3.open("tests/samples/sample-5.23.02-zlib.root")["sample"] assert list(t.mempartitions(500)) == [(0, 2), (2, 4), (4, 6), (6, 8), (8, 10), (10, 12), (12, 14), (14, 16), (16, 18), (18, 20), (20, 22), (22, 24), (24, 26), (26, 28), (28, 30)] assert [sum(y.nbytes for y in x.values()) for x in t.iterate(entrysteps="0.5 kB")] == [693, 865, 822, 779, 951, 695, 867, 824, 781, 953, 695, 867, 824, 781, 953] diff --git a/tests/test_versions.py b/tests/test_versions.py index 6a7718b2..09440a8a 100644 --- a/tests/test_versions.py +++ b/tests/test_versions.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE import pytest try: @@ -9,7 +9,7 @@ lzma = pytest.importorskip('backports.lzma') lz4 = pytest.importorskip('lz4') -import uproot +import uproot3 class Test(object): @@ -71,66 +71,66 @@ def compare(self, arrays): def test_5_23_02(self): # 2009-02-26, TTree version 16 for compression in "uncompressed", "zlib": - self.compare(uproot.open("tests/samples/sample-5.23.02-{0}.root".format(compression))["sample"].arrays()) + self.compare(uproot3.open("tests/samples/sample-5.23.02-{0}.root".format(compression))["sample"].arrays()) def test_5_24_00(self): # 2009-06-30, TTree version 16 for compression in "uncompressed", "zlib": - self.compare(uproot.open("tests/samples/sample-5.24.00-{0}.root".format(compression))["sample"].arrays()) + self.compare(uproot3.open("tests/samples/sample-5.24.00-{0}.root".format(compression))["sample"].arrays()) def test_5_25_02(self): # 2009-10-01, TTree version 17 for compression in "uncompressed", "zlib": - self.compare(uproot.open("tests/samples/sample-5.25.02-{0}.root".format(compression))["sample"].arrays()) + self.compare(uproot3.open("tests/samples/sample-5.25.02-{0}.root".format(compression))["sample"].arrays()) def test_5_26_00(self): # 2009-12-14, TTree version 18 for compression in "uncompressed", "zlib": - self.compare(uproot.open("tests/samples/sample-5.26.00-{0}.root".format(compression))["sample"].arrays()) + self.compare(uproot3.open("tests/samples/sample-5.26.00-{0}.root".format(compression))["sample"].arrays()) def test_5_27_02(self): # 2010-04-27, TTree version 18 for compression in "uncompressed", "zlib": - self.compare(uproot.open("tests/samples/sample-5.27.02-{0}.root".format(compression))["sample"].arrays()) + self.compare(uproot3.open("tests/samples/sample-5.27.02-{0}.root".format(compression))["sample"].arrays()) def test_5_28_00(self): # 2010-12-15, TTree version 18 for compression in "uncompressed", "zlib": - self.compare(uproot.open("tests/samples/sample-5.28.00-{0}.root".format(compression))["sample"].arrays()) + self.compare(uproot3.open("tests/samples/sample-5.28.00-{0}.root".format(compression))["sample"].arrays()) def test_5_29_02(self): # 2011-04-21, TTree version 18 for compression in "uncompressed", "zlib": - self.compare(uproot.open("tests/samples/sample-5.29.02-{0}.root".format(compression))["sample"].arrays()) + self.compare(uproot3.open("tests/samples/sample-5.29.02-{0}.root".format(compression))["sample"].arrays()) def test_5_30_00(self): # 2011-06-28, TTree version 19 for compression in "uncompressed", "zlib", "lzma": - 
self.compare(uproot.open("tests/samples/sample-5.30.00-{0}.root".format(compression))["sample"].arrays()) + self.compare(uproot3.open("tests/samples/sample-5.30.00-{0}.root".format(compression))["sample"].arrays()) def test_6_08_04(self): # 2017-01-13, TTree version 19 for compression in "uncompressed", "zlib", "lzma": - self.compare(uproot.open("tests/samples/sample-6.08.04-{0}.root".format(compression))["sample"].arrays()) + self.compare(uproot3.open("tests/samples/sample-6.08.04-{0}.root".format(compression))["sample"].arrays()) def test_6_10_05(self): # 2017-07-28, TTree version 19 for compression in "uncompressed", "zlib", "lzma", "lz4": - self.compare(uproot.open("tests/samples/sample-6.10.05-{0}.root".format(compression))["sample"].arrays()) + self.compare(uproot3.open("tests/samples/sample-6.10.05-{0}.root".format(compression))["sample"].arrays()) def test_6_14_00(self): # 2018-06-20, TTree version 20 for compression in "uncompressed", "zlib", "lzma", "lz4": - self.compare(uproot.open("tests/samples/sample-6.14.00-{0}.root".format(compression))["sample"].arrays()) + self.compare(uproot3.open("tests/samples/sample-6.14.00-{0}.root".format(compression))["sample"].arrays()) def test_6_16_00(self): for compression in "uncompressed", "zlib", "lzma", "lz4": - self.compare(uproot.open("tests/samples/sample-6.16.00-{0}.root".format(compression))["sample"].arrays()) + self.compare(uproot3.open("tests/samples/sample-6.16.00-{0}.root".format(compression))["sample"].arrays()) def test_6_18_00(self): for compression in "uncompressed", "zlib", "lzma", "lz4": - self.compare(uproot.open("tests/samples/sample-6.18.00-{0}.root".format(compression))["sample"].arrays()) + self.compare(uproot3.open("tests/samples/sample-6.18.00-{0}.root".format(compression))["sample"].arrays()) def test_6_20_04(self): for compression in "uncompressed", "zlib", "lzma", "lz4": - self.compare(uproot.open("tests/samples/sample-6.20.04-{0}.root".format(compression))["sample"].arrays()) + self.compare(uproot3.open("tests/samples/sample-6.20.04-{0}.root".format(compression))["sample"].arrays()) diff --git a/tests/test_write.py b/tests/test_write.py index 314d3f65..c51f9fb3 100644 --- a/tests/test_write.py +++ b/tests/test_write.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE from os.path import join @@ -8,17 +8,17 @@ import numpy import ctypes -import awkward +import awkward0 -import uproot -from uproot.write.objects.TTree import newtree, newbranch +import uproot3 +from uproot3.write.objects.TTree import newtree, newbranch ROOT = pytest.importorskip("ROOT") def test_strings(tmp_path): filename = join(str(tmp_path), "example.root") - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["hello"] = "world" f = ROOT.TFile.Open(filename) @@ -28,23 +28,23 @@ def test_strings(tmp_path): def test_cycle(tmp_path): filename = join(str(tmp_path), "example.root") - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["hello"] = "world" - f["hello"] = "uproot" + f["hello"] = "uproot3" f = ROOT.TFile.Open(filename) assert str(f.Get("hello;1")) == "world" - assert str(f.Get("hello;2")) == "uproot" + assert str(f.Get("hello;2")) == "uproot3" f.Close() def test_zlib(tmp_path): filename = join(str(tmp_path), "example.root") - with uproot.recreate(filename, 
compression=uproot.ZLIB(1)) as f: + with uproot3.recreate(filename, compression=uproot3.ZLIB(1)) as f: f["hello"] = "a"*2000 f = ROOT.TFile.Open(filename) - assert f.GetCompressionAlgorithm() == uproot.const.kZLIB + assert f.GetCompressionAlgorithm() == uproot3.const.kZLIB assert f.GetCompressionLevel() == 1 assert str(f.Get("hello")) == "a"*2000 f.Close() @@ -52,18 +52,18 @@ def test_zlib(tmp_path): def test_compresschange(tmp_path): filename = join(str(tmp_path), "example.root") - with uproot.recreate(filename, compression=uproot.ZLIB(2)) as f: - f.compression = uproot.ZLIB(3) + with uproot3.recreate(filename, compression=uproot3.ZLIB(2)) as f: + f.compression = uproot3.ZLIB(3) f["hello"] = "a"*2000 f = ROOT.TFile.Open(filename) - assert f.GetCompressionAlgorithm() == uproot.const.kZLIB + assert f.GetCompressionAlgorithm() == uproot3.const.kZLIB assert f.GetCompressionLevel() == 3 def test_nocompress(tmp_path): filename = join(str(tmp_path), "example.root") - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["hello"] = "a"*2000 f = ROOT.TFile.Open(filename) @@ -74,7 +74,7 @@ def test_nocompress(tmp_path): def test_compress_small_data(tmp_path): filename = join(str(tmp_path), "example.root") - with uproot.recreate(filename, compression=uproot.ZLIB(4)) as f: + with uproot3.recreate(filename, compression=uproot3.ZLIB(4)) as f: f["hello"] = "a" f = ROOT.TFile.Open(filename) @@ -84,11 +84,11 @@ def test_compress_small_data(tmp_path): def test_lzma(tmp_path): filename = join(str(tmp_path), "example.root") - with uproot.recreate(filename, compression=uproot.LZMA(1)) as f: + with uproot3.recreate(filename, compression=uproot3.LZMA(1)) as f: f["hello"] = "a"*2000 f = ROOT.TFile.Open(filename) - assert f.GetCompressionAlgorithm() == uproot.const.kLZMA + assert f.GetCompressionAlgorithm() == uproot3.const.kLZMA assert f.GetCompressionLevel() == 1 assert str(f.Get("hello")) == "a"*2000 f.Close() @@ -96,22 +96,22 @@ def test_lzma(tmp_path): def test_lz4_leveldown(tmp_path): filename = join(str(tmp_path), "example.root") - with uproot.recreate(filename, compression=uproot.LZ4(5)) as f: + with uproot3.recreate(filename, compression=uproot3.LZ4(5)) as f: f["hello"] = "a"*2000 f = ROOT.TFile.Open(filename) - assert (f.GetCompressionAlgorithm()) == uproot.const.kLZ4 + assert (f.GetCompressionAlgorithm()) == uproot3.const.kLZ4 assert str(f.Get("hello")) == "a"*2000 f.Close() def test_lz4_levelup(tmp_path): filename = join(str(tmp_path), "example.root") - with uproot.recreate(filename, compression=uproot.LZ4(5)) as f: + with uproot3.recreate(filename, compression=uproot3.LZ4(5)) as f: f["hello"] = "a"*2000 f = ROOT.TFile.Open(filename) - assert (f.GetCompressionAlgorithm()) == uproot.const.kLZ4 + assert (f.GetCompressionAlgorithm()) == uproot3.const.kLZ4 assert (f.GetCompressionLevel()) == 5 assert str(f.Get("hello")) == "a"*2000 f.Close() @@ -128,9 +128,9 @@ def test_th1(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["test"] = hist f = ROOT.TFile.Open(filename) @@ -146,7 +146,7 @@ def test_th1(tmp_path): assert h.GetMean() == 1.5714285714285714 assert h.GetRMS() == 0.4948716593053938 -def test_th1_uproot(tmp_path): +def test_th1_uproot3(tmp_path): filename = join(str(tmp_path), "example.root") testfile = join(str(tmp_path), "test.root") @@ -158,12 +158,12 @@ def 
test_th1_uproot(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["test"] = hist - assert "TH1" in uproot.open(filename)["test"]._classname.decode("utf-8") + assert "TH1" in uproot3.open(filename)["test"]._classname.decode("utf-8") def test_th1_varbin(tmp_path): filename = join(str(tmp_path), "example.root") @@ -176,9 +176,9 @@ def test_th1_varbin(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["test"] = hist f = ROOT.TFile.Open(filename) @@ -201,9 +201,9 @@ def test_compressed_th1(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=uproot.ZLIB(1)) as f: + with uproot3.recreate(filename, compression=uproot3.ZLIB(1)) as f: f["test"] = hist f = ROOT.TFile.Open(filename) @@ -227,9 +227,9 @@ def test_th2(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["test"] = hist f = ROOT.TFile.Open(filename) @@ -247,7 +247,7 @@ def test_th2(tmp_path): assert h.GetNbinsX() == 5 assert h.GetNbinsY() == 6 -def test_th2_uproot(tmp_path): +def test_th2_uproot3(tmp_path): filename = join(str(tmp_path), "example.root") testfile = join(str(tmp_path), "test.root") @@ -259,12 +259,12 @@ def test_th2_uproot(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["test"] = hist - assert "TH2" in uproot.open(filename)["test"]._classname.decode("utf-8") + assert "TH2" in uproot3.open(filename)["test"]._classname.decode("utf-8") def test_th2_varbin(tmp_path): filename = join(str(tmp_path), "example.root") @@ -278,9 +278,9 @@ def test_th2_varbin(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["test"] = hist f = ROOT.TFile.Open(filename) @@ -300,9 +300,9 @@ def test_compressed_th2(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=uproot.ZLIB(1)) as f: + with uproot3.recreate(filename, compression=uproot3.ZLIB(1)) as f: f["test"] = hist f = ROOT.TFile.Open(filename) @@ -322,9 +322,9 @@ def test_th3(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["test"] = hist f = ROOT.TFile.Open(filename) @@ -344,7 +344,7 @@ def test_th3(tmp_path): assert h.GetBinContent(x, y, z) == bincontents[count] count += 1 -def test_th3_uproot(tmp_path): +def test_th3_uproot3(tmp_path): filename = join(str(tmp_path), "example.root") testfile = join(str(tmp_path), "test.root") @@ -356,12 +356,12 @@ def test_th3_uproot(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with 
uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["test"] = hist - assert "TH3" in uproot.open(filename)["test"]._classname.decode("utf-8") + assert "TH3" in uproot3.open(filename)["test"]._classname.decode("utf-8") def test_th3_varbin(tmp_path): filename = join(str(tmp_path), "example.root") @@ -376,9 +376,9 @@ def test_th3_varbin(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["test"] = hist f = ROOT.TFile.Open(filename) @@ -400,9 +400,9 @@ def test_compressed_th3(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=uproot.ZLIB(1)) as f: + with uproot3.recreate(filename, compression=uproot3.ZLIB(1)) as f: f["test"] = hist f = ROOT.TFile.Open(filename) @@ -423,9 +423,9 @@ def test_tprofile(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["test"] = hist f = ROOT.TFile.Open(filename) @@ -440,7 +440,7 @@ def test_tprofile(tmp_path): assert h.GetBinContent(x) == bincontents[count] count += 1 -def test_tprofile_uproot(tmp_path): +def test_tprofile_uproot3(tmp_path): filename = join(str(tmp_path), "example.root") testfile = join(str(tmp_path), "test.root") @@ -452,12 +452,12 @@ def test_tprofile_uproot(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["test"] = hist - assert uproot.open(filename)["test"]._classname == b"TProfile" + assert uproot3.open(filename)["test"]._classname == b"TProfile" def test_compressed_tprofile(tmp_path): filename = join(str(tmp_path), "example.root") @@ -471,9 +471,9 @@ def test_compressed_tprofile(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=uproot.LZMA(5)) as f: + with uproot3.recreate(filename, compression=uproot3.LZMA(5)) as f: f["test"] = hist f = ROOT.TFile.Open(filename) @@ -500,9 +500,9 @@ def test_tprofile2d(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["test"] = hist f = ROOT.TFile.Open(filename) @@ -520,7 +520,7 @@ def test_tprofile2d(tmp_path): assert h.GetNbinsX() == 5 assert h.GetNbinsY() == 6 -def test_tprofile2d_uproot(tmp_path): +def test_tprofile2d_uproot3(tmp_path): filename = join(str(tmp_path), "example.root") testfile = join(str(tmp_path), "test.root") @@ -532,12 +532,12 @@ def test_tprofile2d_uproot(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["test"] = hist - assert uproot.open(filename)["test"]._classname == b"TProfile2D" + assert uproot3.open(filename)["test"]._classname == b"TProfile2D" def test_compressed_tprofile2d(tmp_path): filename = join(str(tmp_path), "example.root") @@ -551,9 +551,9 @@ def 
test_compressed_tprofile2d(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=uproot.LZMA(5)) as f: + with uproot3.recreate(filename, compression=uproot3.LZMA(5)) as f: f["test"] = hist f = ROOT.TFile.Open(filename) @@ -583,9 +583,9 @@ def test_tprofile3d(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["test"] = hist f = ROOT.TFile.Open(filename) @@ -605,7 +605,7 @@ def test_tprofile3d(tmp_path): assert h.GetNbinsY() == 6 assert h.GetNbinsZ() == 8 -def test_tprofile3d_uproot(tmp_path): +def test_tprofile3d_uproot3(tmp_path): filename = join(str(tmp_path), "example.root") testfile = join(str(tmp_path), "test.root") @@ -617,12 +617,12 @@ def test_tprofile3d_uproot(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["test"] = hist - assert uproot.open(filename)["test"]._classname == b"TProfile3D" + assert uproot3.open(filename)["test"]._classname == b"TProfile3D" def test_compressed_tprofile3d(tmp_path): filename = join(str(tmp_path), "example.root") @@ -636,9 +636,9 @@ def test_compressed_tprofile3d(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=uproot.LZMA(6)) as f: + with uproot3.recreate(filename, compression=uproot3.LZMA(6)) as f: f["test"] = hist f = ROOT.TFile.Open(filename) @@ -661,7 +661,7 @@ def test_compressed_tprofile3d(tmp_path): def test_dir_allocation(tmp_path): filename = join(str(tmp_path), "example.root") - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: for i in range(1, 101): f["a"*i] = "a"*i @@ -680,9 +680,9 @@ def test_taxis_axisbins(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["test"] = hist f = ROOT.TFile.Open(filename) @@ -700,9 +700,9 @@ def test_taxis_time(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["test"] = hist f = ROOT.TFile.Open(filename) @@ -720,16 +720,16 @@ def test_th1_binlabel1(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["test"] = hist f = ROOT.TFile.Open(filename) h = f.Get("test") assert h.GetXaxis().GetBinLabel(1) == "Hi" -def test_th1_binlabel1_uproot(tmp_path): +def test_th1_binlabel1_uproot3(tmp_path): filename = join(str(tmp_path), "example.root") testfile = join(str(tmp_path), "test.root") @@ -740,12 +740,12 @@ def test_th1_binlabel1_uproot(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["test"] = hist - f = uproot.open(filename) + f = 
uproot3.open(filename) h = f["test"] assert h._fXaxis._fLabels[0] == b"Hi" @@ -762,9 +762,9 @@ def test_th1_binlabel2(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["test"] = hist f = ROOT.TFile.Open(filename) @@ -772,7 +772,7 @@ def test_th1_binlabel2(tmp_path): assert h.GetXaxis().GetBinLabel(1) == "Hi" assert h.GetXaxis().GetBinLabel(2) == "Hello" -def test_th1_binlabel2_uproot(tmp_path): +def test_th1_binlabel2_uproot3(tmp_path): filename = join(str(tmp_path), "example.root") testfile = join(str(tmp_path), "test.root") @@ -785,12 +785,12 @@ def test_th1_binlabel2_uproot(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["test"] = hist - f = uproot.open(filename) + f = uproot3.open(filename) h = f["test"] assert h._fXaxis._fLabels[0] == b"Hi" assert h._fXaxis._fLabels[1] == b"Hello" @@ -807,9 +807,9 @@ def test_th2_binlabel1(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["test"] = hist f = ROOT.TFile.Open(filename) @@ -830,9 +830,9 @@ def test_th3_binlabel1(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["test"] = hist f = ROOT.TFile.Open(filename) @@ -852,9 +852,9 @@ def test_objany_multihist(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["test"] = hist f["test1"] = hist @@ -864,7 +864,7 @@ def test_objany_multihist(tmp_path): assert h.GetXaxis().GetBinLabel(1) == "Hi" assert h1.GetXaxis().GetBinLabel(1) == "Hi" -def test_objany_multihist_uproot(tmp_path): +def test_objany_multihist_uproot3(tmp_path): filename = join(str(tmp_path), "example.root") testfile = join(str(tmp_path), "test.root") @@ -875,13 +875,13 @@ def test_objany_multihist_uproot(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["test"] = hist f["test1"] = hist - f = uproot.open(filename) + f = uproot3.open(filename) h = f["test"] h1 = f["test1"] assert h._fXaxis._fLabels[0] == b"Hi" @@ -891,7 +891,7 @@ def test_ttree(tmp_path): filename = join(str(tmp_path), "example.root") tree = newtree() - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree f = ROOT.TFile.Open(filename) @@ -900,7 +900,7 @@ def test_ttree(tmp_path): def test_tree_diff_interface(tmp_path): filename = join(str(tmp_path), "example.root") - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f.newtree("t") f = ROOT.TFile.Open(filename) @@ -910,7 +910,7 @@ def test_ttree_multiple(tmp_path): filename = join(str(tmp_path), "example.root") tree = newtree() - with uproot.recreate(filename, 
compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: for i in range(100): f["t"*(i+1)] = tree @@ -918,25 +918,25 @@ def test_ttree_multiple(tmp_path): for i in range(100): assert f.GetKey("t"*(i+1)).GetClassName() == "TTree" -def test_ttree_uproot(tmp_path): +def test_ttree_uproot3(tmp_path): filename = join(str(tmp_path), "example.root") tree = newtree() - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree - f = uproot.open(filename) + f = uproot3.open(filename) assert f["t"]._classname == b"TTree" -def test_ttree_multiple_uproot(tmp_path): +def test_ttree_multiple_uproot3(tmp_path): filename = join(str(tmp_path), "example.root") tree = newtree() - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: for i in range(100): f["t"*(i+1)] = tree - f = uproot.open(filename) + f = uproot3.open(filename) for i in range(100): assert f["t"*(i+1)]._classname == b"TTree" @@ -946,7 +946,7 @@ def test_ttree_empty_tbranch(tmp_path): b = newbranch("int32") branchdict = {"intBranch": b} tree = newtree(branchdict) - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree f = ROOT.TFile.Open(filename) @@ -958,7 +958,7 @@ def test_ttree_empty_tbranch_multitree(tmp_path): b = newbranch("int32") branchdict = {"intBranch": b} tree = newtree(branchdict) - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: for i in range(10): f["t" * (i + 1)] = tree @@ -966,29 +966,29 @@ def test_ttree_empty_tbranch_multitree(tmp_path): for i in range(10): assert f.Get("t" * (i + 1)).GetBranch("intBranch").GetName() == "intBranch" -def test_ttree_empty_tbranch_uproot(tmp_path): +def test_ttree_empty_tbranch_uproot3(tmp_path): filename = join(str(tmp_path), "example.root") b = newbranch("int32") branchdict = {"intBranch": b} tree = newtree(branchdict) - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree - f = uproot.open(filename) + f = uproot3.open(filename) assert f["t"]["intBranch"]._classname == b"TBranch" -def test_ttree_empty_tbranch_multitree_uproot(tmp_path): +def test_ttree_empty_tbranch_multitree_uproot3(tmp_path): filename = join(str(tmp_path), "example.root") b = newbranch("int32") branchdict = {"intBranch": b} tree = newtree(branchdict) - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: for i in range(10): f["t"*(i+1)] = tree - f = uproot.open(filename) + f = uproot3.open(filename) for i in range(10): assert f["t" * (i + 1)]["intBranch"]._classname == b"TBranch" @@ -998,23 +998,23 @@ def test_ttree_empty_tbranch_multiple(tmp_path): b = newbranch("int32") branchdict = {"intBranch": b, "testbranch": b} tree = newtree(branchdict) - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree f = ROOT.TFile.Open(filename) assert f.Get("t").GetBranch("intBranch").GetName() == "intBranch" assert f.Get("t").GetBranch("testbranch").GetName() == "testbranch" -def test_ttree_empty_tbranch_multiple_uproot(tmp_path): +def test_ttree_empty_tbranch_multiple_uproot3(tmp_path): filename = join(str(tmp_path), "example.root") b = newbranch("int32") branchdict = {"intBranch": b, "testbranch": b} tree = newtree(branchdict) - with 
uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree - f = uproot.open(filename) + f = uproot3.open(filename) assert f["t"]["intBranch"]._classname == b"TBranch" assert f["t"]["testbranch"]._classname == b"TBranch" @@ -1024,7 +1024,7 @@ def test_ttree_empty_tbranch_diff_type(tmp_path): b = newbranch("int64") branchdict = {"intBranch": b} tree = newtree(branchdict) - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree f = ROOT.TFile.Open(filename) @@ -1036,7 +1036,7 @@ def test_ttree_empty_tbranch_title(tmp_path): b = newbranch("int32", title="hi") branchdict = {"intBranch": b} tree = newtree(branchdict) - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree f = ROOT.TFile.Open(filename) @@ -1054,9 +1054,9 @@ def test_hist_rewrite_root(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["test"] = hist f = ROOT.TFile.Open(filename, "UPDATE") @@ -1073,7 +1073,7 @@ def test_empty_ttree_rewrite_root(tmp_path): b = newbranch("int32") branchdict = {"intBranch": b} tree = newtree(branchdict) - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree f = ROOT.TFile.Open(filename, "UPDATE") @@ -1087,7 +1087,7 @@ def test_empty_ttree_rewrite_root(tmp_path): def test_string_rewrite_root(tmp_path): filename = join(str(tmp_path), "example.root") - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["a"*5] = "a"*5 f = ROOT.TFile.Open(filename, "UPDATE") @@ -1101,7 +1101,7 @@ def test_string_rewrite_root(tmp_path): def test_string_rewrite_root_compress(tmp_path): filename = join(str(tmp_path), "example.root") - with uproot.recreate(filename, compression=uproot.ZLIB(4)) as f: + with uproot3.recreate(filename, compression=uproot3.ZLIB(4)) as f: f["a"*5] = "a"*5 f = ROOT.TFile.Open(filename, "UPDATE") @@ -1117,7 +1117,7 @@ def test_branch_alt_interface(tmp_path): branchdict = {"intBranch": "int"} tree = newtree(branchdict) - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree f = ROOT.TFile.Open(filename) @@ -1130,7 +1130,7 @@ def test_branch_basket_one(tmp_path): branchdict = {"intBranch": b} tree = newtree(branchdict) a = numpy.array([1, 2, 3, 4, 5], dtype=">i4") - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree f["t"]["intBranch"].newbasket(a) @@ -1140,18 +1140,18 @@ def test_branch_basket_one(tmp_path): for i in range(5): assert a[i] == treedata[i] -def test_branch_basket_one_uproot(tmp_path): +def test_branch_basket_one_uproot3(tmp_path): filename = join(str(tmp_path), "example.root") b = newbranch("int32") branchdict = {"intBranch": b} tree = newtree(branchdict) a = numpy.array([1, 2, 3, 4, 5]).astype("int32").newbyteorder(">") - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree f["t"]["intBranch"].newbasket(a) - f = uproot.open(filename) + f = uproot3.open(filename) tree = f["t"] treedata = tree.array("intBranch") for i in range(5): @@ -1164,7 +1164,7 @@ def 
test_branch_basket_one_rewrite_root(tmp_path): branchdict = {"intBranch": b} tree = newtree(branchdict) a = numpy.array([1, 2, 3, 4, 5]).astype("int32").newbyteorder(">") - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree f["t"]["intBranch"].newbasket(a) @@ -1185,7 +1185,7 @@ def test_branch_basket_one_more_data(tmp_path): a = [] for i in range(0, 100): a.append(i) - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree f["t"]["intBranch"].newbasket(a) @@ -1202,7 +1202,7 @@ def test_branch_basket_one_less_data(tmp_path): branchdict = {"intBranch": b} tree = newtree(branchdict) a = numpy.array([1, 2, 3, 4], dtype=">i4") - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree f["t"]["intBranch"].newbasket(a) @@ -1219,7 +1219,7 @@ def test_branch_basket_one_tleafb(tmp_path): branchdict = {"int8Branch": b} tree = newtree(branchdict) a = numpy.array([1, 2, 3, 4, 5], dtype="int8") - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree f["t"]["int8Branch"].newbasket(a) @@ -1236,7 +1236,7 @@ def test_branch_basket_one_tleaff(tmp_path): branchdict = {"floatBranch": b} tree = newtree(branchdict) a = numpy.array([1, 2, 3, 4, 5], dtype="float32") - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree f["t"]["floatBranch"].newbasket(a) @@ -1253,7 +1253,7 @@ def test_branch_basket_one_tleafd(tmp_path): branchdict = {"float8Branch": b} tree = newtree(branchdict) a = numpy.array([1, 2, 3, 4, 5], dtype=">f8") - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree f["t"]["float8Branch"].newbasket(a) @@ -1270,7 +1270,7 @@ def test_branch_basket_one_tleafl(tmp_path): branchdict = {"intBranch": b} tree = newtree(branchdict) a = numpy.array([1, 2, 3, 4, 5], dtype=">i8") - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree f["t"]["intBranch"].newbasket(a) @@ -1287,7 +1287,7 @@ def test_branch_basket_one_tleafO(tmp_path): branchdict = {"booleanBranch": b} tree = newtree(branchdict) a = numpy.array([1, 0, 0, 0, 1], dtype=">?") - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree f["t"]["booleanBranch"].newbasket(a) @@ -1316,7 +1316,7 @@ def test_branch_basket_one_tleafs(tmp_path): branchdict = {"int2Branch": b} tree = newtree(branchdict) a = numpy.array([1, 2, 3, 4, 5], dtype=">i2") - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree f["t"]["int2Branch"].newbasket(a) @@ -1334,7 +1334,7 @@ def test_one_branch_multi_basket(tmp_path): tree = newtree(branchdict) a = numpy.array([1, 2, 3, 4, 5], dtype=">i4") b = numpy.array([6, 7, 8, 9, 10], dtype=">i4") - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree f["t"]["intBranch"].newbasket(a) f["t"]["intBranch"].newbasket(b) @@ -1355,7 +1355,7 @@ def test_multi_branch_one_basket_same_type(tmp_path): tree = newtree(branchdict) a = numpy.array([1, 2, 3, 4, 5], dtype=">i4") b = numpy.array([6, 7, 8, 9, 10], dtype=">i4") - with 
uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree f["t"]["intBranch"].newbasket(a) f["t"]["intBranch2"].newbasket(b) @@ -1368,7 +1368,7 @@ def test_multi_branch_one_basket_same_type(tmp_path): assert a[i] == intBranchdata[i] assert b[i] == int8Branchdata[i] -def test_multi_branch_one_basket_same_type_uproot(tmp_path): +def test_multi_branch_one_basket_same_type_uproot3(tmp_path): filename = join(str(tmp_path), "example.root") b1 = newbranch("int32") @@ -1377,12 +1377,12 @@ def test_multi_branch_one_basket_same_type_uproot(tmp_path): tree = newtree(branchdict) a = numpy.array([1, 2, 3, 4, 5], dtype=">i4") b = numpy.array([6, 7, 8, 9, 10], dtype=">i4") - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree f["t"]["intBranch"].newbasket(a) f["t"]["intBranch2"].newbasket(b) - f = uproot.open(filename) + f = uproot3.open(filename) tree = f["t"] intBranchdata = tree.array("intBranch") int8Branchdata = tree.array("intBranch2") @@ -1399,7 +1399,7 @@ def test_multi_branch_one_basket_diff_type(tmp_path): tree = newtree(branchdict) a = numpy.array([1, 2, 3, 4, 5], dtype=">i4") b = numpy.array([6, 7, 8, 9, 10], dtype=">i8") - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree f["t"]["intBranch"].newbasket(a) f["t"]["int8Branch"].newbasket(b) @@ -1423,7 +1423,7 @@ def test_multi_branch_multi_basket_diff_type(tmp_path): b = numpy.array([6, 7, 8, 9, 10], dtype=">i4") c = numpy.array([6, 7, 8, 9, 10], dtype=">i8") d = numpy.array([1, 2, 3, 4, 5], dtype=">i8") - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree f["t"]["intBranch"].newbasket(a) f["t"]["intBranch"].newbasket(b) @@ -1440,7 +1440,7 @@ def test_multi_branch_multi_basket_diff_type(tmp_path): assert c[i] == int8Branchdata[i] assert d[i] == int8Branchdata[i+5] -def test_multi_tree_one_branch_multi_basket_uproot(tmp_path): +def test_multi_tree_one_branch_multi_basket_uproot3(tmp_path): filename = join(str(tmp_path), "example.root") b = newbranch("int32") @@ -1450,7 +1450,7 @@ def test_multi_tree_one_branch_multi_basket_uproot(tmp_path): b = numpy.array([6, 7, 8, 9, 10], dtype=">i4") c = numpy.array([1, 2, 3, 4, 5, 6, 7, 8, 9], dtype=">i4") d = numpy.array([11, 12, 13, 14, 15, 16, 17, 18, 19], dtype=">i4") - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["tree"] = tree f["tree"]["intBranch"].newbasket(c) f["tree"]["intBranch"].newbasket(d) @@ -1458,7 +1458,7 @@ def test_multi_tree_one_branch_multi_basket_uproot(tmp_path): f["t"]["intBranch"].newbasket(a) f["t"]["intBranch"].newbasket(b) - f = uproot.open(filename) + f = uproot3.open(filename) treedata1 = f["t"].array("intBranch") treedata2 = f["tree"].array("intBranch") for i in range(5): @@ -1478,7 +1478,7 @@ def test_multi_tree_one_branch_multi_basket(tmp_path): b = numpy.array([6, 7, 8, 9, 10], dtype=">i4") c = numpy.array([1, 2, 3, 4, 5, 6, 7, 8, 9], dtype=">i4") d = numpy.array([11, 12, 13, 14, 15, 16, 17, 18, 19], dtype=">i4") - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["tree"] = tree f["tree"]["intBranch"].newbasket(c) f["tree"]["intBranch"].newbasket(d) @@ -1505,12 +1505,12 @@ def test_tree_compression_empty(tmp_path): b = newbranch(">i4") branchdict = 
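The tests above exercise the renamed writing entry points: uproot3.recreate to create a file, uproot3.newtree and uproot3.newbranch to declare a TTree, and newbasket to append data one basket at a time. A minimal sketch of that round trip, with "example.root" as a placeholder path:

    import numpy
    import uproot3

    # Declare a tree with a single 32-bit integer branch.
    tree = uproot3.newtree({"intBranch": uproot3.newbranch("int32")})

    # Write one basket, then read it back with uproot3 itself.
    data = numpy.array([1, 2, 3, 4, 5], dtype=">i4")
    with uproot3.recreate("example.root", compression=None) as f:
        f["t"] = tree
        f["t"]["intBranch"].newbasket(data)

    readback = uproot3.open("example.root")["t"].array("intBranch")
    assert (readback == data).all()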
{"intBranch": b} tree = newtree(branchdict) - with uproot.recreate(filename, compression=uproot.ZLIB(4)) as f: + with uproot3.recreate(filename, compression=uproot3.ZLIB(4)) as f: f["t"] = tree f = ROOT.TFile.Open(filename) assert f.Get("t").GetBranch("intBranch").GetName() == "intBranch" - assert f.GetCompressionAlgorithm() == uproot.const.kZLIB + assert f.GetCompressionAlgorithm() == uproot3.const.kZLIB assert f.GetCompressionLevel() == 4 # Not actually compressed @@ -1521,7 +1521,7 @@ def test_tree_compression(tmp_path): branchdict = {"intBranch": b} tree = newtree(branchdict) a = numpy.array([1, 2, 3, 4, 5], dtype=">i4") - with uproot.recreate(filename, compression=uproot.ZLIB(4)) as f: + with uproot3.recreate(filename, compression=uproot3.ZLIB(4)) as f: f["t"] = tree f["t"]["intBranch"].newbasket(a) @@ -1534,11 +1534,11 @@ def test_tree_compression(tmp_path): def test_tree_branch_compression_only(tmp_path): filename = join(str(tmp_path), "example.root") - b = newbranch(">i4", compression=uproot.ZLIB(4)) + b = newbranch(">i4", compression=uproot3.ZLIB(4)) branchdict = {"intBranch": b} tree = newtree(branchdict) a = numpy.array([1, 2, 3, 4, 5], dtype=">i4") - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree f["t"]["intBranch"].newbasket(a) @@ -1551,11 +1551,11 @@ def test_tree_branch_compression_only(tmp_path): def test_tree_branch_compression(tmp_path): filename = join(str(tmp_path), "example.root") - b = newbranch(">i4", compression=uproot.ZLIB(4)) + b = newbranch(">i4", compression=uproot3.ZLIB(4)) branchdict = {"intBranch": b} tree = newtree(branchdict) a = numpy.array([1, 2, 3, 4, 5], dtype=">i4") - with uproot.recreate(filename) as f: + with uproot3.recreate(filename) as f: f["t"] = tree f["t"]["intBranch"].newbasket(a) @@ -1575,7 +1575,7 @@ def test_branch_compression_interface1(tmp_path): branchdict = {"intBranch": b} tree = newtree(branchdict) a = numpy.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15], dtype=">i8") - with uproot.recreate(filename, compression=uproot.ZLIB(4)) as f: + with uproot3.recreate(filename, compression=uproot3.ZLIB(4)) as f: f["t"] = tree f["t"]["intBranch"].newbasket(a) @@ -1595,7 +1595,7 @@ def test_branch_compression_interface1_diff_type(tmp_path): branchdict = {"intBranch": b} tree = newtree(branchdict) a = numpy.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15], dtype=">i4") - with uproot.recreate(filename, compression=uproot.ZLIB(4)) as f: + with uproot3.recreate(filename, compression=uproot3.ZLIB(4)) as f: f["t"] = tree f["t"]["intBranch"].newbasket(a) @@ -1611,11 +1611,11 @@ def test_branch_compression_interface1_diff_type(tmp_path): def test_branch_compression_interface2(tmp_path): filename = join(str(tmp_path), "example.root") - b = newbranch(">i8", compression=uproot.ZLIB(4)) + b = newbranch(">i8", compression=uproot3.ZLIB(4)) branchdict = {"intBranch": b} tree = newtree(branchdict) a = numpy.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15], dtype=">i8") - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree f["t"]["intBranch"].newbasket(a) @@ -1633,9 +1633,9 @@ def test_branch_compression_interface3(tmp_path): b = newbranch(">i8") branchdict = {"intBranch": b} - tree = newtree(branchdict, compression=uproot.ZLIB(4)) + tree = newtree(branchdict, compression=uproot3.ZLIB(4)) a = numpy.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15], dtype=">i8") - with 
uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree f["t"]["intBranch"].newbasket(a) @@ -1648,19 +1648,19 @@ def test_branch_compression_interface3(tmp_path): assert branch.GetCompressionAlgorithm() == 1 assert branch.GetCompressionLevel() == 4 -def test_many_basket_uproot(tmp_path): +def test_many_basket_uproot3(tmp_path): filename = join(str(tmp_path), "example.root") b = newbranch(">i4") branchdict = {"intBranch": b} tree = newtree(branchdict) a = numpy.array([1], dtype=">i4") - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree for i in range(101): f["t"]["intBranch"].newbasket(a) - f = uproot.open(filename) + f = uproot3.open(filename) tree = f["t"] treedata = tree.array("intBranch") for i in range(101): @@ -1673,7 +1673,7 @@ def test_many_basket(tmp_path): branchdict = {"intBranch": b} tree = newtree(branchdict) a = numpy.array([1], dtype=">i4") - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree for i in range(101): f["t"]["intBranch"].newbasket(a) @@ -1687,11 +1687,11 @@ def test_many_basket(tmp_path): def test_tree_move_compress(tmp_path): filename = join(str(tmp_path), "example.root") - b = newbranch(">i4", compression=uproot.ZLIB(4)) + b = newbranch(">i4", compression=uproot3.ZLIB(4)) branchdict = {"intBranch": b} tree = newtree(branchdict) a = numpy.array([1], dtype=">i4") - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree for i in range(101): f["t"]["intBranch"].newbasket(a) @@ -1708,16 +1708,16 @@ def test_tree_move_compress(tmp_path): def test_tree_renames(tmp_path): filename = join(str(tmp_path), "example.root") - b = uproot.newbranch(">i4") + b = uproot3.newbranch(">i4") branchdict = {"intBranch": b} - tree = uproot.newtree(branchdict) + tree = uproot3.newtree(branchdict) a = numpy.array([1], dtype=">i4") - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree for i in range(19): f["t"]["intBranch"].newbasket(a) - f = uproot.open(filename) + f = uproot3.open(filename) tree = f["t"] treedata = tree.array("intBranch") for i in range(19): @@ -1726,10 +1726,10 @@ def test_tree_renames(tmp_path): def test_ttree_extend(tmp_path): filename = join(str(tmp_path), "example.root") - b = uproot.newbranch(">i4") + b = uproot3.newbranch(">i4") branchdict = {"intBranch": b, "intBranch2": b} - tree = uproot.newtree(branchdict) - with uproot.recreate(filename) as f: + tree = uproot3.newtree(branchdict) + with uproot3.recreate(filename) as f: f["t"] = tree basket_add = {"intBranch": numpy.array([1, 2, 3, 4, 5]), "intBranch2": numpy.array([6, 7, 8, 9, 10])} f["t"].extend(basket_add) @@ -1748,11 +1748,11 @@ def test_issue340(tmp_path): filename = join(str(tmp_path), "example.root") a = [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0] - with uproot.recreate(filename) as f: - f["t"] = uproot.newtree({"normal": numpy.float64}) + with uproot3.recreate(filename) as f: + f["t"] = uproot3.newtree({"normal": numpy.float64}) f["t"].extend({"normal": a}) - t = uproot.open(filename)["t"] + t = uproot3.open(filename)["t"] for i in range(10): assert t["normal"].basket(0)[i] == a[i] @@ -1764,7 +1764,7 @@ def test_rdf(tmp_path): tree = newtree(branchdict) a = numpy.array([1, 2, 3, 4, 5], dtype=">i4") b = numpy.array([11, 12, 13, 14, 15], 
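test_ttree_extend and test_issue340 above use the higher-level extend interface: instead of calling newbasket per branch, a dict of arrays fills one basket for every branch at once. A short sketch of the same pattern, with "extend.root" as a placeholder path:

    import numpy
    import uproot3

    with uproot3.recreate("extend.root") as f:
        f["t"] = uproot3.newtree({"x": numpy.float64, "y": numpy.int32})
        # Each extend() call supplies one basket's worth of data per branch.
        f["t"].extend({"x": numpy.array([1.0, 2.0, 3.0]),
                       "y": numpy.array([10, 20, 30], dtype=numpy.int32)})
        f["t"].extend({"x": numpy.array([4.0, 5.0]),
                       "y": numpy.array([40, 50], dtype=numpy.int32)})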
dtype=">i4") - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f["t"] = tree f["t"]["intBranch"].newbasket(a) f["t"]["intBranch2"].newbasket(b) @@ -1779,9 +1779,9 @@ def test_rdf(tmp_path): def test_tree_cycle(tmp_path): filename = join(str(tmp_path), "example.root") - with uproot.recreate(filename) as f: - f["t;1"] = uproot.newtree({"branch": "int32"}) - f["t;2"] = uproot.newtree({"branch": "int32"}) + with uproot3.recreate(filename) as f: + f["t;1"] = uproot3.newtree({"branch": "int32"}) + f["t;2"] = uproot3.newtree({"branch": "int32"}) f["t;1"].extend({"branch": numpy.array([1, 2, 3, 4, 5])}) f["t"].extend({"branch": numpy.array([6, 7, 8, 9, 10])}) @@ -1799,7 +1799,7 @@ def test_tree_cycle(tmp_path): def test_large_compress(tmp_path): filename = join(str(tmp_path), "example.root") - with uproot.recreate(filename, uproot.ZLIB(5)) as f: + with uproot3.recreate(filename, uproot3.ZLIB(5)) as f: f["a"] = "a" * ((2 ** 24) + 2000) f["b"] = "b" * ((2 ** 24) + 10) @@ -1808,14 +1808,14 @@ def test_large_compress(tmp_path): assert str(f.Get("b")) == "b" * ((2 ** 24) + 10) f.Close() -def test_large_compress_uproot(tmp_path): +def test_large_compress_uproot3(tmp_path): filename = join(str(tmp_path), "example.root") - with uproot.recreate(filename, uproot.ZLIB(5)) as f: + with uproot3.recreate(filename, uproot3.ZLIB(5)) as f: f["a"] = "a"*((2**24) + 2000) f["b"] = "b"*((2**24) + 10) - f = uproot.open(filename) + f = uproot3.open(filename) assert f["a"] == ("a"*((2**24) + 2000)).encode("utf-8") assert f["b"] == ("b"*((2**24) + 10)).encode("utf-8") @@ -1825,8 +1825,8 @@ def test_tree_twodim(tmp_path): a = numpy.array([[0, 1, 2, 3], [3, 4, 5, 6]]) - with uproot.recreate(filename, compression=None) as f: - f["t"] = uproot.newtree({"branch": uproot.newbranch(numpy.dtype(">i4"), shape=a.shape)}) + with uproot3.recreate(filename, compression=None) as f: + f["t"] = uproot3.newtree({"branch": uproot3.newbranch(numpy.dtype(">i4"), shape=a.shape)}) f["t"].extend({"branch": a}) f = ROOT.TFile.Open(filename) @@ -1846,8 +1846,8 @@ def test_tree_threedim(tmp_path): [13, 14, 15, 16], [190, 191, 191, 192]]]) - with uproot.recreate(filename, compression=None) as f: - f["t"] = uproot.newtree({"branch": uproot.newbranch(numpy.dtype(">i4"), shape=a.shape)}) + with uproot3.recreate(filename, compression=None) as f: + f["t"] = uproot3.newtree({"branch": uproot3.newbranch(numpy.dtype(">i4"), shape=a.shape)}) f["t"].extend({"branch": a}) f = ROOT.TFile.Open(filename) @@ -1862,12 +1862,12 @@ def test_tree_threedim(tmp_path): def test_jagged_i4(tmp_path): filename = join(str(tmp_path), "example.root") - a = awkward.fromiter([[0], - [1, 2], - [10, 11, 12]]) + a = awkward0.fromiter([[0], + [1, 2], + [10, 11, 12]]) - with uproot.recreate(filename, compression=None) as f: - f["t"] = uproot.newtree({"branch": uproot.newbranch(numpy.dtype(">i4"), size="n")}) + with uproot3.recreate(filename, compression=None) as f: + f["t"] = uproot3.newtree({"branch": uproot3.newbranch(numpy.dtype(">i4"), size="n")}) f["t"].extend({"branch": a, "n": [1, 2, 3]}) f = ROOT.TFile.Open(filename) @@ -1875,18 +1875,18 @@ def test_jagged_i4(tmp_path): for i, event in enumerate(tree): assert(numpy.all([x for x in event.branch] == a[i])) -def test_jagged_uproot_i4(tmp_path): +def test_jagged_uproot3_i4(tmp_path): filename = join(str(tmp_path), "example.root") - a = awkward.fromiter([[0], - [1, 2], - [10, 11, 12]]) + a = awkward0.fromiter([[0], + [1, 2], + [10, 11, 12]]) - with 
uproot.recreate(filename, compression=None) as f: - f["t"] = uproot.newtree({"branch": uproot.newbranch(numpy.dtype(">i4"), size="n")}) + with uproot3.recreate(filename, compression=None) as f: + f["t"] = uproot3.newtree({"branch": uproot3.newbranch(numpy.dtype(">i4"), size="n")}) f["t"].extend({"branch": a, "n": [1, 2, 3]}) - f = uproot.open(filename) + f = uproot3.open(filename) array = f["t"].array(["branch"]) for i in range(len(array)): for j in range(len(array[i])): @@ -1896,12 +1896,12 @@ def test_jagged_uproot_i4(tmp_path): def test_jagged_i8(tmp_path): filename = join(str(tmp_path), "example.root") - a = awkward.fromiter([[0], - [1, 2], - [10, 11, 12]]) + a = awkward0.fromiter([[0], + [1, 2], + [10, 11, 12]]) - with uproot.recreate(filename, compression=None) as f: - f["t"] = uproot.newtree({"branch": uproot.newbranch(numpy.dtype(">i8"), size="n")}) + with uproot3.recreate(filename, compression=None) as f: + f["t"] = uproot3.newtree({"branch": uproot3.newbranch(numpy.dtype(">i8"), size="n")}) f["t"].extend({"branch": a, "n": [1, 2, 3]}) ROOT.gInterpreter.Declare(""" @@ -1928,18 +1928,18 @@ def test_jagged_i8(tmp_path): ROOT.assertint(flag, filename) assert(flag) -def test_jagged_uproot_i8(tmp_path): +def test_jagged_uproot3_i8(tmp_path): filename = join(str(tmp_path), "example.root") - a = awkward.fromiter([[0], - [1, 2], - [10, 11, 12]]) + a = awkward0.fromiter([[0], + [1, 2], + [10, 11, 12]]) - with uproot.recreate(filename, compression=None) as f: - f["t"] = uproot.newtree({"branch": uproot.newbranch(numpy.dtype(">i8"), size="n")}) + with uproot3.recreate(filename, compression=None) as f: + f["t"] = uproot3.newtree({"branch": uproot3.newbranch(numpy.dtype(">i8"), size="n")}) f["t"].extend({"branch": a, "n": [1, 2, 3]}) - f = uproot.open(filename) + f = uproot3.open(filename) array = f["t"].array(["branch"]) for i in range(len(array)): for j in range(len(array[i])): @@ -1948,12 +1948,12 @@ def test_jagged_uproot_i8(tmp_path): def test_jagged_f8(tmp_path): filename = join(str(tmp_path), "example.root") - a = awkward.fromiter([[0], - [1, 2], - [10, 11, 12]]) + a = awkward0.fromiter([[0], + [1, 2], + [10, 11, 12]]) - with uproot.recreate(filename, compression=None) as f: - f["t"] = uproot.newtree({"branch": uproot.newbranch(numpy.dtype(">f8"), size="n")}) + with uproot3.recreate(filename, compression=None) as f: + f["t"] = uproot3.newtree({"branch": uproot3.newbranch(numpy.dtype(">f8"), size="n")}) f["t"].extend({"branch": a, "n": [1, 2, 3]}) f = ROOT.TFile.Open(filename) @@ -1961,18 +1961,18 @@ def test_jagged_f8(tmp_path): for i, event in enumerate(tree): assert(numpy.all([x for x in event.branch] == a[i])) -def test_jagged_uproot_f8(tmp_path): +def test_jagged_uproot3_f8(tmp_path): filename = join(str(tmp_path), "example.root") - a = awkward.fromiter([[0], - [1, 2], - [10, 11, 12]]) + a = awkward0.fromiter([[0], + [1, 2], + [10, 11, 12]]) - with uproot.recreate(filename, compression=None) as f: - f["t"] = uproot.newtree({"branch": uproot.newbranch(numpy.dtype(">f8"), size="n")}) + with uproot3.recreate(filename, compression=None) as f: + f["t"] = uproot3.newtree({"branch": uproot3.newbranch(numpy.dtype(">f8"), size="n")}) f["t"].extend({"branch": a, "n": [1, 2, 3]}) - f = uproot.open(filename) + f = uproot3.open(filename) array = f["t"].array(["branch"]) for i in range(len(array)): for j in range(len(array[i])): @@ -1981,12 +1981,12 @@ def test_jagged_uproot_f8(tmp_path): def test_jagged_f4(tmp_path): filename = join(str(tmp_path), "example.root") - a = awkward.fromiter([[0], - 
[1, 2], - [10, 11, 12]]) + a = awkward0.fromiter([[0], + [1, 2], + [10, 11, 12]]) - with uproot.recreate(filename, compression=None) as f: - f["t"] = uproot.newtree({"branch": uproot.newbranch(numpy.dtype(">f4"), size="n")}) + with uproot3.recreate(filename, compression=None) as f: + f["t"] = uproot3.newtree({"branch": uproot3.newbranch(numpy.dtype(">f4"), size="n")}) f["t"].extend({"branch": a, "n": [1, 2, 3]}) f = ROOT.TFile.Open(filename) @@ -1994,18 +1994,18 @@ def test_jagged_f4(tmp_path): for i, event in enumerate(tree): assert(numpy.all([x for x in event.branch] == a[i])) -def test_jagged_uproot_f4(tmp_path): +def test_jagged_uproot3_f4(tmp_path): filename = join(str(tmp_path), "example.root") - a = awkward.fromiter([[0], - [1, 2], - [10, 11, 12]]) + a = awkward0.fromiter([[0], + [1, 2], + [10, 11, 12]]) - with uproot.recreate(filename, compression=None) as f: - f["t"] = uproot.newtree({"branch": uproot.newbranch(numpy.dtype(">f4"), size="n")}) + with uproot3.recreate(filename, compression=None) as f: + f["t"] = uproot3.newtree({"branch": uproot3.newbranch(numpy.dtype(">f4"), size="n")}) f["t"].extend({"branch": a, "n": [1, 2, 3]}) - f = uproot.open(filename) + f = uproot3.open(filename) array = f["t"].array(["branch"]) for i in range(len(array)): for j in range(len(array[i])): @@ -2014,12 +2014,12 @@ def test_jagged_uproot_f4(tmp_path): def test_jagged_i2(tmp_path): filename = join(str(tmp_path), "example.root") - a = awkward.fromiter([[0], - [1, 2], - [10, 11, 12]]) + a = awkward0.fromiter([[0], + [1, 2], + [10, 11, 12]]) - with uproot.recreate(filename, compression=None) as f: - f["t"] = uproot.newtree({"branch": uproot.newbranch(numpy.dtype(">i2"), size="n")}) + with uproot3.recreate(filename, compression=None) as f: + f["t"] = uproot3.newtree({"branch": uproot3.newbranch(numpy.dtype(">i2"), size="n")}) f["t"].extend({"branch": a, "n": [1, 2, 3]}) f = ROOT.TFile.Open(filename) @@ -2027,18 +2027,18 @@ def test_jagged_i2(tmp_path): for i, event in enumerate(tree): assert(numpy.all([x for x in event.branch] == a[i])) -def test_jagged_uproot_i2(tmp_path): +def test_jagged_uproot3_i2(tmp_path): filename = join(str(tmp_path), "example.root") - a = awkward.fromiter([[0], - [1, 2], - [10, 11, 12]]) + a = awkward0.fromiter([[0], + [1, 2], + [10, 11, 12]]) - with uproot.recreate(filename, compression=None) as f: - f["t"] = uproot.newtree({"branch": uproot.newbranch(numpy.dtype(">i2"), size="n")}) + with uproot3.recreate(filename, compression=None) as f: + f["t"] = uproot3.newtree({"branch": uproot3.newbranch(numpy.dtype(">i2"), size="n")}) f["t"].extend({"branch": a, "n": [1, 2, 3]}) - f = uproot.open(filename) + f = uproot3.open(filename) array = f["t"].array(["branch"]) for i in range(len(array)): for j in range(len(array[i])): @@ -2047,15 +2047,15 @@ def test_jagged_uproot_i2(tmp_path): def test_jagged_i2_multiple_sametype(tmp_path): filename = join(str(tmp_path), "example.root") - a = awkward.fromiter([[0], - [1, 2]]) + a = awkward0.fromiter([[0], + [1, 2]]) - b = awkward.fromiter([[3], - [7, 12]]) + b = awkward0.fromiter([[3], + [7, 12]]) - with uproot.recreate(filename, compression=None) as f: - f["t"] = uproot.newtree({"branch1": uproot.newbranch(numpy.dtype(">i2"), size="n"), - "branch2": uproot.newbranch(numpy.dtype(">i2"), size="n")}) + with uproot3.recreate(filename, compression=None) as f: + f["t"] = uproot3.newtree({"branch1": uproot3.newbranch(numpy.dtype(">i2"), size="n"), + "branch2": uproot3.newbranch(numpy.dtype(">i2"), size="n")}) f["t"].extend({"branch1": a, "branch2": 
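These jagged-array tests build variable-length entries with awkward0.fromiter and declare a counter branch via size="n"; the counter's values are supplied alongside the data in extend. A condensed sketch of writing and re-reading one jagged branch, with "jagged.root" as a placeholder path:

    import awkward0
    import numpy
    import uproot3

    a = awkward0.fromiter([[0], [1, 2], [10, 11, 12]])

    with uproot3.recreate("jagged.root", compression=None) as f:
        # "n" is the counter branch holding each entry's length.
        f["t"] = uproot3.newtree(
            {"branch": uproot3.newbranch(numpy.dtype(">i4"), size="n")})
        f["t"].extend({"branch": a, "n": [1, 2, 3]})

    array = uproot3.open("jagged.root")["t"].array("branch")
    for i in range(len(array)):
        for j in range(len(array[i])):
            assert array[i][j] == a[i][j]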
b, "n": [1, 2]}) @@ -2069,15 +2069,15 @@ def test_jagged_i2_multiple_sametype(tmp_path): def test_jagged_multiple_difftype(tmp_path): filename = join(str(tmp_path), "example.root") - a = awkward.fromiter([[0], - [1, 2]]) + a = awkward0.fromiter([[0], + [1, 2]]) - b = awkward.fromiter([[3], - [7, 12]]) + b = awkward0.fromiter([[3], + [7, 12]]) - with uproot.recreate(filename, compression=None) as f: - f["t"] = uproot.newtree({"branch1": uproot.newbranch(numpy.dtype(">i2"), size="n"), - "branch2": uproot.newbranch(numpy.dtype(">i4"), size="n")}) + with uproot3.recreate(filename, compression=None) as f: + f["t"] = uproot3.newtree({"branch1": uproot3.newbranch(numpy.dtype(">i2"), size="n"), + "branch2": uproot3.newbranch(numpy.dtype(">i4"), size="n")}) f["t"].extend({"branch1": a, "branch2": b, "n": [1, 2]}) @@ -2091,15 +2091,15 @@ def test_jagged_multiple_difftype(tmp_path): def test_jagged_i2_multiple_difflen(tmp_path): filename = join(str(tmp_path), "example.root") - a = awkward.fromiter([[0], - [1, 2]]) + a = awkward0.fromiter([[0], + [1, 2]]) - b = awkward.fromiter([[3], - [10, 11, 12]]) + b = awkward0.fromiter([[3], + [10, 11, 12]]) - with uproot.recreate(filename, compression=None) as f: - f["t"] = uproot.newtree({"branch1": uproot.newbranch(numpy.dtype(">i2"), size="n1"), - "branch2": uproot.newbranch(numpy.dtype(">i2"), size="n2")}) + with uproot3.recreate(filename, compression=None) as f: + f["t"] = uproot3.newtree({"branch1": uproot3.newbranch(numpy.dtype(">i2"), size="n1"), + "branch2": uproot3.newbranch(numpy.dtype(">i2"), size="n2")}) f["t"].extend({"branch1": a, "n1": [1, 2], "branch2": b, @@ -2114,19 +2114,19 @@ def test_jagged_i2_multiple_difflen(tmp_path): def test_jagged_i4_manybasket(tmp_path): filename = join(str(tmp_path), "example.root") - a = awkward.fromiter([[0], - [1, 2], - [10, 11, 12]]) - b = awkward.fromiter([[10], - [11, 12]]) - tester = awkward.fromiter([[0], - [1, 2], - [10, 11, 12], - [10], - [11, 12]]) + a = awkward0.fromiter([[0], + [1, 2], + [10, 11, 12]]) + b = awkward0.fromiter([[10], + [11, 12]]) + tester = awkward0.fromiter([[0], + [1, 2], + [10, 11, 12], + [10], + [11, 12]]) - with uproot.recreate(filename, compression=None) as f: - f["t"] = uproot.newtree({"branch": uproot.newbranch(numpy.dtype(">i4"), size="n")}) + with uproot3.recreate(filename, compression=None) as f: + f["t"] = uproot3.newtree({"branch": uproot3.newbranch(numpy.dtype(">i4"), size="n")}) f["t"].extend({"branch": a, "n": [1, 2, 3]}) f["t"].extend({"branch": b, "n": [1, 2]}) @@ -2145,9 +2145,9 @@ def test_update(tmp_path): h.Write() f.Close() - t = uproot.open(testfile) + t = uproot3.open(testfile) hist = t["hvar"] - with uproot.recreate(filename, compression=None) as f: + with uproot3.recreate(filename, compression=None) as f: f.update(("test%d" % i, hist) for i in range(n)) f = ROOT.TFile.Open(filename) diff --git a/uproot/pandas.py b/uproot/pandas.py deleted file mode 100644 index 27269d4b..00000000 --- a/uproot/pandas.py +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env python - -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE - -"""Top-level functions for Pandas.""" -from __future__ import absolute_import - -import uproot.tree -from uproot.source.memmap import MemmapSource -from uproot.source.xrootd import XRootDSource -from uproot.source.http import HTTPSource - -def iterate(path, treepath, branches=None, entrysteps=None, namedecode="utf-8", reportpath=False, reportfile=False, flatten=True, flatname=None, awkwardlib=None, cache=None, 
basketcache=None, keycache=None, executor=None, blocking=True, localsource=MemmapSource.defaults, xrootdsource=XRootDSource.defaults, httpsource=HTTPSource.defaults, **options): - import pandas - return uproot.tree.iterate(path, treepath, branches=branches, entrysteps=entrysteps, outputtype=pandas.DataFrame, namedecode=namedecode, reportpath=reportpath, reportfile=reportfile, reportentries=False, flatten=flatten, flatname=flatname, awkwardlib=awkwardlib, cache=cache, basketcache=basketcache, keycache=keycache, executor=executor, blocking=blocking, localsource=localsource, xrootdsource=xrootdsource, httpsource=httpsource, **options) diff --git a/uproot/write/objects/__init__.py b/uproot/write/objects/__init__.py deleted file mode 100644 index 9f2be71d..00000000 --- a/uproot/write/objects/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -#!/usr/bin/env python - -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE - -from __future__ import absolute_import \ No newline at end of file diff --git a/uproot/write/sink/__init__.py b/uproot/write/sink/__init__.py deleted file mode 100644 index 9f2be71d..00000000 --- a/uproot/write/sink/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -#!/usr/bin/env python - -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE - -from __future__ import absolute_import \ No newline at end of file diff --git a/uproot/__init__.py b/uproot3/__init__.py similarity index 60% rename from uproot/__init__.py rename to uproot3/__init__.py index 5b8cbf3e..aa4fca13 100644 --- a/uproot/__init__.py +++ b/uproot3/__init__.py @@ -1,18 +1,18 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE -"""uproot -- ROOT I/O in pure Python and Numpy. +"""Uproot -- ROOT I/O in pure Python and Numpy. Basic cheat-sheet ----------------- -Open ROOT files with uproot.open (for reading) or uproot.create (for read-write). +Open ROOT files with uproot3.open (for reading) or uproot3.create (for read-write). - file = uproot.open("/path/to/my/file.root") - file = uproot.open("root://path/to/my/file.root") - file = uproot.open("http://path/to/my/file.root") - writeable = uproot.create("/new/local/file.root") + file = uproot3.open("/path/to/my/file.root") + file = uproot3.open("root://path/to/my/file.root") + file = uproot3.open("http://path/to/my/file.root") + writeable = uproot3.create("/new/local/file.root") These file objects act like dicts; get objects like TTrees from them with square brackets. @@ -38,9 +38,9 @@ tree.arrays(["Muon_*"]) Variable numbers of objects per entry (particles per event) are handled by -awkward-array: +Awkward Array: - https://github.com/scikit-hep/awkward-array + https://github.com/scikit-hep/awkward-0.x The arrays(...) call returns a dict from branch name (bytes) to data (Numpy array) by default. @@ -59,16 +59,16 @@ for x, y, z in tree.iterate(["x", "y", "z"], outputtype=tuple): do_something(x, y, z) -To iterate over many files (like TChain), do uproot.iterate(...). +To iterate over many files (like TChain), do uproot3.iterate(...). - for arrays in uproot.iterate("files*.root", "path/to/events", ["Muon_*"]): + for arrays in uproot3.iterate("files*.root", "path/to/events", ["Muon_*"]): do_something(arrays) Intermediate cheat-sheet ------------------------ Each call to array/arrays/iterate reads the file again. 
For faster access after -the first time, pass a dict-like object to the cache parameter and uproot will +the first time, pass a dict-like object to the cache parameter and Uproot will try the cache first. cache = {} @@ -86,13 +86,13 @@ arrays = tree.arrays(["Muon_*"], executor=executor) To get the number of entries per file in a a collection of files, use -uproot.numentries(). +uproot3.numentries(). - uproot.numentries("tests/samples/sample*.root", "sample", total=False) + uproot3.numentries("tests/samples/sample*.root", "sample", total=False) -For arrays that read on demand, use uproot.lazyarray and uproot.lazyarrays. -For processing with Dask, use uproot.daskarray, uproot.daskarrays, or -uproot.daskframe. +For arrays that read on demand, use uproot3.lazyarray and uproot3.lazyarrays. +For processing with Dask, use uproot3.daskarray, uproot3.daskarrays, or +uproot3.daskframe. Advanced cheat-sheet -------------------- @@ -111,77 +111,77 @@ mybranch.array(mybranch.interpretation.toarray(fill_me_instead)) fill_me_instead # filled in place - mybranch.array(uproot.asdebug) # view raw bytes of each entry + mybranch.array(uproot3.asdebug) # view raw bytes of each entry By default, local files are read as memory-mapped arrays. Change this by setting - from uproot import FileSource + from uproot3 import FileSource open("...", localsource=lambda path: FileSource(path, **FileSource.defaults)) -The same procedure sets options for uproot.XRootDSource and uproot.HTTPSource. +The same procedure sets options for uproot3.XRootDSource and uproot3.HTTPSource. """ from __future__ import absolute_import import warnings warnings.warn( - """Consider switching from 'uproot' to 'uproot4', since the new interface will become the default later this year (2020). + """Consider switching from 'uproot3' to 'uproot', since the new interface became the default in 2020. - pip install -U uproot4 + pip install -U uproot In Python: - >>> import uproot4 - >>> with uproot4.open(...) as file: + >>> import uproot + >>> with uproot.open(...) as file: ... 
""", DeprecationWarning ) # high-level entry points -from uproot.rootio import open, xrootd, http -from uproot.tree import iterate, numentries, lazyarray, lazyarrays, daskarray, daskframe -from uproot.write.TFile import TFileCreate as create -from uproot.write.TFile import TFileRecreate as recreate -from uproot.write.TFile import TFileUpdate as update -from uproot.write.compress import ZLIB, LZMA, LZ4 -from uproot.write.objects.TTree import newtree, newbranch - -from uproot.source.memmap import MemmapSource -from uproot.source.file import FileSource -from uproot.source.xrootd import XRootDSource -from uproot.source.http import HTTPSource - -from uproot.cache import ArrayCache, ThreadSafeArrayCache - -from uproot.interp.auto import interpret -from uproot.interp.numerical import asdtype -from uproot.interp.numerical import asarray -from uproot.interp.numerical import asdouble32 -from uproot.interp.numerical import asstlbitset -from uproot.interp.jagged import asjagged -from uproot.interp.objects import astable -from uproot.interp.objects import asobj -from uproot.interp.objects import asgenobj -from uproot.interp.objects import asstring -from uproot.interp.objects import SimpleArray -from uproot.interp.objects import STLVector -from uproot.interp.objects import STLMap -from uproot.interp.objects import STLString -from uproot.interp.objects import Pointer +from uproot3.rootio import open, xrootd, http +from uproot3.tree import iterate, numentries, lazyarray, lazyarrays, daskarray, daskframe +from uproot3.write.TFile import TFileCreate as create +from uproot3.write.TFile import TFileRecreate as recreate +from uproot3.write.TFile import TFileUpdate as update +from uproot3.write.compress import ZLIB, LZMA, LZ4 +from uproot3.write.objects.TTree import newtree, newbranch + +from uproot3.source.memmap import MemmapSource +from uproot3.source.file import FileSource +from uproot3.source.xrootd import XRootDSource +from uproot3.source.http import HTTPSource + +from uproot3.cache import ArrayCache, ThreadSafeArrayCache + +from uproot3.interp.auto import interpret +from uproot3.interp.numerical import asdtype +from uproot3.interp.numerical import asarray +from uproot3.interp.numerical import asdouble32 +from uproot3.interp.numerical import asstlbitset +from uproot3.interp.jagged import asjagged +from uproot3.interp.objects import astable +from uproot3.interp.objects import asobj +from uproot3.interp.objects import asgenobj +from uproot3.interp.objects import asstring +from uproot3.interp.objects import SimpleArray +from uproot3.interp.objects import STLVector +from uproot3.interp.objects import STLMap +from uproot3.interp.objects import STLString +from uproot3.interp.objects import Pointer asdebug = asjagged(asdtype("u1")) -from uproot import pandas +from uproot3 import pandas # put help strings on everything (they're long, too disruptive to intersperse # in the code, and are built programmatically to avoid duplication; Python's # inline docstring method doesn't accept non-literals) -import uproot._help +import uproot3._help # convenient access to the version number -from uproot.version import __version__ +from uproot3.version import __version__ -# don't expose uproot.uproot; it's ugly -del uproot +# don't expose uproot3.uproot3; it's ugly +del uproot3 __all__ = ["open", "xrootd", "http", "iterate", "numentries", "lazyarray", "lazyarrays", "daskarray", "daskframe", "create", "recreate", "update", "ZLIB", "LZMA", "LZ4", "ZSTD", "newtree", "newbranch", "MemmapSource", "FileSource", "XRootDSource", 
"HTTPSource", "ArrayCache", "ThreadSafeArrayCache", "interpret", "asdtype", "asarray", "asdouble32", "asstlbitset", "asjagged", "astable", "asobj", "asgenobj", "asstring", "asdebug", "SimpleArray", "STLVector", "STLMap", "STLString", "Pointer", "pandas", "__version__"] diff --git a/uproot/write/__init__.py b/uproot3/_connect/__init__.py similarity index 59% rename from uproot/write/__init__.py rename to uproot3/_connect/__init__.py index 9f2be71d..d0cdd065 100644 --- a/uproot/write/__init__.py +++ b/uproot3/_connect/__init__.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE -from __future__ import absolute_import \ No newline at end of file +from __future__ import absolute_import diff --git a/uproot/_connect/_pandas.py b/uproot3/_connect/_pandas.py similarity index 87% rename from uproot/_connect/_pandas.py rename to uproot3/_connect/_pandas.py index c742c85f..e371591b 100644 --- a/uproot/_connect/_pandas.py +++ b/uproot3/_connect/_pandas.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE from __future__ import absolute_import @@ -10,18 +10,18 @@ import numpy -import awkward as awkwardbase +import awkward0 as awkwardbase -import uproot.tree -import uproot.interp.numerical -from uproot.interp.jagged import asjagged -from uproot.interp.numerical import asdtype -from uproot.interp.objects import asobj -from uproot.interp.objects import astable +import uproot3.tree +import uproot3.interp.numerical +from uproot3.interp.jagged import asjagged +from uproot3.interp.numerical import asdtype +from uproot3.interp.objects import asobj +from uproot3.interp.objects import astable -from uproot.source.memmap import MemmapSource -from uproot.source.xrootd import XRootDSource -from uproot.source.http import HTTPSource +from uproot3.source.memmap import MemmapSource +from uproot3.source.xrootd import XRootDSource +from uproot3.source.http import HTTPSource class TTreeMethods_pandas(object): def __init__(self, tree): @@ -45,7 +45,7 @@ def default_flatname(branchname, fieldname, index): out += "[" + "][".join(str(x) for x in index) + "]" return out -def futures2df(futures, outputtype, entrystart, entrystop, flatten, flatname, awkward): +def futures2df(futures, outputtype, entrystart, entrystop, flatten, flatname, awkward0): import pandas if flatname is None: @@ -123,7 +123,7 @@ def futures2df(futures, outputtype, entrystart, entrystop, flatten, flatname, aw stops = array.stops index = array.localindex else: - if starts is not array.starts and not awkward.numpy.array_equal(starts, array.starts): + if starts is not array.starts and not awkward0.numpy.array_equal(starts, array.starts): raise ValueError("cannot use flatten=True on branches with different jagged structure, such as electrons and muons (different, variable number of each per event); either explicitly select compatible branches, such as [\"MET_*\", \"Muon_*\"] (scalar and variable per event is okay), or set flatten=False") if len(array.starts) == 0: @@ -144,7 +144,7 @@ def futures2df(futures, outputtype, entrystart, entrystop, flatten, flatname, aw df = outputtype(index=index) for name, interpretation, array, needbroadcast in zip(names, interpretations, arrays, needbroadcasts): - if isinstance(interpretation, 
uproot.interp.numerical._asnumeric): + if isinstance(interpretation, uproot3.interp.numerical._asnumeric): if isinstance(array, awkwardbase.ObjectArray): array = array.content @@ -155,13 +155,13 @@ def futures2df(futures, outputtype, entrystart, entrystop, flatten, flatname, aw if isinstance(array, awkwardbase.Table): for nn in array.columns: - array[nn] = awkward.JaggedArray(starts, stops, awkward.numpy.empty(stops[-1], dtype=array[nn].dtype)).tojagged(array[nn]).content + array[nn] = awkward0.JaggedArray(starts, stops, awkward0.numpy.empty(stops[-1], dtype=array[nn].dtype)).tojagged(array[nn]).content else: if len(originaldims) != 0: - array = array.view(awkward.numpy.dtype([(str(i), array.dtype) for i in range(functools.reduce(operator.mul, array.shape[1:]))])).reshape(array.shape[0]) + array = array.view(awkward0.numpy.dtype([(str(i), array.dtype) for i in range(functools.reduce(operator.mul, array.shape[1:]))])).reshape(array.shape[0]) - array = awkward.JaggedArray(starts, stops, awkward.numpy.empty(stops[-1], dtype=array.dtype)).tojagged(array).content + array = awkward0.JaggedArray(starts, stops, awkward0.numpy.empty(stops[-1], dtype=array.dtype)).tojagged(array).content if len(originaldims) != 0: array = array.view(originaldtype).reshape((-1,) + originaldims) @@ -188,14 +188,14 @@ def futures2df(futures, outputtype, entrystart, entrystop, flatten, flatname, aw else: fn = flatname(name, None, ()) - array = awkward.numpy.array(array, dtype=object) + array = awkward0.numpy.array(array, dtype=object) indexes = numpy.arange(len(array)) - indexes = awkward.JaggedArray(starts, stops, awkward.numpy.empty(stops[-1], dtype=object)).tojagged(indexes).content + indexes = awkward0.JaggedArray(starts, stops, awkward0.numpy.empty(stops[-1], dtype=object)).tojagged(indexes).content array = array[indexes] - if len(array) != 0 and isinstance(array[0], awkward.numpy.ndarray): + if len(array) != 0 and isinstance(array[0], awkward0.numpy.ndarray): df[fn] = list(array) else: df[fn] = array diff --git a/uproot/_help.py b/uproot3/_help.py similarity index 64% rename from uproot/_help.py rename to uproot3/_help.py index 24f59a98..4a71d4e9 100644 --- a/uproot/_help.py +++ b/uproot3/_help.py @@ -1,14 +1,14 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE from __future__ import absolute_import import functools import textwrap -import uproot -import uproot._connect._pandas +import uproot3 +import uproot3._connect._pandas TEXT_WIDTH = 80 @@ -36,24 +36,24 @@ def wrap(text, width=80): map(functools.partial(textwrap.wrap, width=width), text.split('\n')))) -################################################################ uproot.rootio fragments +################################################################ uproot3.rootio fragments open_fragments = { # localsource - "localsource": u"""localsource : function: path \u21d2 :py:class:`Source or ``dict`` of keyword arguments` - function that will be applied to the path to produce an uproot :py:class:`Source ` object if the path is a local file. Default is ``MemmapSource.defaults`` for memory-mapped files. 
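The Pandas connector being renamed here backs the DataFrame-returning entry points, including the uproot3.pandas.iterate function recreated earlier in this patch. A small sketch, with the path, tree name, and branch pattern as placeholders:

    import uproot3

    # Yields pandas.DataFrame chunks; jagged branches are flattened by
    # default (flatten=True), with default_flatname naming the columns.
    for df in uproot3.pandas.iterate("/path/to/my/file.root", "events", ["Muon_*"]):
        print(df.head())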
If a ``dict``, the ``dict`` is passed as keyword arguments to :py:class:`MemmapSource ` constructor.""", + "localsource": u"""localsource : function: path \u21d2 :py:class:`Source or ``dict`` of keyword arguments` + function that will be applied to the path to produce an uproot3 :py:class:`Source ` object if the path is a local file. Default is ``MemmapSource.defaults`` for memory-mapped files. If a ``dict``, the ``dict`` is passed as keyword arguments to :py:class:`MemmapSource ` constructor.""", # xrootdsource - "xrootdsource": u"""xrootdsource : function: path \u21d2 :py:class:`Source or ``dict`` of keyword arguments` - function that will be applied to the path to produce an uproot :py:class:`Source ` object if the path is an XRootD URL. Default is ``uproot.source.xrootd.XRootDSource.defaults`` for XRootD with default chunk size/caching. (See :py:class:`XRootDSource ` constructor for details.) If a ``dict``, the ``dict`` is passed as keyword arguments to :py:class:`XRootDSource ` constructor.""", + "xrootdsource": u"""xrootdsource : function: path \u21d2 :py:class:`Source or ``dict`` of keyword arguments` + function that will be applied to the path to produce an uproot3 :py:class:`Source ` object if the path is an XRootD URL. Default is ``uproot3.source.xrootd.XRootDSource.defaults`` for XRootD with default chunk size/caching. (See :py:class:`XRootDSource ` constructor for details.) If a ``dict``, the ``dict`` is passed as keyword arguments to :py:class:`XRootDSource ` constructor.""", # httpsource - "httpsource": u"""httpsource : function: path \u21d2 :py:class:`Source or ``dict`` of keyword arguments` - function that will be applied to the path to produce an uproot :py:class:`Source ` object if the path is an HTTP URL. Default is ``uproot.source.http.HTTPSource.defaults`` for HTTP with default chunk size/caching. (See :py:class:`HTTPSource ` constructor for details.) If a ``dict``, the ``dict`` is passed as keyword arguments to :py:class:`HTTPSource ` constructor.""", + "httpsource": u"""httpsource : function: path \u21d2 :py:class:`Source or ``dict`` of keyword arguments` + function that will be applied to the path to produce an uproot3 :py:class:`Source ` object if the path is an HTTP URL. Default is ``uproot3.source.http.HTTPSource.defaults`` for HTTP with default chunk size/caching. (See :py:class:`HTTPSource ` constructor for details.) If a ``dict``, the ``dict`` is passed as keyword arguments to :py:class:`HTTPSource ` constructor.""", # options "options": u"""options - passed to :py:class:`ROOTDirectory ` constructor.""", + passed to :py:class:`ROOTDirectory ` constructor.""", } rootdirectory_fragments = { @@ -70,15 +70,15 @@ def wrap(text, width=80): only keys for which ``filterclass(class object)`` returns ``True`` are returned (does not eliminate subdirectories if ``recursive=True``). Default returns ``True`` for all input. Note that all class objects passed to this function have a ``classname`` attribute for the C++ class name (may differ from the Python class name for syntactic reasons).""", } -################################################################ uproot.rootio.open +################################################################ uproot3.rootio.open -uproot.rootio.open.__doc__ = wrap( +uproot3.rootio.open.__doc__ = wrap( u"""Opens a ROOT file (local or remote), specified by file path. Parameters ---------- path : str - local file path or URL specifying the location of a file (note: not a Python file object!). 
If the URL schema is "root://", :py:func:`xrootd ` will be called; if "http://", :py:func:`http ` will be called. + local file path or URL specifying the location of a file (note: not a Python file object!). If the URL schema is "root://", :py:func:`xrootd ` will be called; if "http://", :py:func:`http ` will be called. {localsource} @@ -90,17 +90,17 @@ def wrap(text, width=80): Returns ------- - :py:class:`ROOTDirectory ` + :py:class:`ROOTDirectory ` top-level directory of the ROOT file. Notes ----- - The ROOTDirectory returned by this function is not necessarily an open file. File handles are managed internally by :py:class:`Source ` objects to permit parallel reading. Although this function can be used in a ``with`` construct (which protects against unclosed files), the ``with`` construct has no meaning when applied to this function. Files will be opened or closed as needed to read data on demand. + The ROOTDirectory returned by this function is not necessarily an open file. File handles are managed internally by :py:class:`Source ` objects to permit parallel reading. Although this function can be used in a ``with`` construct (which protects against unclosed files), the ``with`` construct has no meaning when applied to this function. Files will be opened or closed as needed to read data on demand. """.format(**open_fragments), width=TEXT_WIDTH) -################################################################ uproot.rootio.xrootd +################################################################ uproot3.rootio.xrootd -uproot.rootio.xrootd.__doc__ = wrap( +uproot3.rootio.xrootd.__doc__ = wrap( u"""Opens a remote ROOT file with XRootD (if installed). Parameters @@ -114,13 +114,13 @@ def wrap(text, width=80): Returns ------- - :py:class:`ROOTDirectory ` + :py:class:`ROOTDirectory ` top-level directory of the ROOT file. """.format(**open_fragments), width=TEXT_WIDTH) -################################################################ uproot.rootio.http +################################################################ uproot3.rootio.http -uproot.rootio.http.__doc__ = wrap( +uproot3.rootio.http.__doc__ = wrap( u"""Opens a remote ROOT file with HTTP (if ``requests`` is installed). Parameters @@ -134,79 +134,79 @@ def wrap(text, width=80): Returns ------- - :py:class:`ROOTDirectory ` + :py:class:`ROOTDirectory ` top-level directory of the ROOT file. """.format(**open_fragments), width=TEXT_WIDTH) -################################################################ uproot.rootio.ROOTDirectory +################################################################ uproot3.rootio.ROOTDirectory -uproot.rootio.ROOTDirectory.__doc__ = wrap( +uproot3.rootio.ROOTDirectory.__doc__ = wrap( u"""Represents a ROOT file or directory, an entry point for reading objects. - Although this class has a constructor that could be called by a user, objects are usually created from ROOT files through :py:func:`open ` or :py:func:`xrootd `. + Although this class has a constructor that could be called by a user, objects are usually created from ROOT files through :py:func:`open ` or :py:func:`xrootd `. - :py:class:`ROOTDirectory ` objects may be accessed as Python containers: + :py:class:`ROOTDirectory ` objects may be accessed as Python containers: - - square brackets (``__getitem__``) read objects from the file by key name (see :py:meth:`get `). + - square brackets (``__getitem__``) read objects from the file by key name (see :py:meth:`get `). - the ``len`` function (``__len__``) returns the number of keys. 
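As the docstrings for open, xrootd, and http note, uproot3.open dispatches on the URL scheme, and the localsource/xrootdsource/httpsource arguments control how bytes are fetched. A short sketch (URLs are placeholders; the remote cases need XRootD or requests installed):

    import uproot3
    from uproot3 import FileSource

    local  = uproot3.open("/path/to/my/file.root")
    remote = uproot3.open("root://server//path/to/my/file.root")
    served = uproot3.open("http://server/path/to/my/file.root")

    # Local files are memory-mapped by default; switch to plain file reads:
    plain = uproot3.open(
        "/path/to/my/file.root",
        localsource=lambda path: FileSource(path, **FileSource.defaults))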
- - iteration (``__iter__``) iterates over the *names* of the keys only (like a ``dict``, see :py:meth:`keys `). + - iteration (``__iter__``) iterates over the *names* of the keys only (like a ``dict``, see :py:meth:`keys `). **Attributes, properties, and methods:** - **name** (*bytes*) name of the file or directory *as read from the ROOT file*. (ROOT files may be imprinted with a different name than they have in the file system.) - - **compression** (:py:class:`Compression `) the compression algorithm and level specified in the file header. (Some objects, including TTree branches, may have different compression settings than the global file settings.) + - **compression** (:py:class:`Compression `) the compression algorithm and level specified in the file header. (Some objects, including TTree branches, may have different compression settings than the global file settings.) - - :py:meth:`get ` read an object from the file, selected by name. + - :py:meth:`get ` read an object from the file, selected by name. - - :py:meth:`iterkeys ` iterate over key names in this directory. + - :py:meth:`iterkeys ` iterate over key names in this directory. - - :py:meth:`itervalues ` iterate over objects in this directory. + - :py:meth:`itervalues ` iterate over objects in this directory. - - :py:meth:`iteritems ` iterate over *(key name, object)* pairs in this directory, like a ``dict``. + - :py:meth:`iteritems ` iterate over *(key name, object)* pairs in this directory, like a ``dict``. - - :py:meth:`iterclasses ` iterate over *(key name, class object)* pairs in this directory. + - :py:meth:`iterclasses ` iterate over *(key name, class object)* pairs in this directory. - - :py:meth:`keys ` return key names in this directory. + - :py:meth:`keys ` return key names in this directory. - - :py:meth:`values ` return objects in this directory. + - :py:meth:`values ` return objects in this directory. - - :py:meth:`items ` return *(key name, object)* pairs in this directory, like a ``dict``. + - :py:meth:`items ` return *(key name, object)* pairs in this directory, like a ``dict``. - - :py:meth:`classes ` return *(key name, class object)* pairs in this directory. + - :py:meth:`classes ` return *(key name, class object)* pairs in this directory. - - :py:meth:`allkeys ` return keys at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`keys `). + - :py:meth:`allkeys ` return keys at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`keys `). - - :py:meth:`allvalues ` return objects at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`values `). + - :py:meth:`allvalues ` return objects at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`values `). - - :py:meth:`allitems ` return *(key name, object)* pairs at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`items `). + - :py:meth:`allitems ` return *(key name, object)* pairs at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`items `). - - :py:meth:`allclasses ` return *(key name, class object)* pairs at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`classes `). + - :py:meth:`allclasses ` return *(key name, class object)* pairs at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`classes `). 
""", width=TEXT_WIDTH) -_method(uproot.rootio.ROOTDirectory.get).__doc__ = wrap( +_method(uproot3.rootio.ROOTDirectory.get).__doc__ = wrap( u"""Read an object from the ROOT file or directory by name. Parameters ---------- name : str (str) - name of the object. Any text before a "``/``" is interpreted as a subdirectory, and subdirectories of any depth may be searched. A number after a "``;``" indicates a `TKey ` cycle. + name of the object. Any text before a "``/``" is interpreted as a subdirectory, and subdirectories of any depth may be searched. A number after a "``;``" indicates a `TKey ` cycle. cycle : ``None`` or int - `TKey ` cycle number to disambiguate keys of the same name. This argument overrides a number after a "``;``". + `TKey ` cycle number to disambiguate keys of the same name. This argument overrides a number after a "``;``". Returns ------- - :py:class:`ROOTStreamedObject ` + :py:class:`ROOTStreamedObject ` a freshly read object from the ROOT file. Notes ----- - This method, without the ``cycle`` argument, can be accessed more directly through square brackets (``__getitem__``) on the :py:class:`ROOTDirectory ` object. + This method, without the ``cycle`` argument, can be accessed more directly through square brackets (``__getitem__``) on the :py:class:`ROOTDirectory ` object. """.format(**rootdirectory_fragments), width=TEXT_WIDTH) -_method(uproot.rootio.ROOTDirectory.iterkeys).__doc__ = wrap( +_method(uproot3.rootio.ROOTDirectory.iterkeys).__doc__ = wrap( u"""Iterate over key names in this directory. This method does not read objects. @@ -227,10 +227,10 @@ def wrap(text, width=80): Notes ----- - This method can be accessed more directly by simply iterating over a :py:class:`ROOTDirectory ` object. + This method can be accessed more directly by simply iterating over a :py:class:`ROOTDirectory ` object. """.format(**rootdirectory_fragments), width=TEXT_WIDTH) -_method(uproot.rootio.ROOTDirectory.itervalues).__doc__ = wrap( +_method(uproot3.rootio.ROOTDirectory.itervalues).__doc__ = wrap( u"""Iterate over objects in this directory. Parameters @@ -243,11 +243,11 @@ def wrap(text, width=80): Returns ------- - iterator over :py:class:`ROOTStreamedObject ` + iterator over :py:class:`ROOTStreamedObject ` freshly read objects from the ROOT file. """.format(**rootdirectory_fragments), width=TEXT_WIDTH) -_method(uproot.rootio.ROOTDirectory.iteritems).__doc__ = wrap( +_method(uproot3.rootio.ROOTDirectory.iteritems).__doc__ = wrap( u"""Iterate over *(key name, object)* pairs in this directory, like a ``dict``. Parameters @@ -260,11 +260,11 @@ def wrap(text, width=80): Returns ------- - iterator over (bytes, :py:class:`ROOTStreamedObject `) + iterator over (bytes, :py:class:`ROOTStreamedObject `) name-object pairs from the file. """.format(**rootdirectory_fragments), width=TEXT_WIDTH) -_method(uproot.rootio.ROOTDirectory.iterclasses).__doc__ = wrap( +_method(uproot3.rootio.ROOTDirectory.iterclasses).__doc__ = wrap( u"""Iterate over *(key name, class object)* pairs in this directory. This method does not read objects. @@ -283,7 +283,7 @@ def wrap(text, width=80): name-class object pairs from the file. """.format(**rootdirectory_fragments), width=TEXT_WIDTH) -_method(uproot.rootio.ROOTDirectory.keys).__doc__ = wrap( +_method(uproot3.rootio.ROOTDirectory.keys).__doc__ = wrap( u"""Return key names in this directory. This method does not read objects. @@ -302,7 +302,7 @@ def wrap(text, width=80): names of objects and subdirectories in the file. 
""".format(**rootdirectory_fragments), width=TEXT_WIDTH) -_method(uproot.rootio.ROOTDirectory.values).__doc__ = wrap( +_method(uproot3.rootio.ROOTDirectory.values).__doc__ = wrap( u"""Return objects in this directory. Parameters @@ -315,11 +315,11 @@ def wrap(text, width=80): Returns ------- - list of :py:class:`ROOTStreamedObject ` + list of :py:class:`ROOTStreamedObject ` freshly read objects from the ROOT file. """.format(**rootdirectory_fragments), width=TEXT_WIDTH) -_method(uproot.rootio.ROOTDirectory.items).__doc__ = wrap( +_method(uproot3.rootio.ROOTDirectory.items).__doc__ = wrap( u"""Return *(key name, object)* pairs in this directory, like a ``dict``. Parameters @@ -332,11 +332,11 @@ def wrap(text, width=80): Returns ------- - list of (bytes, :py:class:`ROOTStreamedObject `) + list of (bytes, :py:class:`ROOTStreamedObject `) name-object pairs from the file. """.format(**rootdirectory_fragments), width=TEXT_WIDTH) -_method(uproot.rootio.ROOTDirectory.classes).__doc__ = wrap( +_method(uproot3.rootio.ROOTDirectory.classes).__doc__ = wrap( u"""Return *(key name, class object)* pairs in this directory. This method does not read objects. @@ -355,8 +355,8 @@ def wrap(text, width=80): name-class object pairs from the file. """.format(**rootdirectory_fragments), width=TEXT_WIDTH) -_method(uproot.rootio.ROOTDirectory.allkeys).__doc__ = wrap( -u"""Return keys at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`keys `). +_method(uproot3.rootio.ROOTDirectory.allkeys).__doc__ = wrap( +u"""Return keys at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`keys `). This method does not read objects. @@ -372,8 +372,8 @@ def wrap(text, width=80): names of objects and subdirectories in the file. """.format(**rootdirectory_fragments), width=TEXT_WIDTH) -_method(uproot.rootio.ROOTDirectory.allvalues).__doc__ = wrap( -u"""Return objects at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`values `). +_method(uproot3.rootio.ROOTDirectory.allvalues).__doc__ = wrap( +u"""Return objects at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`values `). Parameters ---------- @@ -383,12 +383,12 @@ def wrap(text, width=80): Returns ------- - list of :py:class:`ROOTStreamedObject ` + list of :py:class:`ROOTStreamedObject ` freshly read objects from the ROOT file. """.format(**rootdirectory_fragments), width=TEXT_WIDTH) -_method(uproot.rootio.ROOTDirectory.allitems).__doc__ = wrap( -u"""Return *(key name, object)* pairs at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`items `). +_method(uproot3.rootio.ROOTDirectory.allitems).__doc__ = wrap( +u"""Return *(key name, object)* pairs at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`items `). Parameters ---------- @@ -398,12 +398,12 @@ def wrap(text, width=80): Returns ------- - list of (bytes, :py:class:`ROOTStreamedObject `) + list of (bytes, :py:class:`ROOTStreamedObject `) name-object pairs from the file. """.format(**rootdirectory_fragments), width=TEXT_WIDTH) -_method(uproot.rootio.ROOTDirectory.allclasses).__doc__ = wrap( -u"""Return *(key name, class object)* pairs at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`classes `). +_method(uproot3.rootio.ROOTDirectory.allclasses).__doc__ = wrap( +u"""Return *(key name, class object)* pairs at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`classes `). This method does not read objects. 
@@ -419,21 +419,21 @@ def wrap(text, width=80): name-class object pairs from the file. """.format(**rootdirectory_fragments), width=TEXT_WIDTH) -################################################################ uproot.rootio.ROOTObject and uproot.rootio.ROOTStreamedObject +################################################################ uproot3.rootio.ROOTObject and uproot3.rootio.ROOTStreamedObject -uproot.rootio.ROOTObject.__doc__ = wrap( -u"""Superclass of all objects read out of a ROOT file (except :py:class:`ROOTDirectory `). +uproot3.rootio.ROOTObject.__doc__ = wrap( +u"""Superclass of all objects read out of a ROOT file (except :py:class:`ROOTDirectory `). - If a :py:class:`ROOTObject ` is not a :py:class:`ROOTStreamedObject `, then its class definition is hard-coded, not derived from the file's *streamer info*. + If a :py:class:`ROOTObject ` is not a :py:class:`ROOTStreamedObject `, then its class definition is hard-coded, not derived from the file's *streamer info*. """, width=TEXT_WIDTH) -uproot.rootio.ROOTStreamedObject.__doc__ = wrap( +uproot3.rootio.ROOTStreamedObject.__doc__ = wrap( u"""Superclass of all objects read out of a ROOT file with an automatically generated class, derived from the file's *streamer info*. - Each subclass of a :py:class:`ROOTStreamedObject ` has a ``classversion`` attribute, corresponding to the class version in the *streamer info*. If this version does not match the version of the serialized class, an error is raised during the read. + Each subclass of a :py:class:`ROOTStreamedObject ` has a ``classversion`` attribute, corresponding to the class version in the *streamer info*. If this version does not match the version of the serialized class, an error is raised during the read. """, width=TEXT_WIDTH) -################################################################ uproot.tree fragments +################################################################ uproot3.tree fragments tree_fragments = { # entrystart @@ -457,17 +457,17 @@ def wrap(text, width=80): name of the branch to read.""", # interpretation - "interpretation": u"""interpretation : ``None`` or :py:class:`Interpretation ` - the meaning imposed upon the bytes of the file and the ultimate form to instantiate. If ``None`` *(default)*, :py:func:`interpret ` will be applied to the branch to generate an interpretation.""", + "interpretation": u"""interpretation : ``None`` or :py:class:`Interpretation ` + the meaning imposed upon the bytes of the file and the ultimate form to instantiate. If ``None`` *(default)*, :py:func:`interpret ` will be applied to the branch to generate an interpretation.""", # branches "branches": u"""branches - if ``None`` *(default)*, select all *interpretable* branches; - if a list of str, select branches by name; - if a single str, select a single branch (though the return value is still a container type, not a single array). 
The selection by string can include filename-like glob characters (``*``, ``?``, ``[...]``) or it can be a full regular expression (Python flavored) if surrounded by slashes, like ``/pattern/i`` (where ``i`` is an optional `Python re flag `_); - - if a function :py:class:`TBranchMethods ` \u21d2 ``True`` or ``False``, select branches that return ``True``; - - if a function :py:class:`TBranchMethods ` \u21d2 ``None`` or :py:class:`Interpretation `, select branches for which the function does not return ``None`` and use the interpretation it returns otherwise; - - if a ``dict`` of str \u2192 :py:class:`Interpretation `, select branches named by keys and use interpretations from the associated values.""", + - if a function :py:class:`TBranchMethods ` \u21d2 ``True`` or ``False``, select branches that return ``True``; + - if a function :py:class:`TBranchMethods ` \u21d2 ``None`` or :py:class:`Interpretation `, select branches for which the function does not return ``None`` and use the interpretation it returns otherwise; + - if a ``dict`` of str \u2192 :py:class:`Interpretation `, select branches named by keys and use interpretations from the associated values.""", # outputtype "outputtype": u"""outputtype : type @@ -495,15 +495,15 @@ def wrap(text, width=80): # flatname "flatname": u"""flatname : None or (branchname, fieldname, index) \u2192 str - if ``None`` *(default)*, use ``uproot._connect._pandas.default_flatname`` to convert a branchname with a subfield and regular index number into a Pandas column name; otherwise, take a user-defined function.""", + if ``None`` *(default)*, use ``uproot3._connect._pandas.default_flatname`` to convert a branchname with a subfield and regular index number into a Pandas column name; otherwise, take a user-defined function.""", # profile "profile": u"""profile : None or str - if a string *(not default)*, format the lazy arrays using a module from uproot_methods.profiles named by the string.""", + if a string *(not default)*, format the lazy arrays using a module from uproot3_methods.profiles named by the string.""", # awkwardlib "awkwardlib": u"""awkwardlib : ``None``, str, or module - if ``None`` *(default)*, use ``import awkward`` to get awkward-array constructors. Otherwise, parse the module string name or use the provided module.""", + if ``None`` *(default)*, use ``import awkward`` to get Awkward Array constructors. Otherwise, parse the module string name or use the provided module.""", # cache "cache": u"""cache : ``None`` or ``dict``-like object @@ -527,7 +527,7 @@ def wrap(text, width=80): # persistvirtual "persistvirtual": u"""persistvirtual : bool - if ``False`` *(default)*, the resulting awkward.VirtualArrays would convert themselves into real arrays (materialize) before being saved in awkward-array's persistence methods; if ``True``, the "virtualness" of the arrays is preserved\u2014that is, only instructions for reconstituting the arrays is saved, not the array data themselves.""", + if ``False`` *(default)*, the resulting awkward0.VirtualArrays would convert themselves into real arrays (materialize) before being saved in Awkward Array's persistence methods; if ``True``, the "virtualness" of the arrays is preserved\u2014that is, only instructions for reconstituting the arrays is saved, not the array data themselves.""", # recursive "recursive": u"""recursive : bool @@ -547,18 +547,18 @@ def wrap(text, width=80): # chunked "chunked": u"""chunked : bool - if ``True`` *(default)*, produced chunked lazy arrays using awkward.ChunkedArray. 
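The ``branches`` argument described above accepts several selection forms. A sketch, assuming a hypothetical file whose TTree has branches named px, py, pz::

    import uproot3

    tree = uproot3.open("example.root")["events"]
    tree.arrays(["px", "py"])                     # explicit list of names
    tree.arrays("p*")                             # filename-like glob
    tree.arrays("/p[xyz]/i")                      # regular expression between slashes
    tree.arrays({"px": uproot3.asdtype(">f8")})   # dict of name -> Interpretation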
If ``False``, produce bare VirtualArrays. This option implies ``entrysteps = float('inf')``.""", + if ``True`` *(default)*, produced chunked lazy arrays using awkward0.ChunkedArray. If ``False``, produce bare VirtualArrays. This option implies ``entrysteps = float('inf')``.""", } -################################################################ uproot.tree.iterate +################################################################ uproot3.tree.iterate -uproot.tree.iterate.__doc__ = wrap( +uproot3.tree.iterate.__doc__ = wrap( u"""Opens a series of ROOT files (local or remote), yielding the same number of entries from all selected branches in each step. Depending on the "entrysteps" parameter, the number of entries in one step may differ from the number of entries in the next step, but in every step, the same number of entries is retrieved from all *baskets.* - All but the first two parameters are identical to :py:meth:`uproot.tree.TreeMethods.iterate`. + All but the first two parameters are identical to :py:meth:`uproot3.tree.TreeMethods.iterate`. Parameters ---------- @@ -608,13 +608,13 @@ def wrap(text, width=80): Returns ------- - iterator over (str, :py:class:`ROOTDirectory `, int, int, outputtype) (if *reportpath*, *reportfile*, *reportentries*) or just outputtype (otherwise) + iterator over (str, :py:class:`ROOTDirectory `, int, int, outputtype) (if *reportpath*, *reportfile*, *reportentries*) or just outputtype (otherwise) aligned array segments from the files. """.format(**dict(list(open_fragments.items()) + list(tree_fragments.items()))), width=TEXT_WIDTH) -################################################################ uproot.pandas.iterate +################################################################ uproot3.pandas.iterate -uproot.pandas.iterate.__doc__ = wrap( +uproot3.pandas.iterate.__doc__ = wrap( u"""Opens a series of ROOT files (local or remote), yielding Pandas DataFrames in each step. Depending on the "entrysteps" parameter, the number of entries in one step may differ from the number of entries in the next step, but in every step, the same number of entries is retrieved from all *baskets.* @@ -663,18 +663,18 @@ def wrap(text, width=80): Returns ------- - iterator over (str, :py:class:`ROOTDirectory `, pandas.Dataframe) (if *reportpath* and *reportfile*) or just pandas.DataFrame (otherwise) + iterator over (str, :py:class:`ROOTDirectory `, pandas.Dataframe) (if *reportpath* and *reportfile*) or just pandas.DataFrame (otherwise) aligned array segments from the files. """.format(**dict(list(open_fragments.items()) + list(tree_fragments.items()))), width=TEXT_WIDTH) -################################################################ uproot.tree.TTreeMethods +################################################################ uproot3.tree.TTreeMethods -uproot.tree.TTreeMethods.__doc__ = wrap( +uproot3.tree.TTreeMethods.__doc__ = wrap( u"""Adds array reading methods to TTree objects that have been streamed from a ROOT file. - - square brackets (``__getitem__``) returns a branch by name (see :py:meth:`get `). + - square brackets (``__getitem__``) returns a branch by name (see :py:meth:`get `). - the ``len`` function (``__len__``) returns the number of entries (same as ``numentries``). - - iteration (``__iter__``) has no implementation. This is to avoid confusion between iterating over all branches (probably not what you want, but fitting the pattern set by :py:class:`ROOTDirectory ` and ``dict``) and iterating over the data. + - iteration (``__iter__``) has no implementation. 
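uproot3.tree.iterate and uproot3.pandas.iterate, documented above, step through many files with the same number of entries per step. A minimal sketch; the file pattern, tree name, and branch names are hypothetical, and ``process`` stands in for user code::

    import uproot3
    import uproot3.pandas

    for chunk in uproot3.iterate("data*.root", "events", ["px", "py"],
                                 entrysteps=100000):
        process(chunk)                      # dict of branch name -> array per step

    for df in uproot3.pandas.iterate("data*.root", "events", ["px", "py"]):
        print(df.head())                    # one pandas.DataFrame per step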
This is to avoid confusion between iterating over all branches (probably not what you want, but fitting the pattern set by :py:class:`ROOTDirectory ` and ``dict``) and iterating over the data. **Attributes, properties, and methods:** @@ -683,29 +683,29 @@ def wrap(text, width=80): - **numentries** (*int*) number of entries in the TTree (same as ``len``). - **pandas** connector to `Pandas `_ functions - - :py:meth:`get ` return a branch by name (at any level of depth). - - :py:meth:`iterkeys ` iterate over branch names. - - :py:meth:`itervalues ` iterate over branches. - - :py:meth:`iteritems ` iterate over *(branch name, branch)* pairs. - - :py:meth:`keys ` return branch names. - - :py:meth:`values ` return branches. - - :py:meth:`items ` return *(branch name, branch)* pairs. - - :py:meth:`allkeys ` return branch names at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`keys `). - - :py:meth:`allvalues ` return branches at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`values `). - - :py:meth:`allitems ` return *(branch name, branch)* pairs at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`items `). - - :py:meth:`clusters ` iterate over *(int, int)* pairs representing cluster entry starts and stops in this TTree. - - :py:meth:`mempartitions ` iterate over *(int, int)* pairs representing entry starts and stops that attempt to maintain a constant memory footprint. + - :py:meth:`get ` return a branch by name (at any level of depth). + - :py:meth:`iterkeys ` iterate over branch names. + - :py:meth:`itervalues ` iterate over branches. + - :py:meth:`iteritems ` iterate over *(branch name, branch)* pairs. + - :py:meth:`keys ` return branch names. + - :py:meth:`values ` return branches. + - :py:meth:`items ` return *(branch name, branch)* pairs. + - :py:meth:`allkeys ` return branch names at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`keys `). + - :py:meth:`allvalues ` return branches at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`values `). + - :py:meth:`allitems ` return *(branch name, branch)* pairs at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`items `). + - :py:meth:`clusters ` iterate over *(int, int)* pairs representing cluster entry starts and stops in this TTree. + - :py:meth:`mempartitions ` iterate over *(int, int)* pairs representing entry starts and stops that attempt to maintain a constant memory footprint. **Methods for reading array data:** - - :py:meth:`array ` read one branch into an array (or other object if provided an alternate *interpretation*). - - :py:meth:`arrays ` read many branches into arrays (or other objects if provided alternate *interpretations*). - - :py:meth:`lazyarray ` create a lazy array that would read the branch as needed. - - :py:meth:`lazyarrays ` create many lazy arrays. - - :py:meth:`iterate ` iterate over many arrays at once, yielding the same number of entries from all selected branches in each step. + - :py:meth:`array ` read one branch into an array (or other object if provided an alternate *interpretation*). + - :py:meth:`arrays ` read many branches into arrays (or other objects if provided alternate *interpretations*). + - :py:meth:`lazyarray ` create a lazy array that would read the branch as needed. + - :py:meth:`lazyarrays ` create many lazy arrays. 
+ - :py:meth:`iterate ` iterate over many arrays at once, yielding the same number of entries from all selected branches in each step. """, width=TEXT_WIDTH) -_method(uproot.tree.TTreeMethods.get).__doc__ = wrap( +_method(uproot3.tree.TTreeMethods.get).__doc__ = wrap( u"""Return a branch by name (at any level of depth). Parameters @@ -722,10 +722,10 @@ def wrap(text, width=80): Notes ----- - This method can be accessed more directly through square brackets (``__getitem__``) on the :py:class:`TTree ` object. + This method can be accessed more directly through square brackets (``__getitem__``) on the :py:class:`TTree ` object. """, width=TEXT_WIDTH) -_method(uproot.tree.TTreeMethods.iterkeys).__doc__ = wrap( +_method(uproot3.tree.TTreeMethods.iterkeys).__doc__ = wrap( u"""Iterate over branch names. Parameters @@ -742,7 +742,7 @@ def wrap(text, width=80): names of branches. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TTreeMethods.itervalues).__doc__ = wrap( +_method(uproot3.tree.TTreeMethods.itervalues).__doc__ = wrap( u"""Iterate over branches. Parameters @@ -755,11 +755,11 @@ def wrap(text, width=80): Returns ------- - iterator over :py:class:`TBranch ` + iterator over :py:class:`TBranch ` branches. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TTreeMethods.iteritems).__doc__ = wrap( +_method(uproot3.tree.TTreeMethods.iteritems).__doc__ = wrap( u"""Iterate over *(branch name, branch)* pairs. Parameters @@ -772,11 +772,11 @@ def wrap(text, width=80): Returns ------- - iterator over (bytes, :py:class:`TBranch `) + iterator over (bytes, :py:class:`TBranch `) name-branch pairs. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TTreeMethods.keys).__doc__ = wrap( +_method(uproot3.tree.TTreeMethods.keys).__doc__ = wrap( u"""Return branch names. Parameters @@ -793,7 +793,7 @@ def wrap(text, width=80): names of branches. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TTreeMethods.values).__doc__ = wrap( +_method(uproot3.tree.TTreeMethods.values).__doc__ = wrap( u"""Return branches. Parameters @@ -806,11 +806,11 @@ def wrap(text, width=80): Returns ------- - list of :py:class:`TBranch ` + list of :py:class:`TBranch ` branches. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TTreeMethods.items).__doc__ = wrap( +_method(uproot3.tree.TTreeMethods.items).__doc__ = wrap( u"""Return *(branch name, branch)* pairs. Parameters @@ -823,12 +823,12 @@ def wrap(text, width=80): Returns ------- - list of (bytes, :py:class:`TBranch `) + list of (bytes, :py:class:`TBranch `) name-branch pairs. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TTreeMethods.allkeys).__doc__ = wrap( -u"""Return branch names at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`keys `). +_method(uproot3.tree.TTreeMethods.allkeys).__doc__ = wrap( +u"""Return branch names at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`keys `). Parameters ---------- @@ -842,8 +842,8 @@ def wrap(text, width=80): names of branches. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TTreeMethods.allvalues).__doc__ = wrap( -u"""Return branches at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`values `). +_method(uproot3.tree.TTreeMethods.allvalues).__doc__ = wrap( +u"""Return branches at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`values `). 
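A brief sketch of the TTreeMethods navigation documented above (hypothetical file and branch names)::

    tree = uproot3.open("example.root")["events"]
    print(tree.name, tree.title, tree.numentries)
    print(tree.keys())                    # branch names at the top level
    print(tree.allkeys())                 # branch names at all depths
    px_branch = tree["px"]                # same as tree.get("px")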
Parameters ---------- @@ -853,12 +853,12 @@ def wrap(text, width=80): Returns ------- - list of :py:class:`TBranch ` + list of :py:class:`TBranch ` branches. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TTreeMethods.allitems).__doc__ = wrap( -u"""Return *(branch name, branch)* pairs at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`items `). +_method(uproot3.tree.TTreeMethods.allitems).__doc__ = wrap( +u"""Return *(branch name, branch)* pairs at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`items `). Parameters ---------- @@ -868,11 +868,11 @@ def wrap(text, width=80): Returns ------- - list of (bytes, :py:class:`TBranch ` + list of (bytes, :py:class:`TBranch ` name-branch pairs. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TTreeMethods.clusters).__doc__ = wrap( +_method(uproot3.tree.TTreeMethods.clusters).__doc__ = wrap( u"""Return entry starts and stops as *(int, int)* pairs representing clusters for a given set of branches this TTree. Rather than using ROOT's self-reported clusters (which don't exist in every ROOT file), this method finds the minimal step sizes in which a given set of branches have basket thresholds for the same entry number. For a single branch, this is exactly the basket boundaries. It is possible for a given set of branches to never line up, in which case, the cluster is the entire file. @@ -894,10 +894,10 @@ def wrap(text, width=80): start (inclusive) and stop (exclusive) pairs for each cluster. """, width=TEXT_WIDTH) -_method(uproot.tree.TTreeMethods.mempartitions).__doc__ = wrap( +_method(uproot3.tree.TTreeMethods.mempartitions).__doc__ = wrap( u"""Return entry starts and stops as *(int, int)* pairs of (approximately) equal-memory partitions for a given set of branches in this TTree. - Similar to :py:meth:`clusters ` in that it provides a list of (start, stop) entry pairs, but instead of fitting baskets, this method attempts to keep the memory use constant. + Similar to :py:meth:`clusters ` in that it provides a list of (start, stop) entry pairs, but instead of fitting baskets, this method attempts to keep the memory use constant. Parameters ---------- @@ -921,7 +921,7 @@ def wrap(text, width=80): start (inclusive) and stop (exclusive) pairs for each equal-memory partition. """, width=TEXT_WIDTH) -_method(uproot.tree.TTreeMethods.array).__doc__ = wrap( +_method(uproot3.tree.TTreeMethods.array).__doc__ = wrap( u"""Read one branch into an array (or other object if provided an alternate *interpretation*). Parameters @@ -953,7 +953,7 @@ def wrap(text, width=80): array or other object, depending on *interpretation*. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TTreeMethods.arrays).__doc__ = wrap( +_method(uproot3.tree.TTreeMethods.arrays).__doc__ = wrap( u"""Read many branches into arrays (or other objects if provided alternate *interpretations*). Parameters @@ -990,7 +990,7 @@ def wrap(text, width=80): branch data. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TTreeMethods.lazyarray).__doc__ = wrap( +_method(uproot3.tree.TTreeMethods.lazyarray).__doc__ = wrap( u"""Create a lazy array that would read the branch as needed. Parameters @@ -1027,7 +1027,7 @@ def wrap(text, width=80): lazy version of the array. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TTreeMethods.lazyarrays).__doc__ = wrap( +_method(uproot3.tree.TTreeMethods.lazyarrays).__doc__ = wrap( u"""Create a table of lazy arrays. 
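A sketch pulling together the entry-range helpers and the array-reading methods documented in this stretch; names are hypothetical, and mempartitions is assumed to accept a plain byte count as its first argument::

    tree = uproot3.open("example.root")["events"]
    list(tree.clusters(["px", "py"]))                   # (start, stop) pairs aligned to baskets
    list(tree.mempartitions(10*1024**2, ["px", "py"]))  # (start, stop) pairs of ~10 MB each
    px   = tree.array("px", entrystart=0, entrystop=1000)
    both = tree.arrays(["px", "py"])                    # dict of name -> array
    lazy = tree.lazyarrays(["px", "py"])                # reads baskets only when accessed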
Parameters @@ -1066,7 +1066,7 @@ def wrap(text, width=80): lazy branch data. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TTreeMethods.iterate).__doc__ = wrap( +_method(uproot3.tree.TTreeMethods.iterate).__doc__ = wrap( u"""Iterate over many arrays at once, yielding the same number of entries from all selected branches in each step. Depending on the "entrysteps" parameter, the number of entries in one step may differ from the number of entries in the next step, but in every step, the same number of entries is retrieved from all *baskets.* @@ -1109,60 +1109,60 @@ def wrap(text, width=80): aligned array segments from the TTree. """.format(**tree_fragments), width=TEXT_WIDTH) -################################################################ uproot.tree.TBranchMethods +################################################################ uproot3.tree.TBranchMethods -uproot.tree.TBranchMethods.__doc__ = wrap( +uproot3.tree.TBranchMethods.__doc__ = wrap( u"""Adds array reading methods to TBranch objects that have been streamed from a ROOT file. - - square brackets (``__getitem__``) returns a subbranch by name (see :py:meth:`get `). + - square brackets (``__getitem__``) returns a subbranch by name (see :py:meth:`get `). - the ``len`` function (``__len__``) returns the number of entries (same as ``numentries``). - - iteration (``__iter__``) has no implementation. This is to avoid confusion between iterating over all subbranches (probably not what you want, but fitting the pattern set by :py:class:`ROOTDirectory ` and ``dict``) and iterating over the data. + - iteration (``__iter__``) has no implementation. This is to avoid confusion between iterating over all subbranches (probably not what you want, but fitting the pattern set by :py:class:`ROOTDirectory ` and ``dict``) and iterating over the data. **Attributes, properties, and methods:** - **name** (*bytes*) name of the TBranch. - **title** (*bytes*) title of the TBranch. - - **compression** (:py:class:`Compression `) the compression algorithm and level specified in the TBranch header. (Actual compression used may differ.) - - :py:meth:`get ` return a subbranch by name (at any level of depth). - - :py:meth:`iterkeys ` iterate over subbranch names. - - :py:meth:`itervalues ` iterate over subbranches. - - :py:meth:`iteritems ` iterate over *(subbranch name, subbranch)* pairs. - - :py:meth:`keys ` return subbranch names. - - :py:meth:`values ` return subbranches. - - :py:meth:`items ` return *(subbranch name, subbranch)* pairs. - - :py:meth:`allkeys ` return subbranch names at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`keys `). - - :py:meth:`allvalues ` return subbranches at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`values `). - - :py:meth:`allitems ` return *(subbranch name, subbranch)* pairs at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`items `). + - **compression** (:py:class:`Compression `) the compression algorithm and level specified in the TBranch header. (Actual compression used may differ.) + - :py:meth:`get ` return a subbranch by name (at any level of depth). + - :py:meth:`iterkeys ` iterate over subbranch names. + - :py:meth:`itervalues ` iterate over subbranches. + - :py:meth:`iteritems ` iterate over *(subbranch name, subbranch)* pairs. + - :py:meth:`keys ` return subbranch names. + - :py:meth:`values ` return subbranches. + - :py:meth:`items ` return *(subbranch name, subbranch)* pairs. 
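TTreeMethods.iterate, documented above, yields aligned chunks from a single TTree. A minimal sketch with hypothetical names::

    tree = uproot3.open("example.root")["events"]
    for chunk in tree.iterate(["px", "py"], entrysteps=50000):
        # every chunk is a dict of branch name -> array, all with the same length
        print(len(chunk[b"px"]))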
+ - :py:meth:`allkeys ` return subbranch names at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`keys `). + - :py:meth:`allvalues ` return subbranches at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`values `). + - :py:meth:`allitems ` return *(subbranch name, subbranch)* pairs at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`items `). **Branch information:** - **numentries** (*int*) number of entries in the TBranch (same as ``len``). - **numbaskets** (*int*) number of baskets in the TBranch. - - :py:meth:`uncompressedbytes ` the number of bytes contained in the TBranch (data and offsets; not including any key headers) *after* decompression, if applicable. - - :py:meth:`compressedbytes ` the number of bytes contained in the TBranch (data and offsets; not including any key headers) *before* decompression, if applicable. - - :py:meth:`compressionratio ` the uncompressed bytes divided by compressed bytes (greater than or equal to 1). - - :py:meth:`numitems ` the number of items in the TBranch, under a given interpretation. + - :py:meth:`uncompressedbytes ` the number of bytes contained in the TBranch (data and offsets; not including any key headers) *after* decompression, if applicable. + - :py:meth:`compressedbytes ` the number of bytes contained in the TBranch (data and offsets; not including any key headers) *before* decompression, if applicable. + - :py:meth:`compressionratio ` the uncompressed bytes divided by compressed bytes (greater than or equal to 1). + - :py:meth:`numitems ` the number of items in the TBranch, under a given interpretation. **Basket information:** - - :py:meth:`basket_entrystart ` the starting entry for a given basket (inclusive). - - :py:meth:`basket_entrystop ` the stopping entry for a given basket (exclusive). - - :py:meth:`basket_numentries ` the number of entries in a given basket. - - :py:meth:`basket_uncompressedbytes ` the number of bytes contained in the basket (data and offsets; not including any key headers) *after* decompression, if applicable. - - :py:meth:`basket_compressedbytes ` the number of bytes contained in the basket (data and offsets; not including any key headers) *before* decompression, if applicable. - - :py:meth:`basket_numitems ` the number of items in the basket, under a given interpretation. - - :py:meth:`mempartitions ` iterate over *(int, int)* pairs representing entry starts and stops that attempt to maintain a constant memory footprint. + - :py:meth:`basket_entrystart ` the starting entry for a given basket (inclusive). + - :py:meth:`basket_entrystop ` the stopping entry for a given basket (exclusive). + - :py:meth:`basket_numentries ` the number of entries in a given basket. + - :py:meth:`basket_uncompressedbytes ` the number of bytes contained in the basket (data and offsets; not including any key headers) *after* decompression, if applicable. + - :py:meth:`basket_compressedbytes ` the number of bytes contained in the basket (data and offsets; not including any key headers) *before* decompression, if applicable. + - :py:meth:`basket_numitems ` the number of items in the basket, under a given interpretation. + - :py:meth:`mempartitions ` iterate over *(int, int)* pairs representing entry starts and stops that attempt to maintain a constant memory footprint. **Methods for reading array data:** - - :py:meth:`array ` read the branch into an array (or other object if provided an alternate *interpretation*). 
- - :py:meth:`lazyarray ` create a lazy array that would read the branch as needed. - - :py:meth:`basket ` read a single basket into an array. - - :py:meth:`baskets ` read baskets into a list of arrays. - - :py:meth:`iterate_baskets ` iterate over baskets. + - :py:meth:`array ` read the branch into an array (or other object if provided an alternate *interpretation*). + - :py:meth:`lazyarray ` create a lazy array that would read the branch as needed. + - :py:meth:`basket ` read a single basket into an array. + - :py:meth:`baskets ` read baskets into a list of arrays. + - :py:meth:`iterate_baskets ` iterate over baskets. """, width=TEXT_WIDTH) -_method(uproot.tree.TBranchMethods.get).__doc__ = wrap( +_method(uproot3.tree.TBranchMethods.get).__doc__ = wrap( u"""Return a subbranch by name (at any level of depth). Parameters @@ -1179,10 +1179,10 @@ def wrap(text, width=80): Notes ----- - This method can be accessed more directly through square brackets (``__getitem__``) on the :py:class:`TBranch ` object. + This method can be accessed more directly through square brackets (``__getitem__``) on the :py:class:`TBranch ` object. """, width=TEXT_WIDTH) -_method(uproot.tree.TBranchMethods.iterkeys).__doc__ = wrap( +_method(uproot3.tree.TBranchMethods.iterkeys).__doc__ = wrap( u"""Iterate over subbranch names. Parameters @@ -1199,7 +1199,7 @@ def wrap(text, width=80): subbranch names. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TBranchMethods.itervalues).__doc__ = wrap( +_method(uproot3.tree.TBranchMethods.itervalues).__doc__ = wrap( u"""Iterate over subbranches. Parameters @@ -1212,11 +1212,11 @@ def wrap(text, width=80): Returns ------- - iterator over :py:class:`TBranch ` + iterator over :py:class:`TBranch ` subbranches. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TBranchMethods.iteritems).__doc__ = wrap( +_method(uproot3.tree.TBranchMethods.iteritems).__doc__ = wrap( u"""Iterate over *(subbranch name, subbranch)* pairs. Parameters @@ -1229,11 +1229,11 @@ def wrap(text, width=80): Returns ------- - iterator over (bytes, :py:class:`TBranch `) + iterator over (bytes, :py:class:`TBranch `) *(subbranch name, subbranch)* pairs. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TBranchMethods.keys).__doc__ = wrap( +_method(uproot3.tree.TBranchMethods.keys).__doc__ = wrap( u"""Return subbranch names. Parameters @@ -1250,7 +1250,7 @@ def wrap(text, width=80): subbranch names. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TBranchMethods.values).__doc__ = wrap( +_method(uproot3.tree.TBranchMethods.values).__doc__ = wrap( u"""Return subbranches. Parameters @@ -1263,11 +1263,11 @@ def wrap(text, width=80): Returns ------- - list of :py:class:`TBranch ` + list of :py:class:`TBranch ` subbranches. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TBranchMethods.items).__doc__ = wrap( +_method(uproot3.tree.TBranchMethods.items).__doc__ = wrap( u"""Return *(subbranch name, subbranch)* pairs. Parameters @@ -1280,12 +1280,12 @@ def wrap(text, width=80): Returns ------- - list of (bytes, :py:class:`TBranch `) + list of (bytes, :py:class:`TBranch `) *(subbranch name, subbranch)* pairs. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TBranchMethods.allkeys).__doc__ = wrap( -u"""Return subbranch names at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`keys `). 
+_method(uproot3.tree.TBranchMethods.allkeys).__doc__ = wrap( +u"""Return subbranch names at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`keys `). Parameters ---------- @@ -1299,8 +1299,8 @@ def wrap(text, width=80): subbranch names. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TBranchMethods.allvalues).__doc__ = wrap( -u"""Return subbranches at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`values `). +_method(uproot3.tree.TBranchMethods.allvalues).__doc__ = wrap( +u"""Return subbranches at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`values `). Parameters ---------- @@ -1310,12 +1310,12 @@ def wrap(text, width=80): Returns ------- - list of :py:class:`TBranch ` + list of :py:class:`TBranch ` subbranches. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TBranchMethods.allitems).__doc__ = wrap( -u"""Return *(subbranch name, subbranch)* pairs at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`items `). +_method(uproot3.tree.TBranchMethods.allitems).__doc__ = wrap( +u"""Return *(subbranch name, subbranch)* pairs at all levels of depth (shortcut for passing ``recursive=True`` to :py:meth:`items `). Parameters ---------- @@ -1325,11 +1325,11 @@ def wrap(text, width=80): Returns ------- - list of (bytes, :py:class:`TBranch ` + list of (bytes, :py:class:`TBranch ` (subbranch name, subbranch)* pairs. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TBranchMethods.uncompressedbytes).__doc__ = wrap( +_method(uproot3.tree.TBranchMethods.uncompressedbytes).__doc__ = wrap( u"""The number of bytes contained in the TBranch (data and offsets; not including any key headers) *after* decompression, if applicable. Parameters @@ -1342,7 +1342,7 @@ def wrap(text, width=80): uncompressed bytes. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TBranchMethods.compressedbytes).__doc__ = wrap( +_method(uproot3.tree.TBranchMethods.compressedbytes).__doc__ = wrap( u"""The number of bytes contained in the TBranch (data and offsets; not including any key headers) *before* decompression, if applicable. Parameters @@ -1355,7 +1355,7 @@ def wrap(text, width=80): compressed bytes. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TBranchMethods.compressionratio).__doc__ = wrap( +_method(uproot3.tree.TBranchMethods.compressionratio).__doc__ = wrap( u"""The uncompressed bytes divided by compressed bytes (greater than or equal to 1). Parameters @@ -1368,7 +1368,7 @@ def wrap(text, width=80): compression ratio. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TBranchMethods.numitems).__doc__ = wrap( +_method(uproot3.tree.TBranchMethods.numitems).__doc__ = wrap( u"""The number of items in the TBranch, under a given interpretation. Parameters @@ -1383,7 +1383,7 @@ def wrap(text, width=80): number of items. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TBranchMethods.basket_entrystart).__doc__ = wrap( +_method(uproot3.tree.TBranchMethods.basket_entrystart).__doc__ = wrap( u"""The starting entry for a given basket (inclusive). Parameters @@ -1396,7 +1396,7 @@ def wrap(text, width=80): starting entry. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TBranchMethods.basket_entrystop).__doc__ = wrap( +_method(uproot3.tree.TBranchMethods.basket_entrystop).__doc__ = wrap( u"""The stopping entry for a given basket (exclusive). 
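The size and compression accessors documented above are methods, not properties. A quick hedged sketch (hypothetical names)::

    branch = uproot3.open("example.root")["events"]["px"]
    print(branch.uncompressedbytes())   # data + offsets after decompression
    print(branch.compressedbytes())     # data + offsets before decompression
    print(branch.compressionratio())    # uncompressed / compressed, >= 1
    print(branch.numitems())            # item count under the default interpretation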
Parameters @@ -1409,7 +1409,7 @@ def wrap(text, width=80): stopping entry. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TBranchMethods.basket_numentries).__doc__ = wrap( +_method(uproot3.tree.TBranchMethods.basket_numentries).__doc__ = wrap( u"""The number of entries in a given basket. Parameters @@ -1422,7 +1422,7 @@ def wrap(text, width=80): number of entries. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TBranchMethods.basket_uncompressedbytes).__doc__ = wrap( +_method(uproot3.tree.TBranchMethods.basket_uncompressedbytes).__doc__ = wrap( u"""The number of bytes contained in the basket (data and offsets; not including any key headers) *after* decompression, if applicable. Parameters @@ -1437,7 +1437,7 @@ def wrap(text, width=80): number of uncompressed bytes. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TBranchMethods.basket_compressedbytes).__doc__ = wrap( +_method(uproot3.tree.TBranchMethods.basket_compressedbytes).__doc__ = wrap( u"""The number of bytes contained in the basket (data and offsets; not including any key headers) *before* decompression, if applicable. Parameters @@ -1452,7 +1452,7 @@ def wrap(text, width=80): number of compressed bytes. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TBranchMethods.basket_numitems).__doc__ = wrap( +_method(uproot3.tree.TBranchMethods.basket_numitems).__doc__ = wrap( u"""The number of items in the basket, under a given interpretation. Parameters @@ -1469,7 +1469,7 @@ def wrap(text, width=80): number of items. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TBranchMethods.array).__doc__ = wrap( +_method(uproot3.tree.TBranchMethods.array).__doc__ = wrap( u"""Read the branch into an array (or other object if provided an alternate *interpretation*). Parameters @@ -1500,10 +1500,10 @@ def wrap(text, width=80): branch data. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TBranchMethods.mempartitions).__doc__ = wrap( +_method(uproot3.tree.TBranchMethods.mempartitions).__doc__ = wrap( u"""Return entry starts and stops as *(int, int)* pairs of (approximately) equal-memory partitions in this TBranch. - Similar to :py:meth:`clusters ` in that it provides a list of (start, stop) entry pairs, but instead of fitting baskets, this method attempts to keep the memory use constant. + Similar to :py:meth:`clusters ` in that it provides a list of (start, stop) entry pairs, but instead of fitting baskets, this method attempts to keep the memory use constant. Parameters ---------- @@ -1525,7 +1525,7 @@ def wrap(text, width=80): start (inclusive) and stop (exclusive) pairs for each equal-memory partition. """, width=TEXT_WIDTH) -_method(uproot.tree.TBranchMethods.lazyarray).__doc__ = wrap( +_method(uproot3.tree.TBranchMethods.lazyarray).__doc__ = wrap( u"""Create a lazy array that would read the branch as needed. Parameters @@ -1560,7 +1560,7 @@ def wrap(text, width=80): lazy version of branch data. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TBranchMethods.basket).__doc__ = wrap( +_method(uproot3.tree.TBranchMethods.basket).__doc__ = wrap( u"""Read a single basket into an array. Parameters @@ -1589,7 +1589,7 @@ def wrap(text, width=80): basket data. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TBranchMethods.baskets).__doc__ = wrap( +_method(uproot3.tree.TBranchMethods.baskets).__doc__ = wrap( u"""Read baskets into a list of arrays. 
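A sketch of the per-basket accessors and basket-level reads documented here (hypothetical names)::

    branch = uproot3.open("example.root")["events"]["px"]
    for i in range(branch.numbaskets):
        print(i,
              branch.basket_entrystart(i),          # inclusive
              branch.basket_entrystop(i),           # exclusive
              branch.basket_numentries(i),
              branch.basket_compressedbytes(i),
              branch.basket_uncompressedbytes(i))
    first = branch.basket(0)        # one basket as an array
    everything = branch.baskets()   # list of arrays, one per basket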
Parameters @@ -1622,7 +1622,7 @@ def wrap(text, width=80): basket data. """.format(**tree_fragments), width=TEXT_WIDTH) -_method(uproot.tree.TBranchMethods.iterate_baskets).__doc__ = wrap( +_method(uproot3.tree.TBranchMethods.iterate_baskets).__doc__ = wrap( u"""Iterate over baskets. Parameters @@ -1651,9 +1651,9 @@ def wrap(text, width=80): basket data. """.format(**tree_fragments), width=TEXT_WIDTH) -################################################################ uproot.tree.TTreeMethods.pandas +################################################################ uproot3.tree.TTreeMethods.pandas -_method(uproot._connect._pandas.TTreeMethods_pandas.df).__doc__ = wrap( +_method(uproot3._connect._pandas.TTreeMethods_pandas.df).__doc__ = wrap( u"""Create a Pandas DataFrame from some branches. Parameters @@ -1684,9 +1684,9 @@ def wrap(text, width=80): data frame (`see docs `_). """.format(**tree_fragments), width=TEXT_WIDTH) -################################################################ uproot.tree.lazyarray(s) +################################################################ uproot3.tree.lazyarray(s) -uproot.tree.lazyarray.__doc__ = wrap( +uproot3.tree.lazyarray.__doc__ = wrap( u"""Create a lazy array that would read from a set of files as needed. Parameters @@ -1735,7 +1735,7 @@ def wrap(text, width=80): lazy files of lazy baskets. """.format(**dict(list(open_fragments.items()) + list(tree_fragments.items()))), width=TEXT_WIDTH) -uproot.tree.lazyarrays.__doc__ = wrap( +uproot3.tree.lazyarrays.__doc__ = wrap( u"""Create a lazy table that would read from a set of files as needed. Parameters @@ -1783,9 +1783,9 @@ def wrap(text, width=80): lazy files of branches of lazy baskets. """.format(**dict(list(open_fragments.items()) + list(tree_fragments.items()))), width=TEXT_WIDTH) -################################################################ uproot.tree.daskarray/daskframe +################################################################ uproot3.tree.daskarray/daskframe -uproot.tree.daskarray.__doc__ = wrap( +uproot3.tree.daskarray.__doc__ = wrap( u"""Create a Dask array that would read from a set of files as needed. Parameters @@ -1832,7 +1832,7 @@ def wrap(text, width=80): lazy files of lazy baskets. """.format(**dict(list(open_fragments.items()) + list(tree_fragments.items()))), width=TEXT_WIDTH) -uproot.tree.daskframe.__doc__ = wrap( +uproot3.tree.daskframe.__doc__ = wrap( u"""Create a Dask DataFrame that would read from a set of files as needed. Parameters @@ -1876,12 +1876,12 @@ def wrap(text, width=80): lazy files of branches of lazy baskets. """.format(**dict(list(open_fragments.items()) + list(tree_fragments.items()))), width=TEXT_WIDTH) -################################################################ uproot.tree.numentries +################################################################ uproot3.tree.numentries -uproot.tree.numentries.__doc__ = wrap( +uproot3.tree.numentries.__doc__ = wrap( u"""Get the number of entries in a TTree without fully opening the file. - ``uproot.numentries("file.root", "tree")`` is a shortcut for ``uproot.open("file.root")["tree"].numentries`` that should be faster, particularly for files with many streamers and/or TTrees with many branches because it skips those steps in getting to the number of entries. 
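The Pandas connector, the file-set lazy arrays, and the Dask entry points documented above can be exercised like this; file, tree, and branch names are hypothetical, and pandas/dask must be installed::

    tree = uproot3.open("example.root")["events"]
    df = tree.pandas.df(["px", "py"])                # flat DataFrame

    lazy = uproot3.lazyarrays("data*.root", "events", ["px", "py"])
    arr  = uproot3.daskarray("data*.root", "events", "px")
    ddf  = uproot3.daskframe("data*.root", "events", ["px", "py"])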
+ ``uproot3.numentries("file.root", "tree")`` is a shortcut for ``uproot3.open("file.root")["tree"].numentries`` that should be faster, particularly for files with many streamers and/or TTrees with many branches because it skips those steps in getting to the number of entries. If a requested file is not found, this raises the appropriate exception. If a requested file does not have the requested TTree, the number of entries is taken to be zero, raising no error. @@ -1912,9 +1912,9 @@ def wrap(text, width=80): total number of entries or number of entries for each file, depending on *total*. """.format(**dict(list(open_fragments.items()) + list(tree_fragments.items()))), width=TEXT_WIDTH) -################################################################ uproot.interp.interp.Interpretation +################################################################ uproot3.interp.interp.Interpretation -uproot.interp.interp.Interpretation.__doc__ = wrap( +uproot3.interp.interp.Interpretation.__doc__ = wrap( u"""Interface for interpretations. Interpretations do not need to inherit from this class, but they do need to satisfy the interface described below. @@ -1954,34 +1954,34 @@ def wrap(text, width=80): possibly post-process a ``destination`` to make it ready for consumption. This is needed if a different form must be used for filling than should be provided to the user--- for instance, offsets of a jagged array can't be computed when filling sections of it in parallel (sizes can), but the user should receive a jagged array based on offsets for random access. """, width=TEXT_WIDTH) -################################################################ uproot.interp.auto.interpret +################################################################ uproot3.interp.auto.interpret -uproot.interp.auto.interpret.__doc__ = wrap( +uproot3.interp.auto.interpret.__doc__ = wrap( u"""Generate a default interpretation of a branch. This function is called with default options on each branch in the following methods to generate a default interpretation. You can override the default either by calling this function explicitly with different parameters or by modifying its result. - - :py:meth:`TTreeMethods.array ` - - :py:meth:`TTreeMethods.arrays ` - - :py:meth:`TTreeMethods.lazyarray ` - - :py:meth:`TTreeMethods.lazyarrays ` - - :py:meth:`TTreeMethods.iterate ` - - :py:meth:`TTreeMethods.iterate_clusters ` - - :py:meth:`TBranchMethods.array ` - - :py:meth:`TBranchMethods.lazyarray ` - - :py:meth:`TBranchMethods.basket ` - - :py:meth:`TBranchMethods.baskets ` - - :py:meth:`TBranchMethods.iterate_baskets ` + - :py:meth:`TTreeMethods.array ` + - :py:meth:`TTreeMethods.arrays ` + - :py:meth:`TTreeMethods.lazyarray ` + - :py:meth:`TTreeMethods.lazyarrays ` + - :py:meth:`TTreeMethods.iterate ` + - :py:meth:`TTreeMethods.iterate_clusters ` + - :py:meth:`TBranchMethods.array ` + - :py:meth:`TBranchMethods.lazyarray ` + - :py:meth:`TBranchMethods.basket ` + - :py:meth:`TBranchMethods.baskets ` + - :py:meth:`TBranchMethods.iterate_baskets ` Parameters ---------- - branch : :py:class:`TBranchMethods ` + branch : :py:class:`TBranchMethods ` branch to interpret. awkwardlib : ``None``, str, or module - if ``None`` *(default)*, use ``import awkward`` to get awkward-array constructors. Otherwise, parse the module string name or use the provided module. + if ``None`` *(default)*, use ``import awkward`` to get Awkward Array constructors. Otherwise, parse the module string name or use the provided module. 
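Two of the functions documented here, numentries and interpret, are handy for quick inspection. A sketch with hypothetical names::

    import uproot3

    print(uproot3.numentries("example.root", "events"))   # no branch data is read

    tree = uproot3.open("example.root")["events"]
    print(uproot3.interpret(tree["px"]))                  # the default Interpretation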
- classes : ``None`` or ``dict`` of str \u2192 :py:class:`ROOTStreamedObject ` + classes : ``None`` or ``dict`` of str \u2192 :py:class:`ROOTStreamedObject ` class definitions associated with each class name, usually generated by ROOT file streamers. If ``None`` *(default)*, use the class definitions generated from the file from which this branch was read. swapbytes : bool @@ -1989,21 +1989,21 @@ class definitions associated with each class name, usually generated by ROOT fil Returns ------- - :py:class:`Interpretation ` + :py:class:`Interpretation ` the interpretation. """, width=TEXT_WIDTH) -################################################################ uproot.interp fragments +################################################################ uproot3.interp fragments interp_fragments = { # see1 - "see1": u"""Part of the :py:class:`Interpretation ` interface; type ``help(uproot.interp.interp.Interpretation)`` for details.""", + "see1": u"""Part of the :py:class:`Interpretation ` interface; type ``help(uproot3.interp.interp.Interpretation)`` for details.""", # see2 - "see2": u"""Methods implementing the :py:class:`Interpretation ` interface are not documented here.""", + "see2": u"""Methods implementing the :py:class:`Interpretation ` interface are not documented here.""", } -################################################################ uproot.interp.numerical fragments +################################################################ uproot3.interp.numerical fragments interp_numerical_fragments = { # items @@ -2018,12 +2018,12 @@ class definitions associated with each class name, usually generated by ROOT fil Numpy shape of each source entry. The Numpy shape of the whole source array is ``(numentries,) + fromdims``. Default is ``()`` (scalar).""", } -################################################################ uproot.interp.numerical.asdtype +################################################################ uproot3.interp.numerical.asdtype -uproot.interp.numerical.asdtype.__doc__ = wrap( +uproot3.interp.numerical.asdtype.__doc__ = wrap( u"""Interpret branch data as a new Numpy array with given dtypes and dimensions. - This interpretation directs branch-reading functions to allocate new Numpy arrays and fill them with the branch contents. See :py:class:`asarray ` to fill an existing array, rather than filling a new array. + This interpretation directs branch-reading functions to allocate new Numpy arrays and fill them with the branch contents. See :py:class:`asarray ` to fill an existing array, rather than filling a new array. {items} @@ -2045,8 +2045,8 @@ class definitions associated with each class name, usually generated by ROOT fil {see2} """.format(**dict(list(interp_fragments.items()) + list(interp_numerical_fragments.items()))), width=TEXT_WIDTH) -_method(uproot.interp.numerical.asdtype.to).__doc__ = wrap( -u"""Create a new :py:class:`asdtype ` interpretation from this one. +_method(uproot3.interp.numerical.asdtype.to).__doc__ = wrap( +u"""Create a new :py:class:`asdtype ` interpretation from this one. Parameters ---------- @@ -2058,12 +2058,12 @@ class definitions associated with each class name, usually generated by ROOT fil Returns ------- - :py:class:`asdtype ` + :py:class:`asdtype ` new interpretation. """, width=TEXT_WIDTH) -_method(uproot.interp.numerical.asdtype.toarray).__doc__ = wrap( -u"""Create a :py:class:`asarray ` interpretation from this one. 
+_method(uproot3.interp.numerical.asdtype.toarray).__doc__ = wrap( +u"""Create a :py:class:`asarray ` interpretation from this one. Parameters ---------- @@ -2072,26 +2072,26 @@ class definitions associated with each class name, usually generated by ROOT fil Returns ------- - :py:class:`asarray ` + :py:class:`asarray ` new interpretation. """, width=TEXT_WIDTH) -_method(uproot.interp.numerical.asdtype.empty).__doc__ = interp_fragments["see1"] -_method(uproot.interp.numerical.asdtype.compatible).__doc__ = interp_fragments["see1"] -_method(uproot.interp.numerical.asdtype.numitems).__doc__ = interp_fragments["see1"] -_method(uproot.interp.numerical.asdtype.source_numitems).__doc__ = interp_fragments["see1"] -_method(uproot.interp.numerical.asdtype.fromroot).__doc__ = interp_fragments["see1"] -_method(uproot.interp.numerical.asdtype.destination).__doc__ = interp_fragments["see1"] -_method(uproot.interp.numerical.asdtype.fill).__doc__ = interp_fragments["see1"] -_method(uproot.interp.numerical.asdtype.clip).__doc__ = interp_fragments["see1"] -_method(uproot.interp.numerical.asdtype.finalize).__doc__ = interp_fragments["see1"] +_method(uproot3.interp.numerical.asdtype.empty).__doc__ = interp_fragments["see1"] +_method(uproot3.interp.numerical.asdtype.compatible).__doc__ = interp_fragments["see1"] +_method(uproot3.interp.numerical.asdtype.numitems).__doc__ = interp_fragments["see1"] +_method(uproot3.interp.numerical.asdtype.source_numitems).__doc__ = interp_fragments["see1"] +_method(uproot3.interp.numerical.asdtype.fromroot).__doc__ = interp_fragments["see1"] +_method(uproot3.interp.numerical.asdtype.destination).__doc__ = interp_fragments["see1"] +_method(uproot3.interp.numerical.asdtype.fill).__doc__ = interp_fragments["see1"] +_method(uproot3.interp.numerical.asdtype.clip).__doc__ = interp_fragments["see1"] +_method(uproot3.interp.numerical.asdtype.finalize).__doc__ = interp_fragments["see1"] -################################################################ uproot.interp.numerical.asarray +################################################################ uproot3.interp.numerical.asarray -uproot.interp.numerical.asarray.__doc__ = wrap( +uproot3.interp.numerical.asarray.__doc__ = wrap( u"""Interpret branch as array data that should overwrite an existing array. - This interpretation directs branch-reading functions to fill the given Numpy array with branch contents. See :py:class:`asdtype ` to allocate a new array, rather than filling an existing array. + This interpretation directs branch-reading functions to fill the given Numpy array with branch contents. See :py:class:`asdtype ` to allocate a new array, rather than filling an existing array. {items} @@ -2109,17 +2109,17 @@ class definitions associated with each class name, usually generated by ROOT fil {see2} - This class has *todtype* and *todims* parameters like :py:class:`asdtype `, but they are derived from the *toarray* attribute. + This class has *todtype* and *todims* parameters like :py:class:`asdtype `, but they are derived from the *toarray* attribute. 
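asdtype and asarray, documented above, are the two numerical interpretations: the first allocates a new array, the second fills one you provide. A hedged sketch (hypothetical names)::

    import numpy
    import uproot3

    branch = uproot3.open("example.root")["events"]["px"]

    converted = branch.array(uproot3.asdtype(">f8", "f4"))  # read as float64, deliver float32

    out = numpy.empty(branch.numentries, dtype=">f8")
    branch.array(uproot3.asarray(">f8", out))               # fill the preallocated array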
""".format(**dict(list(interp_fragments.items()) + list(interp_numerical_fragments.items()))), width=TEXT_WIDTH) -_method(uproot.interp.numerical.asarray.destination).__doc__ = interp_fragments["see1"] -_method(uproot.interp.numerical.asarray.fill).__doc__ = interp_fragments["see1"] -_method(uproot.interp.numerical.asarray.clip).__doc__ = interp_fragments["see1"] -_method(uproot.interp.numerical.asarray.finalize).__doc__ = interp_fragments["see1"] +_method(uproot3.interp.numerical.asarray.destination).__doc__ = interp_fragments["see1"] +_method(uproot3.interp.numerical.asarray.fill).__doc__ = interp_fragments["see1"] +_method(uproot3.interp.numerical.asarray.clip).__doc__ = interp_fragments["see1"] +_method(uproot3.interp.numerical.asarray.finalize).__doc__ = interp_fragments["see1"] -################################################################ uproot.interp.jagged.asjagged +################################################################ uproot3.interp.jagged.asjagged -uproot.interp.jagged.asjagged.__doc__ = wrap( +uproot3.interp.jagged.asjagged.__doc__ = wrap( u"""Interpret branch as a jagged array (array of non-uniformly sized arrays). This interpretation directs branch-reading to fill contiguous arrays and present them to the user in a ``JaggedArray`` interface. Such an object behaves as though it were an array of non-uniformly sized arrays, but it is more memory and cache-line efficient because the underlying data are contiguous. @@ -2128,7 +2128,7 @@ class definitions associated with each class name, usually generated by ROOT fil Parameters ---------- - asdtype : :py:class:`asdtype ` + asdtype : :py:class:`asdtype ` interpretation for the inner arrays. Notes @@ -2137,8 +2137,8 @@ class definitions associated with each class name, usually generated by ROOT fil {see2} """.format(**interp_fragments), width=TEXT_WIDTH) -_method(uproot.interp.jagged.asjagged.to).__doc__ = wrap( -u"""Create a new :py:class:`asjagged ` interpretation from this one. +_method(uproot3.interp.jagged.asjagged.to).__doc__ = wrap( +u"""Create a new :py:class:`asjagged ` interpretation from this one. Parameters ---------- @@ -2150,43 +2150,43 @@ class definitions associated with each class name, usually generated by ROOT fil Returns ------- - :py:class:`asjagged ` + :py:class:`asjagged ` new interpretation. 
""", width=TEXT_WIDTH) -_method(uproot.interp.jagged.asjagged.empty).__doc__ = interp_fragments["see1"] -_method(uproot.interp.jagged.asjagged.compatible).__doc__ = interp_fragments["see1"] -_method(uproot.interp.jagged.asjagged.numitems).__doc__ = interp_fragments["see1"] -_method(uproot.interp.jagged.asjagged.source_numitems).__doc__ = interp_fragments["see1"] -_method(uproot.interp.jagged.asjagged.fromroot).__doc__ = interp_fragments["see1"] -_method(uproot.interp.jagged.asjagged.destination).__doc__ = interp_fragments["see1"] -_method(uproot.interp.jagged.asjagged.fill).__doc__ = interp_fragments["see1"] -_method(uproot.interp.jagged.asjagged.clip).__doc__ = interp_fragments["see1"] -_method(uproot.interp.jagged.asjagged.finalize).__doc__ = interp_fragments["see1"] +_method(uproot3.interp.jagged.asjagged.empty).__doc__ = interp_fragments["see1"] +_method(uproot3.interp.jagged.asjagged.compatible).__doc__ = interp_fragments["see1"] +_method(uproot3.interp.jagged.asjagged.numitems).__doc__ = interp_fragments["see1"] +_method(uproot3.interp.jagged.asjagged.source_numitems).__doc__ = interp_fragments["see1"] +_method(uproot3.interp.jagged.asjagged.fromroot).__doc__ = interp_fragments["see1"] +_method(uproot3.interp.jagged.asjagged.destination).__doc__ = interp_fragments["see1"] +_method(uproot3.interp.jagged.asjagged.fill).__doc__ = interp_fragments["see1"] +_method(uproot3.interp.jagged.asjagged.clip).__doc__ = interp_fragments["see1"] +_method(uproot3.interp.jagged.asjagged.finalize).__doc__ = interp_fragments["see1"] # TODO: add asdtype asarray asdouble32 asstlbitset asjagged astable asobj asgenobj asstring STLVector STLString -################################################################ uproot.source.cursor.Cursor +################################################################ uproot3.source.cursor.Cursor -uproot.source.cursor.Cursor.__doc__ = wrap( -u"""Maintain a position in a :py:class:`Source ` that updates as data are read. +uproot3.source.cursor.Cursor.__doc__ = wrap( +u"""Maintain a position in a :py:class:`Source ` that updates as data are read. **Attributes, properties, and methods:** - **index** (*int*) the position. - - **origin** (*int*) "beginning of buffer" position, used in the **refs** key in :py:func:`uproot.rootio._readobjany `. - - **refs** (``None`` or ``dict``-like) manages cross-references in :py:func:`uproot.rootio._readobjany `. - - :py:meth:`copied ` return a copy of this :py:class:`Cursor ` with modifications. - - :py:meth:`skipped ` return a copy of this :py:class:`Cursor ` with the **index** moved forward. - - :py:meth:`skip ` move the **index** of this :py:class:`Cursor ` forward. - - :py:meth:`fields ` interpret bytes in the :py:class:`Source ` with given data types and skip the **index** past them. - - :py:meth:`field ` interpret bytes in the :py:class:`Source ` with a given data type and skip the **index** past it. - - :py:meth:`bytes ` return a range of bytes from the :py:class:`Source ` and skip the **index** past it. - - :py:meth:`array ` return a range of bytes from the :py:class:`Source ` as a typed Numpy array and skip the **index** past it. - - :py:meth:`string ` read a string from the :py:class:`Source `, interpreting the first 1 or 5 bytes as a size and skip the **index** past it. - - :py:meth:`cstring ` read a null-terminated string from the :py:class:`Source ` and skip the **index** past it. - - :py:meth:`skipstring ` interpret the first 1 or 5 bytes as a size and skip the **index** past the string (without creating a Python string). 
- - :py:meth:`hexdump ` view a section of the :py:class:`Source ` as formatted by the POSIX ``hexdump`` program and *do not* move the **index**. + - **origin** (*int*) "beginning of buffer" position, used in the **refs** key in :py:func:`uproot3.rootio._readobjany `. + - **refs** (``None`` or ``dict``-like) manages cross-references in :py:func:`uproot3.rootio._readobjany `. + - :py:meth:`copied ` return a copy of this :py:class:`Cursor ` with modifications. + - :py:meth:`skipped ` return a copy of this :py:class:`Cursor ` with the **index** moved forward. + - :py:meth:`skip ` move the **index** of this :py:class:`Cursor ` forward. + - :py:meth:`fields ` interpret bytes in the :py:class:`Source ` with given data types and skip the **index** past them. + - :py:meth:`field ` interpret bytes in the :py:class:`Source ` with a given data type and skip the **index** past it. + - :py:meth:`bytes ` return a range of bytes from the :py:class:`Source ` and skip the **index** past it. + - :py:meth:`array ` return a range of bytes from the :py:class:`Source ` as a typed Numpy array and skip the **index** past it. + - :py:meth:`string ` read a string from the :py:class:`Source `, interpreting the first 1 or 5 bytes as a size and skip the **index** past it. + - :py:meth:`cstring ` read a null-terminated string from the :py:class:`Source ` and skip the **index** past it. + - :py:meth:`skipstring ` interpret the first 1 or 5 bytes as a size and skip the **index** past the string (without creating a Python string). + - :py:meth:`hexdump ` view a section of the :py:class:`Source ` as formatted by the POSIX ``hexdump`` program and *do not* move the **index**. Parameters ---------- @@ -2202,12 +2202,12 @@ class definitions associated with each class name, usually generated by ROOT fil format_source_cursor = { # source - "source": u"""source : :py:class:`Source ` + "source": u"""source : :py:class:`Source ` data to be read.""" } -_method(uproot.source.cursor.Cursor.copied).__doc__ = wrap( -u"""Return a copy of this :py:class:`Cursor ` with modifications. +_method(uproot3.source.cursor.Cursor.copied).__doc__ = wrap( +u"""Return a copy of this :py:class:`Cursor ` with modifications. Parameters ---------- @@ -2222,7 +2222,7 @@ class definitions associated with each class name, usually generated by ROOT fil Returns ------- - :py:class:`Cursor ` + :py:class:`Cursor ` the new cursor. Notes @@ -2231,8 +2231,8 @@ class definitions associated with each class name, usually generated by ROOT fil This is a shallow copy--- the **refs** are shared with the parent and all other copies. """.format(**format_source_cursor), width=TEXT_WIDTH) -_method(uproot.source.cursor.Cursor.skipped).__doc__ = wrap( -u"""Return a copy of this :py:class:`Cursor ` with the **index** moved forward. +_method(uproot3.source.cursor.Cursor.skipped).__doc__ = wrap( +u"""Return a copy of this :py:class:`Cursor ` with the **index** moved forward. Parameters ---------- @@ -2247,7 +2247,7 @@ class definitions associated with each class name, usually generated by ROOT fil Returns ------- - :py:class:`Cursor ` + :py:class:`Cursor ` the new cursor. Notes @@ -2256,8 +2256,8 @@ class definitions associated with each class name, usually generated by ROOT fil This is a shallow copy--- the **refs** are shared with the parent and all other copies. """.format(**format_source_cursor), width=TEXT_WIDTH) -_method(uproot.source.cursor.Cursor.skip).__doc__ = wrap( -u"""Move the **index** of this :py:class:`Cursor ` forward. 
+_method(uproot3.source.cursor.Cursor.skip).__doc__ = wrap( +u"""Move the **index** of this :py:class:`Cursor ` forward. Parameters ---------- @@ -2265,8 +2265,8 @@ class definitions associated with each class name, usually generated by ROOT fil number of bytes to skip """.format(**format_source_cursor), width=TEXT_WIDTH) -_method(uproot.source.cursor.Cursor.fields).__doc__ = wrap( -u"""Interpret bytes in the :py:class:`Source ` with given data types and skip the **index** past them. +_method(uproot3.source.cursor.Cursor.fields).__doc__ = wrap( +u"""Interpret bytes in the :py:class:`Source ` with given data types and skip the **index** past them. Parameters ---------- @@ -2281,8 +2281,8 @@ class definitions associated with each class name, usually generated by ROOT fil field values (types determined by format) """.format(**format_source_cursor), width=TEXT_WIDTH) -_method(uproot.source.cursor.Cursor.field).__doc__ = wrap( -u"""Interpret bytes in the :py:class:`Source ` with a given data type and skip the **index** past it. +_method(uproot3.source.cursor.Cursor.field).__doc__ = wrap( +u"""Interpret bytes in the :py:class:`Source ` with a given data type and skip the **index** past it. Parameters ---------- @@ -2297,8 +2297,8 @@ class definitions associated with each class name, usually generated by ROOT fil field value """.format(**format_source_cursor), width=TEXT_WIDTH) -_method(uproot.source.cursor.Cursor.bytes).__doc__ = wrap( -u"""Return a range of bytes from the :py:class:`Source ` and skip the **index** past it. +_method(uproot3.source.cursor.Cursor.bytes).__doc__ = wrap( +u"""Return a range of bytes from the :py:class:`Source ` and skip the **index** past it. Parameters ---------- @@ -2313,8 +2313,8 @@ class definitions associated with each class name, usually generated by ROOT fil raw view of data from source. """.format(**format_source_cursor), width=TEXT_WIDTH) -_method(uproot.source.cursor.Cursor.array).__doc__ = wrap( -u"""Return a range of bytes from the :py:class:`Source ` as a typed Numpy array and skip the **index** past it. +_method(uproot3.source.cursor.Cursor.array).__doc__ = wrap( +u"""Return a range of bytes from the :py:class:`Source ` as a typed Numpy array and skip the **index** past it. Parameters ---------- @@ -2332,8 +2332,8 @@ class definitions associated with each class name, usually generated by ROOT fil interpreted view of data from source. """.format(**format_source_cursor), width=TEXT_WIDTH) -_method(uproot.source.cursor.Cursor.string).__doc__ = wrap( -u"""Read a string from the :py:class:`Source `, interpreting the first 1 or 5 bytes as a size and skip the **index** past it. +_method(uproot3.source.cursor.Cursor.string).__doc__ = wrap( +u"""Read a string from the :py:class:`Source `, interpreting the first 1 or 5 bytes as a size and skip the **index** past it. Parameters ---------- @@ -2345,8 +2345,8 @@ class definitions associated with each class name, usually generated by ROOT fil Python string (``bytes`` in Python 3). """.format(**format_source_cursor), width=TEXT_WIDTH) -_method(uproot.source.cursor.Cursor.cstring).__doc__ = wrap( -u"""Read a null-terminated string from the :py:class:`Source ` and skip the **index** past it. +_method(uproot3.source.cursor.Cursor.cstring).__doc__ = wrap( +u"""Read a null-terminated string from the :py:class:`Source ` and skip the **index** past it. The index is also skipped past the null that terminates the string. 
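To make the Cursor/Source relationship concrete, here is an illustrative sketch (not part of the patch). It assumes that uproot3.source.source.Source can wrap a plain numpy uint8 array, the way decompressed blocks are presented elsewhere in the package; the byte content is invented.

import numpy
import uproot3.source.cursor
import uproot3.source.source

data = numpy.frombuffer(b"hello\x00world", dtype=numpy.uint8)
source = uproot3.source.source.Source(data)   # assumed: Source wrapping an in-memory byte array
cursor = uproot3.source.cursor.Cursor(0)      # start reading at index 0

print(cursor.cstring(source))    # b"hello"; the index also skips the terminating null
print(cursor.bytes(source, 5))   # raw uint8 view of the next five bytes ("world")
print(cursor.index)              # 11: the cursor advanced as data were read
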
@@ -2360,7 +2360,7 @@ class definitions associated with each class name, usually generated by ROOT fil Python string (``bytes`` in Python 3). """.format(**format_source_cursor), width=TEXT_WIDTH) -_method(uproot.source.cursor.Cursor.skipstring).__doc__ = wrap( +_method(uproot3.source.cursor.Cursor.skipstring).__doc__ = wrap( u"""Interpret the first 1 or 5 bytes as a size and skip the **index** past the string (without creating a Python string). Parameters @@ -2368,8 +2368,8 @@ class definitions associated with each class name, usually generated by ROOT fil {source} """.format(**format_source_cursor), width=TEXT_WIDTH) -_method(uproot.source.cursor.Cursor.hexdump).__doc__ = wrap( -u"""View a section of the :py:class:`Source ` as formatted by the POSIX ``hexdump`` program and *do not* move the **index**. +_method(uproot3.source.cursor.Cursor.hexdump).__doc__ = wrap( +u"""View a section of the :py:class:`Source ` as formatted by the POSIX ``hexdump`` program and *do not* move the **index**. This is much more useful than simply hexdumping the whole file, since partial interpretation is necessary to find the right point in the file to dump. @@ -2392,15 +2392,15 @@ class definitions associated with each class name, usually generated by ROOT fil hexdump-formatted view to be printed """.format(**format_source_cursor), width=TEXT_WIDTH) -################################################################ uproot.source.source.Source +################################################################ uproot3.source.source.Source -uproot.source.source.Source.__doc__ = wrap( +uproot3.source.source.Source.__doc__ = wrap( u"""Interface for data sources. Sources do not need to inherit from this class, but they do need to satisfy the interface described below. **parent(self)** - return the :py:class:`Source ` from which this was copied; may be ``None``. + return the :py:class:`Source ` from which this was copied; may be ``None``. **threadlocal(self)** either return ``self`` (if thread-safe) or return a thread-safe copy, such as a new file handle into the same file. @@ -2414,18 +2414,18 @@ class definitions associated with each class name, usually generated by ROOT fil source_fragments = { # see1 - "see1": u"""Part of the :py:class:`Source ` interface; type ``help(uproot.source.source.Source)`` for details.""", + "see1": u"""Part of the :py:class:`Source ` interface; type ``help(uproot3.source.source.Source)`` for details.""", # see2 - "see2": u"""Methods implementing the :py:class:`Source ` interface are not documented here.""", + "see2": u"""Methods implementing the :py:class:`Source ` interface are not documented here.""", } -################################################################ uproot.source.file.FileSource +################################################################ uproot3.source.file.FileSource -uproot.source.file.FileSource.__doc__ = wrap( +uproot3.source.file.FileSource.__doc__ = wrap( u"""Emulate a memory-mapped interface with traditional file handles, opening many if necessary. - :py:class:`FileSource ` objects avoid double-reading and many small reads by caching data in chunks. All thread-local copies of a :py:class:`FileSource ` share a :py:class:`ThreadSafeArrayCache ` to avoid double-reads across threads. + :py:class:`FileSource ` objects avoid double-reading and many small reads by caching data in chunks. All thread-local copies of a :py:class:`FileSource ` share a :py:class:`ThreadSafeArrayCache ` to avoid double-reads across threads. 
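The following sketch (not part of the patch) shows where FileSource fits in practice. It assumes that uproot3.open accepts a localsource argument and that FileSource.defaults is the documented way to select it; "example.root" is a placeholder path.

import uproot3

# Default for local paths: memory-mapped reading through MemmapSource.
f_default = uproot3.open("example.root")

# Assumed alternative: plain file handles with chunked caching through FileSource,
# for cases where memory-mapping is not wanted.
f_handles = uproot3.open("example.root", localsource=uproot3.FileSource.defaults)
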
Parameters ---------- @@ -2444,15 +2444,15 @@ class definitions associated with each class name, usually generated by ROOT fil {see2} """.format(**source_fragments), width=TEXT_WIDTH) -_method(uproot.source.file.FileSource.parent).__doc__ = source_fragments["see1"] -_method(uproot.source.file.FileSource.threadlocal).__doc__ = source_fragments["see1"] -_method(uproot.source.file.FileSource.dismiss).__doc__ = source_fragments["see1"] -_method(uproot.source.file.FileSource.data).__doc__ = source_fragments["see1"] +_method(uproot3.source.file.FileSource.parent).__doc__ = source_fragments["see1"] +_method(uproot3.source.file.FileSource.threadlocal).__doc__ = source_fragments["see1"] +_method(uproot3.source.file.FileSource.dismiss).__doc__ = source_fragments["see1"] +_method(uproot3.source.file.FileSource.data).__doc__ = source_fragments["see1"] -################################################################ uproot.source.memmap.MemmapSource +################################################################ uproot3.source.memmap.MemmapSource -uproot.source.memmap.MemmapSource.__doc__ = wrap( -u"""Thin wrapper around a memory-mapped file, which already behaves like a :py:class:`Source `. +uproot3.source.memmap.MemmapSource.__doc__ = wrap( +u"""Thin wrapper around a memory-mapped file, which already behaves like a :py:class:`Source `. Parameters ---------- @@ -2465,17 +2465,17 @@ class definitions associated with each class name, usually generated by ROOT fil {see2} """.format(**source_fragments), width=TEXT_WIDTH) -_method(uproot.source.memmap.MemmapSource.parent).__doc__ = source_fragments["see1"] -_method(uproot.source.memmap.MemmapSource.threadlocal).__doc__ = source_fragments["see1"] -_method(uproot.source.memmap.MemmapSource.dismiss).__doc__ = source_fragments["see1"] -_method(uproot.source.memmap.MemmapSource.data).__doc__ = source_fragments["see1"] +_method(uproot3.source.memmap.MemmapSource.parent).__doc__ = source_fragments["see1"] +_method(uproot3.source.memmap.MemmapSource.threadlocal).__doc__ = source_fragments["see1"] +_method(uproot3.source.memmap.MemmapSource.dismiss).__doc__ = source_fragments["see1"] +_method(uproot3.source.memmap.MemmapSource.data).__doc__ = source_fragments["see1"] -################################################################ uproot.source.xrootd.XRootDSource +################################################################ uproot3.source.xrootd.XRootDSource -uproot.source.xrootd.XRootDSource.__doc__ = wrap( +uproot3.source.xrootd.XRootDSource.__doc__ = wrap( u"""Emulate a memory-mapped interface with XRootD. - XRootD is already thread-safe, but provides no caching. :py:class:`XRootDSource ` objects avoid double-reading and many small reads by caching data in chunks. They are not duplicated when splitting into threads. + XRootD is already thread-safe, but provides no caching. :py:class:`XRootDSource ` objects avoid double-reading and many small reads by caching data in chunks. They are not duplicated when splitting into threads. 
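A remote-reading sketch (not part of the patch): a root:// URL is routed through XRootDSource, provided the pyxrootd bindings are installed; the server, path, and tree name below are placeholders.

import uproot3

f = uproot3.open("root://some.server//store/path/file.root")   # placeholder XRootD URL
tree = f["Events"]                                              # placeholder tree name
print(tree.numentries)
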
Parameters ---------- @@ -2494,14 +2494,14 @@ class definitions associated with each class name, usually generated by ROOT fil {see2} """.format(**source_fragments), width=TEXT_WIDTH) -_method(uproot.source.xrootd.XRootDSource.parent).__doc__ = source_fragments["see1"] -_method(uproot.source.xrootd.XRootDSource.threadlocal).__doc__ = source_fragments["see1"] -_method(uproot.source.xrootd.XRootDSource.dismiss).__doc__ = source_fragments["see1"] -_method(uproot.source.xrootd.XRootDSource.data).__doc__ = source_fragments["see1"] +_method(uproot3.source.xrootd.XRootDSource.parent).__doc__ = source_fragments["see1"] +_method(uproot3.source.xrootd.XRootDSource.threadlocal).__doc__ = source_fragments["see1"] +_method(uproot3.source.xrootd.XRootDSource.dismiss).__doc__ = source_fragments["see1"] +_method(uproot3.source.xrootd.XRootDSource.data).__doc__ = source_fragments["see1"] -################################################################ uproot.source.compressed.Compression +################################################################ uproot3.source.compressed.Compression -uproot.source.compressed.Compression.__doc__ = wrap( +uproot3.source.compressed.Compression.__doc__ = wrap( u"""Describe the compression of a compressed block. **Attributes, properties, and methods:** @@ -2509,7 +2509,7 @@ class definitions associated with each class name, usually generated by ROOT fil - **algo** (*int*) algorithm code. - **level** (*int*) 0 is no compression, 1 is least, 9 is most. - **algoname** (*str*) algorithm expressed as a string: ``"zlib"``, ``"lzma"``, ``"old"``, ``"lz4"`` or ``"zstd"``. - - **copy(algo=None, level=None)** copy this :py:class:`Compression ` object, possibly changing a field. + - **copy(algo=None, level=None)** copy this :py:class:`Compression ` object, possibly changing a field. - **decompress(source, cursor, compressedbytes, uncompressedbytes)** decompress data from **source** at **cursor**, knowing the compressed and uncompressed size. Parameters @@ -2518,24 +2518,24 @@ class definitions associated with each class name, usually generated by ROOT fil ROOT fCompress field. """, width=TEXT_WIDTH) -################################################################ uproot.source.compressed.CompressedSource +################################################################ uproot3.source.compressed.CompressedSource -uproot.source.compressed.CompressedSource.__doc__ = wrap( -u"""A :py:class:`Source ` for compressed data. +uproot3.source.compressed.CompressedSource.__doc__ = wrap( +u"""A :py:class:`Source ` for compressed data. Decompresses on demand--- without caching the result--- so cache options in higher-level array functions are very important. - Ordinary users would never create a :py:class:`CompressedSource `. They are produced when a TKey encounters a compressed value. + Ordinary users would never create a :py:class:`CompressedSource `. They are produced when a TKey encounters a compressed value. Parameters ---------- - compression : :py:class:`Compression ` + compression : :py:class:`Compression ` inherited description of the compression. Note that *this is overridden* by the first two bytes of the compressed block, which can disagree with the higher-level description and takes precedence. - source : :py:class:`Source ` + source : :py:class:`Source ` the source in which compressed data may be found. - cursor : :py:class:`Cursor ` + cursor : :py:class:`Cursor ` location in the source. 
compressedbytes : int @@ -2545,9 +2545,9 @@ class definitions associated with each class name, usually generated by ROOT fil number of bytes before compression. """, width=TEXT_WIDTH) -################################################################ uproot.cache.ArrayCache +################################################################ uproot3.cache.ArrayCache -uproot.cache.ArrayCache.__doc__ = wrap( +uproot3.cache.ArrayCache.__doc__ = wrap( u"""A cache (wrapping cachetools) whose eviction threshold is determined by total array size. Uses the nbytes property of all values to determine total size. By default, cachetools only counts the number of objects, ignoring their sizes. @@ -2561,10 +2561,10 @@ class definitions associated with each class name, usually generated by ROOT fil least recently used or least frequently used """, width=TEXT_WIDTH) -################################################################ uproot.cache.ThreadSafeArrayCache +################################################################ uproot3.cache.ThreadSafeArrayCache -uproot.cache.ThreadSafeArrayCache.__doc__ = wrap( -u"""An :py:class:`ArrayCache ` with locks for thread safety. +uproot3.cache.ThreadSafeArrayCache.__doc__ = wrap( +u"""An :py:class:`ArrayCache ` with locks for thread safety. Parameters ---------- diff --git a/uproot/_util.py b/uproot3/_util.py similarity index 91% rename from uproot/_util.py rename to uproot3/_util.py index e60e66c7..b59e9610 100644 --- a/uproot/_util.py +++ b/uproot3/_util.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE from __future__ import absolute_import diff --git a/uproot/cache.py b/uproot3/cache.py similarity index 96% rename from uproot/cache.py rename to uproot3/cache.py index 11c21b7d..e5902fab 100644 --- a/uproot/cache.py +++ b/uproot3/cache.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE from __future__ import absolute_import @@ -19,7 +19,7 @@ def getsizeof(obj): return getattr(obj, "nbytes", 1) def __init__(self, limitbytes, method="LRU"): - from uproot.rootio import _memsize + from uproot3.rootio import _memsize m = _memsize(limitbytes) if m is not None: limitbytes = int(math.ceil(m)) diff --git a/uproot/const.py b/uproot3/const.py similarity index 99% rename from uproot/const.py rename to uproot3/const.py index c5dbf29d..91dd775a 100644 --- a/uproot/const.py +++ b/uproot3/const.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE """ROOT constants used in deserialization.""" from __future__ import absolute_import diff --git a/uproot/_connect/__init__.py b/uproot3/interp/__init__.py similarity index 59% rename from uproot/_connect/__init__.py rename to uproot3/interp/__init__.py index 9f2be71d..d0cdd065 100644 --- a/uproot/_connect/__init__.py +++ b/uproot3/interp/__init__.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE -from __future__ import absolute_import \ No newline at end of file +from __future__ import 
absolute_import diff --git a/uproot/interp/auto.py b/uproot3/interp/auto.py similarity index 78% rename from uproot/interp/auto.py rename to uproot3/interp/auto.py index ee712b1c..20bdf1aa 100644 --- a/uproot/interp/auto.py +++ b/uproot3/interp/auto.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE from __future__ import absolute_import @@ -9,93 +9,93 @@ import ast from functools import reduce -import uproot.const -import uproot.rootio -from uproot.interp.numerical import asdtype -from uproot.interp.numerical import asarray -from uproot.interp.numerical import asdouble32 -from uproot.interp.numerical import asfloat16 -from uproot.interp.numerical import asstlbitset -from uproot.interp.jagged import asjagged -from uproot.interp.objects import astable -from uproot.interp.objects import asobj -from uproot.interp.objects import asgenobj -from uproot.interp.objects import asstring -from uproot.interp.objects import SimpleArray -from uproot.interp.objects import STLVector -from uproot.interp.objects import STLMap -from uproot.interp.objects import STLString -from uproot.interp.objects import Pointer +import uproot3.const +import uproot3.rootio +from uproot3.interp.numerical import asdtype +from uproot3.interp.numerical import asarray +from uproot3.interp.numerical import asdouble32 +from uproot3.interp.numerical import asfloat16 +from uproot3.interp.numerical import asstlbitset +from uproot3.interp.jagged import asjagged +from uproot3.interp.objects import astable +from uproot3.interp.objects import asobj +from uproot3.interp.objects import asgenobj +from uproot3.interp.objects import asstring +from uproot3.interp.objects import SimpleArray +from uproot3.interp.objects import STLVector +from uproot3.interp.objects import STLMap +from uproot3.interp.objects import STLString +from uproot3.interp.objects import Pointer class _NotNumerical(Exception): pass def _normalize_ftype(fType): - if fType is not None and uproot.const.kOffsetL < fType < uproot.const.kOffsetP: - return fType - uproot.const.kOffsetL + if fType is not None and uproot3.const.kOffsetL < fType < uproot3.const.kOffsetP: + return fType - uproot3.const.kOffsetL else: return fType -def _ftype2dtype(fType, awkward): +def _ftype2dtype(fType, awkward0): fType = _normalize_ftype(fType) - if fType == uproot.const.kBool: - return awkward.numpy.dtype(awkward.numpy.bool_) - elif fType == uproot.const.kChar: - return awkward.numpy.dtype("i1") - elif fType == uproot.const.kUChar: - return awkward.numpy.dtype("u1") - elif fType == uproot.const.kShort: - return awkward.numpy.dtype(">i2") - elif fType == uproot.const.kUShort: - return awkward.numpy.dtype(">u2") - elif fType == uproot.const.kInt: - return awkward.numpy.dtype(">i4") - elif fType in (uproot.const.kBits, uproot.const.kUInt, uproot.const.kCounter): - return awkward.numpy.dtype(">u4") - elif fType == uproot.const.kLong: - return awkward.numpy.dtype(">i8") - elif fType == uproot.const.kULong: - return awkward.numpy.dtype(">u8") - elif fType == uproot.const.kLong64: - return awkward.numpy.dtype(">i8") - elif fType == uproot.const.kULong64: - return awkward.numpy.dtype(">u8") - elif fType == uproot.const.kFloat: - return awkward.numpy.dtype(">f4") - elif fType == uproot.const.kDouble: - return awkward.numpy.dtype(">f8") + if fType == uproot3.const.kBool: + return awkward0.numpy.dtype(awkward0.numpy.bool_) + elif fType == 
uproot3.const.kChar: + return awkward0.numpy.dtype("i1") + elif fType == uproot3.const.kUChar: + return awkward0.numpy.dtype("u1") + elif fType == uproot3.const.kShort: + return awkward0.numpy.dtype(">i2") + elif fType == uproot3.const.kUShort: + return awkward0.numpy.dtype(">u2") + elif fType == uproot3.const.kInt: + return awkward0.numpy.dtype(">i4") + elif fType in (uproot3.const.kBits, uproot3.const.kUInt, uproot3.const.kCounter): + return awkward0.numpy.dtype(">u4") + elif fType == uproot3.const.kLong: + return awkward0.numpy.dtype(">i8") + elif fType == uproot3.const.kULong: + return awkward0.numpy.dtype(">u8") + elif fType == uproot3.const.kLong64: + return awkward0.numpy.dtype(">i8") + elif fType == uproot3.const.kULong64: + return awkward0.numpy.dtype(">u8") + elif fType == uproot3.const.kFloat: + return awkward0.numpy.dtype(">f4") + elif fType == uproot3.const.kDouble: + return awkward0.numpy.dtype(">f8") else: raise _NotNumerical -def _leaf2dtype(leaf, awkward): +def _leaf2dtype(leaf, awkward0): classname = leaf.__class__.__name__ if classname == "TLeafO": - return awkward.numpy.dtype(awkward.numpy.bool_) + return awkward0.numpy.dtype(awkward0.numpy.bool_) elif classname == "TLeafB": if leaf._fIsUnsigned: - return awkward.numpy.dtype(awkward.numpy.uint8) + return awkward0.numpy.dtype(awkward0.numpy.uint8) else: - return awkward.numpy.dtype(awkward.numpy.int8) + return awkward0.numpy.dtype(awkward0.numpy.int8) elif classname == "TLeafS": if leaf._fIsUnsigned: - return awkward.numpy.dtype(awkward.numpy.uint16) + return awkward0.numpy.dtype(awkward0.numpy.uint16) else: - return awkward.numpy.dtype(awkward.numpy.int16) + return awkward0.numpy.dtype(awkward0.numpy.int16) elif classname == "TLeafI": if leaf._fIsUnsigned: - return awkward.numpy.dtype(awkward.numpy.uint32) + return awkward0.numpy.dtype(awkward0.numpy.uint32) else: - return awkward.numpy.dtype(awkward.numpy.int32) + return awkward0.numpy.dtype(awkward0.numpy.int32) elif classname == "TLeafL": if leaf._fIsUnsigned: - return awkward.numpy.dtype(awkward.numpy.uint64) + return awkward0.numpy.dtype(awkward0.numpy.uint64) else: - return awkward.numpy.dtype(awkward.numpy.int64) + return awkward0.numpy.dtype(awkward0.numpy.int64) elif classname == "TLeafF": - return awkward.numpy.dtype(awkward.numpy.float32) + return awkward0.numpy.dtype(awkward0.numpy.float32) elif classname == "TLeafD": - return awkward.numpy.dtype(awkward.numpy.float64) + return awkward0.numpy.dtype(awkward0.numpy.float64) elif classname == "TLeafElement": - return _ftype2dtype(leaf._fType, awkward) + return _ftype2dtype(leaf._fType, awkward0) else: raise _NotNumerical @@ -131,8 +131,8 @@ def _obj_or_genobj(streamerClass, branch, isjagged, cntvers=False, tobject=True, return asobj(astable(asdtype(recarray)), streamerClass._methods) def interpret(branch, awkwardlib=None, swapbytes=True, cntvers=False, tobject=True, speedbump=True): - import uproot.tree - awkward = uproot.tree._normalize_awkwardlib(awkwardlib) + import uproot3.tree + awkward0 = uproot3.tree._normalize_awkwardlib(awkwardlib) dims, isjagged = (), False if len(branch._fLeaves) == 1: @@ -150,21 +150,21 @@ def interpret(branch, awkwardlib=None, swapbytes=True, cntvers=False, tobject=Tr try: if len(branch._fLeaves) == 1: - if isinstance(branch._streamer, uproot.rootio.TStreamerObjectPointer): + if isinstance(branch._streamer, uproot3.rootio.TStreamerObjectPointer): obj = branch._streamer._fTypeName if obj.endswith(b"*"): obj = obj[:-1] - obj = uproot.rootio._safename(obj) + obj = 
uproot3.rootio._safename(obj) if obj in branch._context.classes: return _obj_or_genobj(branch._context.classes.get(obj), branch, isjagged, cntvers=cntvers, tobject=tobject, speedbump=speedbump) # Process Double32_t and Float16_t types possibly packed in TLeafElement - leaftype = uproot.const.kBase + leaftype = uproot3.const.kBase if branch._fLeaves[0].__class__.__name__ == "TLeafElement": leaftype = _normalize_ftype(branch._fLeaves[0]._fType) - iskDouble32 = leaftype == uproot.const.kDouble32 - iskFloat16 = leaftype == uproot.const.kFloat16 + iskDouble32 = leaftype == uproot3.const.kDouble32 + iskFloat16 = leaftype == uproot3.const.kFloat16 if iskDouble32 or iskFloat16: def transform(node, tofloat=True): @@ -204,7 +204,7 @@ def transform(node, tofloat=True): return None if iskDouble32 and numbits == 0: - out = asdtype(awkward.numpy.dtype((">f4", dims)), awkward.numpy.dtype(("f8", dims))) + out = asdtype(awkward0.numpy.dtype((">f4", dims)), awkward0.numpy.dtype(("f8", dims))) elif iskDouble32 and numbits is None: out = asdouble32(low, high, 32, dims) elif iskDouble32: @@ -219,12 +219,12 @@ def transform(node, tofloat=True): return None else: - fromdtype = _leaf2dtype(branch._fLeaves[0], awkward).newbyteorder(">") + fromdtype = _leaf2dtype(branch._fLeaves[0], awkward0).newbyteorder(">") if swapbytes: - out = asdtype(awkward.numpy.dtype((fromdtype, dims)), awkward.numpy.dtype((fromdtype.newbyteorder("="), dims))) + out = asdtype(awkward0.numpy.dtype((fromdtype, dims)), awkward0.numpy.dtype((fromdtype.newbyteorder("="), dims))) else: - out = asdtype(awkward.numpy.dtype((fromdtype, dims)), awkward.numpy.dtype((fromdtype, dims))) + out = asdtype(awkward0.numpy.dtype((fromdtype, dims)), awkward0.numpy.dtype((fromdtype, dims))) if branch._fLeaves[0]._fLeafCount is None: return out @@ -232,14 +232,14 @@ def transform(node, tofloat=True): return asjagged(out) elif len(branch._fLeaves) > 1: - fromdtype = awkward.numpy.dtype([(str(leaf._fName.decode("ascii")), _leaf2dtype(leaf, awkward).newbyteorder(">")) for leaf in branch._fLeaves]) + fromdtype = awkward0.numpy.dtype([(str(leaf._fName.decode("ascii")), _leaf2dtype(leaf, awkward0).newbyteorder(">")) for leaf in branch._fLeaves]) if swapbytes: - todtype = awkward.numpy.dtype([(str(leaf._fName.decode("ascii")), _leaf2dtype(leaf, awkward).newbyteorder("=")) for leaf in branch._fLeaves]) + todtype = awkward0.numpy.dtype([(str(leaf._fName.decode("ascii")), _leaf2dtype(leaf, awkward0).newbyteorder("=")) for leaf in branch._fLeaves]) else: todtype = fromdtype if all(leaf._fLeafCount is None for leaf in branch._fLeaves): - return asdtype(awkward.numpy.dtype((fromdtype, dims)), awkward.numpy.dtype((todtype, dims))) + return asdtype(awkward0.numpy.dtype((fromdtype, dims)), awkward0.numpy.dtype((todtype, dims))) else: return None @@ -248,17 +248,17 @@ def transform(node, tofloat=True): if len(branch._fBranches) > 0 and all(len(x._fLeaves) == 1 and x._fLeaves[0]._fLeafCount is branch._fLeaves[0] for x in branch._fBranches): return asdtype(">i4") - if isinstance(branch._streamer, uproot.rootio.TStreamerObject): - obj = uproot.rootio._safename(branch._streamer._fTypeName) + if isinstance(branch._streamer, uproot3.rootio.TStreamerObject): + obj = uproot3.rootio._safename(branch._streamer._fTypeName) if obj == "string": - return asgenobj(STLString(awkward), branch._context, 0) + return asgenobj(STLString(awkward0), branch._context, 0) elif obj in branch._context.classes: return _obj_or_genobj(branch._context.classes.get(obj), branch, isjagged, cntvers=cntvers, 
tobject=tobject, speedbump=speedbump) - if isinstance(branch._streamer, uproot.rootio.TStreamerInfo): - obj = uproot.rootio._safename(branch._streamer._fName) + if isinstance(branch._streamer, uproot3.rootio.TStreamerInfo): + obj = uproot3.rootio._safename(branch._streamer._fName) if obj == "string": - return asgenobj(STLString(awkward), branch._context, 0) + return asgenobj(STLString(awkward0), branch._context, 0) elif obj in branch._context.classes: return _obj_or_genobj(branch._context.classes.get(obj), branch, isjagged, cntvers=cntvers, tobject=tobject, speedbump=speedbump) @@ -266,9 +266,9 @@ def transform(node, tofloat=True): return asstring(skipbytes=1) elif branch._fLeaves[0].__class__.__name__ == "TLeafElement": - if isinstance(branch._streamer, uproot.rootio.TStreamerBasicType): + if isinstance(branch._streamer, uproot3.rootio.TStreamerBasicType): try: - fromdtype = _ftype2dtype(branch._streamer._fType, awkward) + fromdtype = _ftype2dtype(branch._streamer._fType, awkward0) except _NotNumerical: pass else: @@ -281,12 +281,12 @@ def transform(node, tofloat=True): todims = dims if reduce(lambda x, y: x * y, todims, 1) != fromdims: todims = (fromdims,) - return asdtype(awkward.numpy.dtype((fromdtype, (fromdims,))), awkward.numpy.dtype((todtype, todims))) + return asdtype(awkward0.numpy.dtype((fromdtype, (fromdims,))), awkward0.numpy.dtype((todtype, todims))) - if isinstance(branch._streamer, uproot.rootio.TStreamerBasicPointer): - if uproot.const.kOffsetP < branch._streamer._fType < uproot.const.kOffsetP + 20: + if isinstance(branch._streamer, uproot3.rootio.TStreamerBasicPointer): + if uproot3.const.kOffsetP < branch._streamer._fType < uproot3.const.kOffsetP + 20: try: - fromdtype = _ftype2dtype(branch._streamer._fType - uproot.const.kOffsetP, awkward) + fromdtype = _ftype2dtype(branch._streamer._fType - uproot3.const.kOffsetP, awkward0) except _NotNumerical: pass else: @@ -297,25 +297,25 @@ def transform(node, tofloat=True): if len(branch._fLeaves) == 1 and branch._fLeaves[0]._fLeafCount is not None: return asjagged(asdtype(fromdtype, todtype), skipbytes=1) - if isinstance(branch._streamer, uproot.rootio.TStreamerObjectAny): + if isinstance(branch._streamer, uproot3.rootio.TStreamerObjectAny): if getattr(branch._streamer, "_fTypeName", None) in (b"TArrayC", b"TArrayS", b"TArrayI", b"TArrayL", b"TArrayL64", b"TArrayF", b"TArrayD"): - return asjagged(asdtype(getattr(uproot.rootio, branch._streamer._fTypeName.decode("ascii"))._dtype), skipbytes=4) + return asjagged(asdtype(getattr(uproot3.rootio, branch._streamer._fTypeName.decode("ascii"))._dtype), skipbytes=4) - if isinstance(branch._streamer, uproot.rootio.TStreamerString): + if isinstance(branch._streamer, uproot3.rootio.TStreamerString): return asstring(skipbytes=1) - if isinstance(branch._streamer, uproot.rootio.TStreamerSTLstring): + if isinstance(branch._streamer, uproot3.rootio.TStreamerSTLstring): if branch._isTClonesArray: return asgenobj(STLVector(STLString()), branch._context, 6) else: return asstring(skipbytes=7) - if getattr(branch._streamer, "_fType", None) == uproot.const.kCharStar: + if getattr(branch._streamer, "_fType", None) == uproot3.const.kCharStar: return asstring(skipbytes=4) - if getattr(branch._streamer, "_fSTLtype", None) == uproot.const.kSTLvector: + if getattr(branch._streamer, "_fSTLtype", None) == uproot3.const.kSTLvector: try: - fromdtype = _ftype2dtype(branch._streamer._fCtype, awkward) + fromdtype = _ftype2dtype(branch._streamer._fCtype, awkward0) if swapbytes: ascontent = asdtype(fromdtype, 
fromdtype.newbyteorder("=")) else: @@ -330,14 +330,14 @@ def transform(node, tofloat=True): try: streamerClass = branch._vecstreamer.pyclass except AttributeError: - obj = uproot.rootio._safename(branch._vecstreamer._fName) + obj = uproot3.rootio._safename(branch._vecstreamer._fName) if obj in branch._context.classes: streamerClass = branch._context.classes.get(obj) if getattr(streamerClass, "_hasreadobjany", False): return None if streamerClass.__name__ == "string": - return asgenobj(STLVector(STLString(awkward)), branch._context, 6) + return asgenobj(STLVector(STLString(awkward0)), branch._context, 6) if len(branch._fBranches) != 0: return None @@ -358,7 +358,7 @@ def transform(node, tofloat=True): return asjagged(asstlbitset(int(m.group(1))), skipbytes=6) if getattr(branch._streamer, "_fTypeName", None) == b"vector" or getattr(branch._streamer, "_fTypeName", None) == b"vector": - return asjagged(asdtype(awkward.numpy.bool_), skipbytes=10) + return asjagged(asdtype(awkward0.numpy.bool_), skipbytes=10) elif getattr(branch._streamer, "_fTypeName", None) == b"vector" or getattr(branch._streamer, "_fTypeName", None) == b"vector": return asjagged(asdtype("i1"), skipbytes=10) elif getattr(branch._streamer, "_fTypeName", None) == b"vector" or getattr(branch._streamer, "_fTypeName", None) == b"vector" or getattr(branch._streamer, "_fTypeName", None) == b"vector": @@ -384,7 +384,7 @@ def transform(node, tofloat=True): elif getattr(branch._streamer, "_fTypeName", None) == b"vector" or getattr(branch._streamer, "_fTypeName", None) == b"vector": return asjagged(asdtype("f8"), skipbytes=10) elif getattr(branch._streamer, "_fTypeName", None) == b"vector": - return asgenobj(STLVector(STLString(awkward)), branch._context, 6) + return asgenobj(STLVector(STLString(awkward0)), branch._context, 6) else: m = interpret._vectorpointer.match(getattr(branch._streamer, "_fTypeName", b"")) if m is not None and m.group(1) in branch._context.streamerinfosmap: @@ -392,36 +392,36 @@ def transform(node, tofloat=True): return asgenobj(STLVector(Pointer(streamer.pyclass)), branch._context, skipbytes=6) if getattr(branch._streamer, "_fTypeName", None) == b"map" or getattr(branch._streamer, "_fTypeName", None) == b"map": - return asgenobj(STLMap(STLString(awkward), asdtype(awkward.numpy.bool_)), branch._context, 6) + return asgenobj(STLMap(STLString(awkward0), asdtype(awkward0.numpy.bool_)), branch._context, 6) elif getattr(branch._streamer, "_fTypeName", None) == b"map" or getattr(branch._streamer, "_fTypeName", None) == b"map": - return asgenobj(STLMap(STLString(awkward), asdtype("i1")), branch._context, 6) + return asgenobj(STLMap(STLString(awkward0), asdtype("i1")), branch._context, 6) elif getattr(branch._streamer, "_fTypeName", None) == b"map" or getattr(branch._streamer, "_fTypeName", None) == b"map" or getattr(branch._streamer, "_fTypeName", None) == b"map": - return asgenobj(STLMap(STLString(awkward), asdtype("u1")), branch._context, 6) + return asgenobj(STLMap(STLString(awkward0), asdtype("u1")), branch._context, 6) elif getattr(branch._streamer, "_fTypeName", None) == b"map" or getattr(branch._streamer, "_fTypeName", None) == b"map": - return asgenobj(STLMap(STLString(awkward), asdtype("i2")), branch._context, 6) + return asgenobj(STLMap(STLString(awkward0), asdtype("i2")), branch._context, 6) elif getattr(branch._streamer, "_fTypeName", None) == b"map" or getattr(branch._streamer, "_fTypeName", None) == b"map": - return asgenobj(STLMap(STLString(awkward), asdtype("u2")), branch._context, 6) + return 
asgenobj(STLMap(STLString(awkward0), asdtype("u2")), branch._context, 6) elif getattr(branch._streamer, "_fTypeName", None) == b"map" or getattr(branch._streamer, "_fTypeName", None) == b"map": - return asgenobj(STLMap(STLString(awkward), asdtype("i4")), branch._context, 6) + return asgenobj(STLMap(STLString(awkward0), asdtype("i4")), branch._context, 6) elif getattr(branch._streamer, "_fTypeName", None) == b"map" or getattr(branch._streamer, "_fTypeName", None) == b"map": - return asgenobj(STLMap(STLString(awkward), asdtype("u4")), branch._context, 6) + return asgenobj(STLMap(STLString(awkward0), asdtype("u4")), branch._context, 6) elif getattr(branch._streamer, "_fTypeName", None) == b"map" or getattr(branch._streamer, "_fTypeName", None) == b"map": - return asgenobj(STLMap(STLString(awkward), asdtype("i8")), branch._context, 6) + return asgenobj(STLMap(STLString(awkward0), asdtype("i8")), branch._context, 6) elif getattr(branch._streamer, "_fTypeName", None) == b"map" or getattr(branch._streamer, "_fTypeName", None) == b"map": - return asgenobj(STLMap(STLString(awkward), asdtype("u8")), branch._context, 6) + return asgenobj(STLMap(STLString(awkward0), asdtype("u8")), branch._context, 6) elif getattr(branch._streamer, "_fTypeName", None) == b"map" or getattr(branch._streamer, "_fTypeName", None) == b"map": - return asgenobj(STLMap(STLString(awkward), asdtype("i8")), branch._context, 6) + return asgenobj(STLMap(STLString(awkward0), asdtype("i8")), branch._context, 6) elif getattr(branch._streamer, "_fTypeName", None) == b"map" or getattr(branch._streamer, "_fTypeName", None) == b"map": - return asgenobj(STLMap(STLString(awkward), asdtype("u8")), branch._context, 6) + return asgenobj(STLMap(STLString(awkward0), asdtype("u8")), branch._context, 6) elif getattr(branch._streamer, "_fTypeName", None) == b"map" or getattr(branch._streamer, "_fTypeName", None) == b"map": - return asgenobj(STLMap(STLString(awkward), asdtype("f4")), branch._context, 6) + return asgenobj(STLMap(STLString(awkward0), asdtype("f4")), branch._context, 6) elif getattr(branch._streamer, "_fTypeName", None) == b"map" or getattr(branch._streamer, "_fTypeName", None) == b"map": - return asgenobj(STLMap(STLString(awkward), asdtype("f8")), branch._context, 6) + return asgenobj(STLMap(STLString(awkward0), asdtype("f8")), branch._context, 6) elif getattr(branch._streamer, "_fTypeName", None) == b"map": - return asgenobj(STLMap(STLString(awkward), STLString(awkward)), branch._context, 6) + return asgenobj(STLMap(STLString(awkward0), STLString(awkward0)), branch._context, 6) if getattr(branch._streamer, "_fTypeName", None) == b"vector >" or getattr(branch._streamer, "_fTypeName", None) == b"vector >": - return asgenobj(STLVector(STLVector(asdtype(awkward.numpy.bool_))), branch._context, 6) + return asgenobj(STLVector(STLVector(asdtype(awkward0.numpy.bool_))), branch._context, 6) elif getattr(branch._streamer, "_fTypeName", None) == b"vector >" or getattr(branch._streamer, "_fTypeName", None) == b"vector >": return asgenobj(STLVector(STLVector(asdtype("i1"))), branch._context, 6) elif getattr(branch._streamer, "_fTypeName", None) == b"vector >" or getattr(branch._streamer, "_fTypeName", None) == b"vector >" or getattr(branch._streamer, "_fTypeName", None) == b"vector >": @@ -447,7 +447,7 @@ def transform(node, tofloat=True): elif getattr(branch._streamer, "_fTypeName", None) == b"vector >" or getattr(branch._streamer, "_fTypeName", None) == b"vector >": return asgenobj(STLVector(STLVector(asdtype(">f8"))), branch._context, 6) elif 
getattr(branch._streamer, "_fTypeName", None) == b"vector >": - return asgenobj(STLVector(STLVector(STLString(awkward))), branch._context, 6) + return asgenobj(STLVector(STLVector(STLString(awkward0))), branch._context, 6) m = re.match(b"bitset<([1-9][0-9]*)>", branch._fClassName) if m is not None: @@ -457,7 +457,7 @@ def transform(node, tofloat=True): return asstring(skipbytes=1) if branch._fClassName == b"vector" or branch._fClassName == b"vector": - return asjagged(asdtype(awkward.numpy.bool_), skipbytes=10) + return asjagged(asdtype(awkward0.numpy.bool_), skipbytes=10) elif branch._fClassName == b"vector" or branch._fClassName == b"vector": return asjagged(asdtype("i1"), skipbytes=10) elif branch._fClassName == b"vector" or branch._fClassName == b"vector" or branch._fClassName == b"vector": @@ -483,10 +483,10 @@ def transform(node, tofloat=True): elif branch._fClassName == b"vector" or branch._fClassName == b"vector": return asjagged(asdtype("f8"), skipbytes=10) elif branch._fClassName == b"vector": - return asgenobj(STLVector(STLString(awkward)), branch._context, 6) + return asgenobj(STLVector(STLString(awkward0)), branch._context, 6) if branch._fClassName == b"vector >" or branch._fClassName == b"vector >": - return asgenobj(STLVector(STLVector(asdtype(awkward.numpy.bool_))), branch._context, 6) + return asgenobj(STLVector(STLVector(asdtype(awkward0.numpy.bool_))), branch._context, 6) elif branch._fClassName == b"vector >" or branch._fClassName == b"vector >": return asgenobj(STLVector(STLVector(asdtype("i1"))), branch._context, 6) elif branch._fClassName == b"vector >" or branch._fClassName == b"vector >" or branch._fClassName == b"vector >": @@ -512,46 +512,46 @@ def transform(node, tofloat=True): elif branch._fClassName == b"vector >" or branch._fClassName == b"vector >": return asgenobj(STLVector(STLVector(asdtype(">f8"))), branch._context, 6) elif branch._fClassName == b"vector >": - return asgenobj(STLVector(STLVector(STLString(awkward))), branch._context, 6) + return asgenobj(STLVector(STLVector(STLString(awkward0))), branch._context, 6) if branch._fClassName == b"map" or branch._fClassName == b"map": - return asgenobj(STLMap(STLString(awkward), asdtype(awkward.numpy.bool_)), branch._context, 6) + return asgenobj(STLMap(STLString(awkward0), asdtype(awkward0.numpy.bool_)), branch._context, 6) elif branch._fClassName == b"map" or branch._fClassName == b"map": - return asgenobj(STLMap(STLString(awkward), asdtype("i1")), branch._context, 6) + return asgenobj(STLMap(STLString(awkward0), asdtype("i1")), branch._context, 6) elif branch._fClassName == b"map" or branch._fClassName == b"map" or branch._fClassName == b"map": - return asgenobj(STLMap(STLString(awkward), asdtype("u1")), branch._context, 6) + return asgenobj(STLMap(STLString(awkward0), asdtype("u1")), branch._context, 6) elif branch._fClassName == b"map" or branch._fClassName == b"map": - return asgenobj(STLMap(STLString(awkward), asdtype("i2")), branch._context, 6) + return asgenobj(STLMap(STLString(awkward0), asdtype("i2")), branch._context, 6) elif branch._fClassName == b"map" or branch._fClassName == b"map": - return asgenobj(STLMap(STLString(awkward), asdtype("u2")), branch._context, 6) + return asgenobj(STLMap(STLString(awkward0), asdtype("u2")), branch._context, 6) elif branch._fClassName == b"map" or branch._fClassName == b"map": - return asgenobj(STLMap(STLString(awkward), asdtype("i4")), branch._context, 6) + return asgenobj(STLMap(STLString(awkward0), asdtype("i4")), branch._context, 6) elif branch._fClassName == 
b"map" or branch._fClassName == b"map": - return asgenobj(STLMap(STLString(awkward), asdtype("u4")), branch._context, 6) + return asgenobj(STLMap(STLString(awkward0), asdtype("u4")), branch._context, 6) elif branch._fClassName == b"map" or branch._fClassName == b"map": - return asgenobj(STLMap(STLString(awkward), asdtype("i8")), branch._context, 6) + return asgenobj(STLMap(STLString(awkward0), asdtype("i8")), branch._context, 6) elif branch._fClassName == b"map" or branch._fClassName == b"map": - return asgenobj(STLMap(STLString(awkward), asdtype("u8")), branch._context, 6) + return asgenobj(STLMap(STLString(awkward0), asdtype("u8")), branch._context, 6) elif branch._fClassName == b"map" or branch._fClassName == b"map": - return asgenobj(STLMap(STLString(awkward), asdtype("i8")), branch._context, 6) + return asgenobj(STLMap(STLString(awkward0), asdtype("i8")), branch._context, 6) elif branch._fClassName == b"map" or branch._fClassName == b"map": - return asgenobj(STLMap(STLString(awkward), asdtype("u8")), branch._context, 6) + return asgenobj(STLMap(STLString(awkward0), asdtype("u8")), branch._context, 6) elif branch._fClassName == b"map" or branch._fClassName == b"map": - return asgenobj(STLMap(STLString(awkward), asdtype("f4")), branch._context, 6) + return asgenobj(STLMap(STLString(awkward0), asdtype("f4")), branch._context, 6) elif branch._fClassName == b"map" or branch._fClassName == b"map": - return asgenobj(STLMap(STLString(awkward), asdtype("f8")), branch._context, 6) + return asgenobj(STLMap(STLString(awkward0), asdtype("f8")), branch._context, 6) elif branch._fClassName == b"map": - return asgenobj(STLMap(STLString(awkward), STLString(awkward)), branch._context, 6) + return asgenobj(STLMap(STLString(awkward0), STLString(awkward0)), branch._context, 6) if branch.name.endswith(b".first") and branch._fClassName.startswith(b"pair" or t == b"vector": - return asgenobj(SimpleArray(STLVector(asdtype(awkward.numpy.bool_))), branch._context, 6) + return asgenobj(SimpleArray(STLVector(asdtype(awkward0.numpy.bool_))), branch._context, 6) elif t == b"vector" or t == b"vector": return asgenobj(SimpleArray(STLVector(asdtype("i1"))), branch._context, 6) elif t == b"vector" or t == b"vector" or t == b"vector": @@ -577,7 +577,7 @@ def transform(node, tofloat=True): elif t == b"vector" or t == b"vector": return asgenobj(SimpleArray(STLVector(asdtype("f8"))), branch._context, 6) elif t == b"vector": - return asgenobj(SimpleArray(STLVector(STLString(awkward))), branch._context, 6) + return asgenobj(SimpleArray(STLVector(STLString(awkward0))), branch._context, 6) return None diff --git a/uproot/interp/interp.py b/uproot3/interp/interp.py similarity index 88% rename from uproot/interp/interp.py rename to uproot3/interp/interp.py index ef5aa456..4e413d21 100644 --- a/uproot/interp/interp.py +++ b/uproot3/interp/interp.py @@ -1,16 +1,17 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE from __future__ import absolute_import -import awkward +import awkward0 class Interpretation(object): # makes __doc__ attribute mutable before Python 3.3 __metaclass__ = type.__new__(type, "type", (type,), {}) - awkward = awkward + awkward = awkward0 + awkward0 = awkward0 debug_reading = False @@ -18,7 +19,7 @@ def awkwardlib(self, lib): cls = type(self) out = cls.__new__(cls) out.__dict__.update(self.__dict__) - out.awkward = lib + out.awkward0 = lib return out @property @@ 
-27,7 +28,7 @@ def identifier(self): @property def type(self): - raise NotImplementedError # awkward.type.Type + raise NotImplementedError # awkward0.type.Type def empty(self): raise NotImplementedError diff --git a/uproot/interp/jagged.py b/uproot3/interp/jagged.py similarity index 64% rename from uproot/interp/jagged.py rename to uproot3/interp/jagged.py index 97495827..72e6d1ae 100644 --- a/uproot/interp/jagged.py +++ b/uproot3/interp/jagged.py @@ -1,35 +1,35 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE from __future__ import absolute_import import math -import uproot.interp.interp -import uproot.interp.numerical +import uproot3.interp.interp +import uproot3.interp.numerical class _JaggedArrayPrep(object): def __init__(self, counts, content): self.counts = counts self.content = content -def _destructive_divide(array, divisor, awkward): +def _destructive_divide(array, divisor, awkward0): if divisor == 1: pass elif divisor == 2: - awkward.numpy.right_shift(array, 1, out=array) + awkward0.numpy.right_shift(array, 1, out=array) elif divisor == 4: - awkward.numpy.right_shift(array, 2, out=array) + awkward0.numpy.right_shift(array, 2, out=array) elif divisor == 8: - awkward.numpy.right_shift(array, 3, out=array) + awkward0.numpy.right_shift(array, 3, out=array) else: - awkward.numpy.floor_divide(array, divisor, out=array) + awkward0.numpy.floor_divide(array, divisor, out=array) return array -class asjagged(uproot.interp.interp.Interpretation): +class asjagged(uproot3.interp.interp.Interpretation): # makes __doc__ attribute mutable before Python 3.3 - __metaclass__ = type.__new__(type, "type", (uproot.interp.interp.Interpretation.__metaclass__,), {}) + __metaclass__ = type.__new__(type, "type", (uproot3.interp.interp.Interpretation.__metaclass__,), {}) def __init__(self, content, skipbytes=0): self.content = content @@ -49,10 +49,10 @@ def identifier(self): @property def type(self): - return self.awkward.type.ArrayType(self.awkward.numpy.inf, self.content.type) + return self.awkward0.type.ArrayType(self.awkward0.numpy.inf, self.content.type) def empty(self): - return self.awkward.JaggedArray(self.awkward.numpy.empty(0, dtype=self.awkward.JaggedArray.INDEXTYPE), self.awkward.numpy.empty(0, dtype=self.awkward.JaggedArray.INDEXTYPE), self.content.empty()) + return self.awkward0.JaggedArray(self.awkward0.numpy.empty(0, dtype=self.awkward0.JaggedArray.INDEXTYPE), self.awkward0.numpy.empty(0, dtype=self.awkward0.JaggedArray.INDEXTYPE), self.content.empty()) def compatible(self, other): return isinstance(other, asjagged) and self.content.compatible(other.content) @@ -65,24 +65,24 @@ def source_numitems(self, source): def fromroot(self, data, byteoffsets, local_entrystart, local_entrystop, keylen): if local_entrystart == local_entrystop: - return self.awkward.JaggedArray.fromoffsets([0], self.content.fromroot(data, None, local_entrystart, local_entrystop, keylen)) + return self.awkward0.JaggedArray.fromoffsets([0], self.content.fromroot(data, None, local_entrystart, local_entrystop, keylen)) else: if self.skipbytes == 0: - offsets = _destructive_divide(byteoffsets, self.content.itemsize, self.awkward) + offsets = _destructive_divide(byteoffsets, self.content.itemsize, self.awkward0) starts = offsets[local_entrystart : local_entrystop ] stops = offsets[local_entrystart + 1 : local_entrystop + 1] content = self.content.fromroot(data, None, starts[0], 
stops[-1], keylen) - return self.awkward.JaggedArray(starts, stops, content) + return self.awkward0.JaggedArray(starts, stops, content) else: bytestarts = byteoffsets[local_entrystart : local_entrystop ] + self.skipbytes bytestops = byteoffsets[local_entrystart + 1 : local_entrystop + 1] - mask = self.awkward.numpy.zeros(len(data), dtype=self.awkward.numpy.int8) + mask = self.awkward0.numpy.zeros(len(data), dtype=self.awkward0.numpy.int8) mask[bytestarts[bytestarts < len(data)]] = 1 - self.awkward.numpy.add.at(mask, bytestops[bytestops < len(data)], -1) - self.awkward.numpy.cumsum(mask, out=mask) - data = data[mask.view(self.awkward.numpy.bool_)] + self.awkward0.numpy.add.at(mask, bytestops[bytestops < len(data)], -1) + self.awkward0.numpy.cumsum(mask, out=mask) + data = data[mask.view(self.awkward0.numpy.bool_)] content = self.content.fromroot(data, None, 0, bytestops[-1], keylen) @@ -90,27 +90,27 @@ def fromroot(self, data, byteoffsets, local_entrystart, local_entrystop, keylen) sub = self.content while hasattr(sub, "content"): sub = sub.content - if isinstance(sub, uproot.interp.numerical.asdtype): + if isinstance(sub, uproot3.interp.numerical.asdtype): itemsize = sub.fromdtype.itemsize - if isinstance(sub, uproot.interp.numerical.asstlbitset): + if isinstance(sub, uproot3.interp.numerical.asstlbitset): itemsize = sub.numbytes + 4 counts = bytestops - bytestarts shift = math.log(itemsize, 2) if shift == round(shift): - self.awkward.numpy.right_shift(counts, int(shift), out=counts) + self.awkward0.numpy.right_shift(counts, int(shift), out=counts) else: - self.awkward.numpy.floor_divide(counts, itemsize, out=counts) + self.awkward0.numpy.floor_divide(counts, itemsize, out=counts) - offsets = self.awkward.numpy.empty(len(counts) + 1, self.awkward.JaggedArray.INDEXTYPE) + offsets = self.awkward0.numpy.empty(len(counts) + 1, self.awkward0.JaggedArray.INDEXTYPE) offsets[0] = 0 - self.awkward.numpy.cumsum(counts, out=offsets[1:]) + self.awkward0.numpy.cumsum(counts, out=offsets[1:]) - return self.awkward.JaggedArray(offsets[:-1], offsets[1:], content) + return self.awkward0.JaggedArray(offsets[:-1], offsets[1:], content) def destination(self, numitems, numentries): content = self.content.destination(numitems, numentries) - counts = self.awkward.numpy.empty(numentries, dtype=self.awkward.JaggedArray.INDEXTYPE) + counts = self.awkward0.numpy.empty(numentries, dtype=self.awkward0.JaggedArray.INDEXTYPE) return _JaggedArrayPrep(counts, content) def fill(self, source, destination, itemstart, itemstop, entrystart, entrystop): @@ -128,7 +128,7 @@ def finalize(self, destination, branch): if len(branch._fLeaves) == 1: leafcount = branch._fLeaves[0]._fLeafCount - out = self.awkward.Methods.maybemixin(type(content), self.awkward.JaggedArray).fromcounts(destination.counts, content) + out = self.awkward0.Methods.maybemixin(type(content), self.awkward0.JaggedArray).fromcounts(destination.counts, content) out.leafcount = leafcount if self.debug_reading: print("reading {0}".format(repr(out))) diff --git a/uproot/interp/numerical.py b/uproot3/interp/numerical.py similarity index 80% rename from uproot/interp/numerical.py rename to uproot3/interp/numerical.py index 03b0d5c3..a2ee2904 100644 --- a/uproot/interp/numerical.py +++ b/uproot3/interp/numerical.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE from __future__ import absolute_import @@ -10,7 
+10,7 @@ import numpy -import uproot.interp.interp +import uproot3.interp.interp if sys.version_info[0] <= 2: string_types = (unicode, str) @@ -27,14 +27,14 @@ def _dtypeshape(obj): out = out + shape return obj, out -def _flatlen(obj, awkward): - if isinstance(obj, awkward.numpy.dtype): +def _flatlen(obj, awkward0): + if isinstance(obj, awkward0.numpy.dtype): dtype, shape = _dtypeshape(obj) - return int(awkward.numpy.prod(shape)) + return int(awkward0.numpy.prod(shape)) else: - return int(awkward.numpy.prod(obj.shape)) + return int(awkward0.numpy.prod(obj.shape)) -class _asnumeric(uproot.interp.interp.Interpretation): +class _asnumeric(uproot3.interp.interp.Interpretation): @property def todtypeflat(self): return _dtypeshape(self.todtype)[0] @@ -49,25 +49,25 @@ def type(self): if shape == (): return dtype else: - return self.awkward.type.ArrayType(*(shape + (dtype,))) + return self.awkward0.type.ArrayType(*(shape + (dtype,))) def empty(self): - return self.awkward.numpy.empty(0, self.todtype) + return self.awkward0.numpy.empty(0, self.todtype) def source_numitems(self, source): - return _flatlen(source, self.awkward) + return _flatlen(source, self.awkward0) def destination(self, numitems, numentries): - quotient, remainder = divmod(numitems, _flatlen(self.todtype, self.awkward)) + quotient, remainder = divmod(numitems, _flatlen(self.todtype, self.awkward0)) if remainder != 0: - raise ValueError("cannot reshape {0} items as {1} (i.e. groups of {2})".format(numitems, self.todtype.shape, _flatlen(self.todtype, self.awkward))) - return self.awkward.numpy.empty(quotient, dtype=self.todtype) + raise ValueError("cannot reshape {0} items as {1} (i.e. groups of {2})".format(numitems, self.todtype.shape, _flatlen(self.todtype, self.awkward0))) + return self.awkward0.numpy.empty(quotient, dtype=self.todtype) def fill(self, source, destination, itemstart, itemstop, entrystart, entrystop): destination.reshape(-1)[itemstart:itemstop] = source.reshape(-1) def clip(self, destination, itemstart, itemstop, entrystart, entrystop): - length = _flatlen(self.todtype, self.awkward) + length = _flatlen(self.todtype, self.awkward0) startquotient, startremainder = divmod(itemstart, length) stopquotient, stopremainder = divmod(itemstop, length) assert startremainder == 0 @@ -86,25 +86,25 @@ class asdtype(_asnumeric): __metaclass__ = type.__new__(type, "type", (_asnumeric.__metaclass__,), {}) def __init__(self, fromdtype, todtype=None): - if isinstance(fromdtype, self.awkward.numpy.dtype): + if isinstance(fromdtype, self.awkward0.numpy.dtype): self.fromdtype = fromdtype elif isinstance(fromdtype, string_types) and len(fromdtype) > 0 and fromdtype[0] in BYTEORDER_INDICATORS: - self.fromdtype = self.awkward.numpy.dtype(fromdtype) + self.fromdtype = self.awkward0.numpy.dtype(fromdtype) elif isinstance(fromdtype, list) and any(e[1][0] in BYTEORDER_INDICATORS for e in fromdtype): - self.fromdtype = self.awkward.numpy.dtype(fromdtype) + self.fromdtype = self.awkward0.numpy.dtype(fromdtype) else: - self.fromdtype = self.awkward.numpy.dtype(fromdtype).newbyteorder(">") + self.fromdtype = self.awkward0.numpy.dtype(fromdtype).newbyteorder(">") if todtype is None: self.todtype = self.fromdtype.newbyteorder("=") - elif isinstance(todtype, self.awkward.numpy.dtype): + elif isinstance(todtype, self.awkward0.numpy.dtype): self.todtype = todtype elif isinstance(todtype, string_types) and len(todtype) > 0 and todtype[0] in BYTEORDER_INDICATORS: - self.todtype = self.awkward.numpy.dtype(todtype) + self.todtype = 
self.awkward0.numpy.dtype(todtype) elif isinstance(todtype, list) and any(e[1][0] in BYTEORDER_INDICATORS for e in todtype): - self.todtype = self.awkward.numpy.dtype(todtype) + self.todtype = self.awkward0.numpy.dtype(todtype) else: - self.todtype = self.awkward.numpy.dtype(todtype).newbyteorder("=") + self.todtype = self.awkward0.numpy.dtype(todtype).newbyteorder("=") @@ -120,7 +120,7 @@ def to(self, todtype=None, todims=None): if todims is not None: shape = todims + shape - return asdtype(self.fromdtype, self.awkward.numpy.dtype((dtype, shape))) + return asdtype(self.fromdtype, self.awkward0.numpy.dtype((dtype, shape))) def toarray(self, array): return asarray(self.fromdtype, array) @@ -133,7 +133,7 @@ def __repr__(self): @property def identifier(self): - _byteorder = {"!": "B", ">": "B", "<": "L", "|": "L", "=": "B" if self.awkward.numpy.dtype(">f8").isnative else "L"} + _byteorder = {"!": "B", ">": "B", "<": "L", "|": "L", "=": "B" if self.awkward0.numpy.dtype(">f8").isnative else "L"} def form(dt, n): dtype, shape = _dtypeshape(dt) return "{0}{1}{2}({3}{4})".format(_byteorder[dtype.byteorder], dtype.kind, dtype.itemsize, ",".join(repr(x) for x in shape), n) @@ -168,17 +168,17 @@ class asarray(asdtype): __metaclass__ = type.__new__(type, "type", (asdtype.__metaclass__,), {}) def __init__(self, fromdtype, toarray): - if isinstance(fromdtype, self.awkward.numpy.dtype): + if isinstance(fromdtype, self.awkward0.numpy.dtype): self.fromdtype = fromdtype elif isinstance(fromdtype, string_types) and len(fromdtype) > 0 and fromdtype[0] in (">", "<", "=", "|", b">", b"<", b"=", b"|"): - self.fromdtype = self.awkward.numpy.dtype(fromdtype) + self.fromdtype = self.awkward0.numpy.dtype(fromdtype) else: - self.fromdtype = self.awkward.numpy.dtype(fromdtype).newbyteorder(">") + self.fromdtype = self.awkward0.numpy.dtype(fromdtype).newbyteorder(">") self.toarray = toarray @property def todtype(self): - return self.awkward.numpy.dtype((self.toarray.dtype, self.toarray.shape[1:])) + return self.awkward0.numpy.dtype((self.toarray.dtype, self.toarray.shape[1:])) def __repr__(self): return "asarray({0}, <array dtype={1} shape={2} at 0x{3:012x}>)".format(repr(str(self.fromdtype)), self.toarray.dtype, self.toarray.shape, id(self.toarray)) @@ -188,11 +188,11 @@ def identifier(self): return "asarray" + super(asarray, self).identifier[7:] def destination(self, numitems, numentries): - quotient, remainder = divmod(numitems, _flatlen(self.todtype, self.awkward)) + quotient, remainder = divmod(numitems, _flatlen(self.todtype, self.awkward0)) if remainder != 0: - raise ValueError("cannot reshape {0} items as {1} (i.e.
groups of {2})".format(numitems, self.todtype.shape, _flatlen(self.todtype, self.awkward0))) + if _flatlen(self.toarray, self.awkward0) < numitems: + raise ValueError("cannot put {0} items into an array of {1} items".format(numitems, _flatlen(self.toarray, self.awkward0))) return self.toarray, quotient def fill(self, source, destination, itemstart, itemstop, entrystart, entrystop): @@ -234,14 +234,14 @@ def __init__(self, low, high, numbits, fromdims=(), todims=None): @property def todtype(self): - return self.awkward.numpy.dtype((self.awkward.numpy.float64, self.todims)) + return self.awkward0.numpy.dtype((self.awkward0.numpy.float64, self.todims)) @property def fromdtype(self): if self.truncated: - return self.awkward.numpy.dtype(({'exponent': ('>u1', 0), 'mantissa': ('>u2', 1)}, self.fromdims)) + return self.awkward0.numpy.dtype(({'exponent': ('>u1', 0), 'mantissa': ('>u2', 1)}, self.fromdims)) else: - return self.awkward.numpy.dtype(('>u4', self.fromdims)) + return self.awkward0.numpy.dtype(('>u4', self.fromdims)) @property def fromdtypeflat(self): @@ -281,7 +281,7 @@ def fromroot(self, data, byteoffsets, local_entrystart, local_entrystop, keylen) array = data.view(dtype=self.fromdtypeflat) # Make sure the interpreted data has correct shape if self.fromdims != (): - product = int(self.awkward.numpy.prod(self.fromdims)) + product = int(self.awkward0.numpy.prod(self.fromdims)) quotient, remainder = divmod(len(array), product) assert remainder == 0, "{0} % {1} == {2} != 0".format(len(array), product, len(array) % product) array = array.reshape((quotient,) + self.fromdims) @@ -289,19 +289,19 @@ def fromroot(self, data, byteoffsets, local_entrystart, local_entrystop, keylen) if self.truncated: array = array[local_entrystart:local_entrystop] # We have to make copies to work with contiguous arrays - unpacked = array['exponent'].astype(self.awkward.numpy.int32) - mantissa = array['mantissa'].astype(self.awkward.numpy.int32) + unpacked = array['exponent'].astype(self.awkward0.numpy.int32) + mantissa = array['mantissa'].astype(self.awkward0.numpy.int32) unpacked <<= 23 unpacked |= (mantissa & ((1 << (self.numbits + 1)) - 1)) << (23 - self.numbits) sign = ((1 << (self.numbits + 1)) & mantissa != 0) * -2 + 1 - array = unpacked.view(dtype=self.awkward.numpy.float32) * sign + array = unpacked.view(dtype=self.awkward0.numpy.float32) * sign array = array.astype(self.todtypeflat) else: array = array[local_entrystart:local_entrystop].astype(self.todtypeflat) - self.awkward.numpy.multiply(array, float(self.high - self.low) / (1 << self.numbits), out=array) - self.awkward.numpy.add(array, self.low, out=array) + self.awkward0.numpy.multiply(array, float(self.high - self.low) / (1 << self.numbits), out=array) + self.awkward0.numpy.add(array, self.low, out=array) return array @@ -316,7 +316,7 @@ def __init__(self, low, high, numbits, fromdims=(), todims=None): @property def todtype(self): - return self.awkward.numpy.dtype((self.awkward.numpy.float32, self.todims)) + return self.awkward0.numpy.dtype((self.awkward0.numpy.float32, self.todims)) def __repr__(self): args = [repr(self.low), repr(self.high), repr(self.numbits), repr(self.fromdtype), repr(self.todtype)] @@ -331,13 +331,13 @@ def identifier(self): def compatible(self, other): return isinstance(other, asfloat16) and self.low == other.low and self.high == other.high and self.numbits == other.numbits and self.fromdtype == other.fromdtype and self.todtype == other.todtype -class asstlbitset(uproot.interp.interp.Interpretation): +class 
asstlbitset(uproot3.interp.interp.Interpretation): # makes __doc__ attribute mutable before Python 3.3 - __metaclass__ = type.__new__(type, "type", (uproot.interp.interp.Interpretation.__metaclass__,), {}) + __metaclass__ = type.__new__(type, "type", (uproot3.interp.interp.Interpretation.__metaclass__,), {}) @property def todtype(self): - return self.awkward.numpy.dtype(self.awkward.numpy.bool_) + return self.awkward0.numpy.dtype(self.awkward0.numpy.bool_) def __init__(self, numbytes): self.numbytes = numbytes @@ -351,10 +351,10 @@ def identifier(self): @property def type(self): - return self.awkward.type.ArrayType(self.numbytes, self.todtype) + return self.awkward0.type.ArrayType(self.numbytes, self.todtype) def empty(self): - return self.awkward.numpy.empty((0, self.numbytes), dtype=self.todtype) + return self.awkward0.numpy.empty((0, self.numbytes), dtype=self.todtype) def compatible(self, other): return (isinstance(other, asstlbitset) and self.numbytes == other.numbytes) or \ @@ -364,13 +364,13 @@ def numitems(self, numbytes, numentries): return max(0, numbytes // (self.numbytes + 4)) def source_numitems(self, source): - return int(self.awkward.numpy.prod(source.shape)) + return int(self.awkward0.numpy.prod(source.shape)) def fromroot(self, data, byteoffsets, local_entrystart, local_entrystop, keylen): return data.view(self.todtype).reshape((-1, self.numbytes + 4))[:, 4:] def destination(self, numitems, numentries): - return self.awkward.numpy.empty((numitems, self.numbytes), dtype=self.todtype) + return self.awkward0.numpy.empty((numitems, self.numbytes), dtype=self.todtype) def fill(self, source, destination, itemstart, itemstop, entrystart, entrystop): destination[itemstart:itemstop] = source diff --git a/uproot/interp/objects.py b/uproot3/interp/objects.py similarity index 80% rename from uproot/interp/objects.py rename to uproot3/interp/objects.py index ac1f5319..4ae32688 100644 --- a/uproot/interp/objects.py +++ b/uproot3/interp/objects.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE from __future__ import absolute_import @@ -10,12 +10,12 @@ import numpy -import uproot.rootio -import uproot.interp.interp -import uproot.interp.numerical -import uproot.interp.jagged +import uproot3.rootio +import uproot3.interp.interp +import uproot3.interp.numerical +import uproot3.interp.jagged -from uproot._util import _tobytes +from uproot3._util import _tobytes class SimpleArray(object): def __init__(self, cls): @@ -61,7 +61,7 @@ def read(self, source, cursor, context, parent): if hasattr(source, "_source") and len(source._source) == 0: return [] numitems = cursor.field(source, self._format1) - if isinstance(self.cls, uproot.interp.numerical.asdtype): + if isinstance(self.cls, uproot3.interp.numerical.asdtype): out = cursor.array(source, numitems, self.cls.fromdtype) if out.dtype != self.cls.todtype: out = out.astype(self.cls.todtype) @@ -93,7 +93,7 @@ def read(self, source, cursor, context, parent): out = {} for i in range(numitems): - if isinstance(self.keycls, uproot.interp.numerical.asdtype): + if isinstance(self.keycls, uproot3.interp.numerical.asdtype): key = cursor.array(source, 1, self.keycls.fromdtype) if key.dtype != self.keycls.todtype: key = key.astype(self.keycls.todtype) @@ -101,7 +101,7 @@ def read(self, source, cursor, context, parent): else: key = self.keycls.read(source, cursor, context, parent) - if 
isinstance(self.valcls, uproot.interp.numerical.asdtype): + if isinstance(self.valcls, uproot3.interp.numerical.asdtype): val = cursor.array(source, 1, self.valcls.fromdtype) if val.dtype != self.valcls.todtype: val = val.astype(self.valcls.todtype) @@ -114,10 +114,10 @@ def read(self, source, cursor, context, parent): return out class STLString(object): - def __init__(self, awkward=None): - if awkward is None: - awkward = uproot.interp.interp.Interpretation.awkward - self.awkward = awkward + def __init__(self, awkward0=None): + if awkward0 is None: + awkward0 = uproot3.interp.interp.Interpretation.awkward0 + self.awkward0 = awkward0 @property def __name__(self): @@ -133,7 +133,7 @@ def read(self, source, cursor, context, parent): numitems = cursor.field(source, self._format1) if numitems == 255: numitems = cursor.field(source, self._format2) - return _tobytes(cursor.array(source, numitems, self.awkward.ObjectArray.CHARTYPE)) + return _tobytes(cursor.array(source, numitems, self.awkward0.ObjectArray.CHARTYPE)) class Pointer(object): def __init__(self, cls): @@ -150,16 +150,16 @@ def __repr__(self): return "Pointer({0})".format(repr(self.cls)) def read(self, source, cursor, context, parent): - return uproot.rootio._readobjany(source, cursor, context, parent) + return uproot3.rootio._readobjany(source, cursor, context, parent) _format1 = struct.Struct(">II") -class astable(uproot.interp.interp.Interpretation): +class astable(uproot3.interp.interp.Interpretation): # makes __doc__ attribute mutable before Python 3.3 - __metaclass__ = type.__new__(type, "type", (uproot.interp.interp.Interpretation.__metaclass__,), {}) + __metaclass__ = type.__new__(type, "type", (uproot3.interp.interp.Interpretation.__metaclass__,), {}) def __init__(self, content): - if not isinstance(content, uproot.interp.numerical.asdtype) or content.todtype.names is None or len(content.todtype.names) == 0: + if not isinstance(content, uproot3.interp.numerical.asdtype) or content.todtype.names is None or len(content.todtype.names) == 0: raise TypeError("astable must be given a recarray dtype") self.content = content @@ -168,33 +168,33 @@ def itemsize(self): return self.content.itemsize def __repr__(self): - dtype, shape = uproot.interp.numerical._dtypeshape(self.content.todtype) - return "astable({0})".format(repr(self.content.to(self.awkward.util.numpy.dtype([(n, dtype[n]) for n in dtype.names if not n.startswith(" ")]), shape))) + dtype, shape = uproot3.interp.numerical._dtypeshape(self.content.todtype) + return "astable({0})".format(repr(self.content.to(self.awkward0.util.numpy.dtype([(n, dtype[n]) for n in dtype.names if not n.startswith(" ")]), shape))) def tonumpy(self): return self.content @property def identifier(self): - dtype, shape = uproot.interp.numerical._dtypeshape(self.content.todtype) + dtype, shape = uproot3.interp.numerical._dtypeshape(self.content.todtype) return "astable({0})".format(self.content.identifier) @property def type(self): - dtype, shape = uproot.interp.numerical._dtypeshape(self.content.todtype) + dtype, shape = uproot3.interp.numerical._dtypeshape(self.content.todtype) fields = None for n in dtype.names: if fields is None: - fields = self.awkward.type.ArrayType(n, dtype[n]) + fields = self.awkward0.type.ArrayType(n, dtype[n]) else: - fields = fields & self.awkward.type.ArrayType(n, dtype[n]) + fields = fields & self.awkward0.type.ArrayType(n, dtype[n]) if shape == (): return fields else: - return self.awkward.type.ArrayType(*(shape + (fields,))) + return self.awkward0.type.ArrayType(*(shape 
+ (fields,))) def empty(self): - return self.awkward.Table.fromrec(self.content.empty()) + return self.awkward0.Table.fromrec(self.content.empty()) def compatible(self, other): return isinstance(other, astable) and self.content.compatible(other.content) @@ -218,14 +218,14 @@ def clip(self, destination, itemstart, itemstop, entrystart, entrystop): return self.content.clip(destination, itemstart, itemstop, entrystart, entrystop) def finalize(self, destination, branch): - out = self.awkward.Table.fromrec(self.content.finalize(destination, branch)) + out = self.awkward0.Table.fromrec(self.content.finalize(destination, branch)) if self.debug_reading: print("reading {0}".format(repr(out))) return out -class asobj(uproot.interp.interp.Interpretation): +class asobj(uproot3.interp.interp.Interpretation): # makes __doc__ attribute mutable before Python 3.3 - __metaclass__ = type.__new__(type, "type", (uproot.interp.interp.Interpretation.__metaclass__,), {}) + __metaclass__ = type.__new__(type, "type", (uproot3.interp.interp.Interpretation.__metaclass__,), {}) def __init__(self, content, cls): self.content = content @@ -272,16 +272,16 @@ def clip(self, destination, itemstart, itemstop, entrystart, entrystop): def finalize(self, destination, branch): if self.cls._arraymethods is None: - out = self.awkward.ObjectArray(self.content.finalize(destination, branch), self.cls._fromrow) + out = self.awkward0.ObjectArray(self.content.finalize(destination, branch), self.cls._fromrow) else: - cls = self.awkward.Methods.mixin(self.cls._arraymethods, self.awkward.ObjectArray) + cls = self.awkward0.Methods.mixin(self.cls._arraymethods, self.awkward0.ObjectArray) out = cls.__new__(cls) out._initObjectArray(self.content.finalize(destination, branch)) if self.debug_reading: print("reading {0}".format(repr(out))) return out -class _variable(uproot.interp.interp.Interpretation): +class _variable(uproot3.interp.interp.Interpretation): def __init__(self, content, generator, *args, **kwargs): self.content = content self.generator = generator @@ -300,7 +300,7 @@ def type(self): return self.generator def empty(self): - return self.awkward.ObjectArray(self.content.empty(), self.generator, *self.args, **self.kwargs) + return self.awkward0.ObjectArray(self.content.empty(), self.generator, *self.args, **self.kwargs) def compatible(self, other): return isinstance(other, _variable) and self.content.compatible(other) and self.generator == other.generator and self.args == other.args and self.kwargs == other.kwargs @@ -324,7 +324,7 @@ def clip(self, destination, itemstart, itemstop, entrystart, entrystop): return self.content.clip(destination, itemstart, itemstop, entrystart, entrystop) def finalize(self, destination, branch): - out = self.awkward.ObjectArray(self.content.finalize(destination, branch), self.generator, *self.args, **self.kwargs) + out = self.awkward0.ObjectArray(self.content.finalize(destination, branch), self.generator, *self.args, **self.kwargs) if self.debug_reading: print("reading {0}".format(repr(out))) return out @@ -337,7 +337,7 @@ def fromroot(self, data, byteoffsets, local_entrystart, local_entrystop, keylen) def destination(self, numitems, numentries): out = self.content.destination(numitems, numentries) - out.byteoffsets = self.awkward.numpy.empty(numentries, dtype=self.awkward.numpy.int32) + out.byteoffsets = self.awkward0.numpy.empty(numentries, dtype=self.awkward0.numpy.int32) return out def fill(self, source, destination, itemstart, itemstop, entrystart, entrystop): @@ -350,7 +350,7 @@ def clip(self, 
destination, itemstart, itemstop, entrystart, entrystop): return out def finalize(self, destination, branch): - out = self.awkward.ObjectArray(JaggedWithByteOffsets(self.content.finalize(destination, branch), destination.byteoffsets), self.generator, *self.args, **self.kwargs) + out = self.awkward0.ObjectArray(JaggedWithByteOffsets(self.content.finalize(destination, branch), destination.byteoffsets), self.generator, *self.args, **self.kwargs) if self.debug_reading: print("reading {0}".format(repr(out))) return out @@ -376,8 +376,8 @@ def __init__(self, cls, context): self.context = context def __call__(self, arg): bytes, origin = arg - source = uproot.source.source.Source(bytes) - cursor = uproot.source.cursor.Cursor(0, origin=origin) + source = uproot3.source.source.Source(bytes) + cursor = uproot3.source.cursor.Cursor(0, origin=origin) return self.cls.read(source, cursor, self.context, None) def __repr__(self): if isinstance(self.cls, type): @@ -386,7 +386,7 @@ def __repr__(self): return repr(self.cls) def __init__(self, cls, context, skipbytes): - super(asgenobj, self).__init__(uproot.interp.jagged.asjagged(uproot.interp.numerical.asdtype(self.awkward.ObjectArray.CHARTYPE), skipbytes=skipbytes), asgenobj._Wrapper(cls, context)) + super(asgenobj, self).__init__(uproot3.interp.jagged.asjagged(uproot3.interp.numerical.asdtype(self.awkward0.ObjectArray.CHARTYPE), skipbytes=skipbytes), asgenobj._Wrapper(cls, context)) def speedbump(self, value): out = copy.copy(self) @@ -406,7 +406,7 @@ class asstring(_variable): __metaclass__ = type.__new__(type, "type", (_variable.__metaclass__,), {}) def __init__(self, skipbytes=1): - super(asstring, self).__init__(uproot.interp.jagged.asjagged(uproot.interp.numerical.asdtype(self.awkward.ObjectArray.CHARTYPE), skipbytes=skipbytes), lambda array: _tobytes(array)) + super(asstring, self).__init__(uproot3.interp.jagged.asjagged(uproot3.interp.numerical.asdtype(self.awkward0.ObjectArray.CHARTYPE), skipbytes=skipbytes), lambda array: _tobytes(array)) def __repr__(self): return "asstring({0})".format("" if self.content.skipbytes == 1 else repr(self.content.skipbytes)) diff --git a/uproot3/pandas.py b/uproot3/pandas.py new file mode 100644 index 00000000..90e04870 --- /dev/null +++ b/uproot3/pandas.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python + +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE + +"""Top-level functions for Pandas.""" +from __future__ import absolute_import + +import uproot3.tree +from uproot3.source.memmap import MemmapSource +from uproot3.source.xrootd import XRootDSource +from uproot3.source.http import HTTPSource + +def iterate(path, treepath, branches=None, entrysteps=None, namedecode="utf-8", reportpath=False, reportfile=False, flatten=True, flatname=None, awkwardlib=None, cache=None, basketcache=None, keycache=None, executor=None, blocking=True, localsource=MemmapSource.defaults, xrootdsource=XRootDSource.defaults, httpsource=HTTPSource.defaults, **options): + import pandas + return uproot3.tree.iterate(path, treepath, branches=branches, entrysteps=entrysteps, outputtype=pandas.DataFrame, namedecode=namedecode, reportpath=reportpath, reportfile=reportfile, reportentries=False, flatten=flatten, flatname=flatname, awkwardlib=awkwardlib, cache=cache, basketcache=basketcache, keycache=keycache, executor=executor, blocking=blocking, localsource=localsource, xrootdsource=xrootdsource, httpsource=httpsource, **options) diff --git a/uproot/rootio.py b/uproot3/rootio.py similarity index 91% rename from 
uproot/rootio.py rename to uproot3/rootio.py index 6afee71e..8031258d 100644 --- a/uproot/rootio.py +++ b/uproot3/rootio.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE from __future__ import absolute_import @@ -17,13 +17,13 @@ import numpy -import uproot.const -import uproot.source.compressed -from uproot.source.memmap import MemmapSource -from uproot.source.xrootd import XRootDSource -from uproot.source.http import HTTPSource -from uproot.source.cursor import Cursor -from uproot._util import _tobytes +import uproot3.const +import uproot3.source.compressed +from uproot3.source.memmap import MemmapSource +from uproot3.source.xrootd import XRootDSource +from uproot3.source.http import HTTPSource +from uproot3.source.cursor import Cursor +from uproot3._util import _tobytes import uproot_methods.classes @@ -150,7 +150,7 @@ def read(source, *args, **options): "TObjString": TObjString} if read_streamers and fSeekInfo != 0: - streamercontext = ROOTDirectory._FileContext(source.path, None, None, streamerclasses, uproot.source.compressed.Compression(fCompress), tfile) + streamercontext = ROOTDirectory._FileContext(source.path, None, None, streamerclasses, uproot3.source.compressed.Compression(fCompress), tfile) streamerkey = TKey.read(source, Cursor(fSeekInfo), streamercontext, None) streamerinfos, streamerinfosmap, streamerrules = _readstreamers(streamerkey._source, streamerkey._cursor, streamercontext, None) else: @@ -159,7 +159,7 @@ def read(source, *args, **options): classes = dict(globals()) classes.update(builtin_classes) classes = _defineclasses(streamerinfos, classes) - context = ROOTDirectory._FileContext(source.path, streamerinfos, streamerinfosmap, classes, uproot.source.compressed.Compression(fCompress), tfile) + context = ROOTDirectory._FileContext(source.path, streamerinfos, streamerinfosmap, classes, uproot3.source.compressed.Compression(fCompress), tfile) context.source = source keycursor = Cursor(fBEGIN) @@ -439,8 +439,8 @@ def _bytesid(x): def _startcheck(source, cursor): start = cursor.index cnt, vers = cursor.fields(source, _startcheck._format_cntvers) - if numpy.int64(cnt) & uproot.const.kByteCountMask: - cnt = int(numpy.int64(cnt) & ~uproot.const.kByteCountMask) + if numpy.int64(cnt) & uproot3.const.kByteCountMask: + cnt = int(numpy.int64(cnt) & ~uproot3.const.kByteCountMask) return start, cnt + 4, vers else: cursor.index = start @@ -457,11 +457,11 @@ def _endcheck(start, cursor, cnt): def _skiptobj(source, cursor): version = cursor.field(source, _skiptobj._format1) - if numpy.int64(version) & uproot.const.kByteCountVMask: + if numpy.int64(version) & uproot3.const.kByteCountVMask: cursor.skip(4) fUniqueID, fBits = cursor.fields(source, _skiptobj._format2) - fBits = numpy.uint32(fBits) | uproot.const.kIsOnHeap - if fBits & uproot.const.kIsReferenced: + fBits = numpy.uint32(fBits) | uproot3.const.kIsOnHeap + if fBits & uproot3.const.kIsReferenced: cursor.skip(2) _skiptobj._format1 = struct.Struct(">h") _skiptobj._format2 = struct.Struct(">II") @@ -493,7 +493,7 @@ def _readobjany(source, cursor, context, parent, asclass=None): beg = cursor.index - cursor.origin bcnt = cursor.field(source, struct.Struct(">I")) - if numpy.int64(bcnt) & uproot.const.kByteCountMask == 0 or numpy.int64(bcnt) == uproot.const.kNewClassTag: + if numpy.int64(bcnt) & uproot3.const.kByteCountMask == 0 or numpy.int64(bcnt) == 
uproot3.const.kNewClassTag: vers = 0 start = 0 tag = bcnt @@ -503,7 +503,7 @@ def _readobjany(source, cursor, context, parent, asclass=None): start = cursor.index - cursor.origin tag = cursor.field(source, struct.Struct(">I")) - if numpy.int64(tag) & uproot.const.kClassMask == 0: + if numpy.int64(tag) & uproot3.const.kClassMask == 0: # reference object if tag == 0: return None # return null @@ -519,14 +519,14 @@ def _readobjany(source, cursor, context, parent, asclass=None): else: return cursor.refs[tag] # return object - elif tag == uproot.const.kNewClassTag: + elif tag == uproot3.const.kNewClassTag: # new class and object cname = _safename(cursor.cstring(source)) fct = context.classes.get(cname, Undefined) if vers > 0: - cursor.refs[start + uproot.const.kMapOffset] = fct + cursor.refs[start + uproot3.const.kMapOffset] = fct else: cursor.refs[len(cursor.refs) + 1] = fct @@ -538,7 +538,7 @@ def _readobjany(source, cursor, context, parent, asclass=None): obj = asclass.read(source, cursor, context, parent) # placeholder new object if vers > 0: - cursor.refs[beg + uproot.const.kMapOffset] = obj + cursor.refs[beg + uproot3.const.kMapOffset] = obj else: cursor.refs[len(cursor.refs) + 1] = obj @@ -546,7 +546,7 @@ def _readobjany(source, cursor, context, parent, asclass=None): else: # reference class, new object - ref = int(numpy.int64(tag) & ~uproot.const.kClassMask) + ref = int(numpy.int64(tag) & ~uproot3.const.kClassMask) if asclass is None: if ref not in cursor.refs: @@ -563,7 +563,7 @@ def _readobjany(source, cursor, context, parent, asclass=None): obj = asclass.read(source, cursor, context, parent) # placeholder new object if vers > 0: - cursor.refs[beg + uproot.const.kMapOffset] = obj + cursor.refs[beg + uproot3.const.kMapOffset] = obj else: cursor.refs[len(cursor.refs) + 1] = obj @@ -589,7 +589,7 @@ def _readstreamers(source, cursor, context, parent): for element in obj._fElements: if isinstance(element, TStreamerBase): dependencies.add(element._fName) - # if isinstance(element, (TStreamerObject, TStreamerObjectAny, TStreamerString)) or (isinstance(element, TStreamerObjectPointer) and element._fType == uproot.const.kObjectp): + # if isinstance(element, (TStreamerObject, TStreamerObjectAny, TStreamerString)) or (isinstance(element, TStreamerObjectPointer) and element._fType == uproot3.const.kObjectp): # dependencies.add(element._fTypeName.rstrip(b"*")) streamerinfos.append((obj, dependencies)) @@ -637,31 +637,31 @@ def topological_sort(items): return streamerinfos, streamerinfosmap, streamerrules def _ftype2dtype(fType): - if fType == uproot.const.kBool: + if fType == uproot3.const.kBool: return "numpy.dtype(numpy.bool_)" - elif fType == uproot.const.kChar: + elif fType == uproot3.const.kChar: return "numpy.dtype('i1')" - elif fType in (uproot.const.kUChar, uproot.const.kCharStar): + elif fType in (uproot3.const.kUChar, uproot3.const.kCharStar): return "numpy.dtype('u1')" - elif fType == uproot.const.kShort: + elif fType == uproot3.const.kShort: return "numpy.dtype('>i2')" - elif fType == uproot.const.kUShort: + elif fType == uproot3.const.kUShort: return "numpy.dtype('>u2')" - elif fType == uproot.const.kInt: + elif fType == uproot3.const.kInt: return "numpy.dtype('>i4')" - elif fType in (uproot.const.kBits, uproot.const.kUInt, uproot.const.kCounter): + elif fType in (uproot3.const.kBits, uproot3.const.kUInt, uproot3.const.kCounter): return "numpy.dtype('>u4')" - elif fType == uproot.const.kLong: + elif fType == uproot3.const.kLong: return "numpy.dtype(numpy.long).newbyteorder('>')" - 
elif fType == uproot.const.kULong: + elif fType == uproot3.const.kULong: return "numpy.dtype('>u' + repr(numpy.dtype(numpy.long).itemsize))" - elif fType == uproot.const.kLong64: + elif fType == uproot3.const.kLong64: return "numpy.dtype('>i8')" - elif fType == uproot.const.kULong64: + elif fType == uproot3.const.kULong64: return "numpy.dtype('>u8')" - elif fType in (uproot.const.kFloat, uproot.const.kFloat16): + elif fType in (uproot3.const.kFloat, uproot3.const.kFloat16): return "numpy.dtype('>f4')" - elif fType in (uproot.const.kDouble, uproot.const.kDouble32): + elif fType in (uproot3.const.kDouble, uproot3.const.kDouble32): return "numpy.dtype('>f8')" else: return "None" @@ -676,31 +676,31 @@ def _longsize(issigned): return "q" if issigned else "Q" def _ftype2struct(fType): - if fType == uproot.const.kBool: + if fType == uproot3.const.kBool: return "?" - elif fType == uproot.const.kChar: + elif fType == uproot3.const.kChar: return "b" - elif fType in (uproot.const.kUChar, uproot.const.kCharStar): + elif fType in (uproot3.const.kUChar, uproot3.const.kCharStar): return "B" - elif fType == uproot.const.kShort: + elif fType == uproot3.const.kShort: return "h" - elif fType == uproot.const.kUShort: + elif fType == uproot3.const.kUShort: return "H" - elif fType == uproot.const.kInt: + elif fType == uproot3.const.kInt: return "i" - elif fType in (uproot.const.kBits, uproot.const.kUInt, uproot.const.kCounter): + elif fType in (uproot3.const.kBits, uproot3.const.kUInt, uproot3.const.kCounter): return "I" - elif fType == uproot.const.kLong: + elif fType == uproot3.const.kLong: return _longsize(True) - elif fType == uproot.const.kULong: + elif fType == uproot3.const.kULong: return _longsize(False) - elif fType == uproot.const.kLong64: + elif fType == uproot3.const.kLong64: return "q" - elif fType == uproot.const.kULong64: + elif fType == uproot3.const.kULong64: return "Q" - elif fType in (uproot.const.kFloat, uproot.const.kFloat16): + elif fType in (uproot3.const.kFloat, uproot3.const.kFloat16): return "f" - elif fType in (uproot.const.kDouble, uproot.const.kDouble32): + elif fType in (uproot3.const.kDouble, uproot3.const.kDouble32): return "d" else: raise NotImplementedError(fType) @@ -759,8 +759,8 @@ def _defineclasses(streamerinfos, classes): bases.append(_safename(element._fName)) elif isinstance(element, TStreamerBasicPointer): - assert uproot.const.kOffsetP < element._fType < uproot.const.kOffsetP + 20 - fType = element._fType - uproot.const.kOffsetP + assert uproot3.const.kOffsetP < element._fType < uproot3.const.kOffsetP + 20 + fType = element._fType - uproot3.const.kOffsetP dtypename = "_dtype{0}".format(len(dtypes) + 1) dtypes[dtypename] = _ftype2dtype(fType) @@ -817,14 +817,14 @@ def _defineclasses(streamerinfos, classes): " self._{0} = {1}.read(source, cursor, context, self)".format(_safename(element._fName), _safename(element._fTypeName.rstrip(b"*")))]) elif isinstance(element, (TStreamerObjectAnyPointer, TStreamerObjectPointer)): - if element._fType == uproot.const.kObjectp or element._fType == uproot.const.kAnyp: + if element._fType == uproot3.const.kObjectp or element._fType == uproot3.const.kAnyp: if pyclassname in skip and _safename(element._fName) in skip[pyclassname]: code.append(" Undefined.read(source, cursor, context, self)") else: code.append(" self._{0} = {1}.read(source, cursor, context, self)".format(_safename(element._fName), _safename(element._fTypeName.rstrip(b"*")))) fields.append(_safename(element._fName)) 
recarray.append("out.extend({0}._recarray())".format(_safename(element._fName))) - elif element._fType == uproot.const.kObjectP or element._fType == uproot.const.kAnyP: + elif element._fType == uproot3.const.kObjectP or element._fType == uproot3.const.kAnyP: if pyclassname in skip and _safename(element._fName) in skip[pyclassname]: code.append(" _readobjany(source, cursor, context, parent, asclass=Undefined)") hasreadobjany = True @@ -838,57 +838,57 @@ def _defineclasses(streamerinfos, classes): recarray.append("raise ValueError('not a recarray')") elif isinstance(element, TStreamerSTL): - if element._fSTLtype == uproot.const.kSTLstring or element._fTypeName == b"string": + if element._fSTLtype == uproot3.const.kSTLstring or element._fTypeName == b"string": code.append(" cursor.skip(6)") code.append(" self._{0} = cursor.string(source)".format(_safename(element._fName))) fields.append(_safename(element._fName)) - elif (element._fSTLtype == uproot.const.kSTLvector and element._fCtype == uproot.const.kBool) or element._fTypeName == b"vector" or element._fTypeName == b"vector": + elif (element._fSTLtype == uproot3.const.kSTLvector and element._fCtype == uproot3.const.kBool) or element._fTypeName == b"vector" or element._fTypeName == b"vector": code.append(" cursor.skip(6)") code.append(" self._{0} = cursor.array(source, cursor.field(source, self._int32), '?')".format(_safename(element._fName))) fields.append(_safename(element._fName)) - elif (element._fSTLtype == uproot.const.kSTLvector and element._fCtype == uproot.const.kChar) or element._fTypeName == b"vector" or element._fTypeName == b"vector": + elif (element._fSTLtype == uproot3.const.kSTLvector and element._fCtype == uproot3.const.kChar) or element._fTypeName == b"vector" or element._fTypeName == b"vector": code.append(" cursor.skip(6)") code.append(" self._{0} = cursor.array(source, cursor.field(source, self._int32), 'i1')".format(_safename(element._fName))) fields.append(_safename(element._fName)) - elif (element._fSTLtype == uproot.const.kSTLvector and element._fCtype == uproot.const.kUChar) or element._fTypeName == b"vector" or element._fTypeName == b"vector" or element._fTypeName == b"vector": + elif (element._fSTLtype == uproot3.const.kSTLvector and element._fCtype == uproot3.const.kUChar) or element._fTypeName == b"vector" or element._fTypeName == b"vector" or element._fTypeName == b"vector": code.append(" cursor.skip(6)") code.append(" self._{0} = cursor.array(source, cursor.field(source, self._int32), 'u1')".format(_safename(element._fName))) fields.append(_safename(element._fName)) - elif (element._fSTLtype == uproot.const.kSTLvector and element._fCtype == uproot.const.kShort) or element._fTypeName == b"vector" or element._fTypeName == b"vector": + elif (element._fSTLtype == uproot3.const.kSTLvector and element._fCtype == uproot3.const.kShort) or element._fTypeName == b"vector" or element._fTypeName == b"vector": code.append(" cursor.skip(6)") code.append(" self._{0} = cursor.array(source, cursor.field(source, self._int32), '>i2')".format(_safename(element._fName))) fields.append(_safename(element._fName)) - elif (element._fSTLtype == uproot.const.kSTLvector and element._fCtype == uproot.const.kUShort) or element._fTypeName == b"vector" or element._fTypeName == b"vector": + elif (element._fSTLtype == uproot3.const.kSTLvector and element._fCtype == uproot3.const.kUShort) or element._fTypeName == b"vector" or element._fTypeName == b"vector": code.append(" cursor.skip(6)") code.append(" self._{0} = cursor.array(source, 
cursor.field(source, self._int32), '>u2')".format(_safename(element._fName))) fields.append(_safename(element._fName)) - elif (element._fSTLtype == uproot.const.kSTLvector and element._fCtype == uproot.const.kInt) or element._fTypeName == b"vector<int>" or element._fTypeName == b"vector<Int_t>": + elif (element._fSTLtype == uproot3.const.kSTLvector and element._fCtype == uproot3.const.kInt) or element._fTypeName == b"vector<int>" or element._fTypeName == b"vector<Int_t>": code.append(" cursor.skip(6)") code.append(" self._{0} = cursor.array(source, cursor.field(source, self._int32), '>i4')".format(_safename(element._fName))) fields.append(_safename(element._fName)) - elif (element._fSTLtype == uproot.const.kSTLvector and element._fCtype == uproot.const.kUInt) or element._fTypeName == b"vector<unsigned int>" or element._fTypeName == b"vector<UInt_t>": + elif (element._fSTLtype == uproot3.const.kSTLvector and element._fCtype == uproot3.const.kUInt) or element._fTypeName == b"vector<unsigned int>" or element._fTypeName == b"vector<UInt_t>": code.append(" cursor.skip(6)") code.append(" self._{0} = cursor.array(source, cursor.field(source, self._int32), '>u4')".format(_safename(element._fName))) fields.append(_safename(element._fName)) - elif (element._fSTLtype == uproot.const.kSTLvector and element._fCtype == uproot.const.kLong) or element._fTypeName == b"vector<long>" or element._fTypeName == b"vector<Long_t>": + elif (element._fSTLtype == uproot3.const.kSTLvector and element._fCtype == uproot3.const.kLong) or element._fTypeName == b"vector<long>" or element._fTypeName == b"vector<Long_t>": code.append(" cursor.skip(6)") code.append(" self._{0} = cursor.array(source, cursor.field(source, self._int32), '>i8')".format(_safename(element._fName))) fields.append(_safename(element._fName)) - elif (element._fSTLtype == uproot.const.kSTLvector and element._fCtype == uproot.const.kULong) or element._fTypeName == b"vector<unsigned long>" or element._fTypeName == b"vector<ULong_t>": + elif (element._fSTLtype == uproot3.const.kSTLvector and element._fCtype == uproot3.const.kULong) or element._fTypeName == b"vector<unsigned long>" or element._fTypeName == b"vector<ULong_t>": code.append(" cursor.skip(6)") code.append(" self._{0} = cursor.array(source, cursor.field(source, self._int32), '>u8')".format(_safename(element._fName))) fields.append(_safename(element._fName)) - elif (element._fSTLtype == uproot.const.kSTLvector and element._fCtype == uproot.const.kFloat) or element._fTypeName == b"vector<float>" or element._fTypeName == b"vector<Float_t>": + elif (element._fSTLtype == uproot3.const.kSTLvector and element._fCtype == uproot3.const.kFloat) or element._fTypeName == b"vector<float>" or element._fTypeName == b"vector<Float_t>": code.append(" cursor.skip(6)") code.append(" self._{0} = cursor.array(source, cursor.field(source, self._int32), '>f4')".format(_safename(element._fName))) fields.append(_safename(element._fName)) - elif (element._fSTLtype == uproot.const.kSTLvector and element._fCtype == uproot.const.kDouble) or element._fTypeName == b"vector<double>" or element._fTypeName == b"vector<Double_t>": + elif (element._fSTLtype == uproot3.const.kSTLvector and element._fCtype == uproot3.const.kDouble) or element._fTypeName == b"vector<double>" or element._fTypeName == b"vector<Double_t>": code.append(" cursor.skip(6)") code.append(" self._{0} = cursor.array(source, cursor.field(source, self._int32), '>f8')".format(_safename(element._fName))) fields.append(_safename(element._fName)) elif element._fTypeName == b"vector<string>": code.append(" cursor.skip(6)") - code.append(" self._{0} = uproot.interp.objects.STLVector(uproot.interp.objects.STLString()).read(source, cursor, context,
self)".format(_safename(element._fName))) + code.append(" self._{0} = uproot3.interp.objects.STLVector(uproot3.interp.objects.STLString()).read(source, cursor, context, self)".format(_safename(element._fName))) elif element._fTypeName == b"map": code.append(" self._{0} = _mapstrstr(source, cursor)".format(_safename(element._fName))) else: @@ -943,9 +943,9 @@ def _defineclasses(streamerinfos, classes): bases.append("ROOTStreamedObject") if pyclassname == "TTree": - bases.insert(0, "uproot.tree.TTreeMethods") + bases.insert(0, "uproot3.tree.TTreeMethods") if pyclassname == "TBranch": - bases.insert(0, "uproot.tree.TBranchMethods") + bases.insert(0, "uproot3.tree.TBranchMethods") if uproot_methods.classes.hasmethods(pyclassname): bases.insert(0, "uproot_methods.classes.{0}.Methods".format(pyclassname)) @@ -1026,7 +1026,7 @@ def _readinto(cls, self, source, cursor, context, parent): # object size != compressed size means it's compressed if self._fObjlen != self._fNbytes - self._fKeylen: - self._source = uproot.source.compressed.CompressedSource(context.compression, source, Cursor(self._fSeekKey + self._fKeylen), self._fNbytes - self._fKeylen, self._fObjlen) + self._source = uproot3.source.compressed.CompressedSource(context.compression, source, Cursor(self._fSeekKey + self._fKeylen), self._fNbytes - self._fKeylen, self._fObjlen) self._cursor = Cursor(0, origin=-self._fKeylen) # otherwise, it's uncompressed @@ -1221,23 +1221,23 @@ def _readinto(cls, self, source, cursor, context, parent): start, cnt, self._classversion = _startcheck(source, cursor) super(TStreamerBasicType, self)._readinto(self, source, cursor, context, parent) - if uproot.const.kOffsetL < self._fType < uproot.const.kOffsetP: - self._fType -= uproot.const.kOffsetL + if uproot3.const.kOffsetL < self._fType < uproot3.const.kOffsetP: + self._fType -= uproot3.const.kOffsetL basic = True - if self._fType in (uproot.const.kBool, uproot.const.kUChar, uproot.const.kChar): + if self._fType in (uproot3.const.kBool, uproot3.const.kUChar, uproot3.const.kChar): self._fSize = 1 - elif self._fType in (uproot.const.kUShort, uproot.const.kShort): + elif self._fType in (uproot3.const.kUShort, uproot3.const.kShort): self._fSize = 2 - elif self._fType in (uproot.const.kBits, uproot.const.kUInt, uproot.const.kInt, uproot.const.kCounter): + elif self._fType in (uproot3.const.kBits, uproot3.const.kUInt, uproot3.const.kInt, uproot3.const.kCounter): self._fSize = 4 - elif self._fType in (uproot.const.kULong, uproot.const.kULong64, uproot.const.kLong, uproot.const.kLong64): + elif self._fType in (uproot3.const.kULong, uproot3.const.kULong64, uproot3.const.kLong, uproot3.const.kLong64): self._fSize = 8 - elif self._fType in (uproot.const.kFloat, uproot.const.kFloat16): + elif self._fType in (uproot3.const.kFloat, uproot3.const.kFloat16): self._fSize = 4 - elif self._fType in (uproot.const.kDouble, uproot.const.kDouble32): + elif self._fType in (uproot3.const.kDouble, uproot3.const.kDouble32): self._fSize = 8 - elif self._fType == uproot.const.kCharStar: + elif self._fType == uproot3.const.kCharStar: self._fSize = numpy.dtype(numpy.intp).itemsize else: basic = False @@ -1319,11 +1319,11 @@ def _readinto(cls, self, source, cursor, context, parent): self._fSTLtype, self._fCtype = cursor.fields(source, TStreamerSTL._format) - if self._fSTLtype == uproot.const.kSTLmultimap or self._fSTLtype == uproot.const.kSTLset: + if self._fSTLtype == uproot3.const.kSTLmultimap or self._fSTLtype == uproot3.const.kSTLset: if self._fTypeName.startswith(b"std::set") or 
self._fTypeName.startswith(b"set"): - self._fSTLtype = uproot.const.kSTLset + self._fSTLtype = uproot3.const.kSTLset elif self._fTypeName.startswith(b"std::multimap") or self._fTypeName.startswith(b"multimap"): - self._fSTLtype = uproot.const.kSTLmultimap + self._fSTLtype = uproot3.const.kSTLmultimap _endcheck(start, cursor, cnt) return self @@ -1331,7 +1331,7 @@ def _readinto(cls, self, source, cursor, context, parent): @classmethod def vector(cls, fType, fTypeName): self = cls.__new__(cls) - self._fSTLtype = uproot.const.kSTLvector + self._fSTLtype = uproot3.const.kSTLvector self._fCtype = fType self._fTypeName = b"vector<" + fTypeName + b">" return self diff --git a/uproot/interp/__init__.py b/uproot3/source/__init__.py similarity index 59% rename from uproot/interp/__init__.py rename to uproot3/source/__init__.py index 9f2be71d..d0cdd065 100644 --- a/uproot/interp/__init__.py +++ b/uproot3/source/__init__.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE -from __future__ import absolute_import \ No newline at end of file +from __future__ import absolute_import diff --git a/uproot/source/chunked.py b/uproot3/source/chunked.py similarity index 93% rename from uproot/source/chunked.py rename to uproot3/source/chunked.py index 4387751d..e52f1859 100644 --- a/uproot/source/chunked.py +++ b/uproot3/source/chunked.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE from __future__ import absolute_import @@ -8,16 +8,16 @@ import numpy -import uproot.cache -import uproot.source.source +import uproot3.cache +import uproot3.source.source -class ChunkedSource(uproot.source.source.Source): +class ChunkedSource(uproot3.source.source.Source): # makes __doc__ attribute mutable before Python 3.3 - __metaclass__ = type.__new__(type, "type", (uproot.source.source.Source.__metaclass__,), {}) + __metaclass__ = type.__new__(type, "type", (uproot3.source.source.Source.__metaclass__,), {}) def __init__(self, path, chunkbytes, limitbytes, parallel): - from uproot.rootio import _memsize + from uproot3.rootio import _memsize m = _memsize(chunkbytes) if m is not None: chunkbytes = int(math.ceil(m)) @@ -30,7 +30,7 @@ def __init__(self, path, chunkbytes, limitbytes, parallel): if limitbytes is None: self.cache = {} else: - self.cache = uproot.cache.ThreadSafeArrayCache(limitbytes) + self.cache = uproot3.cache.ThreadSafeArrayCache(limitbytes) self._source = None self._setup_futures(parallel) diff --git a/uproot/source/compressed.py b/uproot3/source/compressed.py similarity index 85% rename from uproot/source/compressed.py rename to uproot3/source/compressed.py index ff51bfaa..46347aa5 100644 --- a/uproot/source/compressed.py +++ b/uproot3/source/compressed.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE from __future__ import absolute_import @@ -9,17 +9,17 @@ import numpy -import uproot.const -import uproot.source.source +import uproot3.const +import uproot3.source.source class Compression(object): # makes __doc__ attribute mutable before Python 3.3 __metaclass__ = type.__new__(type, "type", (type,), {}) def __init__(self, 
fCompress): - self.algo = max(fCompress // 100, uproot.const.kZLIB) + self.algo = max(fCompress // 100, uproot3.const.kZLIB) self.level = fCompress % 100 - if not uproot.const.kZLIB <= self.algo < uproot.const.kUndefinedCompressionAlgorithm: + if not uproot3.const.kZLIB <= self.algo < uproot3.const.kUndefinedCompressionAlgorithm: raise ValueError("unrecognized compression algorithm: {0} (from fCompress {1})".format(self.algo, fCompress)) if not 0 <= self.level <= 9: raise ValueError("unrecognized compression level: {0} (from fCompress {1})".format(self.level, fCompress)) @@ -38,15 +38,15 @@ def copy(self, algo=None, level=None): @property def algoname(self): - if self.algo == uproot.const.kZLIB: + if self.algo == uproot3.const.kZLIB: return "zlib" - elif self.algo == uproot.const.kLZMA: + elif self.algo == uproot3.const.kLZMA: return "lzma" - elif self.algo == uproot.const.kOldCompressionAlgo: + elif self.algo == uproot3.const.kOldCompressionAlgo: return "old" - elif self.algo == uproot.const.kLZ4: + elif self.algo == uproot3.const.kLZ4: return "lz4" - elif self.algo == uproot.const.kZSTD: + elif self.algo == uproot3.const.kZSTD: return "zstd" else: raise ValueError("unrecognized compression algorithm: {0}".format(self.algo)) @@ -55,11 +55,11 @@ def __repr__(self): return "<Compression {0} {1}>".format(repr(self.algoname), self.level) def decompress(self, source, cursor, compressedbytes, uncompressedbytes=None): - if self.algo == uproot.const.kZLIB: + if self.algo == uproot3.const.kZLIB: from zlib import decompress as zlib_decompress return zlib_decompress(cursor.bytes(source, compressedbytes)) - elif self.algo == uproot.const.kLZMA: + elif self.algo == uproot3.const.kLZMA: try: from lzma import decompress as lzma_decompress except ImportError: @@ -69,10 +69,10 @@ def decompress(self, source, cursor, compressedbytes, uncompressedbytes=None): raise ImportError("install lzma package with:\n pip install backports.lzma\nor\n conda install backports.lzma\n(or just use Python >= 3.3).") return lzma_decompress(cursor.bytes(source, compressedbytes)) - elif self.algo == uproot.const.kOldCompressionAlgo: + elif self.algo == uproot3.const.kOldCompressionAlgo: raise NotImplementedError("ROOT's \"old\" algorithm (fCompress 300) is not supported") - elif self.algo == uproot.const.kLZ4: + elif self.algo == uproot3.const.kLZ4: try: from lz4.block import decompress as lz4_decompress except ImportError: @@ -82,7 +82,7 @@ def decompress(self, source, cursor, compressedbytes, uncompressedbytes=None): raise ValueError("lz4 needs to know the uncompressed number of bytes") return lz4_decompress(cursor.bytes(source, compressedbytes), uncompressed_size=uncompressedbytes) - elif self.algo == uproot.const.kZSTD: + elif self.algo == uproot3.const.kZSTD: try: import zstandard as zstd except ImportError: @@ -93,9 +93,9 @@ def decompress(self, source, cursor, compressedbytes, uncompressedbytes=None): else: raise ValueError("unrecognized compression algorithm: {0}".format(self.algo)) -class CompressedSource(uproot.source.source.Source): +class CompressedSource(uproot3.source.source.Source): # makes __doc__ attribute mutable before Python 3.3 - __metaclass__ = type.__new__(type, "type", (uproot.source.source.Source.__metaclass__,), {}) + __metaclass__ = type.__new__(type, "type", (uproot3.source.source.Source.__metaclass__,), {}) def __init__(self, compression, source, cursor, compressedbytes, uncompressedbytes): self.compression = compression @@ -134,15 +134,15 @@ def _prepare(self): uncompressedbytes = u1 + (u2 << 8) + (u3 << 16) if algo ==
b"ZL": - compression = self.compression.copy(uproot.const.kZLIB) + compression = self.compression.copy(uproot3.const.kZLIB) elif algo == b"XZ": - compression = self.compression.copy(uproot.const.kLZMA) + compression = self.compression.copy(uproot3.const.kLZMA) elif algo == b"L4": try: import xxhash except ImportError: raise ImportError("install xxhash package with:\n pip install xxhash\nor\n conda install python-xxhash") - compression = self.compression.copy(uproot.const.kLZ4) + compression = self.compression.copy(uproot3.const.kLZ4) compressedbytes -= 8 checksum = cursor.field(self._compressed, self._format_field0) copy_cursor = copy(cursor) @@ -150,7 +150,7 @@ def _prepare(self): if xxhash.xxh64(after_compressed).intdigest() != checksum: raise ValueError("LZ4 checksum didn't match") elif algo == b"ZS": - compression = self.compression.copy(uproot.const.kZSTD) + compression = self.compression.copy(uproot3.const.kZSTD) elif algo == b"CS": raise ValueError("unsupported compression algorithm: 'old' (according to ROOT comments, hasn't been used in 20+ years!)") else: diff --git a/uproot/source/cursor.py b/uproot3/source/cursor.py similarity index 98% rename from uproot/source/cursor.py rename to uproot3/source/cursor.py index a9a6e10c..e17bd201 100644 --- a/uproot/source/cursor.py +++ b/uproot3/source/cursor.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE from __future__ import absolute_import @@ -9,7 +9,7 @@ import numpy -from uproot._util import _tobytes +from uproot3._util import _tobytes class Cursor(object): # makes __doc__ attribute mutable before Python 3.3 diff --git a/uproot/source/file.py b/uproot3/source/file.py similarity index 87% rename from uproot/source/file.py rename to uproot3/source/file.py index 98cf041a..d75476c6 100644 --- a/uproot/source/file.py +++ b/uproot3/source/file.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE from __future__ import absolute_import @@ -10,11 +10,11 @@ import numpy -import uproot.source.chunked +import uproot3.source.chunked -class FileSource(uproot.source.chunked.ChunkedSource): +class FileSource(uproot3.source.chunked.ChunkedSource): # makes __doc__ attribute mutable before Python 3.3 - __metaclass__ = type.__new__(type, "type", (uproot.source.chunked.ChunkedSource.__metaclass__,), {}) + __metaclass__ = type.__new__(type, "type", (uproot3.source.chunked.ChunkedSource.__metaclass__,), {}) defaults = {"chunkbytes": 8*1024, "limitbytes": 1024**2, "parallel": 8*multiprocessing.cpu_count() if sys.version_info[0] > 2 else 1} diff --git a/uproot/source/http.py b/uproot3/source/http.py similarity index 90% rename from uproot/source/http.py rename to uproot3/source/http.py index d9df3eff..06979cdb 100644 --- a/uproot/source/http.py +++ b/uproot3/source/http.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE from __future__ import absolute_import @@ -11,11 +11,11 @@ import numpy -import uproot.source.chunked +import uproot3.source.chunked -class HTTPSource(uproot.source.chunked.ChunkedSource): +class HTTPSource(uproot3.source.chunked.ChunkedSource): # makes __doc__ 
attribute mutable before Python 3.3 - __metaclass__ = type.__new__(type, "type", (uproot.source.chunked.ChunkedSource.__metaclass__,), {}) + __metaclass__ = type.__new__(type, "type", (uproot3.source.chunked.ChunkedSource.__metaclass__,), {}) def __init__(self, path, auth=None, *args, **kwds): super(HTTPSource, self).__init__(path, *args, **kwds) diff --git a/uproot/source/memmap.py b/uproot3/source/memmap.py similarity index 86% rename from uproot/source/memmap.py rename to uproot3/source/memmap.py index 379f1248..046331ff 100644 --- a/uproot/source/memmap.py +++ b/uproot3/source/memmap.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE from __future__ import absolute_import @@ -8,11 +8,11 @@ import numpy -import uproot.source.source +import uproot3.source.source -class MemmapSource(uproot.source.source.Source): +class MemmapSource(uproot3.source.source.Source): # makes __doc__ attribute mutable before Python 3.3 - __metaclass__ = type.__new__(type, "type", (uproot.source.source.Source.__metaclass__,), {}) + __metaclass__ = type.__new__(type, "type", (uproot3.source.source.Source.__metaclass__,), {}) defaults = {} diff --git a/uproot/source/source.py b/uproot3/source/source.py similarity index 98% rename from uproot/source/source.py rename to uproot3/source/source.py index 94438c11..3e175888 100644 --- a/uproot/source/source.py +++ b/uproot3/source/source.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE from __future__ import absolute_import diff --git a/uproot/source/xrootd.py b/uproot3/source/xrootd.py similarity index 93% rename from uproot/source/xrootd.py rename to uproot3/source/xrootd.py index d5f25c42..2e97691b 100644 --- a/uproot/source/xrootd.py +++ b/uproot3/source/xrootd.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE from __future__ import absolute_import @@ -9,11 +9,11 @@ import numpy -import uproot.source.chunked +import uproot3.source.chunked -class XRootDSource(uproot.source.chunked.ChunkedSource): +class XRootDSource(uproot3.source.chunked.ChunkedSource): # makes __doc__ attribute mutable before Python 3.3 - __metaclass__ = type.__new__(type, "type", (uproot.source.chunked.ChunkedSource.__metaclass__,), {}) + __metaclass__ = type.__new__(type, "type", (uproot3.source.chunked.ChunkedSource.__metaclass__,), {}) def __init__(self, path, timeout=None, *args, **kwds): self._size = None @@ -24,7 +24,7 @@ def __init__(self, path, timeout=None, *args, **kwds): def _open(self): try: - os.environ["XRD_RUNFORKHANDLER"] = "1" # To make uproot + xrootd + multiprocessing work + os.environ["XRD_RUNFORKHANDLER"] = "1" # To make uproot3 + xrootd + multiprocessing work import pyxrootd.client except ImportError: raise ImportError("Install pyxrootd package with:\n conda install -c conda-forge xrootd\n(or download from http://xrootd.org/dload.html and manually compile with cmake; setting PYTHONPATH and LD_LIBRARY_PATH appropriately).") diff --git a/uproot/tree.py b/uproot3/tree.py similarity index 88% rename from uproot/tree.py rename to uproot3/tree.py index 2c7e87e5..24dab27b 100644 --- a/uproot/tree.py 
+++ b/uproot3/tree.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE from __future__ import absolute_import @@ -27,23 +27,23 @@ import numpy import cachetools -import awkward +import awkward0 import uproot_methods.profiles -import uproot.rootio -from uproot.rootio import _bytesid -from uproot.rootio import _memsize -from uproot.rootio import nofilter -from uproot.rootio import _safename -from uproot.interp.auto import interpret -from uproot.interp.numerical import asdtype -from uproot.interp.jagged import asjagged -from uproot.interp.objects import asobj -from uproot.interp.objects import asgenobj -from uproot.source.cursor import Cursor -from uproot.source.memmap import MemmapSource -from uproot.source.xrootd import XRootDSource -from uproot.source.http import HTTPSource +import uproot3.rootio +from uproot3.rootio import _bytesid +from uproot3.rootio import _memsize +from uproot3.rootio import nofilter +from uproot3.rootio import _safename +from uproot3.interp.auto import interpret +from uproot3.interp.numerical import asdtype +from uproot3.interp.jagged import asjagged +from uproot3.interp.objects import asobj +from uproot3.interp.objects import asgenobj +from uproot3.source.cursor import Cursor +from uproot3.source.memmap import MemmapSource +from uproot3.source.xrootd import XRootDSource +from uproot3.source.http import HTTPSource if sys.version_info[0] <= 2: string_types = (unicode, str) @@ -88,7 +88,7 @@ def _filename_explode(x): def _normalize_awkwardlib(awkwardlib): if awkwardlib is None: - return awkward + return awkward0 elif isinstance(awkwardlib, str): return importlib.import_module(awkwardlib) else: @@ -115,9 +115,9 @@ def _normalize_entrystartstop(numentries, entrystart, entrystop): ################################################################ high-level interface def iterate(path, treepath, branches=None, entrysteps=float("inf"), outputtype=dict, namedecode=None, reportpath=False, reportfile=False, reportentries=False, flatten=False, flatname=None, awkwardlib=None, cache=None, basketcache=None, keycache=None, executor=None, blocking=True, localsource=MemmapSource.defaults, xrootdsource=XRootDSource.defaults, httpsource=HTTPSource.defaults, **options): - awkward = _normalize_awkwardlib(awkwardlib) - for tree, branchesinterp, globalentrystart, thispath, thisfile in _iterate(path, treepath, branches, awkward, localsource, xrootdsource, httpsource, **options): - for start, stop, arrays in tree.iterate(branches=branchesinterp, entrysteps=entrysteps, outputtype=outputtype, namedecode=namedecode, reportentries=True, entrystart=0, entrystop=tree.numentries, flatten=flatten, flatname=flatname, awkwardlib=awkward, cache=cache, basketcache=basketcache, keycache=keycache, executor=executor, blocking=blocking): + awkward0 = _normalize_awkwardlib(awkwardlib) + for tree, branchesinterp, globalentrystart, thispath, thisfile in _iterate(path, treepath, branches, awkward0, localsource, xrootdsource, httpsource, **options): + for start, stop, arrays in tree.iterate(branches=branchesinterp, entrysteps=entrysteps, outputtype=outputtype, namedecode=namedecode, reportentries=True, entrystart=0, entrystop=tree.numentries, flatten=flatten, flatname=flatname, awkwardlib=awkward0, cache=cache, basketcache=basketcache, keycache=keycache, executor=executor, blocking=blocking): if getattr(outputtype, "__name__", None) == "DataFrame" and 
getattr(outputtype, "__module__", None) == "pandas.core.frame": if type(arrays.index).__name__ == "MultiIndex": @@ -125,7 +125,7 @@ def iterate(path, treepath, branches=None, entrysteps=float("inf"), outputtype=d index = arrays.index.levels[0].array # pandas>=0.24.0 else: index = arrays.index.levels[0].values # pandas<0.24.0 - awkward.numpy.add(index, globalentrystart, out=index) + awkward0.numpy.add(index, globalentrystart, out=index) elif type(arrays.index).__name__ == "RangeIndex": if hasattr(arrays.index, "start") and hasattr(arrays.index, "stop"): @@ -141,7 +141,7 @@ def iterate(path, treepath, branches=None, entrysteps=float("inf"), outputtype=d index = arrays.index.array # pandas>=0.24.0 else: index = arrays.index.values # pandas<0.24.0 - awkward.numpy.add(index, globalentrystart, out=index) + awkward0.numpy.add(index, globalentrystart, out=index) out = (arrays,) if reportentries: @@ -155,7 +155,7 @@ def iterate(path, treepath, branches=None, entrysteps=float("inf"), outputtype=d else: yield out -def _iterate(path, treepath, branches, awkward, localsource, xrootdsource, httpsource, **options): +def _iterate(path, treepath, branches, awkward0, localsource, xrootdsource, httpsource, **options): if isinstance(path, string_types): paths = _filename_explode(path) else: @@ -163,13 +163,13 @@ def _iterate(path, treepath, branches, awkward, localsource, xrootdsource, https globalentrystart = 0 for path in paths: - file = uproot.rootio.open(path, localsource=localsource, xrootdsource=xrootdsource, httpsource=httpsource, **options) + file = uproot3.rootio.open(path, localsource=localsource, xrootdsource=xrootdsource, httpsource=httpsource, **options) try: tree = file[treepath] except KeyError: continue branchesinterp = OrderedDict() - for branch, interpretation in tree._normalize_branches(branches, awkward): + for branch, interpretation in tree._normalize_branches(branches, awkward0): branchesinterp[branch.name] = interpretation yield tree, branchesinterp, globalentrystart, path, file @@ -179,7 +179,7 @@ def _iterate(path, treepath, branches, awkward, localsource, xrootdsource, https class TTreeMethods(object): # makes __doc__ attribute mutable before Python 3.3 - __metaclass__ = type.__new__(type, "type", (uproot.rootio.ROOTObject.__metaclass__,), {}) + __metaclass__ = type.__new__(type, "type", (uproot3.rootio.ROOTObject.__metaclass__,), {}) _copycontext = True @@ -199,25 +199,25 @@ def _attachstreamer(self, branch, streamer, streamerinfosmap, isTClonesArray): else: if m.group(1) in streamerinfosmap: substreamer = streamerinfosmap[m.group(1)] - if isinstance(substreamer, uproot.rootio.TStreamerInfo): - streamer = uproot.rootio.TStreamerSTL.vector(None, substreamer._fName) + if isinstance(substreamer, uproot3.rootio.TStreamerInfo): + streamer = uproot3.rootio.TStreamerSTL.vector(None, substreamer._fName) else: - streamer = uproot.rootio.TStreamerSTL.vector(substreamer._fType, substreamer._fTypeName) + streamer = uproot3.rootio.TStreamerSTL.vector(substreamer._fType, substreamer._fTypeName) else: return - if isinstance(streamer, uproot.rootio.TStreamerInfo): - if len(streamer._fElements) == 1 and isinstance(streamer._fElements[0], uproot.rootio.TStreamerBase) and streamer._fElements[0]._fName == b"TObjArray": + if isinstance(streamer, uproot3.rootio.TStreamerInfo): + if len(streamer._fElements) == 1 and isinstance(streamer._fElements[0], uproot3.rootio.TStreamerBase) and streamer._fElements[0]._fName == b"TObjArray": if streamer._fName == b"TClonesArray": return self._attachstreamer(branch, 
streamerinfosmap.get(branch._fClonesName, None), streamerinfosmap, True) else: # FIXME: can only determine streamer by reading some values? return - elif len(streamer._fElements) == 1 and isinstance(streamer._fElements[0], uproot.rootio.TStreamerSTL) and streamer._fElements[0]._fName == b"This": + elif len(streamer._fElements) == 1 and isinstance(streamer._fElements[0], uproot3.rootio.TStreamerSTL) and streamer._fElements[0]._fName == b"This": return self._attachstreamer(branch, streamer._fElements[0], streamerinfosmap, isTClonesArray) - if isinstance(streamer, uproot.rootio.TStreamerObject): + if isinstance(streamer, uproot3.rootio.TStreamerObject): if streamer._fTypeName == b"TClonesArray": return self._attachstreamer(branch, streamerinfosmap.get(branch._fClonesName, None), streamerinfosmap, True) else: @@ -225,16 +225,16 @@ def _attachstreamer(self, branch, streamer, streamerinfosmap, isTClonesArray): branch._streamer = streamer branch._isTClonesArray = isTClonesArray - if isinstance(streamer, uproot.rootio.TStreamerSTL) and streamer._fSTLtype == uproot.const.kSTLvector: + if isinstance(streamer, uproot3.rootio.TStreamerSTL) and streamer._fSTLtype == uproot3.const.kSTLvector: branch._vecstreamer = streamerinfosmap.get(re.match(self._vector_regex, streamer._fTypeName).group(1), None) isTClonesArray = True else: branch._vecstreamer = None - digDeeperTypes = (uproot.rootio.TStreamerObject, uproot.rootio.TStreamerObjectAny, uproot.rootio.TStreamerObjectPointer, uproot.rootio.TStreamerObjectAnyPointer) + digDeeperTypes = (uproot3.rootio.TStreamerObject, uproot3.rootio.TStreamerObjectAny, uproot3.rootio.TStreamerObjectPointer, uproot3.rootio.TStreamerObjectAnyPointer) members = None - if isinstance(streamer, uproot.rootio.TStreamerInfo): + if isinstance(streamer, uproot3.rootio.TStreamerInfo): members = streamer.members elif isinstance(streamer, digDeeperTypes): typename = streamer._fTypeName.rstrip(b"*") @@ -243,7 +243,7 @@ def _attachstreamer(self, branch, streamer, streamerinfosmap, isTClonesArray): if typename == b'TClonesArray' and m is not None: typename = m.group(1) members = streamerinfosmap[typename].members - elif isinstance(streamer, uproot.rootio.TStreamerSTL): + elif isinstance(streamer, uproot3.rootio.TStreamerSTL): try: # FIXME: string manipulation only works for one-parameter templates typename = streamer._fTypeName[streamer._fTypeName.index(b"<") + 1 : streamer._fTypeName.rindex(b">")].rstrip(b"*") @@ -272,7 +272,7 @@ def _attachstreamer(self, branch, streamer, streamerinfosmap, isTClonesArray): try: submembers = streamerinfosmap[key].members except KeyError: - for regex, substitution in uproot.interp.auto.streamer_aliases: + for regex, substitution in uproot3.interp.auto.streamer_aliases: new_key, n_matched = regex.subn(substitution, key) if n_matched: submembers = streamerinfosmap[new_key].members @@ -406,7 +406,7 @@ def _get(self, name, recursive=True, filtername=nofilter, filtertitle=nofilter, if n == name: self._branchlookup[name] = b return b - raise uproot.rootio._KeyError("not found: {0}\n in file: {1}".format(repr(name), self._context.sourcepath)) + raise uproot3.rootio._KeyError("not found: {0}\n in file: {1}".format(repr(name), self._context.sourcepath)) def get(self, name, recursive=True, filtername=nofilter, filtertitle=nofilter, aliases=True): name = _bytesid(name) @@ -431,8 +431,8 @@ def mempartitions(self, numbytes, branches=None, entrystart=None, entrystop=None if numbytes <= 0: raise ValueError("target numbytes must be positive") - awkward = 
_normalize_awkwardlib(None) - branches = list(self._normalize_branches(branches, awkward)) + awkward0 = _normalize_awkwardlib(None) + branches = list(self._normalize_branches(branches, awkward0)) entrystart, entrystop = _normalize_entrystartstop(self.numentries, entrystart, entrystop) if not linear: @@ -459,8 +459,8 @@ def mempartitions(self, numbytes, branches=None, entrystart=None, entrystop=None start = stop def clusters(self, branches=None, entrystart=None, entrystop=None, strict=False): - awkward = _normalize_awkwardlib(None) - branches = list(self._normalize_branches(branches, awkward)) + awkward0 = _normalize_awkwardlib(None) + branches = list(self._normalize_branches(branches, awkward0)) # convenience class; simplifies presentation of the algorithm class BranchCursor(object): @@ -521,8 +521,8 @@ def entrystop(self): break def array(self, branch, interpretation=None, entrystart=None, entrystop=None, flatten=False, awkwardlib=None, cache=None, basketcache=None, keycache=None, executor=None, blocking=True): - awkward = _normalize_awkwardlib(awkwardlib) - branches = list(self._normalize_branches(branch, awkward)) + awkward0 = _normalize_awkwardlib(awkwardlib) + branches = list(self._normalize_branches(branch, awkward0)) if len(branches) == 1: if interpretation is None: tbranch, interpretation = branches[0] @@ -533,8 +533,8 @@ def array(self, branch, interpretation=None, entrystart=None, entrystop=None, fl return tbranch.array(interpretation=interpretation, entrystart=entrystart, entrystop=entrystop, flatten=flatten, awkwardlib=awkwardlib, cache=cache, basketcache=basketcache, keycache=keycache, executor=executor, blocking=blocking) def arrays(self, branches=None, outputtype=dict, namedecode=None, entrystart=None, entrystop=None, flatten=False, flatname=None, awkwardlib=None, cache=None, basketcache=None, keycache=None, executor=None, blocking=True, recursive=True): - awkward = _normalize_awkwardlib(awkwardlib) - branches = list(self._normalize_branches(branches, awkward)) + awkward0 = _normalize_awkwardlib(awkwardlib) + branches = list(self._normalize_branches(branches, awkward0)) for branch, interpretation in branches: if branch._recoveredbaskets is None: branch._tryrecover() @@ -558,9 +558,9 @@ def wrap_name(branch, namedecode): else: return branch.name if namedecode is None else branch.name.decode(namedecode) - futures = [(wrap_name(branch, namedecode), interpretation, branch.array(interpretation=interpretation, entrystart=entrystart, entrystop=entrystop, flatten=(flatten and not ispandas), awkwardlib=awkward, cache=cache, basketcache=basketcache, keycache=keycache, executor=executor, blocking=False)) for branch, interpretation in branches] + futures = [(wrap_name(branch, namedecode), interpretation, branch.array(interpretation=interpretation, entrystart=entrystart, entrystop=entrystop, flatten=(flatten and not ispandas), awkwardlib=awkward0, cache=cache, basketcache=basketcache, keycache=keycache, executor=executor, blocking=False)) for branch, interpretation in branches] else: - futures = [(branch.name if namedecode is None else branch.name.decode(namedecode), interpretation, branch.array(interpretation=interpretation, entrystart=entrystart, entrystop=entrystop, flatten=(flatten and not ispandas), awkwardlib=awkward, cache=cache, basketcache=basketcache, keycache=keycache, executor=executor, blocking=False)) for branch, interpretation in branches] + futures = [(branch.name if namedecode is None else branch.name.decode(namedecode), interpretation, 
branch.array(interpretation=interpretation, entrystart=entrystart, entrystop=entrystop, flatten=(flatten and not ispandas), awkwardlib=awkward0, cache=cache, basketcache=basketcache, keycache=keycache, executor=executor, blocking=False)) for branch, interpretation in branches] # make functions that wait for the filling job to be done and return the right outputtype if outputtype == namedtuple: @@ -569,9 +569,9 @@ def wait(): return outputtype(*[future() for name, interpretation, future in futures]) elif ispandas: - import uproot._connect._pandas + import uproot3._connect._pandas def wait(): - return uproot._connect._pandas.futures2df(futures, outputtype, entrystart, entrystop, flatten, flatname, awkward) + return uproot3._connect._pandas.futures2df(futures, outputtype, entrystart, entrystop, flatten, flatname, awkward0) elif isinstance(outputtype, type) and issubclass(outputtype, dict): def wait(): @@ -592,8 +592,8 @@ def wait(): return wait def lazyarray(self, branch, interpretation=None, entrysteps=None, entrystart=None, entrystop=None, flatten=False, awkwardlib=None, cache=None, basketcache=None, keycache=None, executor=None, persistvirtual=False, chunked=True): - awkward = _normalize_awkwardlib(awkwardlib) - branches = list(self._normalize_branches(branch, awkward)) + awkward0 = _normalize_awkwardlib(awkwardlib) + branches = list(self._normalize_branches(branch, awkward0)) if len(branches) == 1: if interpretation is None: tbranch, interpretation = branches[0] @@ -608,38 +608,38 @@ def lazyarrays(self, branches=None, namedecode="utf-8", entrysteps=None, entryst if not chunked and entrysteps is None: entrysteps = float('inf') entrysteps = list(self._normalize_entrysteps(entrysteps, branches, entrystart, entrystop, keycache)) - awkward = _normalize_awkwardlib(awkwardlib) - branches = list(self._normalize_branches(branches, awkward)) + awkward0 = _normalize_awkwardlib(awkwardlib) + branches = list(self._normalize_branches(branches, awkward0)) for branch, interpretation in branches: if branch._recoveredbaskets is None: branch._tryrecover() - lazytree = _LazyTree(self._context.sourcepath, self._context.treename, self, dict((b.name, x) for b, x in branches), flatten, awkward.__name__, basketcache, keycache, executor) + lazytree = _LazyTree(self._context.sourcepath, self._context.treename, self, dict((b.name, x) for b, x in branches), flatten, awkward0.__name__, basketcache, keycache, executor) - out = awkward.Table() + out = awkward0.Table() for branch, interpretation in branches: inner = interpretation while isinstance(inner, asjagged): inner = inner.content if isinstance(inner, asobj) and getattr(inner.cls, "_arraymethods", None) is not None: - VirtualArray = awkward.Methods.mixin(inner.cls._arraymethods, awkward.VirtualArray) + VirtualArray = awkward0.Methods.mixin(inner.cls._arraymethods, awkward0.VirtualArray) elif isinstance(inner, asgenobj) and getattr(inner.generator.cls, "_arraymethods", None) is not None: - VirtualArray = awkward.Methods.mixin(inner.generator.cls._arraymethods, awkward.VirtualArray) + VirtualArray = awkward0.Methods.mixin(inner.generator.cls._arraymethods, awkward0.VirtualArray) else: - VirtualArray = awkward.VirtualArray + VirtualArray = awkward0.VirtualArray name = branch.name.decode("ascii") if namedecode is None else branch.name.decode(namedecode) if chunked: chunks = [] counts = [] for start, stop in entrysteps: - chunks.append(VirtualArray(lazytree, (branch.name, start, stop), cache=cache, type=awkward.type.ArrayType(stop - start, interpretation.type), 
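# --- Illustrative usage sketch, not part of the patch: the arrays() machinery renamed
# --- above keeps its uproot 3 behaviour, with awkward0 as the array backend and pandas
# --- output routed through uproot3._connect._pandas.futures2df.  File, tree and branch
# --- names are hypothetical.
import pandas
import uproot3

tree = uproot3.open("data.root")["events"]
d = tree.arrays(["px", "py"], namedecode="utf-8")                          # dict of arrays keyed by str names
df = tree.arrays(["px", "py"], outputtype=pandas.DataFrame, flatten=True)  # DataFrame via the futures2df path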
persistvirtual=persistvirtual)) + chunks.append(VirtualArray(lazytree, (branch.name, start, stop), cache=cache, type=awkward0.type.ArrayType(stop - start, interpretation.type), persistvirtual=persistvirtual)) counts.append(stop - start) - out[name] = awkward.ChunkedArray(chunks, counts) + out[name] = awkward0.ChunkedArray(chunks, counts) out[name].__doc__ = branch.title.decode('ascii') else: start, stop = entrysteps[0] - out[name] = VirtualArray(lazytree, (branch.name, start, stop), cache=cache, type=awkward.type.ArrayType(stop - start, interpretation.type), persistvirtual=persistvirtual) + out[name] = VirtualArray(lazytree, (branch.name, start, stop), cache=cache, type=awkward0.type.ArrayType(stop - start, interpretation.type), persistvirtual=persistvirtual) out[name].__doc__ = branch.title.decode('ascii') if profile is not None: @@ -688,8 +688,8 @@ def iterate(self, branches=None, entrysteps=None, outputtype=dict, namedecode=No entrystart, entrystop = _normalize_entrystartstop(self.numentries, entrystart, entrystop) entrysteps = self._normalize_entrysteps(entrysteps, branches, entrystart, entrystop, keycache) - awkward = _normalize_awkwardlib(awkwardlib) - branches = list(self._normalize_branches(branches, awkward)) + awkward0 = _normalize_awkwardlib(awkwardlib) + branches = list(self._normalize_branches(branches, awkward0)) for branch, interpretation in branches: if branch._recoveredbaskets is None: branch._tryrecover() @@ -717,11 +717,11 @@ def wrap_for_python_scope(futures, start, stop): return lambda: outputtype(*[evaluate(branch, interpretation, future, past, cachekey, False) for branch, interpretation, future, past, cachekey in futures]) elif ispandas: - import uproot._connect._pandas + import uproot3._connect._pandas def wrap_for_python_scope(futures, start, stop): def wrap_again(branch, interpretation, future): return lambda: interpretation.finalize(future(), branch) - return lambda: uproot._connect._pandas.futures2df([(branch.name, interpretation, wrap_again(branch, interpretation, future)) for branch, interpretation, future, past, cachekey in futures], outputtype, start, stop, flatten, flatname, awkward) + return lambda: uproot3._connect._pandas.futures2df([(branch.name, interpretation, wrap_again(branch, interpretation, future)) for branch, interpretation, future, past, cachekey in futures], outputtype, start, stop, flatten, flatname, awkward0) elif isinstance(outputtype, type) and issubclass(outputtype, dict): def wrap_for_python_scope(futures, start, stop): @@ -758,7 +758,7 @@ def wrap_for_python_scope(futures, start, stop): if out is not None: futures.append((branch, interpretation, None, out, cachekey)) continue - future = branch._step_array(interpretation, basket_itemoffset, basket_entryoffset, start, stop, awkward, basketcache, keycache, executor, explicit_basketcache) + future = branch._step_array(interpretation, basket_itemoffset, basket_entryoffset, start, stop, awkward0, basketcache, keycache, executor, explicit_basketcache) futures.append((branch, interpretation, future, None, cachekey)) out = wrap_for_python_scope(futures, start, stop) @@ -791,8 +791,8 @@ def _recover(self): branch._recover() def matches(self, branches): - awkward = _normalize_awkwardlib(None) - return [b.name for b, i in self._normalize_branches(branches, awkward, allownone=False, allowcallable=False, allowdict=False, allowstring=True)] + awkward0 = _normalize_awkwardlib(None) + return [b.name for b, i in self._normalize_branches(branches, awkward0, allownone=False, allowcallable=False, 
allowdict=False, allowstring=True)] _branch_regex = re.compile(b"^/(.*)/([iLmsux]*)$") @@ -814,10 +814,10 @@ def _branch_flags(flags): flagsbyte += re.X return flagsbyte - def _normalize_branches(self, arg, awkward, allownone=True, allowcallable=True, allowdict=True, allowstring=True, aliases=True): + def _normalize_branches(self, arg, awkward0, allownone=True, allowcallable=True, allowdict=True, allowstring=True, aliases=True): if allownone and arg is None: # no specification; read all branches for branch in self.allvalues(): # that have interpretations - interpretation = interpret(branch, awkward) + interpretation = interpret(branch, awkward0) if interpretation is not None: yield branch, interpretation @@ -827,11 +827,11 @@ def _normalize_branches(self, arg, awkward, allownone=True, allowcallable=True, if result is None or result is False: pass elif result is True: # function is a filter - interpretation = interpret(branch, awkward) + interpretation = interpret(branch, awkward0) if interpretation is not None: yield branch, interpretation else: # function is giving interpretations - yield branch, branch._normalize_dtype(result, awkward) + yield branch, branch._normalize_dtype(result, awkward0) elif allowdict and isinstance(arg, dict): for word, interpretation in arg.items(): @@ -842,19 +842,19 @@ def _normalize_branches(self, arg, awkward, allownone=True, allowcallable=True, regex, flags = isregex.groups() for name, branch in self.iteritems(recursive=True, aliases=aliases): if re.match(regex, name, self._branch_flags(flags)): - yield branch, branch._normalize_dtype(interpretation, awkward) + yield branch, branch._normalize_dtype(interpretation, awkward0) elif b"*" in word or b"?" in word or b"[" in word: for name, branch in self.iteritems(recursive=True, aliases=aliases): if name == word or glob.fnmatch.fnmatchcase(name, word): - yield branch, branch._normalize_dtype(interpretation, awkward) + yield branch, branch._normalize_dtype(interpretation, awkward0) else: branch = self.get(word, aliases=aliases) - yield branch, branch._normalize_dtype(interpretation, awkward) + yield branch, branch._normalize_dtype(interpretation, awkward0) elif allowstring and isinstance(arg, string_types): - for x in self._normalize_branches([arg], awkward): + for x in self._normalize_branches([arg], awkward0): yield x else: @@ -871,7 +871,7 @@ def _normalize_branches(self, arg, awkward, allownone=True, allowcallable=True, regex, flags = isregex.groups() for name, branch in self.iteritems(recursive=True, aliases=aliases): if re.match(regex, name, self._branch_flags(flags)): - interpretation = interpret(branch, awkward) + interpretation = interpret(branch, awkward0) if interpretation is None: if name == word: raise ValueError("cannot interpret branch {0} as a Python type\n in file: {1}".format(repr(branch.name), self._context.sourcepath)) @@ -881,7 +881,7 @@ def _normalize_branches(self, arg, awkward, allownone=True, allowcallable=True, elif b"*" in word or b"?" 
in word or b"[" in word: for name, branch in self.iteritems(recursive=True, aliases=aliases): if name == word or glob.fnmatch.fnmatchcase(name, word): - interpretation = interpret(branch, awkward) + interpretation = interpret(branch, awkward0) if interpretation is None: if name == word: raise ValueError("cannot interpret branch {0} as a Python type\n in file: {1}".format(repr(branch.name), self._context.sourcepath)) @@ -890,7 +890,7 @@ def _normalize_branches(self, arg, awkward, allownone=True, allowcallable=True, else: branch = self.get(word, aliases=aliases) - interpretation = interpret(branch, awkward) + interpretation = interpret(branch, awkward0) if interpretation is None: raise ValueError("cannot interpret branch {0} as a Python type\n in file: {1}".format(repr(branch.name), self._context.sourcepath)) else: @@ -908,14 +908,14 @@ def __iter__(self): @property def pandas(self): - import uproot._connect._pandas - return uproot._connect._pandas.TTreeMethods_pandas(self) + import uproot3._connect._pandas + return uproot3._connect._pandas.TTreeMethods_pandas(self) ################################################################ methods for TBranch class TBranchMethods(object): # makes __doc__ attribute mutable before Python 3.3 - __metaclass__ = type.__new__(type, "type", (uproot.rootio.ROOTObject.__metaclass__,), {}) + __metaclass__ = type.__new__(type, "type", (uproot3.rootio.ROOTObject.__metaclass__,), {}) def _postprocess(self, source, cursor, context, parent): self._source = source @@ -959,9 +959,9 @@ def title(self): @property def interpretation(self): - awkward = _normalize_awkwardlib(None) + awkward0 = _normalize_awkwardlib(None) if self._interpretation is None: - self._interpretation = interpret(self, awkward) + self._interpretation = interpret(self, awkward0) return self._interpretation @property @@ -1032,7 +1032,7 @@ def _get(self, name, recursive=True, filtername=nofilter, filtertitle=nofilter): for n, b in self.iteritems(recursive=recursive, filtername=filtername, filtertitle=filtertitle): if n == name: return b - raise uproot.rootio._KeyError("not found: {0}\n in file: {1}".format(repr(name), self._context.sourcepath)) + raise uproot3.rootio._KeyError("not found: {0}\n in file: {1}".format(repr(name), self._context.sourcepath)) def get(self, name, recursive=True, filtername=nofilter, filtertitle=nofilter): name = _bytesid(name) @@ -1112,47 +1112,47 @@ def compressionratio(self, keycache=None): denom += key._fNbytes - key._fKeylen return float(numer) / float(denom) - def _normalize_dtype(self, interpretation, awkward): - if inspect.isclass(interpretation) and issubclass(interpretation, awkward.numpy.generic): - return self._normalize_dtype(awkward.numpy.dtype(interpretation), awkward) + def _normalize_dtype(self, interpretation, awkward0): + if inspect.isclass(interpretation) and issubclass(interpretation, awkward0.numpy.generic): + return self._normalize_dtype(awkward0.numpy.dtype(interpretation), awkward0) - elif isinstance(interpretation, awkward.numpy.dtype): # user specified a Numpy dtype - default = interpret(self, awkward) + elif isinstance(interpretation, awkward0.numpy.dtype): # user specified a Numpy dtype + default = interpret(self, awkward0) if isinstance(default, (asdtype, asjagged)): return default.to(interpretation) else: raise ValueError("cannot cast branch {0} (default interpretation {1}) as dtype {2}".format(repr(self.name), default, interpretation)) - elif isinstance(interpretation, awkward.numpy.ndarray): # user specified a Numpy array - default = 
interpret(self, awkward) + elif isinstance(interpretation, awkward0.numpy.ndarray): # user specified a Numpy array + default = interpret(self, awkward0) if isinstance(default, asdtype): return default.toarray(interpretation) else: raise ValueError("cannot cast branch {0} (default interpretation {1}) as dtype {2}".format(repr(self.name), default, interpretation)) - elif not isinstance(interpretation, uproot.interp.interp.Interpretation): + elif not isinstance(interpretation, uproot3.interp.interp.Interpretation): raise TypeError("branch interpretation must be an Interpretation, not {0} (type {1})".format(interpretation, type(interpretation))) else: return interpretation - def _normalize_interpretation(self, interpretation, awkward): + def _normalize_interpretation(self, interpretation, awkward0): if interpretation is None: - interpretation = interpret(self, awkward) + interpretation = interpret(self, awkward0) else: - interpretation = self._normalize_dtype(interpretation, awkward) + interpretation = self._normalize_dtype(interpretation, awkward0) if interpretation is None: raise ValueError("cannot interpret branch {0} as a Python type\n in file: {1}".format(repr(self.name), self._context.sourcepath)) - if interpretation.awkward is not awkward: - interpretation = interpretation.awkwardlib(awkward) + if interpretation.awkward0 is not awkward0: + interpretation = interpretation.awkwardlib(awkward0) return interpretation def numitems(self, interpretation=None, keycache=None): - awkward = _normalize_awkwardlib(None) - interpretation = self._normalize_interpretation(interpretation, awkward) + awkward0 = _normalize_awkwardlib(None) + interpretation = self._normalize_interpretation(interpretation, awkward0) if interpretation is None: raise ValueError("cannot interpret branch {0} as a Python type\n in file: {1}".format(repr(self.name), self._context.sourcepath)) if self._recoveredbaskets is None: @@ -1162,7 +1162,7 @@ def numitems(self, interpretation=None, keycache=None): @property def compression(self): try: - return uproot.source.compressed.Compression(self._fCompress) + return uproot3.source.compressed.Compression(self._fCompress) except ValueError: return self._context.compression @@ -1204,8 +1204,8 @@ def basket_compressedbytes(self, i, keycache=None): def basket_numitems(self, i, interpretation=None, keycache=None): if self._recoveredbaskets is None: self._tryrecover() - awkward = _normalize_awkwardlib(None) - interpretation = self._normalize_interpretation(interpretation, awkward) + awkward0 = _normalize_awkwardlib(None) + interpretation = self._normalize_interpretation(interpretation, awkward0) key = self._threadsafe_key(i, keycache, True) return interpretation.numitems(key.border, self.basket_numentries(i)) @@ -1214,7 +1214,7 @@ def _localentries(self, i, entrystart, entrystop): local_entrystop = max(0, min(entrystop - self.basket_entrystart(i), self.basket_entrystop(i) - self.basket_entrystart(i))) return local_entrystart, local_entrystop - def _basket(self, i, interpretation, local_entrystart, local_entrystop, awkward, basketcache, keycache): + def _basket(self, i, interpretation, local_entrystart, local_entrystop, awkward0, basketcache, keycache): basketdata = None if basketcache is not None: basketcachekey = self._basketcachekey(i) @@ -1231,29 +1231,29 @@ def _basket(self, i, interpretation, local_entrystart, local_entrystop, awkward, if key._fObjlen == key.border: data, byteoffsets = basketdata, None - if self._countbranch is not None and awkward.numpy.uint8(self._tree_iofeatures) & 
awkward.numpy.uint8(uproot.const.kGenerateOffsetMap) != 0: + if self._countbranch is not None and awkward0.numpy.uint8(self._tree_iofeatures) & awkward0.numpy.uint8(uproot3.const.kGenerateOffsetMap) != 0: counts = self._countbranch.array(entrystart=(local_entrystart + self.basket_entrystart(i)), entrystop=(local_entrystop + self.basket_entrystart(i))) itemsize = 1 if isinstance(interpretation, asjagged): itemsize = interpretation.content.fromdtype.itemsize - awkward.numpy.multiply(counts, itemsize, counts) - byteoffsets = awkward.numpy.empty(len(counts) + 1, dtype=awkward.numpy.int32) + awkward0.numpy.multiply(counts, itemsize, counts) + byteoffsets = awkward0.numpy.empty(len(counts) + 1, dtype=awkward0.numpy.int32) byteoffsets[0] = 0 - awkward.numpy.cumsum(counts, out=byteoffsets[1:]) + awkward0.numpy.cumsum(counts, out=byteoffsets[1:]) else: data = basketdata[:key.border] - byteoffsets = awkward.numpy.empty((key._fObjlen - key.border - 4) // 4, dtype=awkward.numpy.int32) # native endian + byteoffsets = awkward0.numpy.empty((key._fObjlen - key.border - 4) // 4, dtype=awkward0.numpy.int32) # native endian byteoffsets[:-1] = basketdata[key.border + 4 : -4].view(">i4") # read as big-endian and convert byteoffsets[-1] = key._fLast - awkward.numpy.subtract(byteoffsets, key._fKeylen, byteoffsets) + awkward0.numpy.subtract(byteoffsets, key._fKeylen, byteoffsets) return interpretation.fromroot(data, byteoffsets, local_entrystart, local_entrystop, key._fKeylen) def basket(self, i, interpretation=None, entrystart=None, entrystop=None, flatten=False, awkwardlib=None, cache=None, basketcache=None, keycache=None): - awkward = _normalize_awkwardlib(awkwardlib) - interpretation = self._normalize_interpretation(interpretation, awkward) + awkward0 = _normalize_awkwardlib(awkwardlib) + interpretation = self._normalize_interpretation(interpretation, awkward0) if interpretation is None: raise ValueError("cannot interpret branch {0} as a Python type\n in file: {1}".format(repr(self.name), self._context.sourcepath)) if self._recoveredbaskets is None: @@ -1277,7 +1277,7 @@ def basket(self, i, interpretation=None, entrystart=None, entrystop=None, flatte else: return out - source = self._basket(i, interpretation, local_entrystart, local_entrystop, awkward, basketcache, keycache) + source = self._basket(i, interpretation, local_entrystart, local_entrystop, awkward0, basketcache, keycache) numitems = interpretation.source_numitems(source) destination = interpretation.destination(numitems, numentries) @@ -1308,8 +1308,8 @@ def _basketstartstop(self, entrystart, entrystop): return basketstart, basketstop def baskets(self, interpretation=None, entrystart=None, entrystop=None, flatten=False, awkwardlib=None, cache=None, basketcache=None, keycache=None, reportentries=False, executor=None, blocking=True): - awkward = _normalize_awkwardlib(awkwardlib) - interpretation = self._normalize_interpretation(interpretation, awkward) + awkward0 = _normalize_awkwardlib(awkwardlib) + interpretation = self._normalize_interpretation(interpretation, awkward0) if interpretation is None: raise ValueError("cannot interpret branch {0} as a Python type\n in file: {1}".format(repr(self.name), self._context.sourcepath)) if self._recoveredbaskets is None: @@ -1330,7 +1330,7 @@ def wait(): def fill(j): try: - basket = self.basket(j + basketstart, interpretation=interpretation, entrystart=entrystart, entrystop=entrystop, flatten=flatten, awkwardlib=awkward, cache=cache, basketcache=basketcache, keycache=keycache) + basket = self.basket(j + 
basketstart, interpretation=interpretation, entrystart=entrystart, entrystop=entrystop, flatten=flatten, awkwardlib=awkward0, cache=cache, basketcache=basketcache, keycache=keycache) if reportentries: local_entrystart, local_entrystop = self._localentries(j + basketstart, entrystart, entrystop) basket = (local_entrystart + self.basket_entrystart(j + basketstart), @@ -1361,8 +1361,8 @@ def wait(): return wait def iterate_baskets(self, interpretation=None, entrystart=None, entrystop=None, flatten=False, awkwardlib=None, cache=None, basketcache=None, keycache=None, reportentries=False): - awkward = _normalize_awkwardlib(awkwardlib) - interpretation = self._normalize_interpretation(interpretation, awkward) + awkward0 = _normalize_awkwardlib(awkwardlib) + interpretation = self._normalize_interpretation(interpretation, awkward0) if interpretation is None: raise ValueError("cannot interpret branch {0} as a Python type\n in file: {1}".format(repr(self.name), self._context.sourcepath)) if self._recoveredbaskets is None: @@ -1378,9 +1378,9 @@ def iterate_baskets(self, interpretation=None, entrystart=None, entrystop=None, if reportentries: yield (local_entrystart + self.basket_entrystart(i), local_entrystop + self.basket_entrystart(i), - self.basket(i, interpretation=interpretation, entrystart=entrystart, entrystop=entrystop, flatten=flatten, awkwardlib=awkward, cache=cache, basketcache=basketcache, keycache=keycache)) + self.basket(i, interpretation=interpretation, entrystart=entrystart, entrystop=entrystop, flatten=flatten, awkwardlib=awkward0, cache=cache, basketcache=basketcache, keycache=keycache)) else: - yield self.basket(i, interpretation=interpretation, entrystart=entrystart, entrystop=entrystop, flatten=flatten, awkwardlib=awkward, cache=cache, basketcache=basketcache, keycache=keycache) + yield self.basket(i, interpretation=interpretation, entrystart=entrystart, entrystop=entrystop, flatten=flatten, awkwardlib=awkward0, cache=cache, basketcache=basketcache, keycache=keycache) def _basket_itemoffset(self, interpretation, basketstart, basketstop, keycache): basket_itemoffset = [0] @@ -1399,8 +1399,8 @@ def _basket_entryoffset(self, basketstart, basketstop): def array(self, interpretation=None, entrystart=None, entrystop=None, flatten=False, awkwardlib=None, cache=None, basketcache=None, keycache=None, executor=None, blocking=True): if self._recoveredbaskets is None: self._tryrecover() - awkward = _normalize_awkwardlib(awkwardlib) - interpretation = self._normalize_interpretation(interpretation, awkward) + awkward0 = _normalize_awkwardlib(awkwardlib) + interpretation = self._normalize_interpretation(interpretation, awkward0) if interpretation is None: raise ValueError("cannot interpret branch {0} as a Python type\n in file: {1}".format(repr(self.name), self._context.sourcepath)) entrystart, entrystop = _normalize_entrystartstop(self.numentries, entrystart, entrystop) @@ -1440,7 +1440,7 @@ def fill(j): try: i = j + basketstart local_entrystart, local_entrystop = self._localentries(i, entrystart, entrystop) - source = self._basket(i, interpretation, local_entrystart, local_entrystop, awkward, basketcache, keycache) + source = self._basket(i, interpretation, local_entrystart, local_entrystop, awkward0, basketcache, keycache) expecteditems = basket_itemoffset[j + 1] - basket_itemoffset[j] source_numitems = interpretation.source_numitems(source) @@ -1500,7 +1500,7 @@ def wait(): else: return wait - def _step_array(self, interpretation, basket_itemoffset, basket_entryoffset, entrystart, entrystop, 
awkward, basketcache, keycache, executor, explicit_basketcache): + def _step_array(self, interpretation, basket_itemoffset, basket_entryoffset, entrystart, entrystop, awkward0, basketcache, keycache, executor, explicit_basketcache): if interpretation is None: raise ValueError("cannot interpret branch {0} as a Python type\n in file: {1}".format(repr(self.name), self._context.sourcepath)) if self._recoveredbaskets is None: @@ -1517,7 +1517,7 @@ def fill(j): try: i = j + basketstart local_entrystart, local_entrystop = self._localentries(i, entrystart, entrystop) - source = self._basket(i, interpretation, local_entrystart, local_entrystop, awkward, basketcache, keycache) + source = self._basket(i, interpretation, local_entrystart, local_entrystop, awkward0, basketcache, keycache) expecteditems = basket_itemoffset[j + 1] - basket_itemoffset[j] source_numitems = interpretation.source_numitems(source) @@ -1581,7 +1581,7 @@ def mempartitions(self, numbytes, entrystart=None, entrystop=None, keycache=None if numbytes <= 0: raise ValueError("target numbytes must be positive") - awkward = _normalize_awkwardlib(None) + awkward0 = _normalize_awkwardlib(None) entrystart, entrystop = _normalize_entrystartstop(self.numentries, entrystart, entrystop) if not linear: @@ -1641,8 +1641,8 @@ def _normalize_entrysteps(self, entrysteps, entrystart, entrystop, keycache): def lazyarray(self, interpretation=None, entrysteps=None, entrystart=None, entrystop=None, flatten=False, awkwardlib=None, cache=None, basketcache=None, keycache=None, executor=None, persistvirtual=False, chunked=True): if self._recoveredbaskets is None: self._tryrecover() - awkward = _normalize_awkwardlib(awkwardlib) - interpretation = self._normalize_interpretation(interpretation, awkward) + awkward0 = _normalize_awkwardlib(awkwardlib) + interpretation = self._normalize_interpretation(interpretation, awkward0) if interpretation is None: raise ValueError("cannot interpret branch {0} as a Python type\n in file: {1}".format(repr(self.name), self._context.sourcepath)) entrystart, entrystop = _normalize_entrystartstop(self.numentries, entrystart, entrystop) @@ -1654,23 +1654,23 @@ def lazyarray(self, interpretation=None, entrysteps=None, entrystart=None, entry while isinstance(inner, asjagged): inner = inner.content if isinstance(inner, asobj) and getattr(inner.cls, "_arraymethods", None) is not None: - VirtualArray = awkward.Methods.mixin(inner.cls._arraymethods, awkward.VirtualArray) - chunkedarray = awkward.Methods.mixin(inner.cls._arraymethods, awkward.ChunkedArray) + VirtualArray = awkward0.Methods.mixin(inner.cls._arraymethods, awkward0.VirtualArray) + chunkedarray = awkward0.Methods.mixin(inner.cls._arraymethods, awkward0.ChunkedArray) elif isinstance(inner, asgenobj) and getattr(inner.generator.cls, "_arraymethods", None) is not None: - VirtualArray = awkward.Methods.mixin(inner.generator.cls._arraymethods, awkward.VirtualArray) - chunkedarray = awkward.Methods.mixin(inner.generator.cls._arraymethods, awkward.ChunkedArray) + VirtualArray = awkward0.Methods.mixin(inner.generator.cls._arraymethods, awkward0.VirtualArray) + chunkedarray = awkward0.Methods.mixin(inner.generator.cls._arraymethods, awkward0.ChunkedArray) else: - VirtualArray = awkward.VirtualArray - chunkedarray = awkward.ChunkedArray + VirtualArray = awkward0.VirtualArray + chunkedarray = awkward0.ChunkedArray - lazybranch = _LazyBranch(self._context.sourcepath, self._context.treename, self.name, self, interpretation, flatten, awkward.__name__, basketcache, keycache, executor) + 
lazybranch = _LazyBranch(self._context.sourcepath, self._context.treename, self.name, self, interpretation, flatten, awkward0.__name__, basketcache, keycache, executor) if chunked: chunks = [] counts = [] for start, stop in entrysteps: numentries = stop - start - chunks.append(VirtualArray(lazybranch, (start, stop), cache=cache, type=awkward.type.ArrayType(numentries, interpretation.type), persistvirtual=persistvirtual)) + chunks.append(VirtualArray(lazybranch, (start, stop), cache=cache, type=awkward0.type.ArrayType(numentries, interpretation.type), persistvirtual=persistvirtual)) counts.append(numentries) out = chunkedarray(chunks, counts) @@ -1678,7 +1678,7 @@ def lazyarray(self, interpretation=None, entrysteps=None, entrystart=None, entry return out else: start, stop = entrysteps[0] - out = VirtualArray(lazybranch, (start, stop), cache=cache, type=awkward.type.ArrayType(stop - start, interpretation.type), persistvirtual=persistvirtual) + out = VirtualArray(lazybranch, (start, stop), cache=cache, type=awkward0.type.ArrayType(stop - start, interpretation.type), persistvirtual=persistvirtual) out.__doc__ = self.title.decode('ascii') return out @@ -1705,7 +1705,7 @@ def __init__(self, source, cursor, compression, complete): raise ValueError("TKey declares that object has {0} bytes but only {1} remain in the file\n in file: {2}".format(self._fNbytes, source.size() - self._fSeekKey, s.path)) if self._fObjlen != self._fNbytes - self._fKeylen: - self.source = uproot.source.compressed.CompressedSource(compression, source, Cursor(self._fSeekKey + self._fKeylen), self._fNbytes - self._fKeylen, self._fObjlen) + self.source = uproot3.source.compressed.CompressedSource(compression, source, Cursor(self._fSeekKey + self._fKeylen), self._fNbytes - self._fKeylen, self._fObjlen) self.cursor = Cursor(0) else: self.source = source @@ -1734,7 +1734,7 @@ def basketdata(self): finally: datasource.dismiss() - class _RecoveredTBasket(uproot.rootio.ROOTObject): + class _RecoveredTBasket(uproot3.rootio.ROOTObject): @classmethod def _readinto(cls, self, source, cursor, context, parent): start = cursor.index @@ -1781,7 +1781,7 @@ def numentries(self): return self._fNevBuf def _recover(self): - recoveredbaskets = [x for x in uproot.rootio.TObjArray.read(self._source, self._fBaskets._cursor, self._context, self, asclass=TBranchMethods._RecoveredTBasket) if x is not None] + recoveredbaskets = [x for x in uproot3.rootio.TObjArray.read(self._source, self._fBaskets._cursor, self._context, self, asclass=TBranchMethods._RecoveredTBasket) if x is not None] if self._numgoodbaskets == 0: entryoffsets = [0] @@ -1873,7 +1873,7 @@ def __init__(self, paths, treepath, branches, entrysteps, flatten, awkwardlib, b def _init(self): self.trees = cachetools.LRUCache(5) # last 5 TTrees if self.basketcache is None: - self.basketcache = uproot.cache.ThreadSafeArrayCache(1024**2) # 1 MB + self.basketcache = uproot3.cache.ThreadSafeArrayCache(1024**2) # 1 MB if self.keycache is None: self.keycache = cachetools.LRUCache(10000) # last 10000 TKeys @@ -1908,12 +1908,12 @@ def __setstate__(self, state): self._init() def __call__(self, pathi, branchname): - awkward = _normalize_awkwardlib(self.awkwardlib) + awkward0 = _normalize_awkwardlib(self.awkwardlib) tree = self.trees.get(self.paths[pathi], None) if tree is None: - tree = self.trees[self.paths[pathi]] = uproot.rootio.open(self.paths[pathi])[self.treepath] - tree.interpretations = dict((b.name, x) for b, x in tree._normalize_branches(self.branches, awkward)) - return 
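# --- Illustrative usage sketch, not part of the patch: the lazy-array path renamed
# --- above builds awkward0 containers; a branch's lazyarray() returns a ChunkedArray
# --- of VirtualArray chunks and TTreeMethods.lazyarrays() an awkward0.Table of them.
# --- File, tree and branch names are hypothetical.
import uproot3

tree = uproot3.open("data.root")["events"]
chunked = tree["px"].lazyarray(entrysteps=1000)   # awkward0.ChunkedArray, one VirtualArray per entry step
table = tree.lazyarrays(entrysteps=1000)          # awkward0.Table keyed by decoded branch names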
tree[branchname].lazyarray(interpretation=tree.interpretations[branchname], entrysteps=self.entrysteps, entrystart=None, entrystop=None, flatten=self.flatten, awkwardlib=awkward, cache=None, basketcache=self.basketcache, keycache=self.keycache, executor=self.executor, persistvirtual=self.persistvirtual) + tree = self.trees[self.paths[pathi]] = uproot3.rootio.open(self.paths[pathi])[self.treepath] + tree.interpretations = dict((b.name, x) for b, x in tree._normalize_branches(self.branches, awkward0)) + return tree[branchname].lazyarray(interpretation=tree.interpretations[branchname], entrysteps=self.entrysteps, entrystart=None, entrystop=None, flatten=self.flatten, awkwardlib=awkward0, cache=None, basketcache=self.basketcache, keycache=self.keycache, executor=self.executor, persistvirtual=self.persistvirtual) class _LazyTree(object): def __init__(self, path, treepath, tree, interpretation, flatten, awkwardlib, basketcache, keycache, executor): @@ -1930,9 +1930,9 @@ def __init__(self, path, treepath, tree, interpretation, flatten, awkwardlib, ba def _init(self): if self.tree is None: - self.tree = uproot.rootio.open(self.path)[self.treepath] + self.tree = uproot3.rootio.open(self.path)[self.treepath] if self.basketcache is None: - self.basketcache = uproot.cache.ThreadSafeArrayCache(1024**2) # 1 MB + self.basketcache = uproot3.cache.ThreadSafeArrayCache(1024**2) # 1 MB if self.keycache is None: self.keycache = {} # unlimited @@ -1974,9 +1974,9 @@ def __init__(self, path, treepath, branchname, branch, interpretation, flatten, def _init(self): if self.branch is None: - self.branch = uproot.rootio.open(self.path)[self.treepath][self.branchname] + self.branch = uproot3.rootio.open(self.path)[self.treepath][self.branchname] if self.basketcache is None: - self.basketcache = uproot.cache.ThreadSafeArrayCache(1024**2) # 1 MB + self.basketcache = uproot3.cache.ThreadSafeArrayCache(1024**2) # 1 MB if self.keycache is None: self.keycache = {} # unlimited @@ -2011,11 +2011,11 @@ def lazyarray(path, treepath, branchname, interpretation=None, namedecode="utf-8 branches = {branchname: interpretation} out = lazyarrays(path, treepath, branches=branches, namedecode=namedecode, entrysteps=entrysteps, flatten=flatten, profile=None, awkwardlib=awkwardlib, cache=cache, basketcache=basketcache, keycache=keycache, executor=executor, persistvirtual=persistvirtual, localsource=localsource, xrootdsource=xrootdsource, httpsource=httpsource, **options) if len(out.columns) != 1: - raise ValueError("list of branch names or glob/regex matches more than one branch; use uproot.lazyarrays (plural)") + raise ValueError("list of branch names or glob/regex matches more than one branch; use uproot3.lazyarrays (plural)") return out[out.columns[0]] def lazyarrays(path, treepath, branches=None, namedecode="utf-8", entrysteps=float("inf"), flatten=False, profile=None, awkwardlib=None, cache=None, basketcache=None, keycache=None, executor=None, persistvirtual=False, localsource=MemmapSource.defaults, xrootdsource=XRootDSource.defaults, httpsource=HTTPSource.defaults, **options): - awkward = _normalize_awkwardlib(awkwardlib) + awkward0 = _normalize_awkwardlib(awkwardlib) if isinstance(path, string_types): paths = _filename_explode(path) else: @@ -2023,40 +2023,40 @@ def lazyarrays(path, treepath, branches=None, namedecode="utf-8", entrysteps=flo path2count = numentries(path, treepath, total=False, localsource=localsource, xrootdsource=xrootdsource, httpsource=httpsource, executor=executor, blocking=True) - lazyfiles = _LazyFiles(paths, 
treepath, branches, entrysteps, flatten, awkward.__name__, basketcache, keycache, executor, persistvirtual, localsource, xrootdsource, httpsource, options) + lazyfiles = _LazyFiles(paths, treepath, branches, entrysteps, flatten, awkward0.__name__, basketcache, keycache, executor, persistvirtual, localsource, xrootdsource, httpsource, options) brancheslist = None for path in paths: - file = uproot.rootio.open(path, localsource=localsource, xrootdsource=xrootdsource, httpsource=httpsource, **options) + file = uproot3.rootio.open(path, localsource=localsource, xrootdsource=xrootdsource, httpsource=httpsource, **options) try: tree = file[treepath] except KeyError: continue - brancheslist = list(tree._normalize_branches(branches, awkward)) + brancheslist = list(tree._normalize_branches(branches, awkward0)) break if brancheslist is None: raise ValueError("no matching paths contained a tree named {0}".format(repr(treepath))) - out = awkward.Table() + out = awkward0.Table() for branch, interpretation in brancheslist: inner = interpretation while isinstance(inner, asjagged): inner = inner.content if isinstance(inner, asobj) and getattr(inner.cls, "_arraymethods", None) is not None: - VirtualArray = awkward.Methods.mixin(inner.cls._arraymethods, awkward.VirtualArray) + VirtualArray = awkward0.Methods.mixin(inner.cls._arraymethods, awkward0.VirtualArray) elif isinstance(inner, asgenobj) and getattr(inner.generator.cls, "_arraymethods", None) is not None: - VirtualArray = awkward.Methods.mixin(inner.generator.cls._arraymethods, awkward.VirtualArray) + VirtualArray = awkward0.Methods.mixin(inner.generator.cls._arraymethods, awkward0.VirtualArray) else: - VirtualArray = awkward.VirtualArray + VirtualArray = awkward0.VirtualArray chunks = [] counts = [] for pathi, path in enumerate(paths): - chunks.append(VirtualArray(lazyfiles, (pathi, branch.name), cache=cache, type=awkward.type.ArrayType(path2count[path], interpretation.type), persistvirtual=persistvirtual)) + chunks.append(VirtualArray(lazyfiles, (pathi, branch.name), cache=cache, type=awkward0.type.ArrayType(path2count[path], interpretation.type), persistvirtual=persistvirtual)) counts.append(path2count[path]) name = branch.name.decode("ascii") if namedecode is None else branch.name.decode(namedecode) - out[name] = awkward.ChunkedArray(chunks, counts) + out[name] = awkward0.ChunkedArray(chunks, counts) if profile is not None: out = uproot_methods.profiles.transformer(profile)(out) @@ -2094,14 +2094,14 @@ def numentries(path, treepath, total=True, localsource=MemmapSource.defaults, xr return _numentries(paths, treepath, total, localsource, xrootdsource, httpsource, executor, blocking, [None] * len(paths), options) def _numentries(paths, treepath, total, localsource, xrootdsource, httpsource, executor, blocking, uuids, options): - class _TTreeForNumEntries(uproot.rootio.ROOTStreamedObject): + class _TTreeForNumEntries(uproot3.rootio.ROOTStreamedObject): @classmethod def _readinto(cls, self, source, cursor, context, parent): - start, cnt, classversion = uproot.rootio._startcheck(source, cursor) - tnamed = uproot.rootio.Undefined.read(source, cursor, context, parent) - tattline = uproot.rootio.Undefined.read(source, cursor, context, parent) - tattfill = uproot.rootio.Undefined.read(source, cursor, context, parent) - tattmarker = uproot.rootio.Undefined.read(source, cursor, context, parent) + start, cnt, classversion = uproot3.rootio._startcheck(source, cursor) + tnamed = uproot3.rootio.Undefined.read(source, cursor, context, parent) + tattline = 
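# --- Illustrative usage sketch, not part of the patch: the module-level helpers renamed
# --- above (lazyarrays, numentries) span several files at once, assuming the usual
# --- top-level re-exports from uproot3.tree.  Paths and tree name are hypothetical.
import uproot3

table = uproot3.lazyarrays("data*.root", "events", ["px", "py"], entrysteps=100000)
total = uproot3.numentries("data*.root", "events")                   # entries summed over matching files
per_file = uproot3.numentries("data*.root", "events", total=False)   # per-file counts keyed by path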
uproot3.rootio.Undefined.read(source, cursor, context, parent) + tattfill = uproot3.rootio.Undefined.read(source, cursor, context, parent) + tattmarker = uproot3.rootio.Undefined.read(source, cursor, context, parent) self._fEntries, = cursor.fields(source, _TTreeForNumEntries._format1) return self _format1 = struct.Struct('>q') @@ -2110,7 +2110,7 @@ def _readinto(cls, self, source, cursor, context, parent): def fill(i): try: - file = uproot.rootio.open(paths[i], localsource=localsource, xrootdsource=xrootdsource, httpsource=httpsource, read_streamers=False, **options) + file = uproot3.rootio.open(paths[i], localsource=localsource, xrootdsource=xrootdsource, httpsource=httpsource, read_streamers=False, **options) except Exception: return sys.exc_info() else: diff --git a/uproot/version.py b/uproot3/version.py similarity index 83% rename from uproot/version.py rename to uproot3/version.py index 17273ffb..a8045b50 100644 --- a/uproot/version.py +++ b/uproot3/version.py @@ -1,12 +1,12 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE from __future__ import absolute_import import re -__version__ = "3.13.0" +__version__ = "3.14.0" version = __version__ version_info = tuple(re.split(r"[-\.]", __version__)) diff --git a/uproot/write/TDirectory.py b/uproot3/write/TDirectory.py similarity index 72% rename from uproot/write/TDirectory.py rename to uproot3/write/TDirectory.py index 3221ed4c..2e098762 100644 --- a/uproot/write/TDirectory.py +++ b/uproot3/write/TDirectory.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE from __future__ import absolute_import @@ -8,9 +8,9 @@ import struct import uuid -import uproot.write.sink.cursor -import uproot.write.TKey -import uproot.write.util +import uproot3.write.sink.cursor +import uproot3.write.TKey +import uproot3.write.util class TDirectory(object): def __init__(self, tfile, fName, fNbytesName, fSeekDir=100, fSeekParent=0, fSeekKeys=0, allocationbytes=128, growfactor=8): @@ -21,32 +21,32 @@ def __init__(self, tfile, fName, fNbytesName, fSeekDir=100, fSeekParent=0, fSeek self.fSeekDir = fSeekDir self.fSeekParent = fSeekParent self.fSeekKeys = fSeekKeys - self.fDatimeC = uproot.write.util.datime() + self.fDatimeC = uproot3.write.util.datime() self.fUUID = b'\x00\x01' + uuid.uuid1().bytes self.allocationbytes = allocationbytes self.growfactor = growfactor - self.headkey = uproot.write.TKey.TKey(fClassName = b"TFile", - fName = self.fName, - fObjlen = self._format2.size, - fSeekKey = self.fSeekKeys) + self.headkey = uproot3.write.TKey.TKey(fClassName = b"TFile", + fName = self.fName, + fObjlen = self._format2.size, + fSeekKey = self.fSeekKeys) self.keys = collections.OrderedDict() self.maxcycle = collections.Counter() def size(self): - return uproot.write.sink.cursor.Cursor.length_string(self.fName) + 1 + self._format1.size + len(self.fUUID) + 12 + return uproot3.write.sink.cursor.Cursor.length_string(self.fName) + 1 + self._format1.size + len(self.fUUID) + 12 def update(self): fVersion = 5 - fDatimeM = uproot.write.util.datime() + fDatimeM = uproot3.write.util.datime() self.cursor.update_fields(self.sink, self._format1, fVersion, self.fDatimeC, fDatimeM, self.fNbytesKeys, self.fNbytesName, self.fSeekDir, self.fSeekParent, self.fSeekKeys) def write(self, 
cursor, sink): cursor.write_string(sink, self.fName) cursor.write_data(sink, b"\x00") - self.cursor = uproot.write.sink.cursor.Cursor(cursor.index) + self.cursor = uproot3.write.sink.cursor.Cursor(cursor.index) self.sink = sink self.update() @@ -64,11 +64,11 @@ def writekeys(self, cursor): self.fSeekKeys = cursor.index self.fNbytesKeys = self._nbyteskeys() - self.tfile._expandfile(uproot.write.sink.cursor.Cursor(self.fSeekKeys + self.allocationbytes)) + self.tfile._expandfile(uproot3.write.sink.cursor.Cursor(self.fSeekKeys + self.allocationbytes)) - self.keycursor = uproot.write.sink.cursor.Cursor(self.fSeekKeys) + self.keycursor = uproot3.write.sink.cursor.Cursor(self.fSeekKeys) self.headkey.write(self.keycursor, self.sink) - self.nkeycursor = uproot.write.sink.cursor.Cursor(self.keycursor.index) + self.nkeycursor = uproot3.write.sink.cursor.Cursor(self.keycursor.index) self.keycursor.write_fields(self.sink, self._format2, len(self.keys)) for key in self.keys.values(): key.write(self.keycursor, self.sink) @@ -83,7 +83,7 @@ def setkey(self, newkey): newcursor = None if (newkey.fName, newkey.fCycle) in self.keys: self.headkey.fObjlen -= self.keys[(newkey.fName, newkey.fCycle)].fKeylen - newcursor = uproot.write.sink.cursor.Cursor(self.fSeekKeys) + newcursor = uproot3.write.sink.cursor.Cursor(self.fSeekKeys) self.headkey.fObjlen += newkey.fKeylen self.keys[(newkey.fName, newkey.fCycle)] = newkey @@ -91,7 +91,7 @@ def setkey(self, newkey): self.fNbytesKeys = self._nbyteskeys() while self.fNbytesKeys > self.allocationbytes: self.allocationbytes *= self.growfactor - newcursor = uproot.write.sink.cursor.Cursor(self.tfile._fSeekFree) + newcursor = uproot3.write.sink.cursor.Cursor(self.tfile._fSeekFree) if newcursor is not None: self.writekeys(newcursor) @@ -112,4 +112,4 @@ def delkey(self, name, cycle): del self.keys[(name, cycle)] self.fNbytesKeys = self._nbyteskeys() - self.writekeys(uproot.write.sink.cursor.Cursor(self.fSeekKeys)) + self.writekeys(uproot3.write.sink.cursor.Cursor(self.fSeekKeys)) diff --git a/uproot/write/TFile.py b/uproot3/write/TFile.py similarity index 77% rename from uproot/write/TFile.py rename to uproot3/write/TFile.py index c4f3aa11..7cc9ebcc 100644 --- a/uproot/write/TFile.py +++ b/uproot3/write/TFile.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE from __future__ import absolute_import @@ -17,18 +17,18 @@ import uproot_methods.convert -import uproot.const -import uproot.source.file -import uproot.write.compress -import uproot.write.sink.cursor -import uproot.write.sink.file -import uproot.write.streamers -import uproot.write.TDirectory -import uproot.write.TFree -import uproot.write.TKey -from uproot.rootio import nofilter -from uproot.write.objects.util import Util -from uproot.write.objects.TTree import TTree +import uproot3.const +import uproot3.source.file +import uproot3.write.compress +import uproot3.write.sink.cursor +import uproot3.write.sink.file +import uproot3.write.streamers +import uproot3.write.TDirectory +import uproot3.write.TFree +import uproot3.write.TKey +from uproot3.rootio import nofilter +from uproot3.write.objects.util import Util +from uproot3.write.objects.TTree import TTree class TFileUpdate(object): def __init__(self, path): @@ -49,7 +49,7 @@ def _openfile(self, path, compression): self.compression = compression self._treedict = {} - self._sink = 
uproot.write.sink.file.FileSink(path) + self._sink = uproot3.write.sink.file.FileSink(path) self._path = path self._filename = os.path.split(path)[1].encode("utf-8") @@ -74,10 +74,10 @@ def _normalizewhere(where): def newtree(self, name, branches={}, title="", **options): if "compression" in options: - self.__setitem__(name, uproot.write.objects.TTree.newtree(branches, title, compression=options["compression"])) + self.__setitem__(name, uproot3.write.objects.TTree.newtree(branches, title, compression=options["compression"])) del options["compression"] else: - self.__setitem__(name, uproot.write.objects.TTree.newtree(branches, title)) + self.__setitem__(name, uproot3.write.objects.TTree.newtree(branches, title)) if len(options) > 0: raise TypeError("{0} not supported".format(options)) @@ -88,19 +88,19 @@ def __setitem__(self, where, what): what = uproot_methods.convert.towriteable(what) elif what.__class__.__name__ == "newtree": what = TTree(where, what, self) - cursor = uproot.write.sink.cursor.Cursor(self._fSeekFree) - newkey = uproot.write.TKey.TKey(fClassName = what._fClassName, - fName = where, - fTitle = what._fTitle, - fObjlen = 0, - fSeekKey = self._fSeekFree, - fSeekPdir = self._fBEGIN, - fCycle = cycle if cycle is not None else self._rootdir.newcycle(where)) + cursor = uproot3.write.sink.cursor.Cursor(self._fSeekFree) + newkey = uproot3.write.TKey.TKey(fClassName = what._fClassName, + fName = where, + fTitle = what._fTitle, + fObjlen = 0, + fSeekKey = self._fSeekFree, + fSeekPdir = self._fBEGIN, + fCycle = cycle if cycle is not None else self._rootdir.newcycle(where)) if what.__class__.__name__ == "newtree" or what.__class__.__name__ == "TTree": # Need to (re)attach the cycle number to allow getitem to access writable TTree tree_where = where + b";" + str(newkey.fCycle).encode("utf-8") self._treedict[tree_where] = what - newkeycursor = uproot.write.sink.cursor.Cursor(newkey.fSeekKey) + newkeycursor = uproot3.write.sink.cursor.Cursor(newkey.fSeekKey) newkey.write(cursor, self._sink) what._write(self, cursor, where, self.compression, newkey, newkeycursor, self.util) self._expandfile(cursor) @@ -117,7 +117,7 @@ def update(self, *args, **kwargs): items = chain(items, kwargs.items()) self.util = Util() - cursor = uproot.write.sink.cursor.Cursor(self._fSeekFree) + cursor = uproot3.write.sink.cursor.Cursor(self._fSeekFree) for where, what in items: where, cycle = self._normalizewhere(where) @@ -128,7 +128,7 @@ def update(self, *args, **kwargs): elif what.__class__.__name__ == "newtree": what = TTree(where, what, self) - newkey = uproot.write.TKey.TKey( + newkey = uproot3.write.TKey.TKey( fClassName=what._fClassName, fName=where, fTitle=what._fTitle, @@ -142,7 +142,7 @@ def update(self, *args, **kwargs): tree_where = where + b";" + str(newkey.fCycle).encode("utf-8") self._treedict[tree_where] = what - newkeycursor = uproot.write.sink.cursor.Cursor(newkey.fSeekKey) + newkeycursor = uproot3.write.sink.cursor.Cursor(newkey.fSeekKey) newkey.write(cursor, self._sink) what._write(self, cursor, where, self.compression, newkey, newkeycursor, self.util) @@ -170,7 +170,7 @@ def __delitem__(self, where): raise KeyError("ROOT directory does not contain key {0}".format(where)) def _reopen(self): - return uproot.open(self._path, localsource=lambda path: uproot.source.file.FileSource(path, **uproot.source.file.FileSource.defaults)) + return uproot3.open(self._path, localsource=lambda path: uproot3.source.file.FileSource(path, **uproot3.source.file.FileSource.defaults)) @property def compression(self): 
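# --- Illustrative usage sketch, not part of the patch: the writing classes renamed
# --- above (TFileRecreate, TKey, write.compress.ZLIB) are reached through the usual
# --- uproot 3 entry points, assuming recreate and the TTree-writing extend() stay
# --- available as in uproot 3.x; the output file, tree and branch names are hypothetical.
import numpy
import uproot3

with uproot3.recreate("out.root", compression=uproot3.write.compress.ZLIB(4)) as f:
    f["greeting"] = "hello world"                # converted to a TObjString on write
    f.newtree("t", {"x": numpy.float64})         # TFileUpdate.newtree as defined above
    f["t"].extend({"x": numpy.arange(10.0)})     # fill the writable TTree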
@@ -265,7 +265,7 @@ def __exit__(self, type, value, traceback): self.close() class TFileRecreate(TFileUpdate): - def __init__(self, path, compression=uproot.write.compress.ZLIB(1)): + def __init__(self, path, compression=uproot3.write.compress.ZLIB(1)): self._openfile(path, compression) self._writeheader() self._writerootdir() @@ -285,21 +285,21 @@ def compression(self): if self._fCompress == 0: return None else: - return uproot.write.compress.algo[self._fCompress // 100](self._fCompress % 100) + return uproot3.write.compress.algo[self._fCompress // 100](self._fCompress % 100) @compression.setter def compression(self, value): if value is None: self._fCompress = 0 else: - if not isinstance(value, uproot.write.compress.Compression): - raise TypeError("uproot.write.TFile.compression must be a Compression object like ZLIB(4)") + if not isinstance(value, uproot3.write.compress.Compression): + raise TypeError("uproot3.write.TFile.compression must be a Compression object like ZLIB(4)") self._fCompress = value.code if hasattr(self, "_compresscursor"): self._compresscursor.update_fields(self._sink, self._format3, self._fCompress) def _writeheader(self): - cursor = uproot.write.sink.cursor.Cursor(0) + cursor = uproot3.write.sink.cursor.Cursor(0) self._fVersion = self._fVersion = 1061800 self._fBEGIN = 100 cursor.write_fields(self._sink, self._format1, b"root", self._fVersion, self._fBEGIN) @@ -308,31 +308,31 @@ def _writeheader(self): self._fSeekFree = 0 self._fNbytesFree = 0 self._nfree = 0 - self._endcursor = uproot.write.sink.cursor.Cursor(cursor.index) + self._endcursor = uproot3.write.sink.cursor.Cursor(cursor.index) cursor.write_fields(self._sink, self._format_end, self._fEND, self._fSeekFree, self._fNbytesFree, self._nfree) self._fNbytesName = 2*len(self._filename) + 36 + 8 # + 8 because two fields in TKey are 'q' rather than 'i' fUnits = 4 cursor.write_fields(self._sink, self._format2, self._fNbytesName, fUnits) - self._compresscursor = uproot.write.sink.cursor.Cursor(cursor.index) + self._compresscursor = uproot3.write.sink.cursor.Cursor(cursor.index) cursor.write_fields(self._sink, self._format3, self._fCompress) self._fSeekInfo = 0 - self._seekcursor = uproot.write.sink.cursor.Cursor(cursor.index) + self._seekcursor = uproot3.write.sink.cursor.Cursor(cursor.index) cursor.write_fields(self._sink, self._format_seekinfo, self._fSeekInfo) self._fNbytesInfo = 0 - self._nbytescursor = uproot.write.sink.cursor.Cursor(cursor.index) + self._nbytescursor = uproot3.write.sink.cursor.Cursor(cursor.index) cursor.write_fields(self._sink, self._format_nbytesinfo, self._fNbytesInfo) cursor.write_data(self._sink, b'\x00\x01' + uuid.uuid1().bytes) def _expandfile(self, cursor): if cursor.index > self._fSeekFree: - freecursor = uproot.write.sink.cursor.Cursor(cursor.index) - freekey = uproot.write.TKey.TKey(b"TFile", self._filename, fObjlen=0, fSeekKey=cursor.index, fSeekPdir=self._fBEGIN) - freeseg = uproot.write.TFree.TFree(cursor.index + freekey.fNbytes) + freecursor = uproot3.write.sink.cursor.Cursor(cursor.index) + freekey = uproot3.write.TKey.TKey(b"TFile", self._filename, fObjlen=0, fSeekKey=cursor.index, fSeekPdir=self._fBEGIN) + freeseg = uproot3.write.TFree.TFree(cursor.index + freekey.fNbytes) freekey.fObjlen = freeseg.size() freekey.fNbytes += freekey.fObjlen @@ -346,11 +346,11 @@ def _expandfile(self, cursor): self._endcursor.update_fields(self._sink, self._format_end, self._fEND, self._fSeekFree, self._fNbytesFree, self._nfree) def _writerootdir(self): - cursor = 
uproot.write.sink.cursor.Cursor(self._fBEGIN) + cursor = uproot3.write.sink.cursor.Cursor(self._fBEGIN) - self._rootdir = uproot.write.TDirectory.TDirectory(self, self._filename, self._fNbytesName) + self._rootdir = uproot3.write.TDirectory.TDirectory(self, self._filename, self._fNbytesName) - key = uproot.write.TKey.TKey(b"TFile", self._filename, fObjlen=self._rootdir.size()) + key = uproot3.write.TKey.TKey(b"TFile", self._filename, fObjlen=self._rootdir.size()) key.write(cursor, self._sink) self._rootdir.write(cursor, self._sink) @@ -360,16 +360,16 @@ def _writestreamers(self): self._fSeekInfo = self._fSeekFree self._seekcursor.update_fields(self._sink, self._format_seekinfo, self._fSeekInfo) - cursor = uproot.write.sink.cursor.Cursor(self._fSeekInfo) - streamerkey = uproot.write.TKey.TKey32(fClassName = b"TList", - fName = b"StreamerInfo", - fTitle = b"Doubly linked list", - fSeekKey = self._fSeekInfo, - fSeekPdir = self._fBEGIN) - streamerkeycursor = uproot.write.sink.cursor.Cursor(self._fSeekInfo) + cursor = uproot3.write.sink.cursor.Cursor(self._fSeekInfo) + streamerkey = uproot3.write.TKey.TKey32(fClassName = b"TList", + fName = b"StreamerInfo", + fTitle = b"Doubly linked list", + fSeekKey = self._fSeekInfo, + fSeekPdir = self._fBEGIN) + streamerkeycursor = uproot3.write.sink.cursor.Cursor(self._fSeekInfo) streamerkey.write(cursor, self._sink) - uproot.write.compress.write(self, cursor, uproot.write.streamers.streamers, self.compression, streamerkey, streamerkeycursor) + uproot3.write.compress.write(self, cursor, uproot3.write.streamers.streamers, self.compression, streamerkey, streamerkeycursor) self._fNbytesInfo = streamerkey.fNbytes self._nbytescursor.update_fields(self._sink, self._format_nbytesinfo, self._fNbytesInfo) @@ -377,7 +377,7 @@ def _writestreamers(self): self._expandfile(cursor) def _writerootkeys(self): - self._rootdir.writekeys(uproot.write.sink.cursor.Cursor(self._fSeekFree)) + self._rootdir.writekeys(uproot3.write.sink.cursor.Cursor(self._fSeekFree)) class TFileCreate(TFileRecreate): def __init__(self, path): diff --git a/uproot/write/TFree.py b/uproot3/write/TFree.py similarity index 97% rename from uproot/write/TFree.py rename to uproot3/write/TFree.py index 1fe6224e..430807af 100644 --- a/uproot/write/TFree.py +++ b/uproot3/write/TFree.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE from __future__ import absolute_import diff --git a/uproot/write/TKey.py b/uproot3/write/TKey.py similarity index 83% rename from uproot/write/TKey.py rename to uproot3/write/TKey.py index 53b99c0c..8e661695 100644 --- a/uproot/write/TKey.py +++ b/uproot3/write/TKey.py @@ -1,13 +1,13 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE from __future__ import absolute_import import struct -import uproot.write.sink.cursor -import uproot.write.util +import uproot3.write.sink.cursor +import uproot3.write.util class BasketKey(object): def __init__(self, fName, fTitle, fNevBuf, fNevBufSize, fObjlen=0, fSeekKey=100, fSeekPdir=0, fBufferSize=0): @@ -19,7 +19,7 @@ def __init__(self, fName, fTitle, fNevBuf, fNevBufSize, fObjlen=0, fSeekKey=100, self.fSeekKey = fSeekKey self.fSeekPdir = fSeekPdir self.fCycle = 0 - self.fDatime = uproot.write.util.datime() + self.fDatime = 
uproot3.write.util.datime() self.fNbytes = self.fObjlen + self.fKeylen self.fBufferSize = fBufferSize self.fNevBuf = fNevBuf @@ -29,7 +29,7 @@ def __init__(self, fName, fTitle, fNevBuf, fNevBufSize, fObjlen=0, fSeekKey=100, @property def fKeylen(self): - return self._format1.size + uproot.write.sink.cursor.Cursor.length_strings([self.fClassName, self.fName, self.fTitle]) + self._format_basketkey.size + 1 + return self._format1.size + uproot3.write.sink.cursor.Cursor.length_strings([self.fClassName, self.fName, self.fTitle]) + self._format_basketkey.size + 1 @property def fLast(self): @@ -39,7 +39,7 @@ def update(self): self.cursor.update_fields(self.sink, self._format1, self.fNbytes, self._version, self.fObjlen, self.fDatime, self.fKeylen, self.fCycle, self.fSeekKey, self.fSeekPdir) def write(self, cursor, sink, isjagged=False): - self.cursor = uproot.write.sink.cursor.Cursor(cursor.index) + self.cursor = uproot3.write.sink.cursor.Cursor(cursor.index) self.sink = sink self.update() @@ -73,12 +73,12 @@ def __init__(self, fClassName, fName, fTitle=b"", fObjlen=0, fSeekKey=100, fSeek self.fSeekKey = fSeekKey self.fSeekPdir = fSeekPdir self.fCycle = fCycle - self.fDatime = uproot.write.util.datime() + self.fDatime = uproot3.write.util.datime() self.fNbytes = self.fObjlen + self.fKeylen @property def fKeylen(self): - return self._format1.size + uproot.write.sink.cursor.Cursor.length_strings([self.fClassName, self.fName, self.fTitle]) + return self._format1.size + uproot3.write.sink.cursor.Cursor.length_strings([self.fClassName, self.fName, self.fTitle]) def update(self): self.cursor.update_fields(self.sink, self._format1, self.fNbytes, self._version, self.fObjlen, self.fDatime, self.fKeylen, self.fCycle, self.fSeekKey, self.fSeekPdir) @@ -86,7 +86,7 @@ def update(self): def write(self, cursor, sink, isjagged=False): if isjagged: raise Exception("isjagged flag should be False") - self.cursor = uproot.write.sink.cursor.Cursor(cursor.index) + self.cursor = uproot3.write.sink.cursor.Cursor(cursor.index) self.sink = sink self.update() diff --git a/uproot/source/__init__.py b/uproot3/write/__init__.py similarity index 59% rename from uproot/source/__init__.py rename to uproot3/write/__init__.py index 9f2be71d..d0cdd065 100644 --- a/uproot/source/__init__.py +++ b/uproot3/write/__init__.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE -from __future__ import absolute_import \ No newline at end of file +from __future__ import absolute_import diff --git a/uproot/write/compress.py b/uproot3/write/compress.py similarity index 92% rename from uproot/write/compress.py rename to uproot3/write/compress.py index b44308f8..c0883fd1 100644 --- a/uproot/write/compress.py +++ b/uproot3/write/compress.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE from __future__ import absolute_import @@ -10,8 +10,8 @@ import numpy -import uproot -import uproot.const +import uproot3 +import uproot3.const class Compression(object): def __init__(self, level): @@ -31,7 +31,7 @@ def level(self, value): if not 0 <= value <= 9: raise ValueError("Compression level must be between 0 and 9 (inclusive)") self._level = int(value) - + @property def pair(self): for const, cls in algo.items(): @@ -49,9 +49,9 @@ class 
ZLIB(Compression): pass class LZMA(Compression): pass class LZ4(Compression): pass -algo = {uproot.const.kZLIB: ZLIB, - uproot.const.kLZMA: LZMA, - uproot.const.kLZ4: LZ4} +algo = {uproot3.const.kZLIB: ZLIB, + uproot3.const.kLZMA: LZMA, + uproot3.const.kLZ4: LZ4} def write(context, cursor, givenbytes, compression, key, keycursor, isjagged=False): retaincursor = copy.copy(keycursor) @@ -84,7 +84,7 @@ def write(context, cursor, givenbytes, compression, key, keycursor, isjagged=Fal u2 = (uncompressedbytes >> 8) & 0xff u3 = (uncompressedbytes >> 16) & 0xff - if algorithm == uproot.const.kZLIB: + if algorithm == uproot3.const.kZLIB: algo = b"ZL" import zlib after_compressed = zlib.compress(givenbytes, level) @@ -103,7 +103,7 @@ def write(context, cursor, givenbytes, compression, key, keycursor, isjagged=Fal key.write(keycursor, context._sink, isjagged) cursor.write_data(context._sink, givenbytes) - elif algorithm == uproot.const.kLZ4: + elif algorithm == uproot3.const.kLZ4: algo = b"L4" try: import xxhash @@ -134,7 +134,7 @@ def write(context, cursor, givenbytes, compression, key, keycursor, isjagged=Fal key.write(keycursor, context._sink, isjagged) cursor.write_data(context._sink, givenbytes) - elif algorithm == uproot.const.kLZMA: + elif algorithm == uproot3.const.kLZMA: algo = b"XZ" try: import lzma @@ -160,10 +160,10 @@ def write(context, cursor, givenbytes, compression, key, keycursor, isjagged=Fal key.write(keycursor, context._sink, isjagged) cursor.write_data(context._sink, givenbytes) - elif algorithm == uproot.const.kOldCompressionAlgo: + elif algorithm == uproot3.const.kOldCompressionAlgo: raise ValueError("unsupported compression algorithm: 'old' (according to ROOT comments, hasn't been used in 20+ years!)") else: raise ValueError("Unrecognized compression algorithm: {0}".format(algorithm)) if "remainingbytes" in locals() and len(remainingbytes)>0: - uproot.write.compress.write(context, cursor, remainingbytes, compression, key, retaincursor) + uproot3.write.compress.write(context, cursor, remainingbytes, compression, key, retaincursor) diff --git a/uproot/write/objects/TH.py b/uproot3/write/objects/TH.py similarity index 94% rename from uproot/write/objects/TH.py rename to uproot3/write/objects/TH.py index 63e891c4..451f65e7 100644 --- a/uproot/write/objects/TH.py +++ b/uproot3/write/objects/TH.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE from __future__ import absolute_import @@ -9,10 +9,10 @@ import numpy -import uproot.const -import uproot.write.compress -import uproot.write.sink.cursor -from uproot.rootio import _bytesid +import uproot3.const +import uproot3.write.compress +import uproot3.write.sink.cursor +from uproot3.rootio import _bytesid class TH(object): def __init__(self, histogram): @@ -184,7 +184,7 @@ def _put_tnamed(self, cursor, name, title): buff = (self._put_tobject(cursor) + cursor.put_string(name) + cursor.put_string(title)) length = len(buff) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff _format_tarray = struct.Struct(">i") @@ -198,7 +198,7 @@ def _put_tobjstring(self, cursor, value, bit=0): vers = 1 buff = cursor.put_string(value) length = len(buff) + self._format_tobjstring.size - cnt = numpy.int64(length - 4) | 
uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_tobjstring, cnt, vers, 1, bit, numpy.uint32(0x03000000)) + buff _format_tlist1 = struct.Struct(">i") @@ -217,7 +217,7 @@ def _put_tlist(self, cursor, values): buff += b"" # cursor.bytes(source, n) givenbytes += buff length = len(givenbytes) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + givenbytes _format_tattline = struct.Struct(">hhh") @@ -230,7 +230,7 @@ def _put_tattline(self, cursor): self._fields["_fLineStyle"], self._fields["_fLineWidth"])) length = len(buff) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff _format_tattfill = struct.Struct(">hh") @@ -242,7 +242,7 @@ def _put_tattfill(self, cursor): self._fields["_fFillColor"], self._fields["_fFillStyle"])) length = len(buff) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff _format_tattmarker = struct.Struct(">hhf") @@ -255,7 +255,7 @@ def _put_tattmarker(self, cursor): self._fields["_fMarkerStyle"], self._fields["_fMarkerSize"])) length = len(buff) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff _format_tattaxis = struct.Struct(">ihhhfffffhh") @@ -276,7 +276,7 @@ def _put_tattaxis(self, cursor, axis): axis["_fTitleColor"], axis["_fTitleFont"])) length = len(buff) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff _format_taxis_1 = struct.Struct(">idd") @@ -301,7 +301,7 @@ def _put_taxis(self, cursor, axis): self.util.put_objany(cursor, (axis["_fLabels"], "THashList"), self.keycursor) + self.util.put_objany(cursor, (axis["_fModLabs"], "TList"), self.keycursor)) length = len(buff) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff _format_th1_1 = struct.Struct(">i") @@ -342,7 +342,7 @@ def _put_th1(self, cursor, name): self._fields["_fBinStatErrOpt"], self._fields["_fStatOverflows"])) length = len(buff) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff _format_th2_1 = struct.Struct(">dddd") @@ -357,7 +357,7 @@ def _put_th2(self, cursor, name): self._fields["_fTsumwy2"], self._fields["_fTsumwxy"])) length = len(buff) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff _format_th3_1 = struct.Struct(">ddddddd") @@ -375,13 +375,13 @@ def _put_th3(self, 
cursor, name): self._fields["_fTsumwxz"], self._fields["_fTsumwyz"])) length = len(buff) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff def _put_tatt3d(self, cursor): copy_cursor = copy(cursor) cursor.skip(self._format_cntvers.size) - cnt = numpy.int64(self._format_cntvers.size - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(self._format_cntvers.size - 4) | uproot3.const.kByteCountMask vers = 1 return copy_cursor.put_fields(self._format_cntvers, cnt, vers) @@ -391,7 +391,7 @@ def _put_th1d(self, cursor, name): vers = 3 buff = self._put_th1(cursor, name) + self._put_tarray(cursor, self._valuesarray) length = len(buff) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff def _put_th2d(self, cursor, name): @@ -400,7 +400,7 @@ def _put_th2d(self, cursor, name): vers = 4 buff = self._put_th2(cursor, name) + self._put_tarray(cursor, self._valuesarray) length = len(buff) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff def _put_th3d(self, cursor, name): @@ -409,7 +409,7 @@ def _put_th3d(self, cursor, name): vers = 4 buff = self._put_th3(cursor, name) + self._put_tarray(cursor, self._valuesarray) length = len(buff) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff _format_tprofile = struct.Struct(">idddd") @@ -450,6 +450,6 @@ def _write(self, context, cursor, name, compression, key, keycursor, util): self._fields["_fTmax"], self._fields["_fTsumwt"], self._fields["_fTsumwt2"]) + self._put_tarray(cursor, self._fields["_fBinSumw2"])) length = len(buff) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask givenbytes = copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff - uproot.write.compress.write(context, write_cursor, givenbytes, compression, key, keycursor) + uproot3.write.compress.write(context, write_cursor, givenbytes, compression, key, keycursor) diff --git a/uproot/write/objects/TObjString.py b/uproot3/write/objects/TObjString.py similarity index 72% rename from uproot/write/objects/TObjString.py rename to uproot3/write/objects/TObjString.py index 58993ea2..9b588ce1 100644 --- a/uproot/write/objects/TObjString.py +++ b/uproot3/write/objects/TObjString.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE from __future__ import absolute_import @@ -9,9 +9,9 @@ import numpy -import uproot.const -import uproot.write.compress -import uproot.write.sink.cursor +import uproot3.const +import uproot3.write.compress +import uproot3.write.sink.cursor class TObjString(object): def __init__(self, string): @@ -32,6 +32,6 @@ def _write(self, context, cursor, name, compression, key, keycursor, util): vers = 1 buff = cursor.put_string(self.value) length = 
len(buff) + self._format.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask - givenbytes = copy_cursor.put_fields(self._format, cnt, vers, 1, 0, uproot.const.kNotDeleted) + buff - uproot.write.compress.write(context, write_cursor, givenbytes, compression, key, keycursor) + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask + givenbytes = copy_cursor.put_fields(self._format, cnt, vers, 1, 0, uproot3.const.kNotDeleted) + buff + uproot3.write.compress.write(context, write_cursor, givenbytes, compression, key, keycursor) diff --git a/uproot/write/objects/TTree.py b/uproot3/write/objects/TTree.py similarity index 87% rename from uproot/write/objects/TTree.py rename to uproot3/write/objects/TTree.py index 8cba064d..8dcfdd65 100644 --- a/uproot/write/objects/TTree.py +++ b/uproot3/write/objects/TTree.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE import struct from copy import copy @@ -8,18 +8,18 @@ import math import numpy -import awkward - -import uproot -import uproot.const -from uproot.rootio import _bytesid -from uproot.rootio import nofilter -from uproot.rootio import _memsize -from uproot._util import _tobytes -import uproot.write.compress -import uproot.write.sink.cursor -from uproot.write.TKey import BasketKey -from uproot.write.objects.util import Util +import awkward0 + +import uproot3 +import uproot3.const +from uproot3.rootio import _bytesid +from uproot3.rootio import nofilter +from uproot3.rootio import _memsize +from uproot3._util import _tobytes +import uproot3.write.compress +import uproot3.write.sink.cursor +from uproot3.write.TKey import BasketKey +from uproot3.write.objects.util import Util class newbranch(object): @@ -126,11 +126,11 @@ def extend(self, branchdict): #Convert to numpy arrays of required dtype for key, value in branchdict.items(): - if not isinstance(value, awkward.array.jagged.JaggedArray): + if not isinstance(value, awkward0.array.jagged.JaggedArray): branchdict[key] = numpy.array(value, dtype=self._branches[key]._branch.type, copy=False) for key, value in branchdict.items(): - if isinstance(value, awkward.array.jagged.JaggedArray): + if isinstance(value, awkward0.array.jagged.JaggedArray): self._branches[key].newbasket(value) elif value.ndim == 1: self._branches[key].newbasket(value) @@ -148,7 +148,7 @@ def title(self): @property def numentries(self): - t = uproot.open(self._file._path)[self.name] + t = uproot3.open(self._file._path)[self.name] return t.numentries @property @@ -156,83 +156,83 @@ def numbranches(self): return len(self._branches) def iterkeys(self, recursive=False, filtername=nofilter, filtertitle=nofilter, aliases=True): - t = uproot.open(self._file._path)[self.name] + t = uproot3.open(self._file._path)[self.name] return t.iterkeys(recursive, filtername, filtertitle, aliases) def itervalues(self, recursive=False, filtername=nofilter, filtertitle=nofilter): - t = uproot.open(self._file._path)[self.name] + t = uproot3.open(self._file._path)[self.name] return t.itervalues(recursive, filtername, filtertitle) def iteritems(self, recursive=False, filtername=nofilter, filtertitle=nofilter, aliases=True): - t = uproot.open(self._file._path)[self.name] + t = uproot3.open(self._file._path)[self.name] return t.iteritems(recursive, filtername, filtertitle, aliases) def keys(self, recursive=False, filtername=nofilter, filtertitle=nofilter, aliases=True): - t = 
uproot.open(self._file._path)[self.name] + t = uproot3.open(self._file._path)[self.name] return t.keys(recursive, filtername, filtertitle, aliases) def values(self, recursive=False, filtername=nofilter, filtertitle=nofilter): - t = uproot.open(self._file._path)[self.name] + t = uproot3.open(self._file._path)[self.name] return t.values(recursive, filtername, filtertitle) def items(self, recursive=False, filtername=nofilter, filtertitle=nofilter, aliases=True): - t = uproot.open(self._file._path)[self.name] + t = uproot3.open(self._file._path)[self.name] return t.items(recursive, filtername, filtertitle, aliases) def allkeys(self, filtername=nofilter, filtertitle=nofilter, aliases=True): - t = uproot.open(self._file._path)[self.name] + t = uproot3.open(self._file._path)[self.name] return t.allkeys(filtername, filtertitle, aliases) def allvalues(self, filtername=nofilter, filtertitle=nofilter): - t = uproot.open(self._file._path)[self.name] + t = uproot3.open(self._file._path)[self.name] return t.allvalues(filtername, filtertitle) def allitems(self, filtername=nofilter, filtertitle=nofilter, aliases=True): - t = uproot.open(self._file._path)[self.name] + t = uproot3.open(self._file._path)[self.name] return t.allitems(filtername, filtertitle, aliases) def get(self, name, recursive=True, filtername=nofilter, filtertitle=nofilter, aliases=True): - t = uproot.open(self._file._path)[self.name] + t = uproot3.open(self._file._path)[self.name] return t.get(name, recursive, filtername, filtertitle, aliases) def __contains__(self, name): - t = uproot.open(self._file._path)[self.name] + t = uproot3.open(self._file._path)[self.name] return t.__contains__(name) def mempartitions(self, numbytes, branches=None, entrystart=None, entrystop=None, keycache=None, linear=True): - t = uproot.open(self._file._path)[self.name] + t = uproot3.open(self._file._path)[self.name] return t.mempartitions(numbytes, branches, entrystart, entrystop, keycache, linear) def clusters(self, branches=None, entrystart=None, entrystop=None, strict=False): - t = uproot.open(self._file._path)[self.name] + t = uproot3.open(self._file._path)[self.name] return t.clusters(branches, entrystart, entrystop, strict) def array(self, branch, interpretation=None, entrystart=None, entrystop=None, flatten=False, awkwardlib=None, cache=None, basketcache=None, keycache=None, executor=None, blocking=True): - t = uproot.open(self._file._path)[self.name] + t = uproot3.open(self._file._path)[self.name] return t.array(branch, interpretation, entrystart, entrystop, flatten, awkwardlib, cache, basketcache, keycache, executor, blocking) def arrays(self, branches=None, outputtype=dict, namedecode=None, entrystart=None, entrystop=None, flatten=False, flatname=None, awkwardlib=None, cache=None, basketcache=None, keycache=None, executor=None, blocking=True): - t = uproot.open(self._file._path)[self.name] + t = uproot3.open(self._file._path)[self.name] return t.arrays(branches, outputtype, namedecode, entrystart, entrystop, flatten, flatname, awkwardlib, cache, basketcache, keycache, executor, blocking) def lazyarray(self, branch, interpretation=None, entrysteps=None, entrystart=None, entrystop=None, flatten=False, awkwardlib=None, cache=None, basketcache=None, keycache=None, executor=None, persistvirtual=False): - t = uproot.open(self._file._path)[self.name] + t = uproot3.open(self._file._path)[self.name] return t.lazyarray(branch, interpretation, entrysteps, entrystart, entrystop, flatten, awkwardlib, cache, basketcache, keycache, executor, persistvirtual) def 
lazyarrays(self, branches=None, namedecode="utf-8", entrysteps=None, entrystart=None, entrystop=None, flatten=False, profile=None, awkwardlib=None, cache=None, basketcache=None, keycache=None, executor=None, persistvirtual=False): - t = uproot.open(self._file._path)[self.name] + t = uproot3.open(self._file._path)[self.name] return t.lazyarrays(branches, namedecode, entrysteps, entrystart, entrystop, flatten, profile, awkwardlib, cache, basketcache, keycache, executor, persistvirtual) def iterate(self, branches=None, entrysteps=None, outputtype=dict, namedecode=None, reportentries=False, entrystart=None, entrystop=None, flatten=False, flatname=None, awkwardlib=None, cache=None, basketcache=None, keycache=None, executor=None, blocking=True): - t = uproot.open(self._file._path)[self.name] + t = uproot3.open(self._file._path)[self.name] return t.iterate(branches, entrysteps, outputtype, namedecode, reportentries, entrystart, entrystop, flatten, flatname, awkwardlib, cache, basketcache, keycache, executor, blocking) def show(self, foldnames=False, stream=sys.stdout): - t = uproot.open(self._file._path)[self.name] + t = uproot3.open(self._file._path)[self.name] return t.show(foldnames, stream) def matches(self, branches): - t = uproot.open(self._file._path)[self.name] + t = uproot3.open(self._file._path)[self.name] return t.matches(branches) def __len__(self): @@ -244,7 +244,7 @@ def __iter__(self): @property def pandas(self): - t = uproot.open(self._file._path)[self.name] + t = uproot3.open(self._file._path)[self.name] return t.pandas class TBranch(object): @@ -307,15 +307,15 @@ def newbasket(self, items, multidim=None): tree.branches = copy(temp_branches) - cursor = uproot.write.sink.cursor.Cursor(self._branch.file._fSeekFree) - tree.write_key = uproot.write.TKey.TKey(fClassName=self._treelvl1._tree.write_key.fClassName, - fName=self._treelvl1._tree.write_key.fName, - fTitle=self._treelvl1._tree.write_key.fTitle, - fObjlen=0, - fSeekKey=copy(self._branch.file._fSeekFree), - fSeekPdir=self._treelvl1._tree.write_key.fSeekPdir, - fCycle=self._treelvl1._tree.write_key.fCycle) - tree.keycursor = uproot.write.sink.cursor.Cursor(tree.write_key.fSeekKey) + cursor = uproot3.write.sink.cursor.Cursor(self._branch.file._fSeekFree) + tree.write_key = uproot3.write.TKey.TKey(fClassName=self._treelvl1._tree.write_key.fClassName, + fName=self._treelvl1._tree.write_key.fName, + fTitle=self._treelvl1._tree.write_key.fTitle, + fObjlen=0, + fSeekKey=copy(self._branch.file._fSeekFree), + fSeekPdir=self._treelvl1._tree.write_key.fSeekPdir, + fCycle=self._treelvl1._tree.write_key.fCycle) + tree.keycursor = uproot3.write.sink.cursor.Cursor(tree.write_key.fSeekKey) tree.write_key.write(cursor, self._branch.file._sink) tree.write(tree.file, cursor, self._treelvl1._tree.write_name, tree.write_key, copy(tree.keycursor), Util()) tree.file._expandfile(cursor) @@ -332,16 +332,16 @@ def newbasket(self, items, multidim=None): self._branch.fields["_fEntries"] = multidim self._branch.fields["_fEntryNumber"] = multidim self._branch.fields["_fBasketEntry"][self._branch.fields["_fWriteBasket"]] = self._branch.fields["_fEntries"] - if isinstance(items, awkward.array.jagged.JaggedArray): + if isinstance(items, awkward0.array.jagged.JaggedArray): givenbytes = b"" for i in range(items.shape[0]): givenbytes += _tobytes(numpy.array(items[i], dtype=self._branch.type)) else: givenbytes = _tobytes(numpy.array(items, dtype=self._branch.type, copy=False)) - cursor = uproot.write.sink.cursor.Cursor(self._branch.file._fSeekFree) + cursor = 
uproot3.write.sink.cursor.Cursor(self._branch.file._fSeekFree) self._branch.fields["_fBasketSeek"][self._branch.fields["_fWriteBasket"] - 1] = cursor.index - if isinstance(items, awkward.array.jagged.JaggedArray): + if isinstance(items, awkward0.array.jagged.JaggedArray): key = BasketKey(fName=self._branch.name, fTitle=self._treelvl1._tree.write_key.fName, fNevBuf=items.shape[0], @@ -357,10 +357,10 @@ def newbasket(self, items, multidim=None): fSeekKey=copy(self._branch.file._fSeekFree), fSeekPdir=copy(self._branch.file._fBEGIN), fBufferSize=32000) - keycursor = uproot.write.sink.cursor.Cursor(key.fSeekKey) + keycursor = uproot3.write.sink.cursor.Cursor(key.fSeekKey) key.write(cursor, self._branch.file._sink) - uproot.write.compress.write(self._branch.file, cursor, givenbytes, self._branch.compression, key, copy(keycursor)) - if isinstance(items, awkward.array.jagged.JaggedArray): + uproot3.write.compress.write(self._branch.file, cursor, givenbytes, self._branch.compression, key, copy(keycursor)) + if isinstance(items, awkward0.array.jagged.JaggedArray): # 3 looks like a harmless value for the first entry of offsetbytes # Relevant code - https://github.com/root-project/root/blob/master/tree/tree/src/TBasket.cxx#L921-L954 offsetbytes = [3, key.fKeylen] @@ -368,7 +368,7 @@ def newbasket(self, items, multidim=None): offsetbytes += [(len(items[i]) * numpy.dtype(self._branch.type).itemsize) + offsetbytes[-1]] offsetbytes += [0] offsetbytes = _tobytes(numpy.array(offsetbytes, dtype=">i4")) - uproot.write.compress.write(self._branch.file, cursor, offsetbytes, self._branch.compression, key, + uproot3.write.compress.write(self._branch.file, cursor, offsetbytes, self._branch.compression, key, copy(keycursor), isjagged=True) self._branch.file._expandfile(cursor) @@ -404,136 +404,136 @@ def title(self): @property def interpretation(self): - b = uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] return b.interpretation @property def countbranch(self): - b = uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] return b.countbranch @property def countleaf(self): - b = uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] return b.countleaf @property def numentries(self): - b = uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] return b.numentries @property def numbranches(self): - b = uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] return b.numbranches def iterkeys(self, recursive=False, filtername=nofilter, filtertitle=nofilter): - b = uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] return b.iterkeys(recursive, filtername, filtertitle) def itervalues(self, recursive=False, filtername=nofilter, filtertitle=nofilter): - b = uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] return b.itervalues(recursive, filtername, filtertitle) def iteritems(self, recursive=False, filtername=nofilter, filtertitle=nofilter): - b = 
uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] return b.iteritems(recursive, filtername, filtertitle) def keys(self, recursive=False, filtername=nofilter, filtertitle=nofilter): - b = uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] return b.keys(recursive, filtername, filtertitle) def values(self, recursive=False, filtername=nofilter, filtertitle=nofilter): - b = uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] return b.keys(recursive, filtername, filtertitle) def items(self, recursive=False, filtername=nofilter, filtertitle=nofilter): - b = uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] return b.items(recursive, filtername, filtertitle) def allkeys(self, recursive=False, filtername=nofilter, filtertitle=nofilter): - b = uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] return b.allkeys(recursive, filtername, filtertitle) def allvalues(self, filtername=nofilter, filtertitle=nofilter): - b = uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] return b.keys(filtername, filtertitle) def allitems(self, filtername=nofilter, filtertitle=nofilter): - b = uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] return b.allitems(filtername, filtertitle) def get(self, name, recursive=True, filtername=nofilter, filtertitle=nofilter, aliases=True): - b = uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] return b.get(name, recursive, filtername, filtertitle, aliases) @property def numbaskets(self): - b = uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] return b.numbaskets def uncompressedbytes(self, keycache=None): - b = uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] return b.uncompressedbhytes(keycache) def compressedbytes(self, keycache=None): - b = uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] return b.compressedbhytes(keycache) def compressionratio(self, keycache=None): - b = uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] return b.compressionratio(keycache) def numitems(self, interpretation=None, keycache=None): - b = uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] return b.numitems(interpretation, keycache) def basket_entrystart(self, i): - b = uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] 
return b.basket_entrystart(i) def basket_entrystop(self, i): - b = uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] return b.basket_entrystop(i) def basket_numentries(self, i): - b = uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] return b.basket_numentries(i) def basket_uncompressedbytes(self, i, keycache=None): - b = uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] return b.basket_uncompressedbytes(i, keycache) def basket_compressedbytes(self, i): - b = uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] return b.basked_compressedbytes(i) def basket_numitems(self, i, interpretation=None, keycache=None): - b = uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] return b.basket_numitems(i, interpretation, keycache) def basket(self, i, interpretation=None, entrystart=None, entrystop=None, flatten=False, awkwardlib=None, cache=None, basketcache=None, keycache=None): - b = uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] return b.basket(i, interpretation, entrystart, entrystop, flatten, awkwardlib, cache, basketcache, keycache) def baskets(self, interpretation=None, entrystart=None, entrystop=None, flatten=False, awkwardlib=None, cache=None, basketcache=None, keycache=None, reportentries=False, executor=None, blocking=True): - b = uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] return b.baskets(interpretation, entrystart, entrystop, flatten, awkwardlib, cache, basketcache, keycache, reportentries, executor, blocking) def iterate_baskets(self, interpretation=None, entrystart=None, entrystop=None, flatten=False, awkwardlib=None, cache=None, basketcache=None, keycache=None, reportentries=False): - b = uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] return b.baskets(interpretation, entrystart, entrystop, flatten, awkwardlib, cache, basketcache, keycache, reportentries) def array(self, interpretation=None, entrystart=None, entrystop=None, flatten=False, awkwardlib=None, cache=None, basketcache=None, keycache=None, executor=None, blocking=True): - b = uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] return b.array(interpretation, entrystart, entrystop, flatten, awkwardlib, cache, basketcache, keycache, executor, blocking) def mempartitions(self, numbytes, entrystart=None, entrystop=None, keycache=None, linear=True): - b = uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] return b.mempartitions(numbytes, entrystart, entrystop, keycache, linear) def lazyarray(self, interpretation=None, entrysteps=None, entrystart=None, entrystop=None, flatten=False, awkwardlib=None, cache=None, basketcache=None, 
keycache=None, executor=None, persistvirtual=False): - b = uproot.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] + b = uproot3.open(self._treelvl1._file._path)[self._treelvl1.name][self.name] return b.lazyarray(interpretation, entrysteps, entrystart, entrystop, flatten, awkwardlib, cache, basketcache, keycache, executor, persistvirtual) class TTreeImpl(object): @@ -593,7 +593,7 @@ def put_tnamed(self, cursor, name, title, hexbytes=numpy.uint32(0x03000000)): buff = (self.put_tobject(cursor, hexbytes) + cursor.put_string(name) + cursor.put_string(title)) length = len(buff) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff _format_tattline = struct.Struct(">hhh") @@ -606,7 +606,7 @@ def put_tattline(self, cursor): self.fields["_fLineStyle"], self.fields["_fLineWidth"])) length = len(buff) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff _format_tattfill = struct.Struct(">hh") @@ -618,7 +618,7 @@ def put_tattfill(self, cursor): self.fields["_fFillColor"], self.fields["_fFillStyle"])) length = len(buff) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff _format_tattmarker = struct.Struct(">hhf") @@ -631,7 +631,7 @@ def put_tattmarker(self, cursor): self.fields["_fMarkerStyle"], self.fields["_fMarkerSize"])) length = len(buff) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff _format_rootiofeatures = struct.Struct(">B") @@ -643,7 +643,7 @@ def put_rootiofeatures(self, cursor): cursor.skip(4) buff = b"\x1a\xa1/\x10" + cursor.put_fields(self._format_rootiofeatures, fIOBits) length = len(buff) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff _format_tobjarray1 = struct.Struct(">ii") @@ -662,7 +662,7 @@ def put_tobjarray(self, cursor, values, classname, fBits=50331648): for i in range(len(self.fields["_fLeaves"])): buff += self.util.put_objany(cursor, (values[i], classname[i] if type(classname)==list else classname), self.write_keycursor) length = len(buff) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff _format_skiptobj1 = struct.Struct(">h") @@ -745,9 +745,9 @@ def write(self, context, cursor, name, key, keycursor, util): self.util.put_objany(cursor, (self.fields["_fUserInfo"], "TList"), self.write_keycursor) + self.util.put_objany(cursor, (self.fields["_fBranchRef"], "TBranchRef"), self.write_keycursor)) length = len(buff) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask givenbytes = copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff - 
uproot.write.compress.write(context, copy(self.tree_write_cursor), givenbytes, None, key, copy(self.write_keycursor)) + uproot3.write.compress.write(context, copy(self.tree_write_cursor), givenbytes, None, key, copy(self.write_keycursor)) class TBranchImpl(object): @@ -853,7 +853,7 @@ def put_tnamed(self, cursor, name, title, hexbytes=numpy.uint32(0x03000000)): buff = (self.put_tobject(cursor, hexbytes) + cursor.put_string(name) + cursor.put_string(title)) length = len(buff) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff _format_tattfill = struct.Struct(">hh") @@ -865,7 +865,7 @@ def put_tattfill(self, cursor): self.fields["_fFillColor"], self.fields["_fFillStyle"])) length = len(buff) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff _format_rootiofeatures = struct.Struct(">B") @@ -878,7 +878,7 @@ def put_rootiofeatures(self, cursor): buff = b"\x1a\xa1/\x10" buff += cursor.put_fields(self._format_rootiofeatures, fIOBits) length = len(buff) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff _format_tleaf1 = struct.Struct(">iii??") @@ -907,7 +907,7 @@ def put_tleaf(self, cursor): cursor.put_fields(self._format_tleaf1, fLen, fLenType, fOffset, fIsRange, fIsUnsigned) + self.util.put_objany(cursor, (fLeafCount, "TLeaf"), self.keycursor)) length = len(buff) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff _format_tleafI1 = struct.Struct(">ii") @@ -922,7 +922,7 @@ def put_tleafI(self, cursor): fMaximum = 0 buff = self.put_tleaf(cursor) + cursor.put_fields(self._format_tleafI1, fMinimum, fMaximum) length = len(buff) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff _format_tleafB1 = struct.Struct(">bb") @@ -937,7 +937,7 @@ def put_tleafB(self, cursor): fMaximum = 0 buff = self.put_tleaf(cursor) + cursor.put_fields(self._format_tleafB1, fMinimum, fMaximum) length = len(buff) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff _format_tleafD1 = struct.Struct(">dd") @@ -952,7 +952,7 @@ def put_tleafD(self, cursor): fMaximum = 0 buff = self.put_tleaf(cursor) + cursor.put_fields(self._format_tleafD1, fMinimum, fMaximum) length = len(buff) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff _format_tleafF1 = struct.Struct(">ff") @@ -967,7 +967,7 @@ def put_tleafF(self, cursor): fMaximum = 0 buff = self.put_tleaf(cursor) + cursor.put_fields(self._format_tleafF1, fMinimum, fMaximum) length = len(buff) + 
self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff _format_tleafL1 = struct.Struct(">qq") @@ -982,7 +982,7 @@ def put_tleafL(self, cursor): fMaximum = 0 buff = self.put_tleaf(cursor) + cursor.put_fields(self._format_tleafL1, fMinimum, fMaximum) length = len(buff) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff _format_tleafO1 = struct.Struct(">??") @@ -997,7 +997,7 @@ def put_tleafO(self, cursor): fMaximum = 0 buff = self.put_tleaf(cursor) + cursor.put_fields(self._format_tleafO1, fMinimum, fMaximum) length = len(buff) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff _format_tleafS1 = struct.Struct(">hh") @@ -1012,7 +1012,7 @@ def put_tleafS(self, cursor): fMaximum = 0 buff = self.put_tleaf(cursor) + cursor.put_fields(self._format_tleafS1, fMinimum, fMaximum) length = len(buff) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff _format_tobjarray1 = struct.Struct(">ii") @@ -1031,7 +1031,7 @@ def put_tobjarray(self, cursor, values, classname, fBits=50331648): for value in values: buff += self.util.put_objany(cursor, (value, classname), self.keycursor) length = len(buff) + self._format_cntvers.size - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff _format_skiptobj1 = struct.Struct(">h") @@ -1100,5 +1100,5 @@ def write(self, cursor): self._fbasketseek_cursor = copy(cursor) buff += (cursor.put_array(self.fields["_fBasketSeek"]) + cursor.put_string(self.fields["_fFileName"])) length = (len(buff) + self._format_cntvers.size) - cnt = numpy.int64(length - 4) | uproot.const.kByteCountMask + cnt = numpy.int64(length - 4) | uproot3.const.kByteCountMask return copy_cursor.put_fields(self._format_cntvers, cnt, vers) + buff diff --git a/uproot3/write/objects/__init__.py b/uproot3/write/objects/__init__.py new file mode 100644 index 00000000..d0cdd065 --- /dev/null +++ b/uproot3/write/objects/__init__.py @@ -0,0 +1,5 @@ +#!/usr/bin/env python + +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE + +from __future__ import absolute_import diff --git a/uproot/write/objects/util.py b/uproot3/write/objects/util.py similarity index 88% rename from uproot/write/objects/util.py rename to uproot3/write/objects/util.py index fde03ecd..2e0d9f3a 100644 --- a/uproot/write/objects/util.py +++ b/uproot3/write/objects/util.py @@ -3,7 +3,7 @@ import numpy -import uproot +import uproot3 class Util(object): @@ -22,14 +22,14 @@ def _putclass(self, cursor, obj, keycursor, beg): buf += cursor.put_fields(self._format_putobjany1, numpy.uint32(self._written[id(objct)])) return buf if clsname in self._written: - buf += cursor.put_fields(self._format_putobjany1, self._written[clsname] | uproot.const.kClassMask) + buf += cursor.put_fields(self._format_putobjany1, 
self._written[clsname] | uproot3.const.kClassMask) if clsname != "TBranch": - self._written[id(objct)] = beg + uproot.const.kMapOffset + self._written[id(objct)] = beg + uproot3.const.kMapOffset else: - buf += cursor.put_fields(self._format_putobjany1, uproot.const.kNewClassTag) + buf += cursor.put_fields(self._format_putobjany1, uproot3.const.kNewClassTag) buf += cursor.put_cstring(clsname) - self._written[clsname] = numpy.uint32(start + uproot.const.kMapOffset) | uproot.const.kClassMask - self._written[id(objct)] = beg + uproot.const.kMapOffset + self._written[clsname] = numpy.uint32(start + uproot3.const.kMapOffset) | uproot3.const.kClassMask + self._written[id(objct)] = beg + uproot3.const.kMapOffset if clsname == "THashList" or clsname == "TList": buf += self.parent_obj._put_tlist(cursor, objct) elif clsname == "TObjString": @@ -65,7 +65,7 @@ def put_objany(self, cursor, obj, keycursor): beg = cursor.index cursor.skip(self._format_putobjany1.size) class_buf = self._putclass(cursor, obj, keycursor, beg) - buff = copy_cursor.put_fields(self._format_putobjany1, numpy.uint32(len(class_buf)) | uproot.const.kByteCountMask) + buff = copy_cursor.put_fields(self._format_putobjany1, numpy.uint32(len(class_buf)) | uproot3.const.kByteCountMask) else: copy_cursor = copy(cursor) cursor.skip(self._format_putobjany1.size) diff --git a/uproot3/write/sink/__init__.py b/uproot3/write/sink/__init__.py new file mode 100644 index 00000000..d0cdd065 --- /dev/null +++ b/uproot3/write/sink/__init__.py @@ -0,0 +1,5 @@ +#!/usr/bin/env python + +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE + +from __future__ import absolute_import diff --git a/uproot/write/sink/cursor.py b/uproot3/write/sink/cursor.py similarity index 97% rename from uproot/write/sink/cursor.py rename to uproot3/write/sink/cursor.py index 5fd85753..4d2110da 100644 --- a/uproot/write/sink/cursor.py +++ b/uproot3/write/sink/cursor.py @@ -1,13 +1,13 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE from __future__ import absolute_import import struct import sys -from uproot._util import _tobytes +from uproot3._util import _tobytes class Cursor(object): def __init__(self, index): diff --git a/uproot/write/sink/file.py b/uproot3/write/sink/file.py similarity index 97% rename from uproot/write/sink/file.py rename to uproot3/write/sink/file.py index 086254be..45e2601d 100644 --- a/uproot/write/sink/file.py +++ b/uproot3/write/sink/file.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE from __future__ import absolute_import diff --git a/uproot/write/streamers.py b/uproot3/write/streamers.py similarity index 99% rename from uproot/write/streamers.py rename to uproot3/write/streamers.py index eac25761..862d57da 100644 --- a/uproot/write/streamers.py +++ b/uproot3/write/streamers.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE +# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE from __future__ import absolute_import @@ -8,4 +8,4 @@ # streamers = 
b'@\x00\x01n\x00\x05\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01@\x00\x01X\xff\xff\xff\xffTStreamerInfo\x00@\x00\x01B\x00\t@\x00\x00\x18\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\nTObjString\x00\x9c\x8eH\x00\x00\x00\x00\x01@\x00\x01\x18\xff\xff\xff\xffTObjArray\x00@\x00\x01\x06\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00u\xff\xff\xff\xffTStreamerBase\x00@\x00\x00_\x00\x03@\x00\x00U\x00\x04@\x00\x00&\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TObject\x11Basic ROOT object\x00\x00\x00B\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x90\x1b\xc0-\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00t\xff\xff\xff\xffTStreamerString\x00@\x00\x00\\\x00\x02@\x00\x00V\x00\x04@\x00\x00$\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fString\x0fwrapped TString\x00\x00\x00A\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TString\x00' # all useful streamers (histograms, etc.) -streamers = b'@\x00\xa0u\x00\x05\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00C@\x00\x01X\xff\xff\xff\xffTStreamerInfo\x00@\x00\x01B\x00\t@\x00\x00\x18\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\nTObjString\x00\x9c\x8eH\x00\x00\x00\x00\x01@\x00\x01\x18\xff\xff\xff\xffTObjArray\x00@\x00\x01\x06\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00u\xff\xff\xff\xffTStreamerBase\x00@\x00\x00_\x00\x03@\x00\x00U\x00\x04@\x00\x00&\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TObject\x11Basic ROOT object\x00\x00\x00B\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x90\x1b\xc0-\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00t\xff\xff\xff\xffTStreamerString\x00@\x00\x00\\\x00\x02@\x00\x00V\x00\x04@\x00\x00$\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fString\x0fwrapped TString\x00\x00\x00A\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TString\x00@\x00\x01H\x80\x00\x00[@\x00\x01@\x00\t@\x00\x00\x15\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x07TObject\x00\x90\x1b\xc0-\x00\x00\x00\x01@\x00\x01\x19\x80\x00\x00\x9b@\x00\x01\x11\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00\x87\xff\xff\xff\xffTStreamerBasicType\x00@\x00\x00l\x00\x02@\x00\x00f\x00\x04@\x00\x00/\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfUniqueID\x18object unique identifier\x00\x00\x00\r\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0cunsigned int@\x00\x00m\x80\x00\x02\x08@\x00\x00e\x00\x02@\x00\x00_\x00\x04@\x00\x00(\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fBits\x15bit field status word\x00\x00\x00\x0f\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0cunsigned int\x00@\x00\x01\xb3\x80\x00\x00[@\x00\x01\xab\x00\t@\x00\x00\x1c\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x0eTLorentzVector\x00\xe3\xde\xc1\xa1\x00\x00\x00\x04@\x00\x01}\x80\x00\x00\x9b@\x00\x01u\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00g\x80\x00\x00\xc6@\x00\x00_\x00\x03@\x00\x00U\x00\x04@\x00\x00&\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TObject\x11Basic ROOT 
object\x00\x00\x00B\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x90\x1b\xc0-\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00s\xff\xff\xff\xffTStreamerObject\x00@\x00\x00[\x00\x02@\x00\x00U\x00\x04@\x00\x00"\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x02fP\x123 vector component\x00\x00\x00=\x00\x00\x00(\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08TVector3@\x00\x00z\x80\x00\x02\x08@\x00\x00r\x00\x02@\x00\x00l\x00\x04@\x00\x00;\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x02fE+time or energy of (x,y,z,t) or (px,py,pz,e)\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double\x00@\x00\x01\xb1\x80\x00\x00[@\x00\x01\xa9\x00\t@\x00\x00\x16\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x08TVector3\x00\xab\xb6\xbe\x1e\x00\x00\x00\x03@\x00\x01\x81\x80\x00\x00\x9b@\x00\x01y\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00@\x00\x00g\x80\x00\x00\xc6@\x00\x00_\x00\x03@\x00\x00U\x00\x04@\x00\x00&\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TObject\x11Basic ROOT object\x00\x00\x00B\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x90\x1b\xc0-\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00O\x80\x00\x02\x08@\x00\x00G\x00\x02@\x00\x00A\x00\x04@\x00\x00\x10\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x02fX\x00\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00O\x80\x00\x02\x08@\x00\x00G\x00\x02@\x00\x00A\x00\x04@\x00\x00\x10\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x02fY\x00\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00O\x80\x00\x02\x08@\x00\x00G\x00\x02@\x00\x00A\x00\x04@\x00\x00\x10\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x02fZ\x00\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double\x00@\x00\x01v\x80\x00\x00[@\x00\x01n\x00\t@\x00\x00\x16\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x08TVector2\x00\x00\x89\xb7\xf4\x00\x00\x00\x03@\x00\x01F\x80\x00\x00\x9b@\x00\x01>\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00g\x80\x00\x00\xc6@\x00\x00_\x00\x03@\x00\x00U\x00\x04@\x00\x00&\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TObject\x11Basic ROOT object\x00\x00\x00B\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x90\x1b\xc0-\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00g\x80\x00\x02\x08@\x00\x00_\x00\x02@\x00\x00Y\x00\x04@\x00\x00(\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x02fX\x18components of the 
vector\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00O\x80\x00\x02\x08@\x00\x00G\x00\x02@\x00\x00A\x00\x04@\x00\x00\x10\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x02fY\x00\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double\x00@\x00\x04\r\x80\x00\x00[@\x00\x04\x05\x00\t@\x00\x00\x16\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x08TProfile\x00K\xed\xeeT\x00\x00\x00\x07@\x00\x03\xdd\x80\x00\x00\x9b@\x00\x03\xd5\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00@\x00\x00|\x80\x00\x00\xc6@\x00\x00t\x00\x03@\x00\x00j\x00\x04@\x00\x00;\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x04TH1D)1-Dim histograms (one double per channel)\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf9\xb1V\x9f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x03@\x00\x00\x85\xff\xff\xff\xffTStreamerObjectAny\x00@\x00\x00j\x00\x02@\x00\x00d\x00\x04@\x00\x002\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfBinEntries\x19number of entries per bin\x00\x00\x00>\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TArrayD@\x00\x00s\x80\x00\x02\x08@\x00\x00k\x00\x02@\x00\x00e\x00\x04@\x00\x000\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfErrorMode\x18Option to compute errors\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\nEErrorType@\x00\x00k\x80\x00\x02\x08@\x00\x00c\x00\x02@\x00\x00]\x00\x04@\x00\x00,\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fYmin\x19Lower limit in Y (if set)\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00k\x80\x00\x02\x08@\x00\x00c\x00\x02@\x00\x00]\x00\x04@\x00\x00,\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fYmax\x19Upper limit in Y (if set)\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00i\x80\x00\x02\x08@\x00\x00a\x00\x02@\x00\x00[\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fTsumwy\x15Total Sum of weight*Y\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00l\x80\x00\x02\x08@\x00\x00d\x00\x02@\x00\x00^\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fTsumwy2\x17Total Sum of weight*Y*Y\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00\x81\x80\x00\x08\xbf@\x00\x00y\x00\x02@\x00\x00s\x00\x04@\x00\x00A\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfBinSumw2*Array of sum of squares of weights per 
bin\x00\x00\x00>\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TArrayD\x00@\x00\x01#\x80\x00\x00[@\x00\x01\x1b\x00\t@\x00\x00\x12\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x04TH1D\x00\xf9\xb1V\x9f\x00\x00\x00\x03@\x00\x00\xf7\x80\x00\x00\x9b@\x00\x00\xef\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH1\x1a1-Dim histogram base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1c7@\xc4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x08@\x00\x00f\x80\x00\x00\xc6@\x00\x00^\x00\x03@\x00\x00T\x00\x04@\x00\x00%\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TArrayD\x10Array of doubles\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00q9\xef4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01\x00@\x00\x0cl\x80\x00\x00[@\x00\x0cd\x00\t@\x00\x00\x11\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x03TH1\x00\x1c7@\xc4\x00\x00\x00\x08@\x00\x0cA\x80\x00\x00\x9b@\x00\x0c9\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x1a\x00\x00\x00\x00@\x00\x00\x7f\x80\x00\x00\xc6@\x00\x00w\x00\x03@\x00\x00m\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06TNamed*The basis for a named object (name, title)\x00\x00\x00C\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xdf\xb7J<\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00f\x80\x00\x00\xc6@\x00\x00^\x00\x03@\x00\x00T\x00\x04@\x00\x00%\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08TAttLine\x0fLine attributes\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x94\x07EI\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00k\x80\x00\x00\xc6@\x00\x00c\x00\x03@\x00\x00Y\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08TAttFill\x14Fill area attributes\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xd9*\x92\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00j\x80\x00\x00\xc6@\x00\x00b\x00\x03@\x00\x00X\x00\x04@\x00\x00)\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nTAttMarker\x11Marker attributes\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00)\x1d\x8b\xec\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00|\x80\x00\x02\x08@\x00\x00t\x00\x02@\x00\x00n\x00\x04@\x00\x00@\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fNcells+number of bins(1D), cells (2D) +U/Overflows\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00c\x80\x00\x03\xc7@\x00\x00[\x00\x02@\x00\x00U\x00\x04@\x00\x00%\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06fXaxis\x11X axis descriptor\x00\x00\x00=\x00\x00\x00\xd8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05TAxis@\x00\x00c\x80\x00\x03\xc7@\x00\x00[\x00\x02@\x00\x00U\x00\x04@\x00\x00%\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06fYaxis\x11Y axis 
descriptor\x00\x00\x00=\x00\x00\x00\xd8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05TAxis@\x00\x00c\x80\x00\x03\xc7@\x00\x00[\x00\x02@\x00\x00U\x00\x04@\x00\x00%\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06fZaxis\x11Z axis descriptor\x00\x00\x00=\x00\x00\x00\xd8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05TAxis@\x00\x00{\x80\x00\x02\x08@\x00\x00s\x00\x02@\x00\x00m\x00\x04@\x00\x00=\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfBarOffset%(1000*offset) for bar charts or legos\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short@\x00\x00y\x80\x00\x02\x08@\x00\x00q\x00\x02@\x00\x00k\x00\x04@\x00\x00;\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfBarWidth$(1000*width) for bar charts or legos\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short@\x00\x00f\x80\x00\x02\x08@\x00\x00^\x00\x02@\x00\x00X\x00\x04@\x00\x00\'\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fEntries\x11Number of entries\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00g\x80\x00\x02\x08@\x00\x00_\x00\x02@\x00\x00Y\x00\x04@\x00\x00(\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06fTsumw\x14Total Sum of weights\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00s\x80\x00\x02\x08@\x00\x00k\x00\x02@\x00\x00e\x00\x04@\x00\x004\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fTsumw2\x1fTotal Sum of squares of weights\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00i\x80\x00\x02\x08@\x00\x00a\x00\x02@\x00\x00[\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fTsumwx\x15Total Sum of weight*X\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00l\x80\x00\x02\x08@\x00\x00d\x00\x02@\x00\x00^\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fTsumwx2\x17Total Sum of weight*X*X\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00o\x80\x00\x02\x08@\x00\x00g\x00\x02@\x00\x00a\x00\x04@\x00\x000\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMaximum\x1aMaximum value for plotting\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00o\x80\x00\x02\x08@\x00\x00g\x00\x02@\x00\x00a\x00\x04@\x00\x000\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMinimum\x1aMinimum value for plotting\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00l\x80\x00\x02\x08@\x00\x00d\x00\x02@\x00\x00^\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfNormFactor\x14Normalization 
factor\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00u\x80\x00\x08\xbf@\x00\x00m\x00\x02@\x00\x00g\x00\x04@\x00\x005\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fContour\x1fArray to display contour levels\x00\x00\x00>\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TArrayD@\x00\x00v\x80\x00\x08\xbf@\x00\x00n\x00\x02@\x00\x00h\x00\x04@\x00\x006\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06fSumw2"Array of sum of squares of weights\x00\x00\x00>\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TArrayD@\x00\x00f\x80\x00\x01?@\x00\x00^\x00\x02@\x00\x00X\x00\x04@\x00\x00&\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fOption\x11histogram options\x00\x00\x00A\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TString@\x00\x00\x9c\xff\xff\xff\xffTStreamerObjectPointer\x00@\x00\x00}\x00\x02@\x00\x00w\x00\x04@\x00\x00F\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfFunctions.->Pointer to list of functions (fits and user)\x00\x00\x00?\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06TList*@\x00\x00a\x80\x00\x02\x08@\x00\x00Y\x00\x02@\x00\x00S\x00\x04@\x00\x00%\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfBufferSize\x0cfBuffer size\x00\x00\x00\x06\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00\x99\xff\xff\xff\xffTStreamerBasicPointer\x00@\x00\x00{\x00\x02@\x00\x00a\x00\x04@\x00\x00/\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fBuffer\x1a[fBufferSize] entry buffer\x00\x00\x000\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07double*\x00\x00\x00\x08\x0bfBufferSize\x03TH1@\x00\x00\x87\x80\x00\x02\x08@\x00\x00\x7f\x00\x02@\x00\x00y\x00\x04@\x00\x00=\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0efBinStatErrOpt!option for bin statistical errors\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11TH1::EBinErrorOpt@\x00\x00\x9c\x80\x00\x02\x08@\x00\x00\x94\x00\x02@\x00\x00\x8e\x00\x04@\x00\x00P\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0efStatOverflows4per object flag to use under/overflows in statistics\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x13TH1::EStatOverflows\x00@\x00\x01\x82\x80\x00\x00[@\x00\x01z\x00\t@\x00\x00\x14\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x06TNamed\x00\xdf\xb7J<\x00\x00\x00\x01@\x00\x01T\x80\x00\x00\x9b@\x00\x01L\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00g\x80\x00\x00\xc6@\x00\x00_\x00\x03@\x00\x00U\x00\x04@\x00\x00&\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TObject\x11Basic ROOT 
object\x00\x00\x00B\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x90\x1b\xc0-\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00d\x80\x00\x01?@\x00\x00\\\x00\x02@\x00\x00V\x00\x04@\x00\x00$\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fName\x11object identifier\x00\x00\x00A\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TString@\x00\x00`\x80\x00\x01?@\x00\x00X\x00\x02@\x00\x00R\x00\x04@\x00\x00 \x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06fTitle\x0cobject title\x00\x00\x00A\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TString\x00@\x00\x01y\x80\x00\x00[@\x00\x01q\x00\t@\x00\x00\x16\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x08TAttLine\x00\x94\x07EI\x00\x00\x00\x02@\x00\x01I\x80\x00\x00\x9b@\x00\x01A\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00`\x80\x00\x02\x08@\x00\x00X\x00\x02@\x00\x00R\x00\x04@\x00\x00"\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfLineColor\nLine color\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short@\x00\x00`\x80\x00\x02\x08@\x00\x00X\x00\x02@\x00\x00R\x00\x04@\x00\x00"\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfLineStyle\nLine style\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short@\x00\x00`\x80\x00\x02\x08@\x00\x00X\x00\x02@\x00\x00R\x00\x04@\x00\x00"\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfLineWidth\nLine width\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short\x00@\x00\x01\x1f\x80\x00\x00[@\x00\x01\x17\x00\t@\x00\x00\x16\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x08TAttFill\x00\xff\xd9*\x92\x00\x00\x00\x02@\x00\x00\xef\x80\x00\x00\x9b@\x00\x00\xe7\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00e\x80\x00\x02\x08@\x00\x00]\x00\x02@\x00\x00W\x00\x04@\x00\x00\'\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfFillColor\x0fFill area color\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short@\x00\x00e\x80\x00\x02\x08@\x00\x00]\x00\x02@\x00\x00W\x00\x04@\x00\x00\'\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfFillStyle\x0fFill area style\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short\x00@\x00\x01\x85\x80\x00\x00[@\x00\x01}\x00\t@\x00\x00\x18\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\nTAttMarker\x00)\x1d\x8b\xec\x00\x00\x00\x02@\x00\x01S\x80\x00\x00\x9b@\x00\x01K\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00d\x80\x00\x02\x08@\x00\x00\\\x00\x02@\x00\x00V\x00\x04@\x00\x00&\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0cfMarkerColor\x0cMarker color\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short@\x00\x00d\x80\x00\x02\x08@\x00\x00\\\x00\x02@\x00\x00V\x00\x04@\x00\x00&\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0cfMarkerStyle\x0cMarker 
style\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short@\x00\x00b\x80\x00\x02\x08@\x00\x00Z\x00\x02@\x00\x00T\x00\x04@\x00\x00$\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfMarkerSize\x0bMarker size\x00\x00\x00\x05\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05float\x00@\x00\x00\xb3\x80\x00\x00[@\x00\x00\xab\x00\t@\x00\x00\x14\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x06TArray\x00\x00p!\xb2\x00\x00\x00\x01@\x00\x00\x85\x80\x00\x00\x9b@\x00\x00}\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00@\x00\x00d\x80\x00\x02\x08@\x00\x00\\\x00\x02@\x00\x00V\x00\x04@\x00\x00(\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x02fN\x18Number of array elements\x00\x00\x00\x06\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int\x00@\x00\x06\t\x80\x00\x00[@\x00\x06\x01\x00\t@\x00\x00\x13\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x05TAxis\x00ZInp\x00\x00\x00\n@\x00\x05\xdc\x80\x00\x00\x9b@\x00\x05\xd4\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\r\x00\x00\x00\x00@\x00\x00\x7f\x80\x00\x00\xc6@\x00\x00w\x00\x03@\x00\x00m\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06TNamed*The basis for a named object (name, title)\x00\x00\x00C\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xdf\xb7J<\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00f\x80\x00\x00\xc6@\x00\x00^\x00\x03@\x00\x00T\x00\x04@\x00\x00%\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08TAttAxis\x0fAxis attributes\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\\o\xff>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x04@\x00\x00^\x80\x00\x02\x08@\x00\x00V\x00\x02@\x00\x00P\x00\x04@\x00\x00"\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06fNbins\x0eNumber of bins\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00g\x80\x00\x02\x08@\x00\x00_\x00\x02@\x00\x00Y\x00\x04@\x00\x00(\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fXmin\x15low edge of first bin\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00h\x80\x00\x02\x08@\x00\x00`\x00\x02@\x00\x00Z\x00\x04@\x00\x00)\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fXmax\x16upper edge of last bin\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00h\x80\x00\x08\xbf@\x00\x00`\x00\x02@\x00\x00Z\x00\x04@\x00\x00(\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06fXbins\x14Bin edges array in X\x00\x00\x00>\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TArrayD@\x00\x00d\x80\x00\x02\x08@\x00\x00\\\x00\x02@\x00\x00V\x00\x04@\x00\x00(\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06fFirst\x14first bin to 
display\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00b\x80\x00\x02\x08@\x00\x00Z\x00\x02@\x00\x00T\x00\x04@\x00\x00&\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fLast\x13last bin to display\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00q\x80\x00\x02\x08@\x00\x00i\x00\x02@\x00\x00c\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06fBits2\x16second bit status word\x00\x00\x00\x0c\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0eunsigned short@\x00\x00\x88\x80\x00\x02\x08@\x00\x00\x80\x00\x02@\x00\x00z\x00\x04@\x00\x00K\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0cfTimeDisplay1on/off displaying time values instead of numerics\x00\x00\x00\x12\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04bool@\x00\x00\x80\x80\x00\x01?@\x00\x00x\x00\x02@\x00\x00r\x00\x04@\x00\x00@\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfTimeFormat\'Date&time format, ex: 09/12/99 12:34:00\x00\x00\x00A\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TString@\x00\x00f\x80\x00\x16\xcb@\x00\x00^\x00\x02@\x00\x00X\x00\x04@\x00\x00#\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fLabels\x0eList of labels\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\nTHashList*@\x00\x00l\x80\x00\x16\xcb@\x00\x00d\x00\x02@\x00\x00^\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fModLabs\x17List of modified labels\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06TList*\x00@\x00\x05\x0e\x80\x00\x00[@\x00\x05\x06\x00\t@\x00\x00\x16\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x08TAttAxis\x00\\o\xff>\x00\x00\x00\x04@\x00\x04\xde\x80\x00\x00\x9b@\x00\x04\xd6\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x0b\x00\x00\x00\x00@\x00\x00\x80\x80\x00\x02\x08@\x00\x00x\x00\x02@\x00\x00r\x00\x04@\x00\x00D\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfNdivisions+Number of divisions(10000*n3 + 100*n2 + n1)\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00l\x80\x00\x02\x08@\x00\x00d\x00\x02@\x00\x00^\x00\x04@\x00\x00.\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfAxisColor\x16Color of the line axis\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short@\x00\x00f\x80\x00\x02\x08@\x00\x00^\x00\x02@\x00\x00X\x00\x04@\x00\x00(\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfLabelColor\x0fColor of labels\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short@\x00\x00e\x80\x00\x02\x08@\x00\x00]\x00\x02@\x00\x00W\x00\x04@\x00\x00\'\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfLabelFont\x0fFont for 
labels\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short@\x00\x00h\x80\x00\x02\x08@\x00\x00`\x00\x02@\x00\x00Z\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0cfLabelOffset\x10Offset of labels\x00\x00\x00\x05\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05float@\x00\x00d\x80\x00\x02\x08@\x00\x00\\\x00\x02@\x00\x00V\x00\x04@\x00\x00&\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfLabelSize\x0eSize of labels\x00\x00\x00\x05\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05float@\x00\x00k\x80\x00\x02\x08@\x00\x00c\x00\x02@\x00\x00]\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfTickLength\x14Length of tick marks\x00\x00\x00\x05\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05float@\x00\x00l\x80\x00\x02\x08@\x00\x00d\x00\x02@\x00\x00^\x00\x04@\x00\x00.\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0cfTitleOffset\x14Offset of axis title\x00\x00\x00\x05\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05float@\x00\x00h\x80\x00\x02\x08@\x00\x00`\x00\x02@\x00\x00Z\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfTitleSize\x12Size of axis title\x00\x00\x00\x05\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05float@\x00\x00j\x80\x00\x02\x08@\x00\x00b\x00\x02@\x00\x00\\\x00\x04@\x00\x00,\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfTitleColor\x13Color of axis title\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short@\x00\x00i\x80\x00\x02\x08@\x00\x00a\x00\x02@\x00\x00[\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfTitleFont\x13Font for axis title\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short\x00@\x00\x00\xb8\x80\x00\x00[@\x00\x00\xb0\x00\t@\x00\x00\x17\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\tTHashList\x00\xcc~I\xc1\x00\x00\x00\x00@\x00\x00\x87\x80\x00\x00\x9b@\x00\x00\x7f\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00@\x00\x00f\x80\x00\x00\xc6@\x00\x00^\x00\x03@\x00\x00T\x00\x04@\x00\x00%\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05TList\x12Doubly linked list\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00i\xc5\xc3\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x05\x00@\x00\x00\xc6\x80\x00\x00[@\x00\x00\xbe\x00\t@\x00\x00\x13\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x05TList\x00i\xc5\xc3\xbb\x00\x00\x00\x05@\x00\x00\x99\x80\x00\x00\x9b@\x00\x00\x91\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00@\x00\x00x\x80\x00\x00\xc6@\x00\x00p\x00\x03@\x00\x00f\x00\x04@\x00\x007\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0eTSeqCollection\x1bSequenceable collection 
ABC\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfcl;\xc6\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x00\x00@\x00\x00\xcf\x80\x00\x00[@\x00\x00\xc7\x00\t@\x00\x00\x1c\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x0eTSeqCollection\x00\xfcl;\xc6\x00\x00\x00\x00@\x00\x00\x99\x80\x00\x00\x9b@\x00\x00\x91\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00@\x00\x00x\x80\x00\x00\xc6@\x00\x00p\x00\x03@\x00\x00f\x00\x04@\x00\x007\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bTCollection\x1eCollection abstract base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00W\xe3\xcb\x9c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x03\x00@\x00\x01\x9b\x80\x00\x00[@\x00\x01\x93\x00\t@\x00\x00\x19\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x0bTCollection\x00W\xe3\xcb\x9c\x00\x00\x00\x03@\x00\x01h\x80\x00\x00\x9b@\x00\x01`\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00g\x80\x00\x00\xc6@\x00\x00_\x00\x03@\x00\x00U\x00\x04@\x00\x00&\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TObject\x11Basic ROOT object\x00\x00\x00B\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x90\x1b\xc0-\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00i\x80\x00\x01?@\x00\x00a\x00\x02@\x00\x00[\x00\x04@\x00\x00)\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fName\x16name of the collection\x00\x00\x00A\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TString@\x00\x00o\x80\x00\x02\x08@\x00\x00g\x00\x02@\x00\x00a\x00\x04@\x00\x003\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fSize number of elements in collection\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int\x00@\x00\x00L\x80\x00\x00[@\x00\x00D\x00\t@\x00\x00\x15\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x07TString\x00\x00\x01t\x19\x00\x00\x00\x02@\x00\x00\x1d\x80\x00\x00\x9b@\x00\x00\x15\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00@\x00\x03\xfc\x80\x00\x00[@\x00\x03\xf4\x00\t@\x00\x00\x18\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\nTProfile2D\x006\xa1B\xac\x00\x00\x00\x08@\x00\x03\xca\x80\x00\x00\x9b@\x00\x03\xc2\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00@\x00\x00|\x80\x00\x00\xc6@\x00\x00t\x00\x03@\x00\x00j\x00\x04@\x00\x00;\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x04TH2D)2-Dim histograms (one double per channel)\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x7f\xba\x82\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x04@\x00\x00r\x80\x00\x08\xbf@\x00\x00j\x00\x02@\x00\x00d\x00\x04@\x00\x002\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfBinEntries\x19number of entries per bin\x00\x00\x00>\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TArrayD@\x00\x00s\x80\x00\x02\x08@\x00\x00k\x00\x02@\x00\x00e\x00\x04@\x00\x000\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfErrorMode\x18Option to compute 
errors\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\nEErrorType@\x00\x00k\x80\x00\x02\x08@\x00\x00c\x00\x02@\x00\x00]\x00\x04@\x00\x00,\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fZmin\x19Lower limit in Z (if set)\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00k\x80\x00\x02\x08@\x00\x00c\x00\x02@\x00\x00]\x00\x04@\x00\x00,\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fZmax\x19Upper limit in Z (if set)\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00i\x80\x00\x02\x08@\x00\x00a\x00\x02@\x00\x00[\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fTsumwz\x15Total Sum of weight*Z\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00l\x80\x00\x02\x08@\x00\x00d\x00\x02@\x00\x00^\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fTsumwz2\x17Total Sum of weight*Z*Z\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00\x81\x80\x00\x08\xbf@\x00\x00y\x00\x02@\x00\x00s\x00\x04@\x00\x00A\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfBinSumw2*Array of sum of squares of weights per bin\x00\x00\x00>\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TArrayD\x00@\x00\x01#\x80\x00\x00[@\x00\x01\x1b\x00\t@\x00\x00\x12\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x04TH2D\x00\x7f\xba\x82\xf0\x00\x00\x00\x04@\x00\x00\xf7\x80\x00\x00\x9b@\x00\x00\xef\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH2\x1a2-Dim histogram base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x824\x7f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x05@\x00\x00f\x80\x00\x00\xc6@\x00\x00^\x00\x03@\x00\x00T\x00\x04@\x00\x00%\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TArrayD\x10Array of doubles\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00q9\xef4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01\x00@\x00\x02n\x80\x00\x00[@\x00\x02f\x00\t@\x00\x00\x11\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x03TH2\x00\x01\x824\x7f\x00\x00\x00\x05@\x00\x02C\x80\x00\x00\x9b@\x00\x02;\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH1\x1a1-Dim histogram base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1c7@\xc4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x08@\x00\x00e\x80\x00\x02\x08@\x00\x00]\x00\x02@\x00\x00W\x00\x04@\x00\x00&\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0cfScalefactor\x0cScale 
factor\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00i\x80\x00\x02\x08@\x00\x00a\x00\x02@\x00\x00[\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fTsumwy\x15Total Sum of weight*Y\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00l\x80\x00\x02\x08@\x00\x00d\x00\x02@\x00\x00^\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fTsumwy2\x17Total Sum of weight*Y*Y\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00l\x80\x00\x02\x08@\x00\x00d\x00\x02@\x00\x00^\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fTsumwxy\x17Total Sum of weight*X*Y\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double\x00@\x00\x03\xfc\x80\x00\x00[@\x00\x03\xf4\x00\t@\x00\x00\x18\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\nTProfile3D\x00\xf6\x0ch\x14\x00\x00\x00\x08@\x00\x03\xca\x80\x00\x00\x9b@\x00\x03\xc2\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00@\x00\x00|\x80\x00\x00\xc6@\x00\x00t\x00\x03@\x00\x00j\x00\x04@\x00\x00;\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x04TH3D)3-Dim histograms (one double per channel)\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00d\xb9\xff\x86\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x04@\x00\x00r\x80\x00\x08\xbf@\x00\x00j\x00\x02@\x00\x00d\x00\x04@\x00\x002\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfBinEntries\x19number of entries per bin\x00\x00\x00>\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TArrayD@\x00\x00s\x80\x00\x02\x08@\x00\x00k\x00\x02@\x00\x00e\x00\x04@\x00\x000\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfErrorMode\x18Option to compute errors\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\nEErrorType@\x00\x00k\x80\x00\x02\x08@\x00\x00c\x00\x02@\x00\x00]\x00\x04@\x00\x00,\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fTmin\x19Lower limit in T (if set)\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00k\x80\x00\x02\x08@\x00\x00c\x00\x02@\x00\x00]\x00\x04@\x00\x00,\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fTmax\x19Upper limit in T (if set)\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00i\x80\x00\x02\x08@\x00\x00a\x00\x02@\x00\x00[\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fTsumwt\x15Total Sum of weight*T\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00l\x80\x00\x02\x08@\x00\x00d\x00\x02@\x00\x00^\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fTsumwt2\x17Total Sum of 
weight*T*T\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00\x81\x80\x00\x08\xbf@\x00\x00y\x00\x02@\x00\x00s\x00\x04@\x00\x00A\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfBinSumw2*Array of sum of squares of weights per bin\x00\x00\x00>\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TArrayD\x00@\x00\x01#\x80\x00\x00[@\x00\x01\x1b\x00\t@\x00\x00\x12\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x04TH3D\x00d\xb9\xff\x86\x00\x00\x00\x04@\x00\x00\xf7\x80\x00\x00\x9b@\x00\x00\xef\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH3\x1a3-Dim histogram base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00B\xd2D_\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x06@\x00\x00f\x80\x00\x00\xc6@\x00\x00^\x00\x03@\x00\x00T\x00\x04@\x00\x00%\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TArrayD\x10Array of doubles\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00q9\xef4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01\x00@\x00\x04(\x80\x00\x00[@\x00\x04 \x00\t@\x00\x00\x11\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x03TH3\x00B\xd2D_\x00\x00\x00\x06@\x00\x03\xfd\x80\x00\x00\x9b@\x00\x03\xf5\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\t\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH1\x1a1-Dim histogram base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1c7@\xc4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x08@\x00\x00b\x80\x00\x00\xc6@\x00\x00Z\x00\x03@\x00\x00P\x00\x04@\x00\x00!\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06TAtt3D\r3D attributes\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00uz\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00i\x80\x00\x02\x08@\x00\x00a\x00\x02@\x00\x00[\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fTsumwy\x15Total Sum of weight*Y\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00l\x80\x00\x02\x08@\x00\x00d\x00\x02@\x00\x00^\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fTsumwy2\x17Total Sum of weight*Y*Y\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00l\x80\x00\x02\x08@\x00\x00d\x00\x02@\x00\x00^\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fTsumwxy\x17Total Sum of weight*X*Y\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00i\x80\x00\x02\x08@\x00\x00a\x00\x02@\x00\x00[\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fTsumwz\x15Total Sum of 
weight*Z\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00l\x80\x00\x02\x08@\x00\x00d\x00\x02@\x00\x00^\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fTsumwz2\x17Total Sum of weight*Z*Z\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00l\x80\x00\x02\x08@\x00\x00d\x00\x02@\x00\x00^\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fTsumwxz\x17Total Sum of weight*X*Z\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00l\x80\x00\x02\x08@\x00\x00d\x00\x02@\x00\x00^\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fTsumwyz\x17Total Sum of weight*Y*Z\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double\x00@\x00\x00K\x80\x00\x00[@\x00\x00C\x00\t@\x00\x00\x14\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x06TAtt3D\x00\x00\x00uz\x00\x00\x00\x01@\x00\x00\x1d\x80\x00\x00\x9b@\x00\x00\x15\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00@\x00\x00\xf1\x80\x00\x00[@\x00\x00\xe9\x00\t@\x00\x00\x19\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x0bvector\x00\x00\x98;\x88\x00\x00\x00\x06@\x00\x00\xbe\x80\x00\x00\x9b@\x00\x00\xb6\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00@\x00\x00\x9d\xff\xff\xff\xffTStreamerSTL\x00@\x00\x00\x88\x00\x03@\x00\x00z\x00\x04@\x00\x00D\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x04This2 Used to call the proper TStreamerInfo case\x00\x00\x01\xf4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0bvector\x00\x00\x00\x01\x00\x00\x00\x03\x00@\x00\x00\xed\x80\x00\x00[@\x00\x00\xe5\x00\t@\x00\x00\x1c\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x0evector\x00\x10\x0eCd\x00\x00\x00\x06@\x00\x00\xb7\x80\x00\x00\x9b@\x00\x00\xaf\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00@\x00\x00\x96\x80\x00A\x08@\x00\x00\x8e\x00\x03@\x00\x00\x80\x00\x04@\x00\x00G\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x04This5 Used to call the proper TStreamerInfo case\x00\x00\x01\xf4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0evector\x00\x00\x00\x01\x00\x00\x00\x08\x00@\x00\x00\xea\x80\x00\x00[@\x00\x00\xe2\x00\t@\x00\x00\x1b\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\rvector\x00\x05Z\x16\x9b\x00\x00\x00\x06@\x00\x00\xb5\x80\x00\x00\x9b@\x00\x00\xad\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00@\x00\x00\x94\x80\x00A\x08@\x00\x00\x8c\x00\x03@\x00\x00~\x00\x04@\x00\x00F\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x04This4 Used to call the proper TStreamerInfo 
case\x00\x00\x01\xf4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\rvector\x00\x00\x00\x01\x00\x00\x00\x05\x00@\x00\x00\xe7\x80\x00\x00[@\x00\x00\xdf\x00\t@\x00\x00\x1a\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x0cvector\x00\x01\xc8\xb4)\x00\x00\x00\x06@\x00\x00\xb3\x80\x00\x00\x9b@\x00\x00\xab\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00@\x00\x00\x92\x80\x00A\x08@\x00\x00\x8a\x00\x03@\x00\x00|\x00\x04@\x00\x00E\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x04This3 Used to call the proper TStreamerInfo case\x00\x00\x01\xf4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0cvector\x00\x00\x00\x01\x00\x00\x00\x04\x00@\x00\x00\xe7\x80\x00\x00[@\x00\x00\xdf\x00\t@\x00\x00\x1a\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x0cvector\x00\x01\xc8\xb0?\x00\x00\x00\x06@\x00\x00\xb3\x80\x00\x00\x9b@\x00\x00\xab\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00@\x00\x00\x92\x80\x00A\x08@\x00\x00\x8a\x00\x03@\x00\x00|\x00\x04@\x00\x00E\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x04This3 Used to call the proper TStreamerInfo case\x00\x00\x01\xf4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0cvector\x00\x00\x00\x01\x00\x00\x00\x01\x00@\x00\x00\xea\x80\x00\x00[@\x00\x00\xe2\x00\t@\x00\x00\x1b\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\rvector\x00\x05Z"G\x00\x00\x00\x06@\x00\x00\xb5\x80\x00\x00\x9b@\x00\x00\xad\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00@\x00\x00\x94\x80\x00A\x08@\x00\x00\x8c\x00\x03@\x00\x00~\x00\x04@\x00\x00F\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x04This4 Used to call the proper TStreamerInfo case\x00\x00\x01\xf4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\rvector\x00\x00\x00\x01\x00\x00\x00\x02\x00@\x00\x00\xeb\x80\x00\x00[@\x00\x00\xe3\x00\t@\x00\x00\x1c\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x0evector\x00\x10\x0er\xbc\x00\x00\x00\x06@\x00\x00\xb5\x80\x00\x00\x9b@\x00\x00\xad\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00@\x00\x00\x94\x80\x00A\x08@\x00\x00\x8c\x00\x03@\x00\x00~\x00\x04@\x00\x00E\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x04This3 Used to call the proper TStreamerInfo case\x00\x00\x01\xf4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0evector\x00\x00\x00\x01\x00\x00\x00=\x00@\x00\x11\xe6\x80\x00\x00[@\x00\x11\xde\x00\t@\x00\x00\x13\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x05TTree\x00rd\xe0\x7f\x00\x00\x00\x14@\x00\x11\xb9\x80\x00\x00\x9b@\x00\x11\xb1\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00!\x00\x00\x00\x00@\x00\x00\x7f\x80\x00\x00\xc6@\x00\x00w\x00\x03@\x00\x00m\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06TNamed*The basis for a named object (name, title)\x00\x00\x00C\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xdf\xb7J<\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00f\x80\x00\x00\xc6@\x00\x00^\x00\x03@\x00\x00T\x00\x04@\x00\x00%\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08TAttLine\x0fLine 
attributes\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x94\x07EI\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00k\x80\x00\x00\xc6@\x00\x00c\x00\x03@\x00\x00Y\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08TAttFill\x14Fill area attributes\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xd9*\x92\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00j\x80\x00\x00\xc6@\x00\x00b\x00\x03@\x00\x00X\x00\x04@\x00\x00)\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nTAttMarker\x11Marker attributes\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00)\x1d\x8b\xec\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00h\x80\x00\x02\x08@\x00\x00`\x00\x02@\x00\x00Z\x00\x04@\x00\x00\'\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fEntries\x11Number of entries\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00\x90\x80\x00\x02\x08@\x00\x00\x88\x00\x02@\x00\x00\x82\x00\x04@\x00\x00O\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfTotBytes8Total number of bytes in all branches before compression\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00\x8f\x80\x00\x02\x08@\x00\x00\x87\x00\x02@\x00\x00\x81\x00\x04@\x00\x00N\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfZipBytes7Total number of bytes in all branches after compression\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00s\x80\x00\x02\x08@\x00\x00k\x00\x02@\x00\x00e\x00\x04@\x00\x002\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfSavedBytes\x19Number of autosaved bytes\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00x\x80\x00\x02\x08@\x00\x00p\x00\x02@\x00\x00j\x00\x04@\x00\x007\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\rfFlushedBytes\x1cNumber of auto-flushed bytes\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00v\x80\x00\x02\x08@\x00\x00n\x00\x02@\x00\x00h\x00\x04@\x00\x007\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fWeight"Tree weight (see TTree::SetWeight)\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00v\x80\x00\x02\x08@\x00\x00n\x00\x02@\x00\x00h\x00\x04@\x00\x00:\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0efTimerInterval\x1eTimer interval in milliseconds\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00{\x80\x00\x02\x08@\x00\x00s\x00\x02@\x00\x00m\x00\x04@\x00\x00?\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfScanField\'Number of runs before prompting in 
Scan\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00o\x80\x00\x02\x08@\x00\x00g\x00\x02@\x00\x00a\x00\x04@\x00\x003\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fUpdate\x1eUpdate frequency for EntryLoop\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00\x9a\x80\x00\x02\x08@\x00\x00\x92\x00\x02@\x00\x00\x8c\x00\x04@\x00\x00^\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x16fDefaultEntryOffsetLen:Initial Length of fEntryOffset table in the basket buffers\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00\x9d\x80\x00\x02\x08@\x00\x00\x95\x00\x02@\x00\x00\x8f\x00\x04@\x00\x00a\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0efNClusterRangeENumber of Cluster range in addition to the one defined by \'AutoFlush\'\x00\x00\x00\x06\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00\x8f\x80\x00\x02\x08@\x00\x00\x87\x00\x02@\x00\x00\x81\x00\x04@\x00\x00N\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfMaxEntries5Maximum number of entries in case of circular buffers\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00\x80\x80\x00\x02\x08@\x00\x00x\x00\x02@\x00\x00r\x00\x04@\x00\x00?\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\rfMaxEntryLoop$Maximum number of entries to process\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00\x8a\x80\x00\x02\x08@\x00\x00\x82\x00\x02@\x00\x00|\x00\x04@\x00\x00I\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0ffMaxVirtualSize,Maximum total size of buffers kept in memory\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00\xae\x80\x00\x02\x08@\x00\x00\xa6\x00\x02@\x00\x00\xa0\x00\x04@\x00\x00m\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfAutoSaveVAutosave tree when fAutoSave entries written or -fAutoSave (compressed) bytes produced\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00\xb3\x80\x00\x02\x08@\x00\x00\xab\x00\x02@\x00\x00\xa5\x00\x04@\x00\x00r\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfAutoFlushZAuto-flush tree when fAutoFlush entries written or -fAutoFlush (compressed) bytes produced\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00\x86\x80\x00\x02\x08@\x00\x00~\x00\x02@\x00\x00x\x00\x04@\x00\x00E\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfEstimate.Number of entries to estimate histogram limits\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00\xa8\x80\x00\x17\xd0@\x00\x00\xa0\x00\x02@\x00\x00\x81\x00\x04@\x00\x00M\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x10fClusterRangeEnd/[fNClusterRange] Last entry of a cluster 
range.\x00\x00\x008\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\tLong64_t*\x00\x00\x00\x14\x0efNClusterRange\x05TTree@\x00\x00\xba\x80\x00\x17\xd0@\x00\x00\xb2\x00\x02@\x00\x00\x93\x00\x04@\x00\x00_\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0cfClusterSizeE[fNClusterRange] Number of entries in each cluster for a given range.\x00\x00\x008\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\tLong64_t*\x00\x00\x00\x14\x0efNClusterRange\x05TTree@\x00\x00\xa0\x80\x00\x08\xbf@\x00\x00\x98\x00\x02@\x00\x00\x92\x00\x04@\x00\x00V\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfIOFeatures=IO features to define for newly-written baskets and branches.\x00\x00\x00>\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11ROOT::TIOFeatures@\x00\x00i\x80\x00\x03\xc7@\x00\x00a\x00\x02@\x00\x00[\x00\x04@\x00\x00\'\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfBranches\x10List of Branches\x00\x00\x00=\x00\x00\x00@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\tTObjArray@\x00\x00\x82\x80\x00\x03\xc7@\x00\x00z\x00\x02@\x00\x00t\x00\x04@\x00\x00@\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fLeaves+Direct pointers to individual branch leaves\x00\x00\x00=\x00\x00\x00@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\tTObjArray@\x00\x00\x90\x80\x00\x16\xcb@\x00\x00\x88\x00\x02@\x00\x00\x82\x00\x04@\x00\x00Q\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fAliases;List of aliases for expressions based on the tree branches.\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06TList*@\x00\x00m\x80\x00\x08\xbf@\x00\x00e\x00\x02@\x00\x00_\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0cfIndexValues\x13Sorted index values\x00\x00\x00>\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TArrayD@\x00\x00j\x80\x00\x08\xbf@\x00\x00b\x00\x02@\x00\x00\\\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06fIndex\x16Index of sorted values\x00\x00\x00>\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TArrayI@\x00\x00\x81\x80\x00\x16\xcb@\x00\x00y\x00\x02@\x00\x00s\x00\x04@\x00\x00:\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfTreeIndex"Pointer to the tree Index (if any)\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0eTVirtualIndex*@\x00\x00w\x80\x00\x16\xcb@\x00\x00o\x00\x02@\x00\x00i\x00\x04@\x00\x008\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fFriends"pointer to list of friend elements\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06TList*@\x00\x00\x8f\x80\x00\x16\xcb@\x00\x00\x87\x00\x02@\x00\x00\x81\x00\x04@\x00\x00P\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfUserInfo9pointer to a list of user objects associated to this 
Tree\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06TList*@\x00\x00\x84\x80\x00\x16\xcb@\x00\x00|\x00\x02@\x00\x00v\x00\x04@\x00\x00@\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfBranchRef(Branch supporting the TRefTable (if any)\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0bTBranchRef*\x00@\x00\x00\xb5\x80\x00\x00[@\x00\x00\xad\x00\t@\x00\x00\x1f\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x11ROOT::TIOFeatures\x00\x1a\xa1/\x10\x00\x00\x00\x01@\x00\x00|\x80\x00\x00\x9b@\x00\x00t\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00@\x00\x00[\x80\x00\x02\x08@\x00\x00S\x00\x02@\x00\x00M\x00\x04@\x00\x00\x15\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fIOBits\x00\x00\x00\x00\x0b\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\runsigned char\x00@\x00\x0b\xb9\x80\x00\x00[@\x00\x0b\xb1\x00\t@\x00\x00\x15\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x07TBranch\x00\x10\x97\x8a\xac\x00\x00\x00\r@\x00\x0b\x8a\x80\x00\x00\x9b@\x00\x0b\x82\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x16\x00\x00\x00\x00@\x00\x00\x7f\x80\x00\x00\xc6@\x00\x00w\x00\x03@\x00\x00m\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06TNamed*The basis for a named object (name, title)\x00\x00\x00C\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xdf\xb7J<\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00k\x80\x00\x00\xc6@\x00\x00c\x00\x03@\x00\x00Y\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08TAttFill\x14Fill area attributes\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xd9*\x92\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00r\x80\x00\x02\x08@\x00\x00j\x00\x02@\x00\x00d\x00\x04@\x00\x006\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfCompress\x1fCompression level and algorithm\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00s\x80\x00\x02\x08@\x00\x00k\x00\x02@\x00\x00e\x00\x04@\x00\x007\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfBasketSize\x1eInitial Size of Basket Buffer\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00\x93\x80\x00\x02\x08@\x00\x00\x8b\x00\x02@\x00\x00\x85\x00\x04@\x00\x00W\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0ffEntryOffsetLen:Initial Length of fEntryOffset table in the basket buffers\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00p\x80\x00\x02\x08@\x00\x00h\x00\x02@\x00\x00b\x00\x04@\x00\x004\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0cfWriteBasket\x1aLast basket number written\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00\x90\x80\x00\x02\x08@\x00\x00\x88\x00\x02@\x00\x00\x82\x00\x04@\x00\x00O\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0cfEntryNumber5Current entry number (last one filled in this 
branch)\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00\x89\x80\x00\x08\xbf@\x00\x00\x81\x00\x02@\x00\x00{\x00\x04@\x00\x00?\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfIOFeatures&IO features for newly-created baskets.\x00\x00\x00>\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11ROOT::TIOFeatures@\x00\x00f\x80\x00\x02\x08@\x00\x00^\x00\x02@\x00\x00X\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fOffset\x15Offset of this branch\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00u\x80\x00\x02\x08@\x00\x00m\x00\x02@\x00\x00g\x00\x04@\x00\x009\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfMaxBaskets Maximum number of Baskets so far\x00\x00\x00\x06\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00g\x80\x00\x02\x08@\x00\x00_\x00\x02@\x00\x00Y\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfSplitLevel\x12Branch split level\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00h\x80\x00\x02\x08@\x00\x00`\x00\x02@\x00\x00Z\x00\x04@\x00\x00\'\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fEntries\x11Number of entries\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00\x82\x80\x00\x02\x08@\x00\x00z\x00\x02@\x00\x00t\x00\x04@\x00\x00A\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfFirstEntry(Number of the first entry in this branch\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00\x8e\x80\x00\x02\x08@\x00\x00\x86\x00\x02@\x00\x00\x80\x00\x04@\x00\x00M\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfTotBytes6Total number of bytes in all leaves before compression\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00\x8d\x80\x00\x02\x08@\x00\x00\x85\x00\x02@\x00\x00\x7f\x00\x04@\x00\x00L\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfZipBytes5Total number of bytes in all leaves after compression\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00{\x80\x00\x03\xc7@\x00\x00s\x00\x02@\x00\x00m\x00\x04@\x00\x009\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfBranches"-> List of Branches of this branch\x00\x00\x00=\x00\x00\x00@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\tTObjArray@\x00\x00w\x80\x00\x03\xc7@\x00\x00o\x00\x02@\x00\x00i\x00\x04@\x00\x005\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fLeaves -> List of leaves of this branch\x00\x00\x00=\x00\x00\x00@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\tTObjArray@\x00\x00y\x80\x00\x03\xc7@\x00\x00q\x00\x02@\x00\x00k\x00\x04@\x00\x007\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fBaskets!-> List 
of baskets of this branch\x00\x00\x00=\x00\x00\x00@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\tTObjArray@\x00\x00\x96\x80\x00\x17\xd0@\x00\x00\x8e\x00\x02@\x00\x00p\x00\x04@\x00\x00A\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0cfBasketBytes\'[fMaxBaskets] Length of baskets on file\x00\x00\x00+\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04int*\x00\x00\x00\r\x0bfMaxBaskets\x07TBranch@\x00\x00\xa5\x80\x00\x17\xd0@\x00\x00\x9d\x00\x02@\x00\x00\x7f\x00\x04@\x00\x00K\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0cfBasketEntry1[fMaxBaskets] Table of first entry in each basket\x00\x00\x008\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\tLong64_t*\x00\x00\x00\r\x0bfMaxBaskets\x07TBranch@\x00\x00\x9d\x80\x00\x17\xd0@\x00\x00\x95\x00\x02@\x00\x00w\x00\x04@\x00\x00C\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfBasketSeek*[fMaxBaskets] Addresses of baskets on file\x00\x00\x008\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\tLong64_t*\x00\x00\x00\r\x0bfMaxBaskets\x07TBranch@\x00\x00\xa0\x80\x00\x01?@\x00\x00\x98\x00\x02@\x00\x00\x92\x00\x04@\x00\x00`\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfFileNameIName of file where buffers are stored ("" if in same file as Tree header)\x00\x00\x00A\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TString\x00@\x00\x01\xc6\x80\x00\x00[@\x00\x01\xbe\x00\t@\x00\x00\x14\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x06TLeafI\x00~j\xae\x19\x00\x00\x00\x01@\x00\x01\x98\x80\x00\x00\x9b@\x00\x01\x90\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00{\x80\x00\x00\xc6@\x00\x00s\x00\x03@\x00\x00i\x00\x04@\x00\x00:\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05TLeaf\'Leaf: description of a Branch data type\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00m\x1e\x81R\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00z\x80\x00\x02\x08@\x00\x00r\x00\x02@\x00\x00l\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMinimum(Minimum value if leaf range is specified\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00z\x80\x00\x02\x08@\x00\x00r\x00\x02@\x00\x00l\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMaximum(Maximum value if leaf range is specified\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int\x00@\x00\x03\xe8\x80\x00\x00[@\x00\x03\xe0\x00\t@\x00\x00\x13\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x05TLeaf\x00m\x1e\x81R\x00\x00\x00\x02@\x00\x03\xbb\x80\x00\x00\x9b@\x00\x03\xb3\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x07\x00\x00\x00\x00@\x00\x00\x7f\x80\x00\x00\xc6@\x00\x00w\x00\x03@\x00\x00m\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06TNamed*The basis for a named object (name, 
title)\x00\x00\x00C\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xdf\xb7J<\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00\x81\x80\x00\x02\x08@\x00\x00y\x00\x02@\x00\x00s\x00\x04@\x00\x00E\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x04fLen3Number of fixed length elements in the leaf\'s data.\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00t\x80\x00\x02\x08@\x00\x00l\x00\x02@\x00\x00f\x00\x04@\x00\x008\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fLenType"Number of bytes for this data type\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00v\x80\x00\x02\x08@\x00\x00n\x00\x02@\x00\x00h\x00\x04@\x00\x00:\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fOffset%Offset in ClonesArray object (if one)\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00\x81\x80\x00\x02\x08@\x00\x00y\x00\x02@\x00\x00s\x00\x04@\x00\x00D\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fIsRange.(=kTRUE if leaf has a range, kFALSE otherwise)\x00\x00\x00\x12\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04bool@\x00\x00|\x80\x00\x02\x08@\x00\x00t\x00\x02@\x00\x00n\x00\x04@\x00\x00?\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfIsUnsigned&(=kTRUE if unsigned, kFALSE otherwise)\x00\x00\x00\x12\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04bool@\x00\x00\x9b\x80\x00\x16\xcb@\x00\x00\x93\x00\x02@\x00\x00\x8d\x00\x04@\x00\x00\\\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfLeafCountDPointer to Leaf count if variable length (we do not own the counter)\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06TLeaf*\x00@\x00\x01\xcc\x80\x00\x00[@\x00\x01\xc4\x00\t@\x00\x00\x14\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x06TLeafD\x00\x11\x8e\x87v\x00\x00\x00\x01@\x00\x01\x9e\x80\x00\x00\x9b@\x00\x01\x96\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00{\x80\x00\x00\xc6@\x00\x00s\x00\x03@\x00\x00i\x00\x04@\x00\x00:\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05TLeaf\'Leaf: description of a Branch data type\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00m\x1e\x81R\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00}\x80\x00\x02\x08@\x00\x00u\x00\x02@\x00\x00o\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMinimum(Minimum value if leaf range is specified\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00}\x80\x00\x02\x08@\x00\x00u\x00\x02@\x00\x00o\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMaximum(Maximum value if leaf range is 
specified\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double\x00@\x00\x01\xca\x80\x00\x00[@\x00\x01\xc2\x00\t@\x00\x00\x14\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x06TLeafF\x00:\xdd\x9dr\x00\x00\x00\x01@\x00\x01\x9c\x80\x00\x00\x9b@\x00\x01\x94\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00{\x80\x00\x00\xc6@\x00\x00s\x00\x03@\x00\x00i\x00\x04@\x00\x00:\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05TLeaf\'Leaf: description of a Branch data type\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00m\x1e\x81R\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00|\x80\x00\x02\x08@\x00\x00t\x00\x02@\x00\x00n\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMinimum(Minimum value if leaf range is specified\x00\x00\x00\x05\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05float@\x00\x00|\x80\x00\x02\x08@\x00\x00t\x00\x02@\x00\x00n\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMaximum(Maximum value if leaf range is specified\x00\x00\x00\x05\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05float\x00@\x00\x01\xd0\x80\x00\x00[@\x00\x01\xc8\x00\t@\x00\x00\x14\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x06TLeafL\x00\xde2\x08b\x00\x00\x00\x01@\x00\x01\xa2\x80\x00\x00\x9b@\x00\x01\x9a\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00{\x80\x00\x00\xc6@\x00\x00s\x00\x03@\x00\x00i\x00\x04@\x00\x00:\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05TLeaf\'Leaf: description of a Branch data type\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00m\x1e\x81R\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00\x7f\x80\x00\x02\x08@\x00\x00w\x00\x02@\x00\x00q\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMinimum(Minimum value if leaf range is specified\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00\x7f\x80\x00\x02\x08@\x00\x00w\x00\x02@\x00\x00q\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMaximum(Maximum value if leaf range is specified\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t\x00@\x00\x01\xc8\x80\x00\x00[@\x00\x01\xc0\x00\t@\x00\x00\x14\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x06TLeafB\x00\x0f\x1eK^\x00\x00\x00\x01@\x00\x01\x9a\x80\x00\x00\x9b@\x00\x01\x92\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00{\x80\x00\x00\xc6@\x00\x00s\x00\x03@\x00\x00i\x00\x04@\x00\x00:\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05TLeaf\'Leaf: description of a Branch data type\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00m\x1e\x81R\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00{\x80\x00\x02\x08@\x00\x00s\x00\x02@\x00\x00m\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMinimum(Minimum value if leaf range is 
specified\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04char@\x00\x00{\x80\x00\x02\x08@\x00\x00s\x00\x02@\x00\x00m\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMaximum(Maximum value if leaf range is specified\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04char\x00@\x00\x01\xca\x80\x00\x00[@\x00\x01\xc2\x00\t@\x00\x00\x14\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x06TLeafS\x00\x15\x0c\xee\xcf\x00\x00\x00\x01@\x00\x01\x9c\x80\x00\x00\x9b@\x00\x01\x94\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00{\x80\x00\x00\xc6@\x00\x00s\x00\x03@\x00\x00i\x00\x04@\x00\x00:\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05TLeaf\'Leaf: description of a Branch data type\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00m\x1e\x81R\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00|\x80\x00\x02\x08@\x00\x00t\x00\x02@\x00\x00n\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMinimum(Minimum value if leaf range is specified\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short@\x00\x00|\x80\x00\x02\x08@\x00\x00t\x00\x02@\x00\x00n\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMaximum(Maximum value if leaf range is specified\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short\x00@\x00\x01\xc8\x80\x00\x00[@\x00\x01\xc0\x00\t@\x00\x00\x14\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x06TLeafO\x00\x02\xaeH\xd3\x00\x00\x00\x01@\x00\x01\x9a\x80\x00\x00\x9b@\x00\x01\x92\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00{\x80\x00\x00\xc6@\x00\x00s\x00\x03@\x00\x00i\x00\x04@\x00\x00:\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05TLeaf\'Leaf: description of a Branch data type\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00m\x1e\x81R\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00{\x80\x00\x02\x08@\x00\x00s\x00\x02@\x00\x00m\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMinimum(Minimum value if leaf range is specified\x00\x00\x00\x12\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04bool@\x00\x00{\x80\x00\x02\x08@\x00\x00s\x00\x02@\x00\x00m\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMaximum(Maximum value if leaf range is specified\x00\x00\x00\x12\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04bool\x00@\x00\x05\xf1\x80\x00\x00[@\x00\x05\xe9\x00\t@\x00\x00\x1c\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x0eTBranchElement\x00\xe7O^c\x00\x00\x00\n@\x00\x05\xbb\x80\x00\x00\x9b@\x00\x05\xb3\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00\x00@\x00\x00g\x80\x00\x00\xc6@\x00\x00_\x00\x03@\x00\x00U\x00\x04@\x00\x00&\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TBranch\x11Branch 
descriptor\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x97\x8a\xac\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\r@\x00\x00w\x80\x00\x01?@\x00\x00o\x00\x02@\x00\x00i\x00\x04@\x00\x007\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfClassName\x1fClass name of referenced object\x00\x00\x00A\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TString@\x00\x00m\x80\x00\x01?@\x00\x00e\x00\x02@\x00\x00_\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfParentName\x14Name of parent class\x00\x00\x00A\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TString@\x00\x00\x7f\x80\x00\x01?@\x00\x00w\x00\x02@\x00\x00q\x00\x04@\x00\x00?\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfClonesName&Name of class in TClonesArray (if any)\x00\x00\x00A\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TString@\x00\x00m\x80\x00\x02\x08@\x00\x00e\x00\x02@\x00\x00_\x00\x04@\x00\x00(\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfCheckSum\x11CheckSum of class\x00\x00\x00\r\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0cunsigned int@\x00\x00p\x80\x00\x02\x08@\x00\x00h\x00\x02@\x00\x00b\x00\x04@\x00\x002\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\rfClassVersion\x17Version number of class\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short@\x00\x00k\x80\x00\x02\x08@\x00\x00c\x00\x02@\x00\x00]\x00\x04@\x00\x00/\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03fID\x1eelement serial number in fInfo\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00Z\x80\x00\x02\x08@\x00\x00R\x00\x02@\x00\x00L\x00\x04@\x00\x00\x1e\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fType\x0bbranch type\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00k\x80\x00\x02\x08@\x00\x00c\x00\x02@\x00\x00]\x00\x04@\x00\x00/\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\rfStreamerType\x14branch streamer type\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00\x86\x80\x00\x02\x08@\x00\x00~\x00\x02@\x00\x00x\x00\x04@\x00\x00J\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMaximum4Maximum entries for a TClonesArray or variable array\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00\x87\x80\x00\x16\xcb@\x00\x00\x7f\x00\x02@\x00\x00y\x00\x04@\x00\x00?\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0cfBranchCount%pointer to primary branchcount branch\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0fTBranchElement*@\x00\x00\x8a\x80\x00\x16\xcb@\x00\x00\x82\x00\x02@\x00\x00|\x00\x04@\x00\x00B\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\rfBranchCount2\'pointer to secondary branchcount 
branch\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0fTBranchElement*\x00@\x00\x01\x9b\x80\x00\x00[@\x00\x01\x93\x00\t@\x00\x00\x1a\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x0cTLeafElement\x00\xa0O\x88\x93\x00\x00\x00\x01@\x00\x01g\x80\x00\x00\x9b@\x00\x01_\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00{\x80\x00\x00\xc6@\x00\x00s\x00\x03@\x00\x00i\x00\x04@\x00\x00:\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05TLeaf\'Leaf: description of a Branch data type\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00m\x1e\x81R\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00k\x80\x00\x02\x08@\x00\x00c\x00\x02@\x00\x00]\x00\x04@\x00\x00/\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03fID\x1eelement serial number in fInfo\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00X\x80\x00\x02\x08@\x00\x00P\x00\x02@\x00\x00J\x00\x04@\x00\x00\x1c\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fType\tleaf type\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int\x00@\x00\x01\xc6\x80\x00\x00[@\x00\x01\xbe\x00\t@\x00\x00\x14\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x06TLeafC\x00\xfb\xe3\xb2\xf3\x00\x00\x00\x01@\x00\x01\x98\x80\x00\x00\x9b@\x00\x01\x90\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00{\x80\x00\x00\xc6@\x00\x00s\x00\x03@\x00\x00i\x00\x04@\x00\x00:\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05TLeaf\'Leaf: description of a Branch data type\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00m\x1e\x81R\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00z\x80\x00\x02\x08@\x00\x00r\x00\x02@\x00\x00l\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMinimum(Minimum value if leaf range is specified\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00z\x80\x00\x02\x08@\x00\x00r\x00\x02@\x00\x00l\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMaximum(Maximum value if leaf range is specified\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int\x00@\x00\x010\x80\x00\x00[@\x00\x01(\x00\t@\x00\x00\x18\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\nTBranchRef\x00#`\xb3\xfd\x00\x00\x00\x01@\x00\x00\xfe\x80\x00\x00\x9b@\x00\x00\xf6\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00g\x80\x00\x00\xc6@\x00\x00_\x00\x03@\x00\x00U\x00\x04@\x00\x00&\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TBranch\x11Branch descriptor\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x97\x8a\xac\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\r@\x00\x00r\x80\x00\x16\xcb@\x00\x00j\x00\x02@\x00\x00d\x00\x04@\x00\x00/\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfRefTable\x18pointer to the 
TRefTable\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\nTRefTable*\x00@\x00\x02\xdc\x80\x00\x00[@\x00\x02\xd4\x00\t@\x00\x00\x17\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\tTRefTable\x00\x8c\x89[\x85\x00\x00\x00\x03@\x00\x02\xab\x80\x00\x00\x9b@\x00\x02\xa3\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00\x00@\x00\x00g\x80\x00\x00\xc6@\x00\x00_\x00\x03@\x00\x00U\x00\x04@\x00\x00&\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TObject\x11Basic ROOT object\x00\x00\x00B\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x90\x1b\xc0-\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00o\x80\x00\x02\x08@\x00\x00g\x00\x02@\x00\x00a\x00\x04@\x00\x003\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fSize dummy for backward compatibility\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00\xa2\x80\x00\x16\xcb@\x00\x00\x9a\x00\x02@\x00\x00\x94\x00\x04@\x00\x00_\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fParentsIarray of Parent objects (eg TTree branch) holding the referenced objects\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\nTObjArray*@\x00\x00q\x80\x00\x16\xcb@\x00\x00i\x00\x02@\x00\x00c\x00\x04@\x00\x000\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06fOwner\x1cObject owning this TRefTable\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08TObject*@\x00\x00\x91\x80\x00A\x08@\x00\x00\x89\x00\x03@\x00\x00{\x00\x04@\x00\x00B\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\rfProcessGUIDs\'UUIDs of TProcessIDs used in fParentIDs\x00\x00\x01\xf4\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0evector\x00\x00\x00\x01\x00\x00\x00=\x00@\x00\x01\xb8\x80\x00\x00[@\x00\x01\xb0\x00\t@\x00\x00\x17\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\tTObjArray\x00\xa9\x9eeR\x00\x00\x00\x03@\x00\x01\x87\x80\x00\x00\x9b@\x00\x01\x7f\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00x\x80\x00\x00\xc6@\x00\x00p\x00\x03@\x00\x00f\x00\x04@\x00\x007\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0eTSeqCollection\x1bSequenceable collection ABC\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfcl;\xc6\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x00@\x00\x00m\x80\x00\x02\x08@\x00\x00e\x00\x02@\x00\x00_\x00\x04@\x00\x001\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfLowerBound\x18Lower bound of the array\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00y\x80\x00\x02\x08@\x00\x00q\x00\x02@\x00\x00k\x00\x04@\x00\x00=\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fLast*Last element in array containing an 
object\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int\x00@\x00\x05\x90\x80\x00\x00[@\x00\x05\x88\x00\t@\x00\x00\x14\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x06TGraph\x00\x05\xf7\xf4e\x00\x00\x00\x04@\x00\x05b\x80\x00\x00\x9b@\x00\x05Z\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x0b\x00\x00\x00\x00@\x00\x00\x7f\x80\x00\x00\xc6@\x00\x00w\x00\x03@\x00\x00m\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06TNamed*The basis for a named object (name, title)\x00\x00\x00C\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xdf\xb7J<\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00f\x80\x00\x00\xc6@\x00\x00^\x00\x03@\x00\x00T\x00\x04@\x00\x00%\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08TAttLine\x0fLine attributes\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x94\x07EI\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00k\x80\x00\x00\xc6@\x00\x00c\x00\x03@\x00\x00Y\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08TAttFill\x14Fill area attributes\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xd9*\x92\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00j\x80\x00\x00\xc6@\x00\x00b\x00\x03@\x00\x00X\x00\x04@\x00\x00)\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nTAttMarker\x11Marker attributes\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00)\x1d\x8b\xec\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00n\x80\x00\x02\x08@\x00\x00f\x00\x02@\x00\x00`\x00\x04@\x00\x002\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fNpoints\x1cNumber of points <= fMaxSize\x00\x00\x00\x06\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00\x80\x80\x00\x17\xd0@\x00\x00x\x00\x02@\x00\x00^\x00\x04@\x00\x00,\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x02fX\x1c[fNpoints] array of X points\x00\x00\x000\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07double*\x00\x00\x00\x04\x08fNpoints\x06TGraph@\x00\x00\x80\x80\x00\x17\xd0@\x00\x00x\x00\x02@\x00\x00^\x00\x04@\x00\x00,\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x02fY\x1c[fNpoints] array of Y points\x00\x00\x000\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07double*\x00\x00\x00\x04\x08fNpoints\x06TGraph@\x00\x00\x83\x80\x00\x16\xcb@\x00\x00{\x00\x02@\x00\x00u\x00\x04@\x00\x00D\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfFunctions,Pointer to list of functions (fits and user)\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06TList*@\x00\x00\x80\x80\x00\x16\xcb@\x00\x00x\x00\x02@\x00\x00r\x00\x04@\x00\x00B\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfHistogram*Pointer to histogram used for drawing axis\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05TH1F*@\x00\x00w\x80\x00\x02\x08@\x00\x00o\x00\x02@\x00\x00i\x00\x04@\x00\x008\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMinimum"Minimum value for plotting 
along y\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00w\x80\x00\x02\x08@\x00\x00o\x00\x02@\x00\x00i\x00\x04@\x00\x008\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMaximum"Maximum value for plotting along y\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double\x00@\x00\x01"\x80\x00\x00[@\x00\x01\x1a\x00\t@\x00\x00\x12\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x04TH1F\x00\xe2\x93\x96D\x00\x00\x00\x03@\x00\x00\xf6\x80\x00\x00\x9b@\x00\x00\xee\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH1\x1a1-Dim histogram base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1c7@\xc4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x08@\x00\x00e\x80\x00\x00\xc6@\x00\x00]\x00\x03@\x00\x00S\x00\x04@\x00\x00$\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TArrayF\x0fArray of floats\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00Z\x0b\xf6\xf1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01\x00@\x00\x01:\x80\x00\x00[@\x00\x012\x00\t@\x00\x00\x15\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x07TArrayF\x00Z\x0b\xf6\xf1\x00\x00\x00\x01@\x00\x01\x0b\x80\x00\x00\x9b@\x00\x01\x03\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00n\x80\x00\x00\xc6@\x00\x00f\x00\x03@\x00\x00\\\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06TArray\x19Abstract array base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00p!\xb2\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00x\x80\x00\x17\xd0@\x00\x00p\x00\x02@\x00\x00\\\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06fArray\x17[fN] Array of fN floats\x00\x00\x00-\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06float*\x00\x00\x00\x01\x02fN\x06TArray\x00@\x00\x03F\x80\x00\x00[@\x00\x03>\x00\t@\x00\x00\x19\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x0bTMultiGraph\x00\xe0\x89<\xd5\x00\x00\x00\x02@\x00\x03\x13\x80\x00\x00\x9b@\x00\x03\x0b\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x06\x00\x00\x00\x00@\x00\x00\x7f\x80\x00\x00\xc6@\x00\x00w\x00\x03@\x00\x00m\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06TNamed*The basis for a named object (name, title)\x00\x00\x00C\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xdf\xb7J<\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00n\x80\x00\x16\xcb@\x00\x00f\x00\x02@\x00\x00`\x00\x04@\x00\x00/\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fGraphs\x1aPointer to list of TGraphs\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06TList*@\x00\x00\x83\x80\x00\x16\xcb@\x00\x00{\x00\x02@\x00\x00u\x00\x04@\x00\x00D\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfFunctions,Pointer to list of functions (fits and 
user)\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06TList*@\x00\x00\x80\x80\x00\x16\xcb@\x00\x00x\x00\x02@\x00\x00r\x00\x04@\x00\x00B\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfHistogram*Pointer to histogram used for drawing axis\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05TH1F*@\x00\x00w\x80\x00\x02\x08@\x00\x00o\x00\x02@\x00\x00i\x00\x04@\x00\x008\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMaximum"Maximum value for plotting along y\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00w\x80\x00\x02\x08@\x00\x00o\x00\x02@\x00\x00i\x00\x04@\x00\x008\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMinimum"Minimum value for plotting along y\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double\x00@\x00\x01!\x80\x00\x00[@\x00\x01\x19\x00\t@\x00\x00\x12\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x04TH1C\x006\xf6\xe4\xad\x00\x00\x00\x03@\x00\x00\xf5\x80\x00\x00\x9b@\x00\x00\xed\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH1\x1a1-Dim histogram base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1c7@\xc4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x08@\x00\x00d\x80\x00\x00\xc6@\x00\x00\\\x00\x03@\x00\x00R\x00\x04@\x00\x00#\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TArrayC\x0eArray of chars\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xae\x87\x996\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01\x00@\x00\x01 \x80\x00\x00[@\x00\x01\x18\x00\t@\x00\x00\x12\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x04TH1I\x00bud\xf6\x00\x00\x00\x03@\x00\x00\xf4\x80\x00\x00\x9b@\x00\x00\xec\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH1\x1a1-Dim histogram base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1c7@\xc4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x08@\x00\x00c\x80\x00\x00\xc6@\x00\x00[\x00\x03@\x00\x00Q\x00\x04@\x00\x00"\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TArrayI\rArray of ints\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd9\xd5q\xc7\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01\x00@\x00\x01"\x80\x00\x00[@\x00\x01\x1a\x00\t@\x00\x00\x12\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x04TH1S\x00\x8cM\x9d\xcb\x00\x00\x00\x03@\x00\x00\xf6\x80\x00\x00\x9b@\x00\x00\xee\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH1\x1a1-Dim histogram base 
class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1c7@\xc4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x08@\x00\x00e\x80\x00\x00\xc6@\x00\x00]\x00\x03@\x00\x00S\x00\x04@\x00\x00$\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TArrayS\x0fArray of shorts\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\\\x93\x14\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01\x00@\x00\x01!\x80\x00\x00[@\x00\x01\x19\x00\t@\x00\x00\x12\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x04TH2C\x00\xbd\x00\x10\xfe\x00\x00\x00\x04@\x00\x00\xf5\x80\x00\x00\x9b@\x00\x00\xed\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH2\x1a2-Dim histogram base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x824\x7f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x05@\x00\x00d\x80\x00\x00\xc6@\x00\x00\\\x00\x03@\x00\x00R\x00\x04@\x00\x00#\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TArrayC\x0eArray of chars\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xae\x87\x996\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01\x00@\x00\x01"\x80\x00\x00[@\x00\x01\x1a\x00\t@\x00\x00\x12\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x04TH2S\x00\x12V\xca\x1c\x00\x00\x00\x04@\x00\x00\xf6\x80\x00\x00\x9b@\x00\x00\xee\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH2\x1a2-Dim histogram base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x824\x7f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x05@\x00\x00e\x80\x00\x00\xc6@\x00\x00]\x00\x03@\x00\x00S\x00\x04@\x00\x00$\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TArrayS\x0fArray of shorts\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\\\x93\x14\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01\x00@\x00\x01 \x80\x00\x00[@\x00\x01\x18\x00\t@\x00\x00\x12\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x04TH2I\x00\xe8~\x91G\x00\x00\x00\x04@\x00\x00\xf4\x80\x00\x00\x9b@\x00\x00\xec\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH2\x1a2-Dim histogram base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x824\x7f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x05@\x00\x00c\x80\x00\x00\xc6@\x00\x00[\x00\x03@\x00\x00Q\x00\x04@\x00\x00"\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TArrayI\rArray of 
ints\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd9\xd5q\xc7\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01\x00@\x00\x01"\x80\x00\x00[@\x00\x01\x1a\x00\t@\x00\x00\x12\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x04TH2F\x00h\x9c\xc2\x95\x00\x00\x00\x04@\x00\x00\xf6\x80\x00\x00\x9b@\x00\x00\xee\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH2\x1a2-Dim histogram base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x824\x7f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x05@\x00\x00e\x80\x00\x00\xc6@\x00\x00]\x00\x03@\x00\x00S\x00\x04@\x00\x00$\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TArrayF\x0fArray of floats\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00Z\x0b\xf6\xf1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01\x00@\x00\x01!\x80\x00\x00[@\x00\x01\x19\x00\t@\x00\x00\x12\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x04TH3C\x00\xa1\xff\x8d\x94\x00\x00\x00\x04@\x00\x00\xf5\x80\x00\x00\x9b@\x00\x00\xed\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH3\x1a3-Dim histogram base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00B\xd2D_\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x06@\x00\x00d\x80\x00\x00\xc6@\x00\x00\\\x00\x03@\x00\x00R\x00\x04@\x00\x00#\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TArrayC\x0eArray of chars\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xae\x87\x996\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01\x00@\x00\x01"\x80\x00\x00[@\x00\x01\x1a\x00\t@\x00\x00\x12\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x04TH3S\x00\xf7VF\xb2\x00\x00\x00\x04@\x00\x00\xf6\x80\x00\x00\x9b@\x00\x00\xee\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH3\x1a3-Dim histogram base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00B\xd2D_\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x06@\x00\x00e\x80\x00\x00\xc6@\x00\x00]\x00\x03@\x00\x00S\x00\x04@\x00\x00$\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TArrayS\x0fArray of shorts\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\\\x93\x14\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01\x00@\x00\x01 \x80\x00\x00[@\x00\x01\x18\x00\t@\x00\x00\x12\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x04TH3I\x00\xcd~\r\xdd\x00\x00\x00\x04@\x00\x00\xf4\x80\x00\x00\x9b@\x00\x00\xec\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH3\x1a3-Dim histogram base 
class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00B\xd2D_\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x06@\x00\x00c\x80\x00\x00\xc6@\x00\x00[\x00\x03@\x00\x00Q\x00\x04@\x00\x00"\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TArrayI\rArray of ints\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd9\xd5q\xc7\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01\x00@\x00\x01"\x80\x00\x00[@\x00\x01\x1a\x00\t@\x00\x00\x12\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x04TH3F\x00M\x9c?+\x00\x00\x00\x04@\x00\x00\xf6\x80\x00\x00\x9b@\x00\x00\xee\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH3\x1a3-Dim histogram base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00B\xd2D_\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x06@\x00\x00e\x80\x00\x00\xc6@\x00\x00]\x00\x03@\x00\x00S\x00\x04@\x00\x00$\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TArrayF\x0fArray of floats\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00Z\x0b\xf6\xf1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01\x00@\x00\x03a\xff\xff\xff\xffTList\x00@\x00\x03S\x00\x05\x00\x01\x00\x00\x00\x00\x02\x00@\x00\x0blistOfRules\x00\x00\x00\x05@\x00\x00\xa3\xff\xff\xff\xffTObjString\x00@\x00\x00\x90\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x83type=read sourceClass="TProfile" targetClass="TProfile" version="[1-5]" source="" target="fBinSumw2" code="{ fBinSumw2.Reset(); }" \x00@\x00\x00\x9c\x80\x00\x9d\x87@\x00\x00\x94\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x87type=read sourceClass="TProfile2D" targetClass="TProfile2D" version="[1-6]" source="" target="fBinSumw2" code="{ fBinSumw2.Reset(); }" \x00@\x00\x00\x9c\x80\x00\x9d\x87@\x00\x00\x94\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x87type=read sourceClass="TProfile3D" targetClass="TProfile3D" version="[1-6]" source="" target="fBinSumw2" code="{ fBinSumw2.Reset(); }" \x00@\x00\x00\xab\x80\x00\x9d\x87@\x00\x00\xa3\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x96type=read sourceClass="TTree" targetClass="TTree" version="[-16]" source="" target="fDefaultEntryOffsetLen" code="{ fDefaultEntryOffsetLen = 1000; }" \x00@\x00\x00\x98\x80\x00\x9d\x87@\x00\x00\x90\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x83type=read sourceClass="TTree" targetClass="TTree" version="[-18]" source="" target="fNClusterRange" code="{ fNClusterRange = 0; }" \x00\x00' \ No newline at end of file +streamers = b'@\x00\xa0u\x00\x05\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00C@\x00\x01X\xff\xff\xff\xffTStreamerInfo\x00@\x00\x01B\x00\t@\x00\x00\x18\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\nTObjString\x00\x9c\x8eH\x00\x00\x00\x00\x01@\x00\x01\x18\xff\xff\xff\xffTObjArray\x00@\x00\x01\x06\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00u\xff\xff\xff\xffTStreamerBase\x00@\x00\x00_\x00\x03@\x00\x00U\x00\x04@\x00\x00&\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TObject\x11Basic ROOT 
object\x00\x00\x00B\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x90\x1b\xc0-\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00t\xff\xff\xff\xffTStreamerString\x00@\x00\x00\\\x00\x02@\x00\x00V\x00\x04@\x00\x00$\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fString\x0fwrapped TString\x00\x00\x00A\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TString\x00@\x00\x01H\x80\x00\x00[@\x00\x01@\x00\t@\x00\x00\x15\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x07TObject\x00\x90\x1b\xc0-\x00\x00\x00\x01@\x00\x01\x19\x80\x00\x00\x9b@\x00\x01\x11\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00\x87\xff\xff\xff\xffTStreamerBasicType\x00@\x00\x00l\x00\x02@\x00\x00f\x00\x04@\x00\x00/\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfUniqueID\x18object unique identifier\x00\x00\x00\r\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0cunsigned int@\x00\x00m\x80\x00\x02\x08@\x00\x00e\x00\x02@\x00\x00_\x00\x04@\x00\x00(\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fBits\x15bit field status word\x00\x00\x00\x0f\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0cunsigned int\x00@\x00\x01\xb3\x80\x00\x00[@\x00\x01\xab\x00\t@\x00\x00\x1c\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x0eTLorentzVector\x00\xe3\xde\xc1\xa1\x00\x00\x00\x04@\x00\x01}\x80\x00\x00\x9b@\x00\x01u\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00g\x80\x00\x00\xc6@\x00\x00_\x00\x03@\x00\x00U\x00\x04@\x00\x00&\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TObject\x11Basic ROOT object\x00\x00\x00B\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x90\x1b\xc0-\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00s\xff\xff\xff\xffTStreamerObject\x00@\x00\x00[\x00\x02@\x00\x00U\x00\x04@\x00\x00"\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x02fP\x123 vector component\x00\x00\x00=\x00\x00\x00(\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08TVector3@\x00\x00z\x80\x00\x02\x08@\x00\x00r\x00\x02@\x00\x00l\x00\x04@\x00\x00;\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x02fE+time or energy of (x,y,z,t) or (px,py,pz,e)\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double\x00@\x00\x01\xb1\x80\x00\x00[@\x00\x01\xa9\x00\t@\x00\x00\x16\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x08TVector3\x00\xab\xb6\xbe\x1e\x00\x00\x00\x03@\x00\x01\x81\x80\x00\x00\x9b@\x00\x01y\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00@\x00\x00g\x80\x00\x00\xc6@\x00\x00_\x00\x03@\x00\x00U\x00\x04@\x00\x00&\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TObject\x11Basic ROOT 
object\x00\x00\x00B\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x90\x1b\xc0-\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00O\x80\x00\x02\x08@\x00\x00G\x00\x02@\x00\x00A\x00\x04@\x00\x00\x10\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x02fX\x00\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00O\x80\x00\x02\x08@\x00\x00G\x00\x02@\x00\x00A\x00\x04@\x00\x00\x10\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x02fY\x00\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00O\x80\x00\x02\x08@\x00\x00G\x00\x02@\x00\x00A\x00\x04@\x00\x00\x10\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x02fZ\x00\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double\x00@\x00\x01v\x80\x00\x00[@\x00\x01n\x00\t@\x00\x00\x16\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x08TVector2\x00\x00\x89\xb7\xf4\x00\x00\x00\x03@\x00\x01F\x80\x00\x00\x9b@\x00\x01>\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00g\x80\x00\x00\xc6@\x00\x00_\x00\x03@\x00\x00U\x00\x04@\x00\x00&\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TObject\x11Basic ROOT object\x00\x00\x00B\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x90\x1b\xc0-\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00g\x80\x00\x02\x08@\x00\x00_\x00\x02@\x00\x00Y\x00\x04@\x00\x00(\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x02fX\x18components of the vector\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00O\x80\x00\x02\x08@\x00\x00G\x00\x02@\x00\x00A\x00\x04@\x00\x00\x10\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x02fY\x00\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double\x00@\x00\x04\r\x80\x00\x00[@\x00\x04\x05\x00\t@\x00\x00\x16\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x08TProfile\x00K\xed\xeeT\x00\x00\x00\x07@\x00\x03\xdd\x80\x00\x00\x9b@\x00\x03\xd5\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00@\x00\x00|\x80\x00\x00\xc6@\x00\x00t\x00\x03@\x00\x00j\x00\x04@\x00\x00;\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x04TH1D)1-Dim histograms (one double per channel)\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf9\xb1V\x9f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x03@\x00\x00\x85\xff\xff\xff\xffTStreamerObjectAny\x00@\x00\x00j\x00\x02@\x00\x00d\x00\x04@\x00\x002\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfBinEntries\x19number of entries per bin\x00\x00\x00>\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TArrayD@\x00\x00s\x80\x00\x02\x08@\x00\x00k\x00\x02@\x00\x00e\x00\x04@\x00\x000\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfErrorMode\x18Option to compute 
errors\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\nEErrorType@\x00\x00k\x80\x00\x02\x08@\x00\x00c\x00\x02@\x00\x00]\x00\x04@\x00\x00,\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fYmin\x19Lower limit in Y (if set)\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00k\x80\x00\x02\x08@\x00\x00c\x00\x02@\x00\x00]\x00\x04@\x00\x00,\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fYmax\x19Upper limit in Y (if set)\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00i\x80\x00\x02\x08@\x00\x00a\x00\x02@\x00\x00[\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fTsumwy\x15Total Sum of weight*Y\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00l\x80\x00\x02\x08@\x00\x00d\x00\x02@\x00\x00^\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fTsumwy2\x17Total Sum of weight*Y*Y\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00\x81\x80\x00\x08\xbf@\x00\x00y\x00\x02@\x00\x00s\x00\x04@\x00\x00A\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfBinSumw2*Array of sum of squares of weights per bin\x00\x00\x00>\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TArrayD\x00@\x00\x01#\x80\x00\x00[@\x00\x01\x1b\x00\t@\x00\x00\x12\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x04TH1D\x00\xf9\xb1V\x9f\x00\x00\x00\x03@\x00\x00\xf7\x80\x00\x00\x9b@\x00\x00\xef\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH1\x1a1-Dim histogram base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1c7@\xc4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x08@\x00\x00f\x80\x00\x00\xc6@\x00\x00^\x00\x03@\x00\x00T\x00\x04@\x00\x00%\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TArrayD\x10Array of doubles\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00q9\xef4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01\x00@\x00\x0cl\x80\x00\x00[@\x00\x0cd\x00\t@\x00\x00\x11\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x03TH1\x00\x1c7@\xc4\x00\x00\x00\x08@\x00\x0cA\x80\x00\x00\x9b@\x00\x0c9\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x1a\x00\x00\x00\x00@\x00\x00\x7f\x80\x00\x00\xc6@\x00\x00w\x00\x03@\x00\x00m\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06TNamed*The basis for a named object (name, title)\x00\x00\x00C\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xdf\xb7J<\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00f\x80\x00\x00\xc6@\x00\x00^\x00\x03@\x00\x00T\x00\x04@\x00\x00%\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08TAttLine\x0fLine 
attributes\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x94\x07EI\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00k\x80\x00\x00\xc6@\x00\x00c\x00\x03@\x00\x00Y\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08TAttFill\x14Fill area attributes\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xd9*\x92\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00j\x80\x00\x00\xc6@\x00\x00b\x00\x03@\x00\x00X\x00\x04@\x00\x00)\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nTAttMarker\x11Marker attributes\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00)\x1d\x8b\xec\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00|\x80\x00\x02\x08@\x00\x00t\x00\x02@\x00\x00n\x00\x04@\x00\x00@\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fNcells+number of bins(1D), cells (2D) +U/Overflows\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00c\x80\x00\x03\xc7@\x00\x00[\x00\x02@\x00\x00U\x00\x04@\x00\x00%\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06fXaxis\x11X axis descriptor\x00\x00\x00=\x00\x00\x00\xd8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05TAxis@\x00\x00c\x80\x00\x03\xc7@\x00\x00[\x00\x02@\x00\x00U\x00\x04@\x00\x00%\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06fYaxis\x11Y axis descriptor\x00\x00\x00=\x00\x00\x00\xd8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05TAxis@\x00\x00c\x80\x00\x03\xc7@\x00\x00[\x00\x02@\x00\x00U\x00\x04@\x00\x00%\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06fZaxis\x11Z axis descriptor\x00\x00\x00=\x00\x00\x00\xd8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05TAxis@\x00\x00{\x80\x00\x02\x08@\x00\x00s\x00\x02@\x00\x00m\x00\x04@\x00\x00=\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfBarOffset%(1000*offset) for bar charts or legos\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short@\x00\x00y\x80\x00\x02\x08@\x00\x00q\x00\x02@\x00\x00k\x00\x04@\x00\x00;\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfBarWidth$(1000*width) for bar charts or legos\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short@\x00\x00f\x80\x00\x02\x08@\x00\x00^\x00\x02@\x00\x00X\x00\x04@\x00\x00\'\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fEntries\x11Number of entries\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00g\x80\x00\x02\x08@\x00\x00_\x00\x02@\x00\x00Y\x00\x04@\x00\x00(\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06fTsumw\x14Total Sum of weights\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00s\x80\x00\x02\x08@\x00\x00k\x00\x02@\x00\x00e\x00\x04@\x00\x004\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fTsumw2\x1fTotal Sum of squares of 
weights\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00i\x80\x00\x02\x08@\x00\x00a\x00\x02@\x00\x00[\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fTsumwx\x15Total Sum of weight*X\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00l\x80\x00\x02\x08@\x00\x00d\x00\x02@\x00\x00^\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fTsumwx2\x17Total Sum of weight*X*X\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00o\x80\x00\x02\x08@\x00\x00g\x00\x02@\x00\x00a\x00\x04@\x00\x000\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMaximum\x1aMaximum value for plotting\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00o\x80\x00\x02\x08@\x00\x00g\x00\x02@\x00\x00a\x00\x04@\x00\x000\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMinimum\x1aMinimum value for plotting\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00l\x80\x00\x02\x08@\x00\x00d\x00\x02@\x00\x00^\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfNormFactor\x14Normalization factor\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00u\x80\x00\x08\xbf@\x00\x00m\x00\x02@\x00\x00g\x00\x04@\x00\x005\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fContour\x1fArray to display contour levels\x00\x00\x00>\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TArrayD@\x00\x00v\x80\x00\x08\xbf@\x00\x00n\x00\x02@\x00\x00h\x00\x04@\x00\x006\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06fSumw2"Array of sum of squares of weights\x00\x00\x00>\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TArrayD@\x00\x00f\x80\x00\x01?@\x00\x00^\x00\x02@\x00\x00X\x00\x04@\x00\x00&\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fOption\x11histogram options\x00\x00\x00A\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TString@\x00\x00\x9c\xff\xff\xff\xffTStreamerObjectPointer\x00@\x00\x00}\x00\x02@\x00\x00w\x00\x04@\x00\x00F\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfFunctions.->Pointer to list of functions (fits and user)\x00\x00\x00?\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06TList*@\x00\x00a\x80\x00\x02\x08@\x00\x00Y\x00\x02@\x00\x00S\x00\x04@\x00\x00%\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfBufferSize\x0cfBuffer size\x00\x00\x00\x06\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00\x99\xff\xff\xff\xffTStreamerBasicPointer\x00@\x00\x00{\x00\x02@\x00\x00a\x00\x04@\x00\x00/\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fBuffer\x1a[fBufferSize] entry 
buffer\x00\x00\x000\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07double*\x00\x00\x00\x08\x0bfBufferSize\x03TH1@\x00\x00\x87\x80\x00\x02\x08@\x00\x00\x7f\x00\x02@\x00\x00y\x00\x04@\x00\x00=\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0efBinStatErrOpt!option for bin statistical errors\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11TH1::EBinErrorOpt@\x00\x00\x9c\x80\x00\x02\x08@\x00\x00\x94\x00\x02@\x00\x00\x8e\x00\x04@\x00\x00P\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0efStatOverflows4per object flag to use under/overflows in statistics\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x13TH1::EStatOverflows\x00@\x00\x01\x82\x80\x00\x00[@\x00\x01z\x00\t@\x00\x00\x14\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x06TNamed\x00\xdf\xb7J<\x00\x00\x00\x01@\x00\x01T\x80\x00\x00\x9b@\x00\x01L\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00g\x80\x00\x00\xc6@\x00\x00_\x00\x03@\x00\x00U\x00\x04@\x00\x00&\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TObject\x11Basic ROOT object\x00\x00\x00B\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x90\x1b\xc0-\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00d\x80\x00\x01?@\x00\x00\\\x00\x02@\x00\x00V\x00\x04@\x00\x00$\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fName\x11object identifier\x00\x00\x00A\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TString@\x00\x00`\x80\x00\x01?@\x00\x00X\x00\x02@\x00\x00R\x00\x04@\x00\x00 \x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06fTitle\x0cobject title\x00\x00\x00A\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TString\x00@\x00\x01y\x80\x00\x00[@\x00\x01q\x00\t@\x00\x00\x16\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x08TAttLine\x00\x94\x07EI\x00\x00\x00\x02@\x00\x01I\x80\x00\x00\x9b@\x00\x01A\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00`\x80\x00\x02\x08@\x00\x00X\x00\x02@\x00\x00R\x00\x04@\x00\x00"\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfLineColor\nLine color\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short@\x00\x00`\x80\x00\x02\x08@\x00\x00X\x00\x02@\x00\x00R\x00\x04@\x00\x00"\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfLineStyle\nLine style\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short@\x00\x00`\x80\x00\x02\x08@\x00\x00X\x00\x02@\x00\x00R\x00\x04@\x00\x00"\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfLineWidth\nLine 
width\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short\x00@\x00\x01\x1f\x80\x00\x00[@\x00\x01\x17\x00\t@\x00\x00\x16\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x08TAttFill\x00\xff\xd9*\x92\x00\x00\x00\x02@\x00\x00\xef\x80\x00\x00\x9b@\x00\x00\xe7\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00e\x80\x00\x02\x08@\x00\x00]\x00\x02@\x00\x00W\x00\x04@\x00\x00\'\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfFillColor\x0fFill area color\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short@\x00\x00e\x80\x00\x02\x08@\x00\x00]\x00\x02@\x00\x00W\x00\x04@\x00\x00\'\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfFillStyle\x0fFill area style\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short\x00@\x00\x01\x85\x80\x00\x00[@\x00\x01}\x00\t@\x00\x00\x18\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\nTAttMarker\x00)\x1d\x8b\xec\x00\x00\x00\x02@\x00\x01S\x80\x00\x00\x9b@\x00\x01K\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00d\x80\x00\x02\x08@\x00\x00\\\x00\x02@\x00\x00V\x00\x04@\x00\x00&\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0cfMarkerColor\x0cMarker color\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short@\x00\x00d\x80\x00\x02\x08@\x00\x00\\\x00\x02@\x00\x00V\x00\x04@\x00\x00&\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0cfMarkerStyle\x0cMarker style\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short@\x00\x00b\x80\x00\x02\x08@\x00\x00Z\x00\x02@\x00\x00T\x00\x04@\x00\x00$\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfMarkerSize\x0bMarker size\x00\x00\x00\x05\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05float\x00@\x00\x00\xb3\x80\x00\x00[@\x00\x00\xab\x00\t@\x00\x00\x14\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x06TArray\x00\x00p!\xb2\x00\x00\x00\x01@\x00\x00\x85\x80\x00\x00\x9b@\x00\x00}\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00@\x00\x00d\x80\x00\x02\x08@\x00\x00\\\x00\x02@\x00\x00V\x00\x04@\x00\x00(\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x02fN\x18Number of array elements\x00\x00\x00\x06\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int\x00@\x00\x06\t\x80\x00\x00[@\x00\x06\x01\x00\t@\x00\x00\x13\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x05TAxis\x00ZInp\x00\x00\x00\n@\x00\x05\xdc\x80\x00\x00\x9b@\x00\x05\xd4\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\r\x00\x00\x00\x00@\x00\x00\x7f\x80\x00\x00\xc6@\x00\x00w\x00\x03@\x00\x00m\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06TNamed*The basis for a named object (name, title)\x00\x00\x00C\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xdf\xb7J<\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00f\x80\x00\x00\xc6@\x00\x00^\x00\x03@\x00\x00T\x00\x04@\x00\x00%\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08TAttAxis\x0fAxis 
attributes\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\\o\xff>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x04@\x00\x00^\x80\x00\x02\x08@\x00\x00V\x00\x02@\x00\x00P\x00\x04@\x00\x00"\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06fNbins\x0eNumber of bins\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00g\x80\x00\x02\x08@\x00\x00_\x00\x02@\x00\x00Y\x00\x04@\x00\x00(\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fXmin\x15low edge of first bin\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00h\x80\x00\x02\x08@\x00\x00`\x00\x02@\x00\x00Z\x00\x04@\x00\x00)\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fXmax\x16upper edge of last bin\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00h\x80\x00\x08\xbf@\x00\x00`\x00\x02@\x00\x00Z\x00\x04@\x00\x00(\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06fXbins\x14Bin edges array in X\x00\x00\x00>\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TArrayD@\x00\x00d\x80\x00\x02\x08@\x00\x00\\\x00\x02@\x00\x00V\x00\x04@\x00\x00(\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06fFirst\x14first bin to display\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00b\x80\x00\x02\x08@\x00\x00Z\x00\x02@\x00\x00T\x00\x04@\x00\x00&\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fLast\x13last bin to display\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00q\x80\x00\x02\x08@\x00\x00i\x00\x02@\x00\x00c\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06fBits2\x16second bit status word\x00\x00\x00\x0c\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0eunsigned short@\x00\x00\x88\x80\x00\x02\x08@\x00\x00\x80\x00\x02@\x00\x00z\x00\x04@\x00\x00K\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0cfTimeDisplay1on/off displaying time values instead of numerics\x00\x00\x00\x12\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04bool@\x00\x00\x80\x80\x00\x01?@\x00\x00x\x00\x02@\x00\x00r\x00\x04@\x00\x00@\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfTimeFormat\'Date&time format, ex: 09/12/99 12:34:00\x00\x00\x00A\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TString@\x00\x00f\x80\x00\x16\xcb@\x00\x00^\x00\x02@\x00\x00X\x00\x04@\x00\x00#\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fLabels\x0eList of labels\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\nTHashList*@\x00\x00l\x80\x00\x16\xcb@\x00\x00d\x00\x02@\x00\x00^\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fModLabs\x17List of modified 
labels\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06TList*\x00@\x00\x05\x0e\x80\x00\x00[@\x00\x05\x06\x00\t@\x00\x00\x16\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x08TAttAxis\x00\\o\xff>\x00\x00\x00\x04@\x00\x04\xde\x80\x00\x00\x9b@\x00\x04\xd6\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x0b\x00\x00\x00\x00@\x00\x00\x80\x80\x00\x02\x08@\x00\x00x\x00\x02@\x00\x00r\x00\x04@\x00\x00D\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfNdivisions+Number of divisions(10000*n3 + 100*n2 + n1)\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00l\x80\x00\x02\x08@\x00\x00d\x00\x02@\x00\x00^\x00\x04@\x00\x00.\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfAxisColor\x16Color of the line axis\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short@\x00\x00f\x80\x00\x02\x08@\x00\x00^\x00\x02@\x00\x00X\x00\x04@\x00\x00(\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfLabelColor\x0fColor of labels\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short@\x00\x00e\x80\x00\x02\x08@\x00\x00]\x00\x02@\x00\x00W\x00\x04@\x00\x00\'\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfLabelFont\x0fFont for labels\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short@\x00\x00h\x80\x00\x02\x08@\x00\x00`\x00\x02@\x00\x00Z\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0cfLabelOffset\x10Offset of labels\x00\x00\x00\x05\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05float@\x00\x00d\x80\x00\x02\x08@\x00\x00\\\x00\x02@\x00\x00V\x00\x04@\x00\x00&\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfLabelSize\x0eSize of labels\x00\x00\x00\x05\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05float@\x00\x00k\x80\x00\x02\x08@\x00\x00c\x00\x02@\x00\x00]\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfTickLength\x14Length of tick marks\x00\x00\x00\x05\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05float@\x00\x00l\x80\x00\x02\x08@\x00\x00d\x00\x02@\x00\x00^\x00\x04@\x00\x00.\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0cfTitleOffset\x14Offset of axis title\x00\x00\x00\x05\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05float@\x00\x00h\x80\x00\x02\x08@\x00\x00`\x00\x02@\x00\x00Z\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfTitleSize\x12Size of axis title\x00\x00\x00\x05\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05float@\x00\x00j\x80\x00\x02\x08@\x00\x00b\x00\x02@\x00\x00\\\x00\x04@\x00\x00,\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfTitleColor\x13Color of axis 
title\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short@\x00\x00i\x80\x00\x02\x08@\x00\x00a\x00\x02@\x00\x00[\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfTitleFont\x13Font for axis title\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short\x00@\x00\x00\xb8\x80\x00\x00[@\x00\x00\xb0\x00\t@\x00\x00\x17\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\tTHashList\x00\xcc~I\xc1\x00\x00\x00\x00@\x00\x00\x87\x80\x00\x00\x9b@\x00\x00\x7f\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00@\x00\x00f\x80\x00\x00\xc6@\x00\x00^\x00\x03@\x00\x00T\x00\x04@\x00\x00%\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05TList\x12Doubly linked list\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00i\xc5\xc3\xbb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x05\x00@\x00\x00\xc6\x80\x00\x00[@\x00\x00\xbe\x00\t@\x00\x00\x13\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x05TList\x00i\xc5\xc3\xbb\x00\x00\x00\x05@\x00\x00\x99\x80\x00\x00\x9b@\x00\x00\x91\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00@\x00\x00x\x80\x00\x00\xc6@\x00\x00p\x00\x03@\x00\x00f\x00\x04@\x00\x007\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0eTSeqCollection\x1bSequenceable collection ABC\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfcl;\xc6\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x00\x00@\x00\x00\xcf\x80\x00\x00[@\x00\x00\xc7\x00\t@\x00\x00\x1c\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x0eTSeqCollection\x00\xfcl;\xc6\x00\x00\x00\x00@\x00\x00\x99\x80\x00\x00\x9b@\x00\x00\x91\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00@\x00\x00x\x80\x00\x00\xc6@\x00\x00p\x00\x03@\x00\x00f\x00\x04@\x00\x007\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bTCollection\x1eCollection abstract base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00W\xe3\xcb\x9c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x03\x00@\x00\x01\x9b\x80\x00\x00[@\x00\x01\x93\x00\t@\x00\x00\x19\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x0bTCollection\x00W\xe3\xcb\x9c\x00\x00\x00\x03@\x00\x01h\x80\x00\x00\x9b@\x00\x01`\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00g\x80\x00\x00\xc6@\x00\x00_\x00\x03@\x00\x00U\x00\x04@\x00\x00&\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TObject\x11Basic ROOT object\x00\x00\x00B\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x90\x1b\xc0-\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00i\x80\x00\x01?@\x00\x00a\x00\x02@\x00\x00[\x00\x04@\x00\x00)\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fName\x16name of the collection\x00\x00\x00A\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TString@\x00\x00o\x80\x00\x02\x08@\x00\x00g\x00\x02@\x00\x00a\x00\x04@\x00\x003\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fSize number of elements in 
collection\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int\x00@\x00\x00L\x80\x00\x00[@\x00\x00D\x00\t@\x00\x00\x15\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x07TString\x00\x00\x01t\x19\x00\x00\x00\x02@\x00\x00\x1d\x80\x00\x00\x9b@\x00\x00\x15\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00@\x00\x03\xfc\x80\x00\x00[@\x00\x03\xf4\x00\t@\x00\x00\x18\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\nTProfile2D\x006\xa1B\xac\x00\x00\x00\x08@\x00\x03\xca\x80\x00\x00\x9b@\x00\x03\xc2\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00@\x00\x00|\x80\x00\x00\xc6@\x00\x00t\x00\x03@\x00\x00j\x00\x04@\x00\x00;\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x04TH2D)2-Dim histograms (one double per channel)\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x7f\xba\x82\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x04@\x00\x00r\x80\x00\x08\xbf@\x00\x00j\x00\x02@\x00\x00d\x00\x04@\x00\x002\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfBinEntries\x19number of entries per bin\x00\x00\x00>\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TArrayD@\x00\x00s\x80\x00\x02\x08@\x00\x00k\x00\x02@\x00\x00e\x00\x04@\x00\x000\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfErrorMode\x18Option to compute errors\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\nEErrorType@\x00\x00k\x80\x00\x02\x08@\x00\x00c\x00\x02@\x00\x00]\x00\x04@\x00\x00,\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fZmin\x19Lower limit in Z (if set)\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00k\x80\x00\x02\x08@\x00\x00c\x00\x02@\x00\x00]\x00\x04@\x00\x00,\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fZmax\x19Upper limit in Z (if set)\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00i\x80\x00\x02\x08@\x00\x00a\x00\x02@\x00\x00[\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fTsumwz\x15Total Sum of weight*Z\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00l\x80\x00\x02\x08@\x00\x00d\x00\x02@\x00\x00^\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fTsumwz2\x17Total Sum of weight*Z*Z\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00\x81\x80\x00\x08\xbf@\x00\x00y\x00\x02@\x00\x00s\x00\x04@\x00\x00A\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfBinSumw2*Array of sum of squares of weights per 
bin\x00\x00\x00>\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TArrayD\x00@\x00\x01#\x80\x00\x00[@\x00\x01\x1b\x00\t@\x00\x00\x12\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x04TH2D\x00\x7f\xba\x82\xf0\x00\x00\x00\x04@\x00\x00\xf7\x80\x00\x00\x9b@\x00\x00\xef\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH2\x1a2-Dim histogram base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x824\x7f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x05@\x00\x00f\x80\x00\x00\xc6@\x00\x00^\x00\x03@\x00\x00T\x00\x04@\x00\x00%\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TArrayD\x10Array of doubles\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00q9\xef4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01\x00@\x00\x02n\x80\x00\x00[@\x00\x02f\x00\t@\x00\x00\x11\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x03TH2\x00\x01\x824\x7f\x00\x00\x00\x05@\x00\x02C\x80\x00\x00\x9b@\x00\x02;\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH1\x1a1-Dim histogram base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1c7@\xc4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x08@\x00\x00e\x80\x00\x02\x08@\x00\x00]\x00\x02@\x00\x00W\x00\x04@\x00\x00&\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0cfScalefactor\x0cScale factor\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00i\x80\x00\x02\x08@\x00\x00a\x00\x02@\x00\x00[\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fTsumwy\x15Total Sum of weight*Y\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00l\x80\x00\x02\x08@\x00\x00d\x00\x02@\x00\x00^\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fTsumwy2\x17Total Sum of weight*Y*Y\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00l\x80\x00\x02\x08@\x00\x00d\x00\x02@\x00\x00^\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fTsumwxy\x17Total Sum of weight*X*Y\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double\x00@\x00\x03\xfc\x80\x00\x00[@\x00\x03\xf4\x00\t@\x00\x00\x18\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\nTProfile3D\x00\xf6\x0ch\x14\x00\x00\x00\x08@\x00\x03\xca\x80\x00\x00\x9b@\x00\x03\xc2\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00@\x00\x00|\x80\x00\x00\xc6@\x00\x00t\x00\x03@\x00\x00j\x00\x04@\x00\x00;\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x04TH3D)3-Dim histograms (one double per 
channel)\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00d\xb9\xff\x86\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x04@\x00\x00r\x80\x00\x08\xbf@\x00\x00j\x00\x02@\x00\x00d\x00\x04@\x00\x002\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfBinEntries\x19number of entries per bin\x00\x00\x00>\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TArrayD@\x00\x00s\x80\x00\x02\x08@\x00\x00k\x00\x02@\x00\x00e\x00\x04@\x00\x000\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfErrorMode\x18Option to compute errors\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\nEErrorType@\x00\x00k\x80\x00\x02\x08@\x00\x00c\x00\x02@\x00\x00]\x00\x04@\x00\x00,\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fTmin\x19Lower limit in T (if set)\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00k\x80\x00\x02\x08@\x00\x00c\x00\x02@\x00\x00]\x00\x04@\x00\x00,\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fTmax\x19Upper limit in T (if set)\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00i\x80\x00\x02\x08@\x00\x00a\x00\x02@\x00\x00[\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fTsumwt\x15Total Sum of weight*T\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00l\x80\x00\x02\x08@\x00\x00d\x00\x02@\x00\x00^\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fTsumwt2\x17Total Sum of weight*T*T\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00\x81\x80\x00\x08\xbf@\x00\x00y\x00\x02@\x00\x00s\x00\x04@\x00\x00A\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfBinSumw2*Array of sum of squares of weights per bin\x00\x00\x00>\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TArrayD\x00@\x00\x01#\x80\x00\x00[@\x00\x01\x1b\x00\t@\x00\x00\x12\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x04TH3D\x00d\xb9\xff\x86\x00\x00\x00\x04@\x00\x00\xf7\x80\x00\x00\x9b@\x00\x00\xef\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH3\x1a3-Dim histogram base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00B\xd2D_\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x06@\x00\x00f\x80\x00\x00\xc6@\x00\x00^\x00\x03@\x00\x00T\x00\x04@\x00\x00%\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TArrayD\x10Array of doubles\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00q9\xef4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01\x00@\x00\x04(\x80\x00\x00[@\x00\x04 
\x00\t@\x00\x00\x11\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x03TH3\x00B\xd2D_\x00\x00\x00\x06@\x00\x03\xfd\x80\x00\x00\x9b@\x00\x03\xf5\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\t\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH1\x1a1-Dim histogram base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1c7@\xc4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x08@\x00\x00b\x80\x00\x00\xc6@\x00\x00Z\x00\x03@\x00\x00P\x00\x04@\x00\x00!\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06TAtt3D\r3D attributes\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00uz\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00i\x80\x00\x02\x08@\x00\x00a\x00\x02@\x00\x00[\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fTsumwy\x15Total Sum of weight*Y\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00l\x80\x00\x02\x08@\x00\x00d\x00\x02@\x00\x00^\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fTsumwy2\x17Total Sum of weight*Y*Y\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00l\x80\x00\x02\x08@\x00\x00d\x00\x02@\x00\x00^\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fTsumwxy\x17Total Sum of weight*X*Y\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00i\x80\x00\x02\x08@\x00\x00a\x00\x02@\x00\x00[\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fTsumwz\x15Total Sum of weight*Z\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00l\x80\x00\x02\x08@\x00\x00d\x00\x02@\x00\x00^\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fTsumwz2\x17Total Sum of weight*Z*Z\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00l\x80\x00\x02\x08@\x00\x00d\x00\x02@\x00\x00^\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fTsumwxz\x17Total Sum of weight*X*Z\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00l\x80\x00\x02\x08@\x00\x00d\x00\x02@\x00\x00^\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fTsumwyz\x17Total Sum of 
weight*Y*Z\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double\x00@\x00\x00K\x80\x00\x00[@\x00\x00C\x00\t@\x00\x00\x14\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x06TAtt3D\x00\x00\x00uz\x00\x00\x00\x01@\x00\x00\x1d\x80\x00\x00\x9b@\x00\x00\x15\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00@\x00\x00\xf1\x80\x00\x00[@\x00\x00\xe9\x00\t@\x00\x00\x19\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x0bvector\x00\x00\x98;\x88\x00\x00\x00\x06@\x00\x00\xbe\x80\x00\x00\x9b@\x00\x00\xb6\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00@\x00\x00\x9d\xff\xff\xff\xffTStreamerSTL\x00@\x00\x00\x88\x00\x03@\x00\x00z\x00\x04@\x00\x00D\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x04This2 Used to call the proper TStreamerInfo case\x00\x00\x01\xf4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0bvector\x00\x00\x00\x01\x00\x00\x00\x03\x00@\x00\x00\xed\x80\x00\x00[@\x00\x00\xe5\x00\t@\x00\x00\x1c\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x0evector\x00\x10\x0eCd\x00\x00\x00\x06@\x00\x00\xb7\x80\x00\x00\x9b@\x00\x00\xaf\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00@\x00\x00\x96\x80\x00A\x08@\x00\x00\x8e\x00\x03@\x00\x00\x80\x00\x04@\x00\x00G\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x04This5 Used to call the proper TStreamerInfo case\x00\x00\x01\xf4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0evector\x00\x00\x00\x01\x00\x00\x00\x08\x00@\x00\x00\xea\x80\x00\x00[@\x00\x00\xe2\x00\t@\x00\x00\x1b\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\rvector\x00\x05Z\x16\x9b\x00\x00\x00\x06@\x00\x00\xb5\x80\x00\x00\x9b@\x00\x00\xad\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00@\x00\x00\x94\x80\x00A\x08@\x00\x00\x8c\x00\x03@\x00\x00~\x00\x04@\x00\x00F\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x04This4 Used to call the proper TStreamerInfo case\x00\x00\x01\xf4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\rvector\x00\x00\x00\x01\x00\x00\x00\x05\x00@\x00\x00\xe7\x80\x00\x00[@\x00\x00\xdf\x00\t@\x00\x00\x1a\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x0cvector\x00\x01\xc8\xb4)\x00\x00\x00\x06@\x00\x00\xb3\x80\x00\x00\x9b@\x00\x00\xab\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00@\x00\x00\x92\x80\x00A\x08@\x00\x00\x8a\x00\x03@\x00\x00|\x00\x04@\x00\x00E\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x04This3 Used to call the proper TStreamerInfo case\x00\x00\x01\xf4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0cvector\x00\x00\x00\x01\x00\x00\x00\x04\x00@\x00\x00\xe7\x80\x00\x00[@\x00\x00\xdf\x00\t@\x00\x00\x1a\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x0cvector\x00\x01\xc8\xb0?\x00\x00\x00\x06@\x00\x00\xb3\x80\x00\x00\x9b@\x00\x00\xab\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00@\x00\x00\x92\x80\x00A\x08@\x00\x00\x8a\x00\x03@\x00\x00|\x00\x04@\x00\x00E\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x04This3 Used to call the proper TStreamerInfo 
case\x00\x00\x01\xf4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0cvector\x00\x00\x00\x01\x00\x00\x00\x01\x00@\x00\x00\xea\x80\x00\x00[@\x00\x00\xe2\x00\t@\x00\x00\x1b\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\rvector\x00\x05Z"G\x00\x00\x00\x06@\x00\x00\xb5\x80\x00\x00\x9b@\x00\x00\xad\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00@\x00\x00\x94\x80\x00A\x08@\x00\x00\x8c\x00\x03@\x00\x00~\x00\x04@\x00\x00F\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x04This4 Used to call the proper TStreamerInfo case\x00\x00\x01\xf4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\rvector\x00\x00\x00\x01\x00\x00\x00\x02\x00@\x00\x00\xeb\x80\x00\x00[@\x00\x00\xe3\x00\t@\x00\x00\x1c\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x0evector\x00\x10\x0er\xbc\x00\x00\x00\x06@\x00\x00\xb5\x80\x00\x00\x9b@\x00\x00\xad\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00@\x00\x00\x94\x80\x00A\x08@\x00\x00\x8c\x00\x03@\x00\x00~\x00\x04@\x00\x00E\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x04This3 Used to call the proper TStreamerInfo case\x00\x00\x01\xf4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0evector\x00\x00\x00\x01\x00\x00\x00=\x00@\x00\x11\xe6\x80\x00\x00[@\x00\x11\xde\x00\t@\x00\x00\x13\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x05TTree\x00rd\xe0\x7f\x00\x00\x00\x14@\x00\x11\xb9\x80\x00\x00\x9b@\x00\x11\xb1\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00!\x00\x00\x00\x00@\x00\x00\x7f\x80\x00\x00\xc6@\x00\x00w\x00\x03@\x00\x00m\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06TNamed*The basis for a named object (name, title)\x00\x00\x00C\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xdf\xb7J<\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00f\x80\x00\x00\xc6@\x00\x00^\x00\x03@\x00\x00T\x00\x04@\x00\x00%\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08TAttLine\x0fLine attributes\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x94\x07EI\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00k\x80\x00\x00\xc6@\x00\x00c\x00\x03@\x00\x00Y\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08TAttFill\x14Fill area attributes\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xd9*\x92\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00j\x80\x00\x00\xc6@\x00\x00b\x00\x03@\x00\x00X\x00\x04@\x00\x00)\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nTAttMarker\x11Marker attributes\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00)\x1d\x8b\xec\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00h\x80\x00\x02\x08@\x00\x00`\x00\x02@\x00\x00Z\x00\x04@\x00\x00\'\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fEntries\x11Number of entries\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00\x90\x80\x00\x02\x08@\x00\x00\x88\x00\x02@\x00\x00\x82\x00\x04@\x00\x00O\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfTotBytes8Total number of bytes in all branches before 
compression\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00\x8f\x80\x00\x02\x08@\x00\x00\x87\x00\x02@\x00\x00\x81\x00\x04@\x00\x00N\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfZipBytes7Total number of bytes in all branches after compression\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00s\x80\x00\x02\x08@\x00\x00k\x00\x02@\x00\x00e\x00\x04@\x00\x002\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfSavedBytes\x19Number of autosaved bytes\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00x\x80\x00\x02\x08@\x00\x00p\x00\x02@\x00\x00j\x00\x04@\x00\x007\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\rfFlushedBytes\x1cNumber of auto-flushed bytes\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00v\x80\x00\x02\x08@\x00\x00n\x00\x02@\x00\x00h\x00\x04@\x00\x007\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fWeight"Tree weight (see TTree::SetWeight)\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00v\x80\x00\x02\x08@\x00\x00n\x00\x02@\x00\x00h\x00\x04@\x00\x00:\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0efTimerInterval\x1eTimer interval in milliseconds\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00{\x80\x00\x02\x08@\x00\x00s\x00\x02@\x00\x00m\x00\x04@\x00\x00?\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfScanField\'Number of runs before prompting in Scan\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00o\x80\x00\x02\x08@\x00\x00g\x00\x02@\x00\x00a\x00\x04@\x00\x003\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fUpdate\x1eUpdate frequency for EntryLoop\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00\x9a\x80\x00\x02\x08@\x00\x00\x92\x00\x02@\x00\x00\x8c\x00\x04@\x00\x00^\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x16fDefaultEntryOffsetLen:Initial Length of fEntryOffset table in the basket buffers\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00\x9d\x80\x00\x02\x08@\x00\x00\x95\x00\x02@\x00\x00\x8f\x00\x04@\x00\x00a\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0efNClusterRangeENumber of Cluster range in addition to the one defined by \'AutoFlush\'\x00\x00\x00\x06\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00\x8f\x80\x00\x02\x08@\x00\x00\x87\x00\x02@\x00\x00\x81\x00\x04@\x00\x00N\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfMaxEntries5Maximum number of entries in case of circular 
buffers\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00\x80\x80\x00\x02\x08@\x00\x00x\x00\x02@\x00\x00r\x00\x04@\x00\x00?\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\rfMaxEntryLoop$Maximum number of entries to process\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00\x8a\x80\x00\x02\x08@\x00\x00\x82\x00\x02@\x00\x00|\x00\x04@\x00\x00I\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0ffMaxVirtualSize,Maximum total size of buffers kept in memory\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00\xae\x80\x00\x02\x08@\x00\x00\xa6\x00\x02@\x00\x00\xa0\x00\x04@\x00\x00m\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfAutoSaveVAutosave tree when fAutoSave entries written or -fAutoSave (compressed) bytes produced\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00\xb3\x80\x00\x02\x08@\x00\x00\xab\x00\x02@\x00\x00\xa5\x00\x04@\x00\x00r\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfAutoFlushZAuto-flush tree when fAutoFlush entries written or -fAutoFlush (compressed) bytes produced\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00\x86\x80\x00\x02\x08@\x00\x00~\x00\x02@\x00\x00x\x00\x04@\x00\x00E\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfEstimate.Number of entries to estimate histogram limits\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00\xa8\x80\x00\x17\xd0@\x00\x00\xa0\x00\x02@\x00\x00\x81\x00\x04@\x00\x00M\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x10fClusterRangeEnd/[fNClusterRange] Last entry of a cluster range.\x00\x00\x008\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\tLong64_t*\x00\x00\x00\x14\x0efNClusterRange\x05TTree@\x00\x00\xba\x80\x00\x17\xd0@\x00\x00\xb2\x00\x02@\x00\x00\x93\x00\x04@\x00\x00_\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0cfClusterSizeE[fNClusterRange] Number of entries in each cluster for a given range.\x00\x00\x008\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\tLong64_t*\x00\x00\x00\x14\x0efNClusterRange\x05TTree@\x00\x00\xa0\x80\x00\x08\xbf@\x00\x00\x98\x00\x02@\x00\x00\x92\x00\x04@\x00\x00V\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfIOFeatures=IO features to define for newly-written baskets and branches.\x00\x00\x00>\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11ROOT::TIOFeatures@\x00\x00i\x80\x00\x03\xc7@\x00\x00a\x00\x02@\x00\x00[\x00\x04@\x00\x00\'\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfBranches\x10List of 
Branches\x00\x00\x00=\x00\x00\x00@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\tTObjArray@\x00\x00\x82\x80\x00\x03\xc7@\x00\x00z\x00\x02@\x00\x00t\x00\x04@\x00\x00@\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fLeaves+Direct pointers to individual branch leaves\x00\x00\x00=\x00\x00\x00@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\tTObjArray@\x00\x00\x90\x80\x00\x16\xcb@\x00\x00\x88\x00\x02@\x00\x00\x82\x00\x04@\x00\x00Q\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fAliases;List of aliases for expressions based on the tree branches.\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06TList*@\x00\x00m\x80\x00\x08\xbf@\x00\x00e\x00\x02@\x00\x00_\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0cfIndexValues\x13Sorted index values\x00\x00\x00>\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TArrayD@\x00\x00j\x80\x00\x08\xbf@\x00\x00b\x00\x02@\x00\x00\\\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06fIndex\x16Index of sorted values\x00\x00\x00>\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TArrayI@\x00\x00\x81\x80\x00\x16\xcb@\x00\x00y\x00\x02@\x00\x00s\x00\x04@\x00\x00:\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfTreeIndex"Pointer to the tree Index (if any)\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0eTVirtualIndex*@\x00\x00w\x80\x00\x16\xcb@\x00\x00o\x00\x02@\x00\x00i\x00\x04@\x00\x008\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fFriends"pointer to list of friend elements\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06TList*@\x00\x00\x8f\x80\x00\x16\xcb@\x00\x00\x87\x00\x02@\x00\x00\x81\x00\x04@\x00\x00P\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfUserInfo9pointer to a list of user objects associated to this Tree\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06TList*@\x00\x00\x84\x80\x00\x16\xcb@\x00\x00|\x00\x02@\x00\x00v\x00\x04@\x00\x00@\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfBranchRef(Branch supporting the TRefTable (if any)\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0bTBranchRef*\x00@\x00\x00\xb5\x80\x00\x00[@\x00\x00\xad\x00\t@\x00\x00\x1f\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x11ROOT::TIOFeatures\x00\x1a\xa1/\x10\x00\x00\x00\x01@\x00\x00|\x80\x00\x00\x9b@\x00\x00t\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00@\x00\x00[\x80\x00\x02\x08@\x00\x00S\x00\x02@\x00\x00M\x00\x04@\x00\x00\x15\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fIOBits\x00\x00\x00\x00\x0b\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\runsigned 
char\x00@\x00\x0b\xb9\x80\x00\x00[@\x00\x0b\xb1\x00\t@\x00\x00\x15\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x07TBranch\x00\x10\x97\x8a\xac\x00\x00\x00\r@\x00\x0b\x8a\x80\x00\x00\x9b@\x00\x0b\x82\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x16\x00\x00\x00\x00@\x00\x00\x7f\x80\x00\x00\xc6@\x00\x00w\x00\x03@\x00\x00m\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06TNamed*The basis for a named object (name, title)\x00\x00\x00C\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xdf\xb7J<\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00k\x80\x00\x00\xc6@\x00\x00c\x00\x03@\x00\x00Y\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08TAttFill\x14Fill area attributes\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xd9*\x92\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00r\x80\x00\x02\x08@\x00\x00j\x00\x02@\x00\x00d\x00\x04@\x00\x006\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfCompress\x1fCompression level and algorithm\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00s\x80\x00\x02\x08@\x00\x00k\x00\x02@\x00\x00e\x00\x04@\x00\x007\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfBasketSize\x1eInitial Size of Basket Buffer\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00\x93\x80\x00\x02\x08@\x00\x00\x8b\x00\x02@\x00\x00\x85\x00\x04@\x00\x00W\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0ffEntryOffsetLen:Initial Length of fEntryOffset table in the basket buffers\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00p\x80\x00\x02\x08@\x00\x00h\x00\x02@\x00\x00b\x00\x04@\x00\x004\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0cfWriteBasket\x1aLast basket number written\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00\x90\x80\x00\x02\x08@\x00\x00\x88\x00\x02@\x00\x00\x82\x00\x04@\x00\x00O\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0cfEntryNumber5Current entry number (last one filled in this branch)\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00\x89\x80\x00\x08\xbf@\x00\x00\x81\x00\x02@\x00\x00{\x00\x04@\x00\x00?\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfIOFeatures&IO features for newly-created baskets.\x00\x00\x00>\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11ROOT::TIOFeatures@\x00\x00f\x80\x00\x02\x08@\x00\x00^\x00\x02@\x00\x00X\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fOffset\x15Offset of this branch\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00u\x80\x00\x02\x08@\x00\x00m\x00\x02@\x00\x00g\x00\x04@\x00\x009\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfMaxBaskets Maximum number of Baskets so 
far\x00\x00\x00\x06\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00g\x80\x00\x02\x08@\x00\x00_\x00\x02@\x00\x00Y\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfSplitLevel\x12Branch split level\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00h\x80\x00\x02\x08@\x00\x00`\x00\x02@\x00\x00Z\x00\x04@\x00\x00\'\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fEntries\x11Number of entries\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00\x82\x80\x00\x02\x08@\x00\x00z\x00\x02@\x00\x00t\x00\x04@\x00\x00A\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfFirstEntry(Number of the first entry in this branch\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00\x8e\x80\x00\x02\x08@\x00\x00\x86\x00\x02@\x00\x00\x80\x00\x04@\x00\x00M\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfTotBytes6Total number of bytes in all leaves before compression\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00\x8d\x80\x00\x02\x08@\x00\x00\x85\x00\x02@\x00\x00\x7f\x00\x04@\x00\x00L\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfZipBytes5Total number of bytes in all leaves after compression\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00{\x80\x00\x03\xc7@\x00\x00s\x00\x02@\x00\x00m\x00\x04@\x00\x009\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfBranches"-> List of Branches of this branch\x00\x00\x00=\x00\x00\x00@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\tTObjArray@\x00\x00w\x80\x00\x03\xc7@\x00\x00o\x00\x02@\x00\x00i\x00\x04@\x00\x005\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fLeaves -> List of leaves of this branch\x00\x00\x00=\x00\x00\x00@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\tTObjArray@\x00\x00y\x80\x00\x03\xc7@\x00\x00q\x00\x02@\x00\x00k\x00\x04@\x00\x007\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fBaskets!-> List of baskets of this branch\x00\x00\x00=\x00\x00\x00@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\tTObjArray@\x00\x00\x96\x80\x00\x17\xd0@\x00\x00\x8e\x00\x02@\x00\x00p\x00\x04@\x00\x00A\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0cfBasketBytes\'[fMaxBaskets] Length of baskets on file\x00\x00\x00+\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04int*\x00\x00\x00\r\x0bfMaxBaskets\x07TBranch@\x00\x00\xa5\x80\x00\x17\xd0@\x00\x00\x9d\x00\x02@\x00\x00\x7f\x00\x04@\x00\x00K\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0cfBasketEntry1[fMaxBaskets] Table of first entry in each 
basket\x00\x00\x008\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\tLong64_t*\x00\x00\x00\r\x0bfMaxBaskets\x07TBranch@\x00\x00\x9d\x80\x00\x17\xd0@\x00\x00\x95\x00\x02@\x00\x00w\x00\x04@\x00\x00C\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfBasketSeek*[fMaxBaskets] Addresses of baskets on file\x00\x00\x008\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\tLong64_t*\x00\x00\x00\r\x0bfMaxBaskets\x07TBranch@\x00\x00\xa0\x80\x00\x01?@\x00\x00\x98\x00\x02@\x00\x00\x92\x00\x04@\x00\x00`\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfFileNameIName of file where buffers are stored ("" if in same file as Tree header)\x00\x00\x00A\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TString\x00@\x00\x01\xc6\x80\x00\x00[@\x00\x01\xbe\x00\t@\x00\x00\x14\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x06TLeafI\x00~j\xae\x19\x00\x00\x00\x01@\x00\x01\x98\x80\x00\x00\x9b@\x00\x01\x90\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00{\x80\x00\x00\xc6@\x00\x00s\x00\x03@\x00\x00i\x00\x04@\x00\x00:\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05TLeaf\'Leaf: description of a Branch data type\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00m\x1e\x81R\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00z\x80\x00\x02\x08@\x00\x00r\x00\x02@\x00\x00l\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMinimum(Minimum value if leaf range is specified\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00z\x80\x00\x02\x08@\x00\x00r\x00\x02@\x00\x00l\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMaximum(Maximum value if leaf range is specified\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int\x00@\x00\x03\xe8\x80\x00\x00[@\x00\x03\xe0\x00\t@\x00\x00\x13\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x05TLeaf\x00m\x1e\x81R\x00\x00\x00\x02@\x00\x03\xbb\x80\x00\x00\x9b@\x00\x03\xb3\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x07\x00\x00\x00\x00@\x00\x00\x7f\x80\x00\x00\xc6@\x00\x00w\x00\x03@\x00\x00m\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06TNamed*The basis for a named object (name, title)\x00\x00\x00C\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xdf\xb7J<\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00\x81\x80\x00\x02\x08@\x00\x00y\x00\x02@\x00\x00s\x00\x04@\x00\x00E\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x04fLen3Number of fixed length elements in the leaf\'s data.\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00t\x80\x00\x02\x08@\x00\x00l\x00\x02@\x00\x00f\x00\x04@\x00\x008\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fLenType"Number of bytes for this data 
type\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00v\x80\x00\x02\x08@\x00\x00n\x00\x02@\x00\x00h\x00\x04@\x00\x00:\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fOffset%Offset in ClonesArray object (if one)\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00\x81\x80\x00\x02\x08@\x00\x00y\x00\x02@\x00\x00s\x00\x04@\x00\x00D\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fIsRange.(=kTRUE if leaf has a range, kFALSE otherwise)\x00\x00\x00\x12\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04bool@\x00\x00|\x80\x00\x02\x08@\x00\x00t\x00\x02@\x00\x00n\x00\x04@\x00\x00?\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfIsUnsigned&(=kTRUE if unsigned, kFALSE otherwise)\x00\x00\x00\x12\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04bool@\x00\x00\x9b\x80\x00\x16\xcb@\x00\x00\x93\x00\x02@\x00\x00\x8d\x00\x04@\x00\x00\\\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfLeafCountDPointer to Leaf count if variable length (we do not own the counter)\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06TLeaf*\x00@\x00\x01\xcc\x80\x00\x00[@\x00\x01\xc4\x00\t@\x00\x00\x14\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x06TLeafD\x00\x11\x8e\x87v\x00\x00\x00\x01@\x00\x01\x9e\x80\x00\x00\x9b@\x00\x01\x96\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00{\x80\x00\x00\xc6@\x00\x00s\x00\x03@\x00\x00i\x00\x04@\x00\x00:\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05TLeaf\'Leaf: description of a Branch data type\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00m\x1e\x81R\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00}\x80\x00\x02\x08@\x00\x00u\x00\x02@\x00\x00o\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMinimum(Minimum value if leaf range is specified\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00}\x80\x00\x02\x08@\x00\x00u\x00\x02@\x00\x00o\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMaximum(Maximum value if leaf range is specified\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double\x00@\x00\x01\xca\x80\x00\x00[@\x00\x01\xc2\x00\t@\x00\x00\x14\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x06TLeafF\x00:\xdd\x9dr\x00\x00\x00\x01@\x00\x01\x9c\x80\x00\x00\x9b@\x00\x01\x94\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00{\x80\x00\x00\xc6@\x00\x00s\x00\x03@\x00\x00i\x00\x04@\x00\x00:\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05TLeaf\'Leaf: description of a Branch data type\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00m\x1e\x81R\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00|\x80\x00\x02\x08@\x00\x00t\x00\x02@\x00\x00n\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMinimum(Minimum value if leaf range is 
specified\x00\x00\x00\x05\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05float@\x00\x00|\x80\x00\x02\x08@\x00\x00t\x00\x02@\x00\x00n\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMaximum(Maximum value if leaf range is specified\x00\x00\x00\x05\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05float\x00@\x00\x01\xd0\x80\x00\x00[@\x00\x01\xc8\x00\t@\x00\x00\x14\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x06TLeafL\x00\xde2\x08b\x00\x00\x00\x01@\x00\x01\xa2\x80\x00\x00\x9b@\x00\x01\x9a\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00{\x80\x00\x00\xc6@\x00\x00s\x00\x03@\x00\x00i\x00\x04@\x00\x00:\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05TLeaf\'Leaf: description of a Branch data type\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00m\x1e\x81R\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00\x7f\x80\x00\x02\x08@\x00\x00w\x00\x02@\x00\x00q\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMinimum(Minimum value if leaf range is specified\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00\x7f\x80\x00\x02\x08@\x00\x00w\x00\x02@\x00\x00q\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMaximum(Maximum value if leaf range is specified\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t\x00@\x00\x01\xc8\x80\x00\x00[@\x00\x01\xc0\x00\t@\x00\x00\x14\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x06TLeafB\x00\x0f\x1eK^\x00\x00\x00\x01@\x00\x01\x9a\x80\x00\x00\x9b@\x00\x01\x92\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00{\x80\x00\x00\xc6@\x00\x00s\x00\x03@\x00\x00i\x00\x04@\x00\x00:\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05TLeaf\'Leaf: description of a Branch data type\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00m\x1e\x81R\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00{\x80\x00\x02\x08@\x00\x00s\x00\x02@\x00\x00m\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMinimum(Minimum value if leaf range is specified\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04char@\x00\x00{\x80\x00\x02\x08@\x00\x00s\x00\x02@\x00\x00m\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMaximum(Maximum value if leaf range is specified\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04char\x00@\x00\x01\xca\x80\x00\x00[@\x00\x01\xc2\x00\t@\x00\x00\x14\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x06TLeafS\x00\x15\x0c\xee\xcf\x00\x00\x00\x01@\x00\x01\x9c\x80\x00\x00\x9b@\x00\x01\x94\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00{\x80\x00\x00\xc6@\x00\x00s\x00\x03@\x00\x00i\x00\x04@\x00\x00:\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05TLeaf\'Leaf: description of a Branch data 
type\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00m\x1e\x81R\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00|\x80\x00\x02\x08@\x00\x00t\x00\x02@\x00\x00n\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMinimum(Minimum value if leaf range is specified\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short@\x00\x00|\x80\x00\x02\x08@\x00\x00t\x00\x02@\x00\x00n\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMaximum(Maximum value if leaf range is specified\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short\x00@\x00\x01\xc8\x80\x00\x00[@\x00\x01\xc0\x00\t@\x00\x00\x14\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x06TLeafO\x00\x02\xaeH\xd3\x00\x00\x00\x01@\x00\x01\x9a\x80\x00\x00\x9b@\x00\x01\x92\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00{\x80\x00\x00\xc6@\x00\x00s\x00\x03@\x00\x00i\x00\x04@\x00\x00:\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05TLeaf\'Leaf: description of a Branch data type\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00m\x1e\x81R\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00{\x80\x00\x02\x08@\x00\x00s\x00\x02@\x00\x00m\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMinimum(Minimum value if leaf range is specified\x00\x00\x00\x12\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04bool@\x00\x00{\x80\x00\x02\x08@\x00\x00s\x00\x02@\x00\x00m\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMaximum(Maximum value if leaf range is specified\x00\x00\x00\x12\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04bool\x00@\x00\x05\xf1\x80\x00\x00[@\x00\x05\xe9\x00\t@\x00\x00\x1c\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x0eTBranchElement\x00\xe7O^c\x00\x00\x00\n@\x00\x05\xbb\x80\x00\x00\x9b@\x00\x05\xb3\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00\x00@\x00\x00g\x80\x00\x00\xc6@\x00\x00_\x00\x03@\x00\x00U\x00\x04@\x00\x00&\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TBranch\x11Branch descriptor\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x97\x8a\xac\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\r@\x00\x00w\x80\x00\x01?@\x00\x00o\x00\x02@\x00\x00i\x00\x04@\x00\x007\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfClassName\x1fClass name of referenced object\x00\x00\x00A\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TString@\x00\x00m\x80\x00\x01?@\x00\x00e\x00\x02@\x00\x00_\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfParentName\x14Name of parent class\x00\x00\x00A\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TString@\x00\x00\x7f\x80\x00\x01?@\x00\x00w\x00\x02@\x00\x00q\x00\x04@\x00\x00?\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfClonesName&Name of class in TClonesArray (if 
any)\x00\x00\x00A\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TString@\x00\x00m\x80\x00\x02\x08@\x00\x00e\x00\x02@\x00\x00_\x00\x04@\x00\x00(\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfCheckSum\x11CheckSum of class\x00\x00\x00\r\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0cunsigned int@\x00\x00p\x80\x00\x02\x08@\x00\x00h\x00\x02@\x00\x00b\x00\x04@\x00\x002\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\rfClassVersion\x17Version number of class\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05short@\x00\x00k\x80\x00\x02\x08@\x00\x00c\x00\x02@\x00\x00]\x00\x04@\x00\x00/\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03fID\x1eelement serial number in fInfo\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00Z\x80\x00\x02\x08@\x00\x00R\x00\x02@\x00\x00L\x00\x04@\x00\x00\x1e\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fType\x0bbranch type\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00k\x80\x00\x02\x08@\x00\x00c\x00\x02@\x00\x00]\x00\x04@\x00\x00/\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\rfStreamerType\x14branch streamer type\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00\x86\x80\x00\x02\x08@\x00\x00~\x00\x02@\x00\x00x\x00\x04@\x00\x00J\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMaximum4Maximum entries for a TClonesArray or variable array\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00\x87\x80\x00\x16\xcb@\x00\x00\x7f\x00\x02@\x00\x00y\x00\x04@\x00\x00?\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0cfBranchCount%pointer to primary branchcount branch\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0fTBranchElement*@\x00\x00\x8a\x80\x00\x16\xcb@\x00\x00\x82\x00\x02@\x00\x00|\x00\x04@\x00\x00B\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\rfBranchCount2\'pointer to secondary branchcount branch\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0fTBranchElement*\x00@\x00\x01\x9b\x80\x00\x00[@\x00\x01\x93\x00\t@\x00\x00\x1a\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x0cTLeafElement\x00\xa0O\x88\x93\x00\x00\x00\x01@\x00\x01g\x80\x00\x00\x9b@\x00\x01_\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00{\x80\x00\x00\xc6@\x00\x00s\x00\x03@\x00\x00i\x00\x04@\x00\x00:\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05TLeaf\'Leaf: description of a Branch data type\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00m\x1e\x81R\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00k\x80\x00\x02\x08@\x00\x00c\x00\x02@\x00\x00]\x00\x04@\x00\x00/\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03fID\x1eelement serial number in 
fInfo\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00X\x80\x00\x02\x08@\x00\x00P\x00\x02@\x00\x00J\x00\x04@\x00\x00\x1c\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fType\tleaf type\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int\x00@\x00\x01\xc6\x80\x00\x00[@\x00\x01\xbe\x00\t@\x00\x00\x14\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x06TLeafC\x00\xfb\xe3\xb2\xf3\x00\x00\x00\x01@\x00\x01\x98\x80\x00\x00\x9b@\x00\x01\x90\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00{\x80\x00\x00\xc6@\x00\x00s\x00\x03@\x00\x00i\x00\x04@\x00\x00:\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05TLeaf\'Leaf: description of a Branch data type\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00m\x1e\x81R\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00z\x80\x00\x02\x08@\x00\x00r\x00\x02@\x00\x00l\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMinimum(Minimum value if leaf range is specified\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00z\x80\x00\x02\x08@\x00\x00r\x00\x02@\x00\x00l\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMaximum(Maximum value if leaf range is specified\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int\x00@\x00\x010\x80\x00\x00[@\x00\x01(\x00\t@\x00\x00\x18\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\nTBranchRef\x00#`\xb3\xfd\x00\x00\x00\x01@\x00\x00\xfe\x80\x00\x00\x9b@\x00\x00\xf6\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00g\x80\x00\x00\xc6@\x00\x00_\x00\x03@\x00\x00U\x00\x04@\x00\x00&\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TBranch\x11Branch descriptor\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x97\x8a\xac\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\r@\x00\x00r\x80\x00\x16\xcb@\x00\x00j\x00\x02@\x00\x00d\x00\x04@\x00\x00/\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfRefTable\x18pointer to the TRefTable\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\nTRefTable*\x00@\x00\x02\xdc\x80\x00\x00[@\x00\x02\xd4\x00\t@\x00\x00\x17\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\tTRefTable\x00\x8c\x89[\x85\x00\x00\x00\x03@\x00\x02\xab\x80\x00\x00\x9b@\x00\x02\xa3\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00\x00@\x00\x00g\x80\x00\x00\xc6@\x00\x00_\x00\x03@\x00\x00U\x00\x04@\x00\x00&\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TObject\x11Basic ROOT object\x00\x00\x00B\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x90\x1b\xc0-\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00o\x80\x00\x02\x08@\x00\x00g\x00\x02@\x00\x00a\x00\x04@\x00\x003\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fSize dummy for backward 
compatibility\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00\xa2\x80\x00\x16\xcb@\x00\x00\x9a\x00\x02@\x00\x00\x94\x00\x04@\x00\x00_\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fParentsIarray of Parent objects (eg TTree branch) holding the referenced objects\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\nTObjArray*@\x00\x00q\x80\x00\x16\xcb@\x00\x00i\x00\x02@\x00\x00c\x00\x04@\x00\x000\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06fOwner\x1cObject owning this TRefTable\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08TObject*@\x00\x00\x91\x80\x00A\x08@\x00\x00\x89\x00\x03@\x00\x00{\x00\x04@\x00\x00B\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\rfProcessGUIDs\'UUIDs of TProcessIDs used in fParentIDs\x00\x00\x01\xf4\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0evector\x00\x00\x00\x01\x00\x00\x00=\x00@\x00\x01\xb8\x80\x00\x00[@\x00\x01\xb0\x00\t@\x00\x00\x17\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\tTObjArray\x00\xa9\x9eeR\x00\x00\x00\x03@\x00\x01\x87\x80\x00\x00\x9b@\x00\x01\x7f\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00x\x80\x00\x00\xc6@\x00\x00p\x00\x03@\x00\x00f\x00\x04@\x00\x007\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0eTSeqCollection\x1bSequenceable collection ABC\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfcl;\xc6\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x00@\x00\x00m\x80\x00\x02\x08@\x00\x00e\x00\x02@\x00\x00_\x00\x04@\x00\x001\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfLowerBound\x18Lower bound of the array\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00y\x80\x00\x02\x08@\x00\x00q\x00\x02@\x00\x00k\x00\x04@\x00\x00=\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x05fLast*Last element in array containing an object\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int\x00@\x00\x05\x90\x80\x00\x00[@\x00\x05\x88\x00\t@\x00\x00\x14\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x06TGraph\x00\x05\xf7\xf4e\x00\x00\x00\x04@\x00\x05b\x80\x00\x00\x9b@\x00\x05Z\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x0b\x00\x00\x00\x00@\x00\x00\x7f\x80\x00\x00\xc6@\x00\x00w\x00\x03@\x00\x00m\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06TNamed*The basis for a named object (name, title)\x00\x00\x00C\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xdf\xb7J<\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00f\x80\x00\x00\xc6@\x00\x00^\x00\x03@\x00\x00T\x00\x04@\x00\x00%\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08TAttLine\x0fLine attributes\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x94\x07EI\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00k\x80\x00\x00\xc6@\x00\x00c\x00\x03@\x00\x00Y\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08TAttFill\x14Fill area 
attributes\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xd9*\x92\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00j\x80\x00\x00\xc6@\x00\x00b\x00\x03@\x00\x00X\x00\x04@\x00\x00)\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nTAttMarker\x11Marker attributes\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00)\x1d\x8b\xec\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00n\x80\x00\x02\x08@\x00\x00f\x00\x02@\x00\x00`\x00\x04@\x00\x002\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fNpoints\x1cNumber of points <= fMaxSize\x00\x00\x00\x06\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00\x80\x80\x00\x17\xd0@\x00\x00x\x00\x02@\x00\x00^\x00\x04@\x00\x00,\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x02fX\x1c[fNpoints] array of X points\x00\x00\x000\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07double*\x00\x00\x00\x04\x08fNpoints\x06TGraph@\x00\x00\x80\x80\x00\x17\xd0@\x00\x00x\x00\x02@\x00\x00^\x00\x04@\x00\x00,\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x02fY\x1c[fNpoints] array of Y points\x00\x00\x000\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07double*\x00\x00\x00\x04\x08fNpoints\x06TGraph@\x00\x00\x83\x80\x00\x16\xcb@\x00\x00{\x00\x02@\x00\x00u\x00\x04@\x00\x00D\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfFunctions,Pointer to list of functions (fits and user)\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06TList*@\x00\x00\x80\x80\x00\x16\xcb@\x00\x00x\x00\x02@\x00\x00r\x00\x04@\x00\x00B\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfHistogram*Pointer to histogram used for drawing axis\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05TH1F*@\x00\x00w\x80\x00\x02\x08@\x00\x00o\x00\x02@\x00\x00i\x00\x04@\x00\x008\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMinimum"Minimum value for plotting along y\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00w\x80\x00\x02\x08@\x00\x00o\x00\x02@\x00\x00i\x00\x04@\x00\x008\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMaximum"Maximum value for plotting along y\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double\x00@\x00\x01"\x80\x00\x00[@\x00\x01\x1a\x00\t@\x00\x00\x12\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x04TH1F\x00\xe2\x93\x96D\x00\x00\x00\x03@\x00\x00\xf6\x80\x00\x00\x9b@\x00\x00\xee\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH1\x1a1-Dim histogram base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1c7@\xc4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x08@\x00\x00e\x80\x00\x00\xc6@\x00\x00]\x00\x03@\x00\x00S\x00\x04@\x00\x00$\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TArrayF\x0fArray of 
floats\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00Z\x0b\xf6\xf1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01\x00@\x00\x01:\x80\x00\x00[@\x00\x012\x00\t@\x00\x00\x15\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x07TArrayF\x00Z\x0b\xf6\xf1\x00\x00\x00\x01@\x00\x01\x0b\x80\x00\x00\x9b@\x00\x01\x03\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00n\x80\x00\x00\xc6@\x00\x00f\x00\x03@\x00\x00\\\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06TArray\x19Abstract array base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00p!\xb2\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00x\x80\x00\x17\xd0@\x00\x00p\x00\x02@\x00\x00\\\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06fArray\x17[fN] Array of fN floats\x00\x00\x00-\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06float*\x00\x00\x00\x01\x02fN\x06TArray\x00@\x00\x03F\x80\x00\x00[@\x00\x03>\x00\t@\x00\x00\x19\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x0bTMultiGraph\x00\xe0\x89<\xd5\x00\x00\x00\x02@\x00\x03\x13\x80\x00\x00\x9b@\x00\x03\x0b\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x06\x00\x00\x00\x00@\x00\x00\x7f\x80\x00\x00\xc6@\x00\x00w\x00\x03@\x00\x00m\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06TNamed*The basis for a named object (name, title)\x00\x00\x00C\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xdf\xb7J<\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00n\x80\x00\x16\xcb@\x00\x00f\x00\x02@\x00\x00`\x00\x04@\x00\x00/\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fGraphs\x1aPointer to list of TGraphs\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06TList*@\x00\x00\x83\x80\x00\x16\xcb@\x00\x00{\x00\x02@\x00\x00u\x00\x04@\x00\x00D\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfFunctions,Pointer to list of functions (fits and user)\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06TList*@\x00\x00\x80\x80\x00\x16\xcb@\x00\x00x\x00\x02@\x00\x00r\x00\x04@\x00\x00B\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfHistogram*Pointer to histogram used for drawing axis\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05TH1F*@\x00\x00w\x80\x00\x02\x08@\x00\x00o\x00\x02@\x00\x00i\x00\x04@\x00\x008\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMaximum"Maximum value for plotting along y\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00w\x80\x00\x02\x08@\x00\x00o\x00\x02@\x00\x00i\x00\x04@\x00\x008\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMinimum"Minimum value for plotting along 
y\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double\x00@\x00\x01!\x80\x00\x00[@\x00\x01\x19\x00\t@\x00\x00\x12\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x04TH1C\x006\xf6\xe4\xad\x00\x00\x00\x03@\x00\x00\xf5\x80\x00\x00\x9b@\x00\x00\xed\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH1\x1a1-Dim histogram base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1c7@\xc4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x08@\x00\x00d\x80\x00\x00\xc6@\x00\x00\\\x00\x03@\x00\x00R\x00\x04@\x00\x00#\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TArrayC\x0eArray of chars\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xae\x87\x996\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01\x00@\x00\x01 \x80\x00\x00[@\x00\x01\x18\x00\t@\x00\x00\x12\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x04TH1I\x00bud\xf6\x00\x00\x00\x03@\x00\x00\xf4\x80\x00\x00\x9b@\x00\x00\xec\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH1\x1a1-Dim histogram base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1c7@\xc4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x08@\x00\x00c\x80\x00\x00\xc6@\x00\x00[\x00\x03@\x00\x00Q\x00\x04@\x00\x00"\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TArrayI\rArray of ints\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd9\xd5q\xc7\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01\x00@\x00\x01"\x80\x00\x00[@\x00\x01\x1a\x00\t@\x00\x00\x12\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x04TH1S\x00\x8cM\x9d\xcb\x00\x00\x00\x03@\x00\x00\xf6\x80\x00\x00\x9b@\x00\x00\xee\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH1\x1a1-Dim histogram base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1c7@\xc4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x08@\x00\x00e\x80\x00\x00\xc6@\x00\x00]\x00\x03@\x00\x00S\x00\x04@\x00\x00$\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TArrayS\x0fArray of shorts\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\\\x93\x14\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01\x00@\x00\x01!\x80\x00\x00[@\x00\x01\x19\x00\t@\x00\x00\x12\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x04TH2C\x00\xbd\x00\x10\xfe\x00\x00\x00\x04@\x00\x00\xf5\x80\x00\x00\x9b@\x00\x00\xed\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH2\x1a2-Dim histogram base 
class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x824\x7f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x05@\x00\x00d\x80\x00\x00\xc6@\x00\x00\\\x00\x03@\x00\x00R\x00\x04@\x00\x00#\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TArrayC\x0eArray of chars\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xae\x87\x996\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01\x00@\x00\x01"\x80\x00\x00[@\x00\x01\x1a\x00\t@\x00\x00\x12\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x04TH2S\x00\x12V\xca\x1c\x00\x00\x00\x04@\x00\x00\xf6\x80\x00\x00\x9b@\x00\x00\xee\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH2\x1a2-Dim histogram base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x824\x7f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x05@\x00\x00e\x80\x00\x00\xc6@\x00\x00]\x00\x03@\x00\x00S\x00\x04@\x00\x00$\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TArrayS\x0fArray of shorts\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\\\x93\x14\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01\x00@\x00\x01 \x80\x00\x00[@\x00\x01\x18\x00\t@\x00\x00\x12\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x04TH2I\x00\xe8~\x91G\x00\x00\x00\x04@\x00\x00\xf4\x80\x00\x00\x9b@\x00\x00\xec\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH2\x1a2-Dim histogram base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x824\x7f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x05@\x00\x00c\x80\x00\x00\xc6@\x00\x00[\x00\x03@\x00\x00Q\x00\x04@\x00\x00"\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TArrayI\rArray of ints\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd9\xd5q\xc7\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01\x00@\x00\x01"\x80\x00\x00[@\x00\x01\x1a\x00\t@\x00\x00\x12\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x04TH2F\x00h\x9c\xc2\x95\x00\x00\x00\x04@\x00\x00\xf6\x80\x00\x00\x9b@\x00\x00\xee\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH2\x1a2-Dim histogram base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x824\x7f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x05@\x00\x00e\x80\x00\x00\xc6@\x00\x00]\x00\x03@\x00\x00S\x00\x04@\x00\x00$\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TArrayF\x0fArray of 
floats\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00Z\x0b\xf6\xf1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01\x00@\x00\x01!\x80\x00\x00[@\x00\x01\x19\x00\t@\x00\x00\x12\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x04TH3C\x00\xa1\xff\x8d\x94\x00\x00\x00\x04@\x00\x00\xf5\x80\x00\x00\x9b@\x00\x00\xed\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH3\x1a3-Dim histogram base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00B\xd2D_\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x06@\x00\x00d\x80\x00\x00\xc6@\x00\x00\\\x00\x03@\x00\x00R\x00\x04@\x00\x00#\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TArrayC\x0eArray of chars\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xae\x87\x996\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01\x00@\x00\x01"\x80\x00\x00[@\x00\x01\x1a\x00\t@\x00\x00\x12\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x04TH3S\x00\xf7VF\xb2\x00\x00\x00\x04@\x00\x00\xf6\x80\x00\x00\x9b@\x00\x00\xee\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH3\x1a3-Dim histogram base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00B\xd2D_\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x06@\x00\x00e\x80\x00\x00\xc6@\x00\x00]\x00\x03@\x00\x00S\x00\x04@\x00\x00$\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TArrayS\x0fArray of shorts\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\\\x93\x14\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01\x00@\x00\x01 \x80\x00\x00[@\x00\x01\x18\x00\t@\x00\x00\x12\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x04TH3I\x00\xcd~\r\xdd\x00\x00\x00\x04@\x00\x00\xf4\x80\x00\x00\x9b@\x00\x00\xec\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH3\x1a3-Dim histogram base class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00B\xd2D_\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x06@\x00\x00c\x80\x00\x00\xc6@\x00\x00[\x00\x03@\x00\x00Q\x00\x04@\x00\x00"\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TArrayI\rArray of ints\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd9\xd5q\xc7\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01\x00@\x00\x01"\x80\x00\x00[@\x00\x01\x1a\x00\t@\x00\x00\x12\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x04TH3F\x00M\x9c?+\x00\x00\x00\x04@\x00\x00\xf6\x80\x00\x00\x9b@\x00\x00\xee\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00@\x00\x00l\x80\x00\x00\xc6@\x00\x00d\x00\x03@\x00\x00Z\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03TH3\x1a3-Dim histogram base 
class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00B\xd2D_\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x06@\x00\x00e\x80\x00\x00\xc6@\x00\x00]\x00\x03@\x00\x00S\x00\x04@\x00\x00$\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07TArrayF\x0fArray of floats\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00Z\x0b\xf6\xf1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01\x00@\x00\x03a\xff\xff\xff\xffTList\x00@\x00\x03S\x00\x05\x00\x01\x00\x00\x00\x00\x02\x00@\x00\x0blistOfRules\x00\x00\x00\x05@\x00\x00\xa3\xff\xff\xff\xffTObjString\x00@\x00\x00\x90\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x83type=read sourceClass="TProfile" targetClass="TProfile" version="[1-5]" source="" target="fBinSumw2" code="{ fBinSumw2.Reset(); }" \x00@\x00\x00\x9c\x80\x00\x9d\x87@\x00\x00\x94\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x87type=read sourceClass="TProfile2D" targetClass="TProfile2D" version="[1-6]" source="" target="fBinSumw2" code="{ fBinSumw2.Reset(); }" \x00@\x00\x00\x9c\x80\x00\x9d\x87@\x00\x00\x94\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x87type=read sourceClass="TProfile3D" targetClass="TProfile3D" version="[1-6]" source="" target="fBinSumw2" code="{ fBinSumw2.Reset(); }" \x00@\x00\x00\xab\x80\x00\x9d\x87@\x00\x00\xa3\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x96type=read sourceClass="TTree" targetClass="TTree" version="[-16]" source="" target="fDefaultEntryOffsetLen" code="{ fDefaultEntryOffsetLen = 1000; }" \x00@\x00\x00\x98\x80\x00\x9d\x87@\x00\x00\x90\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x83type=read sourceClass="TTree" targetClass="TTree" version="[-18]" source="" target="fNClusterRange" code="{ fNClusterRange = 0; }" \x00\x00'
diff --git a/uproot/write/util.py b/uproot3/write/util.py
similarity index 94%
rename from uproot/write/util.py
rename to uproot3/write/util.py
index 0c6b70cb..3763e372 100644
--- a/uproot/write/util.py
+++ b/uproot3/write/util.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-# BSD 3-Clause License; see https://github.com/scikit-hep/uproot/blob/master/LICENSE
+# BSD 3-Clause License; see https://github.com/scikit-hep/uproot3/blob/master/LICENSE
 
 from __future__ import absolute_import