pax_global_header00006660000000000000000000000064150357333720014522gustar00rootroot0000000000000052 comment=53509bb8fd74fb4bc2073565d88c310406839b02 python-ihm-2.7/000077500000000000000000000000001503573337200134665ustar00rootroot00000000000000python-ihm-2.7/.appveyor.yml000066400000000000000000000014041503573337200161330ustar00rootroot00000000000000environment: # For Python versions available on Appveyor, see # https://www.appveyor.com/docs/windows-images-software/#python matrix: - {APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017, PYTHON: "C:\\Python36-x64"} - {APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019, PYTHON: "C:\\Python38-x64"} - {APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019, PYTHON: "C:\\Python39-x64"} - {APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2022, PYTHON: "C:\\Python313-x64"} install: - "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%" - "python.exe -m pip install codecov coverage pytest-cov setuptools" - choco install -y swig - python.exe setup.py build_ext --inplace build: off test_script: - "py.test --cov=ihm --cov-branch -v ." on_success: - "codecov" python-ihm-2.7/.codecov.yml000066400000000000000000000000661503573337200157130ustar00rootroot00000000000000ignore: - test - util - src/cmp.c - src/cmp.h python-ihm-2.7/.github/000077500000000000000000000000001503573337200150265ustar00rootroot00000000000000python-ihm-2.7/.github/workflows/000077500000000000000000000000001503573337200170635ustar00rootroot00000000000000python-ihm-2.7/.github/workflows/codeql-analysis.yml000066400000000000000000000036051503573337200227020ustar00rootroot00000000000000# For most projects, this workflow file will not need changing; you simply need # to commit it to your repository. # # You may wish to alter this file to override the set of languages analyzed, # or to provide custom queries or build logic. # # ******** NOTE ******** # We have attempted to detect the languages in your repository. Please check # the `language` matrix defined below to confirm you have the correct set of # supported CodeQL languages. # name: "CodeQL" on: push: branches: [ main ] pull_request: # The branches below must be a subset of the branches above branches: [ main ] schedule: - cron: '27 17 * * 5' jobs: analyze: name: Analyze runs-on: ubuntu-latest permissions: actions: read contents: read security-events: write strategy: fail-fast: false matrix: language: [ 'cpp', 'python' ] # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ] # Learn more: # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed steps: - name: Checkout repository uses: actions/checkout@v4 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL uses: github/codeql-action/init@v2 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. # By default, queries listed here will override any specified in a config file. # Prefix the list here with "+" to use these queries and those in the config file. 
# queries: ./path/to/local/query, your-org/your-repo/queries@main - run: | python setup.py build_ext --inplace -t build - name: Perform CodeQL Analysis uses: github/codeql-action/analyze@v2 python-ihm-2.7/.github/workflows/testpy.yml000066400000000000000000000023061503573337200211370ustar00rootroot00000000000000name: build on: [push, pull_request] jobs: build: strategy: fail-fast: false matrix: os: [ubuntu-24.04] python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13'] include: - os: macos-latest python-version: '3.10' runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies (Mac) if: matrix.os == 'macos-latest' run: | brew install swig - name: Install dependencies run: | python -m pip install --upgrade pip pip install coverage pytest-cov flake8 setuptools pep8-naming - name: Test run: | # Test with Python tokenizer py.test --cov=ihm --cov-branch -v . CFLAGS="-coverage" python setup.py build_ext --inplace -t build # Test with C tokenizer py.test --cov=ihm --cov-branch --cov-report=xml --cov-append -v . flake8 --ignore E402,W503,W504,N816 - uses: codecov/codecov-action@v4 env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} python-ihm-2.7/.gitignore000066400000000000000000000022561503573337200154630ustar00rootroot00000000000000.DS_Store # vim swapfiles .*.swp # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] *$py.class # C extensions *.so # Distribution / packaging .Python env/ build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ wheels/ *.egg-info/ .installed.cfg *.egg # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. 
*.manifest # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *.cover .hypothesis/ # Translations *.mo *.pot # Django stuff: *.log local_settings.py # Flask stuff: instance/ .webassets-cache # Scrapy stuff: .scrapy # Sphinx documentation docs/_build/ # PyBuilder target/ # Jupyter Notebook .ipynb_checkpoints # pyenv .python-version # celery beat schedule file celerybeat-schedule # SageMath parsed files *.sage.py # dotenv .env # virtualenv .venv venv/ ENV/ # Spyder project settings .spyderproject .spyproject # Rope project settings .ropeproject # mkdocs documentation /site # mypy .mypy_cache/ .pytest_cache python-ihm-2.7/.pylintrc000066400000000000000000000002001503573337200153230ustar00rootroot00000000000000[MASTER] init-hook="from pylint.config import find_pylintrc; import os, sys; sys.path.append(os.path.dirname(find_pylintrc()))" python-ihm-2.7/.readthedocs.yaml000066400000000000000000000020141503573337200167120ustar00rootroot00000000000000# Read the Docs configuration file for Sphinx projects # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details # Required version: 2 # Set the OS, Python version and other tools you might need build: os: ubuntu-22.04 tools: python: "3.11" # You can also specify other tool versions: # nodejs: "20" # rust: "1.70" # golang: "1.20" # Build documentation in the "docs/" directory with Sphinx sphinx: configuration: docs/conf.py # You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs # builder: "dirhtml" # Fail on all warnings to avoid broken references # fail_on_warning: true # Optionally build your docs in additional formats such as PDF and ePub # formats: # - pdf # - epub # Optional but recommended, declare the Python requirements required # to build your documentation # See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html # python: # install: # - requirements: docs/requirements.txt python-ihm-2.7/ChangeLog.rst000066400000000000000000000746511503573337200160640ustar00rootroot000000000000002.7 - 2025-07-07 ================ - Representative models for :class:`ihm.model.ModelGroup` can now be denoted using the :class:`ihm.model.ModelRepresentative` class (#173). - Bugfix: fix reference counts of Python bool objects. 2.6 - 2025-06-09 ================ - The new :class:`ihm.License` and :class:`ihm.Disclaimer` classes allow describing how the data in the file can be used, and map to the ``pdbx_data_usage`` mmCIF table (#171). - Two :class:`ihm.Assembly` classes are now considered equal if they contain the same set of constituents, even if those constituents are in differing orders or are split differently into domains (#170). - The new :class:`ihm.format.ChangeFuncValueFilter` class allows modifying mmCIF values by passing them through an arbitrary Python function. - Checks for non-canonical atom names no longer use Ligand Expo to obtain CCD info, as it is being retired (#169). - :class:`ihm.metadata.CIFParser` and :class:`ihm.metadata.BinaryCIFParser` now return the same ``entity_source`` mapping that :class:`ihm.metadata.PDBParser` does (#168). 2.5 - 2025-04-25 ================ - Assemblies are now checked at file-output time to ensure that each asym referenced in an assembly is represented by atoms and/or spheres in at least one model (#165). 
- ``make_mmcif`` has a new ``--check_atom_names`` option which, if set, will check for any non-canonical atom names (#166). - ``pip install`` should no longer fail to install if the C extension module cannot be built; it will fall back to use the slower pure Python implementation. 2.4 - 2025-03-25 ================ - Add support for fits of model groups or ensembles to cross-links using the new :class:`ihm.restraint.CrossLinkGroupFit` class. 2.3 - 2025-03-13 ================ - The C-accelerated BinaryCIF parser now supports interval quantization encoding and a wider range of data types. - ``make_mmcif`` now has a new ``--histidines`` option to convert any non-standard histidine names to HIS (#164). - Bugfix: don't change name or description of the complete assembly in ``make_mmcif``. - Bugfix: if :class:`ihm.reference.SeqDif` is used to annotate an insertion or deletion, don't erroneously claim the alignment is incorrect. 2.2 - 2025-02-13 ================ - The C-accelerated BinaryCIF parser now supports a wider range of data types. 2.1 - 2025-02-12 ================ - Minor fixes to the C-accelerated BinaryCIF parser. 2.0 - 2025-02-11 ================ - python-ihm no longer supports Python 2; the oldest supported version is now Python 3.6 (#161). - BinaryCIF files are now read in using a C-accelerated parser, which is faster and more memory efficient than the older Python parser (#160). - The tokenizers for mmCIF and BinaryCIF now return data of the correct type (int, float, bool, or string); previously, all values were returned as strings. :class:`ihm.reader.Handler` subclasses now request data of the correct type using Python type annotations. The API of the C-accelerated parsers has changed accordingly (#162). - The new :class:`ihm.metadata.BinaryCIFParser` class can extract metadata such as database IDs and template information from BinaryCIF files, in a similar fashion to the existing :class:`ihm.metadata.CIFParser`. - Information about a deposited structure, such as the deposition date, can now be read from :attr:`System.database_status` (#163). - The new :class:`ihm.format.ReplaceCategoryFilter` class can be used to completely replace or remove an mmCIF category when using :class:`ihm.format.CifTokenReader`. 1.8 - 2024-11-26 ================ - Support added for datasets containing EPR, X-ray diffraction, footprinting or predicted contacts using the :class:`ihm.dataset.EPRDataset`, :class:`ihm.dataset.XRayDiffractionDataset`, :class:`ihm.dataset.HydroxylRadicalFootprintingDataset`, :class:`ihm.dataset.DNAFootprintingDataset` and :class:`ihm.dataset.PredictedContactsDataset` classes (#157). - Revision information (in the ``pdbx_audit_revision_*`` mmCIF tables) can now be read or written using the new :class:`ihm.Revision` and :class:`ihm.RevisionDetails` classes (#156). - The new :class:`ihm.location.BMRbigLocation` class allows for referencing datasets stored in the BMRbig database. - All references to the old PDB-Dev database are now updated to PDB-IHM. 1.7 - 2024-10-22 ================ - Sanity checks when writing out a file can now be disabled if desired, using the new ``check`` argument to :func:`ihm.dumper.write` (#153). - Data that have been split over multiple mmCIF or BinaryCIF files can now be combined into a single :class:`ihm.System` object using the new ``add_to_system`` argument to :func:`ihm.reader.read`. - Input files that assign multiple :class:`ihm.location.Location` objects to a single :class:`ihm.dataset.Dataset` can now be read (#151). 
- Bugfix: multiple :class:`ihm.restraint.EM3DRestraint` and :class:`ihm.restraint.SASRestraint` objects can now be created for a single dataset, as long as they act on different assemblies, as allowed by the dictionary. - Bugfix: allow for non-standard residues in the ``struct_ref`` table (#154). 1.6 - 2024-09-27 ================ - The new class :class:`ihm.model.NotModeledResidueRange` allows for the annotation of residue ranges that were explicitly not modeled. These are written to the ``_ihm_residues_not_modeled`` mmCIF table, and any residue marked as not-modeled in all models will also be excluded from the ``pdbx_poly_seq_scheme`` table. - The ``make_mmcif`` utility script will now automatically add any missing :class:`ihm.model.NotModeledResidueRange` objects for not-modeled residue ranges (#150). - Bugfix: the residue range checks introduced in version 1.5 broke the API used by python-modelcif. They have been reimplemented using the original API. - Bugfix: an unknown (?) value for ``pdbx_poly_seq_scheme.auth_seq_num`` is now preserved, not silently removed, when reading an mmCIF file. 1.5 - 2024-09-06 ================ - Trying to create a :class:`ihm.Residue`, :class:`ihm.EntityRange`, or :class:`ihm.AsymUnitRange` that references out-of-range residues (i.e. ``seq_id`` less than 1 or beyond the length of the :class:`ihm.Entity` sequence) will now raise an error. - Bugfix: :class:`ihm.reference.Reference` objects are no longer given erroneous duplicate IDs on output (#149). 1.4 - 2024-08-30 ================ - :class:`ihm.metadata.CIFParser` now extracts metadata from mmCIF starting models from Model Archive or compliant with the ModelCIF dictionary. - :meth:`ihm.Citation.from_pubmed_id` now takes an ``is_primary`` argument, to allow denoting the publication as the most pertinent for the modeling. - Duplicate references, pseudo sites, and features are now pruned on output (#148). - :class:`ihm.restraint.ResidueFeature` now reports an error if it is given zero residue ranges (#147). - Bugfix: allow for :class:`ihm.startmodel.Template` ``seq_id_range`` or ``template_seq_id_range`` to be empty. 1.3 - 2024-07-16 ================ - The new class :class:`ihm.location.ProteomeXchangeLocation` can be used for datasets stored in the ProteomeXchange database. - Support is added for changes in the IHMCIF dictionary, specifically the renaming of "CX-MS data" to "Crosslinking-MS data" and the ``_ihm_ordered_ensemble`` category to ``_ihm_ordered_model``. python-ihm will output the new names, but for backwards compatibility will read both old and new names. - :class:`ihm.protocol.Protocol` can now be given additional text to describe the protocol. - :class:`ihm.model.Atom` now takes an ``alt_id`` argument to support alternate conformations (#146). - Support added for NumPy 2.0. 1.2 - 2024-06-12 ================ - :class:`ihm.format.CifTokenReader` allows for reading an mmCIF file and breaking it into tokens. This can be used for various housekeeping tasks directly on an mmCIF file, such as changing chain IDs or renaming categories or data items, while preserving most other formatting such as comments and whitespace (#141). - :class:`ihm.restraint.HDXRestraint` adds support for restraints derived from Hydrogen-Deuterium Exchange experiments (#143). - The ``make_mmcif`` utility script now preserves more "orphan" data from the input file that is not referenced by other tables (#144). 
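As an illustration of the ``is_primary`` flag added to
:meth:`ihm.Citation.from_pubmed_id` in the 1.4 notes above, a citation can be
fetched and attached to a system roughly as follows. This is a minimal sketch;
the PubMed ID is an arbitrary placeholder and the call needs network access::

    import ihm

    system = ihm.System(title='Example integrative model')

    # Look up citation details in PubMed and mark this publication as the
    # most pertinent one for the modeling
    citation = ihm.Citation.from_pubmed_id(12345678, is_primary=True)
    system.citations.append(citation)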
1.1 - 2024-05-09 ================ - :class:`ihm.System` now allows for one or more official database IDs to be associated with the system using the new :class:`ihm.Database` class. This maps to the mmCIF ``_database_2`` category (#135). - :class:`ihm.location.FileLocation` now allows for an optional file format to be specified (#139). - The ``util/make-mmcif.py`` script is now included in the installed package, so can be run if desired with ``python3 -m ihm.util.make_mmcif`` (#134). - Bugfix: allow for file sizes in input mmCIF files to be floating point values (#138). - Bugfix: the 'Other' content type is now handled correctly when reading information about external files from an mmCIF file (#139). 1.0 - 2024-02-13 ================ - Support for multi-state schemes (such as kinetic rates and relaxation times for conversions between states) was added; see :mod:`ihm.multi_state_scheme`. - Residue numbering in non-polymer, water, and branched entities should now be better handled, no longer requiring the various scheme tables to precede ``atom_site``. If you subclass :class:`ihm.model.Model`, atoms may need to be renumbered; see :meth:`ihm.model.Model.add_atom` (#130). - Original author-provided numbering can now be provided for waters, using the ``orig_auth_seq_id_map`` argument to :class:`ihm.WaterAsymUnit`. - The make-mmcif.py utility script now has basic functionality for combining multiple input files into one, relabeling chain IDs if necessary. - An :class:`ihm.Entity` containing just a single sugar is now classified as a nonpolymer, not branched. 0.43 - 2023-12-08 ================= - Branched and polymeric empty entities are now distinguished based on entity.type in the input file (previously, any empty entity would always be reported as a polymer) (#129). - Warn rather than reporting an error if the system contains one or more empty entities (#128). - If an input file contains multiple duplicated datasets, preserve them as is rather than consolidating into a single dataset (#127). - Allow for multiple branched entities to have the same composition (they could have different connectivity) (#126). 0.42 - 2023-11-30 ================= - The :class:`ihm.metadata.CIFParser` class now parses Modeller-specific CIF categories to add information about software and templates for Modeller-generated mmCIF starting models. - Basic support for original author-provided residue numbering is now provided in the :class:`ihm.AsymUnit` class with a new ``orig_auth_seq_id_map`` argument. This information is read from and written to the various mmCIF tables such as ``pdbx_poly_seq_scheme`` (#124). 0.41 - 2023-10-02 ================= - More complete support for oligosaccharides, in particular correct numbering for atoms in `atom_site`, and the addition of some data items to the output which are required for full dictionary compliance. 0.40 - 2023-09-25 ================= - Basic support for oligosaccharides is now provided. New classes are provided to describe saccharide chemical components (:class:`ihm.SaccharideChemComp` and subclasses). Unlike polymers and non-polymers, oligosaccharides can be branched, and a new :class:`ihm.BranchLink` class allows the linkage between individual components to be described. - A summary report of the system can now be produced by calling :meth:`ihm.System.report`. This can help to reveal errors or inconsistencies, and will warn about missing data that may not be technically required for a compliant mmCIF file, but is usually expected to be present. 
- :class:`ihm.metadata.MRCParser` now uses the new EMDB API to extract version information and details for electron density map datasets. - RPM packages are now available for recent versions of Fedora and RedHat Enterprise Linux. 0.39 - 2023-08-04 ================= - :class:`ihm.location.DatabaseLocation` no longer accepts a ``db_name`` parameter. Derived classes (such as :class:`ihm.location.PDBLocation`) should be used instead; the base class should only be used for "other" databases that are not described in the IHM dictionary (#116). - Bugfix: AlphaFold models in PDB format are no longer categorized by :class:`ihm.metadata.PDBParser` as being deposited in the PDB database with an empty accession code. 0.38 - 2023-05-26 ================= - Convenience classes are added to describe datasets stored in the Model Archive, iProX, and AlphaFoldDB repositories (:class:`ihm.location.ModelArchiveLocation`, :class:`ihm.location.IProXLocation`, and :class:`ihm.location.AlphaFoldDBLocation` respectively). - The new class :class:`ihm.metadata.CIFParser` can be used to extract metadata from starting models in mmCIF format. It is currently in development and only supports model metadata from PDB or Model Archive at this time. - Line wrapping of output mmCIF files can now be turned if desired using :func:`ihm.dumper.set_line_wrap` (by default files are wrapped to 80 characters if possible). - The make-mmcif.py utility script now allows for the name of the output mmCIF file to be overridden (#115). 0.37 - 2023-02-03 ================= - Convenience classes are added to describe ensemble FRET datasets (:class:`ihm.dataset.EnsembleFRETDataset`) and datasets stored in the jPOSTrepo repository (:class:`ihm.location.JPOSTLocation`). - Related depositions can now be grouped using the :class:`ihm.Collection` class (#108). - The :class:`ihm.model.Ensemble` class has a new ``superimposed`` attribute to indicate whether the grouped models are structurally aligned. 0.36 - 2023-01-25 ================= - When reading a file that references external files, preserve any information on the size of those files (#104). - When reading a file containing models not in a model group, preserve any information on the number of models deposited (#105). - Bugfix: :func:`ihm.dictionary.read` now correctly handles dictionaries that define a category after data items in that category (#107). 0.35 - 2022-09-16 ================= - Author names now use PDB style ("Lastname, A.B.") by default rather than PubMed style ("Lastname AB") (#95). - Asyms containing multiple water molecules should now be correctly handled (previously every water molecule in the output ``atom_site`` table was given the same ``auth_seq_id``). Use the new :class:`ihm.WaterAsymUnit` to create an asym containing waters (#98). - Masses for all elements are now included, so that ``_entity.formula_weight`` can be correctly populated for ligands (#99). - Bugfix: :class:`ihm.analysis.Analysis` objects are now read correctly from input files when two objects share the same ID but are part of different protocols (#101). 0.34 - 2022-08-03 ================= - Strings that start with STAR reserved words such as ``stop_`` are now quoted to help some readers such as the GEMMI library (ihmwg/python-modelcif#25). - If an input file defines a chemical descriptor with an empty name but also defines ``linker_type``, use that to fill in the name (#91). 
- :class:`ihm.ChemComp` now allows for chemical components to be defined in a chemical component dictionary (CCD) outside of the wwPDB CCD. This is not used in python-ihm itself but can be used in python-modelcif. - Bugfix: if a read mmCIF file defines a complete assembly, do not overwrite its name and description on output (#92). - Bugfix: only allow clustering methods/features that are supported by the underlying IHM dictionary for :class:`ihm.model.Ensemble` (#94). - Bugfix: categories such as ``_struct`` that are not typically looped now support multi-line strings (ihmwg/python-modelcif#27). 0.33 - 2022-06-27 ================= - Improve reading of mmCIF files with incomplete data (#86, #87) or with categories in an unexpected order (#85). - Bugfix: fix sanity check for multiple atoms with the same atom_id and seq_id to handle bulk water (where such duplicates are OK) (#88). 0.32 - 2022-05-31 ================= - :class:`ihm.protocol.Step` now takes an ``ensemble`` flag, to indicate whether the modeling involved an ensemble, and which defaults to True if the system contains at least one :class:`ihm.model.Ensemble` (#83). - When reading an incomplete mmCIF file, such as that generated by some versions of PyMOL, python-ihm will now fill in missing entity-related information by guessing the sequence from the atom_site table (#67). - Bugfix: :class:`ihm.flr.RefMeasurementGroup` objects are now read from mmCIF files correctly. 0.31 - 2022-04-14 ================= - The :class:`ihm.dumper.IgnoreVariant` class can now be used to exclude selected categories from the mmCIF/BinaryCIF output. - The _pdbx_nonpoly_scheme CIF table should now fully comply with the PDBx dictionary. - Atoms are now checked at file-output time to ensure that a given model chain does not contain multiple atoms with the same atom_id and seq_id (#81). 0.30 - 2022-04-05 ================= - Add support for a long description of the system (like an abstract) using struct.pdbx_model_details (#80). - Bugfix: correctly read mmCIF files with missing entity.type. 0.29 - 2022-04-01 ================= - Output mmCIF files containing non-polymers should now validate against the PDBx dictionary (#76). - Bugfix: non-polymers that are erroneously marked as polymers in the input mmCIF can now be read in without causing a Python exception (#78). - Bugfix: strings starting with an underscore (e.g. chain names) are now quoted in mmCIF output to conform to the CIF syntax (#75). 0.28 - 2022-03-21 ================= - :class:`ihm.Citation` now takes a ``is_primary`` argument, which can be used to denote the most pertinent publication for the modeling. - Improved support for non-standard residues, and for standard amino acids used as nonpolymers. 0.27 - 2022-01-27 ================= - Minor documentation improvements. - Add support for the _struct.pdbx_structure_determination_methodology mmCIF data item. 0.26 - 2022-01-12 ================= - :func:`ihm.dumper.write` and :func:`ihm.reader.read` both now take a ``variant`` argument which can be used to control the set of tables that are read/written. This can be used by other libraries (such as python-ma) to support other mmCIF extensions. 0.25 - 2021-12-03 ================= - :func:`ihm.dictionary.Dictionary.validate` will now report errors for any keywords or categories in the file that are not present in the dictionary. - :class:`ihm.LPeptideAlphabet` now supports the ASX and GLX ambiguous residue types. 
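The validation behaviour described in the 0.25 notes above can be used roughly
as follows. This is a sketch which assumes a local copy of a dictionary file
(here ``mmcif_ihm_ext.dic``, downloadable from mmcif.wwpdb.org) and an mmCIF
file ``output.cif`` to check::

    import ihm.dictionary

    # Parse the dictionary
    with open('mmcif_ihm_ext.dic') as fh:
        ihm_dict = ihm.dictionary.read(fh)

    # Validate a file against it; categories, keywords or values that do not
    # conform raise an ihm.dictionary.ValidatorError
    with open('output.cif') as fh:
        ihm_dict.validate(fh)

To validate against several dictionaries at once (e.g. both PDBx and IHM),
:class:`ihm.dictionary.Dictionary` objects can be added together, as noted in
the 0.24 entry below.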
0.24 - 2021-12-01 ================= - :class:`ihm.AsymUnit` now supports insertion codes in its ``auth_seq_id_map``. The target of this mapping can either be an author-provided residue number (as previously) or a 2-element tuple containing this number and an insertion code. - :class:`ihm.AsymUnit` now allows the PDB or author-provided strand/chain ID to be different from the regular ID. - Bugfix: if two :class:`ihm.dictionary.Dictionary` objects both contain information about a given category, adding the two dictionaries together now combines the category information, rather than just using that from one dictionary. - Bugfix: :class:`ihm.dictionary.Dictionary` should now be able to validate BinaryCIF files containing integer or float values (#66). 0.23 - 2021-11-01 ================= - Bugfix: _struct_ref.pdbx_seq_one_letter_code is now treated as the subset of the reference (e.g. UniProt) sequence that overlaps with our Entities, not the entire sequence (#64). 0.22 - 2021-10-22 ================= - The :class:`ihm.Software` class now allows a citation for the software to be provided. - A new :mod:`ihm.citations` module contains citations for some packages that are commonly used in integrative modeling. 0.21 - 2021-07-14 ================= - BinaryCIF files now use UTF8 msgpack strings for all text, rather than raw bytes. This should make python-ihm's BinaryCIF files interoperable with those used by, e.g., CoordinateServer. - Output mmCIF files now include author-provided numbering (auth_seq_id) for atoms in the atom_site table. This should help packages that don't read the pdbx_poly_seq_scheme table to show the desired residue numbering (#61). 0.20 - 2021-05-06 ================= - Support for Python 2.6 has been dropped. The library needs Python 2.7 or Python 3. - Bugfix: correctly read in multiline reference sequence one-letter codes. - Bugfix: the reader is now more tolerant of omitted or unknown values (. or ?) in input mmCIF files. 0.19 - 2021-04-16 ================= - A convenience class is added to describe datasets stored in the ProXL database (:class:`ihm.location.ProXLLocation`). 0.18 - 2020-11-06 ================= - Update to match latest FLR dictionary. - Add a simple utility (util/make-mmcif.py) to make a minimal compliant IHM mmCIF file, given an mmCIF file (potentially just coordinates) as input. - Bugfix: the full residue range spanned by a starting model is now reported, rather than just the subset that is mapped to one or more templates (#55). - Bugfix: handle TrEMBL UniProt sequences (#57). 0.17 - 2020-07-10 ================= - Convenience classes are added to describe hydrogen/deuterium exchange data (:class:`ihm.dataset.HDXDataset`) and datasets stored in the PDB-Dev database (:class:`ihm.location.PDBDevLocation`). - Multiple :class:`ihm.restraint.CrossLinkPseudoSite` objects can now be assigned to a given :class:`ihm.restraint.CrossLink`. - Bugfix: the :class:`ihm.dataset.Dataset` base class now has a type of "Other" rather than "unspecified" to conform with the latest IHM dictionary. 0.16 - 2020-05-29 ================= - :func:`ihm.reader.read` no longer discards models read from non-IHM mmCIF files; they are instead placed in their own :class:`ihm.model.ModelGroup`. - Bugfix: both the pure Python and C-accelerated mmCIF readers are now more robust, able to handle files in binary mode (e.g. from opening a URL) and in Unicode (mmCIF files are supposed to be ASCII but python-ihm should handle any encoding Python supports). 
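As a rough sketch of the reading behaviour described in the 0.16 notes above
(``input.cif`` is a placeholder filename)::

    import ihm.reader

    # Read all systems (one per data block) from an mmCIF file. Models in
    # non-IHM files are kept, in their own ModelGroup, rather than discarded.
    with open('input.cif') as fh:
        systems = ihm.reader.read(fh)

    # Walk the state hierarchy of the first system to list its model groups
    for state_group in systems[0].state_groups:
        for state in state_group:
            for model_group in state:
                print(model_group.name, len(model_group), 'model(s)')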
0.15 - 2020-04-14 ================= - :class:`ihm.dataset.Dataset` objects that derive from another dataset can now record any transformation involved; see :class:`ihm.dataset.TransformedDataset`. - :class:`ihm.metadata.PDBParser` now extracts basic metadata from PDB files generated by SWISS-MODEL. - An :class:`ihm.Entity` can now be linked to one or more reference databases (e.g. UniProt). See the classes in the :mod:`ihm.reference` module. 0.14 - 2020-02-26 ================= - A cross-link can now use pseudo sites to represent one or both ends of the link. The new :class:`ihm.restraint.CrossLinkPseudoSite` object is used when the end of the cross-link is not represented in the model but its position is known (e.g. it may have been approximated given the position of nearby residues). - :class:`ihm.restraint.PseudoSiteFeature` now references an underlying :class:`ihm.restraint.PseudoSite`, allowing a single pseudo site to be shared between a feature and a cross-link if desired. - :class:`ihm.model.Ensemble` now supports describing subsamples from which the ensemble was constructed; see :class:`ihm.model.Subsample`. - Bugfix: :meth:`ihm.Citation.from_pubmed_id` now works correctly when the journal volume or page range are empty, or the page "range" is just a single page. 0.13 - 2019-11-14 ================= - :func:`ihm.reader.read` has a new optional ``reject_old_file`` argument. If set, it will raise an exception if asked to read a file that conforms to too old a version of the IHM extension dictionary. - Definitions for the DHSO and BMSO cross-linkers are now provided in the :mod:`ihm.cross_linkers` module. 0.12 - 2019-10-16 ================= - :class:`ihm.restraint.ResidueFeature` objects can now act on one or more :class:`Residue` objects, which act equivalently to 1-residue ranges (:class:`AsymUnitRange` or :class:`EntityRange`). - The new :class:`ihm.dataset.GeneticInteractionsDataset` class and the ``mic_value`` argument to :class:`ihm.restraint.DerivedDistanceRestraint` can be used to represent restraints from genetic interactions, such as point-mutant epistatic miniarray profile (pE-MAP) data. 0.11 - 2019-09-05 ================= - :class:`ihm.Assembly` objects can now only contain :class:`AsymUnit` and :class:`AsymUnitRange` objects (not :class:`Entity` or :class:`EntityRange`). - Bugfix: ensembles that don't reference a :class:`ihm.model.ModelGroup` no longer cause the reader to create bogus empty model groups. 0.10 - 2019-07-09 ================= - Features (:class:`ihm.restraint.AtomFeature`, :class:`ihm.restraint.ResidueFeature`, and :class:`ihm.restraint.NonPolyFeature`), which previously could select part or all of an :class:`ihm.AsymUnit`, can now also select parts of an :class:`Entity`. A restraint acting on an entity-feature is assumed to apply to all instances of that entity. 0.9 - 2019-05-31 ================ - Add support for the latest version of the IHM dictionary. 0.8 - 2019-05-28 ================ - :func:`ihm.reader.read` can now be asked to warn if it encounters categories or keywords in the mmCIF or BinaryCIF file that it doesn't know about (and will ignore). - Predicted contacts (:class:`ihm.restraint.PredictedContactRestraint`) are now supported. - :func:`ihm.reader.read` will now read starting model coordinates and sequence difference information into the :class:`ihm.startmodel.StartingModel` class. Applications that don't require coordinates can instruct the reader to ignore them with the new `read_starting_model_coord` flag. 
- The new :mod:`ihm.flr` module allows for information from Fluorescence / FRET experiments to be stored. This follows the definitions in the `FLR dictionary `_. 0.7 - 2019-04-24 ================ - Authors of the mmCIF file itself (`_audit_author` category) can now be set by manipulating :attr:`ihm.System.authors`. (If this list is empty on output, the set of all citation authors is used instead, as before.) - Any grants that supported the modeling can now be listed in :attr:`ihm.System.grants`. - A copy of `SWIG `_ is no longer needed to install releases of python-ihm via `pip` as pre-generated SWIG outputs are included in the PyPI package. SWIG is still needed to build directly from source code though. 0.6 - 2019-03-22 ================ - :class:`Entity` now takes an optional :class:`ihm.source.Source` object to describe the method by which the sample for the entity was produced. :class:`ihm.metadata.PDBParser` will also extract this information from input PDB files. - :func:`ihm.reader.read` and :func:`ihm.dumper.write` now support reading or writing additional user-defined mmCIF categories. 0.5 - 2019-01-17 ================ - :class:`ihm.restraint.CrossLinkRestraint` now takes an :class:`ihm.ChemDescriptor` object rather than the name of the cross-linker used. This allows the use of novel cross-linkers (beyond those currently listed in a fixed enumeration in the IHM dictionary). :class:`ihm.ChemDescriptor` allows for the chemical structure of the cross-linker to be uniquely specified, as a SMILES or INCHI string. The :mod:`ihm.cross_linkers` module provides chemical descriptors for some commonly-used cross-linkers. - Pseudo sites are now supported. :class:`ihm.restraint.PseudoSiteFeature` allows points or spheres with arbitrary coordinates to be designated as features, which can then be used in :class:`ihm.restraint.DerivedDistanceRestraint`. 0.4 - 2018-12-17 ================ - Certain restraints can now be grouped using the :class:`ihm.restraint.RestraintGroup` class. Due to limitations of the underlying dictionary, this only works for some restraint types (currently only :class:`ihm.restraint.DerivedDistanceRestraint`) and all restraints in the group must be of the same type. - Bugfix: the model's representation (see :mod:`ihm.representation`) need not be a strict subset of the model's :class:`ihm.Assembly`. However, any :class:`ihm.model.Atom` or :class:`ihm.model.Sphere` objects must be covered by both the representation and the model's :class:`ihm.Assembly`. - Bugfix: the reader no longer fails to read files that contain _entity.formula_weight. 0.3 - 2018-11-21 ================ - The library now includes basic support for nonpolymers and water molecules. In addition to the previous support for polymers (amino or nucleic acid chains), :class:`ihm.Entity` objects can now comprise ligands, water molecules, and user-defined chemical components. - The library can now read mmCIF dictionaries and validate mmCIF or BinaryCIF files against them. See :mod:`ihm.dictionary`. - Any :class:`ihm.model.Atom` or :class:`ihm.model.Sphere` objects are now checked against the model's representation (see :mod:`ihm.representation`); for example, an :class:`ihm.model.Atom` must correspond to an :class:`ihm.representation.AtomicSegment`. The representation in turn must be a subset of the model's :class:`ihm.Assembly`. - More examples are now provided, of creating and using non-standard residue types (chemical components); representing nonpolymers; and using the C mmCIF parser in other C programs. 
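The non-polymer and water support described in the 0.3 notes above looks
roughly like this in code. This is a sketch; ``HEM`` is the wwPDB chemical
component ID for heme::

    import ihm

    # Ligands and bulk water are entities containing a single non-polymer or
    # water chemical component, rather than a polymer sequence
    heme = ihm.Entity([ihm.NonPolymerChemComp('HEM')], description='Heme')
    water = ihm.Entity([ihm.WaterChemComp()], description='Water')

    system = ihm.System()
    system.entities.extend((heme, water))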
0.2 - 2018-09-06 ================ - This release should fix installation of the package using pip: `pip install ihm` should now work correctly. 0.1 - 2018-09-06 ================ - First stable release. This provides largely complete support for the current version of the wwPDB IHM mmCIF extension dictionary, and will read and write mmCIF and BinaryCIF files that are compliant with the PDBx and IHM dictionaries. python-ihm-2.7/LICENSE000066400000000000000000000020671503573337200145000ustar00rootroot00000000000000MIT License Copyright (c) 2018-2025 IHM Working Group Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. python-ihm-2.7/MANIFEST.in000066400000000000000000000002371503573337200152260ustar00rootroot00000000000000include ChangeLog.rst include LICENSE include examples/* include src/cmp.h include src/ihm_format.h include src/ihm_format.i include src/ihm_format_wrap_2.7.c python-ihm-2.7/README.md000066400000000000000000000067231503573337200147550ustar00rootroot00000000000000[![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.2603378.svg)](https://doi.org/10.5281/zenodo.2603378) [![docs](https://readthedocs.org/projects/python-ihm/badge/)](https://python-ihm.readthedocs.org/) [![conda package](https://img.shields.io/conda/vn/conda-forge/ihm.svg)](https://anaconda.org/conda-forge/ihm) [![pypi package](https://badge.fury.io/py/ihm.svg)](https://badge.fury.io/py/ihm) [![Linux Build Status](https://github.com/ihmwg/python-ihm/workflows/build/badge.svg)](https://github.com/ihmwg/python-ihm/actions?query=workflow%3Abuild) [![Windows Build Status](https://ci.appveyor.com/api/projects/status/5o28oe477ii8ur4h?svg=true)](https://ci.appveyor.com/project/benmwebb/python-ihm) [![codecov](https://codecov.io/gh/ihmwg/python-ihm/branch/main/graph/badge.svg)](https://codecov.io/gh/ihmwg/python-ihm) This is a Python package to assist in handling [mmCIF](https://mmcif.wwpdb.org/) and [BinaryCIF](https://github.com/molstar/BinaryCIF) files compliant with the [integrative/hybrid modeling (IHM)](https://mmcif.wwpdb.org/dictionaries/mmcif_ihm_ext.dic/Index/) extension. It works with Python 3.6 or later. To handle non-integrative theoretical models (for example, homology models), see the [python-modelcif](https://github.com/ihmwg/python-modelcif) package which supports files compliant with the [ModelCIF](https://mmcif.wwpdb.org/dictionaries/mmcif_ma.dic/Index/) extension. Please [see the documentation](https://python-ihm.readthedocs.org/) or some [worked examples](https://github.com/ihmwg/python-ihm/tree/main/examples) for more details. 
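# Quick example

For a quick taste of the API, the sketch below builds a bare-bones system
containing a single protein chain and writes it out as mmCIF. The sequence,
titles and filename are arbitrary placeholders, and the output-time sanity
checks are skipped because this sketch contains no coordinates; see the
documentation and worked examples linked above for complete, real-world usage.

```python
import ihm
import ihm.dumper

system = ihm.System(title='Minimal example')

# Define one protein chain by its one-letter-code sequence, then add a
# single instance (asymmetric unit) of it to the system
entity = ihm.Entity('MELSKQAV', description='Subunit A')
system.entities.append(entity)
asym = ihm.AsymUnit(entity, details='Subunit A')
system.asym_units.append(asym)

# Write out an mmCIF file (a real deposition would also describe the input
# data, modeling protocol and models, and would leave the checks enabled)
with open('output.cif', 'w') as fh:
    ihm.dumper.write(fh, [system], check=False)
```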
# Installation with conda, Homebrew or pip If you are using [Anaconda Python](https://www.anaconda.com/), install with ``` conda install -c conda-forge ihm ``` On a Mac with [Homebrew](https://brew.sh/), install with ``` brew tap salilab/salilab; brew install ihm ``` On a Fedora or RedHat Enterprise Linux box, install with ``` dnf copr enable salilab/salilab; dnf install python3-ihm ``` On an Ubuntu LTS box, install from [our PPA](https://launchpad.net/~salilab/+archive/ubuntu/ppa) with ``` apt install software-properties-common; add-apt-repository ppa:salilab/ppa; apt install python3-ihm ``` Alternatively, install with pip: ``` pip install ihm ``` (Note that pip builds a C extension module for faster reading of mmCIF and BinaryCIF files. This requires that your system has a C compiler. If you don't have a C compiler available, the library will read files using pure Python instead.) # Installation from source code To build and install from a clone of the GitHub repository, run ``` python setup.py build python setup.py install ``` Note that this will attempt to build a C extension module for faster reading of mmCIF and BinaryCIF files. This requires that your system has a C compiler and [SWIG](https://www.swig.org/). If either of these components are missing, the library will fall back to reading files using pure Python instead. If you want to write [BinaryCIF](https://github.com/molstar/BinaryCIF) files (or to read them without the C extension module), you will also need the Python [msgpack](https://github.com/msgpack/msgpack-python) package. # Testing There are a number of testcases in the `test` directory. Each one can be run like a normal Python script to test the library. They can also be all run at once using [nose](https://nose.readthedocs.io/en/latest/) or [pytest](https://docs.pytest.org/en/latest/). They will also test the C extension module if it is first built with `python setup.py build_ext --inplace`. python-ihm-2.7/docs/000077500000000000000000000000001503573337200144165ustar00rootroot00000000000000python-ihm-2.7/docs/Makefile000066400000000000000000000011371503573337200160600ustar00rootroot00000000000000# Minimal makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build SPHINXPROJ = Python-IHM SOURCEDIR = . BUILDDIR = _build # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) .PHONY: help Makefile # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)python-ihm-2.7/docs/analysis.rst000066400000000000000000000006521503573337200167760ustar00rootroot00000000000000.. highlight:: rest .. _analysis_module: The :mod:`ihm.analysis` Python module ===================================== .. automodule:: ihm.analysis .. autoclass:: Step :members: .. autoclass:: FilterStep :members: .. autoclass:: ClusterStep :members: .. autoclass:: RescoreStep :members: .. autoclass:: ValidationStep :members: .. autoclass:: EmptyStep :members: .. autoclass:: Analysis :members: python-ihm-2.7/docs/changes.rst000066400000000000000000000001431503573337200165560ustar00rootroot00000000000000.. _changes: .. currentmodule:: ihm Change history ************** .. 
include:: ../ChangeLog.rst python-ihm-2.7/docs/citations.rst000066400000000000000000000023241503573337200171460ustar00rootroot00000000000000.. highlight:: rest .. _citations_module: The :mod:`ihm.citations` Python module ====================================== .. automodule:: ihm.citations .. data:: imp The Integrative Modeling Platform (IMP). .. data:: pmi The PMI module of the Integrative Modeling Platform (IMP). .. data:: modeller MODELLER, comparative modeling by satisfaction of spatial restraints. .. data:: psipred PSIPRED, protein secondary structure prediction based on position-specific scoring matrices. .. data:: disopred DISOPRED, disordered region prediction. .. data:: hhpred HHpred, protein homology detection and structure prediction. .. data:: relion RELION, a Bayesian approach for cryo-EM structure determination. .. data:: phyre2 Phyre2, a web portal for protein modeling, prediction and analysis. .. data:: swiss_model SWISS-MODEL: homology modeling of protein structures and complexes. .. data:: alphafold2 AlphaFold: ab-initio modeling of protein structures. .. data:: colabfold ColabFold: accessible AlphaFold pipeline. .. data:: qmeandisco QMEANDisCo: model quality estimation with distance constraints. .. data:: mmseqs2 MMseqs2: app for fast, interactive sequence searches. python-ihm-2.7/docs/conf.py000066400000000000000000000115351503573337200157220ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Python-IHM documentation build configuration file, created by # sphinx-quickstart on Thu Mar 1 14:05:33 2018. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # import os import sys sys.path.insert(0, os.path.abspath('..')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. # # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] source_suffix = '.rst' # The master toctree document. master_doc = 'index' # General information about the project. project = u'Python-IHM' copyright = u'2018-2025, Benjamin Webb' author = u'Benjamin Webb' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = u'' # The full version, including alpha/beta/rc tags. release = u'' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. 
language = None # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This patterns also effect to html_static_path and html_extra_path exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # # html_theme_options = {} # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Custom sidebar templates, must be a dictionary that maps document names # to template names. # html_sidebars = {} # -- Options for HTMLHelp output ------------------------------------------ # Output file base name for HTML help builder. htmlhelp_basename = 'Python-IHMdoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # # 'preamble': '', # Latex figure (float) alignment # # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ (master_doc, 'Python-IHM.tex', u'Python-IHM Documentation', u'Benjamin Webb', 'manual'), ] # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ (master_doc, 'python-ihm', u'Python-IHM Documentation', [author], 1) ] # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'Python-IHM', u'Python-IHM Documentation', author, 'Python-IHM', 'One line description of project.', 'Miscellaneous'), ] # Warn about broken links to classes, etc. nitpicky = True python-ihm-2.7/docs/cross_linkers.rst000066400000000000000000000046321503573337200200350ustar00rootroot00000000000000.. highlight:: rest .. _cross_linkers_module: The :mod:`ihm.cross_linkers` Python module ========================================== .. automodule:: ihm.cross_linkers .. data:: dss DSS cross-linker that links a primary amine with another primary amine (non-water-soluble). .. data:: dsg DSG cross-linker that links a primary amine with another primary amine (non-water-soluble). .. data:: bs3 BS3 cross-linker that links a primary amine with another primary amine (water-soluble). .. data:: dsso DSSO cross-linker that links a primary amine with another primary amine (non-water-soluble). It is similar to DSS but can be cleaved in the gas phase using collision-induced dissociation. .. 
data:: edc EDC cross-linker that links a carboxyl group with a primary amine. .. data:: dhso DHSO (dihydrazide sulfoxide) MS-cleavable cross-linker that links carboxyl groups, described in `Gutierrez et al, 2016 `_. .. data:: bmso BMSO (bismaleimide sulfoxide) MS-cleavable cross-linker that links cysteines, described in `Gutierrez et al, 2018 `_. .. data:: sda SDA (NHS-Diazirine) (succinimidyl 4,4′-azipentanoate) cross-linker that links primary amines with nearly any other functional group via long-wave UV-light activation. .. data:: photo_leucine L-photo-leucine. Non-canonical amino acid incorporated at leucine positions that links leucine to any other functional group via long-wave UV-light activation. See `Suchanek et al, 2005 `_. .. data:: dsbu dsbu (disuccinimidyl dibutyric urea) cross-linker that links a primary amine with another primary amine (non-water-soluble). Cleavable in the gas phase using collision-induced dissociation. See `Müller et al, 2011 `_. .. data:: phoX PhoX cross-linker that links a primary amine with another primary amine. The spacer group contains a phosphonate group, making the cross-linker IMAC-enrichable. Also known by the name DSPP. See `Steigenberger et al, 2019 `_. .. data:: tbuphoX Tert-butyl PhoX cross-linker. Similar to PhoX, but containing a tert-butyl group that renders the cross-linker cell permeable. See `Jiang et al, 2021 `_. python-ihm-2.7/docs/dataset.rst000066400000000000000000000024511503573337200165770ustar00rootroot00000000000000.. highlight:: rest .. _dataset_module: The :mod:`ihm.dataset` Python module ==================================== .. automodule:: ihm.dataset .. autoclass:: Dataset :members: .. autoclass:: TransformedDataset :members: .. autoclass:: DatasetGroup :members: .. autoclass:: CXMSDataset :members: .. autoclass:: MassSpecDataset :members: .. autoclass:: HDXDataset :members: .. autoclass:: PDBDataset :members: .. autoclass:: ComparativeModelDataset :members: .. autoclass:: IntegrativeModelDataset :members: .. autoclass:: DeNovoModelDataset :members: .. autoclass:: NMRDataset :members: .. autoclass:: MutagenesisDataset :members: .. autoclass:: EMDensityDataset :members: .. autoclass:: EMMicrographsDataset :members: .. autoclass:: EM2DClassDataset :members: .. autoclass:: SASDataset :members: .. autoclass:: FRETDataset :members: .. autoclass:: EnsembleFRETDataset :members: .. autoclass:: YeastTwoHybridDataset :members: .. autoclass:: GeneticInteractionsDataset :members: .. autoclass:: EPRDataset :members: .. autoclass:: XRayDiffractionDataset :members: .. autoclass:: HydroxylRadicalFootprintingDataset :members: .. autoclass:: DNAFootprintingDataset :members: .. autoclass:: PredictedContactsDataset :members: python-ihm-2.7/docs/design.rst000066400000000000000000000074541503573337200164330ustar00rootroot00000000000000Design principles ***************** Lightweight =========== The classes in this package are designed to be lightweight, taking up as little memory as possible. For example, individual atoms are *not* stored in Python classes, and are only requested when needed. This is because the library is designed to work with an existing modeling package, which likely already stores data on the system in its own files or data structures, such that duplicating this information would be very inefficient. Mutable ======= All classes are designed to be *mutable*; that is, their contents can be changed after creation. 
For example, protein chains can be added to or removed from an existing :class:`ihm.Assembly` object, or the amino acid sequence of an :class:`ihm.Entity` can be extended. This because some of the modeling packages which use these classes build up their own data model in a similar way. Types rather than enums ======================= Where the underlying IHM mmCIF dictionary uses an enumeration, generally this corresponds to separate sibling classes in this package. For example, two datasets which differ only in their ``data_type`` `in the dictionary `_ (such as a electron microscopy density map and small angle scattering data) are represented with two classes in this package: :class:`ihm.dataset.EMDensityDataset` and :class:`ihm.dataset.SASDataset`. This cleanly enforces the allowed types in the most Pythonic manner. Hierarchy of classes ==================== The underlying IHM mmCIF dictionary is essentially structured as a set of rows in database tables, with IDs acting as keys or pointers into other tables. This is naturally represented in Python as a hierarchy of classes, with members pointing to other objects as appropriate. IDs are not used to look up other objects, and are only used internally to populate the tables. For example, to group multiple models together, the dictionary assigns all of the models the same `model_group id `_ while in the Python package the :class:`ihm.model.Model` objects are placed into a :class:`ihm.model.ModelGroup` object, which acts like a simple Python list. The table-based representation of the dictionary does allow for objects to exist that are not referenced by other objects, unlike the Python-based hierarchy. Such 'orphan' objects can be referenced from orphan lists in the top-level :class:`ihm.System` if necessary. Equal versus identical objects ============================== Since the Python objects are mutable, can be constructed iteratively by a modeling package, and live in a hierarchy, it can sometimes turn out that two Python objects while not identical (they point to different locations in memory) are equal (their contents are the same). For example, the two :class:`ihm.Assembly` objects, one of proteins A, B, and C, and the other of A, C, and B, are not identical (they are different objects) but are equal (the order of the proteins does not matter). The library will attempt to detect such objects and consolidate them on output, describing both of them in the mmCIF file with the same ID, to avoid meaningless duplication of rows in the output tables. This removes some of the burden from the author of the modeling package, which may not care about such a distinction. mmCIF backend ============= The classes in this package roughly correspond to `categories `_ in the underlying IHM mmCIF dictionary. This allows for simple output of mmCIF formatted files, but also allows for the potential future support for other file formats that support the dictionary or a subset of it, such as `MMTF `_. python-ihm-2.7/docs/dictionary.rst000066400000000000000000000005611503573337200173170ustar00rootroot00000000000000.. highlight:: rest .. _dictionary_module: The :mod:`ihm.dictionary` Python module ======================================= .. automodule:: ihm.dictionary .. autoclass:: Dictionary :members: .. autoclass:: Category :members: .. autoclass:: ItemType :members: .. autoclass:: Keyword :members: .. autofunction:: read .. 
autoexception:: ValidatorError python-ihm-2.7/docs/dumper.rst000066400000000000000000000005111503573337200164410ustar00rootroot00000000000000.. highlight:: rest .. _dumper_module: The :mod:`ihm.dumper` Python module =================================== .. automodule:: ihm.dumper .. autoclass:: Dumper :members: .. autoclass:: Variant :members: .. autoclass:: IHMVariant .. autoclass:: IgnoreVariant .. autofunction:: set_line_wrap .. autofunction:: write python-ihm-2.7/docs/flr.rst000066400000000000000000000034251503573337200157370ustar00rootroot00000000000000.. highlight:: rest .. _flr_module: The :mod:`ihm.flr` Python module ===================================== .. automodule:: ihm.flr .. autoclass:: Probe :members: .. autoclass:: ProbeDescriptor :members: .. autoclass:: ProbeList :members: .. autoclass:: SampleProbeDetails :members: .. autoclass:: PolyProbeConjugate :members: .. autoclass:: PolyProbePosition :members: .. autoclass:: Sample :members: .. autoclass:: EntityAssembly :members: .. autoclass:: SampleCondition :members: .. autoclass:: Experiment :members: .. autoclass:: Instrument :members: .. autoclass:: InstSetting :members: .. autoclass:: ExpCondition :members: .. autoclass:: FRETAnalysis :members: .. autoclass:: LifetimeFitModel :members: .. autoclass:: RefMeasurementGroup :members: .. autoclass:: RefMeasurement :members: .. autoclass:: RefMeasurementLifetime :members: .. autoclass:: FRETDistanceRestraintGroup :members: .. autoclass:: FRETDistanceRestraint :members: .. autoclass:: FRETForsterRadius :members: .. autoclass:: FRETCalibrationParameters :members: .. autoclass:: PeakAssignment :members: .. autoclass:: FRETModelQuality :members: .. autoclass:: FRETModelDistance :members: .. autoclass:: FPSModeling :members: .. autoclass:: FPSGlobalParameters :members: .. autoclass:: FPSAVModeling :members: .. autoclass:: FPSAVParameter :members: .. autoclass:: FPSMPPModeling :members: .. autoclass:: FPSMeanProbePosition :members: .. autoclass:: FPSMPPAtomPositionGroup :members: .. autoclass:: FPSMPPAtomPosition :members: .. autoclass:: KineticRateFretAnalysisConnection :members: .. autoclass:: RelaxationTimeFretAnalysisConnection :members: .. autoclass:: FLRData :members: python-ihm-2.7/docs/format.rst000066400000000000000000000010161503573337200164360ustar00rootroot00000000000000.. highlight:: rest .. _format_module: The :mod:`ihm.format` Python module =================================== .. automodule:: ihm.format .. autoclass:: CifWriter :members: .. autoclass:: CifReader :members: .. autoclass:: CifTokenReader :members: .. autoclass:: Filter :members: .. autoclass:: ChangeValueFilter .. autoclass:: ChangeFuncValueFilter .. autoclass:: RemoveItemFilter .. autoclass:: ChangeKeywordFilter .. autoclass:: ReplaceCategoryFilter .. autoexception:: CifParserError :members: python-ihm-2.7/docs/format_bcif.rst000066400000000000000000000003731503573337200174260ustar00rootroot00000000000000.. highlight:: rest .. _format_bcif_module: The :mod:`ihm.format_bcif` Python module ======================================== .. automodule:: ihm.format_bcif .. autoclass:: BinaryCifWriter :members: .. autoclass:: BinaryCifReader :members: python-ihm-2.7/docs/geometry.rst000066400000000000000000000012351503573337200170040ustar00rootroot00000000000000.. highlight:: rest .. _geometry_module: The :mod:`ihm.geometry` Python module ===================================== .. automodule:: ihm.geometry .. autoclass:: Center :members: .. autoclass:: Transformation :members: .. autoclass:: GeometricObject :members: .. 
autoclass:: Sphere :members: .. autoclass:: Torus :members: .. autoclass:: HalfTorus :members: .. autoclass:: Axis :members: .. autoclass:: XAxis :members: .. autoclass:: YAxis :members: .. autoclass:: ZAxis :members: .. autoclass:: Plane :members: .. autoclass:: XYPlane :members: .. autoclass:: YZPlane :members: .. autoclass:: XZPlane :members: python-ihm-2.7/docs/index.rst000066400000000000000000000020251503573337200162560ustar00rootroot00000000000000Python-IHM documentation ======================== This is a Python package to assist in handling mmCIF files compliant with the integrative/hybrid modeling (IHM) extension. The documentation below documents the library API. For complete worked examples, see `the examples directory at GitHub `_ or real systems deposited using the library, such as `Nup133 `_. Contents ======== .. toctree:: :maxdepth: 2 introduction usage provenance design lowlevel changes API Reference: .. toctree:: :maxdepth: 1 main source reference location dataset metadata startmodel representation geometry restraint cross_linkers citations protocol analysis model format format_bcif dumper reader dictionary flr multi_state_scheme Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` python-ihm-2.7/docs/introduction.rst000066400000000000000000000035631503573337200177000ustar00rootroot00000000000000Introduction ************ This package provides a mechanism to describe an integrative modeling application with a set of Python objects. This includes - the data used for the modeling, such as previous computional models from comparative or integrative modeling, and experimental datasets from X-ray crystallography, mass spectrometry, electron microscopy; - the protocol used to generate models, such as molecular dynamics, clustering, and rescoring; - the actual coordinates of output models, which may be multi-scale (including both atomic coordinates and more coarse-grained representations), multi-state (multiple conformations and/or compositions of the system needed to explain the input data), or ordered (such as different points in a chemical reaction); - grouping of multiple models into ensembles or clusters; - validation of models, for example by scoring against data not used in the modeling itself. Once created, this set of Python objects can be written to an mmCIF file that is compliant with the `IHMCIF extension `_ to the `PDBx/mmCIF dictionary `_, suitable for deposition in the `PDB-IHM repository `_. The files are best viewed in a viewer that supports IHMCIF, such as `UCSF ChimeraX `_, although they may be partially viewable in regular PDBx mmCIF viewers (likely only the atomic coordinates will be visible). The Python package can be used standalone, but is primarily intended for use within modeling software such as `IMP `_, or `HADDOCK `_. For example, IMP provides `a class `_ which uses this library to convert an IMP::pmi modeling protocol into an mmCIF file. python-ihm-2.7/docs/location.rst000066400000000000000000000023161503573337200167620ustar00rootroot00000000000000.. highlight:: rest .. _location_module: The :mod:`ihm.location` Python module ===================================== .. automodule:: ihm.location .. autoclass:: Location :members: .. autoclass:: DatabaseLocation :members: .. autoclass:: EMDBLocation :members: .. autoclass:: PDBLocation :members: .. autoclass:: PDBDevLocation :members: .. autoclass:: ModelArchiveLocation :members: .. autoclass:: BMRBLocation :members: .. autoclass:: MassIVELocation :members: .. 
autoclass:: EMPIARLocation :members: .. autoclass:: SASBDBLocation :members: .. autoclass:: PRIDELocation :members: .. autoclass:: JPOSTLocation :members: .. autoclass:: BioGRIDLocation :members: .. autoclass:: ProXLLocation :members: .. autoclass:: IProXLocation :members: .. autoclass:: AlphaFoldDBLocation :members: .. autoclass:: ProteomeXchangeLocation :members: .. autoclass:: BMRbigLocation :members: .. autoclass:: FileLocation :members: .. autoclass:: InputFileLocation :members: .. autoclass:: OutputFileLocation :members: .. autoclass:: WorkflowFileLocation :members: .. autoclass:: VisualizationFileLocation :members: .. autoclass:: Repository :members: python-ihm-2.7/docs/lowlevel.rst000066400000000000000000000006601503573337200170030ustar00rootroot00000000000000Low-level usage *************** The library can also be used at a lower level, to extract a subset of data from an mmCIF file. This can be done in either C or Python code. For more information, see the :mod:`ihm.format` module, or the `atom_reader.c `_ or `stream_parser.py `_ examples. python-ihm-2.7/docs/main.rst000066400000000000000000000036361503573337200161040ustar00rootroot00000000000000.. highlight:: rest .. _main_module: The :mod:`ihm` Python module =================================== .. automodule:: ihm .. autodata:: unknown .. autoclass:: System :members: .. autoclass:: DatabaseStatus :members: .. autoclass:: Database :members: .. autoclass:: Software :members: .. autoclass:: Citation :members: .. autoclass:: Grant :members: .. autoclass:: ChemComp :members: .. autoclass:: PeptideChemComp :members: .. autoclass:: LPeptideChemComp :members: .. autoclass:: DPeptideChemComp :members: .. autoclass:: RNAChemComp :members: .. autoclass:: DNAChemComp :members: .. autoclass:: SaccharideChemComp :members: .. autoclass:: LSaccharideChemComp :members: .. autoclass:: LSaccharideAlphaChemComp :members: .. autoclass:: LSaccharideBetaChemComp :members: .. autoclass:: DSaccharideChemComp :members: .. autoclass:: DSaccharideAlphaChemComp :members: .. autoclass:: DSaccharideBetaChemComp :members: .. autoclass:: NonPolymerChemComp :members: .. autoclass:: WaterChemComp :members: .. autoclass:: Alphabet :members: .. autoclass:: LPeptideAlphabet :members: .. autoclass:: DPeptideAlphabet :members: .. autoclass:: RNAAlphabet :members: .. autoclass:: DNAAlphabet :members: .. autoclass:: Entity :members: .. autoclass:: EntityRange :members: .. autoclass:: AsymUnit :members: .. autoclass:: AsymUnitRange :members: .. autoclass:: WaterAsymUnit :members: .. autoclass:: Atom :members: .. autoclass:: Residue :members: .. autoclass:: Assembly :members: .. autoclass:: ChemDescriptor :members: .. autoclass:: Collection :members: .. autoclass:: BranchDescriptor :members: .. autoclass:: BranchLink :members: .. autoclass:: DataUsage :members: .. autoclass:: License .. autoclass:: Disclaimer .. autoclass:: Revision :members: .. autoclass:: RevisionDetails :members: python-ihm-2.7/docs/make.bat000066400000000000000000000014561503573337200160310ustar00rootroot00000000000000@ECHO OFF pushd %~dp0 REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set SOURCEDIR=. set BUILDDIR=_build set SPHINXPROJ=Python-IHM if "%1" == "" goto help %SPHINXBUILD% >NUL 2>NUL if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. 
Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% goto end :help %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% :end popd python-ihm-2.7/docs/metadata.rst000066400000000000000000000005331503573337200167310ustar00rootroot00000000000000.. highlight:: rest .. _metadata_module: The :mod:`ihm.metadata` Python module ===================================== .. automodule:: ihm.metadata .. autoclass:: Parser :members: .. autoclass:: MRCParser :members: .. autoclass:: PDBParser :members: .. autoclass:: CIFParser :members: .. autoclass:: BinaryCIFParser :members: python-ihm-2.7/docs/model.rst000066400000000000000000000015141503573337200162510ustar00rootroot00000000000000.. highlight:: rest .. _model_module: The :mod:`ihm.model` Python module ================================== .. automodule:: ihm.model .. autoclass:: Sphere :members: .. autoclass:: Atom :members: .. autoclass:: Model :members: .. autoclass:: ModelRepresentative :members: .. autoclass:: ModelGroup :members: .. autoclass:: State :members: .. autoclass:: StateGroup :members: .. autoclass:: Ensemble :members: .. autoclass:: NotModeledResidueRange :members: .. autoclass:: OrderedProcess :members: .. autoclass:: ProcessStep :members: .. autoclass:: ProcessEdge :members: .. autoclass:: LocalizationDensity :members: .. autoclass:: DCDWriter :members: .. autoclass:: Subsample :members: .. autoclass:: RandomSubsample :members: .. autoclass:: IndependentSubsample :members: python-ihm-2.7/docs/multi_state_scheme.rst000066400000000000000000000010221503573337200210210ustar00rootroot00000000000000.. highlight:: rest .. _multi_state_scheme_module: The :mod:`ihm.multi_state_scheme` Python module =============================================== .. automodule:: ihm.multi_state_scheme .. autoclass:: MultiStateScheme :members: .. autoclass:: Connectivity :members: .. autoclass:: EquilibriumConstant :members: .. autoclass:: PopulationEquilibriumConstant :members: .. autoclass:: KineticRateEquilibriumConstant :members: .. autoclass:: KineticRate :members: .. autoclass:: RelaxationTime :members: python-ihm-2.7/docs/protocol.rst000066400000000000000000000003351503573337200170120ustar00rootroot00000000000000.. highlight:: rest .. _protocol_module: The :mod:`ihm.protocol` Python module ===================================== .. automodule:: ihm.protocol .. autoclass:: Step :members: .. autoclass:: Protocol :members: python-ihm-2.7/docs/provenance.rst000066400000000000000000000032331503573337200173110ustar00rootroot00000000000000Provenance ********** The IHM dictionary is designed to capture all aspects of integrative modeling, from the original deposited experimental data to the final validated models. This allows for maximum reproducibility and resuability. However, many modeling packages are only concerned with the conversion of their own inputs to output models (for example, a model of a complex may be generated by docking comparative models guided by some experimental data of the entire complex). If only this last step of the procedure is captured in the output mmCIF file (in this case, without any information on how the comparative models were themselves obtained) the chain is broken and the outputs cannot be reproduced. 
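Where that history is known, the library can record it explicitly by linking each dataset to the dataset(s) it was derived from; a minimal sketch (the accession code and file name below are made up)::

    import ihm.location
    import ihm.dataset

    # The comparative model used as modeling input was itself built
    # from a template structure deposited in PDB
    template = ihm.dataset.PDBDataset(ihm.location.PDBLocation('1abc'))
    comparative_model = ihm.dataset.ComparativeModelDataset(
        ihm.location.InputFileLocation('comparative_model.pdb'))
    comparative_model.parents.append(template)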
One solution to this problem is to diligently ensure that every input to the modeling has been deposited in an appropriate database and always refer to inputs using :class:`ihm.location.DatabaseLocation`. In cases where this is not possible, the library provides some metadata parsers in the :mod:`ihm.metadata` module. These will make a best effort to extract any metadata from files available on the local hard drive to better describe their provenance. For example, if the file contains headers or other information that shows that it is merely a copy of a file deposited in an official database, the metadata parsers will return a suitable :class:`~ihm.location.DatabaseLocation` for the dataset. Other information, such as the software used to generate the file, may be available in the metadata. For more details, see :class:`ihm.metadata.MRCParser` for electron microscopy density maps (MRC files) or :class:`ihm.metadata.PDBParser` for coordinate files in PDB format. python-ihm-2.7/docs/reader.rst000066400000000000000000000010221503573337200164050ustar00rootroot00000000000000.. highlight:: rest .. _reader_module: The :mod:`ihm.reader` Python module =================================== .. automodule:: ihm.reader .. autofunction:: read .. autoexception:: UnknownCategoryWarning .. autoexception:: UnknownKeywordWarning .. autoexception:: OldFileError .. autoclass:: Handler :members: .. autoclass:: SystemReader :members: :inherited-members: .. autoclass:: IDMapper :members: .. autoclass:: RangeIDMapper :members: .. autoclass:: Variant :members: .. autoclass:: IHMVariant python-ihm-2.7/docs/reference.rst000066400000000000000000000005361503573337200171120ustar00rootroot00000000000000.. highlight:: rest .. _reference_module: The :mod:`ihm.reference` Python module ====================================== .. automodule:: ihm.reference .. autoclass:: Reference :members: .. autoclass:: Sequence :members: .. autoclass:: UniProtSequence :members: .. autoclass:: Alignment :members: .. autoclass:: SeqDif :members: python-ihm-2.7/docs/representation.rst000066400000000000000000000006621503573337200202160ustar00rootroot00000000000000.. highlight:: rest .. _representation_module: The :mod:`ihm.representation` Python module =========================================== .. automodule:: ihm.representation .. autoclass:: Segment :members: .. autoclass:: AtomicSegment :members: .. autoclass:: ResidueSegment :members: .. autoclass:: MultiResidueSegment :members: .. autoclass:: FeatureSegment :members: .. autoclass:: Representation :members: python-ihm-2.7/docs/restraint.rst000066400000000000000000000034301503573337200171630ustar00rootroot00000000000000.. highlight:: rest .. _restraint_module: The :mod:`ihm.restraint` Python module ====================================== .. automodule:: ihm.restraint .. autoclass:: PseudoSite :members: .. autoclass:: Restraint :members: .. autoclass:: RestraintGroup :members: .. autoclass:: EM3DRestraint :members: .. autoclass:: EM3DRestraintFit :members: .. autoclass:: EM2DRestraint :members: .. autoclass:: EM2DRestraintFit :members: .. autoclass:: SASRestraint :members: .. autoclass:: SASRestraintFit :members: .. autoclass:: DistanceRestraint :members: .. autoclass:: HarmonicDistanceRestraint :members: .. autoclass:: UpperBoundDistanceRestraint :members: .. autoclass:: LowerBoundDistanceRestraint :members: .. autoclass:: LowerUpperBoundDistanceRestraint :members: .. autoclass:: CrossLinkRestraint :members: .. autoclass:: ExperimentalCrossLink :members: .. 
autoclass:: CrossLinkPseudoSite :members: .. autoclass:: CrossLink :members: .. autoclass:: ResidueCrossLink :members: .. autoclass:: FeatureCrossLink :members: .. autoclass:: AtomCrossLink :members: .. autoclass:: CrossLinkFit :members: .. autoclass:: CrossLinkGroupFit :members: .. autoclass:: Feature :members: .. autoclass:: ResidueFeature :members: .. autoclass:: AtomFeature :members: .. autoclass:: NonPolyFeature :members: .. autoclass:: PseudoSiteFeature :members: .. autoclass:: GeometricRestraint :members: .. autoclass:: CenterGeometricRestraint :members: .. autoclass:: InnerSurfaceGeometricRestraint :members: .. autoclass:: OuterSurfaceGeometricRestraint :members: .. autoclass:: DerivedDistanceRestraint :members: .. autoclass:: PredictedContactRestraint :members: .. autoclass:: HDXRestraint :members: python-ihm-2.7/docs/source.rst000066400000000000000000000005151503573337200164510ustar00rootroot00000000000000.. highlight:: rest .. _source_module: The :mod:`ihm.source` Python module ===================================== .. automodule:: ihm.source .. autoclass:: Source :members: .. autoclass:: Details :members: .. autoclass:: Manipulated :members: .. autoclass:: Natural :members: .. autoclass:: Synthetic :members: python-ihm-2.7/docs/startmodel.rst000066400000000000000000000007061503573337200173310ustar00rootroot00000000000000.. highlight:: rest .. _startmodel_module: The :mod:`ihm.startmodel` Python module ======================================= .. automodule:: ihm.startmodel .. autoclass:: SequenceIdentityDenominator :members: .. autoclass:: SequenceIdentity :members: .. autoclass:: Template :members: .. autoclass:: StartingModel :members: .. autoclass:: PDBHelix :members: .. autoclass:: SeqDif :members: .. autoclass:: MSESeqDif :members: python-ihm-2.7/docs/usage.rst000066400000000000000000000206201503573337200162540ustar00rootroot00000000000000Usage ***** Usage of the library for output consists of first creating a hierarchy of Python objects that together describe the system, and then dumping that hierarchy to an mmCIF file. For a complete worked example, see the `simple docking example `_. The top level of the hierarchy in IHM is the :class:`ihm.System`. All other objects are referenced from a System object. Datasets ======== Any data used anywhere in the modeling (including in validation) can be referenced with an :class:`ihm.dataset.Dataset`. For example, electron microscopy data is referenced with :class:`ihm.dataset.EMDensityDataset` and small angle scattering data with :class:`ihm.dataset.SASDataset`. A dataset uses an :class:`ihm.location.Location` object to describe where it is stored. Typically this is an :class:`ihm.location.DatabaseLocation` for something that's deposited in a experiment-specific database such as PDB, EMDB, PRIDE, or EMPIAR, or :class:`ihm.location.InputFileLocation` for something that's stored as a simple file, either on the local disk or at a location described with a DOI such as `Zenodo `_ or a publication's supplementary information. See the `locations example `_ for more examples. System architecture =================== The architecture of the system is described with a number of classes: - :class:`ihm.Entity` describes each unique sequence. - :class:`ihm.AsymUnit` describes each asymmetric unit (chain) in the system. For example, a homodimer would consist of two asymmetric units, both pointing to the same entity, while a heterodimer contains two entities. 
It is also possible for an entity to exist with no asymmetric units pointing to it - this typically corresponds to something seen in an experiment (such as a cross-linking study) which was not modeled. Note that the IHM extension currently contains no support for symmetry, so two chains that are symmetrically related should each be represented as an "asymmetric" unit. - :class:`ihm.Assembly` groups asymmetric units and/or entities, or parts of them. Assemblies are used to describe which parts of the system correspond to each input source of data, or that were modeled. - :class:`ihm.representation.Representation` describes how each part of the system was represented in the modeling, for example :class:`as atoms ` or :class:`as coarse-grained spheres `. Restraints and sampling ======================= Restraints, that score or otherwise fit the computational model against the input data, can be created as :class:`ihm.restraint.Restraint` objects. These generally take as input a :class:`~ihm.dataset.Dataset` pointing to the input data, and an :class:`~ihm.Assembly` describing which part of the model the data corresponds to. For example, there are restraints for :class:`3D EM ` and :class:`small angle scattering `. :class:`ihm.protocol.Protocol` objects describe how models were generated from the input data. A protocol can consist of :class:`multiple steps `, such as molecular dynamics or Monte Carlo, followed by one or more analyses, such as clustering, filtering, rescoring, or validation, described by :class:`ihm.analysis.Analysis` objects. These objects generally take an :class:`~ihm.Assembly` to indicate what part of the system was considered and a :class:`group of datasets ` to show which data guided the modeling or analysis. Model coordinates ================= :class:`ihm.model.Model` objects give the actual coordinates of the final generated models. These point to the :class:`~ihm.Assembly` of what was modeled, the :class:`~ihm.protocol.Protocol` describing how the modeling was done, and the :class:`~ihm.representation.Representation` showing how the model was represented. Models can be grouped together for any purpose using the :class:`ihm.model.ModelGroup` class. If a given group describes an ensemble of models, the :class:`ihm.model.Ensemble` class allows for additional information on the ensemble to be provided, such as :class:`localization densities ` of parts of the system and precision. Due to size, generally only representative models of an ensemble are deposited in mmCIF, but the :class:`~ihm.model.Ensemble` class allows the full ensemble to be referred to, for example in a more compact binary format (e.g. DCD) deposited at a given DOI. Groups of models can also be shown as corresponding to different states of the system using the :class:`ihm.model.State` class. Metadata ======== Metadata can also be added to the system, such as - :class:`ihm.Citation`: publication(s) that describe this modeling or the methods used in it. - :class:`ihm.Software`: software packages used to process the experimental data, generate intermediate inputs, do the modeling itself, and/or process the output. - :class:`ihm.Grant`: funding support for the modeling. - :class:`ihm.reference.UniProtSequence`: information on a sequence used in modeling, in UniProt. Residue numbering ================= The library keeps track of several numbering schemes to reflect the reality of the data used in modeling: - *Internal numbering*. Residues are always numbered sequentially starting at 1 in an :class:`~ihm.Entity`. 
All references to residues or residue ranges in the library use this numbering. For polymers, this internal numbering matches the ``seq_id`` used in the mmCIF dictionary, while for branched entities, this matches ``num`` in the dictionary. (For other types of entities (non-polymers, waters) ``seq_id`` is not used in mmCIF, but the residues are still numbered sequentially from 1 in this library.) - *Author-provided numbering*. If a different numbering scheme is used by the authors, for example to correspond to the numbering of the original sequence that is modeled, this can be given as an author-provided numbering for one or more asymmetric units. See the ``auth_seq_id_map`` and ``orig_auth_seq_id_map`` parameters to :class:`~ihm.AsymUnit`. (The mapping between author-provided and internal numbering is given in tables such as ``pdbx_poly_seq_scheme`` in the mmCIF file.) Two maps are provided as PDB provides for two distinct author-provided schemes; the "original" author-provided numbering ``orig_auth_seq_id_map`` is entirely unrestricted but is only used internally, while ``auth_seq_id_map`` must follow certain PDB rules (and generally matches the residue numbers used in legacy PDB files). In most cases, only ``auth_seq_id_map`` is used. - *Starting model numbering*. If the initial state of the modeling is given by one or more PDB files, the numbering of residues in those files may not line up with the internal numbering. In this case an offset from starting model numbering to internal numbering can be provided - see the ``offset`` parameter to :class:`~ihm.startmodel.StartingModel`. - *Reference sequence numbering*. The modeled sequence may differ from that in a database such as UniProt, which is itself numbered sequentially from 1 (for example, the modeled sequence may be a subset of the UniProt sequence, such that the first modeled residue is not the first residue in UniProt). The correspondence between the internal and reference sequences is given with :class:`ihm.reference.Alignment` objects. Output ====== Once the hierarchy of classes is complete, it can be freely inspected or modified. All the classes are simple lightweight Python objects, generally with the relevant data available as member variables. For example, modeling packages such as `IMP `_ will typically generate an IHM hierarchy from their own internal data models, but in many cases some information relevant to IHM (such as the :class:`associated publication `) cannot be determined automatically and can be filled in by adding more objects to the hierarchy. The complete hierarchy can be written out to an mmCIF or BinaryCIF file using the :func:`ihm.dumper.write` function. Input ===== Hierarchies of IHM classes can also be read from mmCIF or BinaryCIF files. This is done using the :func:`ihm.reader.read` function, which returns a list of :class:`ihm.System` objects. python-ihm-2.7/examples/000077500000000000000000000000001503573337200153045ustar00rootroot00000000000000python-ihm-2.7/examples/atom_reader.c000066400000000000000000000070311503573337200177330ustar00rootroot00000000000000/* This is a simple demonstration of using the C mmCIF parser directly from C code. It will read the named mmCIF file and print the name and coordinates of each atom in the file. It is probably most instructive to read the comments in this file starting at the bottom (main function) and working back up. 
Compile with something like gcc -g -Wall atom_reader.c ../src/ihm_format.c -I ../src/ -o atom_reader */ #include #include #include #include #include "ihm_format.h" /* Data that is passed to our callback function */ struct atom_site_data { struct ihm_keyword *id, *x, *y, *z; }; /* Callback function called for each data item in atom_site */ static void atom_site_handler(struct ihm_reader *reader, void *data, struct ihm_error **err) { struct atom_site_data *ad = data; /* Here we assume that data is actually present in the file for each keyword. More generally, we should query the in_file, omitted, and unknown flags in the ihm_keyword struct to handle missing keywords or those that have the '.' or '?' values, respectively */ printf("Atom %s at %s,%s,%s\n", ad->id->data, ad->x->data, ad->y->data, ad->z->data); } /* Register a callback function with the ihm_reader to handle the atom_site category */ static void add_atom_site_handler(struct ihm_reader *reader) { struct atom_site_data *data = malloc(sizeof(struct atom_site_data)); /* Register a callback for the atom_site category. 'data' will be passed to it (and 'data' will be freed with 'free' when we're done) */ struct ihm_category *c = ihm_category_new(reader, "_atom_site", atom_site_handler, NULL, NULL, data, free); /* Ask the reader to extract a set of keywords from the atom_site category. ihm_keywords are stored in the ihm_category and are automatically freed when no longer needed. The actual values are stored in the ihm_keyword objects, so we give our callback a pointer to each one so it can get the values. */ data->id = ihm_keyword_new(c, "label_atom_id"); data->x = ihm_keyword_new(c, "cartn_x"); data->y = ihm_keyword_new(c, "cartn_y"); data->z = ihm_keyword_new(c, "cartn_z"); } static void read_mmcif_filedesc(int fd) { int more_data; /* IHM error indicator. NULL corresponds to no error. If a function fails this will be set to non-NULL */ struct ihm_error *err = NULL; /* Point an ihm_reader object to the file */ struct ihm_file *fh = ihm_file_new_from_fd(fd); struct ihm_reader *reader = ihm_reader_new(fh); /* Add callback functions that will handle file data */ add_atom_site_handler(reader); /* Actually read the file. more_data will be set TRUE on return iff the file contains more data blocks after this one. */ if (!ihm_read_file(reader, &more_data, &err)) { fprintf(stderr, "IHM error: %s\n", err->msg); ihm_error_free(err); ihm_reader_free(reader); exit(1); } ihm_reader_free(reader); } static void read_mmcif_filename(const char *fname) { int fd; printf("Reading atoms from %s\n", fname); fd = open(fname, O_RDONLY); if (fd >= 0) { read_mmcif_filedesc(fd); close(fd); } else { fprintf(stderr, "Could not open %s: %s\n", fname, strerror(errno)); exit(1); } } int main(int argc, char *argv[]) { if (argc != 2) { fprintf(stderr, "Usage: atom_reader filename.cif\n"); return 1; } read_mmcif_filename(argv[1]); return 0; } python-ihm-2.7/examples/ligands_water.py000066400000000000000000000073411503573337200205060ustar00rootroot00000000000000# This example demonstrates how non-polymeric entities (ligands, water) # are handled by the Python IHM library. See the simple-docking.py example # for an introduction to the library. 
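# Before the full example below, a minimal sketch of the core idea: each
# chemically distinct non-polymer is represented by its own Entity, built
# from a single ChemComp rather than a polymer sequence. (Zinc is used here
# purely as an illustration and is not part of the example that follows.)
import ihm
_zn_comp = ihm.NonPolymerChemComp("ZN", name='ZINC ION', formula='Zn')
_zn_entity = ihm.Entity([_zn_comp], description='Zinc ion')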
import ihm import ihm.dumper import ihm.protocol import ihm.representation import ihm.model system = ihm.System() # An entity corresponding to an amino acid (polyalanine) sequence entity_protein = ihm.Entity('AAA', description='Subunit A') # An entity corresponding to an RNA sequence entity_rna = ihm.Entity('ACG', alphabet=ihm.RNAAlphabet, description='RNA chain') # An entity corresponding to a DNA sequence entity_dna = ihm.Entity(['DA', 'DC', 'DG'], alphabet=ihm.DNAAlphabet, description='DNA chain') # Non-polymers such as ligands or water should each live in their own Entity: # A ligand entity (in this case, heme) heme = ihm.NonPolymerChemComp("HEM", name='PROTOPORPHYRIN IX CONTAINING FE', formula='C34 H32 Fe N4 O4') entity_heme = ihm.Entity([heme], description='Heme') # Water entity_h2o = ihm.Entity([ihm.WaterChemComp()], description='Water') system.entities.extend((entity_protein, entity_rna, entity_dna, entity_heme, entity_h2o)) # Next, we define asymmetric units for everything we modeled. # Here, we have a single instance of each protein, RNA and DNA, two hemes, # plus crystal waters. Note that waters must use the WaterAsymUnit class rather # than AsymUnit, as the number of waters in the unit must be specified. asym_protein = ihm.AsymUnit(entity_protein, details='Subunit A') asym_rna = ihm.AsymUnit(entity_rna, details='RNA chain') asym_dna = ihm.AsymUnit(entity_dna, details='DNA chain') asym_heme1 = ihm.AsymUnit(entity_heme, details='First heme') asym_heme2 = ihm.AsymUnit(entity_heme, details='Second heme') asym_h2o = ihm.WaterAsymUnit(entity_h2o, number=2, details='Crystal waters') system.asym_units.extend((asym_protein, asym_rna, asym_dna, asym_heme1, asym_heme2, asym_h2o)) # Just as in the simple-docking.py example, we can add models with coordinates. # Here we define an atomic model containing just the two hemes and the water. assembly = ihm.Assembly((asym_heme1, asym_heme2, asym_h2o), name="Modeled assembly") rep = ihm.representation.Representation( [ihm.representation.AtomicSegment(asym_heme1, rigid=False), ihm.representation.AtomicSegment(asym_heme2, rigid=False), ihm.representation.AtomicSegment(asym_h2o, rigid=False)]) protocol = ihm.protocol.Protocol(name='Modeling') class MyModel(ihm.model.Model): def get_atoms(self): # seq_id only makes sense for polymers and waters; # for ligands it should be None yield ihm.model.Atom(asym_unit=asym_heme1, type_symbol='FE', het=True, seq_id=None, atom_id='FE', x=0., y=0., z=0.) yield ihm.model.Atom(asym_unit=asym_heme2, type_symbol='FE', het=True, seq_id=None, atom_id='FE', x=10., y=10., z=10.) yield ihm.model.Atom(asym_unit=asym_h2o, type_symbol='O', het=True, seq_id=1, atom_id='O', x=20., y=20., z=20.) yield ihm.model.Atom(asym_unit=asym_h2o, type_symbol='O', het=True, seq_id=2, atom_id='O', x=30., y=30., z=30.) # We have only a single model in a single state: model = MyModel(assembly=assembly, protocol=protocol, representation=rep, name='Best model') model_group = ihm.model.ModelGroup([model], name='All models') state = ihm.model.State([model_group]) system.state_groups.append(ihm.model.StateGroup([state])) # Once the system is complete, we can write it out to an mmCIF file: with open('output.cif', 'w') as fh: ihm.dumper.write(fh, [system]) python-ihm-2.7/examples/locations.py000066400000000000000000000062351503573337200176570ustar00rootroot00000000000000# This example demonstrates a variety of ways an IHM mmCIF file can point # to external resources, such as script files, modeling trajectories, or # electron microscopy density maps. 
These may be too big to fit efficiently # in the mmCIF file, or may already be deposited in an experiment-specific # database (so it makes no sense to copy them). import ihm.location import ihm.dataset import ihm.dumper system = ihm.System() # To point to an external file, we use one of the classes in the ihm.location # module. Here we reference this Python script itself on the local disk (output # paths in the mmCIF file will be relative to the current working directory): loc = ihm.location.WorkflowFileLocation( "locations.py", details="The Python script used to generate this mmCIF file") # Add the location to the system, so it gets output to the mmCIF file system.locations.append(loc) # For public mmCIF files, external files need to also be in a public location, # for example, in an archive file stored at a service such as Zenodo that # assigns a DOI. To handle this, we use a Repository object: r = ihm.location.Repository( doi='10.5281/zenodo.820724', url='https://zenodo.org/record/820724/files/archive.zip') loc = ihm.location.OutputFileLocation("densities/subunitA.mrc", repo=r) system.locations.append(loc) # Users of the mmCIF can then obtain the file subunitA.mrc by downloading # archive.zip from the given DOI or URL, unzipping it, and then looking in the # densities directory. Multiple files can share the same repository. # Note that this URL is for example purposes only (there isn't really an # 'archive.zip' at that DOI). # Datasets are the most common users of external files. For example, to refer # to an input PDB file in the current directory: loc = ihm.location.InputFileLocation("simple.pdb", details="Input PDB file") d = ihm.dataset.PDBDataset(loc) # Add the dataset to the mmCIF file. (Normally a dataset would be added to the # object that uses it, such as a restraint. If we want to include a dataset # that isn't referenced from anything else, as in this example, we can add it # to the 'orphan' list.) system.orphan_datasets.append(d) # Generally, datasets will be deposited in an experiment-specific database. # We can point to such a database using a subclass of DatabaseLocation, for # example to point to PDB: loc = ihm.location.PDBLocation('1abc') system.orphan_datasets.append(ihm.dataset.PDBDataset(loc)) # If the current working directory is itself a checkout of a repository which # is archived at a DOI, we can retroactively update all 'local' paths added # above to point to this DOI. After calling update_locations_in_repositories(), # all files under the parent directory (..) are assumed to be available in # the python-ihm.zip archive. For example, simple.pdb can be found as # python-ihm-v0.1/examples/simple.pdb in the archive. r = ihm.location.Repository( doi='10.5281/zenodo.802915', url='https://zenodo.org/record/802915/files/python-ihm.zip', top_directory="python-ihm-v0.1", root="..") system.update_locations_in_repositories([r]) # Write out everything to an mmCIF file with open('output.cif', 'w') as fh: ihm.dumper.write(fh, [system]) python-ihm-2.7/examples/mini.cif000066400000000000000000000130351503573337200167250ustar00rootroot00000000000000data_model # _exptl.method 'model, MODELLER Version 9.24 2020/08/21 11:54:31' # _modeller.version 9.24 # loop_ _struct_asym.id _struct_asym.entity_id _struct_asym.details A 1 ? B 2 ? 
# loop_ _entity_poly_seq.entity_id _entity_poly_seq.num _entity_poly_seq.mon_id 1 1 VAL 1 2 GLY 1 3 GLN 1 4 GLN 1 5 TYR 1 6 SER 1 7 SER 2 1 ASP 2 2 GLU # loop_ _atom_site.group_PDB _atom_site.type_symbol _atom_site.label_atom_id _atom_site.label_alt_id _atom_site.label_comp_id _atom_site.label_asym_id _atom_site.auth_asym_id _atom_site.label_seq_id _atom_site.auth_seq_id _atom_site.pdbx_PDB_ins_code _atom_site.Cartn_x _atom_site.Cartn_y _atom_site.Cartn_z _atom_site.occupancy _atom_site.B_iso_or_equiv _atom_site.label_entity_id _atom_site.id _atom_site.pdbx_PDB_model_num ATOM N N . VAL A A 1 2 ? 115.846 27.965 -26.370 1.000 141.830 1 1 1 ATOM C CA . VAL A A 1 2 ? 114.370 27.980 -26.088 1.000 143.490 1 2 1 ATOM C C . VAL A A 1 2 ? 113.517 27.504 -27.287 1.000 143.910 1 3 1 ATOM O O . VAL A A 1 2 ? 113.885 27.746 -28.441 1.000 146.600 1 4 1 ATOM C CB . VAL A A 1 2 ? 113.901 29.406 -25.683 1.000 143.750 1 5 1 ATOM C CG1 . VAL A A 1 2 ? 115.030 30.438 -25.931 1.000 144.590 1 6 1 ATOM C CG2 . VAL A A 1 2 ? 112.669 29.783 -26.486 1.000 144.500 1 7 1 ATOM N N . GLY A A 2 3 ? 112.371 26.869 -27.012 1.000 142.200 1 8 1 ATOM C CA . GLY A A 2 3 ? 111.506 26.368 -28.075 1.000 137.530 1 9 1 ATOM C C . GLY A A 2 3 ? 111.719 24.869 -28.275 1.000 135.820 1 10 1 ATOM O O . GLY A A 2 3 ? 110.768 24.093 -28.268 1.000 134.380 1 11 1 ATOM N N . GLN A A 3 4 ? 112.989 24.479 -28.392 1.000 134.310 1 12 1 ATOM C CA . GLN A A 3 4 ? 113.468 23.113 -28.639 1.000 128.420 1 13 1 ATOM C C . GLN A A 3 4 ? 113.556 22.956 -30.163 1.000 121.240 1 14 1 ATOM O O . GLN A A 3 4 ? 113.552 23.977 -30.840 1.000 127.090 1 15 1 ATOM C CB . GLN A A 3 4 ? 112.614 22.038 -27.919 1.000 132.340 1 16 1 ATOM C CG . GLN A A 3 4 ? 113.028 21.943 -26.407 1.000 135.370 1 17 1 ATOM C CD . GLN A A 3 4 ? 112.604 20.667 -25.677 1.000 138.260 1 18 1 ATOM O OE1 . GLN A A 3 4 ? 112.836 19.543 -26.150 1.000 141.450 1 19 1 ATOM N NE2 . GLN A A 3 4 ? 112.006 20.839 -24.497 1.000 139.310 1 20 1 ATOM N N . GLN A A 4 5 ? 113.648 21.739 -30.710 1.000 124.970 1 21 1 ATOM C CA . GLN A A 4 5 ? 113.808 21.534 -32.168 1.000 117.620 1 22 1 ATOM C C . GLN A A 4 5 ? 114.778 22.519 -32.833 1.000 112.980 1 23 1 ATOM O O . GLN A A 4 5 ? 114.677 23.727 -32.677 1.000 116.850 1 24 1 ATOM C CB . GLN A A 4 5 ? 112.456 21.545 -32.905 1.000 121.870 1 25 1 ATOM C CG . GLN A A 4 5 ? 111.763 20.153 -32.917 1.000 123.750 1 26 1 ATOM C CD . GLN A A 4 5 ? 110.863 19.874 -34.145 1.000 123.650 1 27 1 ATOM O OE1 . GLN A A 4 5 ? 110.040 20.712 -34.537 1.000 122.500 1 28 1 ATOM N NE2 . GLN A A 4 5 ? 111.008 18.674 -34.737 1.000 122.090 1 29 1 ATOM N N . TYR A A 5 6 ? 115.713 21.980 -33.598 1.000 109.460 1 30 1 ATOM C CA . TYR A A 5 6 ? 116.743 22.770 -34.259 1.000 103.700 1 31 1 ATOM C C . TYR A A 5 6 ? 116.348 23.366 -35.602 1.000 100.320 1 32 1 ATOM O O . TYR A A 5 6 ? 115.530 22.799 -36.311 1.000 98.760 1 33 1 ATOM C CB . TYR A A 5 6 ? 117.973 21.876 -34.402 1.000 104.580 1 34 1 ATOM C CG . TYR A A 5 6 ? 119.003 22.282 -35.425 1.000 105.030 1 35 1 ATOM C CD1 . TYR A A 5 6 ? 119.591 23.546 -35.395 1.000 106.020 1 36 1 ATOM C CD2 . TYR A A 5 6 ? 119.450 21.366 -36.380 1.000 105.180 1 37 1 ATOM C CE1 . TYR A A 5 6 ? 120.606 23.890 -36.289 1.000 106.990 1 38 1 ATOM C CE2 . TYR A A 5 6 ? 120.461 21.694 -37.276 1.000 106.420 1 39 1 ATOM C CZ . TYR A A 5 6 ? 121.039 22.958 -37.226 1.000 107.110 1 40 1 ATOM O OH . TYR A A 5 6 ? 122.057 23.290 -38.095 1.000 107.500 1 41 1 ATOM N N . SER A A 6 7 ? 116.921 24.519 -35.944 1.000 96.290 1 42 1 ATOM C CA . SER A A 6 7 ? 
116.626 25.161 -37.229 1.000 93.490 1 43 1 ATOM C C . SER A A 6 7 ? 117.900 25.595 -37.944 1.000 91.900 1 44 1 ATOM O O . SER A A 6 7 ? 118.767 26.246 -37.352 1.000 91.810 1 45 1 ATOM C CB . SER A A 6 7 ? 115.732 26.388 -37.048 1.000 93.090 1 46 1 ATOM O OG . SER A A 6 7 ? 116.503 27.521 -36.705 1.000 92.330 1 47 1 ATOM N N . SER A A 7 8 ? 117.999 25.245 -39.224 1.000 89.750 1 48 1 ATOM C CA . SER A A 7 8 ? 119.165 25.590 -40.036 1.000 87.320 1 49 1 ATOM C C . SER A A 7 8 ? 119.224 27.089 -40.277 1.000 84.820 1 50 1 ATOM O O . SER A A 7 8 ? 120.074 27.594 -41.008 1.000 84.020 1 51 1 ATOM C CB . SER A A 7 8 ? 119.112 24.859 -41.383 1.000 88.180 1 52 1 ATOM O OG . SER A A 7 8 ? 117.956 25.221 -42.117 1.000 88.850 1 53 1 ATOM N N . ASP B B 1 3 ? 71.339 57.678 52.031 1.000 152.010 2 54 1 ATOM C CA . ASP B B 1 3 ? 70.427 58.819 51.717 1.000 152.390 2 55 1 ATOM C C . ASP B B 1 3 ? 70.144 58.821 50.222 1.000 151.960 2 56 1 ATOM O O . ASP B B 1 3 ? 70.984 59.245 49.435 1.000 151.590 2 57 1 ATOM C CB . ASP B B 1 3 ? 71.083 60.142 52.119 1.000 153.250 2 58 1 ATOM C CG . ASP B B 1 3 ? 71.660 60.105 53.526 1.000 154.120 2 59 1 ATOM O OD1 . ASP B B 1 3 ? 72.652 59.371 53.741 1.000 154.200 2 60 1 ATOM O OD2 . ASP B B 1 3 ? 71.119 60.804 54.415 1.000 154.250 2 61 1 ATOM N N . GLU B B 2 4 ? 68.956 58.362 49.837 1.000 151.910 2 62 1 ATOM C CA . GLU B B 2 4 ? 68.584 58.274 48.425 1.000 152.090 2 63 1 ATOM C C . GLU B B 2 4 ? 68.584 59.573 47.616 1.000 151.320 2 64 1 ATOM O O . GLU B B 2 4 ? 67.786 59.730 46.686 1.000 150.840 2 65 1 ATOM C CB . GLU B B 2 4 ? 67.218 57.585 48.274 1.000 153.600 2 66 1 ATOM C CG . GLU B B 2 4 ? 66.035 58.328 48.890 1.000 155.740 2 67 1 ATOM C CD . GLU B B 2 4 ? 64.690 57.699 48.526 1.000 156.760 2 68 1 ATOM O OE1 . GLU B B 2 4 ? 64.487 56.498 48.819 1.000 156.940 2 69 1 ATOM O OE2 . GLU B B 2 4 ? 63.835 58.409 47.947 1.000 157.060 2 70 1 HETATM FE FE1 . SF4 C C . 1 ? 14.698 20.785 10.230 1.00 13.78 3 71 1 python-ihm-2.7/examples/non_standard_residues.py000066400000000000000000000047361503573337200222450ustar00rootroot00000000000000# This example demonstrates how to add non-standard residues to # sequences handled by the Python IHM library. See the simple-docking.py # example for an introduction to the library. import ihm import ihm.dumper system = ihm.System() # IHM contains definitions for standard amino and nucleic acids, plus # a few common non-standard residues such as MSE and UNK. # To create a new non-standard residue, we first need to create a chemical # component for it. In this case, we add a definition for norvaline, an # isomer of valine. IHM provides a ChemComp class for this purpose and a # number of more specialized subclasses. Since norvaline is a chiral peptide, # here we define it in its L- form using the LPeptideChemComp class. # # 'id' should match the officially defined name of the component, as defined # in the chemical component dictionary: https://www.wwpdb.org/data/ccd # (See also https://www3.rcsb.org/ligand/NVA) # 'code' is used to populate the primary sequence in the output mmCIF file. # For non-standard residues it should normally match 'id'. # 'code_canonical' is the one-letter code of the closest standard residue. # Here we use 'V', valine. 
norvaline = ihm.LPeptideChemComp(id='NVA', code='NVA', code_canonical='V', name='NORVALINE', formula='C5 H11 N O2') # The Entity constructor takes a sequence of either or both one-letter codes # and ChemComp objects, so now we can make a sequence containing both # alanine and norvaline: entity1 = ihm.Entity(['A', 'A', norvaline, 'A'], description='First entity') # If a particular non-standard residue is commonly used in your own software, # and you have assigned a one-letter code for it, you can subclass # the ihm Alphabet class appropriately. Here we extend the normal set of # one-letter codes (uppercase) for standard L- amino acids to add 'n' for # norvaline: class MyAlphabet(ihm.LPeptideAlphabet): # Alphabet contains a _comps dictionary that is a simple mapping from # codes (usually one-letter) to ChemComp objects _comps = {} _comps.update(ihm.LPeptideAlphabet._comps) _comps['n'] = norvaline # Now we can pass a primary sequence using our custom alphabet to include # norvaline alongside standard cysteine: entity2 = ihm.Entity('CCnC', alphabet=MyAlphabet, description="Second entity") system.entities.extend((entity1, entity2)) # Once the system is complete, we can write it out to an mmCIF file: with open('output.cif', 'w') as fh: ihm.dumper.write(fh, [system]) python-ihm-2.7/examples/simple-docking.py000066400000000000000000000153431503573337200205710ustar00rootroot00000000000000# This example demonstrates the use of the Python IHM library to generate # an mmCIF file for a very simple integrative docking study. Two subunits, # A and B, each of which is fitted against small angle X-ray (SAXS) data, are # docked together into a complex, AB, which is fitted against an electron # microscopy density map. import ihm import ihm.location import ihm.dataset import ihm.representation import ihm.restraint import ihm.protocol import ihm.model import ihm.dumper # First, we create a system, which contains everything we know about the # modeling. A single mmCIF file can contain multiple Systems, but in most # cases we use just one: system = ihm.System() # Next, we describe the input data we used, using dataset classes. # Each source of data has a location, such as a file on disk or a database # entry, and a type. In this example we used EM density data, which we'll # say lives in the EMDB database: loc = ihm.location.EMDBLocation('EMDB-1234') em_dataset = ihm.dataset.EMDensityDataset(loc) # We also used two SAXS profiles, which we'll say live in SASBDB: saxsA_dataset = ihm.dataset.SASDataset(ihm.location.SASBDBLocation('SASDB123')) saxsB_dataset = ihm.dataset.SASDataset(ihm.location.SASBDBLocation('SASDB456')) # Where datasets are derived from some other data, it is helpful to also point # back to that primary data. In this case, let's say the EM density was # derived from a set of EM micrographs, deposited in the EMPIAR database: m = ihm.dataset.EMMicrographsDataset(ihm.location.EMPIARLocation('EMPIAR-123')) em_dataset.parents.append(m) # Next, define the entities for each unique sequence in the system # (here represented as polyalanines): entityA = ihm.Entity('AAA', description='Subunit A') entityB = ihm.Entity('AAAAAA', description='Subunit B') system.entities.extend((entityA, entityB)) # Next, we define asymmetric units for everything we modeled. # These roughly correspond to chains in a traditional PDB file. Multiple # asymmetric units may map to the same entity (for example if there are # several copies of a given protein). 
Parts of the system that were seen in # an experiment but were not modeled are represented as entities to which no # asymmetric units map. asymA = ihm.AsymUnit(entityA, details='Subunit A') asymB = ihm.AsymUnit(entityB, details='Subunit B') system.asym_units.extend((asymA, asymB)) # Next, we group asymmetric units (and/or entities) into assemblies. # Here, we'll define an assembly of everything that we modeled, plus # two subassemblies (of the subunits) that the SAXS data applies to: modeled_assembly = ihm.Assembly((asymA, asymB), name='Modeled assembly') assemblyA = ihm.Assembly((asymA,), name='Subunit A') assemblyB = ihm.Assembly((asymB,), name='Subunit B') # Define how the system was represented. Multiple representations of the # system are possible, and can overlap. Here we'll say we represent A # atomically as a rigid body and B as 3 flexible coarse-grained spheres: rep = ihm.representation.Representation( [ihm.representation.AtomicSegment(asymA, rigid=True), ihm.representation.FeatureSegment(asymB, rigid=False, primitive='sphere', count=3)]) # Set up restraints on the system. First, two on the subunits that use # the SAXS data; we'll say we used the FoXS software to do this fit: saxsA_rsr = ihm.restraint.SASRestraint( dataset=saxsA_dataset, assembly=assemblyA, fitting_method='FoXS', fitting_atom_type='Heavy atoms') saxsB_rsr = ihm.restraint.SASRestraint( dataset=saxsB_dataset, assembly=assemblyB, fitting_method='FoXS', fitting_atom_type='Heavy atoms') system.restraints.extend((saxsA_rsr, saxsB_rsr)) # Next, the EM restraint applied to the entire system: em_rsr = ihm.restraint.EM3DRestraint( dataset=em_dataset, assembly=modeled_assembly) system.restraints.append(em_rsr) # Now we add information about how the modeling was done by defining one # or more protocols. Here we'll say we did simple Monte Carlo on the entire # system using all of the experimental data: all_datasets = ihm.dataset.DatasetGroup((em_dataset, saxsA_dataset, saxsB_dataset)) protocol = ihm.protocol.Protocol(name='Modeling') protocol.steps.append(ihm.protocol.Step( assembly=modeled_assembly, dataset_group=all_datasets, method='Monte Carlo', name='Production sampling', num_models_begin=0, num_models_end=1000, multi_scale=True)) # Finally we can add coordinates for the deposited models. Typically these # will be stored in our own software's data structures somewhere (for this # example in simple lists 'atoms' and 'spheres'): atoms = [('A', 1, 'C', 'CA', 1., 2., 3.), ('A', 2, 'C', 'CA', 4., 5., 6.), ('A', 3, 'C', 'CA', 7., 8., 9.)] spheres = [('B', 1, 2, 1., 2., 3., 1.2), ('B', 3, 4, 4., 5., 6., 1.2), ('B', 5, 6, 7., 8., 9., 1.2)] # Rather than storing another copy of the coordinates in the IHM library # (which could use a lot of memory), we need to provide a mechanism to # translate them into the IHM data model. 
We do this straightforwardly by # subclassing the IHM Model class and overriding the get_atoms # and get_spheres methods: class MyModel(ihm.model.Model): # Map our asym unit names A and B to IHM asym_unit objects: asym_unit_map = {'A': asymA, 'B': asymB} def get_atoms(self): for asym, seq_id, type_symbol, atom_id, x, y, z in atoms: yield ihm.model.Atom(asym_unit=self.asym_unit_map[asym], type_symbol=type_symbol, seq_id=seq_id, atom_id=atom_id, x=x, y=y, z=z) def get_spheres(self): for asym, seq_id_start, seq_id_end, x, y, z, radius in spheres: yield ihm.model.Sphere(asym_unit=self.asym_unit_map[asym], seq_id_range=(seq_id_start, seq_id_end), x=x, y=y, z=z, radius=radius) model = MyModel(assembly=modeled_assembly, protocol=protocol, representation=rep, name='Best scoring model') # Note that the model was scored against all three restraints saxsA_rsr.fits[model] = ihm.restraint.SASRestraintFit(chi_value=1.4) saxsB_rsr.fits[model] = ihm.restraint.SASRestraintFit(chi_value=2.1) em_rsr.fits[model] = ihm.restraint.EM3DRestraintFit( cross_correlation_coefficient=0.9) # Similar models can be grouped together. Here we only have a single model # in the group model_group = ihm.model.ModelGroup([model], name='All models') # Groups are then placed into states, which can in turn be grouped. In this # case we have only a single state: state = ihm.model.State([model_group]) system.state_groups.append(ihm.model.StateGroup([state])) # Once the system is complete, we can write it out to an mmCIF file: with open('output.cif', 'w') as fh: ihm.dumper.write(fh, [system]) python-ihm-2.7/examples/simple.pdb000066400000000000000000000024201503573337200172620ustar00rootroot00000000000000ATOM 1 N ALA 1 17.807 17.608 5.019 1.00 17.18 5FD1 135 ATOM 2 CA ALA 1 17.121 17.162 6.197 1.00 15.60 5FD1 136 ATOM 3 C ALA 1 18.085 17.018 7.343 1.00 14.54 5FD1 137 ATOM 4 O ALA 1 19.244 16.654 7.119 1.00 15.42 5FD1 138 ATOM 5 CB ALA 1 16.496 15.827 5.961 1.00 16.91 5FD1 139 ATOM 6 N PHE 2 17.637 17.305 8.563 1.00 14.35 5FD1 140 ATOM 7 CA PHE 2 18.425 17.005 9.748 1.00 14.39 5FD1 141 ATOM 8 C PHE 2 17.911 15.673 10.298 1.00 12.39 5FD1 142 ATOM 9 O PHE 2 16.799 15.252 9.994 1.00 12.59 5FD1 143 ATOM 10 CB PHE 2 18.304 18.163 10.740 1.00 13.38 5FD1 144 ATOM 11 CG PHE 2 19.393 19.213 10.475 1.00 14.14 5FD1 145 ATOM 12 CD1 PHE 2 19.373 19.980 9.320 1.00 14.21 5FD1 146 ATOM 13 CD2 PHE 2 20.410 19.419 11.375 1.00 14.04 5FD1 147 ATOM 14 CE1 PHE 2 20.346 20.929 9.090 1.00 13.22 5FD1 148 ATOM 15 CE2 PHE 2 21.378 20.374 11.132 1.00 14.13 5FD1 149 ATOM 16 CZ PHE 2 21.361 21.133 9.992 1.00 13.09 5FD1 150 python-ihm-2.7/examples/stream_parser.py000066400000000000000000000026031503573337200205260ustar00rootroot00000000000000# This example demonstrates the use of the Python IHM library at a low # level, to parse an mmCIF file and extract a subset of its data. # This particular example just extracts the atomic coordinates. import ihm.format # Make an object to handle a given mmCIF category in the file; it will # be called for each line in the loop construct. class AtomSiteHandler(object): # If a given keyword is not in the file, or has the special # mmCIF omitted (.) or unknown (?) value, the corresponding argument # to __call__ will be given these values: not_in_file = omitted = None unknown = ihm.unknown # Extract the group_PDB, Cartn_x, Cartn_y, Cartn_z keywords from # the mmCIF category (mmCIF keywords are case-insensitive, but the # Python arguments here should be lowercase). 
def __call__(self, group_pdb, cartn_x, cartn_y, cartn_z): if group_pdb == 'ATOM': print("Atom at %s, %s, %s" % (cartn_x, cartn_y, cartn_z)) ash = AtomSiteHandler() with open('mini.cif') as fh: # Extract keywords from the _atom_site mmCIF category using the # AtomSiteHandler defined above c = ihm.format.CifReader(fh, category_handler={'_atom_site': ash}) # Read the first data block in mini.cif # (This will return True as long as there are more blocks, so it can # be put in a while loop instead if you want to read all data blocks.) c.read_file() python-ihm-2.7/examples/token_reader.py000066400000000000000000000026041503573337200203220ustar00rootroot00000000000000# This example demonstrates the use of the Python IHM library at a very # low level, to perform housekeeping tasks on an mmCIF file without # making large changes to its structure, and preserving whitespace, # case, and comments. # Note that unlike higher-level interfaces, the tokenizer can generate # invalid mmCIF if used incorrectly. It is recommended that the resulting # mmCIF files are run through a validator, as in the `validate_pdb_dev.py` # example. import ihm.format filters = [ # Change chain ID 'B' to 'Z' by altering the _struct_asym table ihm.format.ChangeValueFilter('_struct_asym.id', old='B', new='Z'), # Note that the tokenizer does not parse parent-child relationships # or understand the underlying dictionary. So we must also change other # tables that reference chain IDs. Here we change the label_asym_id keyword # in *any* table (typically in _atom_site). ihm.format.ChangeValueFilter('.label_asym_id', old='B', new='Z'), # Remove the non-standard _modeller.version data item from the file ihm.format.RemoveItemFilter('_modeller.version')] # Read the input file as a set of tokens, modify them using the filters # above, and write a new file: with open('mini.cif') as fh_in: r = ihm.format.CifTokenReader(fh_in) with open('output.cif', 'w') as fh_out: for token in r.read_file(filters): fh_out.write(token.as_mmcif()) python-ihm-2.7/examples/validate_pdb_ihm.py000066400000000000000000000036461503573337200211420ustar00rootroot00000000000000# This example demonstrates the use of the Python IHM library's validator. # A structure is downloaded from the PDB-IHM database and checked against # the PDBx and IHM dictionaries for compliance. This validator can be used # to perform basic integrity checking against any mmCIF dictionary; for an # example of using it to validate homology models against the ModelCIF # dictionary, see # https://github.com/ihmwg/python-modelcif/blob/main/examples/validate_modbase.py. import io import ihm.reader import ihm.dictionary import urllib.request # Read in the PDBx dictionary from wwPDB as a Dictionary object fh = urllib.request.urlopen( 'http://mmcif.wwpdb.org/dictionaries/ascii/mmcif_pdbx_v50.dic') d_pdbx = ihm.dictionary.read(fh) fh.close() # Also read in the IHM dictionary fh = urllib.request.urlopen( 'http://mmcif.wwpdb.org/dictionaries/ascii/mmcif_ihm.dic') d_ihm = ihm.dictionary.read(fh) fh.close() # Deposited integrative models should conform to both the PDBx dictionary # (used to define basic structural information such as residues and chains) # and the IHM dictionary (used for information specific to integrative # modeling). Make a dictionary that combines the PDBx and IHM dictionaries # using the + operator. pdbx_ihm = d_pdbx + d_ihm # Validate a structure against PDBx+IHM. 
# A correct structure here should result in no output; an invalid structure # will result in a ValidatorError Python exception. # Here, a structure from PDB-IHM (which should be valid) is used. acc = '8zz1' cif = urllib.request.urlopen('https://pdb-ihm.org/cif/%s.cif' % acc).read() # The encoding for mmCIF files isn't strictly defined, so first try UTF-8 # and if that fails, strip out any non-ASCII characters. This ensures that # we handle accented characters in string fields correctly. try: fh = io.StringIO(cif.decode('utf-8')) except UnicodeDecodeError: fh = io.StringIO(cif.decode('ascii', errors='ignore')) pdbx_ihm.validate(fh) python-ihm-2.7/ihm/000077500000000000000000000000001503573337200142435ustar00rootroot00000000000000python-ihm-2.7/ihm/__init__.py000066400000000000000000002363251503573337200163670ustar00rootroot00000000000000"""Representation of an IHM mmCIF file as a set of Python classes. Generally class names correspond to mmCIF table names and class attributes to mmCIF attributes (with prefixes like `pdbx_` stripped). For example, the data item _entity.details is found in the :class:`Entity` class, as the `details` member. Ordinals and IDs are generally not used in this representation (instead, pointers to objects are used). """ import itertools import numbers import re import sys import urllib.request import json import collections from . import util __version__ = '2.7' class __UnknownValue: # Represent the mmCIF 'unknown' special value def __str__(self): return '?' __repr__ = __str__ def __bool__(self): return False # Needs to be hashable so that classes like Software (that might # use unknown values as attributes) are hashable def __hash__(self): return 0 # Unknown value is a singleton and should only compare equal to itself def __eq__(self, other): return self is other def __lt__(self, other): return False __gt__ = __lt__ __le__ = __ge__ = __eq__ #: A value that isn't known. Note that this is distinct from a value that #: is deliberately omitted, which is represented by Python None. unknown = __UnknownValue() def _remove_identical(gen): """Return only unique objects from `gen`. Objects that are identical are only returned once, although multiple non-identical objects that compare equal may be returned.""" seen_objs = {} for obj in gen: if id(obj) in seen_objs: continue seen_objs[id(obj)] = None yield obj class System: """Top-level class representing a complete modeled system. :param str title: Title (longer text description) of the system. :param str id: Unique identifier for this system in the mmCIF file. :param str model_details: Detailed description of the system, like an abstract. :param databases: If this system is part of one or more official databases (e.g. PDB, SwissModel), details of the database identifiers. :type databases: sequence of :class:`Database` """ structure_determination_methodology = "integrative" def __init__(self, title=None, id='model', model_details=None, databases=[]): self.id = id self.title = title self.model_details = model_details self.databases = [] self.databases.extend(databases) #: Information about data processing and entry status. #: See :class:`DatabaseStatus`. self.database_status = DatabaseStatus() #: List of plain text comments. These will be added to the top of #: the mmCIF file. self.comments = [] #: List of all software used in the modeling. See :class:`Software`. self.software = [] #: List of all authors of this system, as a list of strings (last name #: followed by initials, e.g. "Smith, A.J."). 
When writing out a file, #: if this list is empty, the set of all citation authors (see #: :class:`Citation`) is used instead. self.authors = [] #: List of all grants that supported this work. See :class:`Grant`. self.grants = [] #: List of all citations. See :class:`Citation`. self.citations = [] #: All entities used in the system. See :class:`Entity`. self.entities = [] #: All asymmetric units used in the system. See :class:`AsymUnit`. self.asym_units = [] #: Collections (if any) to which this entry belongs. #: These are used to group depositions of related entries. #: See :class:`Collection`. self.collections = [] #: Revision/update history. See :class:`Revision`. self.revisions = [] #: Information on usage of the data. See :class:`DataUsage`. self.data_usage = [] #: All orphaned chemical descriptors in the system. #: See :class:`ChemDescriptor`. This can be used to track descriptors #: that are not otherwise used - normally one is assigned to a #: :class:`ihm.restraint.CrossLinkRestraint`. self.orphan_chem_descriptors = [] #: All orphaned assemblies in the system. See :class:`Assembly`. #: This can be used to keep track of all assemblies that are not #: otherwise used - normally one is assigned to a #: :class:`~ihm.model.Model`, #: :class:`ihm.protocol.Step`, or #: :class:`~ihm.restraint.Restraint`. self.orphan_assemblies = [] #: The assembly of the entire system. By convention this is always #: the first assembly in the mmCIF file (assembly_id=1). Note that #: currently this isn't filled in on output until dumper.write() #: is called. See :class:`Assembly`. self.complete_assembly = Assembly((), name='Complete assembly', description='All known components') #: Locations of all extra resources. #: See :class:`~ihm.location.Location`. self.locations = [] #: All orphaned datasets. #: This can be used to keep track of all datasets that are not #: otherwise used - normally a dataset is assigned to a #: :class:`~ihm.dataset.DatasetGroup`, #: :class:`~ihm.startmodel.StartingModel`, #: :class:`~ihm.restraint.Restraint`, #: :class:`~ihm.startmodel.Template`, #: or as the parent of another :class:`~ihm.dataset.Dataset`. #: See :class:`~ihm.dataset.Dataset`. self.orphan_datasets = [] #: All orphaned groups of datasets. #: This can be used to keep track of all dataset groups that are not #: otherwise used - normally a group is assigned to a #: :class:`~ihm.protocol.Protocol`. #: See :class:`~ihm.dataset.DatasetGroup`. self.orphan_dataset_groups = [] #: All orphaned representations of the system. #: This can be used to keep track of all representations that are not #: otherwise used - normally one is assigned to a #: :class:`~ihm.model.Model`. #: See :class:`~ihm.representation.Representation`. self.orphan_representations = [] #: All orphaned starting models for the system. #: This can be used to keep track of all starting models that are not #: otherwise used - normally one is assigned to an #: :class:`ihm.representation.Segment`. #: See :class:`~ihm.startmodel.StartingModel`. self.orphan_starting_models = [] #: All restraints on the system. #: See :class:`~ihm.restraint.Restraint`. self.restraints = [] #: All restraint groups. #: See :class:`~ihm.restraint.RestraintGroup`. self.restraint_groups = [] #: All orphaned modeling protocols. #: This can be used to keep track of all protocols that are not #: otherwise used - normally a protocol is assigned to a #: :class:`~ihm.model.Model`. #: See :class:`~ihm.protocol.Protocol`. self.orphan_protocols = [] #: All ensembles. 
#: See :class:`~ihm.model.Ensemble`. self.ensembles = [] #: All ordered processes. #: See :class:`~ihm.model.OrderedProcess`. self.ordered_processes = [] #: All state groups (collections of models). #: See :class:`~ihm.model.StateGroup`. self.state_groups = [] #: All orphaned geometric objects. #: This can be used to keep track of all objects that are not #: otherwise used - normally an object is assigned to a #: :class:`~ihm.restraint.GeometricRestraint`. #: See :class:`~ihm.geometry.GeometricObject`. self.orphan_geometric_objects = [] #: All orphaned features. #: This can be used to keep track of all features that are not #: otherwise used - normally a feature is assigned to a #: :class:`~ihm.restraint.GeometricRestraint`. #: See :class:`~ihm.restraint.Feature`. self.orphan_features = [] #: All orphaned pseudo sites. #: This can be used to keep track of all pseudo sites that are not #: otherwise used - normally a site is used in a #: :class:`~ihm.restraint.PseudoSiteFeature` or a #: :class:`~ihm.restraint.CrossLinkPseudoSite`. self.orphan_pseudo_sites = [] #: Contains the fluorescence (FLR) part. #: See :class:`~ihm.flr.FLRData`. self.flr_data = [] #: All multi-state schemes #: See :class:`~ihm.multi_state_scheme.MultiStateScheme`. self.multi_state_schemes = [] self._orphan_centers = [] self._orphan_dataset_transforms = [] self._orphan_geometric_transforms = [] self._orphan_relaxation_times = [] self._orphan_repos = [] self._orphan_chem_comps = [] _database_status = property(lambda self: self.database_status._map) def _make_complete_assembly(self): """Fill in the complete assembly with all asym units""" # Clear out any existing components self.complete_assembly[:] = [] # Include all asym units for asym in self.asym_units: self.complete_assembly.append(asym) def _all_models(self): """Iterate over all Models in the system""" # todo: raise an error if a model is present in multiple groups for group in self._all_model_groups(): seen_models = {} for model in group: if model in seen_models: continue seen_models[model] = None yield group, model def update_locations_in_repositories(self, repos): """Update all :class:`~ihm.location.Location` objects in the system that lie within a checked-out :class:`~ihm.location.Repository` to point to that repository. This is intended for the use case where the current working directory is a checkout of a repository which is archived somewhere with a DOI. Locations can then be simply constructed pointing to local files, and retroactively updated with this method to point to the DOI if appropriate. For each Location, if it points to a local file that is below the `root` of one of the `repos`, update it to point to that repository. If it is under multiple roots, pick the one that gives the shortest path. For example, if run in a subdirectory `foo` of a repository archived as `repo.zip`, the local path `simple.pdb` will be updated to be `repo-top/foo/simple.pdb` in `repo.zip`:: l = ihm.location.InputFileLocation("simple.pdb") system.locations.append(l) r = ihm.location.Repository(doi='1.2.3.4', url='https://example.com/repo.zip', top_directory="repo-top", root="..") system.update_locations_in_repositories([r]) """ import ihm.location for loc in self._all_locations(): if isinstance(loc, ihm.location.FileLocation): ihm.location.Repository._update_in_repos(loc, repos) def report(self, fh=sys.stdout): """Print a summary report of this system. This can be used to more easily spot errors or inconsistencies.
It will also warn about missing data that may not be technically required for a compliant mmCIF file, but is usually expected to be present. :param file fh: The file handle to print the report to, if not standard output. """ import ihm.report r = ihm.report.Reporter(self, fh) r.report() def _all_restraints(self): """Iterate over all Restraints in the system. Duplicates may be present.""" def _all_restraints_in_groups(): for rg in self.restraint_groups: for r in rg: yield r return itertools.chain(self.restraints, _all_restraints_in_groups()) def _all_chem_descriptors(self): """Iterate over all ChemDescriptors in the system. Duplicates may be present.""" return itertools.chain( self.orphan_chem_descriptors, (restraint.linker for restraint in self._all_restraints() if hasattr(restraint, 'linker') and restraint.linker), (itertools.chain.from_iterable( f._all_flr_chemical_descriptors() for f in self.flr_data))) def _all_model_groups(self, only_in_states=True): """Iterate over all ModelGroups in the system. If only_in_states is True, only return ModelGroups referenced by a State object; otherwise, also include ModelGroups referenced by an OrderedProcess or Ensemble.""" # todo: raise an error if a modelgroup is present in multiple states seen_model_groups = [] for state_group in self.state_groups: for state in state_group: for model_group in state: seen_model_groups.append(model_group) yield model_group for mssc in self._all_multi_state_scheme_connectivities(): for model_group in mssc.begin_state: if model_group not in seen_model_groups: seen_model_groups.append(model_group) yield model_group if mssc.end_state: for model_group in mssc.end_state: if model_group not in seen_model_groups: seen_model_groups.append(model_group) yield model_group if not only_in_states: for ensemble in self.ensembles: if ensemble.model_group: yield ensemble.model_group for ss in ensemble.subsamples: if ss.model_group: yield ss.model_group for proc in self.ordered_processes: for step in proc.steps: for edge in step: yield edge.group_begin yield edge.group_end def _all_representations(self): """Iterate over all Representations in the system. This includes all Representations referenced from other objects, plus any orphaned Representations. Duplicates are filtered out.""" return _remove_identical(itertools.chain( self.orphan_representations, (model.representation for group, model in self._all_models() if model.representation))) def _all_segments(self): for representation in self._all_representations(): for segment in representation: yield segment def _all_starting_models(self): """Iterate over all StartingModels in the system. This includes all StartingModels referenced from other objects, plus any orphaned StartingModels. Duplicates are filtered out.""" return _remove_identical(itertools.chain( self.orphan_starting_models, (segment.starting_model for segment in self._all_segments() if segment.starting_model))) def _all_protocols(self): """Iterate over all Protocols in the system. This includes all Protocols referenced from other objects, plus any orphaned Protocols. 
Duplicates are filtered out.""" return _remove_identical(itertools.chain( self.orphan_protocols, (model.protocol for group, model in self._all_models() if model.protocol))) def _all_protocol_steps(self): for protocol in self._all_protocols(): for step in protocol.steps: yield step def _all_analysis_steps(self): for protocol in self._all_protocols(): for analysis in protocol.analyses: for step in analysis.steps: yield step def _all_assemblies(self): """Iterate over all Assemblies in the system. This includes all Assemblies referenced from other objects, plus any orphaned Assemblies. Duplicates may be present.""" return itertools.chain( # Complete assembly is always first (self.complete_assembly,), self.orphan_assemblies, (model.assembly for group, model in self._all_models() if model.assembly), (step.assembly for step in self._all_protocol_steps() if step.assembly), (step.assembly for step in self._all_analysis_steps() if step.assembly), (restraint.assembly for restraint in self._all_restraints() if restraint.assembly)) def _all_dataset_groups(self): """Iterate over all DatasetGroups in the system. This includes all DatasetGroups referenced from other objects, plus any orphaned groups. Duplicates may be present.""" return itertools.chain( self.orphan_dataset_groups, (step.dataset_group for step in self._all_protocol_steps() if step.dataset_group), (step.dataset_group for step in self._all_analysis_steps() if step.dataset_group), (rt.dataset_group for rt in self._all_relaxation_times() if rt.dataset_group), (kr.dataset_group for kr in self._all_kinetic_rates() if kr.dataset_group), (mssc.dataset_group for mssc in self._all_multi_state_scheme_connectivities() if mssc.dataset_group)) def _all_templates(self): """Iterate over all Templates in the system.""" for startmodel in self._all_starting_models(): for template in startmodel.templates: yield template def _all_datasets_except_parents(self): """Iterate over all Datasets except those referenced only as the parent of another Dataset. Duplicates may be present.""" def _all_datasets_in_groups(): for dg in self._all_dataset_groups(): for d in dg: yield d return itertools.chain( self.orphan_datasets, _all_datasets_in_groups(), (sm.dataset for sm in self._all_starting_models() if sm.dataset), (restraint.dataset for restraint in self._all_restraints() if restraint.dataset), (template.dataset for template in self._all_templates() if template.dataset)) def _all_datasets(self): """Iterate over all Datasets in the system. This includes all Datasets referenced from other objects, plus any orphaned datasets. Duplicates may be present.""" def _all_datasets_and_parents(d): for p in d.parents: # Handle transformed datasets if hasattr(p, 'dataset'): pd = p.dataset else: pd = p for alld in _all_datasets_and_parents(pd): yield alld yield d for d in self._all_datasets_except_parents(): for alld in _all_datasets_and_parents(d): yield alld def _all_densities(self): for ensemble in self.ensembles: for density in ensemble.densities: yield density def _all_locations(self): """Iterate over all Locations in the system. This includes all Locations referenced from other objects, plus any referenced from the top-level system. 
Duplicates may be present.""" def _all_ensemble_locations(): for ensemble in self.ensembles: if ensemble.file: yield ensemble.file for ss in ensemble.subsamples: if ss.file: yield ss.file return itertools.chain( self.locations, (dataset.location for dataset in self._all_datasets() if hasattr(dataset, 'location') and dataset.location), _all_ensemble_locations(), (density.file for density in self._all_densities() if density.file), (sm.script_file for sm in self._all_starting_models() if sm.script_file), (template.alignment_file for template in self._all_templates() if template.alignment_file), (step.script_file for step in self._all_protocol_steps() if step.script_file), (step.script_file for step in self._all_analysis_steps() if step.script_file), (rt.external_file for rt in self._all_relaxation_times() if rt.external_file), (kr.external_file for kr in self._all_kinetic_rates() if kr.external_file)) def _all_geometric_objects(self): """Iterate over all GeometricObjects in the system. This includes all GeometricObjects referenced from other objects, plus any referenced from the top-level system. Duplicates may be present.""" return itertools.chain( self.orphan_geometric_objects, (restraint.geometric_object for restraint in self._all_restraints() if hasattr(restraint, 'geometric_object') and restraint.geometric_object)) def _all_features(self): """Iterate over all Features in the system. This includes all Features referenced from other objects, plus any referenced from the top-level system. Duplicates may be present.""" def _all_restraint_features(): for r in self._all_restraints(): if hasattr(r, '_all_features'): for feature in r._all_features: if feature: yield feature return itertools.chain(self.orphan_features, _all_restraint_features()) def _all_pseudo_sites(self): """Iterate over all PseudoSites in the system. This includes all PseudoSites referenced from other objects, plus any referenced from the top-level system. Duplicates may be present.""" def _all_restraint_sites(): for r in self._all_restraints(): if hasattr(r, 'cross_links'): for xl in r.cross_links: if xl.pseudo1: for x in xl.pseudo1: yield x.site if xl.pseudo2: for x in xl.pseudo2: yield x.site return itertools.chain(self.orphan_pseudo_sites, _all_restraint_sites(), (f.site for f in self._all_features() if hasattr(f, 'site') and f.site)) def _all_software(self): """Iterate over all Software in the system. This includes all Software referenced from other objects, plus any referenced from the top-level system. Duplicates may be present.""" return (itertools.chain( self.software, (sm.software for sm in self._all_starting_models() if sm.software), (step.software for step in self._all_protocol_steps() if step.software), (step.software for step in self._all_analysis_steps() if step.software), (r.software for r in self._all_restraints() if hasattr(r, 'software') and r.software))) def _all_citations(self): """Iterate over all Citations in the system. This includes all Citations referenced from other objects, plus any referenced from the top-level system. 
Duplicates are filtered out.""" return _remove_identical(itertools.chain( self.citations, (software.citation for software in self._all_software() if software.citation), (restraint.fitting_method_citation_id for restraint in self._all_restraints() if hasattr(restraint, 'fitting_method_citation_id') and restraint.fitting_method_citation_id))) def _all_entity_ranges(self): """Iterate over all Entity ranges in the system (these may be :class:`Entity`, :class:`AsymUnit`, :class:`EntityRange` or :class:`AsymUnitRange` objects). Note that we don't include self.entities or self.asym_units here, as we only want ranges that were actually used. Duplicates may be present.""" return (itertools.chain( (sm.asym_unit for sm in self._all_starting_models()), (seg.asym_unit for seg in self._all_segments()), (comp for a in self._all_assemblies() for comp in a), (comp for f in self._all_features() for comp in f._all_entities_or_asyms()), (d.asym_unit for d in self._all_densities()))) def _all_multi_state_schemes(self): for mss in self.multi_state_schemes: yield mss def _all_multi_state_scheme_connectivities(self): """Iterate over all multi-state scheme connectivities""" for mss in self.multi_state_schemes: for mssc in mss.get_connectivities(): yield mssc def _all_kinetic_rates(self): """Iterate over all kinetic rates within multi-state schemes""" return _remove_identical(itertools.chain( (mssc.kinetic_rate for mssc in self._all_multi_state_scheme_connectivities() if mssc.kinetic_rate), (c.kinetic_rate for f in self.flr_data for c in f.kinetic_rate_fret_analysis_connections if self.flr_data))) def _all_relaxation_times(self): """Iterate over all relaxation times. This includes relaxation times from :class:`ihm.multi_state_scheme.MultiStateScheme` and those assigned to connectivities in :class:`ihm.multi_state_scheme.Connectivity`""" seen_relaxation_times = [] for mss in self._all_multi_state_schemes(): for rt in mss.get_relaxation_times(): if rt in seen_relaxation_times: continue seen_relaxation_times.append(rt) yield rt for mssc in self._all_multi_state_scheme_connectivities(): if mssc.relaxation_time: rt = mssc.relaxation_time if rt in seen_relaxation_times: continue seen_relaxation_times.append(rt) yield rt # Get the relaxation times from the # flr.RelaxationTimeFRETAnalysisConnection objects if self.flr_data: for f in self.flr_data: for c in f.relaxation_time_fret_analysis_connections: rt = c.relaxation_time if rt in seen_relaxation_times: continue seen_relaxation_times.append(rt) yield rt for rt in self._orphan_relaxation_times: if rt in seen_relaxation_times: continue seen_relaxation_times.append(rt) yield rt def _before_write(self): """Do any setup necessary before writing out to a file""" # Here, we initialize all RestraintGroups by removing any assigned ID for g in self.restraint_groups: util._remove_id(g) # Fill in complete assembly self._make_complete_assembly() def _check_after_write(self): """Make sure everything was successfully written""" # Here, we check that all RestraintGroups were successfully dumped""" for g in self.restraint_groups: if len(g) > 0 and not hasattr(g, '_id'): raise TypeError( "RestraintGroup(%s) contains an unsupported combination " "of Restraints. Due to limitations of the underlying " "dictionary, all objects in a RestraintGroup must be of " "the same type, and only certain types (currently only " "DerivedDistanceRestraint or PredictedContactRestraint) " "can be grouped." % g) class DatabaseStatus: """Information about data processing and entry status. 
This information is usually accessed via :attr:`System.database_status`. """ def __init__(self): self._map = {} status_code = property(lambda self: self._map['status_code'], doc="The status of the entry, e.g. released.") deposit_site = property(lambda self: self._map['deposit_site'], doc="The site where the file was deposited.") process_site = property(lambda self: self._map['process_site'], doc="The site where the file was processed.") recvd_initial_deposition_date = property( lambda self: util._get_iso_date(self._map['recvd_initial_deposition_date']), doc="The date of initial deposition.") class Database: """Information about a System that is part of an official database. If a :class:`System` is part of one or more official databases (e.g. PDB, SwissModel), this class contains details of the database identifiers. It should be passed to the :class:`System` constructor. :param str id: Abbreviated name of the database (e.g. PDB). :param str code: Identifier from the database (e.g. 1abc). :param str doi: Digital Object Identifier of the database entry. :param str accession: Extended accession code of the database entry. """ def __init__(self, id, code, doi=None, accession=None): self.id, self.code = id, code self.doi, self.accession = doi, accession class Software: """Software used as part of the modeling protocol. :param str name: The name of the software. :param str classification: The major function of the software, for example 'model building', 'sample preparation', 'data collection'. :param str description: A longer text description of the software. :param str location: Place where the software can be found (e.g. URL). :param str type: Type of software (program/package/library/other). :param str version: The version used. :param citation: Publication describing the software. :type citation: :class:`Citation` Generally these objects are added to :attr:`System.software` or passed to :class:`ihm.startmodel.StartingModel`, :class:`ihm.protocol.Step`, :class:`ihm.analysis.Step`, or :class:`ihm.restraint.PredictedContactRestraint` objects. """ def __init__(self, name, classification, description, location, type='program', version=None, citation=None): self.name = name self.classification = classification self.description = description self.location = location self.type = type self.version = version self.citation = citation def __str__(self): return "<ihm.Software(%s)>" % repr(self.name) # Software compares equal if the names and versions are the same def _eq_vals(self): return (self.name, self.version) def __eq__(self, other): return self._eq_vals() == other._eq_vals() def __hash__(self): return hash(self._eq_vals()) class Grant: """Information on funding support for the modeling. See :attr:`System.grants`. :param str funding_organization: The name of the organization providing the funding, e.g. "National Institutes of Health". :param str country: The country that hosts the funding organization, e.g. "United States". :param str grant_number: Identifying information for the grant, e.g. "1R01GM072999-01". """ def __init__(self, funding_organization, country, grant_number): self.funding_organization = funding_organization self.country = country self.grant_number = grant_number class Citation: """A publication that describes the modeling. Generally citations are added to :attr:`System.citations` or passed to :class:`ihm.Software` or :class:`ihm.restraint.EM3DRestraint` objects. :param str pmid: The PubMed ID. :param str title: Full title of the publication. :param str journal: Abbreviated journal name.
:param volume: Journal volume as int for a plain number or str for journals adding a label to the number (e.g. "46(W1)" for a web server issue). :param page_range: The page (int) or page range (as a 2-element int tuple). Using str also works for labelled page numbers. :param int year: Year of publication. :param authors: All authors in order, as a list of strings (last name followed by initials, e.g. "Smith, A.J."). :param str doi: Digital Object Identifier of the publication. :param bool is_primary: Denotes the most pertinent publication for the modeling itself (as opposed to a method or piece of software used in the protocol). Only one such publication is allowed, and it is assigned the ID "primary" in the mmCIF file. """ def __init__(self, pmid, title, journal, volume, page_range, year, authors, doi, is_primary=False): self.title, self.journal, self.volume = title, journal, volume self.page_range, self.year = page_range, year self.pmid, self.doi = pmid, doi self.authors = authors if authors is not None else [] self.is_primary = is_primary @classmethod def from_pubmed_id(cls, pubmed_id, is_primary=False): """Create a Citation from just a PubMed ID. This is done by querying NCBI's web API, so requires network access. :param int pubmed_id: The PubMed identifier. :param bool is_primary: Denotes the most pertinent publication for the modeling itself; see :class:`Citation` for more info. :return: A new Citation for the given identifier. :rtype: :class:`Citation` """ def get_doi(ref): for art_id in ref['articleids']: if art_id['idtype'] == 'doi': return art_id['value'] def get_page_range(ref): rng = ref['pages'].split('-') if len(rng) == 2 and len(rng[1]) < len(rng[0]): # map ranges like "2730-43" to 2730,2743 not 2730, 43 rng[1] = rng[0][:len(rng[0]) - len(rng[1])] + rng[1] # Handle one page or empty page range if len(rng) == 1: rng = rng[0] if rng == '': rng = None return rng url = ('https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esummary.fcgi' '?db=pubmed&retmode=json&rettype=abstract&id=%s' % pubmed_id) fh = urllib.request.urlopen(url) j = json.load(fh) fh.close() ref = j['result'][str(pubmed_id)] authors = [x['name'] for x in ref['authors'] if x['authtype'] == 'Author'] # PubMed authors are usually of the form "Lastname AB" but PDB uses # "Lastname, A.B." so map one to the other if possible r = re.compile(r'(^\w+.*?)\s+(\w+)$') def auth_sub(m): return m.group(1) + ", " + "".join(initial + "." for initial in m.group(2)) authors = [r.sub(auth_sub, auth) for auth in authors] return cls(pmid=pubmed_id, title=ref['title'], journal=ref['source'], volume=ref['volume'] or None, page_range=get_page_range(ref), year=ref['pubdate'].split()[0], authors=authors, doi=get_doi(ref), is_primary=is_primary) class ChemComp: """A chemical component from which :class:`Entity` objects are constructed. Usually these are amino acids (see :class:`LPeptideChemComp`) or nucleic acids (see :class:`DNAChemComp` and :class:`RNAChemComp`), but non-polymers such as ligands or water (see :class:`NonPolymerChemComp` and :class:`WaterChemComp`) and saccharides (see :class:`SaccharideChemComp`) are also supported. For standard amino and nucleic acids, it is generally easier to use a :class:`Alphabet` and refer to the components with their one-letter (amino acids, RNA) or two-letter (DNA) codes. :param str id: A globally unique identifier for this component (usually three letters). :param str code: A shorter identifier (usually one letter) that only needs to be unique in the entity. 
:param str code_canonical: Canonical version of `code` (which need not be unique). :param str name: A longer human-readable name for the component. :param str formula: The chemical formula. This is a space-separated list of the element symbols in the component, each followed by an optional count (if omitted, 1 is assumed). The formula is terminated with the formal charge (if not zero). The element list should be sorted alphabetically, unless carbon is present, in which case C and H precede the rest of the elements. For example, water would be "H2 O" and arginine (with +1 formal charge) "C6 H15 N4 O2 1". :param str ccd: The chemical component dictionary (CCD) where this component is defined. Can be "core" for the wwPDB CCD (https://www.wwpdb.org/data/ccd), "ma" for the ModelArchive CCD, or "local" for a novel component that is defined in the mmCIF file itself. If unspecified, defaults to "core" unless ``descriptors`` is given in which case it defaults to "local". This information is essentially ignored by python-ihm (since the IHM dictionary has no support for custom CCDs) but is used by python-modelcif. :param list descriptors: When ``ccd`` is "local", this can be one or more descriptor objects that describe the chemistry. python-ihm does not define any, but python-modelcif does. For example, glycine would have ``id='GLY', code='G', code_canonical='G'`` while selenomethionine would use ``id='MSE', code='MSE', code_canonical='M'``, guanosine (RNA) ``id='G', code='G', code_canonical='G'``, and deoxyguanosine (DNA) ``id='DG', code='DG', code_canonical='G'``. """ type = 'other' _element_mass = {'H': 1.008, 'C': 12.011, 'N': 14.007, 'O': 15.999, 'P': 30.974, 'S': 32.060, 'Se': 78.971, 'Fe': 55.845, 'Ac': 227.028, 'Ag': 107.868, 'Al': 26.982, 'Ar': 39.948, 'As': 74.922, 'Au': 196.966, 'B': 10.81, 'Ba': 137.327, 'Be': 9.012, 'Bi': 208.98, 'Br': 79.904, 'Ca': 40.078, 'Cd': 112.414, 'Ce': 140.116, 'Cl': 35.453, 'Co': 58.933, 'Cr': 51.996, 'Cs': 132.905, 'Cu': 63.546, 'Dy': 162.5, 'Er': 167.259, 'Eu': 151.964, 'F': 18.998, 'Ga': 69.723, 'Gd': 157.25, 'Ge': 72.53, 'He': 4.003, 'Hf': 178.486, 'Hg': 200.592, 'Ho': 164.93, 'I': 126.904, 'In': 114.818, 'Ir': 192.217, 'K': 39.098, 'Kr': 83.798, 'La': 138.905, 'Li': 6.938, 'Lu': 174.967, 'Mg': 24.305, 'Mn': 54.938, 'Mo': 95.95, 'Na': 22.99, 'Nb': 92.906, 'Nd': 144.242, 'Ne': 20.180, 'Ni': 58.693, 'Np': 237.0, 'Os': 190.23, 'Pa': 231.036, 'Pb': 207.2, 'Pd': 106.42, 'Pr': 140.908, 'Pt': 195.084, 'Ra': 226.025, 'Rb': 85.468, 'Re': 186.207, 'Rh': 102.906, 'Ru': 101.07, 'Sb': 121.760, 'Sc': 44.956, 'Si': 28.086, 'Sm': 150.36, 'Sn': 118.710, 'Sr': 87.62, 'Ta': 180.948, 'Tb': 158.925, 'Te': 127.6, 'Th': 232.038, 'Ti': 47.867, 'Tl': 204.383, 'Tm': 168.934, 'U': 238.029, 'V': 50.942, 'W': 183.84, 'Xe': 131.293, 'Y': 88.906, 'Yb': 173.045, 'Zn': 65.38, 'Zr': 91.224} def __init__(self, id, code, code_canonical, name=None, formula=None, ccd=None, descriptors=None): self.id = id self.code, self.code_canonical, self.name = code, code_canonical, name self.formula = formula self.ccd, self.descriptors = ccd, descriptors def __str__(self): return ('<%s.%s(%s)>' % (self.__class__.__module__, self.__class__.__name__, self.id)) def __get_weight(self): # Calculate weight from formula if self.formula in (None, unknown): return spl = self.formula.split() # Remove formal charge if present if len(spl) > 0 and spl[-1].isdigit(): del spl[-1] r = re.compile(r'(\D+)(\d*)$') weight = 0. 
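        # Each remaining fragment is an element symbol followed by an
        # optional count (e.g. "C6" or "O"); look up each element's mass
        # and accumulate the total.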
for s in spl: m = r.match(s) if m is None: raise ValueError("Bad formula fragment: %s" % s) emass = self._element_mass.get(m.group(1), None) if emass: weight += emass * (int(m.group(2)) if m.group(2) else 1) elif m.group(1) != 'X': # If element is unknown, weight is unknown too # Element 'X' is used for GLX/ASX and has zero weight return None return weight formula_weight = property( __get_weight, doc="Formula weight (dalton). This is calculated automatically from " "the chemical formula and known atomic masses.") # Equal if all identifiers are the same def __eq__(self, other): return ((self.code, self.code_canonical, self.id, self.type) == (other.code, other.code_canonical, other.id, other.type)) def __hash__(self): return hash((self.code, self.code_canonical, self.id, self.type)) class PeptideChemComp(ChemComp): """A single peptide component. Usually :class:`LPeptideChemComp` is used instead (except for glycine) to specify chirality. See :class:`ChemComp` for a description of the parameters.""" type = 'peptide linking' class LPeptideChemComp(PeptideChemComp): """A single peptide component with (normal) L- chirality. See :class:`ChemComp` for a description of the parameters.""" type = 'L-peptide linking' class DPeptideChemComp(PeptideChemComp): """A single peptide component with (unusual) D- chirality. See :class:`ChemComp` for a description of the parameters.""" type = 'D-peptide linking' class DNAChemComp(ChemComp): """A single DNA component. See :class:`ChemComp` for a description of the parameters.""" type = 'DNA linking' class RNAChemComp(ChemComp): """A single RNA component. See :class:`ChemComp` for a description of the parameters.""" type = 'RNA linking' class SaccharideChemComp(ChemComp): """A saccharide chemical component. Usually a subclass that specifies the chirality and linkage (e.g. :class:`LSaccharideBetaChemComp`) is used. :param str id: A globally unique identifier for this component. :param str name: A longer human-readable name for the component. :param str formula: The chemical formula. See :class:`ChemComp` for more details. :param str ccd: The chemical component dictionary (CCD) where this component is defined. See :class:`ChemComp` for more details. :param list descriptors: Information on the component's chemistry. See :class:`ChemComp` for more details. """ type = "saccharide" def __init__(self, id, name=None, formula=None, ccd=None, descriptors=None): super().__init__( id, id, id, name=name, formula=formula, ccd=ccd, descriptors=descriptors) class LSaccharideChemComp(SaccharideChemComp): """A single saccharide component with L-chirality and unspecified linkage. See :class:`SaccharideChemComp` for a description of the parameters.""" type = "L-saccharide" class LSaccharideAlphaChemComp(LSaccharideChemComp): """A single saccharide component with L-chirality and alpha linkage. See :class:`SaccharideChemComp` for a description of the parameters.""" type = "L-saccharide, alpha linking" class LSaccharideBetaChemComp(LSaccharideChemComp): """A single saccharide component with L-chirality and beta linkage. See :class:`SaccharideChemComp` for a description of the parameters.""" type = "L-saccharide, beta linking" class DSaccharideChemComp(SaccharideChemComp): """A single saccharide component with D-chirality and unspecified linkage. See :class:`SaccharideChemComp` for a description of the parameters.""" type = "D-saccharide" class DSaccharideAlphaChemComp(DSaccharideChemComp): """A single saccharide component with D-chirality and alpha linkage. 
See :class:`SaccharideChemComp` for a description of the parameters.""" type = "D-saccharide, alpha linking" class DSaccharideBetaChemComp(DSaccharideChemComp): """A single saccharide component with D-chirality and beta linkage. See :class:`SaccharideChemComp` for a description of the parameters.""" type = "D-saccharide, beta linking" class NonPolymerChemComp(ChemComp): """A non-polymer chemical component, such as a ligand or a non-standard residue (for crystal waters, use :class:`WaterChemComp`). :param str id: A globally unique identifier for this component. :param str code_canonical: Canonical one-letter identifier. This is used for non-standard residues and should be the one-letter code of the closest standard residue (or by default, 'X'). :param str name: A longer human-readable name for the component. :param str formula: The chemical formula. See :class:`ChemComp` for more details. :param str ccd: The chemical component dictionary (CCD) where this component is defined. See :class:`ChemComp` for more details. :param list descriptors: Information on the component's chemistry. See :class:`ChemComp` for more details. """ type = "non-polymer" def __init__(self, id, code_canonical='X', name=None, formula=None, ccd=None, descriptors=None): super().__init__( id, id, code_canonical, name=name, formula=formula, ccd=ccd, descriptors=descriptors) class WaterChemComp(NonPolymerChemComp): """The chemical component for crystal water. """ def __init__(self): super().__init__('HOH', name='WATER', formula="H2 O") class Alphabet: """A mapping from codes (usually one-letter, or two-letter for DNA) to chemical components. These classes can be used to construct sequences of components when creating an :class:`Entity`. They can also be used like a Python dict to get standard components, e.g.:: a = ihm.LPeptideAlphabet() met = a['M'] gly = a['G'] See :class:`LPeptideAlphabet`, :class:`RNAAlphabet`, :class:`DNAAlphabet`. """ def __getitem__(self, key): return self._comps[key] def __contains__(self, key): return key in self._comps keys = property(lambda self: self._comps.keys()) values = property(lambda self: self._comps.values()) items = property(lambda self: self._comps.items()) class LPeptideAlphabet(Alphabet): """A mapping from one-letter amino acid codes (e.g. H, M) to L-amino acids (as :class:`LPeptideChemComp` objects, except for achiral glycine which maps to :class:`PeptideChemComp`). Some other common modified residues are also included (e.g. MSE). For these their full name rather than a one-letter code is used. 
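    For example, standard and modified residues can be looked up in the
    same way (a brief illustrative sketch)::

        a = ihm.LPeptideAlphabet()
        met = a['M']     # standard residue, one-letter code
        mse = a['MSE']   # modified residue, full component ID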
""" _comps = dict([code, LPeptideChemComp(id, code, code, name, formula)] for code, id, name, formula in [ ('A', 'ALA', 'ALANINE', 'C3 H7 N O2'), ('C', 'CYS', 'CYSTEINE', 'C3 H7 N O2 S'), ('D', 'ASP', 'ASPARTIC ACID', 'C4 H7 N O4'), ('E', 'GLU', 'GLUTAMIC ACID', 'C5 H9 N O4'), ('F', 'PHE', 'PHENYLALANINE', 'C9 H11 N O2'), ('H', 'HIS', 'HISTIDINE', 'C6 H10 N3 O2 1'), ('I', 'ILE', 'ISOLEUCINE', 'C6 H13 N O2'), ('K', 'LYS', 'LYSINE', 'C6 H15 N2 O2 1'), ('L', 'LEU', 'LEUCINE', 'C6 H13 N O2'), ('M', 'MET', 'METHIONINE', 'C5 H11 N O2 S'), ('N', 'ASN', 'ASPARAGINE', 'C4 H8 N2 O3'), ('P', 'PRO', 'PROLINE', 'C5 H9 N O2'), ('Q', 'GLN', 'GLUTAMINE', 'C5 H10 N2 O3'), ('R', 'ARG', 'ARGININE', 'C6 H15 N4 O2 1'), ('S', 'SER', 'SERINE', 'C3 H7 N O3'), ('T', 'THR', 'THREONINE', 'C4 H9 N O3'), ('V', 'VAL', 'VALINE', 'C5 H11 N O2'), ('W', 'TRP', 'TRYPTOPHAN', 'C11 H12 N2 O2'), ('Y', 'TYR', 'TYROSINE', 'C9 H11 N O3'), ('B', 'ASX', 'ASP/ASN AMBIGUOUS', 'C4 H6 N O2 X2'), ('Z', 'GLX', 'GLU/GLN AMBIGUOUS', 'C5 H8 N O2 X2'), ('U', 'SEC', 'SELENOCYSTEINE', 'C3 H7 N O2 Se')]) _comps['G'] = PeptideChemComp('GLY', 'G', 'G', name='GLYCINE', formula="C2 H5 N O2") # common non-standard L-amino acids _comps.update([id, LPeptideChemComp(id, id, canon, name, formula)] for id, canon, name, formula in [ ('MSE', 'M', 'SELENOMETHIONINE', 'C5 H11 N O2 Se'), ('UNK', 'X', 'UNKNOWN', 'C4 H9 N O2')]) class DPeptideAlphabet(Alphabet): """A mapping from D-amino acid codes (e.g. DHI, MED) to D-amino acids (as :class:`DPeptideChemComp` objects, except for achiral glycine which maps to :class:`PeptideChemComp`). See :class:`LPeptideAlphabet` for more details. """ _comps = dict([code, DPeptideChemComp(code, code, canon, name, formula)] for canon, code, name, formula in [ ('A', 'DAL', 'D-ALANINE', 'C3 H7 N O2'), ('C', 'DCY', 'D-CYSTEINE', 'C3 H7 N O2 S'), ('D', 'DAS', 'D-ASPARTIC ACID', 'C4 H7 N O4'), ('E', 'DGL', 'D-GLUTAMIC ACID', 'C5 H9 N O4'), ('F', 'DPN', 'D-PHENYLALANINE', 'C9 H11 N O2'), ('H', 'DHI', 'D-HISTIDINE', 'C6 H10 N3 O2 1'), ('I', 'DIL', 'D-ISOLEUCINE', 'C6 H13 N O2'), ('K', 'DLY', 'D-LYSINE', 'C6 H14 N2 O2'), ('L', 'DLE', 'D-LEUCINE', 'C6 H13 N O2'), ('M', 'MED', 'D-METHIONINE', 'C5 H11 N O2 S'), ('N', 'DSG', 'D-ASPARAGINE', 'C4 H8 N2 O3'), ('P', 'DPR', 'D-PROLINE', 'C5 H9 N O2'), ('Q', 'DGN', 'D-GLUTAMINE', 'C5 H10 N2 O3'), ('R', 'DAR', 'D-ARGININE', 'C6 H15 N4 O2 1'), ('S', 'DSN', 'D-SERINE', 'C3 H7 N O3'), ('T', 'DTH', 'D-THREONINE', 'C4 H9 N O3'), ('V', 'DVA', 'D-VALINE', 'C5 H11 N O2'), ('W', 'DTR', 'D-TRYPTOPHAN', 'C11 H12 N2 O2'), ('Y', 'DTY', 'D-TYROSINE', 'C9 H11 N O3')]) _comps['G'] = PeptideChemComp('GLY', 'G', 'G', name='GLYCINE', formula="C2 H5 N O2") class RNAAlphabet(Alphabet): """A mapping from one-letter nucleic acid codes (e.g. A) to RNA (as :class:`RNAChemComp` objects).""" _comps = dict([id, RNAChemComp(id, id, id, name, formula)] for id, name, formula in [ ('A', "ADENOSINE-5'-MONOPHOSPHATE", 'C10 H14 N5 O7 P'), ('C', "CYTIDINE-5'-MONOPHOSPHATE", 'C9 H14 N3 O8 P'), ('G', "GUANOSINE-5'-MONOPHOSPHATE", 'C10 H14 N5 O8 P'), ('U', "URIDINE-5'-MONOPHOSPHATE", 'C9 H13 N2 O9 P')]) class DNAAlphabet(Alphabet): """A mapping from two-letter nucleic acid codes (e.g. 
DA) to DNA (as :class:`DNAChemComp` objects).""" _comps = dict([code, DNAChemComp(code, code, canon, name, formula)] for code, canon, name, formula in [ ('DA', 'A', "2'-DEOXYADENOSINE-5'-MONOPHOSPHATE", 'C10 H14 N5 O6 P'), ('DC', 'C', "2'-DEOXYCYTIDINE-5'-MONOPHOSPHATE", 'C9 H14 N3 O7 P'), ('DG', 'G', "2'-DEOXYGUANOSINE-5'-MONOPHOSPHATE", 'C10 H14 N5 O7 P'), ('DT', 'T', "THYMIDINE-5'-MONOPHOSPHATE", 'C10 H15 N2 O8 P')]) class EntityRange: """Part of an entity. Usually these objects are created from an :class:`Entity`, e.g. to get a range covering residues 4 through 7 in `entity` use:: entity = ihm.Entity(sequence=...) rng = entity(4,7) """ def __init__(self, entity, seq_id_begin, seq_id_end): if not entity.is_polymeric(): raise TypeError("Can only create ranges for polymeric entities") self.entity = entity self.seq_id_range = (seq_id_begin, seq_id_end) util._check_residue_range(self.seq_id_range, self.entity) def __eq__(self, other): try: return (self.entity is other.entity and self.seq_id_range == other.seq_id_range) except AttributeError: return False def __hash__(self): return hash((id(self.entity), self.seq_id_range)) # Use same ID as the original entity _id = property(lambda self: self.entity._id) class Atom: """A single atom in an entity or asymmetric unit. Usually these objects are created by calling :meth:`Residue.atom`. Note that this class does not store atomic coordinates of a given atom in a given model; for that, see :class:`ihm.model.Atom`. """ __slots__ = ['residue', 'id'] def __init__(self, residue, id): self.residue, self.id = residue, id entity = property(lambda self: self.residue.entity) asym = property(lambda self: self.residue.asym) seq_id = property(lambda self: self.residue.seq_id) class Residue: """A single residue in an entity or asymmetric unit. Usually these objects are created by calling :meth:`Entity.residue` or :meth:`AsymUnit.residue`. """ __slots__ = ['entity', 'asym', 'seq_id', '_range_id'] def __init__(self, seq_id, entity=None, asym=None): self.entity = entity self.asym = asym if entity is None and asym: self.entity = asym.entity self.seq_id = seq_id if self.entity is not None and self.entity.is_polymeric(): util._check_residue(self) def atom(self, atom_id): """Get a :class:`~ihm.Atom` in this residue with the given name.""" return Atom(residue=self, id=atom_id) def _get_auth_seq_id(self): return self.asym._get_auth_seq_id_ins_code(self.seq_id)[0] auth_seq_id = property(_get_auth_seq_id, doc="Author-provided seq_id; only makes sense " "for asymmetric units") def _get_ins_code(self): return self.asym._get_auth_seq_id_ins_code(self.seq_id)[1] ins_code = property(_get_ins_code, doc="Insertion code; only makes sense " "for asymmetric units") def _get_comp(self): return self.entity.sequence[self.seq_id - 1] comp = property(_get_comp, doc="Chemical component (residue type)") # Allow passing residues where a range is requested # (e.g. to ResidueFeature) seq_id_range = property(lambda self: (self.seq_id, self.seq_id)) class Entity: """Represent a CIF entity (with a unique sequence) :param sequence sequence: The primary sequence, as a sequence of :class:`ChemComp` objects, and/or codes looked up in `alphabet`. :param alphabet: The mapping from code to chemical components to use (it is not necessary to instantiate this class). :type alphabet: :class:`Alphabet` :param str description: A short text name for the sequence. :param str details: Longer text describing the sequence. :param source: The method by which the sample for this entity was produced. 
:type source: :class:`ihm.source.Source` :param references: Information about this entity stored in external databases (for example the sequence in UniProt) :type references: sequence of :class:`ihm.reference.Reference` objects The sequence for an entity can be specified explicitly as a list of chemical components, or (more usually) as a list or string of codes, or a mixture of both. For example:: # Construct with a string of one-letter amino acid codes protein = ihm.Entity('AHMD') # Some less common amino acids (e.g. MSE) have three-letter codes protein_with_mse = ihm.Entity(['A', 'H', 'MSE', 'D']) # Can use a non-default alphabet to make DNA or RNA sequences dna = ihm.Entity(('DA', 'DC'), alphabet=ihm.DNAAlphabet) rna = ihm.Entity('AC', alphabet=ihm.RNAAlphabet) # Can pass explicit ChemComp objects by looking them up in Alphabets dna_al = ihm.DNAAlphabet() rna_al = ihm.RNAAlphabet() dna_rna_hybrid = ihm.Entity((dna_al['DG'], rna_al['C'])) # For unusual components (e.g. modified residues or ligands), # new ChemComp objects can be constructed psu = ihm.RNAChemComp(id='PSU', code='PSU', code_canonical='U', name="PSEUDOURIDINE-5'-MONOPHOSPHATE", formula='C9 H13 N2 O9 P') rna_with_psu = ihm.Entity(('A', 'C', psu), alphabet=ihm.RNAAlphabet) For more examples, see the `ligands and water example `_. All entities should be stored in the top-level System object; see :attr:`System.entities`. """ # noqa: E501 _force_polymer = None _hint_branched = None # Set to False to allow invalid seq_ids for residue or residue_range; # this is done, for example, when reading a file. _range_check = True def __get_type(self): if self.is_polymeric(): return 'polymer' elif self.is_branched(): return 'branched' else: return 'water' if self.sequence[0].code == 'HOH' else 'non-polymer' type = property(__get_type) def __get_src_method(self): if self.source: return self.source.src_method elif self.type == 'water': return 'nat' else: return 'man' def __set_src_method(self, val): raise TypeError("src_method is read-only; assign an appropriate " "subclass of ihm.source.Source to source instead") src_method = property(__get_src_method, __set_src_method) def __get_weight(self): weight = 0. for s in self.sequence: w = s.formula_weight # If any component's weight is unknown, the total is too if w: weight += w else: return None return weight formula_weight = property( __get_weight, doc="Formula weight (dalton). This is calculated automatically " "from that of the chemical components.") def __init__(self, sequence, alphabet=LPeptideAlphabet, description=None, details=None, source=None, references=[]): def get_chem_comp(s): if isinstance(s, ChemComp): return s else: return alphabet._comps[s] self.sequence = tuple(get_chem_comp(s) for s in sequence) self.description, self.details = description, details self.source = source self.references = [] self.references.extend(references) #: String descriptors of branched chemical structure. #: These generally only make sense for oligosaccharide entities, #: and should be a list of :class:`~ihm.BranchDescriptor` objects. self.branch_descriptors = [] #: Any links between components in a branched entity. #: This is a list of :class:`~ihm.BranchLink` objects. 
self.branch_links = [] def __str__(self): return "<ihm.Entity(%s)>" % self.description def is_polymeric(self): """Return True iff this entity represents a polymer, such as an amino acid sequence or DNA/RNA chain (and not a ligand or water)""" return (self._force_polymer or (len(self.sequence) == 0 and not self._hint_branched) or len(self.sequence) > 1 and any(isinstance(x, (PeptideChemComp, DNAChemComp, RNAChemComp)) for x in self.sequence)) def is_branched(self): """Return True iff this entity is branched (generally an oligosaccharide)""" return ((len(self.sequence) > 1 and isinstance(self.sequence[0], SaccharideChemComp)) or (len(self.sequence) == 0 and self._hint_branched)) def residue(self, seq_id): """Get a :class:`Residue` at the given sequence position""" return Residue(entity=self, seq_id=seq_id) # Entities are considered identical if they have the same sequence, # unless they are branched def __eq__(self, other): if not isinstance(other, Entity): return False if self.is_branched() or other.is_branched(): return self is other else: return self.sequence == other.sequence def __hash__(self): if self.is_branched(): return hash(id(self)) else: return hash(self.sequence) def __call__(self, seq_id_begin, seq_id_end): return EntityRange(self, seq_id_begin, seq_id_end) def __get_seq_id_range(self): if self.is_polymeric() or self.is_branched(): return (1, len(self.sequence)) else: # Nonpolymers don't have the concept of seq_id return (None, None) seq_id_range = property(__get_seq_id_range, doc="Sequence range") class AsymUnitRange: """Part of an asymmetric unit. Usually these objects are created from an :class:`AsymUnit`, e.g. to get a range covering residues 4 through 7 in `asym` use:: asym = ihm.AsymUnit(entity) rng = asym(4,7) """ def __init__(self, asym, seq_id_begin, seq_id_end): if asym.entity is not None and not asym.entity.is_polymeric(): raise TypeError("Can only create ranges for polymeric entities") self.asym = asym self.seq_id_range = (seq_id_begin, seq_id_end) util._check_residue_range(self.seq_id_range, self.entity) def __eq__(self, other): try: return (self.asym is other.asym and self.seq_id_range == other.seq_id_range) except AttributeError: return False def __hash__(self): return hash((id(self.asym), self.seq_id_range)) # Use same ID and entity as the original asym unit _id = property(lambda self: self.asym._id) _ordinal = property(lambda self: self.asym._ordinal) entity = property(lambda self: self.asym.entity) details = property(lambda self: self.asym.details) class AsymUnitSegment: """An aligned part of an asymmetric unit. Usually these objects are created from an :class:`AsymUnit`, e.g. to get a segment covering residues 1 through 3 in `asym` use:: asym = ihm.AsymUnit(entity) seg = asym.segment('--ACG', 1, 3) """ def __init__(self, asym, gapped_sequence, seq_id_begin, seq_id_end): self.asym = asym self.gapped_sequence = gapped_sequence self.seq_id_range = (seq_id_begin, seq_id_end) class AsymUnit: """An asymmetric unit, i.e. a unique instance of an Entity that was modeled. Note that this class should not be used to describe crystal waters; for that, see :class:`ihm.WaterAsymUnit`. :param entity: The unique sequence of this asymmetric unit. :type entity: :class:`Entity` :param str details: Longer text description of this unit. :param auth_seq_id_map: Mapping from internal 1-based consecutive residue numbering (`seq_id`) to PDB "author-provided" numbering (`auth_seq_id` plus an optional `ins_code`).
This can either be an int offset, in which case ``auth_seq_id = seq_id + auth_seq_id_map`` with no insertion codes, or a mapping type (dict, list, tuple) in which case ``auth_seq_id = auth_seq_id_map[seq_id]`` with no insertion codes, or ``auth_seq_id, ins_code = auth_seq_id_map[seq_id]`` - i.e. the output of the mapping is either the author-provided number, or a 2-element tuple containing that number and an insertion code. (Note that if a `list` or `tuple` is used for the mapping, the first element in the list or tuple does **not** correspond to the first residue and will never be used - since `seq_id` can never be zero.) The default if not specified, or not in the mapping, is for ``auth_seq_id == seq_id`` and for no insertion codes to be used. :param str id: User-specified ID (usually a string of one or more upper-case letters, e.g. A, B, C, AA). If not specified, IDs are automatically assigned alphabetically. :param str strand_id: PDB or "author-provided" strand/chain ID. If not specified, it will be the same as the regular ID. :param orig_auth_seq_id_map: Mapping from internal 1-based consecutive residue numbering (`seq_id`) to original "author-provided" numbering. This differs from `auth_seq_id_map` as the original numbering need not follow any defined scheme, while `auth_seq_id_map` must follow certain PDB-defined rules. This can be any mapping type (dict, list, tuple) in which case ``orig_auth_seq_id = orig_auth_seq_id_map[seq_id]``. If the mapping is None (the default), or a given `seq_id` cannot be found in the mapping, ``orig_auth_seq_id = auth_seq_id``. This mapping is only used in the various `scheme` tables, such as ``pdbx_poly_seq_scheme``. See :attr:`System.asym_units`. """ number_of_molecules = 1 def __init__(self, entity, details=None, auth_seq_id_map=0, id=None, strand_id=None, orig_auth_seq_id_map=None): if (entity is not None and entity.type == 'water' and not isinstance(self, WaterAsymUnit)): raise TypeError("Use WaterAsymUnit instead for creating waters") self.entity, self.details = entity, details self.auth_seq_id_map = auth_seq_id_map self.orig_auth_seq_id_map = orig_auth_seq_id_map self.id = id self._strand_id = strand_id #: For branched entities read from files, mapping from provisional #: to final internal numbering (`seq_id`), or None if no mapping is #: necessary. See :meth:`ihm.model.Model.add_atom`. self.num_map = None def _get_auth_seq_id_ins_code(self, seq_id): if isinstance(self.auth_seq_id_map, numbers.Integral): return seq_id + self.auth_seq_id_map, None else: try: ret = self.auth_seq_id_map[seq_id] if isinstance(ret, (numbers.Integral, str)): return ret, None else: return ret except (KeyError, IndexError): return seq_id, None def _get_pdb_auth_seq_id_ins_code(self, seq_id): pdb_seq_num, ins_code = self._get_auth_seq_id_ins_code(seq_id) if self.orig_auth_seq_id_map is None: auth_seq_num = pdb_seq_num else: auth_seq_num = self.orig_auth_seq_id_map.get(seq_id, pdb_seq_num) return pdb_seq_num, auth_seq_num, ins_code def __call__(self, seq_id_begin, seq_id_end): return AsymUnitRange(self, seq_id_begin, seq_id_end) def residue(self, seq_id): """Get a :class:`Residue` at the given sequence position""" return Residue(asym=self, seq_id=seq_id) def segment(self, gapped_sequence, seq_id_begin, seq_id_end): """Get an object representing the alignment of part of this sequence. :param str gapped_sequence: Sequence of the segment, including gaps. :param int seq_id_begin: Start of the segment. :param int seq_id_end: End of the segment.
""" # todo: cache so we return the same object for same parameters return AsymUnitSegment(self, gapped_sequence, seq_id_begin, seq_id_end) seq_id_range = property(lambda self: self.entity.seq_id_range, doc="Sequence range") sequence = property(lambda self: self.entity.sequence, doc="Primary sequence") strand_id = property(lambda self: self._strand_id or self._id, doc="PDB or author-provided strand/chain ID") class WaterAsymUnit(AsymUnit): """A collection of crystal waters, all with the same "chain" ID. :param int number: The number of water molecules in this unit. For more information on this class and the rest of the parameters, see :class:`AsymUnit`. """ def __init__(self, entity, number, details=None, auth_seq_id_map=0, id=None, strand_id=None, orig_auth_seq_id_map=None): if entity.type != 'water': raise TypeError( "WaterAsymUnit can only be used for water entities") super().__init__( entity, details=details, auth_seq_id_map=auth_seq_id_map, id=id, strand_id=strand_id, orig_auth_seq_id_map=orig_auth_seq_id_map) self.number = number self._water_sequence = [entity.sequence[0]] * number seq_id_range = property(lambda self: (1, self.number), doc="Sequence range") sequence = property(lambda self: self._water_sequence, doc="Primary sequence") number_of_molecules = property(lambda self: self.number, doc="Number of molecules") class Assembly(list): """A collection of parts of the system that were modeled or probed together. :param sequence elements: Initial set of parts of the system. :param str name: Short text name of this assembly. :param str description: Longer text that describes this assembly. This is implemented as a simple list of asymmetric units (or parts of them), i.e. a list of :class:`AsymUnit` and/or :class:`AsymUnitRange` objects. An Assembly is typically assigned to one or more of - :class:`~ihm.model.Model` - :class:`ihm.protocol.Step` - :class:`ihm.analysis.Step` - :class:`~ihm.restraint.Restraint` See also :attr:`System.complete_assembly` and :attr:`System.orphan_assemblies`. Note that any duplicate assemblies will be pruned on output.""" #: :class:`Assembly` that is the immediate parent in a hierarchy, or `None` parent = None def __init__(self, elements=(), name=None, description=None): super().__init__(elements) self.name, self.description = name, description def _signature(self): """Get a Python object that represents this Assembly. Notably, two Assemblies that cover the part of the system (even if the components are in a different order) will have the same signature. Signatures are also hashable, unlike the Assembly itself.""" d = collections.defaultdict(list) for a in self: # a might be an AsymUnit or an AsymUnitRange asym = a.asym if hasattr(a, 'asym') else a d[asym].append(a.seq_id_range) ret = [] # asyms might not have IDs yet, so just put them in a consistent order for asym in sorted(d.keys(), key=lambda x: id(x)): ranges = d[asym] # Non-polymers have no ranges if all(r == (None, None) for r in ranges): ret.append((asym, None)) else: ret.append((asym, tuple(util._combine_ranges(d[asym])))) return tuple(ret) class ChemDescriptor: """Description of a non-polymeric chemical component used in the experiment. For example, this might be a fluorescent probe or cross-linking agent. This class describes the chemical structure of the component, for example with a SMILES or INCHI descriptor, so that it is uniquely defined. A descriptor is typically assigned to a :class:`ihm.restraint.CrossLinkRestraint`. 
See :mod:`ihm.cross_linkers` for chemical descriptors of some commonly-used cross-linking agents. :param str auth_name: Author-provided name :param str chem_comp_id: If this chemical is listed in the Chemical Component Dictionary, its three-letter identifier :param str chemical_name: The systematic (IUPAC) chemical name :param str common_name: Common name for the component :param str smiles: SMILES string :param str smiles_canonical: Canonical SMILES string :param str inchi: IUPAC INCHI descriptor :param str inchi_key: Hashed INCHI key See also :attr:`System.orphan_chem_descriptors`. """ def __init__(self, auth_name, chem_comp_id=None, chemical_name=None, common_name=None, smiles=None, smiles_canonical=None, inchi=None, inchi_key=None): self.auth_name, self.chem_comp_id = auth_name, chem_comp_id self.chemical_name, self.common_name = chemical_name, common_name self.smiles, self.smiles_canonical = smiles, smiles_canonical self.inchi, self.inchi_key = inchi, inchi_key class Collection: """A collection of entries belonging to single deposition or group. These are used by the archive to group multiple related entries, e.g. all entries deposited as part of a given study, or all models for a genome. An entry (:class:`System`) can belong to multiple collections. :param str id: Unique identifier (assigned by the archive). :param str name: Short name for the collection. :param str details: Longer description of the collection. See also :attr:`System.collections`. """ def __init__(self, id, name=None, details=None): self.id, self.name, self.details = id, name, details class BranchDescriptor: """String descriptor of branched chemical structure. These generally only make sense for oligosaccharide entities. See :attr:`Entity.branch_descriptors`. :param str text: The value of this descriptor. :param str type: The type of the descriptor; one of "Glycam Condensed Core Sequence", "Glycam Condensed Sequence", "LINUCS", or "WURCS". :param str program: The name of the program or library used to compute the descriptor. :param str program_version: The version of the program or library used to compute the descriptor. """ def __init__(self, text, type, program=None, program_version=None): self.text, self.type = text, type self.program, self.program_version = program, program_version class BranchLink: """A link between components in a branched entity. These generally only make sense for oligosaccharide entities. See :attr:`Entity.branch_links`. :param int num1: 1-based index of the first component. :param str atom_id1: Name of the first atom in the linkage. :param str leaving_atom_id1: Name of the first leaving atom. :param int num2: 1-based index of the second component. :param str atom_id2: Name of the second atom in the linkage. :param str leaving_atom_id2: Name of the second leaving atom. :param str order: Bond order (e.g. sing, doub, trip). :param str details: More information about this link. """ def __init__(self, num1, atom_id1, leaving_atom_id1, num2, atom_id2, leaving_atom_id2, order=None, details=None): self.num1, self.atom_id1 = num1, atom_id1 self.num2, self.atom_id2 = num2, atom_id2 self.leaving_atom_id1 = leaving_atom_id1 self.leaving_atom_id2 = leaving_atom_id2 self.order, self.details = order, details class DataUsage: """Information on how the data in the file can be used. Do not use this class itself, but one of its subclasses, either :class:`License` or :class:`Disclaimer`. DataUsage objects are stored in :data:`ihm.System.data_usage`. :param str details: Information about the data usage. 
:param str name: An optional well-known name for the usage. :param str url: An optional URL providing more information. """ type = 'other' def __init__(self, details, name=None, url=None): self.details, self.name, self.url = details, name, url class License(DataUsage): """A license describing how the data in the file can be used. See :class:`DataUsage` for more information.""" type = 'license' class Disclaimer(DataUsage): """A disclaimer relating to usage of the data in the file. See :class:`DataUsage` for more information.""" type = 'disclaimer' class Revision: """Represent part of the history of a :class:`System`. :param str data_content_type: The type of file that was changed. :param int major: Major version number. :param int minor: Minor version number. :param date: Release date. :type date: :class:`datetime.date` Generally these objects are added to :attr:`System.revisions`. """ def __init__(self, data_content_type, minor, major, date): self.data_content_type = data_content_type self.minor, self.major = minor, major self.date = date #: More details of the changes, as :class:`RevisionDetails` objects self.details = [] #: Collection of categories (as strings) updated with this revision self.groups = [] #: Categories (as strings) updated with this revision self.categories = [] #: Items (as strings) updated with this revision self.items = [] class RevisionDetails: """More information on the changes in a given :class:`Revision`. :param str provider: The provider (author, repository) of the revision. :param str type: Classification of the revision. :param str description: Additional details describing the revision. These objects are typically stored in :attr:`Revision.details`. """ def __init__(self, provider, type, description): self.provider = provider self.type = type self.description = description python-ihm-2.7/ihm/analysis.py000066400000000000000000000070161503573337200164440ustar00rootroot00000000000000"""Classes for handling the analysis of a modeling run. """ from ihm.util import _text_choice_property class Step: """A single step in an :class:`Analysis`. Normally one of the more specific derived classes is used; see :class:`FilterStep`, :class:`ClusterStep`, :class:`RescoreStep`, :class:`ValidationStep`, and :class:`EmptyStep`, although this base class can be used for a generic 'other' step. :param str feature: feature energy/score;RMSD;dRMSD;other :param int num_models_begin: The number of models at the beginning of the step :param int num_models_end: The number of models at the end of the step :param assembly: The part of the system analyzed in this step :type assembly: :class:`~ihm.Assembly` :param dataset_group: The collection of datasets used in this analysis, if applicable :type dataset_group: :class:`~ihm.dataset.DatasetGroup` :param software: The software used in this step :type software: :class:`~ihm.Software` :param script_file: Reference to the external file containing the script used in this step (usually a :class:`~ihm.location.WorkflowFileLocation`). 
:type script_file: :class:`~ihm.location.Location` :param str details: Additional text describing this step """ type = 'other' def _get_report(self): return ("%s (%s->%s models)" % (self.type, self.num_models_begin, self.num_models_end)) def __init__(self, feature, num_models_begin, num_models_end, assembly=None, dataset_group=None, software=None, script_file=None, details=None): self.assembly, self.dataset_group = assembly, dataset_group self.feature, self.software = feature, software self.num_models_begin = num_models_begin self.num_models_end = num_models_end self.script_file = script_file self.details = details feature = _text_choice_property( "feature", ["energy/score", "RMSD", "dRMSD", "other", "none"], doc="The feature used in the analysis, if applicable") class FilterStep(Step): """A single filtering step in an :class:`Analysis`. See :class:`Step` for a description of the parameters. """ type = 'filter' class ClusterStep(Step): """A single clustering step in an :class:`Analysis`. See :class:`Step` for a description of the parameters. """ type = 'cluster' class RescoreStep(Step): """A single rescoring step in an :class:`Analysis`. See :class:`Step` for a description of the parameters. """ type = 'rescore' class ValidationStep(Step): """A single validation step in an :class:`Analysis`. See :class:`Step` for a description of the parameters. """ type = 'validation' class EmptyStep(Step): """A 'do nothing' step in an :class:`Analysis`. This can be used if modeling outputs were used directly without any kind of analysis. """ type = 'none' def __init__(self): super().__init__( feature='none', num_models_begin=None, num_models_end=None) class Analysis: """Analysis of a modeling run. Each analysis consists of a number of steps (e.g. filtering, rescoring, clustering). A modeling run may be followed by any number of separate analyses. See :attr:`ihm.protocol.Protocol.analyses`. """ def __init__(self): #: All analysis steps (:class:`Step` objects) self.steps = [] python-ihm-2.7/ihm/citations.py000066400000000000000000000125551503573337200166220ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Citations for some commonly-used software packages. Each of these is an instance of the :class:`ihm.Citation` class, and so can be used anywhere these objects are required, generally for :class:`ihm.Software`. 
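For example, to record that MODELLER was used in a modeling step (an
illustrative sketch; the classification, description, location and version
strings here are assumptions)::

    software = ihm.Software(
        name='MODELLER', classification='comparative modeling',
        description='Comparative modeling by satisfaction '
                    'of spatial restraints',
        location='https://salilab.org/modeller/', version='10.5',
        citation=ihm.citations.modeller)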
""" import ihm imp = ihm.Citation( pmid='22272186', title='Putting the pieces together: integrative modeling platform ' 'software for structure determination of macromolecular assemblies', journal='PLoS Biol', volume=10, page_range='e1001244', year=2012, authors=['Russel, D.', 'Lasker, K.', 'Webb, B.', 'Velázquez-Muriel, J.', 'Tjioe, E.', 'Schneidman-Duhovny, D.', 'Peterson, B.', 'Sali, A.'], doi='10.1371/journal.pbio.1001244') pmi = ihm.Citation( pmid='31396911', title='Modeling Biological Complexes Using Integrative Modeling Platform.', journal='Methods Mol Biol', volume=2022, page_range=(353, 377), year=2019, authors=['Saltzberg, D.', 'Greenberg, C.H.', 'Viswanath, S.', 'Chemmama, I.', 'Webb, B.', 'Pellarin, R.', 'Echeverria, I.', 'Sali, A.'], doi='10.1007/978-1-4939-9608-7_15') modeller = ihm.Citation( pmid='8254673', title='Comparative protein modelling by satisfaction of ' 'spatial restraints.', journal='J Mol Biol', volume=234, page_range=(779, 815), year=1993, authors=['Sali, A.', 'Blundell, T.L.'], doi='10.1006/jmbi.1993.1626') psipred = ihm.Citation( pmid='10493868', title='Protein secondary structure prediction based on position-specific ' 'scoring matrices.', journal='J Mol Biol', volume=292, page_range=(195, 202), year=1999, authors=['Jones, D.T.'], doi='10.1006/jmbi.1999.3091') disopred = ihm.Citation( pmid='25391399', title='DISOPRED3: precise disordered region predictions with annotated ' 'protein-binding activity.', journal='Bioinformatics', volume=31, page_range=(857, 863), year=2015, authors=['Jones, D.T.', 'Cozzetto D'], doi='10.1093/bioinformatics/btu744') hhpred = ihm.Citation( pmid='15980461', title='The HHpred interactive server for protein homology detection ' 'and structure prediction.', journal='Nucleic Acids Res', volume=33, page_range=('W244', 'W248'), year=2005, authors=['Söding, J.', 'Biegert, A.', 'Lupas, A.N.'], doi='10.1093/nar/gki408') relion = ihm.Citation( pmid='23000701', title='RELION: implementation of a Bayesian approach to cryo-EM ' 'structure determination.', journal='J Struct Biol', volume=180, page_range=(519, 530), year=2012, authors=['Scheres, S.H.'], doi='10.1016/j.jsb.2012.09.006') phyre2 = ihm.Citation( pmid='25950237', title='The Phyre2 web portal for protein modeling, prediction ' 'and analysis.', journal='Nat Protoc', volume=10, page_range=('845', '858'), year=2015, authors=['Kelley, L.A.', 'Mezulis, S.', 'Yates, C.M.', 'Wass, M.N.', 'Sternberg, M.J.'], doi='10.1038/nprot.2015.053') swiss_model = ihm.Citation( pmid='29788355', title='SWISS-MODEL: homology modelling of protein structures ' 'and complexes.', journal='Nucleic Acids Res', volume=46, page_range=('W296', 'W303'), year=2018, authors=['Waterhouse, A.', 'Bertoni, M.', 'Bienert, S.', 'Studer, G.', 'Tauriello, G.', 'Gumienny, R.', 'Heer, F.T.', 'de Beer, T.A.P.', 'Rempfer, C.', 'Bordoli, L.', 'Lepore, R.', 'Schwede, T.'], doi='10.1093/nar/gky427') alphafold2 = ihm.Citation( pmid='34265844', title='Highly accurate protein structure prediction with AlphaFold.', journal='Nature', volume=596, page_range=(583, 589), year=2021, authors=['Jumper, J.', 'Evans, R.', 'Pritzel, A.', 'Green, T.', 'Figurnov, M.', 'Ronneberger, O.', 'Tunyasuvunakool, K.', 'Bates, R.', 'Zidek, A.', 'Potapenko, A.', 'Bridgland, A.', 'Meyer, C.', 'Kohl, S.A.A.', 'Ballard, A.J.', 'Cowie, A.', 'Romera-Paredes, B.', 'Nikolov, S.', 'Jain, R.', 'Adler, J.', 'Back, T.', 'Petersen, S.', 'Reiman, D.', 'Clancy, E.', 'Zielinski, M.', 'Steinegger, M.', 'Pacholska, M.', 'Berghammer, T.', 'Bodenstein, S.', 'Silver, D.', 
'Vinyals, O.', 'Senior, A.W.', 'Kavukcuoglu, K.', 'Kohli, P.', 'Hassabis, D.'], doi='10.1038/s41586-021-03819-2') colabfold = ihm.Citation( pmid='35637307', title='ColabFold: making protein folding accessible to all.', journal='Nature Methods', volume=19, page_range=(679, 682), year=2022, authors=['Mirdita, M.', 'Schuetze, K.', 'Moriwaki, Y.', 'Heo, L.', 'Ovchinnikov, S.', 'Steinegger, M.'], doi='10.1038/s41592-022-01488-1') qmeandisco = ihm.Citation( pmid='31697312', title='QMEANDisCo-distance constraints applied on model quality ' 'estimation.', journal='Bioinformatics', volume=36, page_range=(1765, 1771), year=2019, authors=['Studer, G.', 'Rempfer, C.', 'Waterhouse, A.M.', 'Gumienny, R.', 'Haas, J.', 'Schwede, T.'], doi='10.1093/bioinformatics/btz828') mmseqs2 = ihm.Citation( pmid='30615063', title='MMseqs2 desktop and local web server app for fast, interactive ' 'sequence searches.', journal='Bioinformatics', volume=35, page_range=(2856, 2858), year=2019, authors=['Mirdita, M.', 'Steinegger, M.', 'Soeding, J.'], doi='10.1093/bioinformatics/bty1057') python-ihm-2.7/ihm/cross_linkers.py000066400000000000000000000071531503573337200175030ustar00rootroot00000000000000"""Chemical descriptors of commonly-used cross-linkers. Each of these is an instance of the :class:`ihm.ChemDescriptor` class, and so can be used anywhere these objects are required, generally for :class:`ihm.restraint.CrossLinkRestraint`. """ import ihm dss = ihm.ChemDescriptor( 'DSS', chemical_name='disuccinimidyl suberate', smiles='C1CC(=O)N(C1=O)OC(=O)CCCCCCC(=O)ON2C(=O)CCC2=O', inchi='1S/C16H20N2O8/c19-11-7-8-12(20)17(11)25-15(23)5-' '3-1-2-4-6-16(24)26-18-13(21)9-10-14(18)22/h1-10H2', inchi_key='ZWIBGKZDAWNIFC-UHFFFAOYSA-N') dsg = ihm.ChemDescriptor( 'DSG', chemical_name='disuccinimidyl glutarate', smiles='C1CC(=O)N(C1=O)OC(=O)CCCC(=O)ON2C(=O)CCC2=O', inchi='1S/C13H14N2O8/c16-8-4-5-9(17)14(8)22-12(20)2-1-3-' '13(21)23-15-10(18)6-7-11(15)19/h1-7H2', inchi_key='LNQHREYHFRFJAU-UHFFFAOYSA-N') bs3 = ihm.ChemDescriptor( 'BS3', chemical_name='bissulfosuccinimidyl suberate', smiles='C1C(C(=O)N(C1=O)OC(=O)CCCCCCC(=O)ON2C(=O)CC(C2=O)S(=O)' '(=O)O)S(=O)(=O)O', inchi='1S/C16H20N2O14S2/c19-11-7-9(33(25,26)27)15(23)17(11)31' '-13(21)5-3-1-2-4-6-14(22)32-18-12(20)8-10(16(18)24)' '34(28,29)30/h9-10H,1-8H2,(H,25,26,27)(H,28,29,30)', inchi_key='VYLDEYYOISNGST-UHFFFAOYSA-N') dsso = ihm.ChemDescriptor( 'DSSO', chemical_name='disuccinimidyl sulfoxide', smiles='O=C(CCS(CCC(ON1C(CCC1=O)=O)=O)=O)ON2C(CCC2=O)=O', inchi='1S/C14H16N2O9S/c17-9-1-2-10(18)15(9)24-13(21)5-7-' '26(23)8-6-14(22)25-16-11(19)3-4-12(16)20/h1-8H2', inchi_key='XJSVVHDQSGMHAJ-UHFFFAOYSA-N') edc = ihm.ChemDescriptor( 'EDC', chemical_name='1-ethyl-3-(3-dimethylaminopropyl)carbodiimide', smiles='CCN=C=NCCCN(C)C', inchi='1S/C8H17N3/c1-4-9-8-10-6-5-7-11(2)3/h4-7H2,1-3H3', inchi_key='LMDZBCPBFSXMTL-UHFFFAOYSA-N') dhso = ihm.ChemDescriptor( 'DHSO', chemical_name='dihydrazide sulfoxide', smiles='NNC(=O)CC[S](=O)CCC(=O)NN', inchi='1S/C6H14N4O3S/c7-9-5(11)1-3-14(13)4-2-6(12)10-8' '/h1-4,7-8H2,(H,9,11)(H,10,12)', inchi_key='XTCXQISMAWBOOT-UHFFFAOYSA-N') bmso = ihm.ChemDescriptor( 'BMSO', chemical_name='bismaleimide sulfoxide', smiles='O=C(CC[S](=O)CCC(=O)NCCN1C(=O)C=CC1=O)NCCN2C(=O)C=CC2=O', inchi='1S/C18H22N4O7S/c23-13(19-7-9-21-15(25)1-2-16(21)26)5-' '11-30(29)12-6-14(24)20-8-10-22-17(27)3-4-18(22)28/h1-' '4H,5-12H2,(H,19,23)(H,20,24)', inchi_key='PUNDHDZIOGBGHG-UHFFFAOYSA-N') sda = ihm.ChemDescriptor( 'SDA', chemical_name="succinimidyl 4,4'-azipentanoate", 
smiles='CC1(N=N1)CCC(ON2C(CCC2=O)=O)=O', inchi='1S/C9H11N3O4/c1-9(10-11-9)5-4-8(15)16-12-6(13)2-3-' '7(12)14/h2-5H2,1H3', inchi_key=' SYYLQNPWAPHRFV-UHFFFAOYSA-N') photo_leucine = ihm.ChemDescriptor( 'L-Photo-Leucine', chemical_name='L-Photo-Leucine', smiles='CC1(C[C@H](N)C(O)=O)N=N1', inchi='1S/C5H9N3O2/c1-5(7-8-5)' '2-3(6)4(9)10/h3H,2,6H2,1H3,(H,9,10)/t3-/m0/s1', inchi_key='MJRDGTVDJKACQZ-VKHMYHEASA-N') dsbu = ihm.ChemDescriptor( 'DSBU', chemical_name='disuccinimidyl dibutyric urea', smiles='O=C(NCCCC(=O)ON1C(=O)CCC1=O)NCCCC(=O)ON2C(=O)CCC2=O', inchi='S/C17H22N4O9/c22-11-5-6-12(23)20(11)29-15(26)' '3-1-9-18-17(28)19-10-2-4-16(27)30-21-13(24)7-8-14(21)' '25/h1-10H2,(H2,18,19,28)', inchi_key='XZSQCCZQFXUQCY-UHFFFAOYSA-N') phoX = ihm.ChemDescriptor( 'DSPP', chemical_name='(3,5-bis(((2,5-dioxopyrrolidin-1-yl)oxy)' 'carbonyl) phenyl)phosphonic acid') tbuphoX = ihm.ChemDescriptor( 'TBDSPP', chemical_name='tert-butyl disuccinimidyl' 'phenyl phosphonate, tBu-PhoX') python-ihm-2.7/ihm/dataset.py000066400000000000000000000165771503573337200162620ustar00rootroot00000000000000# coding=utf-8 """Classes for handling experimental datasets used by mmCIF models. """ class Dataset: """A set of input data, for example, a crystal structure or EM map. :param location: a pointer to where the dataset is stored. This is usually a subclass of :class:`~ihm.location.DatabaseLocation` if the dataset is deposited in a database such as PDB or EMDB, or :class:`~ihm.location.InputFileLocation` if the dataset is stored in an external file. :type location: :class:`ihm.location.Location` :param str details: Text giving more information about the dataset. """ _eq_keys = ['_locations'] _allow_duplicates = False # Datasets compare equal iff they are the same class, have the # same attributes, and allow_duplicates=False def _eq_vals(self): if self._allow_duplicates: return id(self) else: return tuple([self.__class__] + [getattr(self, x) for x in self._eq_keys]) def __eq__(self, other): return self._eq_vals() == other._eq_vals() def __hash__(self): return hash(self._eq_vals()) data_type = 'Other' def __init__(self, location, details=None): # The dictionary actually allows for multiple locations for a given # dataset. Support this via a private attribute so we can at least # handle reading existing files. 'location' just references the # first location in this list. self._locations = () self.location, self.details = location, details #: A list of :class:`Dataset` and/or :class:`TransformedDataset` #: objects from which this one was derived. #: For example, a 3D EM map may be derived from a set of 2D images. self.parents = [] def _add_location(self, loc): if self.location is None: self.location = loc else: self._locations += (loc,) def __get_location(self): return self._locations[0] def __set_location(self, val): self._locations = (val, ) location = property(__get_location, __set_location, doc="A pointer to where the dataset is stored") def add_primary(self, dataset): """Add another Dataset from which this one was ultimately derived, i.e. it is added as a parent, unless a parent already exists, in which case it is added as a grandparent, and so on.""" root = self while root.parents: if len(root.parents) > 1: raise ValueError("This dataset has multiple parents - don't " "know which one to add to") root = root.parents[0] root.parents.append(dataset) class TransformedDataset: """A :class:`Dataset` that should be rotated or translated before using. 
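    For example (an illustrative sketch; the parent dataset and the
    rotation/translation values shown here are assumptions)::

        import ihm.geometry
        # Identity rotation, translated 10 angstroms along x
        transform = ihm.geometry.Transformation(
            rot_matrix=[[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]],
            tr_vector=[10., 0., 0.])
        td = ihm.dataset.TransformedDataset(dataset=parent_dataset,
                                            transform=transform)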
This is typically used for derived datasets (see :attr:`Dataset.parents`) where the derived dataset lies in a different dataset from the parent (for example, it was moved to better align with the model's reference frame or other experimental data). The transformation that places the derived dataset on the parent is recorded here. :param dataset: The (parent) dataset. :type dataset: :class:`Dataset` :param transform: The rotation and translation that places a derived dataset on this dataset. :type transform: :class:`ihm.geometry.Transformation` """ def __init__(self, dataset, transform): self.dataset, self.transform = dataset, transform class DatasetGroup(list): """A set of :class:`Dataset` objects that are handled together. This is implemented as a simple list. :param sequence elements: Initial set of datasets. :param str name: Short text name of this group. :param str application: Text that shows how this group is used. :param str details: Longer text that describes this group. Normally a group is passed to one or more :class:`~ihm.protocol.Protocol` or :class:`~ihm.analysis.Analysis` objects, although unused groups can still be included in the file if desired by adding them to :attr:`ihm.System.orphan_dataset_groups`. """ # For backwards compatibility with earlier versions of this class which # didn't specify name/application/details name = application = details = None def __init__(self, elements=(), name=None, application=None, details=None): super().__init__(elements) self.name, self.application = name, application self.details = details class CXMSDataset(Dataset): """Processed cross-links from a CX-MS experiment""" data_type = 'Crosslinking-MS data' class MassSpecDataset(Dataset): """Raw mass spectrometry files such as peaklists""" data_type = 'Mass Spectrometry data' class HDXDataset(Dataset): """Data from a hydrogen/deuterium exchange experiment""" data_type = 'H/D exchange data' class PDBDataset(Dataset): """An experimentally-determined 3D structure as a set of a coordinates, usually in a PDB or mmCIF file""" data_type = 'Experimental model' class ComparativeModelDataset(Dataset): """A 3D structure determined by comparative modeling""" data_type = 'Comparative model' class IntegrativeModelDataset(Dataset): """A 3D structure determined by integrative modeling""" data_type = 'Integrative model' class DeNovoModelDataset(Dataset): """A 3D structure determined by de novo modeling""" data_type = 'De Novo model' class NMRDataset(Dataset): """A nuclear magnetic resonance (NMR) dataset""" data_type = 'NMR data' class MutagenesisDataset(Dataset): """Mutagenesis data""" data_type = 'Mutagenesis data' class EMDensityDataset(Dataset): """A 3D electron microscopy dataset""" data_type = '3DEM volume' class EMMicrographsDataset(Dataset): """Raw 2D electron micrographs""" data_type = 'EM raw micrographs' class EM2DClassDataset(Dataset): """2DEM class average""" data_type = '2DEM class average' class SASDataset(Dataset): """SAS data""" data_type = 'SAS data' class FRETDataset(Dataset): """Single molecule data from a Förster resonance energy transfer (FRET) experiment""" data_type = 'Single molecule FRET data' class EnsembleFRETDataset(Dataset): """Ensemble data from a Förster resonance energy transfer (FRET) experiment""" data_type = 'Ensemble FRET data' class YeastTwoHybridDataset(Dataset): """Yeast two-hybrid data""" data_type = 'Yeast two-hybrid screening data' class GeneticInteractionsDataset(Dataset): """Quantitative measurements of genetic interactions""" data_type = 'Quantitative measurements 
of genetic interactions' class EPRDataset(Dataset): """Electron paramagnetic resonance (EPR) data""" data_type = 'EPR data' class XRayDiffractionDataset(Dataset): """Data from X-ray diffraction""" data_type = 'X-ray diffraction data' class HydroxylRadicalFootprintingDataset(Dataset): """Data from hydroxyl radical footprinting""" data_type = 'Hydroxyl radical footprinting data' class DNAFootprintingDataset(Dataset): """Data from DNA footprinting""" data_type = 'DNA footprinting data' class PredictedContactsDataset(Dataset): """A collection of predicted contacts""" data_type = 'Predicted contacts' python-ihm-2.7/ihm/dictionary.py000066400000000000000000000406621503573337200167720ustar00rootroot00000000000000"""Classes to read in and represent an mmCIF extension dictionary""" import ihm.reader import ihm.format import ihm.format_bcif import re import itertools from ihm.reader import Handler # Handle special values for CIF data items ('.', '?', or missing entirely) class _CifSpecialValue: pass class _NotInFileCif(_CifSpecialValue): pass class _OmittedCif(_CifSpecialValue): pass class _UnknownCif(_CifSpecialValue): pass class _KeywordEnumeration(set): """Set of possible values for a keyword. Can be case insensitive.""" def __init__(self): super().__init__() self.case_sensitive = True self._upper_set = None def add(self, item): self._upper_set = None # Invalidate upper_set super().add(item) def __contains__(self, item): if self.case_sensitive: return super().__contains__(item) else: if self._upper_set is None: self._upper_set = set(x.upper() for x in self) return item.upper() in self._upper_set class ValidatorError(Exception): """Exception raised if a file fails to validate. See :meth:`Dictionary.validate`.""" pass class _ValidatorCategoryHandler(Handler): # Handle special values for CIF data items ('.', '?', or missing entirely) # explicitly, rather the default behavior (mapping to None or '?') not_in_file = _NotInFileCif() omitted = _OmittedCif() unknown = _UnknownCif() def __init__(self, sysr, category): super().__init__(sysr) self.category = '_' + category.name self.category_obj = category self._keys = [k.lower() for k in category.keywords.keys()] self.link_keys = set() li = sysr.dictionary.linked_items for link in itertools.chain(li.keys(), li.values()): cat, key = link.split('.') if cat == self.category: self.link_keys.add(key) def __call__(self, *args): self.sysr.validate_data(self.category_obj, self._keys, args, self.link_keys) class _ValidatorReader: """Track information used for validation while reading an mmCIF file""" def __init__(self, dictionary): self.dictionary = dictionary self._seen_categories = set() self._unknown_categories = set() self._unknown_keywords = set() # Keep track of all values (IDs) seen for keys that are involved in # parent-child relationships self._seen_ids = {} li = dictionary.linked_items for link in itertools.chain(li.keys(), li.values()): self._seen_ids[link] = set() self.errors = [] def validate_data(self, category, keywords, args, link_keys): self._seen_categories.add(category.name) for key, value in zip(keywords, args): if key in link_keys and not isinstance(value, _CifSpecialValue): self._seen_ids["_%s.%s" % (category.name, key)].add(value) kwobj = category.keywords[key] if kwobj.mandatory: if isinstance(value, _UnknownCif): self.errors.append("Mandatory keyword %s.%s cannot have " "value '?'" % (category.name, key)) elif isinstance(value, _NotInFileCif): self.errors.append("Mandatory keyword %s.%s cannot be " "missing from the file" % (category.name, 
key)) if isinstance(value, _CifSpecialValue): continue if kwobj.enumeration and value not in kwobj.enumeration: self.errors.append("Keyword %s.%s value %s is not a valid " "enumerated value (options are %s)" % (category.name, key, value, ", ".join(sorted(kwobj.enumeration)))) if kwobj.item_type and not kwobj.item_type.regex.match(str(value)): self.errors.append("Keyword %s.%s value %s does not match " "item type (%s) regular expression (%s)" % (category.name, key, value, kwobj.item_type.name, kwobj.item_type.construct)) def _check_mandatory_categories(self): all_categories = self.dictionary.categories mandatory_categories = [c.name for c in all_categories.values() if c.mandatory] missing = set(mandatory_categories) - self._seen_categories if missing: self.errors.append( "The following mandatory categories are missing " "in the file: %s" % ", ".join(sorted(missing))) def _check_linked_items(self): """Check to make sure any ID referenced by a child item is defined in the parent""" for child, parent in self.dictionary.linked_items.items(): if not self._seen_ids[child] <= self._seen_ids[parent]: # Strip _ prefix from category cat, key = parent[1:].split('.') # Only warn about relationships where the parent is defined # in this dictionary (e.g. a lot of IHM items point back # to PDBx categories) # Chemical component dictionary checks are handled elsewhere; # the chem_comp_* categories don't need to be fully populated if cat in self.dictionary.categories \ and not cat.startswith('chem_comp_'): missing = sorted(self._seen_ids[child] - self._seen_ids[parent]) self.errors.append( "The following IDs referenced by %s " "were not defined in the parent category (%s): %s" % (child, parent, ", ".join(missing))) def _check_unknown(self): """Report errors for any unknown keywords or categories""" if self._unknown_categories: self.errors.append( "The following categories are not defined in the " "dictionary: %s" % ", ".join(sorted(self._unknown_categories))) if self._unknown_keywords: self.errors.append( "The following keywords are not defined in the dictionary: %s" % ", ".join(sorted(self._unknown_keywords))) def report_errors(self): self._check_mandatory_categories() self._check_linked_items() self._check_unknown() if self.errors: raise ValidatorError("\n\n".join(self.errors)) class _UnknownCategoryHandler: def __init__(self, sysr): self.sysr = sysr def __call__(self, catname, line): self.sysr._unknown_categories.add(catname) class _UnknownKeywordHandler: def __init__(self, sysr): self.sysr = sysr def __call__(self, catname, keyname, line): self.sysr._unknown_keywords.add("%s.%s" % (catname, keyname)) class Dictionary: """Representation of an mmCIF dictionary. See :func:`read` to create a Dictionary from a file. Multiple Dictionaries can be added together to yield a Dictionary that includes all the data in the original Dictionaries. See the `validator example `_ for an example of using this class.""" # noqa: E501 def __init__(self): #: Mapping from name to :class:`Category` objects self.categories = {} #: Links between items; keys are children, values are parents e.g. 
#: ``linked_items['_ihm_starting_model_details.asym_id'] = #: '_struct_asym.id'`` self.linked_items = {} def __iadd__(self, other): for name, cat in other.categories.items(): if name in self.categories: # If both dictionaries contain information on the same # category, combine it self.categories[name]._update(cat) else: self.categories[name] = cat self.linked_items.update(other.linked_items) return self def __add__(self, other): d = Dictionary() d += self d += other return d def validate(self, fh, format='mmCIF'): """Validate the given file against this dictionary. :param file fh: The file handle to read from. :param str format: The format of the file. This can be 'mmCIF' (the default) for the (text-based) mmCIF format or 'BCIF' for BinaryCIF. :raises: :class:`ValidatorError` if the file fails to validate. """ reader_map = {'mmCIF': ihm.format.CifReader, 'BCIF': ihm.format_bcif.BinaryCifReader} s = _ValidatorReader(self) uchandler = _UnknownCategoryHandler(s) ukhandler = _UnknownKeywordHandler(s) r = reader_map[format](fh, {}, unknown_category_handler=uchandler, unknown_keyword_handler=ukhandler) handlers = [_ValidatorCategoryHandler(s, cat) for cat in self.categories.values()] r.category_handler = dict((h.category, h) for h in handlers) # Read all data blocks while r.read_file(): pass s.report_errors() class Category: """Representation of a single category in a :class:`Dictionary`.""" def __init__(self): #: Category name self.name = None #: Human-readable text self.description = None #: Mapping from name to :class:`Keyword` objects self.keywords = {} #: True iff this category is required in a compliant mmCIF file self.mandatory = None def _update(self, other): """Update with information from another Category object""" assert other.name == self.name self.keywords.update(other.keywords) self.description = self.description or other.description if self.mandatory is None: # e.g. if other.mandatory is False and self.mandatory is None # we want to use False; "None or False" returns None. self.mandatory = other.mandatory else: self.mandatory = self.mandatory or other.mandatory class _DoNothingRegEx: """A mock regex object which always matches""" def match(self, value): return True class ItemType: """Represent the type of a data item. This keeps the set of valid strings for values of a given :class:`Keyword`. For example, integer values can only contain the digits 0-9 with an optional +/- prefix.""" def __init__(self, name, primitive_code, construct): self.name = name # The dictionary only defines matches against ASCII characters. # Extend this to match any Unicode "word" character so we don't # fail to validate as soon as we see an accented character. 
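        # As an illustrative sketch (the actual construct strings come from
        # the dictionary being read, not from this module): a construct
        # such as '[+-]?[0-9]+' becomes '[+-]?[0-9]+$' once the '$' suffix
        # is appended below, so '123' or '-7' match in full while 'abc123'
        # is rejected.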
self.construct = construct.replace('A-Za-z0-9', r'\w') self.primitive_code = primitive_code # Ensure that regex matches the entire value try: self.regex = re.compile(self.construct + '$') except re.error: # Some CIF regexes aren't valid Python regexes; skip these self.regex = _DoNothingRegEx() case_sensitive = property(lambda x: x.primitive_code != 'uchar', doc='True iff this type is case sensitive') class Keyword: """Representation of a single keyword in a :class:`Category`.""" def __init__(self): #: Keyword name self.name = None #: True iff this keyword is required in a compliant mmCIF file self.mandatory = None #: Set of acceptable values, or None self.enumeration = None #: :class:`ItemType` for this keyword, or None self.item_type = None class _DictionaryReader: """Track information for a Dictionary being read from a file.""" def __init__(self): self.dictionary = Dictionary() self.item_types = {} # Mapping from name to ItemType object self._reset_category() self._reset_keyword() def _reset_category(self): self.category = Category() self.category_good = False def _reset_keyword(self): self._keyword_info = [] self._keyword_item_type = None self._keyword_enumeration = None self.keyword_good = False def end_save_frame(self): if self.keyword_good: for (name, category, mandatory) in self._keyword_info: k = Keyword() k.name, k.mandatory = name.lower(), mandatory k.enumeration = self._keyword_enumeration k.item_type = self._keyword_item_type # If the owning category does not exist, make it; this can # happen if we extend something in the core dictionary # (e.g. atom_site.ihm_model_id) if category not in self.dictionary.categories: c = Category() c.name = category self.dictionary.categories[c.name] = c else: c = self.dictionary.categories[category] c.keywords[k.name] = k self._reset_keyword() if self.category_good: c = self.category if c.name in self.dictionary.categories: # Handle case where keywords were defined before category self.dictionary.categories[c.name]._update(c) else: self.dictionary.categories[c.name] = c self._reset_category() class _CategoryHandler(Handler): category = '_category' def __call__(self, id, description, mandatory_code: bool): c = self.sysr.category c.name, c.description = id, description c.mandatory = mandatory_code self.sysr.category_good = True def end_save_frame(self): self.sysr.end_save_frame() class _ItemHandler(Handler): category = '_item' def __call__(self, name, category_id, mandatory_code: bool): cat, name = name.split('.') ki = self.sysr._keyword_info # If category_id is missing, strip leading _ from the keyword's # own category name and use that instead if category_id is None: category_id = cat[1:] ki.append((name, category_id, mandatory_code)) self.sysr.keyword_good = True class _ItemEnumerationHandler(Handler): category = '_item_enumeration' def __call__(self, value): if self.sysr._keyword_enumeration is None: self.sysr._keyword_enumeration = _KeywordEnumeration() self.sysr._keyword_enumeration.add(value) class _ItemTypeListHandler(Handler): category = '_item_type_list' def __call__(self, code, primitive_code, construct): it = ItemType(code, primitive_code, construct) self.sysr.item_types[it.name] = it class _ItemTypeHandler(Handler): category = '_item_type' def __call__(self, code): self.sysr._keyword_item_type = code def finalize(self): for c in self.sysr.dictionary.categories.values(): for k in c.keywords.values(): if k.item_type is not None: # Map unrecognized type codes to None # For example, the ihm dictionary often uses the # 'atcode' type which 
is not defined in the dictionary # itself (but presumably is in the base PDBx dict) k.item_type = self.sysr.item_types.get(k.item_type) if k.item_type is not None and k.enumeration: k.enumeration.case_sensitive = k.item_type.case_sensitive class _ItemLinkedHandler(Handler): category = '_item_linked' def __call__(self, child_name, parent_name): self.sysr.dictionary.linked_items[child_name] = parent_name def read(fh): """Read dictionary data from the mmCIF file handle `fh`. :return: The dictionary data. :rtype: :class:`Dictionary` """ r = ihm.format.CifReader(fh, {}) s = _DictionaryReader() handlers = [_CategoryHandler(s), _ItemHandler(s), _ItemEnumerationHandler(s), _ItemTypeListHandler(s), _ItemTypeHandler(s), _ItemLinkedHandler(s)] r.category_handler = dict((h.category, h) for h in handlers) r.read_file() for h in handlers: h.finalize() return s.dictionary python-ihm-2.7/ihm/dumper.py000066400000000000000000005624551503573337200161320ustar00rootroot00000000000000"""Utility classes to dump out information in mmCIF or BinaryCIF format""" import re import os import numbers import collections import operator import itertools import warnings import datetime import ihm.format import ihm.format_bcif import ihm.model import ihm.representation import ihm.source import ihm.multi_state_scheme import ihm.flr import ihm.dataset from . import util from . import location from . import restraint from . import geometry def _is_subrange(rng1, rng2): """Return True iff rng1 is wholly inside rng2""" # Nonpolymers should have an empty range if rng1 == (None, None) or rng2 == (None, None): return rng1 == rng2 else: return rng1[0] >= rng2[0] and rng1[1] <= rng2[1] class Dumper: """Base class for helpers to dump output to mmCIF or BinaryCIF. See :func:`write`.""" # Set to False to disable dump-time sanity checks _check = True def __init__(self): pass def finalize(self, system): """Called for all dumpers prior to :meth:`dump`. This can be used to assign numeric IDs to objects, check for sanity, etc.""" pass def dump(self, system, writer): """Use `writer` to write information about `system` to mmCIF or BinaryCIF. :param system: The :class:`ihm.System` object containing all information about the system. :type system: :class:`ihm.System` :param writer: Utility class to write data to the output file. :type writer: :class:`ihm.format.CifWriter` or :class:`ihm.format_bcif.BinaryCifWriter`. 
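        For example, a minimal custom dumper might look like the sketch
        below (the ``_my_category`` name and ``num_entities`` item are
        hypothetical, not part of any real dictionary)::

            class _MyDumper(Dumper):
                def dump(self, system, writer):
                    with writer.category("_my_category") as lp:
                        lp.write(num_entities=len(system.entities))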
""" pass def _get_transform(rot_matrix, tr_vector): """Return a dict encoding a transform, suitable for passing to loop.write()""" if rot_matrix in (None, ihm.unknown): rm = [[rot_matrix for _ in range(3)] for _ in range(3)] else: # mmCIF writer usually outputs floats to 3 decimal # places, but we need more precision for rotation # matrices rm = [["%.6f" % e for e in rot_matrix[i]] for i in range(3)] if tr_vector in (None, ihm.unknown): tr_vector = [tr_vector for _ in range(3)] return {'rot_matrix11': rm[0][0], 'rot_matrix21': rm[1][0], 'rot_matrix31': rm[2][0], 'rot_matrix12': rm[0][1], 'rot_matrix22': rm[1][1], 'rot_matrix32': rm[2][1], 'rot_matrix13': rm[0][2], 'rot_matrix23': rm[1][2], 'rot_matrix33': rm[2][2], 'tr_vector1': tr_vector[0], 'tr_vector2': tr_vector[1], 'tr_vector3': tr_vector[2]} class _EntryDumper(Dumper): def dump(self, system, writer): # Write CIF header (so this dumper should always be first) writer.start_block(re.subn('[^0-9a-zA-Z_-]', '', system.id)[0]) with writer.category("_entry") as lp: lp.write(id=system.id) class _CollectionDumper(Dumper): def dump(self, system, writer): with writer.loop("_ihm_entry_collection", ["id", "name", "details"]) as lp: for c in system.collections: lp.write(id=c.id, name=c.name, details=c.details) class _AuditConformDumper(Dumper): URL = ("https://raw.githubusercontent.com/" + "ihmwg/IHMCIF/%s/dist/mmcif_ihm.dic") def dump(self, system, writer): with writer.category("_audit_conform") as lp: # Update to match the version of the IHM dictionary we support: lp.write(dict_name="mmcif_ihm.dic", dict_version="1.28", dict_location=self.URL % "44ed2c3") class _StructDumper(Dumper): def dump(self, system, writer): with writer.category("_struct") as lp: mth = system.structure_determination_methodology lp.write(title=system.title, entry_id=system.id, pdbx_structure_determination_methodology=mth, pdbx_model_details=system.model_details) class _CommentDumper(Dumper): def dump(self, system, writer): for comment in system.comments: writer.write_comment(comment) class _SoftwareDumper(Dumper): def finalize(self, system): seen_software = {} self._software_by_id = [] for s in system._all_software(): util._remove_id(s) for s in system._all_software(): util._assign_id(s, seen_software, self._software_by_id) def dump(self, system, writer): # todo: specify these attributes in only one place (e.g. 
in the # Software class) with writer.loop("_software", ["pdbx_ordinal", "name", "classification", "description", "version", "type", "location", "citation_id"]) as lp: for s in self._software_by_id: lp.write(pdbx_ordinal=s._id, name=s.name, classification=s.classification, description=s.description, version=s.version, type=s.type, location=s.location, citation_id=s.citation._id if s.citation else None) class _CitationDumper(Dumper): def finalize(self, system): primaries = [] non_primaries = [] for c in system._all_citations(): (primaries if c.is_primary else non_primaries).append(c) # Put primary citations first in list self._all_citations = primaries + non_primaries for nc, c in enumerate(self._all_citations): c._id = nc + 1 if primaries: if len(primaries) > 1: raise ValueError( "Multiple Citations with is_primary=True; only one can " "be primary: %s" % primaries) else: primaries[0]._id = 'primary' def dump(self, system, writer): self.dump_citations(self._all_citations, writer) self.dump_authors(self._all_citations, writer) def dump_citations(self, citations, writer): with writer.loop("_citation", ["id", "title", "journal_abbrev", "journal_volume", "page_first", "page_last", "year", "pdbx_database_id_PubMed", "pdbx_database_id_DOI"]) as lp: for c in citations: if isinstance(c.page_range, (tuple, list)): page_first, page_last = c.page_range else: page_first = c.page_range page_last = None lp.write(id=c._id, title=c.title, journal_abbrev=c.journal, journal_volume=c.volume, page_first=page_first, page_last=page_last, year=c.year, pdbx_database_id_PubMed=c.pmid, pdbx_database_id_DOI=c.doi) def dump_authors(self, citations, writer): with writer.loop("_citation_author", ["citation_id", "name", "ordinal"]) as lp: ordinal = itertools.count(1) for c in citations: for a in c.authors: lp.write(citation_id=c._id, name=a, ordinal=next(ordinal)) class _AuditAuthorDumper(Dumper): def _get_citation_authors(self, system): # If system.authors is empty, get the set of all citation authors # instead seen_authors = set() # Only look at explicitly-added citations (since these are likely to # describe the modeling) not that describe a method or a piece of # software we used (system._all_citations()) for c in system.citations: for a in c.authors: if a not in seen_authors: seen_authors.add(a) yield a def dump(self, system, writer): authors = system.authors or self._get_citation_authors(system) with writer.loop("_audit_author", ["name", "pdbx_ordinal"]) as lp: for n, author in enumerate(authors): lp.write(name=author, pdbx_ordinal=n + 1) class _AuditRevisionDumper(Dumper): def finalize(self, system): for n, rev in enumerate(system.revisions): rev._id = n + 1 def dump(self, system, writer): self._dump_history(system, writer) self._dump_details(system, writer) self._dump_groups(system, writer) self._dump_categories(system, writer) self._dump_items(system, writer) def _dump_history(self, system, writer): with writer.loop("_pdbx_audit_revision_history", ["ordinal", "data_content_type", "major_revision", "minor_revision", "revision_date"]) as lp: for rev in system.revisions: lp.write(ordinal=rev._id, data_content_type=rev.data_content_type, major_revision=rev.major, minor_revision=rev.minor, revision_date=datetime.date.isoformat(rev.date) if rev.date else rev.date) def _dump_details(self, system, writer): ordinal = itertools.count(1) with writer.loop("_pdbx_audit_revision_details", ["ordinal", "revision_ordinal", "data_content_type", "provider", "type", "description"]) as lp: for rev in system.revisions: for d in 
rev.details: lp.write(ordinal=next(ordinal), revision_ordinal=rev._id, data_content_type=rev.data_content_type, provider=d.provider, type=d.type, description=d.description) def _dump_groups(self, system, writer): ordinal = itertools.count(1) with writer.loop("_pdbx_audit_revision_group", ["ordinal", "revision_ordinal", "data_content_type", "group"]) as lp: for rev in system.revisions: for group in rev.groups: lp.write(ordinal=next(ordinal), revision_ordinal=rev._id, data_content_type=rev.data_content_type, group=group) def _dump_categories(self, system, writer): ordinal = itertools.count(1) with writer.loop("_pdbx_audit_revision_category", ["ordinal", "revision_ordinal", "data_content_type", "category"]) as lp: for rev in system.revisions: for category in rev.categories: lp.write(ordinal=next(ordinal), revision_ordinal=rev._id, data_content_type=rev.data_content_type, category=category) def _dump_items(self, system, writer): ordinal = itertools.count(1) with writer.loop("_pdbx_audit_revision_item", ["ordinal", "revision_ordinal", "data_content_type", "item"]) as lp: for rev in system.revisions: for item in rev.items: lp.write(ordinal=next(ordinal), revision_ordinal=rev._id, data_content_type=rev.data_content_type, item=item) class _DataUsageDumper(Dumper): def dump(self, system, writer): ordinal = itertools.count(1) with writer.loop("_pdbx_data_usage", ["id", "type", "details", "url", "name"]) as lp: for d in system.data_usage: lp.write(id=next(ordinal), type=d.type, details=d.details, url=d.url, name=d.name) class _GrantDumper(Dumper): def dump(self, system, writer): with writer.loop("_pdbx_audit_support", ["funding_organization", "country", "grant_number", "ordinal"]) as lp: for n, grant in enumerate(system.grants): lp.write(funding_organization=grant.funding_organization, country=grant.country, grant_number=grant.grant_number, ordinal=n + 1) class _DatabaseDumper(Dumper): def dump(self, system, writer): with writer.loop("_database_2", ["database_id", "database_code", "pdbx_database_accession", "pdbx_DOI"]) as lp: for d in system.databases: lp.write(database_id=d.id, database_code=d.code, pdbx_DOI=d.doi, pdbx_database_accession=d.accession) class _DatabaseStatusDumper(Dumper): def dump(self, system, writer): with writer.category("_pdbx_database_status") as lp: # Pass through all data items from a Python dict lp.write(**system.database_status._map) class _ChemCompDumper(Dumper): def dump(self, system, writer): comps = frozenset(itertools.chain( (comp for e in system.entities for comp in e.sequence), system._orphan_chem_comps)) with writer.loop("_chem_comp", ["id", "type", "name", "formula", "formula_weight"]) as lp: for comp in sorted(comps, key=operator.attrgetter('id')): if comp.ccd or comp.descriptors: raise ValueError( "Non-default values for 'ccd' or 'descriptors' are " "not supported by the IHM dictionary for %s" % comp) lp.write(id=comp.id, type=comp.type, name=comp.name, formula=comp.formula, formula_weight=comp.formula_weight) class _ChemDescriptorDumper(Dumper): def finalize(self, system): seen_desc = {} # Assign IDs to all descriptors self._descriptor_by_id = [] for d in system._all_chem_descriptors(): util._remove_id(d) for d in system._all_chem_descriptors(): util._assign_id(d, seen_desc, self._descriptor_by_id) def dump(self, system, writer): with writer.loop( "_ihm_chemical_component_descriptor", ["id", "auth_name", "chemical_name", "common_name", "smiles", "smiles_canonical", "inchi", "inchi_key"]) as lp: # note that we don't write out chem_comp_id; this is no longer # 
present in the dictionary for d in self._descriptor_by_id: lp.write(id=d._id, auth_name=d.auth_name, chemical_name=d.chemical_name, common_name=d.common_name, smiles=d.smiles, smiles_canonical=d.smiles_canonical, inchi=d.inchi, inchi_key=d.inchi_key) class _EntityDumper(Dumper): def finalize(self, system): # Assign IDs and check for duplicates or empty entities seen = {} empty = [] for num, entity in enumerate(system.entities): if self._check and entity in seen and len(entity.sequence) > 0: raise ValueError("Duplicate entity %s found" % entity) if len(entity.sequence) == 0: empty.append(entity) entity._id = num + 1 seen[entity] = None if empty: warnings.warn( "At least one empty Entity (with no sequence) was found: %s" % empty) def dump(self, system, writer): # Count all molecules (if any) for each entity num_molecules = collections.defaultdict(lambda: 0) for asym in system.asym_units: num_molecules[asym.entity._id] += asym.number_of_molecules with writer.loop("_entity", ["id", "type", "src_method", "pdbx_description", "formula_weight", "pdbx_number_of_molecules", "details"]) as lp: for entity in system.entities: lp.write(id=entity._id, type=entity.type, src_method=entity.src_method, pdbx_description=entity.description, formula_weight=entity.formula_weight, pdbx_number_of_molecules=num_molecules[entity._id], details=entity.details) def _assign_src_ids(system, srccls): """Assign IDs to all entity sources of type `srccls`.""" # Assign IDs seen_src = {} src_by_id = [] for e in system.entities: if isinstance(e.source, srccls): util._remove_id(e.source) for e in system.entities: if isinstance(e.source, srccls): util._assign_id(e.source, seen_src, src_by_id) class _EntitySrcGenDumper(Dumper): def finalize(self, system): _assign_src_ids(system, ihm.source.Manipulated) def dump(self, system, writer): with writer.loop( "_entity_src_gen", ["entity_id", "pdbx_src_id", "pdbx_gene_src_ncbi_taxonomy_id", "pdbx_gene_src_scientific_name", "gene_src_common_name", "gene_src_strain", "pdbx_host_org_ncbi_taxonomy_id", "pdbx_host_org_scientific_name", "host_org_common_name", "pdbx_host_org_strain"]) as lp: for e in system.entities: if isinstance(e.source, ihm.source.Manipulated): self._dump_source(lp, e) def _dump_source(self, lp, e): s = e.source # Skip output if all fields are blank if s.gene is None and s.host is None: return lp.write(entity_id=e._id, pdbx_src_id=s._id, pdbx_gene_src_ncbi_taxonomy_id=s.gene.ncbi_taxonomy_id if s.gene else None, pdbx_gene_src_scientific_name=s.gene.scientific_name if s.gene else None, gene_src_strain=s.gene.strain if s.gene else None, gene_src_common_name=s.gene.common_name if s.gene else None, pdbx_host_org_ncbi_taxonomy_id=s.host.ncbi_taxonomy_id if s.host else None, pdbx_host_org_scientific_name=s.host.scientific_name if s.host else None, host_org_common_name=s.host.common_name if s.host else None, pdbx_host_org_strain=s.host.strain if s.host else None) class _EntitySrcNatDumper(Dumper): def finalize(self, system): _assign_src_ids(system, ihm.source.Natural) def dump(self, system, writer): with writer.loop( "_entity_src_nat", ["entity_id", "pdbx_src_id", "pdbx_ncbi_taxonomy_id", "pdbx_organism_scientific", "common_name", "strain"]) as lp: for e in system.entities: s = e.source if isinstance(s, ihm.source.Natural): lp.write(entity_id=e._id, pdbx_src_id=s._id, pdbx_ncbi_taxonomy_id=s.ncbi_taxonomy_id, pdbx_organism_scientific=s.scientific_name, common_name=s.common_name, strain=s.strain) class _EntitySrcSynDumper(Dumper): def finalize(self, system): _assign_src_ids(system, 
ihm.source.Synthetic) def dump(self, system, writer): # Note that _pdbx_entity_src_syn.strain is not used in current PDB # entries with writer.loop( "_pdbx_entity_src_syn", ["entity_id", "pdbx_src_id", "ncbi_taxonomy_id", "organism_scientific", "organism_common_name"]) as lp: for e in system.entities: s = e.source if isinstance(s, ihm.source.Synthetic): lp.write(entity_id=e._id, pdbx_src_id=s._id, ncbi_taxonomy_id=s.ncbi_taxonomy_id, organism_scientific=s.scientific_name, organism_common_name=s.common_name) def _prettyprint_seq(seq, width): """Join the sequence of strings together and generate a set of lines that don't exceed the provided width.""" current_width = 0 line = [] for s in seq: if line and current_width + len(s) > width: yield ''.join(line) line = [] current_width = 0 line.append(s) current_width += len(s) if line: yield ''.join(line) def _get_comp_id(entity, seq_id): """Get the component ID for a given seq_id, or ? if it is out of range""" if 1 <= seq_id <= len(entity.sequence): return entity.sequence[seq_id - 1].id else: return ihm.unknown class _StructRefDumper(Dumper): def finalize(self, system): # List of (entity, ref) by ID self._refs_by_id = [] seen_refs = {} align_id = itertools.count(1) for e in system.entities: for r in e.references: util._remove_id(r) for e in system.entities: # Two refs are not considered duplicated if they relate to # different entities, so add entity to reference signature for r in e.references: sig = (id(e), r._signature()) util._assign_id(r, seen_refs, self._refs_by_id, seen_obj=sig, by_id_obj=(e, r)) for a in r._get_alignments(): a._id = next(align_id) def _get_sequence(self, reference): """Get the sequence string""" if reference.sequence in (None, ihm.unknown): return reference.sequence # We only want the subset of the sequence that overlaps with # our entities db_begin = min(a.db_begin for a in reference._get_alignments()) db_end = max(a.db_end for a in reference._get_alignments()) fullrefseq = list(util._get_codes(reference.sequence)) # Split into lines to get tidier CIF output return "\n".join(_prettyprint_seq( (code if len(code) == 1 else '(%s)' % code for code in fullrefseq[db_begin - 1:db_end]), 70)) def _check_seq_dif(self, entity, ref, align): """Check all SeqDif objects for the Entity sequence. 
Return the mutated sequence (to match the reference).""" entseq = [comp.code_canonical for comp in entity.sequence] for sd in align.seq_dif: if sd.seq_id < 1 or sd.seq_id > len(entseq): raise IndexError("SeqDif.seq_id for %s is %d, out of " "range 1-%d" % (entity, sd.seq_id, len(entseq))) if (sd.monomer and sd.monomer.code_canonical != entseq[sd.seq_id - 1]): raise ValueError("SeqDif.monomer one-letter code (%s) does " "not match that in %s (%s at position %d)" % (sd.monomer.code_canonical, entity, entseq[sd.seq_id - 1], sd.seq_id)) if sd.db_monomer: entseq[sd.seq_id - 1] = sd.db_monomer.code_canonical return entseq def _get_ranges(self, entity, fullrefseq, align): """Get the sequence ranges for an Entity and Reference""" return ((align.entity_begin, len(entity.sequence) if align.entity_end is None else align.entity_end), (align.db_begin, len(fullrefseq) if align.db_end is None else align.db_end)) def _check_reference_sequence(self, entity, ref): """Make sure that the Entity and Reference sequences match""" for align in ref._get_alignments(): self._check_alignment(entity, ref, align) def _check_alignment(self, entity, ref, align): """Make sure that an alignment makes sense""" if ref.sequence in (None, ihm.unknown): # We just have to trust the range if the ref sequence is blank return # Our sanity-checking logic doesn't currently support insertions # or deletions if any(sd.details in ('insertion', 'deletion') for sd in align.seq_dif): return entseq = self._check_seq_dif(entity, ref, align) # Reference sequence may contain non-standard residues, so parse them # out; e.g. "FLGHGGN(WP9)LHFVQLAS" fullrefseq = list(util._get_codes(ref.sequence)) def check_rng(rng, seq, rngstr, obj): if any(r < 1 or r > len(seq) for r in rng): raise IndexError("Alignment.%s for %s is (%d-%d), " "out of range 1-%d" % (rngstr, obj, rng[0], rng[1], len(seq))) entity_rng, db_rng = self._get_ranges(entity, fullrefseq, align) check_rng(entity_rng, entseq, "entity_begin,entity_end", entity) check_rng(db_rng, fullrefseq, "db_begin,db_end", ref) matchlen = min(entity_rng[1] - entity_rng[0], db_rng[1] - db_rng[0]) entseq = entseq[entity_rng[0] - 1:entity_rng[0] + matchlen - 1] refseq = fullrefseq[db_rng[0] - 1:db_rng[0] + matchlen - 1] # Entity sequence is canonical so likely won't match any non-standard # residue (anything of length > 1), so just skip checks of these def matchseq(a, b): return a == b or len(a) > 1 or len(b) > 1 if (len(refseq) != len(entseq) or not all(matchseq(a, b) for (a, b) in zip(refseq, entseq))): raise ValueError( "Reference sequence from %s does not match entity canonical" " sequence (after mutations) for %s - you may need to " "adjust Alignment.db_begin,db_end (%d-%d), " "Alignment.entity_begin,entity_end (%d-%d), " "or add to Alignment.seq_dif:\n" "Reference: %s\nEntity: %s\n" "Match: %s" % (ref, entity, db_rng[0], db_rng[1], entity_rng[0], entity_rng[1], # Use "X" for any non-standard residue so the alignment # lines up ''.join(x if len(x) == 1 else 'X' for x in refseq), ''.join(entseq), ''.join('*' if matchseq(a, b) else ' ' for (a, b) in zip(refseq, entseq)))) def dump(self, system, writer): with writer.loop( "_struct_ref", ["id", "entity_id", "db_name", "db_code", "pdbx_db_accession", "pdbx_align_begin", "pdbx_seq_one_letter_code", "details"]) as lp: for e, r in self._refs_by_id: if self._check: self._check_reference_sequence(e, r) db_begin = min(a.db_begin for a in r._get_alignments()) lp.write(id=r._id, entity_id=e._id, db_name=r.db_name, db_code=r.db_code, pdbx_db_accession=r.accession, 
pdbx_align_begin=db_begin, details=r.details, pdbx_seq_one_letter_code=self._get_sequence(r)) self.dump_seq(system, writer) self.dump_seq_dif(system, writer) def dump_seq(self, system, writer): def _all_alignments(): for e, r in self._refs_by_id: for a in r._get_alignments(): yield e, r, a with writer.loop( "_struct_ref_seq", ["align_id", "ref_id", "seq_align_beg", "seq_align_end", "db_align_beg", "db_align_end"]) as lp: for e, r, a in _all_alignments(): fullrefseq = list(util._get_codes(r.sequence)) entity_rng, db_rng = self._get_ranges(e, fullrefseq, a) matchlen = min(entity_rng[1] - entity_rng[0], db_rng[1] - db_rng[0]) lp.write(align_id=a._id, ref_id=r._id, seq_align_beg=entity_rng[0], seq_align_end=entity_rng[0] + matchlen, db_align_beg=db_rng[0], db_align_end=db_rng[0] + matchlen) def dump_seq_dif(self, system, writer): ordinal = itertools.count(1) with writer.loop( "_struct_ref_seq_dif", ["pdbx_ordinal", "align_id", "seq_num", "db_mon_id", "mon_id", "details"]) as lp: for e, r in self._refs_by_id: for a in r._get_alignments(): for sd in a.seq_dif: lp.write(pdbx_ordinal=next(ordinal), align_id=a._id, seq_num=sd.seq_id, db_mon_id=sd.db_monomer.id if sd.db_monomer else ihm.unknown, mon_id=sd.monomer.id if sd.monomer else ihm.unknown, details=sd.details) class _EntityPolyDumper(Dumper): def __init__(self): super().__init__() # Determine the type of the entire entity's sequence based on the # type(s) of all chemical components it contains self._seq_type_map = { frozenset(('D-peptide linking',)): 'polypeptide(D)', frozenset(('D-peptide linking', 'peptide linking')): 'polypeptide(D)', frozenset(('RNA linking',)): 'polyribonucleotide', frozenset(('DNA linking',)): 'polydeoxyribonucleotide', frozenset(('DNA linking', 'RNA linking')): 'polydeoxyribonucleotide/polyribonucleotide hybrid'} def _get_sequence(self, entity): """Get the sequence for an entity as a string""" # Split into lines to get tidier CIF output return "\n".join(_prettyprint_seq((comp.code if len(comp.code) == 1 else '(%s)' % comp.code for comp in entity.sequence), 70)) def _get_canon(self, entity): """Get the canonical sequence for an entity as a string""" # Split into lines to get tidier CIF output seq = "\n".join(_prettyprint_seq( (comp.code_canonical for comp in entity.sequence), 70)) return seq def _get_seq_type(self, entity): """Get the sequence type for an entity""" all_types = frozenset(comp.type for comp in entity.sequence) # For a mix of L-peptides and D-peptides, current PDB entries always # seem to use 'polypeptide(L)' so let's do that too: if 'L-peptide linking' in all_types: return 'polypeptide(L)' else: return self._seq_type_map.get(all_types, 'other') def dump(self, system, writer): # Get all asym units (if any) for each entity strands = collections.defaultdict(list) for asym in system.asym_units: strands[asym.entity._id].append(asym.strand_id) with writer.loop("_entity_poly", ["entity_id", "type", "nstd_linkage", "nstd_monomer", "pdbx_strand_id", "pdbx_seq_one_letter_code", "pdbx_seq_one_letter_code_can"]) as lp: for entity in system.entities: if not entity.is_polymeric(): continue nstd = any(isinstance(x, ihm.NonPolymerChemComp) for x in entity.sequence) sids = strands[entity._id] lp.write(entity_id=entity._id, type=self._get_seq_type(entity), nstd_linkage='no', nstd_monomer='yes' if nstd else 'no', pdbx_strand_id=",".join(sids) if sids else None, pdbx_seq_one_letter_code=self._get_sequence(entity), pdbx_seq_one_letter_code_can=self._get_canon(entity)) class _EntityNonPolyDumper(Dumper): def dump(self, system, 
writer): with writer.loop("_pdbx_entity_nonpoly", ["entity_id", "name", "comp_id"]) as lp: for entity in system.entities: if entity.is_polymeric() or entity.is_branched(): continue lp.write(entity_id=entity._id, name=entity.description, comp_id=entity.sequence[0].id) class _EntityPolySeqDumper(Dumper): def dump(self, system, writer): with writer.loop("_entity_poly_seq", ["entity_id", "num", "mon_id", "hetero"]) as lp: for entity in system.entities: if not entity.is_polymeric(): continue for num, comp in enumerate(entity.sequence): lp.write(entity_id=entity._id, num=num + 1, mon_id=comp.id) class _EntityPolySegmentDumper(Dumper): def finalize(self, system): seen_ranges = {} self._ranges_by_id = [] # Need to assign ranges for all starting models too for sm in system._all_starting_models(): rng = sm.asym_unit util._remove_id(rng, attr='_range_id') for rng in system._all_entity_ranges(): util._remove_id(rng, attr='_range_id') for rng in itertools.chain(system._all_entity_ranges(), (sm.asym_unit for sm in system._all_starting_models())): entity = rng.entity if hasattr(rng, 'entity') else rng if entity.is_polymeric(): util._assign_id(rng, seen_ranges, self._ranges_by_id, attr='_range_id', # Two ranges are considered the same if they # have the same entity ID and refer to # the same residue range seen_obj=(entity._id, rng.seq_id_range)) else: rng._range_id = None def dump(self, system, writer): with writer.loop("_ihm_entity_poly_segment", ["id", "entity_id", "seq_id_begin", "seq_id_end", "comp_id_begin", "comp_id_end"]) as lp: for rng in self._ranges_by_id: if hasattr(rng, 'entity'): entity = rng.entity if self._check: util._check_residue_range(rng.seq_id_range, entity) else: entity = rng lp.write( id=rng._range_id, entity_id=entity._id, seq_id_begin=rng.seq_id_range[0], seq_id_end=rng.seq_id_range[1], comp_id_begin=_get_comp_id(entity, rng.seq_id_range[0]), comp_id_end=_get_comp_id(entity, rng.seq_id_range[1])) class _EntityBranchListDumper(Dumper): def dump(self, system, writer): with writer.loop("_pdbx_entity_branch_list", ["entity_id", "num", "comp_id", "hetero"]) as lp: for entity in system.entities: if not entity.is_branched(): continue for num, comp in enumerate(entity.sequence): lp.write(entity_id=entity._id, num=num + 1, comp_id=comp.id) class _EntityBranchDumper(Dumper): def dump(self, system, writer): # todo: we currently only support branched oligosaccharides with writer.loop("_pdbx_entity_branch", ["entity_id", "type"]) as lp: for entity in system.entities: if not entity.is_branched(): continue lp.write(entity_id=entity._id, type="oligosaccharide") class _PolySeqSchemeDumper(Dumper): """Output the _pdbx_poly_seq_scheme table. This is needed because it is a parent category of atom_site.""" def dump(self, system, writer): with writer.loop("_pdbx_poly_seq_scheme", ["asym_id", "entity_id", "seq_id", "mon_id", "pdb_seq_num", "auth_seq_num", "pdb_mon_id", "auth_mon_id", "pdb_strand_id", "pdb_ins_code"]) as lp: for asym in system.asym_units: entity = asym.entity if not entity.is_polymeric(): continue for start, end, modeled in self._get_ranges(system, asym): for num in range(start, end + 1): comp = entity.sequence[num - 1] auth_comp_id = comp.id pdb_seq_num, auth_seq_num, ins = \ asym._get_pdb_auth_seq_id_ins_code(num) if not modeled: # If a residue wasn't modeled, PDB convention is # to state ? for auth_seq_num, pdb_mon_id, # auth_mon_id. 
# See, e.g., https://files.rcsb.org/view/8QB4.cif auth_comp_id = ihm.unknown auth_seq_num = ihm.unknown elif auth_seq_num is ihm.unknown: # If we don't know the seq num, we can't know # the component ID either auth_comp_id = ihm.unknown lp.write(asym_id=asym._id, pdb_strand_id=asym.strand_id, entity_id=entity._id, seq_id=num, pdb_seq_num=pdb_seq_num, auth_seq_num=auth_seq_num, mon_id=comp.id, pdb_mon_id=auth_comp_id, auth_mon_id=auth_comp_id, pdb_ins_code=ins) def _get_ranges(self, system, asym): """Get a list of (seq_id_begin, seq_id_end, modeled) residue ranges for the given asym. The list is guaranteed to be sorted and to cover all residues in the asym. `modeled` is True if no Model has any residue in that range in a NotModeledResidueRange.""" _all_modeled = [] num_models = 0 for group, model in system._all_models(): num_models += 1 # Handle Model-like objects with no not-modeled member (e.g. # older versions of python-modelcif) if hasattr(model, 'not_modeled_residue_ranges'): ranges = model.not_modeled_residue_ranges else: ranges = [] # Get a sorted non-overlapping list of all not-modeled ranges _all_not_modeled = util._combine_ranges( (rr.seq_id_begin, rr.seq_id_end) for rr in ranges if rr.asym_unit is asym) # Invert to get a list of modeled ranges for this model _all_modeled.extend(util._invert_ranges(_all_not_modeled, len(asym.entity.sequence))) # If no models, there are no "not modeled residues", so say everything # was modeled if num_models == 0: _all_modeled = [(1, len(asym.entity.sequence))] return util._pred_ranges(util._combine_ranges(_all_modeled), len(asym.entity.sequence)) class _NonPolySchemeDumper(Dumper): """Output the _pdbx_nonpoly_scheme table. For now we assume we're using auth_seq_num==pdb_seq_num.""" def dump(self, system, writer): with writer.loop("_pdbx_nonpoly_scheme", ["asym_id", "entity_id", "mon_id", "ndb_seq_num", "pdb_seq_num", "auth_seq_num", "auth_mon_id", "pdb_strand_id", "pdb_ins_code"]) as lp: for asym in system.asym_units: entity = asym.entity if entity.is_polymeric() or entity.is_branched(): continue for num, comp in enumerate(asym.sequence): pdb_seq_num, auth_seq_num, ins = \ asym._get_pdb_auth_seq_id_ins_code(num + 1) # ndb_seq_num is described as the "NDB/RCSB residue # number". We don't have one of those but real PDBs # usually seem to just count sequentially from 1, so # we'll do that too. 
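# In other words, ndb_seq_num below is simply num + 1 (the 1-based index
# into asym.sequence), while pdb_seq_num, auth_seq_num and any insertion
# code come from the asym unit's own author-provided numbering via
# _get_pdb_auth_seq_id_ins_code().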
lp.write(asym_id=asym._id, pdb_strand_id=asym.strand_id, entity_id=entity._id, ndb_seq_num=num + 1, pdb_seq_num=pdb_seq_num, auth_seq_num=auth_seq_num, mon_id=comp.id, auth_mon_id=comp.id, pdb_ins_code=ins) class _BranchSchemeDumper(Dumper): def dump(self, system, writer): with writer.loop("_pdbx_branch_scheme", ["asym_id", "entity_id", "mon_id", "num", "pdb_seq_num", "pdb_ins_code", "auth_seq_num", "auth_mon_id", "pdb_mon_id", "pdb_asym_id"]) as lp: for asym in system.asym_units: entity = asym.entity if not entity.is_branched(): continue for num, comp in enumerate(asym.sequence): pdb_seq_num, auth_seq_num, ins = \ asym._get_pdb_auth_seq_id_ins_code(num + 1) # Assume num counts sequentially from 1 (like seq_id) lp.write(asym_id=asym._id, pdb_asym_id=asym.strand_id, entity_id=entity._id, num=num + 1, pdb_seq_num=pdb_seq_num, pdb_ins_code=ins, auth_seq_num=auth_seq_num, mon_id=comp.id, auth_mon_id=comp.id, pdb_mon_id=comp.id) class _BranchDescriptorDumper(Dumper): def dump(self, system, writer): ordinal = itertools.count(1) with writer.loop("_pdbx_entity_branch_descriptor", ["ordinal", "entity_id", "descriptor", "type", "program", "program_version"]) as lp: for entity in system.entities: for d in entity.branch_descriptors: lp.write(ordinal=next(ordinal), entity_id=entity._id, descriptor=d.text, type=d.type, program=d.program, program_version=d.program_version) class _BranchLinkDumper(Dumper): def dump(self, system, writer): ordinal = itertools.count(1) with writer.loop("_pdbx_entity_branch_link", ["link_id", "entity_id", "entity_branch_list_num_1", "comp_id_1", "atom_id_1", "leaving_atom_id_1", "entity_branch_list_num_2", "comp_id_2", "atom_id_2", "leaving_atom_id_2", "value_order", "details"]) as lp: for entity in system.entities: for lnk in entity.branch_links: lp.write( link_id=next(ordinal), entity_id=entity._id, entity_branch_list_num_1=lnk.num1, comp_id_1=entity.sequence[lnk.num1 - 1].id, atom_id_1=lnk.atom_id1, leaving_atom_id_1=lnk.leaving_atom_id1, entity_branch_list_num_2=lnk.num2, comp_id_2=entity.sequence[lnk.num2 - 1].id, atom_id_2=lnk.atom_id2, leaving_atom_id_2=lnk.leaving_atom_id2, value_order=lnk.order, details=lnk.details) class _AsymIDProvider: """Provide unique asym IDs""" def __init__(self, seen_ids): self.seen_ids = seen_ids self.ids = util._AsymIDs() self.index = -1 def get_next_id(self): """Get the next unique ID""" self.index += 1 while self.ids[self.index] in self.seen_ids: self.index += 1 # Note that we don't need to add our own IDs to seen_ids since # they are already guaranteed to be unique return self.ids[self.index] class _StructAsymDumper(Dumper): def finalize(self, system): # Handle user-assigned IDs first seen_asym_ids = set() duplicates = set() for asym in system.asym_units: if asym.id is not None: if asym.id in seen_asym_ids: duplicates.add(asym.id) asym._id = asym.id seen_asym_ids.add(asym.id) if duplicates: raise ValueError("One or more duplicate asym (chain) IDs " "detected - %s" % ", ".join(sorted(duplicates))) ordinal = itertools.count(1) # Assign remaining asym IDs id_prov = _AsymIDProvider(seen_asym_ids) for asym in system.asym_units: if asym.id is None: asym._id = id_prov.get_next_id() asym._ordinal = next(ordinal) def dump(self, system, writer): with writer.loop("_struct_asym", ["id", "entity_id", "details"]) as lp: for asym in system.asym_units: lp.write(id=asym._id, entity_id=asym.entity._id, details=asym.details) class _AssemblyDumperBase(Dumper): def finalize(self, system): # Sort each assembly by entity id/asym id/range def 
component_key(comp): return (comp.entity._id, comp._ordinal, comp.seq_id_range) for a in system._all_assemblies(): a.sort(key=component_key) seen_assemblies = {} # Assign IDs to all assemblies; duplicate assemblies (same signature) # get same ID self._assembly_by_id = [] description_by_id = {} all_assemblies = list(system._all_assemblies()) seen_assembly_ids = {} for a in all_assemblies: # Assembly isn't hashable but its signature is sig = a._signature() if sig not in seen_assemblies: self._assembly_by_id.append(a) seen_assemblies[sig] = a._id = len(self._assembly_by_id) description_by_id[a._id] = [] else: a._id = seen_assemblies[sig] if a.description and id(a) not in seen_assembly_ids: descs = description_by_id[a._id] # Don't duplicate descriptions if len(descs) == 0 or descs[-1] != a.description: descs.append(a.description) seen_assembly_ids[id(a)] = None # If multiple assemblies map to the same ID, give them all the same # composite description for a_id, description in description_by_id.items(): description_by_id[a_id] = ' & '.join(description) \ if description else None for a in all_assemblies: a.description = description_by_id[a._id] class _AssemblyDumper(_AssemblyDumperBase): def dump(self, system, writer): self.dump_summary(system, writer) self.dump_details(system, writer) def dump_summary(self, system, writer): with writer.loop("_ihm_struct_assembly", ["id", "name", "description"]) as lp: for a in self._assembly_by_id: lp.write(id=a._id, name=a.name, description=a.description) def dump_details(self, system, writer): ordinal = itertools.count(1) with writer.loop("_ihm_struct_assembly_details", ["id", "assembly_id", "parent_assembly_id", "entity_description", "entity_id", "asym_id", "entity_poly_segment_id"]) as lp: for a in self._assembly_by_id: for comp in a: entity = comp.entity if hasattr(comp, 'entity') else comp lp.write( id=next(ordinal), assembly_id=a._id, # if no hierarchy then assembly is self-parent parent_assembly_id=a.parent._id if a.parent else a._id, entity_description=entity.description, entity_id=entity._id, asym_id=comp._id if hasattr(comp, 'entity') else None, entity_poly_segment_id=comp._range_id) class _ExternalReferenceDumper(Dumper): """Output information on externally referenced files (i.e. 
anything that refers to a Location that isn't a DatabaseLocation).""" class _LocalFiles: reference_provider = None reference_type = 'Supplementary Files' reference = None refers_to = 'Other' url = None details = None def __init__(self, top_directory): self.top_directory = top_directory def _get_full_path(self, path): return os.path.relpath(path, start=self.top_directory) def finalize(self, system): # Keep only locations that don't point into databases (these are # handled elsewhere) self._refs = [x for x in system._all_locations() if not isinstance(x, location.DatabaseLocation)] # Assign IDs to all locations and repos (including the None repo, which # is for local files) seen_refs = {} seen_repos = {} self._ref_by_id = [] self._repo_by_id = [] # Special dummy repo for repo=None (local files) self._local_files = self._LocalFiles(os.getcwd()) for r in self._refs: util._remove_id(r) if r.repo: util._remove_id(r.repo) for r in system._orphan_repos: util._remove_id(r) for r in self._refs: # Assign a unique ID to the reference util._assign_id(r, seen_refs, self._ref_by_id) # Assign a unique ID to the repository util._assign_id(r.repo or self._local_files, seen_repos, self._repo_by_id) for r in system._orphan_repos: util._assign_id(r, seen_repos, self._repo_by_id) def dump(self, system, writer): self.dump_repos(writer) self.dump_refs(writer) def dump_repos(self, writer): with writer.loop("_ihm_external_reference_info", ["reference_id", "reference_provider", "reference_type", "reference", "refers_to", "associated_url", "details"]) as lp: for repo in self._repo_by_id: lp.write(reference_id=repo._id, reference_provider=repo.reference_provider, reference_type=repo.reference_type, reference=repo.reference, refers_to=repo.refers_to, associated_url=repo.url, details=repo.details) def dump_refs(self, writer): with writer.loop("_ihm_external_files", ["id", "reference_id", "file_path", "content_type", "file_format", "file_size_bytes", "details"]) as lp: for r in self._ref_by_id: repo = r.repo or self._local_files if r.path is None: file_path = None else: file_path = self._posix_path(repo._get_full_path(r.path)) lp.write(id=r._id, reference_id=repo._id, file_path=file_path, content_type=r.content_type, file_format=r.file_format, file_size_bytes=r.file_size, details=r.details) # On Windows systems, convert native paths to POSIX-like (/-separated) # paths if os.sep == '/': def _posix_path(self, path): return path else: def _posix_path(self, path): return path.replace(os.sep, '/') class _DatasetDumper(Dumper): def finalize(self, system): def _all_transforms(dataset): for p in dataset.parents: if isinstance(p, ihm.dataset.TransformedDataset): yield p.transform seen_datasets = {} seen_transforms = {} # Assign IDs to all datasets and transforms self._dataset_by_id = [] self._transform_by_id = [] for d in system._all_datasets(): for t in _all_transforms(d): # Can't use default _id attribute here since a given transform # may be used by both a dataset and a geometric object, and # since they live in different tables they need different IDs util._remove_id(t, attr='_dtid') util._remove_id(d) for t in system._orphan_dataset_transforms: util._remove_id(t, attr='_dtid') for d in system._all_datasets(): util._assign_id(d, seen_datasets, self._dataset_by_id) for t in _all_transforms(d): util._assign_id(t, seen_transforms, self._transform_by_id, attr='_dtid') for t in system._orphan_dataset_transforms: util._assign_id(t, seen_transforms, self._transform_by_id, attr='_dtid') # Assign IDs to all groups and remove duplicates 
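# Two dataset groups are treated as duplicates if they contain the same
# set of datasets: the key used below is the sorted tuple of member
# dataset IDs, so groups with identical membership share a single group ID.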
seen_group_ids = {} self._dataset_group_by_id = [] for g in system._all_dataset_groups(): ids = tuple(sorted(d._id for d in g)) if ids not in seen_group_ids: self._dataset_group_by_id.append(g) g._id = len(self._dataset_group_by_id) seen_group_ids[ids] = g else: g._id = seen_group_ids[ids]._id def dump(self, system, writer): with writer.loop("_ihm_dataset_list", ["id", "data_type", "database_hosted", "details"]) as lp: for d in self._dataset_by_id: lp.write(id=d._id, data_type=d.data_type, details=d.details, database_hosted=any(isinstance( loc, location.DatabaseLocation) for loc in d._locations)) self.dump_groups(writer) self.dump_other(writer) self.dump_rel_dbs(writer) self.dump_related(system, writer) self.dump_related_transform(system, writer) def dump_groups(self, writer): self.dump_group_summary(writer) self.dump_group_links(writer) def dump_group_summary(self, writer): with writer.loop("_ihm_dataset_group", ["id", "name", "application", "details"]) as lp: for g in self._dataset_group_by_id: lp.write(id=g._id, name=g.name, application=g.application, details=g.details) def dump_group_links(self, writer): with writer.loop("_ihm_dataset_group_link", ["group_id", "dataset_list_id"]) as lp: for g in self._dataset_group_by_id: # Don't duplicate IDs, and output in sorted order for dataset_id in sorted(set(d._id for d in g)): lp.write(group_id=g._id, dataset_list_id=dataset_id) def dump_other(self, writer): ordinal = itertools.count(1) with writer.loop("_ihm_dataset_external_reference", ["id", "dataset_list_id", "file_id"]) as lp: for d in self._dataset_by_id: for loc in d._locations: if (loc is not None and not isinstance(loc, location.DatabaseLocation)): lp.write(id=next(ordinal), dataset_list_id=d._id, file_id=loc._id) def dump_rel_dbs(self, writer): ordinal = itertools.count(1) with writer.loop("_ihm_dataset_related_db_reference", ["id", "dataset_list_id", "db_name", "accession_code", "version", "details"]) as lp: for d in self._dataset_by_id: for loc in d._locations: if (loc is not None and isinstance(loc, location.DatabaseLocation)): lp.write(id=next(ordinal), dataset_list_id=d._id, db_name=loc.db_name, accession_code=loc.access_code, version=loc.version, details=loc.details) def dump_related(self, system, writer): with writer.loop("_ihm_related_datasets", ["dataset_list_id_derived", "dataset_list_id_primary", "transformation_id"]) as lp: for derived in self._dataset_by_id: ids = set() for p in derived.parents: if isinstance(p, ihm.dataset.TransformedDataset): ids.add((p.dataset._id, p.transform._dtid)) else: ids.add((p._id, None)) # Don't duplicate IDs, and sort by parent ID (cannot sort # by transform ID because it might be None and we can't # compare None with int) for pid, tid in sorted(ids, key=operator.itemgetter(0)): lp.write(dataset_list_id_derived=derived._id, dataset_list_id_primary=pid, transformation_id=tid) def dump_related_transform(self, system, writer): with writer.loop( "_ihm_data_transformation", ["id", "rot_matrix[1][1]", "rot_matrix[2][1]", "rot_matrix[3][1]", "rot_matrix[1][2]", "rot_matrix[2][2]", "rot_matrix[3][2]", "rot_matrix[1][3]", "rot_matrix[2][3]", "rot_matrix[3][3]", "tr_vector[1]", "tr_vector[2]", "tr_vector[3]"]) as lp: for t in self._transform_by_id: if self._check: util._check_transform(t) lp.write(id=t._dtid, **_get_transform(t.rot_matrix, t.tr_vector)) class _ModelRepresentationDumper(Dumper): def finalize(self, system): # Assign IDs to representations and segments for nr, r in enumerate(system._all_representations()): r._id = nr + 1 for ns, s in 
enumerate(r): s._id = ns + 1 def dump(self, system, writer): self.dump_summary(system, writer) self.dump_details(system, writer) def dump_summary(self, system, writer): with writer.loop("_ihm_model_representation", ["id", "name", "details"]) as lp: for r in system._all_representations(): lp.write(id=r._id, name=r.name, details=r.details) def dump_details(self, system, writer): ordinal = itertools.count(1) with writer.loop("_ihm_model_representation_details", ["id", "representation_id", "entity_id", "entity_description", "entity_asym_id", "entity_poly_segment_id", "model_object_primitive", "starting_model_id", "model_mode", "model_granularity", "model_object_count", "description"]) as lp: for r in system._all_representations(): for segment in r: entity = segment.asym_unit.entity lp.write( id=next(ordinal), representation_id=r._id, entity_id=entity._id, entity_description=entity.description, entity_asym_id=segment.asym_unit._id, entity_poly_segment_id=segment.asym_unit._range_id, model_object_primitive=segment.primitive, starting_model_id=segment.starting_model._id if segment.starting_model else None, model_mode='rigid' if segment.rigid else 'flexible', model_granularity=segment.granularity, model_object_count=segment.count, description=segment.description) class _StartingModelRangeChecker: """Check Atoms in StartingModels to make sure they match the Entities""" def __init__(self, model, check): self.model = model self._check = check def __call__(self, atom): if not self._check: return # Check that atom seq_id is in range e = atom.asym_unit.entity if atom.seq_id > len(e.sequence) or atom.seq_id < 1: raise IndexError( "Starting model %d atom seq_id (%d) out of range (1-%d) for %s" % (self.model._id, atom.seq_id, len(e.sequence), e)) class _StartingModelDumper(Dumper): def finalize(self, system): # Assign IDs to starting models for nm, m in enumerate(system._all_starting_models()): m._id = nm + 1 def dump(self, system, writer): self.dump_details(system, writer) self.dump_computational(system, writer) self.dump_comparative(system, writer) self.dump_coords(system, writer) self.dump_seq_dif(system, writer) def dump_details(self, system, writer): # Map dataset types to starting model sources source_map = {'Comparative model': 'comparative model', 'Integrative model': 'integrative model', 'Experimental model': 'experimental model', 'De Novo model': 'ab initio model', 'Other': 'other'} with writer.loop( "_ihm_starting_model_details", ["starting_model_id", "entity_id", "entity_description", "asym_id", "entity_poly_segment_id", "starting_model_source", "starting_model_auth_asym_id", "starting_model_sequence_offset", "dataset_list_id", "description"]) as lp: for sm in system._all_starting_models(): lp.write( starting_model_id=sm._id, entity_id=sm.asym_unit.entity._id, entity_description=sm.asym_unit.entity.description, asym_id=sm.asym_unit._id, entity_poly_segment_id=sm.asym_unit._range_id, starting_model_source=source_map[sm.dataset.data_type], starting_model_auth_asym_id=sm.asym_id, dataset_list_id=sm.dataset._id, starting_model_sequence_offset=sm.offset, description=sm.description) def dump_computational(self, system, writer): """Dump details on computational models.""" with writer.loop( "_ihm_starting_computational_models", ["starting_model_id", "software_id", "script_file_id"]) as lp: for sm in system._all_starting_models(): if sm.software or sm.script_file: lp.write(starting_model_id=sm._id, software_id=sm.software._id if sm.software else None, script_file_id=sm.script_file._id if sm.script_file 
else None) def dump_comparative(self, system, writer): """Dump details on comparative models.""" with writer.loop( "_ihm_starting_comparative_models", ["id", "starting_model_id", "starting_model_auth_asym_id", "starting_model_seq_id_begin", "starting_model_seq_id_end", "template_auth_asym_id", "template_seq_id_begin", "template_seq_id_end", "template_sequence_identity", "template_sequence_identity_denominator", "template_dataset_list_id", "alignment_file_id"]) as lp: ordinal = itertools.count(1) for sm in system._all_starting_models(): for template in sm.templates: self._dump_template(template, sm, lp, ordinal) def _dump_template(self, template, sm, lp, ordinal): off = sm.offset denom = template.sequence_identity.denominator if denom is not None and denom is not ihm.unknown: denom = int(denom) # Add offset only if seq_id_range isn't . or ? seq_id_begin = template.seq_id_range[0] if isinstance(template.seq_id_range[0], numbers.Integral): seq_id_begin += off seq_id_end = template.seq_id_range[1] if isinstance(template.seq_id_range[1], numbers.Integral): seq_id_end += off lp.write(id=next(ordinal), starting_model_id=sm._id, starting_model_auth_asym_id=sm.asym_id, starting_model_seq_id_begin=seq_id_begin, starting_model_seq_id_end=seq_id_end, template_auth_asym_id=template.asym_id, template_seq_id_begin=template.template_seq_id_range[0], template_seq_id_end=template.template_seq_id_range[1], template_sequence_identity=template.sequence_identity.value, template_sequence_identity_denominator=denom, template_dataset_list_id=template.dataset._id if template.dataset else None, alignment_file_id=template.alignment_file._id if template.alignment_file else None) def dump_coords(self, system, writer): """Write out coordinate information""" ordinal = itertools.count(1) with writer.loop( "_ihm_starting_model_coord", ["starting_model_id", "group_PDB", "id", "type_symbol", "atom_id", "comp_id", "entity_id", "asym_id", "seq_id", "Cartn_x", "Cartn_y", "Cartn_z", "B_iso_or_equiv", "ordinal_id"]) as lp: for model in system._all_starting_models(): rngcheck = _StartingModelRangeChecker(model, self._check) for natom, atom in enumerate(model.get_atoms()): rngcheck(atom) lp.write(starting_model_id=model._id, group_PDB='HETATM' if atom.het else 'ATOM', id=natom + 1, type_symbol=atom.type_symbol, atom_id=atom.atom_id, comp_id=_get_comp_id(atom.asym_unit.entity, atom.seq_id), asym_id=atom.asym_unit._id, entity_id=atom.asym_unit.entity._id, seq_id=atom.seq_id, Cartn_x=atom.x, Cartn_y=atom.y, Cartn_z=atom.z, B_iso_or_equiv=atom.biso, ordinal_id=next(ordinal)) def dump_seq_dif(self, system, writer): """Write out sequence difference information""" ordinal = itertools.count(1) with writer.loop( "_ihm_starting_model_seq_dif", ["id", "entity_id", "asym_id", "seq_id", "comp_id", "starting_model_id", "db_asym_id", "db_seq_id", "db_comp_id", "details"]) as lp: for model in system._all_starting_models(): for sd in model.get_seq_dif(): comp = model.asym_unit.entity.sequence[sd.seq_id - 1] lp.write( id=next(ordinal), entity_id=model.asym_unit.entity._id, asym_id=model.asym_unit._id, seq_id=sd.seq_id, comp_id=comp.id, db_asym_id=model.asym_id, db_seq_id=sd.db_seq_id, db_comp_id=sd.db_comp_id, starting_model_id=model._id, details=sd.details) class _ProtocolDumper(Dumper): def finalize(self, system): # Assign IDs to protocols and steps for np, p in enumerate(system._all_protocols()): p._id = np + 1 for ns, s in enumerate(p.steps): s._id = ns + 1 def dump(self, system, writer): self.dump_summary(system, writer) 
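# A minimal sketch (not part of this module) of the kind of object graph
# these two tables serialize; class and keyword names are assumed from the
# attributes read below (p.name, p.steps, s.assembly, s.dataset_group,
# s.method, s.num_models_begin, s.num_models_end, ...) and may differ
# slightly from the public ihm.protocol API:
#
#     protocol = ihm.protocol.Protocol(name='Sampling')
#     protocol.steps.append(ihm.protocol.Step(
#         assembly=modeled_assembly, dataset_group=all_datasets,
#         method='Replica exchange MD', num_models_begin=0,
#         num_models_end=1000, multi_scale=True))
#
# Each Protocol then yields one row in _ihm_modeling_protocol (summary)
# and each Step one row in _ihm_modeling_protocol_details.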
self.dump_details(system, writer) def dump_summary(self, system, writer): with writer.loop("_ihm_modeling_protocol", ["id", "protocol_name", "num_steps", "details"]) as lp: for p in system._all_protocols(): lp.write(id=p._id, protocol_name=p.name, num_steps=len(p.steps), details=p.details) def dump_details(self, system, writer): ordinal = itertools.count(1) with writer.loop("_ihm_modeling_protocol_details", ["id", "protocol_id", "step_id", "struct_assembly_id", "dataset_group_id", "step_name", "step_method", "num_models_begin", "num_models_end", "multi_scale_flag", "multi_state_flag", "ordered_flag", "ensemble_flag", "software_id", "script_file_id", "description"]) as lp: for p in system._all_protocols(): for s in p.steps: if s.ensemble == 'default': ensemble = len(system.ensembles) > 0 else: ensemble = s.ensemble lp.write( id=next(ordinal), protocol_id=p._id, step_id=s._id, struct_assembly_id=s.assembly._id, dataset_group_id=s.dataset_group._id if s.dataset_group else None, step_name=s.name, step_method=s.method, num_models_begin=s.num_models_begin, num_models_end=s.num_models_end, multi_state_flag=s.multi_state, ordered_flag=s.ordered, multi_scale_flag=s.multi_scale, ensemble_flag=ensemble, software_id=s.software._id if s.software else None, script_file_id=s.script_file._id if s.script_file else None, description=s.description) class _PostProcessDumper(Dumper): def finalize(self, system): pp_id = itertools.count(1) # Assign IDs to analyses and steps # todo: handle case where one analysis is referred to from multiple # protocols for p in system._all_protocols(): for na, a in enumerate(p.analyses): a._id = na + 1 for ns, s in enumerate(a.steps): s._id = ns + 1 # Assign globally unique postproc id s._post_proc_id = next(pp_id) def dump(self, system, writer): with writer.loop("_ihm_modeling_post_process", ["id", "protocol_id", "analysis_id", "step_id", "type", "feature", "num_models_begin", "num_models_end", "struct_assembly_id", "dataset_group_id", "software_id", "script_file_id", "details"]) as lp: for p in system._all_protocols(): for a in p.analyses: for s in a.steps: lp.write( id=s._post_proc_id, protocol_id=p._id, analysis_id=a._id, step_id=s._id, type=s.type, feature=s.feature, num_models_begin=s.num_models_begin, num_models_end=s.num_models_end, struct_assembly_id=s.assembly._id if s.assembly else None, dataset_group_id=s.dataset_group._id if s.dataset_group else None, software_id=s.software._id if s.software else None, script_file_id=s.script_file._id if s.script_file else None, details=s.details) class _RangeChecker: """Check Atom or Sphere objects to make sure they match the Representation and Assembly""" def __init__(self, model, check=True): self.check = check self._setup_representation(model) self._setup_assembly(model) self._seen_atoms = set() def _setup_representation(self, model): """Make map from asym_id to representation segments for that ID""" r = model.representation if model.representation else [] self.repr_asym_ids = {} for segment in r: asym_id = segment.asym_unit._id if asym_id not in self.repr_asym_ids: self.repr_asym_ids[asym_id] = [] self.repr_asym_ids[asym_id].append(segment) self._last_repr_segment_matched = None def _setup_assembly(self, model): """Make map from asym_id to assembly seq_id ranges for that ID""" a = model.assembly if model.assembly else [] self.asmb_asym_ids = {} for obj in a: if hasattr(obj, 'entity'): asym_id = obj._id if asym_id not in self.asmb_asym_ids: self.asmb_asym_ids[asym_id] = [] self.asmb_asym_ids[asym_id].append(obj.seq_id_range) 
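# Cache the most recently matched assembly range (and its asym ID) so that
# runs of consecutive atoms or spheres falling within the same range can be
# validated in _check_assembly without rescanning every range.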
self._last_asmb_range_matched = None self._last_asmb_asym_matched = None def _type_check_atom(self, obj, segment): """Check an Atom object against a representation segment.""" # Atom objects can only match an AtomicSegment return isinstance(segment, ihm.representation.AtomicSegment) def _type_check_sphere(self, obj, segment): """Check a Sphere object against a representation segment.""" if isinstance(segment, ihm.representation.ResidueSegment): # Only 1-residue Spheres are OK for by-residue segments return obj.seq_id_range[0] == obj.seq_id_range[1] elif isinstance(segment, ihm.representation.MultiResidueSegment): # Sphere must cover the entire range for multi-residue segments return ( obj.seq_id_range[0] == segment.asym_unit.seq_id_range[0] and obj.seq_id_range[1] == segment.asym_unit.seq_id_range[1]) elif isinstance(segment, ihm.representation.FeatureSegment): # Sphere can cover any set of residues but must fall within the # segment range for by-feature (already checked) return True else: # Spheres can never be used to represent a by-atom segment return False def __call__(self, obj): """Check the given Atom or Sphere object""" if not self.check: return asym = obj.asym_unit if isinstance(obj, ihm.model.Sphere): type_check = self._type_check_sphere seq_id_range = obj.seq_id_range else: type_check = self._type_check_atom seq_id_range = (obj.seq_id, obj.seq_id) # Allow seq_id to be either 1 or None for ligands if obj.seq_id == 1 and asym.entity.type == 'non-polymer': seq_id_range = (None, None) self._check_duplicate_atom(obj) self._check_assembly(obj, asym, seq_id_range) self._check_representation(obj, asym, type_check, seq_id_range) def _check_duplicate_atom(self, atom): # e.g. multiple bulk water oxygen atoms can have "same" seq_id (None) if atom.seq_id is None: return k = (atom.asym_unit._id, atom.atom_id, atom.seq_id, atom.alt_id) if k in self._seen_atoms: raise ValueError( "Multiple atoms with same atom_id (%s), seq_id (%d) " "and alt_id (%s) found in asym ID %s" % (atom.atom_id, atom.seq_id, atom.alt_id, atom.asym_unit._id)) self._seen_atoms.add(k) def _check_assembly(self, obj, asym, seq_id_range): # Check last match first last_rng = self._last_asmb_range_matched if last_rng and asym._id == self._last_asmb_asym_matched \ and _is_subrange(seq_id_range, last_rng): return # Check asym_id if asym._id not in self.asmb_asym_ids: raise ValueError( "%s refers to an asym ID (%s) that is not in this " "model's assembly (which includes the following asym IDs: %s)" % (obj, asym._id, ", ".join(sorted(a for a in self.asmb_asym_ids)))) # Check range for rng in self.asmb_asym_ids[asym._id]: if _is_subrange(seq_id_range, rng): self._last_asmb_asym_matched = asym._id self._last_asmb_range_matched = rng return def print_range(rng): if rng == (None, None): return "None" else: return "%d-%d" % rng raise ValueError( "%s seq_id range (%s) does not match any range " "in the assembly for asym ID %s (ranges are %s)" % (obj, print_range(seq_id_range), asym._id, ", ".join(print_range(x) for x in self.asmb_asym_ids[asym._id]))) def _check_representation(self, obj, asym, type_check, seq_id_range): # Check last match first last_seg = self._last_repr_segment_matched if last_seg and asym._id == last_seg.asym_unit._id \ and _is_subrange(seq_id_range, last_seg.asym_unit.seq_id_range) \ and type_check(obj, last_seg): return # Check asym_id if asym._id not in self.repr_asym_ids: raise ValueError( "%s refers to an asym ID (%s) that is not in this " "model's representation (which includes the following asym " "IDs: %s)" % 
(obj, asym._id, ", ".join(sorted(a for a in self.repr_asym_ids)))) # Check range bad_type_segments = [] for segment in self.repr_asym_ids[asym._id]: rng = segment.asym_unit.seq_id_range if _is_subrange(seq_id_range, rng): if type_check(obj, segment): self._last_repr_segment_matched = segment return else: bad_type_segments.append(segment) if bad_type_segments: raise ValueError( "%s does not match the type of any representation " "segment in the seq_id_range (%d-%d) for asym ID %s. " "Representation segments are: %s" % (obj, seq_id_range[0], seq_id_range[1], asym._id, ", ".join(str(s) for s in bad_type_segments))) else: raise ValueError( "%s seq_id range (%d-%d) does not match any range " "in the representation for asym ID %s (representation " "ranges are %s)" % (obj, seq_id_range[0], seq_id_range[1], asym._id, ", ".join("%d-%d" % x.asym_unit.seq_id_range for x in self.repr_asym_ids[asym._id]))) class _AssemblyChecker: """Check that all Assembly asyms are in a Model""" def __init__(self): # Map from Assembly id to set of Asym ids self._asmb_asyms = {} # Map from Assembly id to Assembly object self._asmb_from_id = {} # Map from Assembly id to set of all represented Asym ids (in models) self._asmb_model_asyms = {} def add_model_asyms(self, model, seen_asym_ids): """Add a set of asym IDs seen in atoms or spheres in the model""" asmb = model.assembly # If this is the first time we've seen this assembly, get its # declared set of asym IDs if id(asmb) not in self._asmb_asyms: self._asmb_from_id[id(asmb)] = asmb asyms = frozenset(x._id for x in asmb if hasattr(x, 'entity')) self._asmb_asyms[id(asmb)] = asyms # Add asym IDs from model if id(asmb) not in self._asmb_model_asyms: self._asmb_model_asyms[id(asmb)] = set() self._asmb_model_asyms[id(asmb)] |= seen_asym_ids def check(self): """Make sure each Assembly only references asym IDs that are represented by atoms or spheres in at least one Model, or raise ValueError.""" def get_extra_asyms(): for asmb_id, asyms in self._asmb_asyms.items(): extra = asyms - self._asmb_model_asyms[asmb_id] if extra: asmb = self._asmb_from_id[asmb_id] asmb_id = ("ID %s" % asmb._id if hasattr(asmb, '_id') else asmb) yield asmb_id, ", ".join(sorted(extra)) err = "; ".join("%s, asym IDs %s" % extra for extra in get_extra_asyms()) if err: raise ValueError( "The following Assemblies reference asym IDs that don't " "have coordinates in any Model: " + err) class _ModelDumperBase(Dumper): def finalize(self, system): # Remove any existing ID for g in system._all_model_groups(only_in_states=False): if hasattr(g, '_id'): del g._id for m in g: if hasattr(m, '_id'): del m._id model_id = itertools.count(1) # Assign IDs to models and groups in states for ng, g in enumerate(system._all_model_groups()): g._id = ng + 1 for m in g: if not hasattr(m, '_id'): m._id = next(model_id) # Check for any groups not referenced by states for g in system._all_model_groups(only_in_states=False): if not hasattr(g, '_id'): raise ValueError("%s is referenced only by an Ensemble or " "OrderedProcess. ModelGroups should be " "stored in State objects." % g) def dump_atom_type(self, seen_types, system, writer): """Output the atom_type table with a list of elements used in atom_site. This table is needed by atom_site. 
Note that we output it *after* atom_site (otherwise we would need to iterate through all atoms in the system twice).""" # Also check all assemblies, after dumping all atoms/spheres if self._check: self._assembly_checker.check() elements = [x for x in sorted(seen_types.keys()) if x is not None] with writer.loop("_atom_type", ["symbol"]) as lp: for element in elements: lp.write(symbol=element) def __get_assembly_checker(self): if not hasattr(self, '_asmb_check'): self._asmb_check = _AssemblyChecker() return self._asmb_check _assembly_checker = property(__get_assembly_checker) def dump_atoms(self, system, writer, add_ihm=True): seen_types = {} ordinal = itertools.count(1) it = ["group_PDB", "id", "type_symbol", "label_atom_id", "label_alt_id", "label_comp_id", "label_seq_id", "auth_seq_id", "pdbx_PDB_ins_code", "label_asym_id", "Cartn_x", "Cartn_y", "Cartn_z", "occupancy", "label_entity_id", "auth_asym_id", "auth_comp_id", "B_iso_or_equiv", "pdbx_PDB_model_num"] if add_ihm: it.append("ihm_model_id") with writer.loop("_atom_site", it) as lp: for group, model in system._all_models(): seen_asym_ids = set() rngcheck = _RangeChecker(model, self._check) for atom in model.get_atoms(): rngcheck(atom) seen_asym_ids.add(atom.asym_unit._id) seq_id = 1 if atom.seq_id is None else atom.seq_id label_seq_id = atom.seq_id if not atom.asym_unit.entity.is_polymeric(): label_seq_id = None comp = atom.asym_unit.sequence[seq_id - 1] seen_types[atom.type_symbol] = None auth_seq_id, ins = \ atom.asym_unit._get_auth_seq_id_ins_code(seq_id) lp.write(id=next(ordinal), type_symbol=atom.type_symbol, group_PDB='HETATM' if atom.het else 'ATOM', label_atom_id=atom.atom_id, label_alt_id=atom.alt_id, label_comp_id=comp.id, label_asym_id=atom.asym_unit._id, label_entity_id=atom.asym_unit.entity._id, label_seq_id=label_seq_id, auth_seq_id=auth_seq_id, auth_comp_id=comp.id, pdbx_PDB_ins_code=ins or ihm.unknown, auth_asym_id=atom.asym_unit.strand_id, Cartn_x=atom.x, Cartn_y=atom.y, Cartn_z=atom.z, B_iso_or_equiv=atom.biso, occupancy=atom.occupancy, pdbx_PDB_model_num=model._id, ihm_model_id=model._id) self._assembly_checker.add_model_asyms(model, seen_asym_ids) return seen_types class _ModelDumper(_ModelDumperBase): def dump(self, system, writer): self.dump_model_list(system, writer) self.dump_model_groups(system, writer) seen_types = self.dump_atoms(system, writer) self.dump_spheres(system, writer) self.dump_atom_type(seen_types, system, writer) def dump_model_groups(self, system, writer): self.dump_model_group_summary(system, writer) self.dump_model_group_link(system, writer) def dump_model_list(self, system, writer): with writer.loop("_ihm_model_list", ["model_id", "model_name", "assembly_id", "protocol_id", "representation_id"]) as lp: for group, model in system._all_models(): lp.write(model_id=model._id, model_name=model.name, assembly_id=model.assembly._id, protocol_id=model.protocol._id if model.protocol else None, representation_id=model.representation._id) def dump_model_group_summary(self, system, writer): with writer.loop("_ihm_model_group", ["id", "name", "details"]) as lp: for group in system._all_model_groups(): lp.write(id=group._id, name=group.name, details=group.details) def dump_model_group_link(self, system, writer): with writer.loop("_ihm_model_group_link", ["group_id", "model_id"]) as lp: for group in system._all_model_groups(): for model_id in sorted(set(model._id for model in group)): lp.write(model_id=model_id, group_id=group._id) def dump_spheres(self, system, writer): ordinal = itertools.count(1) with 
writer.loop("_ihm_sphere_obj_site", ["id", "entity_id", "seq_id_begin", "seq_id_end", "asym_id", "Cartn_x", "Cartn_y", "Cartn_z", "object_radius", "rmsf", "model_id"]) as lp: for group, model in system._all_models(): rngcheck = _RangeChecker(model, self._check) seen_asym_ids = set() for sphere in model.get_spheres(): rngcheck(sphere) seen_asym_ids.add(sphere.asym_unit._id) lp.write(id=next(ordinal), entity_id=sphere.asym_unit.entity._id, seq_id_begin=sphere.seq_id_range[0], seq_id_end=sphere.seq_id_range[1], asym_id=sphere.asym_unit._id, Cartn_x=sphere.x, Cartn_y=sphere.y, Cartn_z=sphere.z, object_radius=sphere.radius, rmsf=sphere.rmsf, model_id=model._id) self._assembly_checker.add_model_asyms(model, seen_asym_ids) class _ModelRepresentativeDumper(Dumper): def dump(self, system, writer): ordinal = itertools.count(1) with writer.loop("_ihm_model_representative", ["id", "model_group_id", "model_id", "selection_criteria"]) as lp: for group in system._all_model_groups(): for rep in group.representatives: # This assumes that each representative is also a # member of the group, so we don't need to assign an ID. lp.write(id=next(ordinal), model_group_id=group._id, model_id=rep.model._id, selection_criteria=rep.selection_criteria) class _NotModeledResidueRangeDumper(Dumper): def dump(self, system, writer): ordinal = itertools.count(1) with writer.loop("_ihm_residues_not_modeled", ["id", "model_id", "entity_description", "entity_id", "asym_id", "seq_id_begin", "seq_id_end", "comp_id_begin", "comp_id_end", "reason"]) as lp: for group, model in system._all_models(): for rr in model.not_modeled_residue_ranges: e = rr.asym_unit.entity if self._check: util._check_residue_range( (rr.seq_id_begin, rr.seq_id_end), e) lp.write(id=next(ordinal), model_id=model._id, entity_description=e.description, entity_id=e._id, asym_id=rr.asym_unit._id, seq_id_begin=rr.seq_id_begin, seq_id_end=rr.seq_id_end, comp_id_begin=_get_comp_id(e, rr.seq_id_begin), comp_id_end=_get_comp_id(e, rr.seq_id_end), reason=rr.reason) class _EnsembleDumper(Dumper): def finalize(self, system): # Assign IDs for ne, e in enumerate(system.ensembles): e._id = ne + 1 def dump(self, system, writer): self.dump_info(system, writer) self.dump_subsamples(system, writer) def dump_info(self, system, writer): with writer.loop("_ihm_ensemble_info", ["ensemble_id", "ensemble_name", "post_process_id", "model_group_id", "ensemble_clustering_method", "ensemble_clustering_feature", "num_ensemble_models", "num_ensemble_models_deposited", "ensemble_precision_value", "ensemble_file_id", "details", "model_group_superimposed_flag", "sub_sample_flag", "sub_sampling_type"]) as lp: for e in system.ensembles: if e.subsamples: sstype = e.subsamples[0].sub_sampling_type else: sstype = None lp.write(ensemble_id=e._id, ensemble_name=e.name, post_process_id=e.post_process._id if e.post_process else None, model_group_id=e.model_group._id if e.model_group is not None else None, ensemble_clustering_method=e.clustering_method, ensemble_clustering_feature=e.clustering_feature, num_ensemble_models=e.num_models, num_ensemble_models_deposited=e.num_models_deposited, ensemble_precision_value=e.precision, ensemble_file_id=e.file._id if e.file else None, details=e.details, model_group_superimposed_flag=e.superimposed, sub_sample_flag=len(e.subsamples) > 0, sub_sampling_type=sstype) def dump_subsamples(self, system, writer): ordinal = itertools.count(1) with writer.loop("_ihm_ensemble_sub_sample", ["id", "name", "ensemble_id", "num_models", "num_models_deposited", "model_group_id", 
"file_id"]) as lp: for e in system.ensembles: for s in e.subsamples: lp.write(id=next(ordinal), name=s.name, ensemble_id=e._id, num_models=s.num_models, num_models_deposited=s.num_models_deposited, model_group_id=s.model_group._id if s.model_group else None, file_id=s.file._id if s.file else None) if type(s) != type(e.subsamples[0]): # noqa: E721 raise TypeError( "Subsamples are not all of the same type " "(%s vs %s) for ensemble %s" % (s, e.subsamples[0], e)) class _DensityDumper(Dumper): def finalize(self, system): # Assign globally unique IDs did = itertools.count(1) for e in system.ensembles: for d in e.densities: d._id = next(did) def dump(self, system, writer): with writer.loop("_ihm_localization_density_files", ["id", "file_id", "ensemble_id", "entity_id", "asym_id", "entity_poly_segment_id"]) as lp: for ensemble in system.ensembles: for density in ensemble.densities: lp.write( id=density._id, file_id=density.file._id, ensemble_id=ensemble._id, entity_id=density.asym_unit.entity._id, asym_id=density.asym_unit._id, entity_poly_segment_id=density.asym_unit._range_id) class _MultiStateDumper(Dumper): def finalize(self, system): state_id = itertools.count(1) # Assign IDs for ng, g in enumerate(system.state_groups): g._id = ng + 1 for state in g: state._id = next(state_id) def dump(self, system, writer): # Nothing to do for single state modeling if len(system.state_groups) == 1 and len(system.state_groups[0]) <= 1: return self.dump_summary(system, writer) self.dump_model_groups(system, writer) def dump_summary(self, system, writer): with writer.loop("_ihm_multi_state_modeling", ["state_id", "state_group_id", "population_fraction", "state_type", "state_name", "experiment_type", "details"]) as lp: for state_group in system.state_groups: for state in state_group: lp.write(state_id=state._id, state_group_id=state_group._id, population_fraction=state.population_fraction, state_type=state.type, state_name=state.name, experiment_type=state.experiment_type, details=state.details) def dump_model_groups(self, system, writer): with writer.loop("_ihm_multi_state_model_group_link", ["state_id", "model_group_id"]) as lp: for state_group in system.state_groups: for state in state_group: for model_group in state: lp.write(state_id=state._id, model_group_id=model_group._id) class _OrderedDumper(Dumper): def finalize(self, system): for nproc, proc in enumerate(system.ordered_processes): proc._id = nproc + 1 edge_id = itertools.count(1) for nstep, step in enumerate(proc.steps): step._id = nstep + 1 for edge in step: edge._id = next(edge_id) def dump(self, system, writer): with writer.loop("_ihm_ordered_model", ["process_id", "process_description", "ordered_by", "step_id", "step_description", "edge_id", "edge_description", "model_group_id_begin", "model_group_id_end"]) as lp: for proc in system.ordered_processes: for step in proc.steps: for edge in step: lp.write(process_id=proc._id, process_description=proc.description, ordered_by=proc.ordered_by, step_id=step._id, step_description=step.description, edge_id=edge._id, edge_description=edge.description, model_group_id_begin=edge.group_begin._id, model_group_id_end=edge.group_end._id) class _GeometricObjectDumper(Dumper): def finalize(self, system): seen_objects = {} seen_centers = {} seen_transformations = {} self._centers_by_id = [] self._transformations_by_id = [] self._objects_by_id = [] for o in system._all_geometric_objects(): util._remove_id(o) if hasattr(o, 'center'): util._remove_id(o.center) if hasattr(o, 'transformation') and o.transformation: 
util._remove_id(o.transformation) for t in system._orphan_geometric_transforms: util._remove_id(t) for c in system._orphan_centers: util._remove_id(c) for o in system._all_geometric_objects(): util._assign_id(o, seen_objects, self._objects_by_id) if hasattr(o, 'center'): util._assign_id(o.center, seen_centers, self._centers_by_id) if hasattr(o, 'transformation') and o.transformation: util._assign_id(o.transformation, seen_transformations, self._transformations_by_id) for t in system._orphan_geometric_transforms: util._assign_id(t, seen_transformations, self._transformations_by_id) for c in system._orphan_centers: util._assign_id(c, seen_centers, self._centers_by_id) def dump(self, system, writer): self.dump_centers(writer) self.dump_transformations(writer) self.dump_generic(writer) self.dump_sphere(writer) self.dump_torus(writer) self.dump_half_torus(writer) self.dump_axis(writer) self.dump_plane(writer) def dump_centers(self, writer): with writer.loop("_ihm_geometric_object_center", ["id", "xcoord", "ycoord", "zcoord"]) as lp: for c in self._centers_by_id: lp.write(id=c._id, xcoord=c.x, ycoord=c.y, zcoord=c.z) def dump_transformations(self, writer): with writer.loop( "_ihm_geometric_object_transformation", ["id", "rot_matrix[1][1]", "rot_matrix[2][1]", "rot_matrix[3][1]", "rot_matrix[1][2]", "rot_matrix[2][2]", "rot_matrix[3][2]", "rot_matrix[1][3]", "rot_matrix[2][3]", "rot_matrix[3][3]", "tr_vector[1]", "tr_vector[2]", "tr_vector[3]"]) as lp: for t in self._transformations_by_id: if self._check: util._check_transform(t) lp.write(id=t._id, **_get_transform(t.rot_matrix, t.tr_vector)) def dump_generic(self, writer): with writer.loop("_ihm_geometric_object_list", ["object_id", "object_type", "object_name", "object_description"]) as lp: for o in self._objects_by_id: lp.write(object_id=o._id, object_type=o.type, object_name=o.name, object_description=o.description) def dump_sphere(self, writer): with writer.loop("_ihm_geometric_object_sphere", ["object_id", "center_id", "transformation_id", "radius_r"]) as lp: for o in self._objects_by_id: if not isinstance(o, geometry.Sphere): continue lp.write(object_id=o._id, center_id=o.center._id, transformation_id=o.transformation._id if o.transformation else None, radius_r=o.radius) def dump_torus(self, writer): with writer.loop("_ihm_geometric_object_torus", ["object_id", "center_id", "transformation_id", "major_radius_R", "minor_radius_r"]) as lp: for o in self._objects_by_id: if not isinstance(o, (geometry.Torus, geometry.HalfTorus)): continue lp.write(object_id=o._id, center_id=o.center._id, transformation_id=o.transformation._id if o.transformation else None, major_radius_R=o.major_radius, minor_radius_r=o.minor_radius) def dump_half_torus(self, writer): section_map = {True: 'inner half', False: 'outer half'} with writer.loop("_ihm_geometric_object_half_torus", ["object_id", "thickness_th", "section"]) as lp: for o in self._objects_by_id: if not isinstance(o, geometry.HalfTorus): continue lp.write(object_id=o._id, thickness_th=o.thickness, section=section_map.get(o.inner, 'other')) def dump_axis(self, writer): with writer.loop("_ihm_geometric_object_axis", ["object_id", "axis_type", "transformation_id"]) as lp: for o in self._objects_by_id: if not isinstance(o, geometry.Axis): continue lp.write(object_id=o._id, axis_type=o.axis_type, transformation_id=o.transformation._id if o.transformation else None) def dump_plane(self, writer): with writer.loop("_ihm_geometric_object_plane", ["object_id", "plane_type", "transformation_id"]) as lp: for o in 
self._objects_by_id: if not isinstance(o, geometry.Plane): continue lp.write(object_id=o._id, plane_type=o.plane_type, transformation_id=o.transformation._id if o.transformation else None) class _FeatureDumper(Dumper): def finalize(self, system): seen_features = {} self._features_by_id = [] for f in system._all_features(): util._remove_id(f) for f in system._all_features(): util._assign_id(f, seen_features, self._features_by_id, seen_obj=f._signature() if hasattr(f, '_signature') else f) def dump(self, system, writer): self.dump_list(writer) self.dump_poly_residue(writer) self.dump_poly_atom(writer) self.dump_non_poly(writer) self.dump_pseudo_site(writer) def dump_list(self, writer): with writer.loop("_ihm_feature_list", ["feature_id", "feature_type", "entity_type", "details"]) as lp: for f in self._features_by_id: if self._check and f.type is ihm.unknown: raise ValueError("Invalid null feature %s" % f) lp.write(feature_id=f._id, feature_type=f.type, entity_type=f._get_entity_type(), details=f.details) def dump_poly_residue(self, writer): def _get_entity(x): return x if isinstance(x, ihm.Entity) else x.entity def _get_asym_id(x): return (x._id if isinstance(x, (ihm.AsymUnit, ihm.AsymUnitRange)) else None) ordinal = itertools.count(1) with writer.loop("_ihm_poly_residue_feature", ["ordinal_id", "feature_id", "entity_id", "asym_id", "seq_id_begin", "comp_id_begin", "seq_id_end", "comp_id_end"]) as lp: for f in self._features_by_id: if not isinstance(f, restraint.ResidueFeature): continue if not f.ranges: raise ValueError("%s selects no residues" % f) for r in f.ranges: entity = _get_entity(r) seq = entity.sequence lp.write(ordinal_id=next(ordinal), feature_id=f._id, entity_id=entity._id, asym_id=_get_asym_id(r), seq_id_begin=r.seq_id_range[0], comp_id_begin=seq[r.seq_id_range[0] - 1].id, seq_id_end=r.seq_id_range[1], comp_id_end=seq[r.seq_id_range[1] - 1].id) def dump_poly_atom(self, writer): ordinal = itertools.count(1) with writer.loop("_ihm_poly_atom_feature", ["ordinal_id", "feature_id", "entity_id", "asym_id", "seq_id", "comp_id", "atom_id"]) as lp: for f in self._features_by_id: if not isinstance(f, restraint.AtomFeature): continue for a in f.atoms: r = a.residue entity = r.entity if r.entity else r.asym.entity if entity.is_polymeric(): seq = entity.sequence lp.write(ordinal_id=next(ordinal), feature_id=f._id, entity_id=entity._id, asym_id=r.asym._id if r.asym else None, seq_id=r.seq_id, comp_id=seq[r.seq_id - 1].id, atom_id=a.id) def dump_non_poly(self, writer): ordinal = itertools.count(1) with writer.loop("_ihm_non_poly_feature", ["ordinal_id", "feature_id", "entity_id", "asym_id", "comp_id", "atom_id"]) as lp: for f in self._features_by_id: if isinstance(f, restraint.AtomFeature): for a in f.atoms: r = a.residue entity = r.entity if r.entity else r.asym.entity if not entity.is_polymeric(): seq = entity.sequence lp.write(ordinal_id=next(ordinal), feature_id=f._id, entity_id=entity._id, asym_id=r.asym._id if r.asym else None, comp_id=seq[r.seq_id - 1].id, atom_id=a.id) elif isinstance(f, restraint.NonPolyFeature): _ = f._get_entity_type() # trigger check for poly/nonpoly for a in f.objs: entity = a if isinstance(a, ihm.Entity) else a.entity asym_id = (a._id if isinstance(a, ihm.AsymUnit) else None) seq = entity.sequence lp.write(ordinal_id=next(ordinal), feature_id=f._id, entity_id=entity._id, asym_id=asym_id, comp_id=seq[0].id, atom_id=None) def dump_pseudo_site(self, writer): with writer.loop("_ihm_pseudo_site_feature", ["feature_id", "pseudo_site_id"]) as lp: for f in 
self._features_by_id: if not isinstance(f, restraint.PseudoSiteFeature): continue lp.write(feature_id=f._id, pseudo_site_id=f.site._id) class _PseudoSiteDumper(Dumper): def finalize(self, system): seen_sites = {} self._sites_by_id = [] for f in system._all_pseudo_sites(): util._remove_id(f) for f in system._all_pseudo_sites(): util._assign_id(f, seen_sites, self._sites_by_id, seen_obj=f._signature()) def dump(self, system, writer): with writer.loop("_ihm_pseudo_site", ["id", "Cartn_x", "Cartn_y", "Cartn_z", "radius", "description"]) as lp: for s in self._sites_by_id: lp.write(id=s._id, Cartn_x=s.x, Cartn_y=s.y, Cartn_z=s.z, radius=s.radius, description=s.description) class _CrossLinkDumper(Dumper): def _all_restraints(self, system): return [r for r in system._all_restraints() if isinstance(r, restraint.CrossLinkRestraint)] def finalize(self, system): self.finalize_experimental(system) self.finalize_modeling(system) def finalize_experimental(self, system): seen_cross_links = {} seen_group_ids = {} xl_id = itertools.count(1) self._ex_xls_by_id = [] for r in self._all_restraints(system): for g in r.experimental_cross_links: for xl in g: # Assign identical cross-links the same ID and group ID sig = (xl.residue1.entity, xl.residue1.seq_id, xl.residue2.entity, xl.residue2.seq_id, r.linker) if sig in seen_cross_links: xl._id, xl._group_id = seen_cross_links[sig] else: if id(g) not in seen_group_ids: seen_group_ids[id(g)] = len(seen_group_ids) + 1 xl._group_id = seen_group_ids[id(g)] xl._id = next(xl_id) self._ex_xls_by_id.append((r, xl)) seen_cross_links[sig] = xl._id, xl._group_id def finalize_modeling(self, system): seen_cross_links = {} xl_id = itertools.count(1) self._xls_by_id = [] for r in self._all_restraints(system): for xl in r.cross_links: # Assign identical cross-links the same ID ex_xl = xl.experimental_cross_link sig = (xl.asym1._id, ex_xl.residue1.seq_id, xl.atom1, xl.asym2._id, ex_xl.residue2.seq_id, xl.atom2, r.linker) if sig in seen_cross_links: xl._id = seen_cross_links[sig] else: xl._id = next(xl_id) self._xls_by_id.append((r, xl)) seen_cross_links[sig] = xl._id def dump(self, system, writer): self.dump_list(system, writer) pseudo_xls = self.dump_restraint(system, writer) self.dump_pseudo_sites(system, writer, pseudo_xls) self.dump_result(system, writer) self.dump_result_parameters(system, writer) def dump_list(self, system, writer): with writer.loop("_ihm_cross_link_list", ["id", "group_id", "entity_description_1", "entity_id_1", "seq_id_1", "comp_id_1", "entity_description_2", "entity_id_2", "seq_id_2", "comp_id_2", "linker_chem_comp_descriptor_id", "linker_type", "dataset_list_id", "details"]) as lp: for r, xl in self._ex_xls_by_id: entity1 = xl.residue1.entity entity2 = xl.residue2.entity if self._check: util._check_residue(xl.residue1) util._check_residue(xl.residue2) lp.write(id=xl._id, group_id=xl._group_id, entity_description_1=entity1.description, entity_id_1=entity1._id, seq_id_1=xl.residue1.seq_id, comp_id_1=_get_comp_id(entity1, xl.residue1.seq_id), entity_description_2=entity2.description, entity_id_2=entity2._id, seq_id_2=xl.residue2.seq_id, comp_id_2=_get_comp_id(entity2, xl.residue2.seq_id), linker_chem_comp_descriptor_id=r.linker._id, linker_type=r.linker.auth_name, dataset_list_id=r.dataset._id, details=xl.details) def dump_restraint(self, system, writer): pseudo_xls = [] with writer.loop("_ihm_cross_link_restraint", ["id", "group_id", "entity_id_1", "asym_id_1", "seq_id_1", "comp_id_1", "entity_id_2", "asym_id_2", "seq_id_2", "comp_id_2", "atom_id_1", 
"atom_id_2", "restraint_type", "conditional_crosslink_flag", "model_granularity", "distance_threshold", "psi", "sigma_1", "sigma_2", "pseudo_site_flag"]) as lp: condmap = {True: 'ALL', False: 'ANY', None: None} for r, xl in self._xls_by_id: ex_xl = xl.experimental_cross_link entity1 = ex_xl.residue1.entity entity2 = ex_xl.residue2.entity pseudo = False for np, ps in enumerate((xl.pseudo1, xl.pseudo2)): if ps: pseudo = True for p in ps: pseudo_xls.append((p, np, xl)) lp.write(id=xl._id, group_id=ex_xl._id, entity_id_1=entity1._id, asym_id_1=xl.asym1._id, seq_id_1=ex_xl.residue1.seq_id, comp_id_1=_get_comp_id(entity1, ex_xl.residue1.seq_id), entity_id_2=entity2._id, asym_id_2=xl.asym2._id, seq_id_2=ex_xl.residue2.seq_id, comp_id_2=_get_comp_id(entity2, ex_xl.residue2.seq_id), atom_id_1=xl.atom1, atom_id_2=xl.atom2, restraint_type=xl.distance.restraint_type, conditional_crosslink_flag=condmap[xl.restrain_all], model_granularity=xl.granularity, distance_threshold=xl.distance.distance, psi=xl.psi, sigma_1=xl.sigma1, sigma_2=xl.sigma2, pseudo_site_flag=pseudo) return pseudo_xls def dump_pseudo_sites(self, system, writer, pseudo_xls): with writer.loop("_ihm_cross_link_pseudo_site", ["id", "restraint_id", "cross_link_partner", "pseudo_site_id", "model_id"]) as lp: ordinal = itertools.count(1) for p, partner, rsr in pseudo_xls: lp.write(id=next(ordinal), restraint_id=rsr._id, cross_link_partner=partner + 1, pseudo_site_id=p.site._id, model_id=p.model._id if p.model else None) def dump_result(self, system, writer): with writer.loop("_ihm_cross_link_result", ["id", "restraint_id", "ensemble_id", "model_group_id", "num_models", "distance_threshold", "median_distance", "details"]) as lp: ordinal = itertools.count(1) for r in self._all_restraints(system): for xl in r.cross_links: # all fits ordered by ID for g, fit in sorted( (it for it in xl.fits.items() if not isinstance(it[0], ihm.model.Model)), key=lambda i: i[0]._id): if isinstance(g, ihm.model.Ensemble): ens_id = g._id if g.model_group is None: mg_id = None else: mg_id = g.model_group._id else: mg_id = g._id ens_id = None lp.write(id=next(ordinal), restraint_id=xl._id, model_group_id=mg_id, ensemble_id=ens_id, num_models=fit.num_models, distance_threshold=xl.distance.distance, median_distance=fit.median_distance, details=fit.details) def dump_result_parameters(self, system, writer): with writer.loop("_ihm_cross_link_result_parameters", ["id", "restraint_id", "model_id", "psi", "sigma_1", "sigma_2"]) as lp: ordinal = itertools.count(1) for r in self._all_restraints(system): for xl in r.cross_links: # all fits ordered by model ID for model, fit in sorted( (it for it in xl.fits.items() if isinstance(it[0], ihm.model.Model)), key=lambda i: i[0]._id): lp.write(id=next(ordinal), restraint_id=xl._id, model_id=model._id, psi=fit.psi, sigma_1=fit.sigma1, sigma_2=fit.sigma2) class _GeometricRestraintDumper(Dumper): def _all_restraints(self, system): return [r for r in system._all_restraints() if isinstance(r, restraint.GeometricRestraint)] def finalize(self, system): for nr, r in enumerate(self._all_restraints(system)): r._id = nr + 1 def dump(self, system, writer): condmap = {True: 'ALL', False: 'ANY', None: None} with writer.loop("_ihm_geometric_object_distance_restraint", ["id", "object_id", "feature_id", "object_characteristic", "restraint_type", "harmonic_force_constant", "distance_lower_limit", "distance_upper_limit", "group_conditionality", "dataset_list_id"]) as lp: for r in self._all_restraints(system): lp.write(id=r._id, 
object_id=r.geometric_object._id, feature_id=r.feature._id, object_characteristic=r.object_characteristic, restraint_type=r.distance.restraint_type, distance_lower_limit=r.distance.distance_lower_limit, distance_upper_limit=r.distance.distance_upper_limit, harmonic_force_constant=r.harmonic_force_constant, group_conditionality=condmap[r.restrain_all], dataset_list_id=r.dataset._id if r.dataset else None) def _finalize_restraints_and_groups(system, restraint_class): """Assign IDs to all restraints of the given class, and also assign IDs to groups of these restraints.""" def _all_restraints(): return [r for r in system._all_restraints() if isinstance(r, restraint_class)] def _all_restraint_groups(): return [rg for rg in system.restraint_groups if all(isinstance(r, restraint_class) for r in rg) and len(rg) > 0] restraints_by_id = [] seen_restraints = {} for r in _all_restraints(): util._remove_id(r) for r in _all_restraints(): util._assign_id(r, seen_restraints, restraints_by_id) group_for_id = {} for nrg, rg in enumerate(_all_restraint_groups()): rg._id = nrg + 1 for r in rg: if r._id in group_for_id: raise ValueError("%s cannot be in more than one group" % r) group_for_id[r._id] = rg._id return restraints_by_id, group_for_id class _DerivedDistanceRestraintDumper(Dumper): def finalize(self, system): (self._restraints_by_id, self._group_for_id) = _finalize_restraints_and_groups( system, restraint.DerivedDistanceRestraint) def dump(self, system, writer): condmap = {True: 'ALL', False: 'ANY', None: None} with writer.loop("_ihm_derived_distance_restraint", ["id", "group_id", "feature_id_1", "feature_id_2", "restraint_type", "distance_lower_limit", "distance_upper_limit", "probability", "mic_value", "group_conditionality", "dataset_list_id"]) as lp: for r in self._restraints_by_id: lp.write(id=r._id, feature_id_1=r.feature1._id, group_id=self._group_for_id.get(r._id, None), feature_id_2=r.feature2._id, restraint_type=r.distance.restraint_type, distance_lower_limit=r.distance.distance_lower_limit, distance_upper_limit=r.distance.distance_upper_limit, probability=r.probability, mic_value=r.mic_value, group_conditionality=condmap[r.restrain_all], dataset_list_id=r.dataset._id if r.dataset else None) class _HDXRestraintDumper(Dumper): def _all_restraints(self, system): return [r for r in system._all_restraints() if isinstance(r, restraint.HDXRestraint)] def finalize(self, system): for nr, r in enumerate(self._all_restraints(system)): r._id = nr + 1 def dump(self, system, writer): with writer.loop("_ihm_hdx_restraint", ["id", "feature_id", "protection_factor", "dataset_list_id", "details"]) as lp: for r in self._all_restraints(system): lp.write(id=r._id, feature_id=r.feature._id, protection_factor=r.protection_factor, details=r.details, dataset_list_id=r.dataset._id if r.dataset else None) class _PredictedContactRestraintDumper(Dumper): def finalize(self, system): (self._restraints_by_id, self._group_for_id) = _finalize_restraints_and_groups( system, restraint.PredictedContactRestraint) def dump(self, system, writer): with writer.loop("_ihm_predicted_contact_restraint", ["id", "group_id", "entity_id_1", "asym_id_1", "comp_id_1", "seq_id_1", "rep_atom_1", "entity_id_2", "asym_id_2", "comp_id_2", "seq_id_2", "rep_atom_2", "restraint_type", "distance_lower_limit", "distance_upper_limit", "probability", "model_granularity", "dataset_list_id", "software_id"]) as lp: for r in self._restraints_by_id: e = r.resatom1.asym.entity comp1 = e.sequence[r.resatom1.seq_id - 1].id e = r.resatom2.asym.entity comp2 = 
e.sequence[r.resatom2.seq_id - 1].id atom1 = atom2 = None if isinstance(r.resatom1, ihm.Atom): atom1 = r.resatom1.id if isinstance(r.resatom2, ihm.Atom): atom2 = r.resatom2.id lp.write(id=r._id, group_id=self._group_for_id.get(r._id, None), entity_id_1=r.resatom1.asym.entity._id, asym_id_1=r.resatom1.asym._id, comp_id_1=comp1, seq_id_1=r.resatom1.seq_id, rep_atom_1=atom1, entity_id_2=r.resatom2.asym.entity._id, asym_id_2=r.resatom2.asym._id, comp_id_2=comp2, seq_id_2=r.resatom2.seq_id, rep_atom_2=atom2, restraint_type=r.distance.restraint_type, distance_lower_limit=r.distance.distance_lower_limit, distance_upper_limit=r.distance.distance_upper_limit, probability=r.probability, model_granularity="by-residue" if r.by_residue else 'by-feature', dataset_list_id=r.dataset._id if r.dataset else None, software_id=r.software._id if r.software else None) class _EM3DDumper(Dumper): def _all_restraints(self, system): return [r for r in system._all_restraints() if isinstance(r, restraint.EM3DRestraint)] def finalize(self, system): for nr, r in enumerate(self._all_restraints(system)): r._id = nr + 1 def dump(self, system, writer): ordinal = itertools.count(1) with writer.loop("_ihm_3dem_restraint", ["id", "dataset_list_id", "fitting_method", "fitting_method_citation_id", "struct_assembly_id", "map_segment_flag", "number_of_gaussians", "model_id", "cross_correlation_coefficient", "details"]) as lp: for r in self._all_restraints(system): if r.fitting_method_citation: citation_id = r.fitting_method_citation._id else: citation_id = None # all fits ordered by model ID for model, fit in sorted(r.fits.items(), key=lambda i: i[0]._id): ccc = fit.cross_correlation_coefficient lp.write(id=next(ordinal), dataset_list_id=r.dataset._id, fitting_method=r.fitting_method, fitting_method_citation_id=citation_id, struct_assembly_id=r.assembly._id, map_segment_flag=r.segment, number_of_gaussians=r.number_of_gaussians, model_id=model._id, cross_correlation_coefficient=ccc, details=r.details) class _EM2DDumper(Dumper): def _all_restraints(self, system): return [r for r in system._all_restraints() if isinstance(r, restraint.EM2DRestraint)] def finalize(self, system): for nr, r in enumerate(self._all_restraints(system)): r._id = nr + 1 def dump(self, system, writer): self.dump_restraint(system, writer) self.dump_fitting(system, writer) def dump_restraint(self, system, writer): with writer.loop("_ihm_2dem_class_average_restraint", ["id", "dataset_list_id", "number_raw_micrographs", "pixel_size_width", "pixel_size_height", "image_resolution", "image_segment_flag", "number_of_projections", "struct_assembly_id", "details"]) as lp: for r in self._all_restraints(system): lp.write(id=r._id, dataset_list_id=r.dataset._id, number_raw_micrographs=r.number_raw_micrographs, pixel_size_width=r.pixel_size_width, pixel_size_height=r.pixel_size_height, image_resolution=r.image_resolution, number_of_projections=r.number_of_projections, struct_assembly_id=r.assembly._id, image_segment_flag=r.segment, details=r.details) def dump_fitting(self, system, writer): ordinal = itertools.count(1) with writer.loop( "_ihm_2dem_class_average_fitting", ["id", "restraint_id", "model_id", "cross_correlation_coefficient", "rot_matrix[1][1]", "rot_matrix[2][1]", "rot_matrix[3][1]", "rot_matrix[1][2]", "rot_matrix[2][2]", "rot_matrix[3][2]", "rot_matrix[1][3]", "rot_matrix[2][3]", "rot_matrix[3][3]", "tr_vector[1]", "tr_vector[2]", "tr_vector[3]"]) as lp: for r in self._all_restraints(system): # all fits ordered by model ID for model, fit in 
sorted(r.fits.items(), key=lambda i: i[0]._id): ccc = fit.cross_correlation_coefficient if fit.tr_vector is None: t = [None] * 3 else: t = fit.tr_vector if fit.rot_matrix is None: rm = [[None] * 3] * 3 else: # mmCIF writer usually outputs floats to 3 decimal # places, but we need more precision for rotation # matrices rm = [["%.6f" % e for e in fit.rot_matrix[i]] for i in range(3)] lp.write(id=next(ordinal), restraint_id=r._id, model_id=model._id, cross_correlation_coefficient=ccc, rot_matrix11=rm[0][0], rot_matrix21=rm[1][0], rot_matrix31=rm[2][0], rot_matrix12=rm[0][1], rot_matrix22=rm[1][1], rot_matrix32=rm[2][1], rot_matrix13=rm[0][2], rot_matrix23=rm[1][2], rot_matrix33=rm[2][2], tr_vector1=t[0], tr_vector2=t[1], tr_vector3=t[2]) class _SASDumper(Dumper): def _all_restraints(self, system): return [r for r in system._all_restraints() if isinstance(r, restraint.SASRestraint)] def finalize(self, system): for nr, r in enumerate(self._all_restraints(system)): r._id = nr + 1 def dump(self, system, writer): ordinal = itertools.count(1) with writer.loop("_ihm_sas_restraint", ["id", "dataset_list_id", "model_id", "struct_assembly_id", "profile_segment_flag", "fitting_atom_type", "fitting_method", "fitting_state", "radius_of_gyration", "chi_value", "details"]) as lp: for r in self._all_restraints(system): # all fits ordered by model ID for model, fit in sorted(r.fits.items(), key=lambda i: i[0]._id): lp.write(id=next(ordinal), dataset_list_id=r.dataset._id, fitting_method=r.fitting_method, fitting_atom_type=r.fitting_atom_type, fitting_state='Multiple' if r.multi_state else 'Single', profile_segment_flag=r.segment, radius_of_gyration=r.radius_of_gyration, struct_assembly_id=r.assembly._id, model_id=model._id, chi_value=fit.chi_value, details=r.details) def _assign_all_ids(all_objs_func): """Given a function that returns a list of all objects, assign IDs and return a list of objects sorted by ID""" objs_by_id = [] obj_id = itertools.count(1) for f in all_objs_func(): util._remove_id(f) for f in all_objs_func(): if not hasattr(f, '_id'): f._id = next(obj_id) objs_by_id.append(f) return objs_by_id class _MultiStateSchemeDumper(Dumper): def finalize(self, system): # Assign IDs s_id = itertools.count(1) for s in system.multi_state_schemes: if not hasattr(s, '_id'): s._id = next(s_id) def dump(self, system, writer): with writer.loop('_ihm_multi_state_scheme', ['id', 'name', 'details']) as lp: seen_multi_state_schemes = [] for s in system.multi_state_schemes: if s not in seen_multi_state_schemes: seen_multi_state_schemes.append(s) lp.write(id=s._id, name=s.name, details=s.details) class _MultiStateSchemeConnectivityDumper(Dumper): def finalize(self, system): # Assign IDs c_id = itertools.count(1) for c in system._all_multi_state_scheme_connectivities(): if not hasattr(c, '_id'): c._id = next(c_id) def dump(self, system, writer): with writer.loop('_ihm_multi_state_scheme_connectivity', ['id', 'scheme_id', 'begin_state_id', 'end_state_id', 'dataset_group_id', 'details']) as lp: for mss in system.multi_state_schemes: for c in mss.get_connectivities(): end_state_id = c.end_state._id if \ c.end_state is not None else None dataset_group_id = c.dataset_group._id if \ c.dataset_group else None lp.write(id=c._id, scheme_id=mss._id, begin_state_id=c.begin_state._id, end_state_id=end_state_id, dataset_group_id=dataset_group_id, details=c.details) class _RelaxationTimeDumper(Dumper): def finalize(self, system): # Assign IDs r_id = itertools.count(1) for r in system._all_relaxation_times(): if not hasattr(r, 
'_id'): r._id = next(r_id) def dump(self, system, writer): self.dump_ihm_relaxation_time(system, writer) self.dump_ihm_relaxation_time_multi_state_scheme(system, writer) def dump_ihm_relaxation_time(self, system, writer): with writer.loop('_ihm_relaxation_time', ['id', 'value', 'unit', 'amplitude', 'dataset_group_id', 'external_file_id', 'details']) as lp: # Relaxation times that are only assigned to multi-state schemes for r in system._all_relaxation_times(): dataset_group_id = r.dataset_group._id if \ r.dataset_group else None external_file_id = r.external_file._id if \ r.external_file else None lp.write( id=r._id, value=r.value, unit=r.unit, amplitude=r.amplitude, dataset_group_id=dataset_group_id, external_file_id=external_file_id, details=r.details) def dump_ihm_relaxation_time_multi_state_scheme(self, system, writer): with writer.loop('_ihm_relaxation_time_multi_state_scheme', ['id', 'relaxation_time_id', 'scheme_id', 'scheme_connectivity_id', 'details']) as lp: # Relaxation times that are only assigned to multi-state schemes ordinal = itertools.count(1) # Keep track of the seen relaxation times, in order to avoid # writing duplicates when it comes to the flr_data seen_relaxation_times = [] for mss in system.multi_state_schemes: for r in mss.get_relaxation_times(): if r not in seen_relaxation_times: seen_relaxation_times.append(r) lp.write(id=next(ordinal), relaxation_time_id=r._id, scheme_id=mss._id, scheme_connectivity_id=None, details=None) # Relaxation times assigned to multi-state scheme connectivities for mss in system.multi_state_schemes: for mssc in mss.get_connectivities(): if mssc.relaxation_time is not None: if mssc.relaxation_time not in seen_relaxation_times: seen_relaxation_times.append(mssc.relaxation_time) lp.write( id=next(ordinal), relaxation_time_id=mssc.relaxation_time._id, scheme_id=mss._id, scheme_connectivity_id=mssc._id, details=None) # This case only occurs if the relaxation time was not assigned to # a multi-state scheme, but occurs within the context of flr_data for f in system.flr_data: for c in f.relaxation_time_fret_analysis_connections: r = c.relaxation_time if r not in seen_relaxation_times: seen_relaxation_times.append(r) lp.write(id=next(ordinal), relaxation_time_id=r._id, scheme_id=None, scheme_connectivity_id=None, details=None) class _KineticRateDumper(Dumper): def finalize(self, system): # Assign IDs k_id = itertools.count(1) for k in system._all_kinetic_rates(): if not hasattr(k, '_id'): k._id = next(k_id) def dump(self, system, writer): with writer.loop('_ihm_kinetic_rate', ['id', 'transition_rate_constant', 'equilibrium_constant', 'equilibrium_constant_determination_method', 'equilibrium_constant_unit', 'details', 'scheme_connectivity_id', 'dataset_group_id', 'external_file_id']) as lp: ordinal = itertools.count(1) seen_kinetic_rates = [] for mssc in system._all_multi_state_scheme_connectivities(): if mssc.kinetic_rate is not None: k = mssc.kinetic_rate seen_kinetic_rates.append(k) trconst = k.transition_rate_constant eqconst = k.equilibrium_constant dataset_group_id = k.dataset_group._id if \ k.dataset_group else None external_file_id = k.external_file._id if \ k.external_file else None lp.write( id=next(ordinal), transition_rate_constant=trconst, equilibrium_constant=None if eqconst is None else eqconst.value, equilibrium_constant_determination_method=None if eqconst is None else eqconst.method, equilibrium_constant_unit=None if eqconst is None else eqconst.unit, details=k.details, scheme_connectivity_id=mssc._id, 
dataset_group_id=dataset_group_id, external_file_id=external_file_id) for f in system.flr_data: for c in f.kinetic_rate_fret_analysis_connections: k = c.kinetic_rate if k not in seen_kinetic_rates: seen_kinetic_rates.append(k) trconst = k.transition_rate_constant eqconst = k.equilibrium_constant dataset_group_id = k.dataset_group._id if \ k.dataset_group else None external_file_id = k.external_file._id if \ k.external_file else None lp.write( id=next(ordinal), transition_rate_constant=trconst, equilibrium_constant=None if eqconst is None else eqconst.value, equilibrium_constant_determination_method=None if eqconst is None else eqconst.method, equilibrium_constant_unit=None if eqconst is None else eqconst.unit, details=k.details, scheme_connectivity_id=None, dataset_group_id=dataset_group_id, external_file_id=external_file_id) class _FLRExperimentDumper(Dumper): def finalize(self, system): def all_experiments(): return itertools.chain.from_iterable(f._all_experiments() for f in system.flr_data) self._experiments_by_id = _assign_all_ids(all_experiments) def dump(self, system, writer): with writer.loop( '_flr_experiment', ['ordinal_id', 'id', 'instrument_id', 'inst_setting_id', 'exp_condition_id', 'sample_id', 'details']) as lp: ordinal = 1 for x in self._experiments_by_id: for i in range(len(x.sample_list)): lp.write(ordinal_id=ordinal, id=x._id, instrument_id=x.instrument_list[i]._id, inst_setting_id=x.inst_setting_list[i]._id, exp_condition_id=x.exp_condition_list[i]._id, sample_id=x.sample_list[i]._id, details=x.details_list[i]) ordinal += 1 class _FLRInstSettingDumper(Dumper): def finalize(self, system): def all_inst_settings(): return itertools.chain.from_iterable(f._all_inst_settings() for f in system.flr_data) self._inst_settings_by_id = _assign_all_ids(all_inst_settings) def dump(self, system, writer): with writer.loop('_flr_inst_setting', ['id', 'details']) as lp: for x in self._inst_settings_by_id: lp.write(id=x._id, details=x.details) class _FLRExpConditionDumper(Dumper): def finalize(self, system): def all_exp_conditions(): return itertools.chain.from_iterable(f._all_exp_conditions() for f in system.flr_data) self._exp_conditions_by_id = _assign_all_ids(all_exp_conditions) def dump(self, system, writer): with writer.loop('_flr_exp_condition', ['id', 'details']) as lp: for x in self._exp_conditions_by_id: lp.write(id=x._id, details=x.details) class _FLRInstrumentDumper(Dumper): def finalize(self, system): def all_instruments(): return itertools.chain.from_iterable(f._all_instruments() for f in system.flr_data) self._instruments_by_id = _assign_all_ids(all_instruments) def dump(self, system, writer): with writer.loop('_flr_instrument', ['id', 'details']) as lp: for x in self._instruments_by_id: lp.write(id=x._id, details=x.details) class _FLREntityAssemblyDumper(Dumper): def finalize(self, system): def all_entity_assemblies(): return itertools.chain.from_iterable( (s.entity_assembly for s in f._all_samples()) for f in system.flr_data) self._entity_assemblies_by_id = _assign_all_ids(all_entity_assemblies) def dump(self, system, writer): with writer.loop( '_flr_entity_assembly', ['ordinal_id', 'assembly_id', 'entity_id', 'num_copies', 'entity_description']) as lp: ordinal = itertools.count(1) for x in self._entity_assemblies_by_id: for i in range(len(x.entity_list)): lp.write(ordinal_id=next(ordinal), assembly_id=x._id, entity_id=x.entity_list[i]._id, num_copies=x.num_copies_list[i], entity_description=x.entity_list[i].description) class _FLRSampleConditionDumper(Dumper): def 
finalize(self, system): def all_sample_conditions(): return itertools.chain.from_iterable( (s.condition for s in f._all_samples()) for f in system.flr_data) self._sample_conditions_by_id = _assign_all_ids(all_sample_conditions) def dump(self, system, writer): with writer.loop('_flr_sample_condition', ['id', 'details']) as lp: for x in self._sample_conditions_by_id: lp.write(id=x._id, details=x.details) class _FLRSampleDumper(Dumper): def finalize(self, system): def all_samples(): return itertools.chain.from_iterable(f._all_samples() for f in system.flr_data) self._samples_by_id = _assign_all_ids(all_samples) def dump(self, system, writer): with writer.loop('_flr_sample', ['id', 'entity_assembly_id', 'num_of_probes', 'sample_condition_id', 'sample_description', 'sample_details', 'solvent_phase']) as lp: for x in self._samples_by_id: lp.write(id=x._id, entity_assembly_id=x.entity_assembly._id, num_of_probes=x.num_of_probes, sample_condition_id=x.condition._id, sample_description=x.description, sample_details=x.details, solvent_phase=x.solvent_phase) class _FLRProbeDumper(Dumper): def finalize(self, system): def all_probes(): return itertools.chain.from_iterable(f._all_probes() for f in system.flr_data) self._probes_by_id = _assign_all_ids(all_probes) def dump(self, system, writer): self.dump_probe_list(system, writer) self.dump_probe_descriptor(system, writer) def dump_probe_list(self, system, writer): with writer.loop('_flr_probe_list', ['probe_id', 'chromophore_name', 'reactive_probe_flag', 'reactive_probe_name', 'probe_origin', 'probe_link_type']) as lp: for x in self._probes_by_id: entry = x.probe_list_entry lp.write(probe_id=x._id, chromophore_name=entry.chromophore_name, reactive_probe_flag=entry.reactive_probe_flag, reactive_probe_name=entry.reactive_probe_name, probe_origin=entry.probe_origin, probe_link_type=entry.probe_link_type) def dump_probe_descriptor(self, system, writer): with writer.loop('_flr_probe_descriptor', ['probe_id', 'reactive_probe_chem_descriptor_id', 'chromophore_chem_descriptor_id', 'chromophore_center_atom']) as lp: for x in self._probes_by_id: reactive = x.probe_descriptor.reactive_probe_chem_descriptor chrom = x.probe_descriptor.chromophore_chem_descriptor reactive_id = None if reactive is None else reactive._id chrom_id = None if chrom is None else chrom._id center = x.probe_descriptor.chromophore_center_atom lp.write(probe_id=x._id, reactive_probe_chem_descriptor_id=reactive_id, chromophore_chem_descriptor_id=chrom_id, chromophore_center_atom=center) class _FLRSampleProbeDetailsDumper(Dumper): def finalize(self, system): def all_sample_probe_details(): return itertools.chain.from_iterable(f._all_sample_probe_details() for f in system.flr_data) self._sample_probe_details_by_id = _assign_all_ids( all_sample_probe_details) def dump(self, system, writer): with writer.loop('_flr_sample_probe_details', ['sample_probe_id', 'sample_id', 'probe_id', 'fluorophore_type', 'description', 'poly_probe_position_id']) as lp: for x in self._sample_probe_details_by_id: lp.write(sample_probe_id=x._id, sample_id=x.sample._id, probe_id=x.probe._id, fluorophore_type=x.fluorophore_type, description=x.description, poly_probe_position_id=x.poly_probe_position._id) class _FLRPolyProbePositionDumper(Dumper): def finalize(self, system): def all_poly_probe_positions(): return itertools.chain.from_iterable(f._all_poly_probe_positions() for f in system.flr_data) self._positions_by_id = _assign_all_ids(all_poly_probe_positions) def dump(self, system, writer): self.dump_position(system, 
writer) self.dump_position_mutated(system, writer) self.dump_position_modified(system, writer) def dump_position(self, system, writer): with writer.loop('_flr_poly_probe_position', ['id', 'entity_id', 'entity_description', 'asym_id', 'seq_id', 'comp_id', 'atom_id', 'mutation_flag', 'modification_flag', 'auth_name']) as lp: for x in self._positions_by_id: atom = None if isinstance(x.resatom, ihm.Atom): atom = x.resatom.id if x.resatom.asym is None: comp = x.resatom.entity.sequence[x.resatom.seq_id - 1].id a_id = None e_id = x.resatom.entity._id e_desc = x.resatom.entity.description else: e = x.resatom.asym.entity comp = e.sequence[x.resatom.seq_id - 1].id a_id = x.resatom.asym._id e_id = x.resatom.asym.entity._id e_desc = x.resatom.asym.entity.description lp.write(id=x._id, entity_id=e_id, entity_description=e_desc, asym_id=a_id, seq_id=x.resatom.seq_id, comp_id=comp, atom_id=atom, mutation_flag=x.mutation_flag, modification_flag=x.modification_flag, auth_name=x.auth_name) def dump_position_mutated(self, system, writer): with writer.loop('_flr_poly_probe_position_mutated', ['id', 'chem_comp_id', 'atom_id']) as lp: for x in self._positions_by_id: if x.mutation_flag is True: atom = None if isinstance(x.resatom, ihm.Atom): atom = x.resatom.id lp.write(id=x._id, chem_comp_id=x.mutated_chem_comp_id.id, atom_id=atom) def dump_position_modified(self, system, writer): with writer.loop('_flr_poly_probe_position_modified', ['id', 'chem_descriptor_id', 'atom_id']) as lp: for x in self._positions_by_id: if x.modification_flag is True: atom = None if isinstance(x.resatom, ihm.Atom): atom = x.resatom.id lp.write(id=x._id, chem_descriptor_id=x.modified_chem_descriptor._id, atom_id=atom) class _FLRConjugateDumper(Dumper): def finalize(self, system): def all_conjugates(): return itertools.chain.from_iterable(f.poly_probe_conjugates for f in system.flr_data) self._conjugates_by_id = _assign_all_ids(all_conjugates) def dump(self, system, writer): with writer.loop('_flr_poly_probe_conjugate', ['id', 'sample_probe_id', 'chem_descriptor_id', 'ambiguous_stoichiometry_flag', 'probe_stoichiometry']) as lp: for x in self._conjugates_by_id: lp.write( id=x._id, sample_probe_id=x.sample_probe._id, chem_descriptor_id=x.chem_descriptor._id, ambiguous_stoichiometry_flag=x.ambiguous_stoichiometry, probe_stoichiometry=x.probe_stoichiometry) class _FLRForsterRadiusDumper(Dumper): def finalize(self, system): def all_forster_radii(): return itertools.chain.from_iterable(f._all_forster_radii() for f in system.flr_data) self._radii_by_id = _assign_all_ids(all_forster_radii) def dump(self, system, writer): with writer.loop('_flr_fret_forster_radius', ['id', 'donor_probe_id', 'acceptor_probe_id', 'forster_radius', 'reduced_forster_radius']) as lp: for x in self._radii_by_id: lp.write(id=x._id, donor_probe_id=x.donor_probe._id, acceptor_probe_id=x.acceptor_probe._id, forster_radius=x.forster_radius, reduced_forster_radius=x.reduced_forster_radius) class _FLRCalibrationParametersDumper(Dumper): def finalize(self, system): def all_calibration_parameters(): return itertools.chain.from_iterable( f._all_calibration_parameters() for f in system.flr_data) self._parameters_by_id = _assign_all_ids(all_calibration_parameters) def dump(self, system, writer): with writer.loop('_flr_fret_calibration_parameters', ['id', 'phi_acceptor', 'alpha', 'alpha_sd', 'gG_gR_ratio', 'beta', 'gamma', 'delta', 'a_b']) as lp: for x in self._parameters_by_id: lp.write(id=x._id, phi_acceptor=x.phi_acceptor, alpha=x.alpha, alpha_sd=x.alpha_sd, 
gG_gR_ratio=x.gg_gr_ratio, beta=x.beta, gamma=x.gamma, delta=x.delta, a_b=x.a_b) class _FLRLifetimeFitModelDumper(Dumper): def finalize(self, system): def all_lifetime_fit_models(): return itertools.chain.from_iterable(f._all_lifetime_fit_models() for f in system.flr_data) self._lifetime_fit_models_by_id = \ _assign_all_ids(all_lifetime_fit_models) def dump(self, system, writer): with writer.loop('_flr_lifetime_fit_model', ['id', 'name', 'description', 'external_file_id', 'citation_id']) as lp: for x in self._lifetime_fit_models_by_id: lp.write(id=x._id, name=x.name, description=x.description, external_file_id=None if x.external_file is None else x.external_file._id, citation_id=None if x.citation is None else x.citation._id) class _FLRRefMeasurementDumper(Dumper): def finalize(self, system): def all_ref_measurement_groups(): return itertools.chain.from_iterable( f._all_ref_measurement_groups() for f in system.flr_data) self._ref_measurement_groups_by_id = _assign_all_ids( all_ref_measurement_groups) def _all_ref_measurements(): return itertools.chain.from_iterable(f._all_ref_measurements() for f in system.flr_data) self._ref_measurements_by_id = _assign_all_ids(_all_ref_measurements) def _all_ref_measurement_lifetimes(): return itertools.chain.from_iterable( f._all_ref_measurement_lifetimes() for f in system.flr_data) self._ref_measurement_lifetimes_by_id = _assign_all_ids( _all_ref_measurement_lifetimes) def dump(self, system, writer): self.dump_ref_measurement_group(system, writer) self.dump_ref_measurement_group_link(system, writer) self.dump_ref_measurement(system, writer) self.dump_ref_measurement_lifetimes(system, writer) def dump_ref_measurement_group(self, system, writer): with writer.loop('_flr_reference_measurement_group', ['id', 'num_measurements', 'details']) as lp: for x in self._ref_measurement_groups_by_id: lp.write(id=x._id, num_measurements=len(x.ref_measurement_list), details=x.details) def dump_ref_measurement_group_link(self, system, writer): with writer.loop('_flr_reference_measurement_group_link', ['group_id', 'reference_measurement_id']) as lp: for x in self._ref_measurement_groups_by_id: for m in x.ref_measurement_list: lp.write(group_id=x._id, reference_measurement_id=m._id) def dump_ref_measurement(self, system, writer): with writer.loop('_flr_reference_measurement', ['id', 'reference_sample_probe_id', 'num_species', 'details']) as lp: for x in self._ref_measurements_by_id: lp.write(id=x._id, reference_sample_probe_id=x.ref_sample_probe._id, num_species=len(x.list_of_lifetimes), details=x.details) def dump_ref_measurement_lifetimes(self, system, writer): with writer.loop('_flr_reference_measurement_lifetime', ['ordinal_id', 'reference_measurement_id', 'species_name', 'species_fraction', 'lifetime']) as lp: ordinal = itertools.count(1) for x in self._ref_measurements_by_id: for m in x.list_of_lifetimes: lp.write(ordinal_id=next(ordinal), reference_measurement_id=x._id, species_name=m.species_name, species_fraction=m.species_fraction, lifetime=m.lifetime) class _FLRAnalysisDumper(Dumper): def finalize(self, system): def all_analyses(): return itertools.chain.from_iterable(f._all_analyses() for f in system.flr_data) self._analyses_by_id = _assign_all_ids(all_analyses) def dump(self, system, writer): self.dump_fret_analysis_general(system, writer) self.dump_fret_analysis_intensity(system, writer) self.dump_fret_analysis_lifetime(system, writer) def dump_fret_analysis_general(self, system, writer): with writer.loop('_flr_fret_analysis', ['id', 'experiment_id', 'type', 
'sample_probe_id_1', 'sample_probe_id_2', 'forster_radius_id', 'dataset_list_id', 'external_file_id', 'software_id']) as lp: for x in self._analyses_by_id: lp.write(id=x._id, experiment_id=x.experiment._id, type=x.type, sample_probe_id_1=x.sample_probe_1._id, sample_probe_id_2=x.sample_probe_2._id, forster_radius_id=x.forster_radius._id, dataset_list_id=x.dataset._id, external_file_id=None if x.external_file is None else x.external_file._id, software_id=None if x.software is None else x.software._id) def dump_fret_analysis_intensity(self, system, writer): with writer.loop('_flr_fret_analysis_intensity', ['ordinal_id', 'analysis_id', 'calibration_parameters_id', 'donor_only_fraction', 'chi_square_reduced', 'method_name', 'details']) as lp: ordinal = itertools.count(1) for x in self._analyses_by_id: # if it is an intensity-based analysis. if 'intensity' in x.type: p = x.calibration_parameters lp.write( ordinal_id=next(ordinal), analysis_id=x._id, calibration_parameters_id=None if p is None else p._id, donor_only_fraction=x.donor_only_fraction, chi_square_reduced=x.chi_square_reduced, method_name=x.method_name, details=x.details) def dump_fret_analysis_lifetime(self, system, writer): with writer.loop('_flr_fret_analysis_lifetime', ['ordinal_id', 'analysis_id', 'reference_measurement_group_id', 'lifetime_fit_model_id', 'donor_only_fraction', 'chi_square_reduced', 'method_name', 'details']) as lp: ordinal = itertools.count(1) for x in self._analyses_by_id: # if it is a lifetime-based analysis if 'lifetime' in x.type: mgid = x.ref_measurement_group._id lp.write( ordinal_id=next(ordinal), analysis_id=x._id, reference_measurement_group_id=mgid, lifetime_fit_model_id=x.lifetime_fit_model._id, donor_only_fraction=x.donor_only_fraction, chi_square_reduced=x.chi_square_reduced, method_name=x.method_name, details=x.details) class _FLRPeakAssignmentDumper(Dumper): def finalize(self, system): def all_peak_assignments(): return itertools.chain.from_iterable(f._all_peak_assignments() for f in system.flr_data) self._peak_assignments_by_id = _assign_all_ids(all_peak_assignments) def dump(self, system, writer): with writer.loop('_flr_peak_assignment', ['id', 'method_name', 'details']) as lp: for x in self._peak_assignments_by_id: lp.write(id=x._id, method_name=x.method_name, details=x.details) class _FLRDistanceRestraintDumper(Dumper): def finalize(self, system): def all_restraint_groups(): return itertools.chain.from_iterable(f.distance_restraint_groups for f in system.flr_data) self._restraint_groups_by_id = _assign_all_ids(all_restraint_groups) def _all_restraints(): return itertools.chain.from_iterable( rg.distance_restraint_list for rg in self._restraint_groups_by_id) for i, r in enumerate(_all_restraints()): r._id = i + 1 def dump(self, system, writer): with writer.loop('_flr_fret_distance_restraint', ['ordinal_id', 'id', 'group_id', 'sample_probe_id_1', 'sample_probe_id_2', 'state_id', 'analysis_id', 'distance', 'distance_error_plus', 'distance_error_minus', 'distance_type', 'population_fraction', 'peak_assignment_id']) as lp: ordinal = itertools.count(1) for rg in self._restraint_groups_by_id: for r in rg.distance_restraint_list: lp.write(ordinal_id=next(ordinal), id=r._id, group_id=rg._id, sample_probe_id_1=r.sample_probe_1._id, sample_probe_id_2=r.sample_probe_2._id, state_id=None if r.state is None else r.state._id, analysis_id=r.analysis._id, distance=r.distance, distance_error_plus=r.distance_error_plus, distance_error_minus=r.distance_error_minus, distance_type=r.distance_type, 
population_fraction=r.population_fraction, peak_assignment_id=r.peak_assignment._id) class _FLRModelQualityDumper(Dumper): def finalize(self, system): def all_model_qualities(): return itertools.chain.from_iterable(f.fret_model_qualities for f in system.flr_data) self._model_qualities_by_id = _assign_all_ids(all_model_qualities) def dump(self, system, writer): with writer.loop('_flr_fret_model_quality', ['model_id', 'chi_square_reduced', 'dataset_group_id', 'method', 'details']) as lp: for x in self._model_qualities_by_id: lp.write(model_id=x.model._id, chi_square_reduced=x.chi_square_reduced, dataset_group_id=x.dataset_group._id, method=x.method, details=x.details) class _FLRModelDistanceDumper(Dumper): def finalize(self, system): def all_model_distances(): return itertools.chain.from_iterable(f.fret_model_distances for f in system.flr_data) self._model_distances_by_id = _assign_all_ids(all_model_distances) def dump(self, system, writer): with writer.loop('_flr_fret_model_distance', ['id', 'restraint_id', 'model_id', 'distance', 'distance_deviation']) as lp: for x in self._model_distances_by_id: lp.write(id=x._id, restraint_id=x.restraint._id, model_id=x.model._id, distance=x.distance, distance_deviation=x.distance_deviation) class _FLRFPSModelingDumper(Dumper): def finalize(self, system): def all_fps_modeling(): return itertools.chain.from_iterable(f._all_fps_modeling() for f in system.flr_data) self._fps_modeling_by_id = _assign_all_ids(all_fps_modeling) def all_fps_global_parameters(): return itertools.chain.from_iterable(f._all_fps_global_parameters() for f in system.flr_data) self._fps_modeling_by_id = _assign_all_ids(all_fps_modeling) self._fps_parameters_by_id = _assign_all_ids(all_fps_global_parameters) def dump(self, system, writer): self.dump_fps_modeling(system, writer) self.dump_fps_global_parameters(system, writer) def dump_fps_modeling(self, system, writer): with writer.loop('_flr_FPS_modeling', ['id', 'ihm_modeling_protocol_ordinal_id', 'restraint_group_id', 'global_parameter_id', 'probe_modeling_method', 'details']) as lp: for x in self._fps_modeling_by_id: lp.write(id=x._id, ihm_modeling_protocol_ordinal_id=x.protocol._id, restraint_group_id=x.restraint_group._id, global_parameter_id=x.global_parameter._id, probe_modeling_method=x.probe_modeling_method, details=x.details) def dump_fps_global_parameters(self, system, writer): with writer.loop('_flr_FPS_global_parameter', ['id', 'forster_radius_value', 'conversion_function_polynom_order', 'repetition', 'AV_grid_rel', 'AV_min_grid_A', 'AV_allowed_sphere', 'AV_search_nodes', 'AV_E_samples_k', 'sim_viscosity_adjustment', 'sim_dt_adjustment', 'sim_max_iter_k', 'sim_max_force', 'sim_clash_tolerance_A', 'sim_reciprocal_kT', 'sim_clash_potential', 'convergence_E', 'convergence_K', 'convergence_F', 'convergence_T']) as lp: for x in self._fps_parameters_by_id: polynom_order = x.conversion_function_polynom_order lp.write(id=x._id, forster_radius_value=x.forster_radius, conversion_function_polynom_order=polynom_order, repetition=x.repetition, AV_grid_rel=x.av_grid_rel, AV_min_grid_A=x.av_min_grid_a, AV_allowed_sphere=x.av_allowed_sphere, AV_search_nodes=x.av_search_nodes, AV_E_samples_k=x.av_e_samples_k, sim_viscosity_adjustment=x.sim_viscosity_adjustment, sim_dt_adjustment=x.sim_dt_adjustment, sim_max_iter_k=x.sim_max_iter_k, sim_max_force=x.sim_max_force, sim_clash_tolerance_A=x.sim_clash_tolerance_a, sim_reciprocal_kT=x.sim_reciprocal_kt, sim_clash_potential=x.sim_clash_potential, convergence_E=x.convergence_e, 
convergence_K=x.convergence_k, convergence_F=x.convergence_f, convergence_T=x.convergence_t) class _FLRFPSAVModelingDumper(Dumper): def finalize(self, system): def all_fps_av_modeling(): return itertools.chain.from_iterable(f._all_fps_av_modeling() for f in system.flr_data) self._fps_av_modeling_by_id = _assign_all_ids(all_fps_av_modeling) def all_fps_av_parameter(): return itertools.chain.from_iterable(f._all_fps_av_parameter() for f in system.flr_data) self._fps_av_modeling_by_id = _assign_all_ids(all_fps_av_modeling) self._fps_av_parameter_by_id = _assign_all_ids(all_fps_av_parameter) def dump(self, system, writer): self.dump_parameter(system, writer) self.dump_modeling(system, writer) def dump_parameter(self, system, writer): with writer.loop('_flr_FPS_AV_parameter', ['id', 'num_linker_atoms', 'linker_length', 'linker_width', 'probe_radius_1', 'probe_radius_2', 'probe_radius_3']) as lp: for x in self._fps_av_parameter_by_id: lp.write(id=x._id, num_linker_atoms=x.num_linker_atoms, linker_length=x.linker_length, linker_width=x.linker_width, probe_radius_1=x.probe_radius_1, probe_radius_2=x.probe_radius_2, probe_radius_3=x.probe_radius_3) def dump_modeling(self, system, writer): with writer.loop('_flr_FPS_AV_modeling', ['id', 'sample_probe_id', 'FPS_modeling_id', 'parameter_id']) as lp: for x in self._fps_av_modeling_by_id: lp.write(id=x._id, sample_probe_id=x.sample_probe._id, FPS_modeling_id=x.fps_modeling._id, parameter_id=x.parameter._id) class _FLRFPSMPPModelingDumper(Dumper): def finalize(self, system): def all_fps_mpp_modeling(): return itertools.chain.from_iterable(f._all_fps_mpp_modeling() for f in system.flr_data) self._fps_mpp_modeling_by_id = _assign_all_ids(all_fps_mpp_modeling) def all_fps_mean_probe_position(): return itertools.chain.from_iterable( f._all_fps_mean_probe_position() for f in system.flr_data) self._fps_mpp_modeling_by_id = _assign_all_ids(all_fps_mpp_modeling) self._fps_mpp_by_id = _assign_all_ids(all_fps_mean_probe_position) def all_atom_position_group(): return itertools.chain.from_iterable( f._all_fps_atom_position_group() for f in system.flr_data) self._atom_group_by_id = _assign_all_ids(all_atom_position_group) def _all_atom_positions(): return itertools.chain.from_iterable( ag.mpp_atom_position_list for ag in self._atom_group_by_id) for i, a in enumerate(_all_atom_positions()): a._id = i + 1 def dump(self, system, writer): self.dump_mean_probe_position(system, writer) self.dump_mpp_atom_position(system, writer) self.dump_mpp_modeling(system, writer) def dump_mean_probe_position(self, system, writer): with writer.loop('_flr_FPS_mean_probe_position', ['id', 'sample_probe_id', 'mpp_xcoord', 'mpp_ycoord', 'mpp_zcoord']) as lp: for x in self._fps_mpp_by_id: lp.write(id=x._id, sample_probe_id=x.sample_probe._id, mpp_xcoord=x.x, mpp_ycoord=x.y, mpp_zcoord=x.z) def dump_mpp_atom_position(self, system, writer): with writer.loop('_flr_FPS_MPP_atom_position', ['id', 'entity_id', 'seq_id', 'comp_id', 'atom_id', 'asym_id', 'xcoord', 'ycoord', 'zcoord', 'group_id']) as lp: for group in self._atom_group_by_id: for x in group.mpp_atom_position_list: comp = x.atom.asym.entity.sequence[x.atom.seq_id - 1].id lp.write(id=x._id, entity_id=x.atom.asym.entity._id, seq_id=x.atom.seq_id, comp_id=comp, atom_id=x.atom.id, asym_id=x.atom.asym._id, xcoord=x.x, ycoord=x.y, zcoord=x.z, group_id=group._id) def dump_mpp_modeling(self, system, writer): ordinal = itertools.count(1) with writer.loop('_flr_FPS_MPP_modeling', ['ordinal_id', 'FPS_modeling_id', 'mpp_id', 
'mpp_atom_position_group_id']) as lp: for x in self._fps_mpp_modeling_by_id: lp.write( ordinal_id=next(ordinal), FPS_modeling_id=x.fps_modeling._id, mpp_id=x.mpp._id, mpp_atom_position_group_id=x.mpp_atom_position_group._id) class _FLRKineticRateFretAnalysisConnectionDumper(Dumper): def finalize(self, system): # Assign IDs c_id = itertools.count(1) if system.flr_data: for f in system.flr_data: for c in f.kinetic_rate_fret_analysis_connections: if not hasattr(c, '_id'): c._id = next(c_id) def dump(self, system, writer): with writer.loop('_flr_kinetic_rate_analysis', ['id', 'fret_analysis_id', 'kinetic_rate_id', 'details']) as lp: if system.flr_data: for f in system.flr_data: for c in f.kinetic_rate_fret_analysis_connections: lp.write(id=c._id, fret_analysis_id=c.fret_analysis._id, kinetic_rate_id=c.kinetic_rate._id, details=c.details) class _FLRRelaxationTimeFretAnalysisConnectionDumper(Dumper): def finalize(self, system): # Assign IDs c_id = itertools.count(1) if system.flr_data: for f in system.flr_data: for c in f.relaxation_time_fret_analysis_connections: if not hasattr(c, '_id'): c._id = next(c_id) def dump(self, system, writer): with writer.loop('_flr_relaxation_time_analysis', ['id', 'fret_analysis_id', 'relaxation_time_id', 'details']) as lp: if system.flr_data: for f in system.flr_data: for c in f.relaxation_time_fret_analysis_connections: lp.write(id=c._id, fret_analysis_id=c.fret_analysis._id, relaxation_time_id=c.relaxation_time._id, details=c.details) _flr_dumpers = [_FLRExperimentDumper, _FLRInstSettingDumper, _FLRExpConditionDumper, _FLRInstrumentDumper, _FLREntityAssemblyDumper, _FLRSampleConditionDumper, _FLRSampleDumper, _FLRProbeDumper, _FLRSampleProbeDetailsDumper, _FLRPolyProbePositionDumper, _FLRConjugateDumper, _FLRForsterRadiusDumper, _FLRCalibrationParametersDumper, _FLRLifetimeFitModelDumper, _FLRRefMeasurementDumper, _FLRAnalysisDumper, _FLRPeakAssignmentDumper, _FLRDistanceRestraintDumper, _FLRModelQualityDumper, _FLRModelDistanceDumper, _FLRFPSModelingDumper, _FLRFPSAVModelingDumper, _FLRFPSMPPModelingDumper, _FLRKineticRateFretAnalysisConnectionDumper, _FLRRelaxationTimeFretAnalysisConnectionDumper] class _NullLoopCategoryWriter: """A do-nothing replacement for format._CifLoopWriter or format._CifCategoryWriter""" def write(self, *args, **keys): pass def __enter__(self): return self def __exit__(self, exc_type, exc_value, traceback): pass class _IgnoreWriter: """Utility class which normally just passes through to the default ``base_writer``, but ignores selected categories.""" def __init__(self, base_writer, ignores): self._base_writer = base_writer # Allow for categories with or without leading underscore self._ignore_category = frozenset('_' + c.lstrip('_').lower() for c in ignores) def category(self, category): if category in self._ignore_category: return _NullLoopCategoryWriter() else: return self._base_writer.category(category) def loop(self, category, keys): if category in self._ignore_category: return _NullLoopCategoryWriter() else: return self._base_writer.loop(category, keys) # Pass through other methods to base_writer def flush(self): return self._base_writer.flush() def end_block(self): return self._base_writer.end_block() def start_block(self, name): return self._base_writer.start_block(name) def write_comment(self, comment): return self._base_writer.write_comment(comment) class Variant: """Utility class to select the type of file to output by :func:`write`.""" def get_dumpers(self): """Get the :class:`Dumper` objects to use to write output. 
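A custom :class:`Variant` subclass could override this method to extend or replace the default set of dumpers, for example (a sketch only; ``_MyExtraDumper`` is a hypothetical :class:`Dumper` subclass, not part of the library)::

    class MyVariant(IHMVariant):
        def get_dumpers(self):
            # Keep the standard IHM dumpers and append a custom one
            return super().get_dumpers() + [_MyExtraDumper()]
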
:return: a list of :class:`Dumper` objects. """ pass def get_system_writer(self, system, writer_class, writer): """Get a writer tailored to the given system. By default, this just returns the ``writer`` unchanged.""" return writer class IHMVariant(Variant): """Used to select typical PDBx/IHM file output. See :func:`write`.""" _dumpers = [ _EntryDumper, # must be first _CollectionDumper, _StructDumper, _CommentDumper, _AuditConformDumper, _DatabaseDumper, _DatabaseStatusDumper, _CitationDumper, _SoftwareDumper, _AuditAuthorDumper, _AuditRevisionDumper, _DataUsageDumper, _GrantDumper, _ChemCompDumper, _ChemDescriptorDumper, _EntityDumper, _EntitySrcGenDumper, _EntitySrcNatDumper, _EntitySrcSynDumper, _StructRefDumper, _EntityPolyDumper, _EntityNonPolyDumper, _EntityPolySeqDumper, _EntityPolySegmentDumper, _EntityBranchListDumper, _EntityBranchDumper, _StructAsymDumper, _PolySeqSchemeDumper, _NonPolySchemeDumper, _BranchSchemeDumper, _BranchDescriptorDumper, _BranchLinkDumper, _AssemblyDumper, _ExternalReferenceDumper, _DatasetDumper, _ModelRepresentationDumper, _StartingModelDumper, _ProtocolDumper, _PostProcessDumper, _PseudoSiteDumper, _GeometricObjectDumper, _FeatureDumper, _CrossLinkDumper, _GeometricRestraintDumper, _DerivedDistanceRestraintDumper, _HDXRestraintDumper, _PredictedContactRestraintDumper, _EM3DDumper, _EM2DDumper, _SASDumper, _ModelDumper, _ModelRepresentativeDumper, _NotModeledResidueRangeDumper, _EnsembleDumper, _DensityDumper, _MultiStateDumper, _OrderedDumper, _MultiStateSchemeDumper, _MultiStateSchemeConnectivityDumper, _RelaxationTimeDumper, _KineticRateDumper] def get_dumpers(self): return [d() for d in self._dumpers + _flr_dumpers] class IgnoreVariant(IHMVariant): """Exclude selected CIF categories from output. This generates the same PDBx/IHM output as :class:`IHMVariant`, but explicitly listed CIF categories are discarded, for example:: ihm.dumper.write(fh, systems, variant=IgnoreVariant(['_audit_conform'])) This is intended for advanced users that have a working knowledge of the PDBx and IHM CIF dictionaries. :param sequence ignores: A list or tuple of CIF categories to exclude. """ def __init__(self, ignores): self._ignores = ignores def get_system_writer(self, system, writer_class, writer): return _IgnoreWriter(writer, self._ignores) def set_line_wrap(line_wrap): """Set whether output lines are wrapped at 80 characters. By default the mmCIF writer tries to avoid writing lines longer than 80 characters, for compatibility with traditional PDB. When disabled, each row in a "loop" construct will be written on a single line. This setting has no effect on binary formats (BinaryCIF). :param bool line_wrap: whether to wrap lines at 80 characters. """ ihm.format.CifWriter._set_line_wrap(line_wrap) def write(fh, systems, format='mmCIF', dumpers=[], variant=IHMVariant, check=True): """Write out all `systems` to the file handle `fh`. Files can be written in either the text-based mmCIF format or the BinaryCIF format. The BinaryCIF writer needs the msgpack Python module to function. The file handle should be opened in binary mode for BinaryCIF files. 
For mmCIF, text mode should be used, usually with UTF-8 encoding, e.g.:: with open('output.cif', 'w', encoding='utf-8') as fh: ihm.dumper.write(fh, systems) with open('output.bcif', 'wb') as fh: ihm.dumper.write(fh, systems, format='BCIF') If generating files for a tool that is sensitive to non-ASCII data, a more restrictive encoding such as ASCII or ISO-8859-1 could also be used (although note that this may lose some information such as accented characters):: with open('output.cif', 'w', encoding='ascii', errors='replace') as fh: ihm.dumper.write(fh, systems) :param file fh: The file handle to write to. :param list systems: The list of :class:`ihm.System` objects to write. :param str format: The format of the file. This can be 'mmCIF' (the default) for the (text-based) mmCIF format or 'BCIF' for BinaryCIF. :param list dumpers: A list of :class:`Dumper` classes (not objects). These can be used to add extra categories to the file. :param variant: A class or object that selects the type of file to output. This primarily controls the set of tables that are written to the file. In most cases the default :class:`IHMVariant` should be used. :type variant: :class:`Variant` :param bool check: If True (the default), check the output objects for self-consistency. If this is set to False, disabling some of these checks, the output files may not correctly validate against the mmCIF dictionaries. (Note that some checks are always performed, as the library cannot function correctly without these.) """ if isinstance(variant, type): variant = variant() dumpers = variant.get_dumpers() + [d() for d in dumpers] writer_map = {'mmCIF': ihm.format.CifWriter, 'BCIF': ihm.format_bcif.BinaryCifWriter} writer = writer_map[format](fh) for system in systems: w = variant.get_system_writer(system, writer_map[format], writer) system._before_write() for d in dumpers: d._check = check d.finalize(system) system._check_after_write() for d in dumpers: d.dump(system, w) w.end_block() # start_block is called by EntryDumper writer.flush() python-ihm-2.7/ihm/flr.py000066400000000000000000001373511503573337200154120ustar00rootroot00000000000000# coding=utf-8 """Classes to handle fluorescence data. The classes roughly correspond to categories in the `FLR dictionary `_. See the top level :class:`FLRData` class for more information. """ class Probe: """Defines a fluorescent probe. This class is not in the FLR dictionary, but it collects all the information connected by the probe_ids. :param probe_list_entry: A probe list object. :type probe_list_entry: :class:`ProbeList` :param probe_descriptor: A probe descriptor. :type probe_descriptor: :class:`ProbeDescriptor` """ def __init__(self, probe_list_entry=None, probe_descriptor=None): self.probe_list_entry = probe_list_entry self.probe_descriptor = probe_descriptor def __eq__(self, other): return self.__dict__ == other.__dict__ class ProbeDescriptor: """Collects the chemical descriptors for a fluorescent probe. This includes the chemical descriptor of the reactive probe and the chromophore. :param reactive_probe_chem_descriptor: The chemical descriptor for the reactive probe. :type reactive_probe_chem_descriptor: :class:`ihm.ChemDescriptor` :param chromophore_chem_descriptor: The chemical descriptor of the chromophore. :type chromophore_chem_descriptor: :class:`ihm.ChemDescriptor` :param chromophore_center_atom: The atom describing the center of the chromophore. 
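For example (a sketch; ``reactive_desc`` and ``chromophore_desc`` are assumed to be :class:`ihm.ChemDescriptor` objects created beforehand, e.g. from SMILES strings for the reactive dye and the chromophore)::

    probe_descriptor = ihm.flr.ProbeDescriptor(
        reactive_probe_chem_descriptor=reactive_desc,
        chromophore_chem_descriptor=chromophore_desc)
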
""" def __init__(self, reactive_probe_chem_descriptor, chromophore_chem_descriptor, chromophore_center_atom=None): self.reactive_probe_chem_descriptor = reactive_probe_chem_descriptor self.chromophore_chem_descriptor = chromophore_chem_descriptor self.chromophore_center_atom = chromophore_center_atom def __eq__(self, other): return self.__dict__ == other.__dict__ class ProbeList: """Store the chromophore name, whether there is a reactive probe available, the origin of the probe and the type of linkage of the probe. :param str chromophore_name: The name of the chromophore. :param bool reactive_probe_flag: Flag to indicate whether a reactive probe is given. :param str reactive_probe_name: The name of the reactive probe. :param str probe_origin: The origin of the probe (intrinsic or extrinsic). :param str probe_link_type: The type of linkage for the probe (covalent or ligand). """ def __init__(self, chromophore_name, reactive_probe_flag=False, reactive_probe_name=None, probe_origin=None, probe_link_type=None): self.chromophore_name = chromophore_name self.reactive_probe_flag = reactive_probe_flag self.reactive_probe_name = reactive_probe_name self.probe_origin = probe_origin self.probe_link_type = probe_link_type def __eq__(self, other): return self.__dict__ == other.__dict__ class SampleProbeDetails: """Connects a probe to a sample. :param sample: The sample. :type sample: :class:`Sample` :param probe: A probe that is attached to the sample. :type probe: :class:`Probe` :param str fluorophore_type: The type of the fluorophore (donor, acceptor, or unspecified). :param poly_probe_position: The position on the polymer where the dye is attached to. :type poly_probe_position: :class:`PolyProbePosition` :param str description: A description of the sample-probe-connection. """ def __init__(self, sample, probe, fluorophore_type='unspecified', poly_probe_position=None, description=None): self.sample = sample self.probe = probe self.fluorophore_type = fluorophore_type self.description = description self.poly_probe_position = poly_probe_position def __eq__(self, other): return self.__dict__ == other.__dict__ class PolyProbeConjugate: """Describes the conjugate of polymer residue and probe (including possible linker) :param sample_probe: The :class:`SampleProbeDetails` object to which the conjugate is related. :type sample_probe: :class:`SampleProbeDetails` :param chem_descriptor: The chemical descriptor of the conjugate of polymer residue and probe. :type chem_descriptor: :class:`ihm.ChemDescriptor` :param bool ambiguous_stoichiometry: Flag whether the labeling is ambiguous. :param float probe_stoichiometry: The stoichiometry of the ambiguous labeling. """ def __init__(self, sample_probe, chem_descriptor, ambiguous_stoichiometry=False, probe_stoichiometry=None): self.sample_probe = sample_probe self.chem_descriptor = chem_descriptor self.ambiguous_stoichiometry = ambiguous_stoichiometry self.probe_stoichiometry = probe_stoichiometry def __eq__(self, other): return self.__dict__ == other.__dict__ class PolyProbePosition: """Describes a position on the polymer used for attaching the probe. This class combines Poly_probe_position, Poly_probe_position_modified, and Poly_probe_position_mutated from the FLR dictionary. :param resatom: The residue or atom that the probe is attached to. :type resatom: :class:`ihm.Residue` or :class:`ihm.Atom` :param bool mutation_flag: Flag whether the residue was mutated (e.g. a Cys mutation). :param bool modification_flag: Flag whether the residue was modified (e.g. 
replacement of a residue with a labeled residue in case of nucleic acids). :param str auth_name: An author-given name for the position. :param mutated_chem_comp_id: The chemical component ID of the mutated residue. :type modified_chem_descriptor: :class:`ihm.ChemComp` :param modified_chem_descriptor: The chemical descriptor of the modified residue. :type modified_chem_descriptor: :class:`ihm.ChemDescriptor` """ def __init__(self, resatom, mutation_flag=False, modification_flag=False, auth_name=None, mutated_chem_comp_id=None, modified_chem_descriptor=None): self.resatom = resatom self.mutation_flag = mutation_flag self.modification_flag = modification_flag self.auth_name = auth_name if self.mutation_flag: self.mutated_chem_comp_id = mutated_chem_comp_id if self.modification_flag: self.modified_chem_descriptor = modified_chem_descriptor def __eq__(self, other): return self.__dict__ == other.__dict__ class Sample: """Sample corresponds to a measurement. :param entity_assembly: The assembly of the entities that was measured. :type entity_assembly: :class:`EntityAssembly` :param int num_of_probes: The number of probes in the sample. :param condition: The sample conditions for the Sample. :type condition: :class:`SampleCondition` :param str description: A description of the sample. :param str details: Details about the sample. :param solvent_phase: The solvent phase of the sample (liquid, vitrified, or other). """ def __init__(self, entity_assembly, num_of_probes, condition, description=None, details=None, solvent_phase=None): self.entity_assembly = entity_assembly self.num_of_probes = num_of_probes self.condition = condition self.description = description self.details = details self.solvent_phase = solvent_phase def __eq__(self, other): return self.__dict__ == other.__dict__ class EntityAssembly: """The assembly of the entities that are in the system. :param entity: The entity to add. :type entity: :class:`ihm.Entity` :param num_copies: The number of copies for the entity in the assembly. """ def __init__(self, entity=None, num_copies=0): self.entity_list = [] self.num_copies_list = [] if entity is not None and num_copies != 0: self.add_entity(entity, num_copies) def add_entity(self, entity, num_copies): if num_copies < 0: raise ValueError("Number of copies for Entity must be " "larger than zero.") self.entity_list.append(entity) self.num_copies_list.append(num_copies) def __eq__(self, other): return self.__dict__ == other.__dict__ class SampleCondition: """Description of the sample conditions. *Currently this is only text, but will be extended in the future.* :param str details: Description of the sample conditions. """ def __init__(self, details=None): self.details = details def __eq__(self, other): return self.__dict__ == other.__dict__ class Experiment: """The Experiment collects combinations of instrument, experimental settings and sample. :param instrument: The instrument. :type instrument: :class:`Instrument` :param inst_setting: The instrument setting. :type inst_setting: :class:`InstSetting` :param exp_condition: The experimental conditions. :type exp_condition: :class:`ExpCondition` :param sample: The sample. :type sample: :class:`Sample` :param details: Details on the experiment. """ def __init__(self, instrument=None, inst_setting=None, exp_condition=None, sample=None, details=None): """The Experiment object can either be initiated with empty lists, or with an entry for each of them. In this way, an experiment object is created and filled with one entry. 
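For illustration only, a minimal sketch (assuming a :class:`Sample` object ``sample`` has already been created; the details strings are placeholders) might look like::

           import ihm.flr
           instrument = ihm.flr.Instrument(details='confocal microscope')
           inst_setting = ihm.flr.InstSetting(
               details='pulsed interleaved excitation')
           exp_condition = ihm.flr.ExpCondition(details='room temperature')
           experiment = ihm.flr.Experiment()
           experiment.add_entry(instrument=instrument,
                                inst_setting=inst_setting,
                                exp_condition=exp_condition, sample=sample)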
""" self.instrument_list = [] self.inst_setting_list = [] self.exp_condition_list = [] self.sample_list = [] self.details_list = [] if (instrument is not None and inst_setting is not None and exp_condition is not None and sample is not None): self.add_entry(instrument=instrument, inst_setting=inst_setting, exp_condition=exp_condition, sample=sample, details=details) def add_entry(self, instrument, inst_setting, exp_condition, sample, details=None): """Entries to the experiment object can also be added one by one. """ self.instrument_list.append(instrument) self.inst_setting_list.append(inst_setting) self.exp_condition_list.append(exp_condition) self.sample_list.append(sample) self.details_list.append(details) def get_entry_by_index(self, index): """Returns the combination of :class:`Instrument`, :class:`InstSetting`, :class:`ExpCondition`, :class:`Sample`, and details for a given index. """ return (self.instrument_list[index], self.inst_setting_list[index], self.exp_condition_list[index], self.sample_list[index], self.details_list[index]) def __eq__(self, other): return ((self.instrument_list == other.instrument_list) and (self.inst_setting_list == other.inst_setting_list) and (self.exp_condition_list == other.exp_condition_list) and (self.sample_list == other.sample_list) and (self.details_list == other.details_list)) def contains(self, instrument, inst_setting, exp_condition, sample): """Checks whether a combination of :class:`Instrument`, :class:`InstSetting`, :class:`ExpCondition`, :class:`Sample` is already included in the experiment object. """ # TODO: possibly extend this by the details_list? for i in range(len(self.instrument_list)): if ((instrument == self.instrument_list[i]) and (inst_setting == self.inst_setting_list[i]) and (exp_condition == self.exp_condition_list[i]) and (sample == self.sample_list[i])): return True return False class Instrument: """Description of the Instrument used for the measurements. *Currently this is only text, but will be extended in the future.* :param details: Description of the instrument used for the measurements. """ def __init__(self, details=None): self.details = details def __eq__(self, other): return self.__dict__ == other.__dict__ class InstSetting: """Description of the instrument settings. *Currently this is only text, but will be extended in the future.* :param str details: Description of the instrument settings used for the measurement (e.g. laser power or size of observation volume in case of confocal measurements). """ def __init__(self, details=None): self.details = details def __eq__(self, other): return self.__dict__ == other.__dict__ class ExpCondition: """Description of the experimental conditions. * Currently this is only text, but will be extended in the future.* :param str details: Description of the experimental conditions (e.g. the temperature at which the experiment was carried out). """ def __init__(self, details=None): self.details = details def __eq__(self, other): return self.__dict__ == other.__dict__ class FRETAnalysis: """An analysis of FRET data that was performed. :param experiment: The Experiment object for this FRET analysis. :type experiment: :class:`Experiment` :param sample_probe_1: The combination of sample and probe for the first probe. :type sample_probe_1: :class:`SampleProbeDetails` :param sample_probe_2: The combination of sample and probe for the second probe. :type sample_probe_2: :class:`SampleProbeDetails` :param forster_radius: The Förster radius object for this FRET analysis. 
:type forster_radius: :class:`FRETForsterRadius`. :param str type: The type of the FRET analysis (intensity-based or lifetime-based). :param calibration_parameters: The calibration parameters used for this analysis (only in case of intensity-based analyses). :type calibration_parameters: :class:`FRETCalibrationParameters` :param lifetime_fit_model: The fit model used in case of lifetime-based analyses. :type lifetime_fit_model: :class:`LifetimeFitModel` :param ref_measurement_group: The group of reference measurements in case of lifetime-based analyses. :type ref_measurement_group: :class:`RefMeasurementGroup` :param str method_name: The method used for the analysis. :param float chi_square_reduced: The chi-square reduced as a quality measure for the fit. :param float donor_only_fraction: The donor-only fraction. :param dataset: The dataset used. :type dataset: :class:`ihm.dataset.Dataset` :param file: The external file that contains (results of) the analysis. :type file: :class:`ihm.location.OutputFileLocation` :param software: The software used for the analysis. :type software: :class:`ihm.Software` """ def __init__(self, experiment, sample_probe_1, sample_probe_2, forster_radius, type, calibration_parameters=None, lifetime_fit_model=None, ref_measurement_group=None, method_name=None, details=None, chi_square_reduced=None, donor_only_fraction=None, dataset=None, file=None, software=None): if type not in ['lifetime-based', 'intensity-based', None]: raise ValueError( 'FRETAnalysis.type can be \'lifetime-based\' or ' '\'intensity-based\'. The value is %s' % type) self.experiment = experiment self.sample_probe_1 = sample_probe_1 self.sample_probe_2 = sample_probe_2 self.forster_radius = forster_radius self.type = type self.calibration_parameters = calibration_parameters self.lifetime_fit_model = lifetime_fit_model self.ref_measurement_group = ref_measurement_group self.method_name = method_name self.details = details self.chi_square_reduced = chi_square_reduced self.donor_only_fraction = donor_only_fraction self.dataset = dataset self.external_file = file self.software = software def __eq__(self, other): return self.__dict__ == other.__dict__ class LifetimeFitModel: """A lifetime-fit model used for lifetime-based analysis. :param str name: The name of the fit model. :param str description: A description of the fit model. :param file: An external file that contains additional information on the fit model. :type file: :class:`ihm.location.OutputFileLocation` :param citation: A citation for the fit model. :type citation: :class:`ihm.Citation` """ def __init__(self, name, description, file=None, citation=None): self.name = name self.description = description self.external_file = file self.citation = citation def __eq__(self, other): return self.__dict__ == other.__dict__ class RefMeasurementGroup: """A Group containing reference measurements for lifetime-based analysis. :param str details: Details on the Group of reference measurements. """ def __init__(self, details=None): self.details = details self.ref_measurement_list = [] self.num_measurements = len(self.ref_measurement_list) def add_ref_measurement(self, ref_measurement): """Add a lifetime reference measurement to a ref_measurement_group.""" self.ref_measurement_list.append(ref_measurement) self.num_measurements = len(self.ref_measurement_list) def get_info(self): return self.ref_measurement_list def __eq__(self, other): return self.__dict__ == other.__dict__ class RefMeasurement: """A reference measurement for lifetime-based analysis. 
:param ref_sample_probe: The combination of sample and probe used for the reference measurement. :type ref_sample_probe: :class:`SampleProbeDetails` :param str details: Details on the measurement. :param list_of_lifetimes: A list of the results from the reference measurement. :type list_of_lifetimes: List of :class:`RefMeasurementLifetime` """ def __init__(self, ref_sample_probe, details=None, list_of_lifetimes=None): self.ref_sample_probe = ref_sample_probe self.details = details self.list_of_lifetimes = \ list_of_lifetimes if list_of_lifetimes is not None else [] self.num_species = len(self.list_of_lifetimes) def add_lifetime(self, lifetime): """Add a lifetime to the list_of_lifetimes.""" self.list_of_lifetimes.append(lifetime) self.num_species = len(self.list_of_lifetimes) def __eq__(self, other): return self.__dict__ == other.__dict__ class RefMeasurementLifetime: """Lifetime for a species in a reference measurement. :param float species_fraction: The species-fraction for the respective lifetime. :param float lifetime: The lifetime (in ns). :param str species_name: A name for the species. """ def __init__(self, species_fraction, lifetime, species_name=None): self.species_fraction = species_fraction self.lifetime = lifetime self.species_name = species_name def __eq__(self, other): return self.__dict__ == other.__dict__ class FRETDistanceRestraintGroup: """A collection of FRET distance restraints that are used together. """ def __init__(self): self.distance_restraint_list = [] def add_distance_restraint(self, distance_restraint): """Add a distance restraint to a distance_restraint_group""" self.distance_restraint_list.append(distance_restraint) def get_info(self): return self.distance_restraint_list def __eq__(self, other): return self.__dict__ == other.__dict__ class FRETDistanceRestraint: """A distance restraint from FRET. :param sample_probe_1: The combination of sample and probe for the first probe. :type sample_probe_1: :class:`SampleProbeDetails` :param sample_probe_2: The combination of sample and probe for the second probe. :type sample_probe_2: :class:`SampleProbeDetails` :param analysis: The FRET analysis from which the distance restraint originated. :type analysis: :class:`FRETAnalysis` :param float distance: The distance of the restraint. :param float distance_error_plus: The (absolute, e.g. in Angstrom) error in the upper direction, such that ``upper boundary = distance + distance_error_plus``. :param float distance_error_minus: The (absolute, e.g. in Angstrom) error in the lower direction, such that ``lower boundary = distance - distance_error_minus``. :param str distance_type: The type of distance (<R_DA>, <R_DA>_E, or R_mp). :param state: The state the distance restraint is connected to. Important for multi-state models. :type state: :class:`ihm.model.State` :param float population_fraction: The population fraction of the state in case of multi-state models. :param peak_assignment: The method by which a peak was assigned.
:type peak_assignment: :class:`PeakAssignment` """ def __init__(self, sample_probe_1, sample_probe_2, analysis, distance, distance_error_plus=0., distance_error_minus=0., distance_type=None, state=None, population_fraction=0., peak_assignment=None): self.sample_probe_1 = sample_probe_1 self.sample_probe_2 = sample_probe_2 self.state = state self.analysis = analysis self.distance = distance self.distance_error_plus = distance_error_plus self.distance_error_minus = distance_error_minus self.distance_type = distance_type self.population_fraction = population_fraction self.peak_assignment = peak_assignment def __eq__(self, other): return self.__dict__ == other.__dict__ class FRETForsterRadius: """The FRET Förster radius between two probes. :param donor_probe: The donor probe. :type donor_probe: :class:`Probe` :param acceptor_probe: The acceptor probe. :type acceptor_probe: :class:`Probe` :param float forster_radius: The Förster radius between the two probes. :param float reduced_forster_radius: The reduced Förster radius between the two probes. """ def __init__(self, donor_probe, acceptor_probe, forster_radius, reduced_forster_radius=None): self.donor_probe = donor_probe self.acceptor_probe = acceptor_probe self.forster_radius = forster_radius self.reduced_forster_radius = reduced_forster_radius def __eq__(self, other): return self.__dict__ == other.__dict__ class FRETCalibrationParameters: """The calibration parameter from the FRET measurements. For the definitions of the parameters see Hellenkamp et al. Nat. Methods 2018. :param float phi_acceptor: The quantum yield of the acceptor. :param float alpha: The alpha parameter. :param float alpha_sd: The standard deviation of the alpha parameter. :param float gg_gr_ratio: The ratio of the green and red detection efficiencies. :param float beta: The beta parameter. :param float gamma: The gamma parameter. :param float delta: The delta parameter. :param float a_b: The fraction of bright molecules. """ def __init__(self, phi_acceptor=None, alpha=None, alpha_sd=None, gg_gr_ratio=None, beta=None, gamma=None, delta=None, a_b=None): self.phi_acceptor = phi_acceptor self.alpha = alpha self.alpha_sd = alpha_sd self.gg_gr_ratio = gg_gr_ratio self.beta = beta self.gamma = gamma self.delta = delta self.a_b = a_b def __eq__(self, other): return self.__dict__ == other.__dict__ class PeakAssignment: """The method of peak assignment in case of multiple peaks, e.g. by population. :param str method_name: The method used for peak assignment. :param str details: The details of the peak assignment procedure. """ def __init__(self, method_name, details=None): self.method_name = method_name self.details = details def __eq__(self, other): return self.__dict__ == other.__dict__ class FRETModelQuality: """The quality measure for a Model based on FRET data. :param model: The model being described. :type model: :class:`ihm.model.Model` :param chi_square_reduced: The quality of the model in terms of chi_square_reduced based on the Distance restraints used for the modeling. :param dataset_group: The group of datasets that was used for the quality estimation. :type dataset_group: :class:`ihm.dataset.DatasetGroup` :param method: The method used for judging the model quality. :param str details: Details on the model quality. 
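For illustration only, a rough sketch (assuming ``model``, ``dataset_group``, and an :class:`FLRData` instance ``flr_data`` already exist; the quality value and method text are made up) might look like::

        import ihm.flr
        quality = ihm.flr.FRETModelQuality(
            model=model, chi_square_reduced=1.3,
            dataset_group=dataset_group,
            method='Reduced chi-square over all FRET distance restraints')
        flr_data.fret_model_qualities.append(quality)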
""" def __init__(self, model, chi_square_reduced, dataset_group, method, details=None): self.model = model self.chi_square_reduced = chi_square_reduced self.dataset_group = dataset_group self.method = method self.details = details def __eq__(self, other): return self.__dict__ == other.__dict__ class FRETModelDistance: """The distance in a model for a certain distance restraint. :param restraint: The Distance restraint. :type restraint: :class:`FRETDistanceRestraint` :param model: The model the distance applies to. :type model: :class:`ihm.model.Model` :param distance: The distance obtained for the distance restraint in the current model. :param distance_deviation: The deviation of the distance in the model compared to the value of the distance restraint. """ def __init__(self, restraint, model, distance, distance_deviation=None): self.restraint = restraint self.model = model self.distance = distance self.distance_deviation = distance_deviation if self.distance_deviation is None and self.restraint is not None: self.calculate_deviation() def calculate_deviation(self): if self.distance_deviation is None and self.restraint is not None: self.distance_deviation = \ float(self.restraint.distance) - float(self.distance) def update_deviation(self): if self.restraint is not None: self.distance_deviation = \ float(self.restraint.distance) - float(self.distance) def __eq__(self, other): return self.__dict__ == other.__dict__ class FPSModeling: """Collect the modeling parameters for different steps of FPS, e.g. Docking, Refinement, or Error estimation. :param protocol: The modeling protocol to which the FPS modeling step belongs. :type protocol: :class:`ihm.protocol.Protocol` :param restraint_group: The restraint group used for the modeling. :type restraint_group: :class:`FRETDistanceRestraintGroup` :param global_parameter: The global FPS parameters used. :type global_parameter: :class:`FPSGlobalParameters` :param str probe_modeling_method: either "AV" or "MPP". :param str details: Details on the FPS modeling. """ def __init__(self, protocol, restraint_group, global_parameter, probe_modeling_method, details=None): self.protocol = protocol self.restraint_group = restraint_group self.global_parameter = global_parameter self.probe_modeling_method = probe_modeling_method self.details = details def __eq__(self, other): return self.__dict__ == other.__dict__ class FPSGlobalParameters: """The global parameters in the FPS program. *For a description of the parameters, see also the FPS manual.* :param float forster_radius: The Förster radius used in the FPS program. :param int conversion_function_polynom_order: Order of the polynom for the conversion function between Rmp and E. :param int repetition: The number of repetitions. :param float av_grid_rel: The AV grid spacing relative to the smallest dye or linker dimension. :param float av_min_grid_a: The minimal AV grid spacing in Angstrom. :param float av_allowed_sphere: The allowed sphere radius. :param int av_search_nodes: Number of neighboring positions to be scanned for clashes. :param float av_e_samples_k: The number of samples for calculation of E (in thousand). :param float sim_viscosity_adjustment: Daming rate during docking and refinement. :param float sim_dt_adjustment: Time step during simulation. :param float sim_max_iter_k: Maximal number of iterations (in thousand). :param float sim_max_force: Maximal force. :param float sim_clash_tolerance_a: Clash tolerance in Angstrom. :param float sim_reciprocal_kt: reciprocal kT. 
:param str sim_clash_potential: The clash potential. :param float convergence_e: Convergence criterion E. :param float convergence_k: Convergence criterion K. :param float convergence_f: Convergence criterion F. :param float convergence_t: Convergence criterion T. :param str optimized_distances: Which distances are optimized? """ def __init__(self, forster_radius, conversion_function_polynom_order, repetition, av_grid_rel, av_min_grid_a, av_allowed_sphere, av_search_nodes, av_e_samples_k, sim_viscosity_adjustment, sim_dt_adjustment, sim_max_iter_k, sim_max_force, sim_clash_tolerance_a, sim_reciprocal_kt, sim_clash_potential, convergence_e, convergence_k, convergence_f, convergence_t, optimized_distances='All'): self.forster_radius = forster_radius self.conversion_function_polynom_order \ = conversion_function_polynom_order self.repetition = repetition self.av_grid_rel = av_grid_rel self.av_min_grid_a = av_min_grid_a self.av_allowed_sphere = av_allowed_sphere self.av_search_nodes = av_search_nodes self.av_e_samples_k = av_e_samples_k self.sim_viscosity_adjustment = sim_viscosity_adjustment self.sim_dt_adjustment = sim_dt_adjustment self.sim_max_iter_k = sim_max_iter_k self.sim_max_force = sim_max_force self.sim_clash_tolerance_a = sim_clash_tolerance_a self.sim_reciprocal_kt = sim_reciprocal_kt self.sim_clash_potential = sim_clash_potential self.convergence_e = convergence_e self.convergence_k = convergence_k self.convergence_f = convergence_f self.convergence_t = convergence_t self.optimized_distances = optimized_distances def __eq__(self, other): return self.__dict__ == other.__dict__ class FPSAVModeling: """FPS modeling using AV. This object connects the FPS_modeling step, the sample_probe and the respective AV parameters. :param fps_modeling: The FPS modeling ID. :type fps_modeling: :class:`FPSModeling` :param sample_probe: The Sample probe ID. :type sample_probe: :class:`SampleProbeDetails` :param parameter: The FPS AV parameters used. :type parameter: :class:`FPSAVParameter` """ def __init__(self, fps_modeling, sample_probe, parameter): # fps_modeling is the object containing information on the # ihm modeling protocol, the restraint group and the global # FPS parameters self.fps_modeling = fps_modeling self.sample_probe = sample_probe self.parameter = parameter def __eq__(self, other): return self.__dict__ == other.__dict__ class FPSAVParameter: """The AV parameters used for the modeling using FPS. :param int num_linker_atoms: The number of atoms in the linker. :param float linker_length: The length of the linker in Angstrom. :param float linker_width: The width of the linker in Angstrom. :param float probe_radius_1: The first radius of the probe. :param float probe_radius_2: If AV3 is used, the second radius of the probe. :param float probe_radius_3: If AV3 is used, the third radius of the probe. """ def __init__(self, num_linker_atoms, linker_length, linker_width, probe_radius_1, probe_radius_2=None, probe_radius_3=None): self.num_linker_atoms = num_linker_atoms self.linker_length = linker_length self.linker_width = linker_width self.probe_radius_1 = probe_radius_1 self.probe_radius_2 = probe_radius_2 self.probe_radius_3 = probe_radius_3 def __eq__(self, other): return self.__dict__ == other.__dict__ class FPSMPPModeling: """Maps the FPSModeling object to a mean probe position and connects it to the reference coordinate system. :param fps_modeling: The FPS modeling object. :type fps_modeling: :class:`FPSModeling` :param mpp: The ID of the mean probe position. 
:type mpp: :class:`FPSMeanProbePosition` :param mpp_atom_position_group: The group of atom positions that defines the reference coordinate system. :type mpp_atom_position_group: :class:`FPSMPPAtomPositionGroup` """ def __init__(self, fps_modeling, mpp, mpp_atom_position_group): # fps_modeling is the object containing information on the # ihm modeling protocol, the restraint group and the global # FPS parameters self.fps_modeling = fps_modeling self.mpp = mpp self.mpp_atom_position_group = mpp_atom_position_group def __eq__(self, other): return self.__dict__ == other.__dict__ class FPSMeanProbePosition: """The mean probe position of an AV, which can be used instead of an AV. *It is usually not recommended to use this. Use AVs instead.* The coordinates are with respect to a reference coordinate system defined by :class:`FPSMPPAtomPositionGroup`. :param sample_probe: The Sample probe. :type sample_probe: :class:`SampleProbeDetails` :param float x: The x-coordinate of the mean probe position. :param float y: The y-coordinate of the mean probe position. :param float z: The z-coordinate of the mean probe position. """ def __init__(self, sample_probe, x, y, z): self.sample_probe = sample_probe self.x, self.y, self.z = x, y, z def __eq__(self, other): return self.__dict__ == other.__dict__ class FPSMPPAtomPositionGroup: """A group of atom positions used to define the coordinate system of a mean probe position. *Not part of the FLR dictionary.* """ def __init__(self): self.mpp_atom_position_list = [] def add_atom_position(self, atom_position): self.mpp_atom_position_list.append(atom_position) def __eq__(self, other): return self.__dict__ == other.__dict__ class FPSMPPAtomPosition: """An atom used to describe the coordinate system for a mean probe position. :param atom: The atom being described. :type atom: :class:`ihm.Atom` :param float x: The x-coordinate of the atom. :param float y: The y-coordinate of the atom. :param float z: The z-coordinate of the atom. """ # atoms describing the coordinate system for a mean probe position def __init__(self, atom, x, y, z): self.atom, self.x, self.y, self.z = atom, x, y, z def __eq__(self, other): return self.__dict__ == other.__dict__ class KineticRateFretAnalysisConnection: """Connects a kinetic rate with a FRET analysis. :param fret_analysis: The FRETAnalysis object assigned to a kinetic rate :type fret_analysis: :class:`FRETAnalysis` :param kinetic_rate: The kinetic rate. :type kinetic_rate: :class:`ihm.multi_state_scheme.KineticRate` :param str details: Details about the connection between the FRETAnalysis object and the KineticRate object """ def __init__(self, fret_analysis, kinetic_rate, details=None): self.fret_analysis = fret_analysis self.kinetic_rate = kinetic_rate self.details = details def __eq__(self, other): return self.__dict__ == other.__dict__ class RelaxationTimeFretAnalysisConnection: """Connects a relaxation time with a FRET analysis. :param fret_analysis: The FRETAnalysis object assigned to a relaxation time :type fret_analysis: :class:`FRETAnalysis` :param relaxation_time: The relaxation time. :type relaxation_time: :class:`ihm.multi_state_scheme.RelaxationTime` :param str details: Details about the connection between the FRETAnalysis object and the RelaxationTime object """ def __init__(self, fret_analysis, relaxation_time, details=None): self.fret_analysis = fret_analysis self.relaxation_time = relaxation_time self.details = details def __eq__(self, other): return self.__dict__ == other.__dict__ class FLRData: """A collection of the fluorescence data to be added to the system.
Instances of this class are generally added to :attr:`~ihm.System.flr_data`. """ def __init__(self): #: All groups of FRET distance restraints. #: See :class:`FRETDistanceRestraintGroup`. self.distance_restraint_groups = [] #: All conjugates of polymer residue and probe. #: See :class:`PolyProbeConjugate`. self.poly_probe_conjugates = [] #: All quality measures for models based on FRET data. #: See :class:`FRETModelQuality`. self.fret_model_qualities = [] #: All distances in models for distance restraints. #: See :class:`FRETModelDistance`. self.fret_model_distances = [] #: All modeling objects. #: See :class:`FPSAVModeling` and :class:`FPSMPPModeling`. self.fps_modeling = [] #: All Connections between FRETAnalysis and KineticRate objects #: See :class: `KineticRateFRETAnalysisConnection` self.kinetic_rate_fret_analysis_connections = [] #: All Connections between FRETAnalysis and RelaxationTime objects #: See :class: `RelaxationTimeFRETAnalysisConnection` self.relaxation_time_fret_analysis_connections = [] # The following dictionaries are so far only used when reading data self._collection_flr_experiment = {} self._collection_flr_inst_setting = {} self._collection_flr_exp_condition = {} self._collection_flr_instrument = {} self._collection_flr_entity_assembly = {} self._collection_flr_sample_condition = {} self._collection_flr_sample = {} self._collection_flr_sample_probe_details = {} self._collection_flr_probe = {} self._collection_flr_poly_probe_position = {} self._collection_flr_poly_probe_position_modified = {} self._collection_flr_poly_probe_position_mutated = {} self._collection_flr_poly_probe_conjugate = {} self._collection_flr_fret_forster_radius = {} self._collection_flr_fret_calibration_parameters = {} self._collection_flr_fret_analysis = {} self._collection_flr_lifetime_fit_model = {} self._collection_flr_ref_measurement_group = {} self._collection_flr_ref_measurement = {} self._collection_flr_ref_measurement_lifetime = {} self._collection_flr_peak_assignment = {} self._collection_flr_fret_distance_restraint = {} self._collection_flr_fret_distance_restraint_group = {} self._collection_flr_fret_model_quality = {} self._collection_flr_fret_model_distance = {} self._collection_flr_fps_global_parameters = {} self._collection_flr_fps_modeling = {} self._collection_flr_fps_av_parameter = {} self._collection_flr_fps_av_modeling = {} self._collection_flr_fps_mean_probe_position = {} self._collection_flr_fps_mpp_atom_position = {} self._collection_flr_fps_mpp_modeling = {} self._collection_flr_kinetic_rate_fret_analysis_connection = {} self._collection_flr_relaxation_time_fret_analysis_connection = {} def _all_distance_restraints(self): """Yield all FRETDistanceRestraint objects""" for rg in self.distance_restraint_groups: for r in rg.distance_restraint_list: yield r def _all_analyses(self): """Yield all FRETAnalysis objects""" for r in self._all_distance_restraints(): yield r.analysis # Get the analyses from the kinetic rate and # relaxation time connections for c in self.kinetic_rate_fret_analysis_connections: yield c.fret_analysis for c in self.relaxation_time_fret_analysis_connections: yield c.fret_analysis def _all_peak_assignments(self): """Yield all PeakAssignment objects""" for r in self._all_distance_restraints(): yield r.peak_assignment def _all_experiments(self): """Yield all Experiment objects""" for a in self._all_analyses(): yield a.experiment def _all_forster_radii(self): """Yield all FRETForsterRadius objects""" for a in self._all_analyses(): yield a.forster_radius def 
_all_calibration_parameters(self): """Yield all FRETCalibrationParameters objects""" for a in self._all_analyses(): if a.type == 'intensity-based': yield a.calibration_parameters def _all_lifetime_fit_models(self): """Yield all LifetimeFitModel objects""" for a in self._all_analyses(): if a.type == 'lifetime-based': yield a.lifetime_fit_model def _all_ref_measurement_groups(self): """Yield all RefMeasurementGroup objects""" for a in self._all_analyses(): if a.type == 'lifetime-based': yield a.ref_measurement_group def _all_ref_measurements(self): """Yield all RefMeasurement objects""" for rg in self._all_ref_measurement_groups(): for x in rg.ref_measurement_list: yield x def _all_ref_measurement_lifetimes(self): """Yield all RefMeasurementLifetime objects""" for r in self._all_ref_measurements(): for x in r.list_of_lifetimes: yield x def _all_sample_probe_details(self): """Yield all SampleProbeDetails objects""" for r in self._all_distance_restraints(): yield r.sample_probe_1 yield r.sample_probe_2 for r in self._all_ref_measurements(): yield r.ref_sample_probe def _all_samples(self): """Yield all Sample objects""" for s in self._all_sample_probe_details(): yield s.sample def _all_probes(self): """Yield all Probe objects""" for s in self._all_sample_probe_details(): yield s.probe def _all_poly_probe_positions(self): """Yield all PolyProbePosition objects""" for s in self._all_sample_probe_details(): yield s.poly_probe_position def _all_inst_settings(self): """Yield all InstSetting objects""" for e in self._all_experiments(): for s in e.inst_setting_list: yield s def _all_exp_conditions(self): """Yield all ExpCondition objects""" for e in self._all_experiments(): for s in e.exp_condition_list: yield s def _all_instruments(self): """Yield all Instrument objects""" for e in self._all_experiments(): for s in e.instrument_list: yield s def _all_fps_modeling(self): """Yield all FPSModeling objects""" for m in self.fps_modeling: yield m.fps_modeling def _all_fps_global_parameters(self): """Yield all FPSGlobalParameters objects""" for m in self._all_fps_modeling(): yield m.global_parameter def _all_fps_av_modeling(self): """Yield all FPSAVModeling objects""" for m in self.fps_modeling: if isinstance(m, FPSAVModeling): yield m def _all_fps_av_parameter(self): """Yield all FPSAVParameter objects""" for m in self._all_fps_av_modeling(): yield m.parameter def _all_fps_mpp_modeling(self): """Yield all FPSMPPModeling objects""" for m in self.fps_modeling: if isinstance(m, FPSMPPModeling): yield m def _all_fps_mean_probe_position(self): """Yield all FPSMeanProbePosition objects""" for m in self._all_fps_mpp_modeling(): yield m.mpp def _all_fps_atom_position_group(self): """Yield all FPSMPPAtomPositionGroup objects""" for m in self._all_fps_mpp_modeling(): yield m.mpp_atom_position_group def _all_flr_chemical_descriptors(self): """Collect the chemical descriptors from the flr part. 
*This might contain duplicates.* """ # collect from all distance_restraint_groups for drgroup in self.distance_restraint_groups: # collect from all distance restraints for dr in drgroup.distance_restraint_list: # collect from both sample_probe_1 and sample_probe_2 for this_sample_probe in (dr.sample_probe_1, dr.sample_probe_2): # collect from the probe probe = this_sample_probe.probe # reactive probe yield probe.probe_descriptor.reactive_probe_chem_descriptor # chromophore yield probe.probe_descriptor.chromophore_chem_descriptor # collect from the poly_probe_position pos = this_sample_probe.poly_probe_position # modified chem descriptor if pos.modification_flag: yield pos.modified_chem_descriptor # collect from all analyses if they are lifetime-based a = dr.analysis if a.type == 'lifetime-based': # RefMeasurementGroup rg = a.ref_measurement_group # collect from all RefMeasurements for rm in rg.ref_measurement_list: # collect from the ref_sample_probe this_ref_sample_probe = rm.ref_sample_probe probe = this_ref_sample_probe.probe pd = probe.probe_descriptor # reactive probe yield pd.reactive_probe_chem_descriptor # chromophore yield pd.chromophore_chem_descriptor # collect from the poly_probe_position pos = this_ref_sample_probe.poly_probe_position # modified chem descriptor if pos.modification_flag: yield pos.modified_chem_descriptor # and collect from all poly_probe_conjugates for c in self.poly_probe_conjugates: yield c.chem_descriptor python-ihm-2.7/ihm/format.py000066400000000000000000001442541503573337200161170ustar00rootroot00000000000000"""Utility classes to handle CIF format. This module provides classes to read in and write out mmCIF files. It is only concerned with handling syntactically correct CIF - it does not know the set of tables or the mapping to ihm objects. For that, see :mod:`ihm.dumper` for writing and :mod:`ihm.reader` for reading. See also the `stream parser example `_ and the `token reader example `_. """ # noqa: E501 import textwrap import operator import ihm from io import StringIO import inspect import re try: from . import _format except ImportError: _format = None def _write_multiline(val, fh): fh.write("\n;") fh.write(val) if not val.endswith('\n'): fh.write("\n") fh.write(";\n") class _LineWriter: def __init__(self, writer, line_len=80): self.writer = writer self.line_len = line_len self.column = 0 def write(self, val): if isinstance(val, str) and '\n' in val: _write_multiline(val, self.writer.fh) self.column = 0 return val = '.' 
if val is None else self.writer._repr(val) if self.column > 0: if self.line_len and self.column + len(val) + 1 > self.line_len: self.writer.fh.write("\n") self.column = 0 else: self.writer.fh.write(" ") self.column += 1 self.writer.fh.write(val) self.column += len(val) class _CifCategoryWriter: def __init__(self, writer, category): self.writer = writer self.category = category def write(self, **kwargs): self.writer._write(self.category, kwargs) def __enter__(self): return self def __exit__(self, exc_type, exc_value, traceback): pass class _CifLoopWriter: def __init__(self, writer, category, keys, line_wrap=True): self._line_wrap = line_wrap self.writer = writer self.category = category self.keys = keys # Remove characters that we can't use in Python identifiers self.python_keys = [k.replace('[', '').replace(']', '') for k in keys] self._empty_loop = True def write(self, **kwargs): if self._empty_loop: f = self.writer.fh f.write("#\nloop_\n") for k in self.keys: f.write("%s.%s\n" % (self.category, k)) self._empty_loop = False lw = _LineWriter(self.writer, line_len=80 if self._line_wrap else 0) for k in self.python_keys: lw.write(kwargs.get(k, None)) self.writer.fh.write("\n") def __enter__(self): return self def __exit__(self, exc_type, exc_value, traceback): if not self._empty_loop: self.writer.fh.write("#\n") class _Writer: """Base class for all writers""" omitted = '.' unknown = '?' _boolmap = {False: 'NO', True: 'YES'} def __init__(self, fh): self.fh = fh class CifWriter(_Writer): """Write information to a CIF file. The constructor takes a single argument - a Python filelike object to write to - and provides methods to write Python objects to that file. Most simple Python types are supported (string, float, bool, int). The Python bool type is mapped to CIF strings 'NO' and 'YES'. Floats are always represented with 3 decimal places (or in scientific notation with 3 digits of precision if smaller than 1e-3); if a different amount of precision is desired, convert the float to a string first.""" _line_wrap = True @classmethod def _set_line_wrap(cls, line_wrap): cls._line_wrap = line_wrap def flush(self): # noop - data is written as it is encountered pass def start_block(self, name): """Start a new data block in the file with the given name.""" self.fh.write('data_%s\n' % name) def end_block(self): # noop - mmCIF has no end-of-block indicator pass def category(self, category): """Return a context manager to write a CIF category. A CIF category is a simple list of key:value pairs. :param str category: the name of the category (e.g. "_struct_conf_type"). :return: an object with a single method `write` which takes keyword arguments. For example:: with writer.category("_struct_conf_type") as l: l.write(id='HELX_P', criteria=writer.unknown) """ return _CifCategoryWriter(self, category) def loop(self, category, keys): """Return a context manager to write a CIF loop. :param str category: the name of the category (e.g. "_struct_conf") :param list keys: the field keys in that category :return: an object with a single method `write` which takes keyword arguments; this can be called any number of times to add entries to the loop. Any field keys in `keys` that are not provided as arguments to `write`, or values that are the Python value `None`, will get the CIF omitted value ('.'), while arguments to `write` that are not present in `keys` will be ignored. 
For example:: with writer.loop("_struct_conf", ["id", "conf_type_id"]) as l: for i in range(5): l.write(id='HELX_P1%d' % i, conf_type_id='HELX_P') """ return _CifLoopWriter(self, category, keys, line_wrap=self._line_wrap) def write_comment(self, comment): """Write a simple comment to the CIF file. The comment will be wrapped if necessary for readability. See :meth:`_set_line_wrap`.""" if self._line_wrap: for line in textwrap.wrap(comment, 78): self.fh.write('# ' + line + '\n') else: self.fh.write('# ' + comment + '\n') def _write(self, category, kwargs): for key, val in sorted(kwargs.items(), key=operator.itemgetter(0)): if isinstance(val, str) and '\n' in val: self.fh.write("%s.%s" % (category, key)) _write_multiline(val, self.fh) else: self.fh.write("%s.%s %s\n" % (category, key, self.omitted if val is None else self._repr(val))) def _repr(self, obj): if isinstance(obj, str) and '"' not in obj \ and "'" not in obj and " " not in obj \ and len(obj) > 0 \ and not obj.startswith('_') \ and not obj.startswith('global_') \ and not obj.startswith('[') \ and obj[:5] not in ('data_', 'save_', 'loop_', 'stop_', '?', '.'): return obj elif isinstance(obj, float): if abs(obj) < 1e-3: return "%.3g" % obj else: return "%.3f" % obj elif isinstance(obj, bool): return self._boolmap[obj] elif isinstance(obj, str): return repr(obj) else: return str(obj) # Acceptable 'whitespace' characters in CIF _WHITESPACE = set(" \t") class CifParserError(Exception): """Exception raised for invalid format mmCIF files""" pass class _Token: """A token in an mmCIF file""" pass class _ValueToken(_Token): """The value of a variable in mmCIF""" pass class _OmittedValueToken(_ValueToken): """A value that is deliberately omitted (the '.' string in mmCIF)""" def as_mmcif(self): return "." class _UnknownValueToken(_ValueToken): """A value that is unknown (the '?' string in mmCIF)""" def as_mmcif(self): return "?" class _TextValueToken(_ValueToken): """The value of a variable in mmCIF as a piece of text""" __slots__ = ['txt', 'quote'] def __init__(self, txt, quote): self.txt = txt self.quote = quote def as_mmcif(self): if '\n' in self.txt or self.quote == ';': suffix = ";\n" if self.txt.endswith('\n') else "\n;\n" return ";" + self.txt + suffix elif self.quote == "'": return "'" + self.txt + "'" elif self.quote == '"' or ' ' in self.txt: return '"' + self.txt + '"' else: return self.txt class _VariableToken(_Token): """A variable name, e.g. _entry.id, in mmCIF""" __slots__ = ['category', 'keyword'] def __init__(self, val, linenum): # mmCIF categories and keywords are case insensitive, so make # everything lowercase self.category, _, self.keyword = val.lower().partition('.') if not self.category or not self.keyword: raise CifParserError("Malformed mmCIF variable name " "(%s) on line %d" % (val, linenum)) class _PreservingVariableToken(_VariableToken): """A variable name that preserves the original case of the keyword""" __slots__ = ['category', 'keyword', 'orig_keyword'] def __init__(self, val, linenum): super().__init__(val, linenum) _, _, self.orig_keyword = val.partition('.') def as_mmcif(self): if self.orig_keyword and self.orig_keyword.lower() == self.keyword: return self.category + '.' + self.orig_keyword else: return self.category + '.' 
+ self.keyword class _CommentToken(_Token): """A comment in mmCIF without the leading '#'""" __slots__ = ['txt'] def __init__(self, txt): self.txt = txt def as_mmcif(self): return "#" + self.txt class _WhitespaceToken(_Token): """Space between other mmCIF tokens""" __slots__ = ['txt'] def __init__(self, txt): self.txt = txt def as_mmcif(self): return self.txt class _EndOfLineToken(_Token): """End of a line in an mmCIF file""" def as_mmcif(self): return "\n" class _NullToken(_Token): """Null token""" def as_mmcif(self): return "" # Return dummy values for filters that expect a variable or value token keyword = property(lambda self: None) class _DataToken(_Token): """A data_* keyword in mmCIF, denoting a new data block""" __slots__ = ['txt'] def __init__(self, txt): self.txt = txt def as_mmcif(self): return 'data_' + self.txt class _LoopToken(_Token): """A loop_ keyword in mmCIF, denoting the start of a loop construct""" def as_mmcif(self): return "loop_" class _SaveToken(_Token): """A save_* keyword in mmCIF, denoting the start or end of a save frame""" pass class _Reader: """Base class for reading a file and extracting some or all of its data.""" def _add_category_keys(self): """Populate _keys for each category by inspecting its __call__ method""" def python_to_cif(field): # Map valid Python identifiers to mmCIF keywords if field.startswith('tr_vector') or field.startswith('rot_matrix'): return re.sub(r'(\d)', r'[\1]', field) else: return field def fill_keys(h, s, attr, typ): if not hasattr(h, attr): setattr(h, attr, frozenset( python_to_cif(k) for k, v in s.annotations.items() if v is typ)) def check_extra(h, attr): extra = frozenset(getattr(h, attr)) - frozenset(h._keys) if extra: raise ValueError("For %s, %s not in _keys: %s" % (h, attr, ", ".join(extra))) for h in self.category_handler.values(): s = inspect.getfullargspec(h.__call__) if not hasattr(h, '_keys'): h._keys = [python_to_cif(x) for x in s.args[1:]] fill_keys(h, s, '_int_keys', int) fill_keys(h, s, '_float_keys', float) fill_keys(h, s, '_bool_keys', bool) bad_keys = frozenset(k for k, v in s.annotations.items() if v not in (int, float, str, bool)) if bad_keys: raise ValueError("For %s, bad annotations: %s" % (h, ", ".join(bad_keys))) check_extra(h, '_int_keys') check_extra(h, '_float_keys') check_extra(h, '_bool_keys') class _CifTokenizer: def __init__(self, fh): self.fh = fh self._tokens = [] self._token_index = 0 self._linenum = 0 # Read a line from the file. Treat it as ASCII (not Unicode) # but be tolerant of 8-bit characters by assuming latin-1 encoding def _read_line(self): line = self.fh.readline() if isinstance(line, bytes): return line.decode('latin-1') else: return line def _read_multiline_token(self, first_line, ignore_multiline): """Read a semicolon-delimited (multiline) token""" lines = [first_line[1:]] # Skip initial semicolon start_linenum = self._linenum while True: self._linenum += 1 nextline = self._read_line() if nextline == '': raise CifParserError( "End of file while reading multiline " "string which started on line %d" % start_linenum) elif nextline.startswith(';'): # Strip last newline lines[-1] = lines[-1].rstrip('\r\n') self._tokens = [_TextValueToken("".join(lines), ';')] return elif not ignore_multiline: lines.append(nextline) def _handle_quoted_token(self, line, strlen, start_pos, quote_type): """Given the start of a quoted string, find the end and add a token for it""" quote = line[start_pos] # Get the next quote that is followed by whitespace (or line end). 
# In mmCIF a quote within a string is not considered an end quote as # long as it is not followed by whitespace. end = start_pos while True: end = line.find(quote, end + 1) if end == -1: raise CifParserError("%s-quoted string not terminated " "at line %d" % (quote_type, self._linenum)) elif end == strlen - 1 or line[end + 1] in _WHITESPACE: # A quoted string is always a literal string, even if it is # "?" or ".", not an unknown/omitted value self._tokens.append(_TextValueToken(line[start_pos + 1:end], quote)) return end + 1 # Step past the closing quote def _skip_initial_whitespace(self, line, strlen, start_pos): while start_pos < strlen and line[start_pos] in _WHITESPACE: start_pos += 1 return start_pos def _extract_line_token(self, line, strlen, start_pos): """Extract the next token from the given line starting at start_pos, populating self._tokens. The new start_pos is returned.""" start_pos = self._skip_initial_whitespace(line, strlen, start_pos) if start_pos >= strlen: return strlen if line[start_pos] == '"': return self._handle_quoted_token(line, strlen, start_pos, "Double") elif line[start_pos] == "'": return self._handle_quoted_token(line, strlen, start_pos, "Single") elif line[start_pos] == "#": # Comment - discard the rest of the line self._handle_comment(line, start_pos) return strlen else: # Find end of token (whitespace or end of line) end_pos = start_pos while end_pos < strlen and line[end_pos] not in _WHITESPACE: end_pos += 1 val = line[start_pos:end_pos] if val == 'loop_': tok = _LoopToken() elif val.startswith('data_'): tok = _DataToken(val[5:]) elif val.startswith('save_'): tok = _SaveToken() elif val.startswith('_'): tok = self._handle_variable_token(val, self._linenum) elif val == '.': tok = _OmittedValueToken() elif val == '?': tok = _UnknownValueToken() else: # Note that we do no special processing for other reserved # words (global_, save_, stop_). But the probability of # them occurring where we expect a value is pretty small. tok = _TextValueToken(val, None) # don't alter case of values self._tokens.append(tok) return end_pos def _handle_variable_token(self, val, linenum): return _VariableToken(val, linenum) def _handle_comment(self, line, start_pos): """Potentially handle a comment that spans line[start_pos:].""" pass def _tokenize(self, line): """Break up a line into tokens, populating self._tokens""" self._tokens = [] if line.startswith('#'): self._handle_comment(line, 0) return # Skip comment lines start_pos = 0 strlen = len(line) while start_pos < strlen: start_pos = self._extract_line_token(line, strlen, start_pos) def _unget_token(self): """Push back the last token returned by _get_token() so it can be read again""" self._token_index -= 1 def _get_token(self, ignore_multiline=False): """Get the next :class:`_Token` from an mmCIF file, or None on end of file. If ignore_multiline is TRUE, the string contents of any multiline value tokens (those that are semicolon-delimited) are not stored in memory. 
""" while len(self._tokens) <= self._token_index: # No tokens left - read the next non-blank line in self._linenum += 1 line = self._read_line() if line == '': # End of file return if line.startswith(';'): self._read_multiline_token(line, ignore_multiline) else: self._tokenize(line.rstrip('\r\n')) self._token_index = 0 self._token_index += 1 return self._tokens[self._token_index - 1] class _PreservingCifTokenizer(_CifTokenizer): """A tokenizer subclass which preserves comments, case and whitespace""" def _tokenize(self, line): _CifTokenizer._tokenize(self, line) self._tokens.append(_EndOfLineToken()) def _handle_comment(self, line, start_pos): self._tokens.append(_CommentToken(line[start_pos + 1:])) def _handle_variable_token(self, val, linenum): return _PreservingVariableToken(val, linenum) def _skip_initial_whitespace(self, line, strlen, start_pos): end_pos = start_pos while end_pos < strlen and line[end_pos] in _WHITESPACE: end_pos += 1 if end_pos > start_pos: self._tokens.append(_WhitespaceToken(line[start_pos:end_pos])) return end_pos class _CategoryTokenGroup: """A group of tokens which set a single data item""" def __init__(self, vartoken, valtoken): self.vartoken, self.valtoken = vartoken, valtoken def __str__(self): return ("<_CategoryTokenGroup(%s, %s)>" % (self.vartoken.as_mmcif(), self.valtoken.token.as_mmcif())) def as_mmcif(self): return self.vartoken.as_mmcif() + self.valtoken.as_mmcif() + "\n" def __set_value(self, val): self.valtoken.value = val category = property(lambda self: self.vartoken.category) keyword = property(lambda self: self.vartoken.keyword) value = property(lambda self: self.valtoken.value, __set_value) class _LoopHeaderTokenGroup: """A group of tokens that form the start of a loop_ construct""" def __init__(self, looptoken, category, keywords, end_spacers): self._loop, self.category = looptoken, category self.keywords = keywords self.end_spacers = end_spacers def keyword_index(self, keyword): """Get the zero-based index of the given keyword, or ValueError""" return [k.token.keyword for k in self.keywords].index(keyword) def __str__(self): return ("<_LoopHeaderTokenGroup(%s, %s)>" % (self.category, str([k.token.keyword for k in self.keywords]))) def as_mmcif(self): all_tokens = [self._loop] + self.keywords + self.end_spacers return "".join(x.as_mmcif() for x in all_tokens) class _LoopRowTokenGroup: """A group of tokens that represent one row in a loop_ construct""" def __init__(self, items): self.items = items def as_mmcif(self): return "".join(x.as_mmcif() for x in self.items) class _SpacedToken: """A token with zero or more leading whitespace or newline tokens""" def __init__(self, spacers, token): self.spacers, self.token = spacers, token def as_mmcif(self): return ("".join(x.as_mmcif() for x in self.spacers) + self.token.as_mmcif()) def __get_value(self): if isinstance(self.token, _OmittedValueToken): return None elif isinstance(self.token, _UnknownValueToken): return ihm.unknown else: return self.token.txt def __set_value(self, val): if val is None: self.token = _OmittedValueToken() elif val is ihm.unknown: self.token = _UnknownValueToken() elif isinstance(self.token, _TextValueToken): self.token.txt = val else: self.token = _TextValueToken(val, quote=None) value = property(__get_value, __set_value) class Filter: """Base class for filters used by :meth:`CifTokenReader.read_file`. Typically, a subclass such as :class:`ChangeValueFilter` is used when reading an mmCIF file. :param str target: the mmCIF data item this filter should act on. 
It can be the full name of the data item (including category) such as ``_entity.type``; or just the attribute or keyword name such as ``.type_symbol`` which would match any category (e.g. ``_atom_site.type_symbol``). """ def __init__(self, target): ts = target.lower().split('.') if len(ts) == 1 or not ts[0]: self.category = None elif ts[0].startswith('_'): self.category = ts[0] else: self.category = '_' + ts[0] self.keyword = ts[-1] def _set_category_from_target(self, target): if target.startswith('_'): self.category = target else: self.category = '_' + target self.keyword = None def match_token_category(self, tok): """Return true iff the given token matches the target's category""" return self.category is None or tok.category == self.category def match_token_keyword(self, tok): """Return true iff the given token matches the target's category and keyword""" return self.match_token_category(tok) and tok.keyword == self.keyword def filter_category(self, tok): """Filter the given category token. :return: the original token (which may have been modified), a replacement token, or None if the token should be deleted. """ raise NotImplementedError def filter_loop_header(self, tok): """Filter the given loop header token. :return: the original token (which must not have been modified), a replacement token, or None if the token should be deleted. If the header token is replaced or deleted, all of the original loop rows will also be deleted. """ return tok def get_loop_filter(self, tok): """Given a loop header token, potentially return a handler for each loop row token. This function is also permitted to alter the header in place (but not replace or remove it). Keywords should not be removed from the header (as that may confuse other filters) but can be replaced with null tokens. :return: a callable which will be called for each loop row token (and acts like :meth:`filter_category`), or None if no filtering is needed for this loop. """ raise NotImplementedError class ChangeValueFilter(Filter): """Change any token that sets a data item to ``old`` to be ``new``. For example, this could be used to rename certain chains, or change all residues of a certain type. :param str old: The existing value of the data item. :param str new: The new value of the data item. See :class:`Filter` for a description of the ``target`` parameter. """ def __init__(self, target, old, new): super().__init__(target) self.old, self.new = old, new def filter_category(self, tok): if self.match_token_keyword(tok) and tok.value == self.old: tok.value = self.new return tok def get_loop_filter(self, tok): if self.match_token_category(tok): try: keyword_index = tok.keyword_index(self.keyword) except ValueError: return def loop_filter(t): if t.items[keyword_index].value == self.old: t.items[keyword_index].value = self.new return t return loop_filter class ChangeFuncValueFilter(Filter): """Change any token that sets a data item to x to be f(x). For example, this could be used to perform a search and replace on a string, or match against a regex. :param callable func: A function that is given the existing value of the data item, the category name (e.g. ``_atom_site``), and the keyword name (e.g. ``auth_seq_id``), and should return the new value of the data item (perhaps unchanged). See :class:`Filter` for a description of the ``target`` parameter. 
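For illustration only, a minimal sketch that upper-cases every ``_atom_site.auth_asym_id`` value when rewriting a file with :class:`CifTokenReader` (the file names are placeholders) might look like::

        import ihm.format

        def upcase(value, category, keyword):
            # Omitted ('.') and unknown ('?') values are passed through
            # unchanged, since they are not plain strings
            return value.upper() if isinstance(value, str) else value

        f = ihm.format.ChangeFuncValueFilter('_atom_site.auth_asym_id', upcase)
        with open('input.cif') as fh, open('output.cif', 'w') as out:
            reader = ihm.format.CifTokenReader(fh)
            for token in reader.read_file(filters=[f]):
                out.write(token.as_mmcif())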
""" def __init__(self, target, func): super().__init__(target) self.func = func def filter_category(self, tok): if self.match_token_keyword(tok): tok.value = self.func(tok.value, tok.category, tok.keyword) return tok def get_loop_filter(self, tok): if self.match_token_category(tok): try: keyword_index = tok.keyword_index(self.keyword) except ValueError: return def loop_filter(t): item = t.items[keyword_index] item.value = self.func(item.value, tok.category, self.keyword) return t return loop_filter class RemoveItemFilter(Filter): """Remove any token from the file that sets the given data item. See :class:`Filter` for a description of the ``target`` parameter. """ def filter_category(self, tok): if self.match_token_keyword(tok): return None else: return tok def get_loop_filter(self, tok): if self.match_token_category(tok): try: keyword_index = tok.keyword_index(self.keyword) except ValueError: return # Remove keyword from loop header tok.keywords[keyword_index].spacers = [] tok.keywords[keyword_index].token = _NullToken() def loop_filter(t): # Remove item from loop row (we don't want to pop from # t.items as other filters may reference later indexes) spc = t.items[keyword_index].spacers if len(spc) > 0 and isinstance(spc[0], _EndOfLineToken): del spc[1:] else: t.items[keyword_index].spacers = [] t.items[keyword_index].token = _NullToken() return t return loop_filter class ChangeKeywordFilter(Filter): """Change the keyword in any applicable token to be ``new``. :param str new: The new keyword. See :class:`Filter` for a description of the ``target`` parameter. """ def __init__(self, target, new): super().__init__(target) self.new = new def filter_category(self, tok): if self.match_token_keyword(tok): tok.vartoken.keyword = self.new return tok def get_loop_filter(self, tok): if self.match_token_category(tok): try: keyword_index = tok.keyword_index(self.keyword) except ValueError: return tok.keywords[keyword_index].token.keyword = self.new class ReplaceCategoryFilter(Filter): """Replace any token from the file that sets the given category. This can also be used to completely remove a category if no replacement is given. :param str target: the mmCIF category name this filter should act on, such as ``_entity``. :param str raw_cif: if given, text in mmCIF format which should replace the first instance of the category. :param dumper: if given, a dumper object that should generate mmCIF output to replace the first instance of the category. :type dumper: :class:`ihm.dumper.Dumper` :param system: the System that the given dumper will work on. 
:type system: :class:`ihm.System` """ class _RawCifToken(_Token): __slots__ = ['txt'] category = keyword = None def __init__(self, txt): self.txt = txt def as_mmcif(self): return self.txt def __init__(self, target, raw_cif=None, dumper=None, system=None): self._set_category_from_target(target) self.raw_cif = raw_cif self.dumper = dumper self.system = system #: The number of times the category was found in the mmCIF file self.num_matches = 0 def _get_replacement_token(self): if self.num_matches > 1: return None if self.raw_cif: return self._RawCifToken(self.raw_cif) elif self.dumper and self.system: fh = StringIO() writer = CifWriter(fh) self.dumper.finalize(self.system) self.dumper.dump(self.system, writer) return self._RawCifToken(fh.getvalue()) def filter_category(self, tok): if self.match_token_category(tok): self.num_matches += 1 return self._get_replacement_token() else: return tok def filter_loop_header(self, tok): return self.filter_category(tok) def get_loop_filter(self, tok): return None class CifTokenReader(_PreservingCifTokenizer): """Read an mmCIF file and break it into tokens. Unlike :class:`CifReader` which extracts selected data from an mmCIF file, this class operates on the file at a lower level, splitting it into tokens, and preserving data such as comments and whitespace. This can be used for various housekeeping tasks directly on an mmCIF file, such as changing chain IDs or renaming categories or data items. Use :meth:`read_file` to actually read the file. :param file fh: Open handle to the mmCIF file """ def __init__(self, fh): super().__init__(fh) def read_file(self, filters=None): """Read the file and yield tokens and/or token groups. The exact type of the tokens is subject to change and is not currently documented; however, each token or group object has an ``as_mmcif`` method which returns the corresponding text in mmCIF format. Thus, the file can be reconstructed by concatenating the result of ``as_mmcif`` for all tokens. :exc:`CifParserError` will be raised if the file cannot be parsed. :param filters: if a list of :class:`Filter` objects is provided, the read tokens will be modified or removed by each of these filters before being returned. :type filters: sequence of :class:`Filter` :return: tokens and/or token groups. """ if filters is None: return self._read_file_internal() else: return self._read_file_with_filters(filters) def _read_file_with_filters(self, filters): loop_filters = None remove_all_loop_rows = False for tok in self._read_file_internal(): if isinstance(tok, _CategoryTokenGroup): tok = self._filter_category(tok, filters) elif isinstance(tok, ihm.format._LoopHeaderTokenGroup): new_tok = self._filter_loop_header(tok, filters) if new_tok is not tok: tok = new_tok remove_all_loop_rows = True else: remove_all_loop_rows = False loop_filters = [f.get_loop_filter(tok) for f in filters] loop_filters = [f for f in loop_filters if f is not None] # Did filters remove all keywords from the loop? 
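# If so, drop the header itself and skip all of its data rows, since a loop with no remaining keywords cannot be written back out as valid mmCIF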
if all(isinstance(k.token, _NullToken) for k in tok.keywords): tok = None remove_all_loop_rows = True elif isinstance(tok, ihm.format._LoopRowTokenGroup): if remove_all_loop_rows: tok = None elif loop_filters: tok = self._filter_loop(tok, loop_filters) if tok is not None: yield tok def _filter_category(self, tok, filters): for f in filters: tok = f.filter_category(tok) if tok is None: return return tok def _filter_loop_header(self, tok, filters): orig_tok = tok for f in filters: tok = f.filter_loop_header(tok) if tok is not orig_tok: break return tok def _filter_loop(self, tok, filters): for f in filters: tok = f(tok) if tok is None: return return tok def _read_file_internal(self): while True: token = self._get_token() if token is None: break if isinstance(token, _VariableToken): yield self._read_value(token) elif isinstance(token, _LoopToken): for tok in self._read_loop(token): yield tok # Did we hit the end of the file? if self._token_index < 0: break else: yield token def _get_spaced_token(self): """Get the next token plus any number of leading space/EOL tokens""" spacers = [] while True: token = self._get_token() if isinstance(token, (_EndOfLineToken, _WhitespaceToken)): spacers.append(token) else: return _SpacedToken(spacers, token) def _read_value(self, vartoken): """Read a line that sets a single value, e.g. "_entry.id 1YTI""" spval = self._get_spaced_token() if not isinstance(spval.token, _ValueToken): raise CifParserError( "No valid value found for %s.%s on line %d" % (vartoken.category, vartoken.keyword, self._linenum)) eoltok = self._get_token() if not isinstance(eoltok, _EndOfLineToken): raise CifParserError( "No end of line after %s.%s on line %d" % (vartoken.category, vartoken.keyword, self._linenum)) return _CategoryTokenGroup(vartoken, spval) def _read_loop(self, looptoken): """Handle a loop_ construct""" header = self._read_loop_header(looptoken) # Record original number of keywords, in case the header token # is filtered num_keywords = len(header.keywords) yield header for line in self._read_loop_data(num_keywords): yield line def _read_loop_header(self, looptoken): """Read the set of keywords for a loop_ construct""" category = None keywords = [] while True: spt = self._get_spaced_token() if isinstance(spt.token, _VariableToken): if category is None: category = spt.token.category elif category != spt.token.category: raise CifParserError( "mmCIF files cannot contain multiple " "categories within a single loop at line %d" % self._linenum) keywords.append(spt) elif isinstance(spt.token, _ValueToken): # OK, end of keywords; proceed on to values self._unget_token() return _LoopHeaderTokenGroup(looptoken, category, keywords, spt.spacers) else: raise CifParserError("Was expecting a keyword or value for " "loop at line %d" % self._linenum) def _read_loop_data(self, num_keywords): """Read the data for a loop_ construct""" while True: items = [] for i in range(num_keywords): spt = self._get_spaced_token() if isinstance(spt.token, _ValueToken): items.append(spt) elif i == 0: # OK, end of the loop for s in spt.spacers: yield s if spt.token is not None: self._unget_token() return else: raise CifParserError( "Wrong number of data values in loop " "(should be an exact multiple of the number " "of keys) at line %d" % self._linenum) yield _LoopRowTokenGroup(items) def _int_type_handler(txt, linenum): try: return int(txt) except ValueError as exc: raise ValueError("%s at line %d" % (str(exc), linenum)) def _float_type_handler(txt, linenum): try: return float(txt) except ValueError as exc: 
raise ValueError("%s at line %d" % (str(exc), linenum)) class _BoolTypeHandler: _bool_map = {'YES': True, 'NO': False} def __init__(self, omitted): self.omitted = omitted def __call__(self, txt, linenum): return self._bool_map.get(txt.upper(), self.omitted) def _str_type_handler(txt, linenum): return txt class CifReader(_Reader, _CifTokenizer): """Class to read an mmCIF file and extract some or all of its data. Use :meth:`read_file` to actually read the file. See also :class:`CifTokenReader` for a class that operates on the lower-level structure of an mmCIF file, preserving data such as comments and whitespace. :param file fh: Open handle to the mmCIF file :param dict category_handler: A dict to handle data extracted from the file. Keys are category names (e.g. "_entry") and values are objects that have a `__call__` method and `not_in_file`, `omitted`, and `unknown` attributes. The names of the arguments to this `__call__` method are mmCIF keywords that are extracted from the file (for the keywords tr_vector[N] and rot_matrix[N][M] simply omit the [ and ] characters, since these are not valid for Python identifiers). The object will be called with the data from the file as a set of strings, or `not_in_file`, `omitted` or `unknown` for any keyword that is not present in the file, the mmCIF omitted value (.), or mmCIF unknown value (?) respectively. (mmCIF keywords are case insensitive, so this class always treats them as lowercase regardless of the file contents.) :param unknown_category_handler: A callable (or `None`) that is called for each category in the file that isn't handled; it is given two arguments: the name of the category, and the line in the file at which the category was encountered (if known, otherwise None). :param unknown_keyword_handler: A callable (or `None`) that is called for each keyword in the file that isn't handled (within a category that is handled); it is given three arguments: the names of the category and keyword, and the line in the file at which the keyword was encountered (if known, otherwise None). """ def __init__(self, fh, category_handler, unknown_category_handler=None, unknown_keyword_handler=None): if _format is not None: c_file = _format.ihm_file_new_from_python(fh, False) self._c_format = _format.ihm_reader_new(c_file, False) self.category_handler = category_handler self.unknown_category_handler = unknown_category_handler self.unknown_keyword_handler = unknown_keyword_handler self._category_data = {} _CifTokenizer.__init__(self, fh) def __del__(self): if hasattr(self, '_c_format'): _format.ihm_reader_free(self._c_format) def _read_value(self, vartoken): """Read a line that sets a single value, e.g. 
"_entry.id 1YTI""" # Only read the value if we're interested in this category and key if vartoken.category in self.category_handler: if vartoken.keyword \ in self.category_handler[vartoken.category]._keys: valtoken = self._get_token() if isinstance(valtoken, _ValueToken): ch = self.category_handler[vartoken.category] if vartoken.category not in self._category_data: self._category_data[vartoken.category] = {} if isinstance(valtoken, _OmittedValueToken): val = ch.omitted elif isinstance(valtoken, _UnknownValueToken): val = ch.unknown else: tc = self._get_type_handler(ch, vartoken.keyword) val = tc(valtoken.txt, self._linenum) self._category_data[vartoken.category][vartoken.keyword] \ = val else: raise CifParserError( "No valid value found for %s.%s on line %d" % (vartoken.category, vartoken.keyword, self._linenum)) elif self.unknown_keyword_handler is not None: self.unknown_keyword_handler(vartoken.category, vartoken.keyword, self._linenum) elif self.unknown_category_handler is not None: self.unknown_category_handler(vartoken.category, self._linenum) def _read_loop_keywords(self): """Read the set of keywords for a loop_ construct""" category = None keywords = [] first_line = None keyword_lines = [] while True: token = self._get_token() if isinstance(token, _VariableToken): if category is None: category = token.category first_line = self._linenum elif category != token.category: raise CifParserError( "mmCIF files cannot contain multiple " "categories within a single loop at line %d" % self._linenum) keywords.append(token.keyword) keyword_lines.append(self._linenum) elif isinstance(token, _ValueToken): # OK, end of keywords; proceed on to values self._unget_token() return category, keywords, keyword_lines, first_line else: raise CifParserError("Was expecting a keyword or value for " "loop at line %d" % self._linenum) def _read_loop_data(self, handler, num_wanted_keys, keyword_indices, type_handlers): """Read the data for a loop_ construct""" data = [handler.not_in_file] * num_wanted_keys while True: for i, index in enumerate(keyword_indices): token = self._get_token() if isinstance(token, _ValueToken): if index >= 0: if isinstance(token, _OmittedValueToken): data[index] = handler.omitted elif isinstance(token, _UnknownValueToken): data[index] = handler.unknown else: data[index] = type_handlers[index](token.txt, self._linenum) elif i == 0: # OK, end of the loop self._unget_token() return else: raise CifParserError( "Wrong number of data values in loop " "(should be an exact multiple of the number " "of keys) at line %d" % self._linenum) handler(*data) def _get_type_handler(self, category_handler, keyword): """Return a function that converts keyword string into desired type""" if keyword in category_handler._int_keys: return _int_type_handler elif keyword in category_handler._bool_keys: return _BoolTypeHandler(category_handler.omitted) elif keyword in category_handler._float_keys: return _float_type_handler else: return _str_type_handler def _read_loop(self): """Handle a loop_ construct""" (category, keywords, keyword_lines, first_line) = self._read_loop_keywords() # Skip data if we don't have a handler for it if category in self.category_handler: ch = self.category_handler[category] type_handlers = [self._get_type_handler(ch, k) for k in ch._keys] wanted_key_index = {} for i, k in enumerate(ch._keys): wanted_key_index[k] = i indices = [wanted_key_index.get(k, -1) for k in keywords] if self.unknown_keyword_handler is not None: for k, i, line in zip(keywords, indices, keyword_lines): if i == -1: 
self.unknown_keyword_handler(category, k, line) self._read_loop_data(ch, len(ch._keys), indices, type_handlers) elif self.unknown_category_handler is not None: self.unknown_category_handler(category, first_line) def read_file(self): """Read the file and extract data. Category handlers will be called as data becomes available - for ``loop_`` constructs, this will be once for each row in the loop; for categories (e.g. ``_entry.id model``), this will be once at the very end of the file. If the C-accelerated _format module is available, then it is used instead of the (much slower) Python tokenizer. :exc:`CifParserError` will be raised if the file cannot be parsed. :return: True iff more data blocks are available to be read. """ self._add_category_keys() if hasattr(self, '_c_format'): return self._read_file_c() def call_all_categories(): for cat, data in self._category_data.items(): ch = self.category_handler[cat] ch(*[data.get(k, ch.not_in_file) for k in ch._keys]) # Clear category data for next call to read_file() self._category_data = {} ndata = 0 in_save = False while True: token = self._get_token(ignore_multiline=True) if token is None: break if isinstance(token, _VariableToken): self._read_value(token) elif isinstance(token, _DataToken): ndata += 1 # Only read the first data block if ndata > 1: # Allow reading the next data block self._unget_token() break elif isinstance(token, _LoopToken): self._read_loop() # Did we hit the end of the file? if self._token_index < 0: break elif isinstance(token, _SaveToken): in_save = not in_save if not in_save: call_all_categories() for handler in self.category_handler.values(): handler.end_save_frame() call_all_categories() return ndata > 1 def _read_file_c(self): """Read the file using the C parser""" _format.ihm_reader_remove_all_categories(self._c_format) for category, handler in self.category_handler.items(): func = getattr(handler, '_add_c_handler', None) \ or _format.add_category_handler func(self._c_format, category, handler._keys, frozenset(handler._int_keys), frozenset(handler._float_keys), frozenset(handler._bool_keys), handler) if self.unknown_category_handler is not None: _format.add_unknown_category_handler(self._c_format, self.unknown_category_handler) if self.unknown_keyword_handler is not None: _format.add_unknown_keyword_handler(self._c_format, self.unknown_keyword_handler) try: ret_ok, more_data = _format.ihm_read_file(self._c_format) except _format.FileFormatError as exc: # Convert to the same exception used by the Python code raise CifParserError(str(exc)) return more_data python-ihm-2.7/ihm/format_bcif.py000066400000000000000000000533251503573337200171000ustar00rootroot00000000000000"""Utility classes to handle BinaryCIF format. See https://github.com/molstar/BinaryCIF for a description of the BinaryCIF file format. This module provides classes to read in and write out BinaryCIF files. It is only concerned with handling syntactically correct BinaryCIF - it does not know the set of tables or the mapping to ihm objects. For that, see :mod:`ihm.reader`. """ import struct import sys import inspect import ihm.format import ihm try: from . import _format except ImportError: _format = None # ByteArray types _Int8 = 1 _Int16 = 2 _Int32 = 3 _Uint8 = 4 _Uint16 = 5 _Uint32 = 6 _Float32 = 32 _Float64 = 33 class _Decoder: """Base class for all decoders.""" _kind = None # Encoder kind (in BinaryCIF specification) def __call__(self, enc, data): """Given encoding information `enc` and raw data `data`, return decoded data. 
This can be a generator.""" pass class _StringArrayDecoder(_Decoder): """Decode an array of strings stored as a concatenation of all unique strings, an array of offsets describing substrings, and indices into the offset array.""" _kind = 'StringArray' def __call__(self, enc, data): offsets = list(_decode(enc['offsets'], enc['offsetEncoding'])) indices = _decode(data, enc['dataEncoding']) substr = [] string_data = enc['stringData'] for i in range(0, len(offsets) - 1): substr.append(string_data[offsets[i]:offsets[i + 1]]) # todo: return a listlike class instead? for i in indices: yield None if i < 0 else substr[i] class _ByteArrayDecoder(_Decoder): """Decode an array of numbers of specified type stored as raw bytes""" _kind = 'ByteArray' # Map integer/float type to struct format string _struct_map = { _Int8: 'b', _Int16: 'h', _Int32: 'i', _Uint8: 'B', _Uint16: 'H', _Uint32: 'I', _Float32: 'f', _Float64: 'd', } def __call__(self, enc, data): fmt = self._struct_map[enc['type']] sz = len(data) // struct.calcsize(fmt) # All data is encoded little-endian in bcif return struct.unpack('<' + fmt * sz, data) class _IntegerPackingDecoder(_Decoder): """Decode a (32-bit) integer array stored as 8- or 16-bit values.""" _kind = 'IntegerPacking' def _unsigned_decode(self, enc, data): limit = 0xFF if enc['byteCount'] == 1 else 0xFFFF i = 0 while i < len(data): value = 0 t = data[i] while t == limit: value += t i += 1 t = data[i] yield value + t i += 1 def _signed_decode(self, enc, data): upper_limit = 0x7F if enc['byteCount'] == 1 else 0x7FFF lower_limit = -upper_limit - 1 i = 0 while i < len(data): value = 0 t = data[i] while t == upper_limit or t == lower_limit: value += t i += 1 t = data[i] yield value + t i += 1 def __call__(self, enc, data): if enc['isUnsigned']: return self._unsigned_decode(enc, data) else: return self._signed_decode(enc, data) class _DeltaDecoder(_Decoder): """Decode an integer array stored as an array of consecutive differences.""" _kind = 'Delta' def __call__(self, enc, data): val = enc['origin'] for d in data: val += d yield val class _RunLengthDecoder(_Decoder): """Decode an integer array stored as pairs of (value, number of repeats)""" _kind = 'RunLength' def __call__(self, enc, data): data = list(data) for i in range(0, len(data), 2): for j in range(data[i + 1]): yield data[i] class _FixedPointDecoder(_Decoder): """Decode a floating point array stored as integers multiplied by a given factor.""" _kind = 'FixedPoint' def __call__(self, enc, data): factor = float(enc['factor']) for d in data: yield float(d) / factor class _IntervalQuantizationDecoder(_Decoder): """Decode a floating point array stored as integers quantized within a given interval into a number of discrete steps.""" _kind = 'IntervalQuantization' def __call__(self, enc, data): minval = float(enc['min']) maxval = float(enc['max']) numsteps = int(enc['numSteps']) delta = (maxval - minval) / (numsteps - 1) for d in data: yield minval + delta * d def _get_decoder_map(): m = {} for d in [x[1] for x in inspect.getmembers(sys.modules[__name__], inspect.isclass) if issubclass(x[1], _Decoder)]: m[d._kind] = d() return m # Mapping from BinaryCIF encoding names to _Decoder objects _decoder_map = _get_decoder_map() def _decode(data, encoding): """Decode the data using the list of encodings, and return it.""" for enc in reversed(encoding): data = _decoder_map[enc['kind']](enc, data) return data class _BoolTypeHandler: _bool_map = {'YES': True, 'NO': False} def __init__(self, omitted): self.omitted = omitted def __call__(self, 
txt): return self._bool_map.get(str(txt).upper(), self.omitted) class BinaryCifReader(ihm.format._Reader): """Class to read a BinaryCIF file and extract some or all of its data. Use :meth:`read_file` to actually read the file. See :class:`ihm.format.CifReader` for a description of the parameters. """ def __init__(self, fh, category_handler, unknown_category_handler=None, unknown_keyword_handler=None): if _format is not None: c_file = _format.ihm_file_new_from_python(fh, True) self._c_format = _format.ihm_reader_new(c_file, True) self.category_handler = category_handler self.unknown_category_handler = unknown_category_handler self.unknown_keyword_handler = unknown_keyword_handler self.fh = fh self._file_blocks = None def __del__(self): if hasattr(self, '_c_format'): _format.ihm_reader_free(self._c_format) def read_file(self): """Read the file and extract data. If the C-accelerated _format module is available, then it is used instead of the (much slower) Python reader. :return: True iff more data blocks are available to be read. """ self._add_category_keys() if hasattr(self, '_c_format'): return self._read_file_c() if self._file_blocks is None: self._file_blocks = self._read_msgpack() if len(self._file_blocks) > 0: for category in self._file_blocks[0]['categories']: cat_name = category['name'].lower() handler = self.category_handler.get(cat_name, None) if handler: self._handle_category(handler, category, cat_name) elif self.unknown_category_handler is not None: self.unknown_category_handler(cat_name, 0) del self._file_blocks[0] return len(self._file_blocks) > 0 def _read_file_c(self): """Read the file using the C parser""" _format.ihm_reader_remove_all_categories(self._c_format) for category, handler in self.category_handler.items(): func = getattr(handler, '_add_c_handler', None) \ or _format.add_category_handler func(self._c_format, category, handler._keys, frozenset(handler._int_keys), frozenset(handler._float_keys), frozenset(handler._bool_keys), handler) if self.unknown_category_handler is not None: _format.add_unknown_category_handler(self._c_format, self.unknown_category_handler) if self.unknown_keyword_handler is not None: _format.add_unknown_keyword_handler(self._c_format, self.unknown_keyword_handler) ret_ok, more_data = _format.ihm_read_file(self._c_format) return more_data def _get_type_handler(self, category_handler, keyword): """Return a function that converts keyword string into desired type""" if keyword in category_handler._int_keys: return int elif keyword in category_handler._bool_keys: return _BoolTypeHandler(category_handler.omitted) elif keyword in category_handler._float_keys: return float else: return str def _handle_category(self, handler, category, cat_name): """Extract data for the given category""" num_cols = len(handler._keys) type_handlers = [self._get_type_handler(handler, k) for k in handler._keys] # Read all data for the category; # category_data[col][row] category_data = [None] * num_cols num_rows = 0 # Only read columns that match a handler key (case insensitive) key_index = {} for i, key in enumerate(handler._keys): key_index[key] = i column_indices = [] for c in category['columns']: key_name = c['name'].lower() ki = key_index.get(key_name, None) if ki is not None: column_indices.append(ki) r = self._read_column(c, handler, type_handlers[ki]) num_rows = len(r) category_data[ki] = r elif self.unknown_keyword_handler is not None: self.unknown_keyword_handler(cat_name, key_name, 0) row_data = [handler.not_in_file] * num_cols for row in range(num_rows): # Only 
update data for columns that we read (others will # remain None) for i in column_indices: row_data[i] = category_data[i][row] handler(*row_data) def _read_column(self, column, handler, type_handler): """Read a single category column data""" data = _decode(column['data']['data'], column['data']['encoding']) # Handle 'unknown' values (mask==2) or 'omitted' (mask==1) if column['mask'] is not None: mask = _decode(column['mask']['data'], column['mask']['encoding']) return [handler.unknown if m == 2 else handler.omitted if m == 1 else type_handler(d) for d, m in zip(data, mask)] else: return [type_handler(d) for d in data] def _read_msgpack(self): """Read the msgpack data from the file and return data blocks""" import msgpack d = msgpack.unpack(self.fh, raw=False) return d['dataBlocks'] class _CategoryWriter: def __init__(self, writer, category): self.writer = writer self.category = category self._data = {} def write(self, **kwargs): self._data.update(kwargs) def __enter__(self): return self def __exit__(self, exc_type, exc_value, traceback): for k in self._data: self._data[k] = [self._data[k]] self.writer._add_category(self.category, self._data) class _LoopWriter: def __init__(self, writer, category, keys): self.writer = writer self.category = category self.keys = keys # Remove characters that we can't use in Python identifiers self.python_keys = [k.replace('[', '').replace(']', '') for k in keys] self._values = [] for i in range(len(keys)): self._values.append([]) def write(self, **kwargs): for i, k in enumerate(self.python_keys): val = kwargs.get(k, None) self._values[i].append(val) def __enter__(self): return self def __exit__(self, exc_type, exc_value, traceback): data = {} for key, value in zip(self.keys, self._values): data[key] = value self.writer._add_category(self.category, data) class EncodeError(Exception): """Exception raised if input data cannot be encoded""" pass class _Encoder: """Base class for all encoders""" _kind = None # Encoder kind (in BinaryCIF specification) def __call__(self, data): """Given raw data `data`, return encoded data and a BinaryCIF encoder information dict.""" pass def _get_int_float_type(data): """Determine the int/float type of the given data""" # If anything is float, treat everything as single-precision float for d in data: if isinstance(d, float): return _Float32 # Otherwise, figure out the most appropriate int type min_val = min(data) max_val = max(data) if min_val >= 0: # Unsigned types for typ, limit in [(_Uint8, 0xFF), (_Uint16, 0xFFFF), (_Uint32, 0xFFFFFFFF)]: if max_val <= limit: return typ else: # Signed types for typ, up_limit in [(_Int8, 0x7F), (_Int16, 0x7FFF), (_Int32, 0x7FFFFFFF)]: low_limit = -up_limit - 1 if min_val >= low_limit and max_val <= up_limit: return typ raise TypeError("Cannot represent data as BinaryCIF") class _ByteArrayEncoder(_Encoder): # Map integer/float type to struct format string _struct_map = _ByteArrayDecoder._struct_map def __call__(self, data): ba_type = _get_int_float_type(data) encdict = {'kind': 'ByteArray', 'type': ba_type} fmt = self._struct_map[ba_type] # All data is encoded little-endian in bcif return struct.pack('<' + fmt * len(data), *data), encdict class _DeltaEncoder(_Encoder): """Encode an integer array as an array of consecutive differences.""" def __call__(self, data): # Don't try to compress small arrays; the overhead of the compression # probably will exceed the space savings if len(data) <= 40: return data, None data_type = _get_int_float_type(data) encdict = {'kind': 'Delta', 'origin': data[0], 
'srcType': data_type} encdata = [0] + [data[i] - data[i - 1] for i in range(1, len(data))] return encdata, encdict class _RunLengthEncoder(_Encoder): """Encode an integer array as pairs of (value, number of repeats)""" def __call__(self, data): # Don't try to compress small arrays; the overhead of the compression # probably will exceed the space savings if len(data) <= 40: return data, None data_type = _get_int_float_type(data) encdict = {'kind': 'RunLength', 'srcType': data_type, 'srcSize': len(data)} encdata = [] val = None for d in data: if d != val: if val is not None: encdata.extend((val, repeat)) # noqa: F821 val = d repeat = 1 else: repeat += 1 encdata.extend((val, repeat)) # If we didn't save any space, return the original unchanged if len(encdata) > len(data): return data, None else: return encdata, encdict def _encode(data, encoders): """Encode data using the given encoder objects. Return the encoded data and a list of BinaryCIF encoding dicts.""" encdicts = [] for enc in encoders: data, encdict = enc(data) if encdict is not None: encdicts.append(encdict) return data, encdicts class _MaskedEncoder: """Base class for all encoders that handle potentially masked data""" def __call__(self, data, mask): """Given raw data `data`, and `mask`, return encoded data""" pass class _StringArrayMaskedEncoder(_MaskedEncoder): _int_encoders = [_DeltaEncoder(), _RunLengthEncoder(), _ByteArrayEncoder()] def __call__(self, data, mask): seen_substrs = {} # keys are substrings, values indices sorted_substrs = [] indices = [] for i, reals in enumerate(data): if mask is not None and mask[i]: indices.append(-1) else: s = reals # Map bool to YES/NO strings if isinstance(s, bool): s = ihm.format._Writer._boolmap[s] else: s = str(s) # coerce non-str data to str if s not in seen_substrs: seen_substrs[s] = len(seen_substrs) sorted_substrs.append(s) indices.append(seen_substrs[s]) offsets = [0] total_len = 0 for s in sorted_substrs: total_len += len(s) offsets.append(total_len) data_offsets, enc_offsets = _encode(offsets, self._int_encoders) data_indices, enc_indices = _encode(indices, self._int_encoders) enc_dict = {'kind': 'StringArray', 'dataEncoding': enc_indices, 'stringData': ''.join(sorted_substrs), 'offsetEncoding': enc_offsets, 'offsets': data_offsets} return data_indices, [enc_dict] class _IntArrayMaskedEncoder(_MaskedEncoder): _encoders = [_DeltaEncoder(), _RunLengthEncoder(), _ByteArrayEncoder()] def __call__(self, data, mask): if mask: masked_data = [-1 if m else d for m, d in zip(mask, data)] else: masked_data = data encdata, encoders = _encode(masked_data, self._encoders) return encdata, encoders class _FloatArrayMaskedEncoder(_MaskedEncoder): _encoders = [_ByteArrayEncoder()] def __call__(self, data, mask): if mask: masked_data = [0. 
if m else d for m, d in zip(mask, data)] else: masked_data = data encdata, encoders = _encode(masked_data, self._encoders) return encdata, encoders def _get_mask_and_type(data): """Detect missing/omitted values in `data` and determine the type of the remaining values (str, int, float)""" mask = None seen_types = set() for i, val in enumerate(data): if val is None or val == ihm.unknown: if mask is None: mask = [0] * len(data) mask[i] = 1 if val is None else 2 else: seen_types.add(type(val)) # If a mix of types, coerce to that of the highest precedence # (mixed int/float can be represented as float; mix int/float/str can # be represented as str; bool is represented as str) if not seen_types or bool in seen_types or str in seen_types: return mask, str elif float in seen_types: return mask, float elif int in seen_types: return mask, int for t in seen_types: # Handle numpy float types like Python float # todo: this is a hack if 'numpy.float' in str(t): return mask, float raise ValueError("Cannot determine type of data %s" % data) class BinaryCifWriter(ihm.format._Writer): """Write information to a BinaryCIF file. See :class:`ihm.format.CifWriter` for more information. The constructor takes a single argument - a Python filelike object, open for writing in binary mode.""" _mask_encoders = [_DeltaEncoder(), _RunLengthEncoder(), _ByteArrayEncoder()] def __init__(self, fh): super().__init__(fh) self._blocks = [] self._masked_encoder = {str: _StringArrayMaskedEncoder(), int: _IntArrayMaskedEncoder(), float: _FloatArrayMaskedEncoder()} def category(self, category): """See :meth:`ihm.format.CifWriter.category`.""" return _CategoryWriter(self, category) def loop(self, category, keys): """See :meth:`ihm.format.CifWriter.loop`.""" return _LoopWriter(self, category, keys) def write_comment(self, comment): """See :meth:`ihm.format.CifWriter.write_comment`. .. note:: BinaryCIF does not support comments, so this is a noop. """ pass def _encode_data(self, data): mask, typ = _get_mask_and_type(data) enc = self._masked_encoder[typ] encdata, encs = enc(data, mask) if mask: data_mask, enc_mask = _encode(mask, self._mask_encoders) mask = {'data': data_mask, 'encoding': enc_mask} return mask, encdata, encs def _encode_column(self, name, data): mask, encdata, encs = self._encode_data(data) return {'name': name, 'mask': mask, 'data': {'data': encdata, 'encoding': encs}} def start_block(self, name): """See :meth:`ihm.format.CifWriter.start_block`.""" block = {'header': name, 'categories': []} self._categories = block['categories'] self._blocks.append(block) def end_block(self): # noop - end-of-block is handled by start_block() and flush() pass def _add_category(self, category, data): row_count = 0 cols = [] for k, v in data.items(): row_count = len(v) # Do nothing if the category has no data if row_count == 0: return cols.append(self._encode_column(k, v)) self._categories.append({'name': category, 'columns': cols, 'rowCount': row_count}) def flush(self): data = {'version': ihm.__version__, 'encoder': 'python-ihm library', 'dataBlocks': self._blocks} self._write_msgpack(data) def _write_msgpack(self, data): """Read the msgpack data from the file and return data blocks""" import msgpack msgpack.pack(data, self.fh, use_bin_type=True) python-ihm-2.7/ihm/geometry.py000066400000000000000000000154651503573337200164630ustar00rootroot00000000000000"""Classes for handling geometry. Geometric objects (see :class:`GeometricObject`) are usually used in :class:`~ihm.restraint.GeometricRestraint` objects. 
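For example, a simple object can be constructed directly and then referenced from a restraint. A minimal sketch (the coordinates, radius and name below are arbitrary illustrative values)::

    import ihm.geometry

    # A sphere of radius 10 centered at the origin
    sphere = ihm.geometry.Sphere(
        center=ihm.geometry.Center(0., 0., 0.), radius=10.,
        name='excluded volume')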
""" class Center: """Define the center of a geometric object in Cartesian space. :param float x: x coordinate :param float y: y coordinate :param float z: z coordinate """ def __init__(self, x, y, z): self.x, self.y, self.z = x, y, z class Transformation: """Rotation and translation applied to an object. Transformation objects are typically used in subclasses of :class:`GeometricObject`, or by :class:`ihm.dataset.TransformedDataset`. :param rot_matrix: Rotation matrix (as a 3x3 array of floats) that places the object in its final position. :param tr_vector: Translation vector (as a 3-element float list) that places the object in its final position. """ def __init__(self, rot_matrix, tr_vector): self.rot_matrix, self.tr_vector = rot_matrix, tr_vector """Return the identity transformation. :return: A new identity Transformation. :rtype: :class:`Transformation` """ @classmethod def identity(cls): return cls([[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]], [0., 0., 0.]) class GeometricObject: """A generic geometric object. See also :class:`Sphere`, :class:`Torus`, :class:`Axis`, :class:`Plane`. Geometric objects are typically assigned to one or more :class:`~ihm.restraint.GeometricRestraint` objects. :param str name: A short user-provided name. :param str description: A brief description of the object. """ type = 'other' def __init__(self, name=None, description=None): self.name, self.description = name, description class Sphere(GeometricObject): """A sphere in Cartesian space. :param center: Coordinates of the center of the sphere. :type center: :class:`Center` :param radius: Radius of the sphere. :param transformation: Rotation and translation that moves the sphere from the original center to its final location, if any. :type transformation: :class:`Transformation` :param str name: A short user-provided name. :param str description: A brief description of the object. """ type = 'sphere' def __init__(self, center, radius, transformation=None, name=None, description=None): super().__init__(name, description) self.center, self.transformation = center, transformation self.radius = radius class Torus(GeometricObject): """A torus in Cartesian space. :param center: Coordinates of the center of the torus. :type center: :class:`Center` :param major_radius: The major radius - the distance from the center of the tube to the center of the torus. :param minor_radius: The minor radius - the radius of the tube. :param transformation: Rotation and translation that moves the torus (which by default lies in the xy plane) from the original center to its final location, if any. :type transformation: :class:`Transformation` :param str name: A short user-provided name. :param str description: A brief description of the object. """ type = 'torus' def __init__(self, center, major_radius, minor_radius, transformation=None, name=None, description=None): super().__init__(name, description) self.center, self.transformation = center, transformation self.major_radius, self.minor_radius = major_radius, minor_radius class HalfTorus(GeometricObject): """A section of a :class:`Torus`. This is defined as a surface over part of the torus with a given thickness, and is often used to represent a membrane. :param thickness: The thickness of the surface. :param inner: True if the surface is the 'inner' half of the torus (i.e. closer to the center), False for the outer surface, or None for some other section (described in `description`). See :class:`Torus` for a description of the other parameters. 
""" type = 'half-torus' def __init__(self, center, major_radius, minor_radius, thickness, transformation=None, inner=None, name=None, description=None): super().__init__(name, description) self.center, self.transformation = center, transformation self.major_radius, self.minor_radius = major_radius, minor_radius self.thickness, self.inner = thickness, inner class Axis(GeometricObject): """One of the three Cartesian axes - see :class:`XAxis`, :class:`YAxis`, :class:`ZAxis`. :param transformation: Rotation and translation that moves the axis from the original Cartesian axis to its final location, if any. :type transformation: :class:`Transformation` :param str name: A short user-provided name. :param str description: A brief description of the object. """ type = 'axis' def __init__(self, transformation=None, name=None, description=None): super().__init__(name, description) self.transformation = transformation class XAxis(Axis): """The x Cartesian axis. See :class:`GeometricObject` for a description of the parameters. """ axis_type = 'x-axis' class YAxis(Axis): """The y Cartesian axis. See :class:`GeometricObject` for a description of the parameters. """ axis_type = 'y-axis' class ZAxis(Axis): """The z Cartesian axis. See :class:`GeometricObject` for a description of the parameters. """ axis_type = 'z-axis' class Plane(GeometricObject): """A plane in Cartesian space - see :class:`XYPlane`, :class:`YZPlane`, :class:`XZPlane`. :param transformation: Rotation and translation that moves the plane from the original position to its final location, if any. :type transformation: :class:`Transformation` :param str name: A short user-provided name. :param str description: A brief description of the object. """ type = 'plane' def __init__(self, transformation=None, name=None, description=None): super().__init__(name, description) self.transformation = transformation class XYPlane(Plane): """The xy plane in Cartesian space. See :class:`GeometricObject` for a description of the parameters. """ plane_type = 'xy-plane' class YZPlane(Plane): """The yz plane in Cartesian space. See :class:`GeometricObject` for a description of the parameters. """ plane_type = 'yz-plane' class XZPlane(Plane): """The xz plane in Cartesian space. See :class:`GeometricObject` for a description of the parameters. """ plane_type = 'xz-plane' python-ihm-2.7/ihm/location.py000066400000000000000000000350151503573337200164310ustar00rootroot00000000000000"""Classes for tracking external data used by mmCIF models. """ import os class Location: """Identifies the location where a resource can be found. Do not use this class itself, but one of its subclasses. Typically the resource may be found in a file (either on the local disk or at a DOI) - for this use one of the subclasses of :class:`FileLocation`. Alternatively the resource may be found in an experiment-specific database such as PDB or EMDB - for this use :class:`DatabaseLocation` or one of its subclasses. 
A Location may be passed to - a :class:`~ihm.dataset.Dataset` to point to where an experimental dataset may be found; - an :class:`~ihm.model.Ensemble` to point to coordinates for an entire ensemble, for example as a DCD file; - a :class:`ihm.model.LocalizationDensity` to point to an external localization density, for example in MRC format; - :data:`ihm.System.locations` to point to other files relating to the modeling in general, such as a modeling control script (:class:`WorkflowFileLocation`) or a command script for a visualization package such as ChimeraX (:class:`VisualizationFileLocation`); - a :class:`ihm.protocol.Step` or :class:`ihm.analysis.Step` to describe an individual modeling step; - or a :class:`~ihm.startmodel.StartingModel` to describe how a starting model was constructed. :param str details: Additional details about the dataset, if known. """ # 'details' can differ without affecting dataset equality _eq_keys = [] _allow_duplicates = False def __init__(self, details=None): self.details = details # Locations compare equal iff they are the same class, have the # same attributes, and allow_duplicates=False def _eq_vals(self): if self._allow_duplicates: return id(self) else: return tuple([self.__class__] + [getattr(self, x) for x in self._eq_keys]) def __eq__(self, other): # We can never be equal to None return other is not None and self._eq_vals() == other._eq_vals() def __hash__(self): return hash(self._eq_vals()) class DatabaseLocation(Location): """A dataset stored in an official database (PDB, EMDB, PRIDE, etc.). Generally a subclass should be used specific to the database - for example, :class:`PDBLocation`, :class:`EMDBLocation`, or :class:`PRIDELocation`, although this base class can be used directly for "other" databases not currently supported by the IHM dictionary. :param str db_code: The accession code inside the database. :param str version: The version of the dataset in the database. :param str details: Additional details about the dataset, if known. """ _eq_keys = Location._eq_keys + ['db_name', 'access_code', 'version'] db_name = 'Other' def __init__(self, db_code, version=None, details=None): super().__init__(details) self.access_code = db_code self.version = version def __str__(self): return "<%s.%s(%s)>" % (self.__module__, self.__class__.__name__, repr(self.access_code)) class EMDBLocation(DatabaseLocation): """Something stored in the EMDB database. See :class:`DatabaseLocation` for a description of the parameters and :class:`Location` for discussion of the usage of these objects.""" db_name = 'EMDB' class PDBLocation(DatabaseLocation): """Something stored in the PDB database. See :class:`DatabaseLocation` for a description of the parameters and :class:`Location` for discussion of the usage of these objects.""" db_name = 'PDB' class PDBDevLocation(DatabaseLocation): """Something stored in the PDB-Dev database. This should only be used for legacy entries. All former PDB-Dev entries (now PDB-IHM) should now have PDB identifiers; use :class:`PDBLocation` instead. See :class:`DatabaseLocation` for a description of the parameters and :class:`Location` for discussion of the usage of these objects.""" db_name = 'PDB-Dev' class ModelArchiveLocation(DatabaseLocation): """Something stored in Model Archive. See :class:`DatabaseLocation` for a description of the parameters and :class:`Location` for discussion of the usage of these objects.""" db_name = 'MODEL ARCHIVE' class BMRBLocation(DatabaseLocation): """Something stored in the BMRB database. 
See :class:`DatabaseLocation` for a description of the parameters and :class:`Location` for discussion of the usage of these objects.""" db_name = 'BMRB' class MassIVELocation(DatabaseLocation): """Something stored in the MassIVE database. See :class:`DatabaseLocation` for a description of the parameters and :class:`Location` for discussion of the usage of these objects.""" db_name = 'MASSIVE' class EMPIARLocation(DatabaseLocation): """Something stored in the EMPIAR database. See :class:`DatabaseLocation` for a description of the parameters and :class:`Location` for discussion of the usage of these objects.""" db_name = 'EMPIAR' class SASBDBLocation(DatabaseLocation): """Something stored in the SASBDB database. See :class:`DatabaseLocation` for a description of the parameters and :class:`Location` for discussion of the usage of these objects.""" db_name = 'SASBDB' class PRIDELocation(DatabaseLocation): """Something stored in the PRIDE database. See :class:`DatabaseLocation` for a description of the parameters and :class:`Location` for discussion of the usage of these objects.""" db_name = 'PRIDE' class JPOSTLocation(DatabaseLocation): """Something stored in the JPOST database. See :class:`DatabaseLocation` for a description of the parameters and :class:`Location` for discussion of the usage of these objects.""" db_name = 'jPOSTrepo' class BioGRIDLocation(DatabaseLocation): """Something stored in the BioGRID database. See :class:`DatabaseLocation` for a description of the parameters and :class:`Location` for discussion of the usage of these objects.""" db_name = 'BioGRID' class ProXLLocation(DatabaseLocation): """Something stored in the ProXL database. See :class:`DatabaseLocation` for a description of the parameters and :class:`Location` for discussion of the usage of these objects.""" db_name = 'ProXL' class IProXLocation(DatabaseLocation): """Something stored in the iProX database. See :class:`DatabaseLocation` for a description of the parameters and :class:`Location` for discussion of the usage of these objects.""" db_name = 'iProX' class AlphaFoldDBLocation(DatabaseLocation): """Something stored in the AlphaFoldDB database. See :class:`DatabaseLocation` for a description of the parameters and :class:`Location` for discussion of the usage of these objects.""" db_name = 'AlphaFoldDB' class ProteomeXchangeLocation(DatabaseLocation): """Something stored in the ProteomeXchange database. See :class:`DatabaseLocation` for a description of the parameters and :class:`Location` for discussion of the usage of these objects.""" db_name = 'ProteomeXchange' class BMRbigLocation(DatabaseLocation): """Something stored in the BMRbig database. See :class:`DatabaseLocation` for a description of the parameters and :class:`Location` for discussion of the usage of these objects.""" db_name = 'BMRbig' class FileLocation(Location): """Base class for an individual file or directory stored externally. :param str path: the location of the file or directory (this can be `None` if `repo` is set, to refer to the entire repository) :param repo: object that describes the repository containing the file, or `None` if it is stored on the local disk :type repo: :class:`Repository` :param str details: optional description of the file :param str file_format: optional file type (e.g. 
TXT, PNG, FASTA) """ _eq_keys = Location._eq_keys + ['repo', 'path', 'content_type'] content_type = 'Other' def __init__(self, path, repo=None, details=None, file_format=None): super().__init__(details) self.repo, self.file_format = repo, file_format if repo: self.path = path # Cannot determine file size if non-local self.file_size = None else: if not os.path.exists(path): raise ValueError("%s does not exist" % path) self.file_size = os.stat(path).st_size # Store absolute path in case the working directory changes later self.path = os.path.abspath(path) def __str__(self): return "<%s.%s(%s)>" % (self.__module__, self.__class__.__name__, repr(self.path)) class InputFileLocation(FileLocation): """An externally stored file used as input. See :class:`FileLocation` for a description of the parameters and :class:`Location` for discussion of the usage of these objects. For example, any :class:`~ihm.dataset.Dataset` that isn't stored in a domain-specific database would use this class.""" content_type = 'Input data or restraints' class OutputFileLocation(FileLocation): """An externally stored file used for output. See :class:`FileLocation` for a description of the parameters and :class:`Location` for discussion of the usage of these objects. For example, this can be used to point to an externally-stored :class:`model ensemble <ihm.model.Ensemble>` or a :class:`localization density <ihm.model.LocalizationDensity>`. """ content_type = "Modeling or post-processing output" class WorkflowFileLocation(FileLocation): """An externally stored file that controls the workflow (e.g. a script). See :class:`FileLocation` for a description of the parameters and :class:`Location` for discussion of the usage of these objects. Typically these objects are used to provide more information on how a :class:`~ihm.startmodel.StartingModel` was generated, how an individual :class:`ihm.protocol.Step` or :class:`ihm.analysis.Step` was performed, or to describe the overall modeling (by addition to :data:`ihm.System.locations`). This can be useful to capture fine details of the modeling that aren't covered by the mmCIF dictionary, and to allow models to be precisely reproduced. """ content_type = "Modeling workflow or script" class VisualizationFileLocation(FileLocation): """An externally stored file that is used for visualization. See :class:`FileLocation` for a description of the parameters and :class:`Location` for discussion of the usage of these objects. """ content_type = "Visualization script" class Repository: """A repository containing modeling files, i.e. a collection of related files at a remote, public location. This can include code repositories such as GitHub, file archival services such as Zenodo, or any other service that provides a DOI, such as the supplementary information for a publication. This can also be used if the script plus related files are part of a repository, which has been archived somewhere with a DOI. This will be used to construct permanent references to files used in this modeling, even if they haven't been uploaded to a database such as PDB or EMDB. See :meth:`ihm.System.update_locations_in_repositories`. See also :class:`FileLocation`. :param str doi: the Digital Object Identifier for the repository :param str root: the path on the local disk to the top-level directory of the repository, or `None` if files in this repository aren't checked out. :param str url: If given, a location that this repository can be downloaded from. :param str top_directory: If given, prefix all paths for files in this repository with this value.
This is useful when the archived version of the repository is found in a subdirectory at the URL or DOI (for example, GitHub repositories archived at Zenodo get placed in a subdirectory named for the repository and git hash). :param str details: Additional text describing this repository """ reference_type = 'DOI' # Two repositories compare equal if their DOIs and URLs are the same def __eq__(self, other): return self.doi == other.doi and self.url == other.url def __hash__(self): return hash((self.doi, self.url)) def __str__(self): return "<ihm.location.Repository(%s)>" % self.doi def __init__(self, doi, root=None, url=None, top_directory=None, details=None): # todo: DOI should be optional (could also use URL, local path) self.doi = doi self.url, self.top_directory = url, top_directory self.details = details if root is not None: # Store absolute path in case the working directory changes later self._root = os.path.abspath(root) reference = property(lambda self: self.doi) def __get_reference_provider(self): if self.reference and 'zenodo' in self.reference: return 'Zenodo' reference_provider = property(__get_reference_provider) def __get_refers_to(self): if self.url: return 'Archive' if self.url.endswith(".zip") else 'File' return 'Other' refers_to = property(__get_refers_to) @staticmethod def _update_in_repos(fileloc, repos): """If the given FileLocation maps to somewhere within one of the passed repositories, update it to reflect that.""" if fileloc.repo: return orig_path = fileloc.path for repo in repos: relpath = os.path.relpath(orig_path, repo._root) if not relpath.startswith('..'): # Prefer the shortest paths if multiple repositories can match if fileloc.repo is None or len(fileloc.path) > len(relpath): fileloc.repo = repo fileloc.path = relpath def _get_full_path(self, path): """Prefix the given path with our top-level directory""" return os.path.join(self.top_directory or "", path) python-ihm-2.7/ihm/metadata.py000066400000000000000000001274051503573337200164050ustar00rootroot00000000000000"""Classes to extract metadata from various input files. Often input files contain metadata that would be useful to include in the mmCIF file, but the metadata is stored in a different way for each domain-specific file type. For example, MRC files used for electron microscopy maps may contain an EMDB identifier, which the mmCIF file can point to in preference to the local file. This module provides classes for each file type to extract suitable metadata where available. """ import ihm from .
import location, dataset, startmodel, util from .startmodel import SequenceIdentityDenominator import ihm.source import ihm.citations import ihm.reader import ihm.format import ihm.format_bcif import operator import struct import json import string import warnings import re import collections import urllib.request import urllib.error def _get_modeller(version, date): return ihm.Software( name='MODELLER', classification='comparative modeling', description='Comparative modeling by satisfaction ' 'of spatial restraints, build ' + date, location='https://salilab.org/modeller/', version=version, citation=ihm.citations.modeller) ModellerTemplate = collections.namedtuple( 'ModellerTemplate', ['name', 'template_begin', 'template_chain', 'template_end', 'target_begin', 'target_chain', 'target_end', 'pct_seq_id']) def _handle_modeller_template(info, template_path_map, target_dataset, alnfile): """Create a Template object from Modeller PDB header information.""" template_seq_id_range = (int(info.template_begin), int(info.template_end)) seq_id_range = (int(info.target_begin), int(info.target_end)) sequence_identity = startmodel.SequenceIdentity( float(info.pct_seq_id), SequenceIdentityDenominator.SHORTER_LENGTH) # Assume a code of 1abc, 1abc_N, 1abcX, or 1abcX_N refers # to a real PDB structure m = re.match(r'(\d[a-zA-Z0-9]{3})[a-zA-Z]?(_.*)?$', info.name) if m: template_db_code = m.group(1).upper() loc = location.PDBLocation(template_db_code) else: # Otherwise, look up the PDB file in TEMPLATE PATH remarks fname = template_path_map[info.name] loc = location.InputFileLocation( fname, details="Template for comparative modeling") d = dataset.PDBDataset(loc, details=loc.details) # Make the comparative model dataset derive from the template's target_dataset.parents.append(d) return (info.target_chain, startmodel.Template( dataset=d, asym_id=info.template_chain, seq_id_range=seq_id_range, template_seq_id_range=template_seq_id_range, sequence_identity=sequence_identity, alignment_file=alnfile)) class Parser: """Base class for all metadata parsers.""" def parse_file(self, filename): """Extract metadata from the given file. :param str filename: the file to extract metadata from. :return: a dict with extracted metadata (generally including a :class:`~ihm.dataset.Dataset`).""" pass class MRCParser(Parser): """Extract metadata from an EM density map (MRC file).""" def parse_file(self, filename): """Extract metadata. See :meth:`Parser.parse_file` for details. :return: a dict with key `dataset` pointing to the density map, as an EMDB entry if the file contains EMDB headers, otherwise to the file itself. If the file turns out to be an EMDB entry, this will also query the EMDB web API (if available) to extract version information and details for the dataset. 
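A minimal usage sketch (the map file name is a placeholder)::

    import ihm.metadata

    parser = ihm.metadata.MRCParser()
    metadata = parser.parse_file('localization.mrc')
    em_map_dataset = metadata['dataset']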
""" emdb = self._get_emdb(filename) if emdb: loc = _ParsedEMDBLocation(emdb) else: loc = location.InputFileLocation( filename, details="Electron microscopy density map") return {'dataset': dataset.EMDensityDataset(loc)} def _get_emdb(self, filename): """Return the EMDB id of the file, or None.""" r = re.compile(b'EMDATABANK\\.org.*(EMD\\-\\d+)') with open(filename, 'rb') as fh: fh.seek(220) # Offset of number of labels num_labels_raw = fh.read(4) # Number of labels in MRC is usually a very small number, so it's # very likely to be the smaller of the big-endian and little-endian # interpretations of this field num_labels_big, = struct.unpack_from('>i', num_labels_raw) num_labels_little, = struct.unpack_from(' 62 and first_line[62] in string.digits): self._parse_official_pdb(fh, first_line, ret) elif first_line.startswith('EXPDTA DERIVED FROM PDB:'): self._parse_derived_from_pdb(fh, first_line, local_file, ret) elif first_line.startswith('EXPDTA DERIVED FROM COMPARATIVE ' 'MODEL, DOI:'): self._parse_derived_from_comp_model(fh, first_line, local_file, ret) elif first_line.startswith('EXPDTA DERIVED FROM INTEGRATIVE ' 'MODEL, DOI:'): self._parse_derived_from_int_model(fh, first_line, local_file, ret) elif first_line.startswith( 'EXPDTA THEORETICAL MODEL, MODELLER'): self._parse_modeller_model(fh, first_line, local_file, filename, ret) elif first_line.startswith('REMARK 99 Chain ID :'): self._parse_phyre_model(fh, first_line, local_file, filename, ret) elif first_line.startswith('TITLE SWISS-MODEL SERVER'): self._parse_swiss_model(fh, first_line, local_file, filename, ret) else: self._parse_unknown_model(fh, first_line, local_file, filename, ret) return ret def _parse_official_pdb(self, fh, first_line, ret): """Handle a file that's from the official PDB database.""" version, details, metadata, entity_source \ = self._parse_pdb_records(fh, first_line) loc = location.PDBLocation(first_line[62:66].strip(), version, details) ret['entity_source'] = entity_source ret['metadata'] = metadata ret['dataset'] = dataset.PDBDataset(loc, details=loc.details) def _parse_derived_from_pdb(self, fh, first_line, local_file, ret): # Model derived from a PDB structure; treat as a local experimental # model with the official PDB as a parent local_file.details = self._parse_details(fh) db_code = first_line[27:].strip() d = dataset.PDBDataset(local_file, details=local_file.details) d.parents.append(dataset.PDBDataset(location.PDBLocation(db_code))) ret['dataset'] = d def _parse_derived_from_comp_model(self, fh, first_line, local_file, ret): """Model derived from a comparative model; link back to the original model as a parent""" self._parse_derived_from_model( fh, first_line, local_file, ret, dataset.ComparativeModelDataset, 'comparative') def _parse_derived_from_int_model(self, fh, first_line, local_file, ret): """Model derived from an integrative model; link back to the original model as a parent""" self._parse_derived_from_model( fh, first_line, local_file, ret, dataset.IntegrativeModelDataset, 'integrative') def _parse_derived_from_model(self, fh, first_line, local_file, ret, dataset_class, model_type): local_file.details = self._parse_details(fh) d = dataset_class(local_file) repo = location.Repository(doi=first_line[46:].strip()) # todo: better specify an unknown path orig_loc = location.InputFileLocation( repo=repo, path='.', details="Starting %s model structure" % model_type) d.parents.append(dataset_class(orig_loc)) ret['dataset'] = d def _parse_modeller_model(self, fh, first_line, local_file, filename, ret): 
version, date = first_line[38:].rstrip('\r\n').split(' ', 1) s = _get_modeller(version, date) ret['software'].append(s) self._handle_comparative_model(local_file, filename, ret) def _parse_phyre_model(self, fh, first_line, local_file, filename, ret): # Model generated by Phyre2 s = ihm.Software( name='Phyre2', classification='protein homology modeling', description='Protein Homology/analogY Recognition ' 'Engine V 2.0', version='2.0', location='http://www.sbg.bio.ic.ac.uk/~phyre2/', citation=ihm.citations.phyre2) ret['software'].append(s) self._handle_comparative_model(local_file, filename, ret) def _parse_swiss_model(self, fh, first_line, local_file, filename, ret): # Model generated by SWISS-MODEL meta = _get_swiss_model_metadata(filename) s = ihm.Software( name='SWISS-MODEL', classification='protein homology modeling', description='SWISS-MODEL: homology modelling of protein ' 'structures and complexes, using %s engine' % meta.get('info', {}).get('ENGIN', 'unknown'), version=meta.get('info', {}).get('VERSN', ihm.unknown), location='https://swissmodel.expasy.org/', citation=ihm.citations.swiss_model) ret['software'].append(s) comp_model_ds = dataset.ComparativeModelDataset(local_file) ret['dataset'] = comp_model_ds ret['templates'] = self._add_swiss_model_templates( local_file, meta, comp_model_ds, ret) def _add_swiss_model_templates(self, local_file, meta, comp_model_ds, ret): """Add template information extracted from SWISS-MODEL PDB metadata""" ret_templates = {} templates = [v for k, v in sorted(((k, v) for k, v in meta.items() if k.startswith('TEMPLATE')), key=operator.itemgetter(0))] for t in templates: loc = location.PDBLocation(t['PDBID']) d = dataset.PDBDataset(loc) # Make the comparative model dataset derive from the template's comp_model_ds.parents.append(d) for chain in t['MMCIF']: # todo: check we're using the right chain ID and that target # and template chain IDs really are always the same offset = int(t[chain, 'OFF']) tgt_seq, tgt_len = _parse_seq(t[chain, 'TRG']) tmpl_seq, tmpl_len = _parse_seq(t[chain, 'TPL']) tgt_rng, tmpl_rng = _get_aligned_region(tgt_seq, tmpl_seq) # apply offset tmpl_rng = (tmpl_rng[0] + offset, tmpl_rng[1] + offset) seq_id = float(t['SID']) seq_id = startmodel.SequenceIdentity( float(t['SID']), SequenceIdentityDenominator.NUM_ALIGNED_WITHOUT_GAPS) tmpl = startmodel.Template( dataset=d, asym_id=chain, seq_id_range=tgt_rng, template_seq_id_range=tmpl_rng, sequence_identity=seq_id, alignment_file=local_file) ret_templates[chain] = [tmpl] return ret_templates def _parse_unknown_model(self, fh, first_line, local_file, filename, ret): # todo: revisit assumption that all unknown source PDBs are # comparative models self._handle_comparative_model(local_file, filename, ret) def _handle_comparative_model(self, local_file, pdbname, ret): d = dataset.ComparativeModelDataset(local_file) ret['dataset'] = d ret['templates'], ret['script'] \ = self._get_templates_script(pdbname, d) def _get_templates_script(self, pdbname, target_dataset): template_path_map = {} alnfile = None script = None alnfilere = re.compile(r'REMARK 6 ALIGNMENT: (\S+)') scriptre = re.compile(r'REMARK 6 SCRIPT: (\S+)') tmppathre = re.compile(r'REMARK 6 TEMPLATE PATH (\S+) (\S+)') tmpre = re.compile(r'REMARK 6 TEMPLATE: ' r'(\S+) (\S+):(\S+) \- (\S+):\S+ ' r'MODELS (\S+):(\S+) \- (\S+):\S+ AT (\S+)%') template_info = [] with open(pdbname) as fh: for line in fh: if line.startswith('ATOM'): # Read only the header break m = tmppathre.match(line) if m: template_path_map[m.group(1)] = \ 
util._get_relative_path(pdbname, m.group(2)) m = alnfilere.match(line) if m: # Path to alignment is relative to that of the PDB file fname = util._get_relative_path(pdbname, m.group(1)) alnfile = location.InputFileLocation( fname, details="Alignment for starting comparative model") m = scriptre.match(line) if m: # Path to script is relative to that of the PDB file fname = util._get_relative_path(pdbname, m.group(1)) script = location.WorkflowFileLocation( fname, details="Script for starting comparative model") m = tmpre.match(line) if m: t = ModellerTemplate( name=m.group(1), template_begin=m.group(2), template_chain=m.group(3), template_end=m.group(4), target_begin=m.group(5), target_chain=m.group(6), target_end=m.group(7), pct_seq_id=m.group(8)) template_info.append(t) templates = {} for t in template_info: chain, template = _handle_modeller_template( t, template_path_map, target_dataset, alnfile) if chain not in templates: templates[chain] = [] templates[chain].append(template) # Sort templates by starting residue, then ending residue for chain in templates.keys(): templates[chain] = sorted(templates[chain], key=operator.attrgetter('seq_id_range')) return templates, script def _parse_pdb_records(self, fh, first_line): """Extract information from an official PDB""" metadata = [] details = '' compnd = '' source = '' for line in fh: if line.startswith('TITLE'): details += line[10:].rstrip() elif line.startswith('COMPND'): compnd += line[10:].rstrip() elif line.startswith('SOURCE'): source += line[10:].rstrip() elif line.startswith('HELIX'): metadata.append(startmodel.PDBHelix(line)) return (first_line[50:59].strip(), details if details else None, metadata, self._make_entity_source(compnd, source)) def _make_one_entity_source(self, compnd, source): """Make a single ihm.source.Source object""" def make_from_source(cls): return cls(scientific_name=source.get('ORGANISM_SCIENTIFIC'), common_name=source.get('ORGANISM_COMMON'), strain=source.get('STRAIN'), ncbi_taxonomy_id=source.get('ORGANISM_TAXID')) if compnd.get('ENGINEERED', None) == 'YES': gene = make_from_source(ihm.source.Details) host = ihm.source.Details( scientific_name=source.get('EXPRESSION_SYSTEM'), common_name=source.get('EXPRESSION_SYSTEM_COMMON'), strain=source.get('EXPRESSION_SYSTEM_STRAIN'), ncbi_taxonomy_id=source.get('EXPRESSION_SYSTEM_TAXID')) return ihm.source.Manipulated(gene=gene, host=host) else: if source.get('SYNTHETIC', None) == 'YES': cls = ihm.source.Synthetic else: cls = ihm.source.Natural return make_from_source(cls) def _make_entity_source(self, compnd, source): """Make ihm.source.Source objects given PDB COMPND and SOURCE lines""" entity_source = {} # Convert each string into dict of mol_id vs keys compnd = self._parse_pdb_mol_id(compnd) source = self._parse_pdb_mol_id(source) for mol_id, c in compnd.items(): if mol_id in source and 'CHAIN' in c: s = self._make_one_entity_source(c, source[mol_id]) for chain in c['CHAIN'].split(','): entity_source[chain.strip()] = s return entity_source def _parse_pdb_mol_id(self, txt): """Convert text COMPND or SOURCE records to a dict of mol_id vs keys""" d = {} mol_id = None for pair in txt.split(';'): spl = pair.split(':') if len(spl) == 2: key = spl[0].upper().strip() val = spl[1].upper().strip() if key == 'MOL_ID': mol_id = d[val] = {} elif mol_id is not None: mol_id[key] = val return d def _parse_details(self, fh): """Extract TITLE records from a PDB file""" details = '' for line in fh: if line.startswith('TITLE'): details += line[10:].rstrip() elif 
line.startswith('ATOM'): break return details class _Database2Handler(ihm.reader.Handler): def __init__(self, m): self.m = m def __call__(self, database_id, database_code): self.m['db'][database_id.upper()] = database_code class _StructHandler(ihm.reader.Handler): def __init__(self, m): self.m = m def __call__(self, title): self.m['title'] = title class _AuditRevHistHandler(ihm.reader.Handler): def __init__(self, m): self.m = m def __call__(self, revision_date): self.m['version'] = revision_date class _ExptlHandler(ihm.reader.Handler): def __init__(self, m): self.m = m def __call__(self, method): # Modeller currently sets _exptl.method, not _software if method.startswith('model, MODELLER Version '): version, date = method[24:].split(' ', 1) s = _get_modeller(version, date) self.m['software'].append(s) class _ModellerHandler(ihm.reader.Handler): """Handle the Modeller-specific _modeller category""" def __init__(self, m, filename): self.m = m self.filename = filename self.m['alnfile'] = self.m['script'] = None def __call__(self, alignment, script): if alignment: # Paths are relative to that of the mmCIF file fname = util._get_relative_path(self.filename, alignment) self.m['alnfile'] = location.InputFileLocation( fname, details="Alignment for starting comparative model") if script: fname = util._get_relative_path(self.filename, script) self.m['script'] = location.WorkflowFileLocation( fname, details="Script for starting comparative model") class _ModellerTemplateHandler(ihm.reader.Handler): """Handle the Modeller-specific _modeller_template category""" def __init__(self, m): self.m = m self.m['modeller_templates'] = [] def __call__(self, name, template_begin, template_end, target_begin, target_end, pct_seq_id): tmp_begin, tmp_chain = template_begin.split(':', 1) tmp_end, tmp_chain = template_end.split(':', 1) tgt_begin, tgt_chain = target_begin.split(':', 1) tgt_end, tgt_chain = target_end.split(':', 1) t = ModellerTemplate(name=name, template_begin=tmp_begin, template_end=tmp_end, template_chain=tmp_chain, target_begin=tgt_begin, target_end=tgt_end, target_chain=tgt_chain, pct_seq_id=pct_seq_id) self.m['modeller_templates'].append(t) class _ModelCifAlignment: """Store alignment information from a ModelCIF file""" def __init__(self): self.target = self.template = self.seq_id = None def get_template_object(self, target_dataset): """Convert the alignment information into an IHM Template object""" return self.template.template.get_template_object(target_dataset, aln=self) class _TemplateRange: """Store information about a template residue range from a ModelCIF file""" def __init__(self): self.seq_id_range = None self.template = None class _TargetRange: """Store information about a target residue range from a ModelCIF file""" def __init__(self): self.seq_id_range = None self.asym_id = None class _Template: """Store template information from a ModelCIF file""" # Map ModelCIF ma_template_ref_db_details.db_name to IHMCIF equivalents _modelcif_dbmap = {'PDB': (dataset.PDBDataset, location.PDBLocation), 'PDB-DEV': (dataset.IntegrativeModelDataset, location.PDBDevLocation), 'MA': (dataset.DeNovoModelDataset, location.ModelArchiveLocation), 'ALPHAFOLDDB': (dataset.DeNovoModelDataset, location.AlphaFoldDBLocation)} def __init__(self): self.auth_asym_id = self.db_name = self.db_accession_code = None self.db_version_date = self.target_asym_id = None def get_template_object(self, target_dataset, aln=None): """Convert the template information into an IHM Template object""" dsetcls, loccls = 
self._modelcif_dbmap.get( self.db_name.upper(), (dataset.Dataset, location.DatabaseLocation)) loc = loccls(db_code=self.db_accession_code, version=self.db_version_date) d = dsetcls(location=loc) # Make the computed model dataset derive from the template's target_dataset.parents.append(d) t = startmodel.Template( dataset=d, asym_id=self.auth_asym_id, seq_id_range=aln.target.seq_id_range if aln else (None, None), template_seq_id_range=aln.template.seq_id_range if aln else (None, None), sequence_identity=aln.seq_id if aln else None) return aln.target.asym_id if aln else self.target_asym_id, t class _SystemReader: """A minimal implementation, so we can use some of the Handlers in ihm.reader but get outputs in the results dict.""" def __init__(self, m): self.software = ihm.reader.IDMapper(m['software'], ihm.Software, *(None,) * 4) self.citations = ihm.reader.IDMapper(None, ihm.Citation, *(None,) * 8) self.alignments = ihm.reader.IDMapper(m['alignments'], _ModelCifAlignment) self.template_ranges = ihm.reader.IDMapper(None, _TemplateRange) self.target_ranges = ihm.reader.IDMapper(None, _TargetRange) self.templates = ihm.reader.IDMapper(m['templates'], _Template) self.entities = ihm.reader.IDMapper(None, ihm.Entity, []) self.asym_units = ihm.reader.IDMapper(m['asyms'], ihm.AsymUnit, None) self.src_gens = ihm.reader.IDMapper(None, ihm.source.Manipulated) self.src_nats = ihm.reader.IDMapper(None, ihm.source.Natural) self.src_syns = ihm.reader.IDMapper(None, ihm.source.Synthetic) class _TemplateDetailsHandler(ihm.reader.Handler): """Extract template information from a ModelCIF file""" def __init__(self, sysr): self.sysr = sysr def __call__(self, template_id, target_asym_id, template_auth_asym_id): template = self.sysr.templates.get_by_id(template_id) template.auth_asym_id = template_auth_asym_id template.target_asym_id = target_asym_id class _TemplateRefDBDetailsHandler(ihm.reader.Handler): """Extract template database information from a ModelCIF file""" def __init__(self, sysr): self.sysr = sysr def __call__(self, template_id, db_name, db_accession_code, db_version_date): template = self.sysr.templates.get_by_id(template_id) template.db_name = db_name template.db_accession_code = db_accession_code template.db_version_date = db_version_date class _TemplatePolySegmentHandler(ihm.reader.Handler): """Extract template residue range information from a ModelCIF file""" def __init__(self, sysr): self.sysr = sysr def __call__(self, id, template_id, residue_number_begin, residue_number_end): tr = self.sysr.template_ranges.get_by_id(id) tr.seq_id_range = (self.get_int(residue_number_begin), self.get_int(residue_number_end)) tr.template = self.sysr.templates.get_by_id(template_id) class _TemplatePolyMappingHandler(ihm.reader.Handler): """Extract target residue range information from a ModelCIF file""" def __init__(self, sysr): self.sysr = sysr def __call__(self, id, template_segment_id, target_asym_id, target_seq_id_begin, target_seq_id_end): m = self.sysr.target_ranges.get_by_id((template_segment_id, target_asym_id)) m.seq_id_range = (self.get_int(target_seq_id_begin), self.get_int(target_seq_id_end)) class _SeqIDMapper: """Map ModelCIF sequence identity to IHMCIF equivalent""" identity_map = { "length of the shorter sequence": SequenceIdentityDenominator.SHORTER_LENGTH, "number of aligned positions (including gaps)": SequenceIdentityDenominator.NUM_ALIGNED_WITH_GAPS} def __call__(self, pct_id, denom): denom = self.identity_map.get( denom.lower() if denom else None, SequenceIdentityDenominator.OTHER) return 
startmodel.SequenceIdentity( value=pct_id, denominator=denom) class _AlignmentDetailsHandler(ihm.reader.Handler): """Read pairwise alignments (ma_alignment_details table)""" def __init__(self, sysr): self.sysr = sysr self.seq_id_mapper = _SeqIDMapper() def __call__(self, alignment_id, template_segment_id, target_asym_id, percent_sequence_identity, sequence_identity_denominator): aln = self.sysr.alignments.get_by_id(alignment_id) aln.seq_id = self.seq_id_mapper( self.get_float(percent_sequence_identity), sequence_identity_denominator) tgt_rng = self.sysr.target_ranges.get_by_id((template_segment_id, target_asym_id)) tmpl_rng = self.sysr.template_ranges.get_by_id(template_segment_id) aln.target = tgt_rng aln.target.asym_id = target_asym_id aln.template = tmpl_rng class _ModBaseLocation(location.DatabaseLocation): """A model deposited in ModBase""" def __init__(self, db_code, version=None, details=None): # Use details to describe ModBase, ignoring the file title super().__init__( db_code, version=version, details="ModBase database of comparative protein structure models") class _CIFParserBase(Parser): # Map PDBx database_2.database_name to IHMCIF equivalents dbmap = {'PDB': (location.PDBLocation, dataset.PDBDataset), 'PDB-DEV': (location.PDBDevLocation, dataset.IntegrativeModelDataset), 'MODELARCHIVE': (location.ModelArchiveLocation, dataset.DeNovoModelDataset), 'ALPHAFOLDDB': (location.AlphaFoldDBLocation, dataset.DeNovoModelDataset), 'MODBASE': (_ModBaseLocation, dataset.ComparativeModelDataset)} def parse_file(self, filename): m = {'db': {}, 'title': 'Starting model structure', 'software': [], 'templates': [], 'alignments': [], 'asyms': []} with self._open_file(filename) as fh: dbh = _Database2Handler(m) structh = _StructHandler(m) arevhisth = _AuditRevHistHandler(m) exptlh = _ExptlHandler(m) modellerh = _ModellerHandler(m, filename) modtmplh = _ModellerTemplateHandler(m) sysr = _SystemReader(m) r = self._reader_class( fh, {'_database_2': dbh, '_struct': structh, '_pdbx_audit_revision_history': arevhisth, '_exptl': exptlh, '_modeller': modellerh, '_modeller_template': modtmplh, '_software': ihm.reader._SoftwareHandler(sysr), '_citation': ihm.reader._CitationHandler(sysr), '_struct_asym': ihm.reader._StructAsymHandler(sysr), '_entity': ihm.reader._EntityHandler(sysr), '_entity_src_nat': ihm.reader._EntitySrcNatHandler(sysr), '_pdbx_entity_src_syn': ihm.reader._EntitySrcSynHandler(sysr), '_entity_src_gen': ihm.reader._EntitySrcGenHandler(sysr), '_citation_author': ihm.reader._CitationAuthorHandler(sysr), '_ma_template_details': _TemplateDetailsHandler(sysr), '_ma_template_ref_db_details': _TemplateRefDBDetailsHandler(sysr), '_ma_template_poly_segment': _TemplatePolySegmentHandler(sysr), '_ma_target_template_poly_mapping': _TemplatePolyMappingHandler(sysr), '_ma_alignment_details': _AlignmentDetailsHandler(sysr)}) r.read_file() dset = self._get_dataset(filename, m) return {'dataset': dset, 'software': m['software'], 'templates': self._get_templates(filename, m, dset), 'entity_source': {asym.id: asym.entity.source for asym in m['asyms']}, 'script': m['script']} def _get_dataset(self, filename, m): # Check for known databases. 
Note that if a file is in multiple # databases, we currently return one "at random" for dbid, dbcode in m['db'].items(): if dbid in self.dbmap: loccls, dsetcls = self.dbmap[dbid] loc = loccls(db_code=dbcode, version=m.get('version'), details=m['title']) return dsetcls(location=loc, details=loc.details) # Fall back to a local file loc = location.InputFileLocation(filename, details=m['title']) return dataset.ComparativeModelDataset( location=loc, details=loc.details) def _get_templates(self, filename, m, dset): alnfile = m['alnfile'] template_path_map = {} templates = {} def _handle_templates(): # Use Modeller-provided templates if available if m['modeller_templates']: for t in m['modeller_templates']: yield _handle_modeller_template( t, template_path_map, dset, alnfile) # Otherwise, use ModelCIF templates else: seen_templates = set() for aln in m['alignments']: seen_templates.add(aln.template.template) yield aln.get_template_object(dset) # Handle any unaligned templates (e.g. AlphaFold) for t in m['templates']: if t not in seen_templates: yield t.get_template_object(dset) for chain, template in _handle_templates(): if chain not in templates: templates[chain] = [] templates[chain].append(template) # Sort templates by starting residue, then ending residue for chain in templates.keys(): templates[chain] = sorted(templates[chain], key=operator.attrgetter('seq_id_range')) return templates class CIFParser(_CIFParserBase): """Extract metadata (e.g. PDB ID, comparative modeling templates) from an mmCIF file. This currently handles mmCIF files from the PDB database itself, models compliant with the ModelCIF dictionary, plus files from Model Archive or the outputs from the MODELLER comparative modeling package. See also :class:`PDBParser` for coordinate files in legacy PDB format, or :class:`BinaryCIFParser` for BinaryCIF format. """ _reader_class = ihm.format.CifReader def _open_file(self, filename): return open(filename) def parse_file(self, filename): """Extract metadata. See :meth:`Parser.parse_file` for details. :param str filename: the file to extract metadata from. :return: a dict with key `dataset` pointing to the coordinate file, as an entry in the PDB or Model Archive databases if the file contains appropriate headers, otherwise to the file itself; 'templates' pointing to a dict with keys the asym (chain) IDs in the PDB file and values the list of comparative model templates used to model that chain as :class:`ihm.startmodel.Template` objects; 'entity_source' pointing to a dict with keys the asym IDs and values :class:`ihm.source.Source` objects; 'software' pointing to a list of software used to generate the file (as :class:`ihm.Software` objects); 'script' pointing to the script used to generate the file, if any (as :class:`ihm.location.WorkflowFileLocation` objects). """ return super().parse_file(filename) class BinaryCIFParser(_CIFParserBase): """Extract metadata from a BinaryCIF file. This works in a very similar fashion to :class:`CIFParser`; see that class for more information. """ _reader_class = ihm.format_bcif.BinaryCifReader def _open_file(self, filename): return open(filename, 'rb') python-ihm-2.7/ihm/model.py000066400000000000000000000534111503573337200157210ustar00rootroot00000000000000"""Classes for handling models (sets of coordinates) as well as groups of models. """ import struct import itertools from ihm.util import _text_choice_property, _check_residue_range class Sphere: """Coordinates of part of the model represented by a sphere. 
See :meth:`Model.get_spheres` for more details. :param asym_unit: The asymmetric unit that this sphere represents :type asym_unit: :class:`ihm.AsymUnit` :param tuple seq_id_range: The range of residues represented by this sphere (as a two-element tuple) :param float x: x coordinate of the center of the sphere :param float y: y coordinate of the center of the sphere :param float z: z coordinate of the center of the sphere :param float radius: radius of the sphere :param float rmsf: root-mean-square fluctuation of the coordinates """ # Reduce memory usage __slots__ = ['asym_unit', 'seq_id_range', 'x', 'y', 'z', 'radius', 'rmsf'] def __init__(self, asym_unit, seq_id_range, x, y, z, radius, rmsf=None): self.asym_unit = asym_unit self.seq_id_range = seq_id_range self.x, self.y, self.z = x, y, z self.radius, self.rmsf = radius, rmsf class Atom: """Coordinates of part of the model represented by an atom. See :meth:`Model.get_atoms` for more details. Note that this class is used only to represent the coordinates of an atom. To access atom-specific properties of the model, see the :class:`ihm.Atom` class. :param asym_unit: The asymmetric unit that this atom represents :type asym_unit: :class:`ihm.AsymUnit` :param int seq_id: The sequence ID of the residue represented by this atom. This should generally be a number starting at 1 for any polymer chain, water, or oligosaccharide. For ligands, a seq_id is not needed (as a given asym can only contain a single ligand), so either 1 or None can be used. :param str atom_id: The name of the atom in the residue :param str type_symbol: Element name :param float x: x coordinate of the atom :param float y: y coordinate of the atom :param float z: z coordinate of the atom :param bool het: True for HETATM sites, False (default) for ATOM :param float biso: Temperature factor or equivalent (if applicable) :param float occupancy: Fraction of the atom type present (if applicable) :param float alt_id: Alternate conformation indicator (if applicable) """ # Reduce memory usage __slots__ = ['asym_unit', 'seq_id', 'atom_id', 'type_symbol', 'x', 'y', 'z', 'het', 'biso', 'occupancy', 'alt_id'] def __init__(self, asym_unit, seq_id, atom_id, type_symbol, x, y, z, het=False, biso=None, occupancy=None, alt_id=None): self.asym_unit = asym_unit self.seq_id, self.atom_id = seq_id, atom_id self.type_symbol = type_symbol self.x, self.y, self.z = x, y, z self.het, self.biso = het, biso self.occupancy = occupancy self.alt_id = alt_id class Model: """A single set of coordinates (conformation). Models are added to the system by placing them inside :class:`ModelGroup` objects, which in turn are placed inside :class:`State` objects, which are grouped in :class:`StateGroup` objects, which are finally added to the system via :attr:`ihm.System.state_groups`. :param assembly: The parts of the system that were modeled. :type assembly: :class:`~ihm.Assembly` :param protocol: Description of how the modeling was done. :type protocol: :class:`~ihm.protocol.Protocol` :param representation: Level of detail at which the system was represented. :type representation: :class:`~ihm.representation.Representation` :param str name: Descriptive name for this model. """ def __init__(self, assembly, protocol, representation, name=None): # Note that a similar Model class is used in python-modelcif but it # is not a subclass. So be careful when modifying this class to not # break the API (e.g. by adding new members). 
self.assembly, self.protocol = assembly, protocol self.representation, self.name = representation, name self._atoms = [] self._spheres = [] #: List of residue ranges that were explicitly not modeled. See #: :class:`NotModeledResidueRange`. self.not_modeled_residue_ranges = [] def get_spheres(self): """Yield :class:`Sphere` objects that represent this model. The default implementation simply iterates over an internal list of spheres, but this is not very memory-efficient, particularly if the spheres are already stored somewhere else, e.g. in the software's own data structures. It is recommended to subclass and provide a more efficient implementation. For example, the `modeling of Nup133 `_ uses a `custom subclass `_ to pass `BioPython `_ objects through to python-ihm. Note that the set of spheres should match the model's :class:`~ihm.representation.Representation`. This is not currently enforced. """ # noqa: E501 for s in self._spheres: yield s def add_sphere(self, sphere): """Add to the model's set of :class:`Sphere` objects. See :meth:`get_spheres` for more details. """ self._spheres.append(sphere) def get_atoms(self): """Yield :class:`Atom` objects that represent this model. See :meth:`get_spheres` for more details. """ for a in self._atoms: yield a def add_atom(self, atom): """Add to the model's set of :class:`Atom` objects. See :meth:`get_spheres` for more details. Note that for branched entities, the `seq_id` of the new atom is provisional. It should be mapped to the correct ID once the input file is completely read, using :attr:`ihm.AsymUnit.num_map`. This is done automatically by ihm.reader when using the default implementation. """ self._atoms.append(atom) class ModelRepresentative: """A single model that represents all models in a :class:`ModelGroup`. See :attr:`ModelGroup.representatives`. :param model: The actual representative Model. :type model: :class:`Model` :param str selection_criteria: How the representative was chosen """ def __init__(self, model, selection_criteria): self.model, self.selection_criteria = model, selection_criteria selection_criteria = _text_choice_property( "selection_criteria", ["medoid", "closest to the average", "lowest energy", "target function", "fewest violations", "minimized average structure", "best scoring model", "centroid", "other selction criteria"], doc="How the representative was chosen") class ModelGroup(list): """A set of related models. See :class:`Model`. It is implemented as a simple list of the models. These objects are typically stored in a :class:`State`, :class:`Ensemble`, or :class:`OrderedProcess`. :param elements: Initial set of models in the group. :param str name: Descriptive name for the group. :param str details: Additional text describing this group. """ def __init__(self, elements=(), name=None, details=None): self.name = name self.details = details super().__init__(elements) #: Any representative structural model(s). #: See :class:`ModelRepresentative`. self.representatives = [] # Kind of ugly but needed so we can use ModelGroup as keys for # the ihm.restraint.CrossLink.fits dict def __hash__(self): return hash(tuple(self)) class State(list): """A set of model groups that constitute a single state of the system. It is implemented as a simple list of the model groups. See :class:`StateGroup`. :param elements: The initial set of :class:`ModelGroup` objects in this state. 
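       A minimal sketch of how states fit into the model hierarchy
       (``model`` and ``system`` are assumed to already exist)::

           group = ihm.model.ModelGroup([model], name='all models')
           state = ihm.model.State([group], name='open state')
           system.state_groups.append(ihm.model.StateGroup([state]))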
""" def __init__(self, elements=(), type=None, name=None, details=None, experiment_type=None, population_fraction=None): self.type, self.name, self.details = type, name, details self.experiment_type = experiment_type self.population_fraction = population_fraction super().__init__(elements) class StateGroup(list): """A set of related states. See :class:`State` and :attr:`ihm.System.state_groups`. It is implemented as a simple list of the states. :param elements: Initial set of states in the group. """ def __init__(self, elements=()): super().__init__(elements) class Ensemble: """Details about a model cluster or ensemble. See :attr:`ihm.System.ensembles`. :param model_group: The set of models in this ensemble. :type model_group: :class:`ModelGroup` :param int num_models: The total number of models in this ensemble. This may be more than the number of models in `model_group`, for example if only representative or top-scoring models are deposited. :param post_process: The final analysis step that generated this ensemble. :type post_process: :class:`ihm.analysis.Step` :param str clustering_method: The method used to obtain the ensemble, if applicable. :param str clustering_feature: The feature used for clustering the models, if applicable. :param str name: A descriptive name for this ensemble. :param float precision: The precision of the entire ensemble. :param file: A reference to an external file containing coordinates for the entire ensemble, for example as a DCD file (see :class:`DCDWriter`). See also :attr:`subsamples`. :type file: :class:`ihm.location.OutputFileLocation` :param str details: Additional text describing this ensemble :param bool superimposed: True if the models in the group are structurally aligned. """ _num_deposited = None def __init__(self, model_group, num_models, post_process=None, clustering_method=None, clustering_feature=None, name=None, precision=None, file=None, details=None, superimposed=None): self.model_group, self.num_models = model_group, num_models self.post_process = post_process self.clustering_method = clustering_method self.clustering_feature = clustering_feature self.name, self.precision, self.file = name, precision, file self.details = details self.superimposed = superimposed #: All localization densities for this ensemble, as #: :class:`LocalizationDensity` objects self.densities = [] #: All subsamples that make up this ensemble (if applicable), #: as :class:`Subsample` objects self.subsamples = [] def _get_num_deposited(self): # Generally we require an associated model_group; however, it is not # required by the dictionary and so input files may not have one, # but use any provided value of num_model_deposited in this case. if self.model_group is None: return self._num_deposited else: return len(self.model_group) num_models_deposited = property(_get_num_deposited, doc="Number of models in this ensemble " "that are in the mmCIF file") clustering_method = _text_choice_property( "clustering_method", ["Hierarchical", "Other", "Partitioning (k-means)", "Density based threshold-clustering"], doc="The clustering method used to obtain the ensemble, if applicable") clustering_feature = _text_choice_property( "clustering_feature", ["RMSD", "dRMSD", "other"], doc="The feature used for clustering the models, if applicable") class NotModeledResidueRange: """A range of residues that were explicitly not modeled. See :attr:`Model.not_modeled_residue_ranges`. :param asym_unit: The asymmetric unit to which the residues belong. 
:type asym_unit: :class:`~ihm.AsymUnit` :param int seq_id_begin: Starting residue in the range. :param int seq_id_end: Ending residue in the range. :param str reason: Optional text describing why the residues were not modeled. """ def __init__(self, asym_unit, seq_id_begin, seq_id_end, reason=None): self.asym_unit = asym_unit self.seq_id_begin, self.seq_id_end = seq_id_begin, seq_id_end self.reason = reason _check_residue_range((seq_id_begin, seq_id_end), asym_unit.entity) reason = _text_choice_property( "reason", ["Highly variable models with poor precision", "Models do not adequately satisfy input data", "Other"], doc="Reason why the residues were not modeled.") class OrderedProcess: """Details about a process that orders two or more model groups. A process is represented as a directed graph, where the nodes are :class:`ModelGroup` objects and the edges represent transitions. These objects are generally added to :attr:`ihm.System.ordered_processes`. :param str ordered_by: Text that explains how the ordering is done, such as "time steps". :param str description: Text that describes this process. """ def __init__(self, ordered_by, description=None): self.ordered_by, self.description = ordered_by, description #: All steps in this process, as a simple list of #: :class:`ProcessStep` objects self.steps = [] class ProcessStep(list): """A single step in an :class:`OrderedProcess`. This is implemented as a simple list of :class:`ProcessEdge` objects, each of which orders two :class:`ModelGroup` objects. (To order more than two groups, for example to represent a branched reaction step that generates two products, simply add multiple edges to the step.) :param sequence elements: Initial set of :class:`ProcessEdge` objects. :param str description: Text that describes this step. """ def __init__(self, elements=(), description=None): self.description = description super().__init__(elements) class ProcessEdge: """A single directed edge in the graph for a :class:`OrderedProcess`, representing the transition from one :class:`ModelGroup` to another. These objects are added to :class:`ProcessStep` objects. :param group_begin: The set of models at the origin of the edge. :type group_begin: :class:`ModelGroup` :param group_end: The set of models at the end of the edge. :type group_end: :class:`ModelGroup` :param str description: Text that describes this edge. """ def __init__(self, group_begin, group_end, description=None): self.group_begin, self.group_end = group_begin, group_end self.description = description class LocalizationDensity: """Localization density of part of the system, over all models in an ensemble. See :attr:`Ensemble.densities`. :param file: A reference to an external file containing the density, for example as an MRC file. :type file: :class:`ihm.location.OutputFileLocation` :param asym_unit: The asymmetric unit (or part of one) that this density represents. :type asym_unit: :class:`~ihm.AsymUnit` or :class:`~ihm.AsymUnitRange` """ def __init__(self, file, asym_unit): self.file, self.asym_unit = file, asym_unit class Subsample: """Base class for a subsample within an ensemble. In some cases the models that make up an :class:`Ensemble` may be partitioned into subsamples, for example to determine if the sampling was exhaustive (see `Viswanath et al. 2017 `_). This base class can be used to describe the set of models in the subsample, for example by pointing to an externally-deposited set of conformations. 
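       A minimal sketch (assuming an existing :class:`Ensemble` ``ens`` and
       two :class:`ModelGroup` objects ``g1`` and ``g2``, one per
       independent sample)::

           s1 = ihm.model.IndependentSubsample(
               name='sample 1', num_models=5000, model_group=g1)
           s2 = ihm.model.IndependentSubsample(
               name='sample 2', num_models=5000, model_group=g2)
           ens.subsamples.extend([s1, s2])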
Usually a derived class (:class:`RandomSubsample` or :class:`IndependentSubsample`) is used instead of this class. Instances are stored in :attr:`Ensemble.subsamples`. All of the subsamples in a given ensemble must be of the same type. :param str name: A descriptive name for this sample :param int num_models: The total number of models in this sample :param model_group: The set of models in this sample, if applicable. :type model_group: :class:`ModelGroup` :param file: A reference to an external file containing coordinates for the entire sample, for example as a DCD file (see :class:`DCDWriter`). :type file: :class:`ihm.location.OutputFileLocation` """ # noqa: E501 sub_sampling_type = 'other' def __init__(self, name, num_models, model_group=None, file=None): self.name, self.num_models = name, num_models self.model_group, self.file = model_group, file num_models_deposited = property( lambda self: len(self.model_group) if self.model_group else 0, doc="Number of models in this subsample that are in the mmCIF file") class RandomSubsample(Subsample): """A subsample generated by picking a random subset of the models that make up the entire ensemble. See :class:`Subsample`. """ sub_sampling_type = 'random' class IndependentSubsample(Subsample): """A subsample generated in the same fashion as other subsamples but by an independent simulation. See :class:`Subsample`. """ sub_sampling_type = 'independent' class DCDWriter: """Utility class to write model coordinates to a binary DCD file. See :class:`Ensemble` and :class:`Model`. Since mmCIF is a text-based format, it is not efficient to store entire ensembles in this format. Instead, representative models should be deposited as mmCIF and the :class:`Ensemble` then linked to an external file containing only model coordinates. One such format is CHARMM/NAMD's DCD, which is written out by this class. The DCD files simply contain the xyz coordinates of all :class:`Atom` and :class:`Sphere` objects in each :class:`Model`. (Note that no other data is stored, such as sphere radii or restraint parameters.) :param file fh: The filelike object to write the coordinates to. This should be open in binary mode and should be a seekable object. """ def __init__(self, fh): self.fh = fh self.nframes = 0 def add_model(self, model): """Add the coordinates for the given :class:`Model` to the file as a new frame. All models in the file should have the same number of atoms and/or spheres, in the same order. :param model: Model with coordinates to write to the file. 
:type model: :class:`Model` """ x = [] y = [] z = [] for a in itertools.chain(model.get_atoms(), model.get_spheres()): x.append(a.x) y.append(a.y) z.append(a.z) self._write_frame(x, y, z) def _write_frame(self, x, y, z): self.nframes += 1 if self.nframes == 1: self.ncoord = len(x) remarks = [ b'Produced by python-ihm, https://github.com/ihmwg/python-ihm', b'This file is designed to be used in combination with an ' b'mmCIF file', b'See PDB-IHM at https://pdb-ihm.org/ for more details'] self._write_header(self.ncoord, remarks) else: if len(x) != self.ncoord: raise ValueError( "Frame size mismatch - frames contain %d " "coordinates but attempting to write a frame " "containing %d coordinates" % (self.ncoord, len(x))) # Update number of frames self.fh.seek(self._pos_nframes) self.fh.write(struct.pack('i', self.nframes)) self.fh.seek(0, 2) # Move back to end of file # Write coordinates frame_size = struct.pack('i', struct.calcsize("%df" % self.ncoord)) for coord in x, y, z: self.fh.write(frame_size) self.fh.write(struct.pack("%df" % self.ncoord, *coord)) self.fh.write(frame_size) def _write_header(self, natoms, remarks): self.fh.write(struct.pack('i', 84) + b'CORD') self._pos_nframes = self.fh.tell() self.fh.write(struct.pack('i', self.nframes)) self.fh.write(struct.pack('i', 0)) # istart self.fh.write(struct.pack('i', 0)) # nsavc self.fh.write(struct.pack('5i', 0, 0, 0, 0, 0)) self.fh.write(struct.pack('i', 0)) # number of fixed atoms self.fh.write(struct.pack('d', 0.)) # delta self.fh.write(struct.pack('10i', 0, 0, 0, 0, 0, 0, 0, 0, 0, 84)) remark_size = struct.calcsize('i') + 80 * len(remarks) self.fh.write(struct.pack('i', remark_size)) self.fh.write(struct.pack('i', len(remarks))) for r in remarks: self.fh.write(r.ljust(80)[:80]) self.fh.write(struct.pack('i', remark_size)) self.fh.write(struct.pack('i', struct.calcsize('i'))) self.fh.write(struct.pack('i', natoms)) # total number of atoms self.fh.write(struct.pack('i', struct.calcsize('i'))) python-ihm-2.7/ihm/multi_state_scheme.py000066400000000000000000000273161503573337200205040ustar00rootroot00000000000000# coding=utf-8 import ihm from ihm.model import _text_choice_property """Classes for handling connected/ordered schemes formed by multiple state together with information on kinetic schemes""" class MultiStateScheme: """MultiStateScheme collects information about a collection of multiple states, that can form a connected/ordered scheme. A special case is a kinetic scheme, for which kinetic rates and relaxation times are available. :param str name: The name of the multi-state scheme. :param str details: Details on the scheme. :param connectivities: A list of connectivities that belong to the scheme. :type connectivities: List of :class:`Connectivity` :param relaxation_times: A list of relaxation times not assigned to specific connectivities, but to the scheme :type relaxation_times: List of :class:`RelaxationTime` """ def __init__(self, name, details=None, connectivities=None, relaxation_times=None): self.name = name self.details = details self._connectivity_list = [] self._relaxation_time_list = [] # states is filled automatically based on connectivity_list self._states = [] if connectivities is not None: for c in connectivities: if c not in self._connectivity_list: self.add_connectivity(c) if relaxation_times is not None: for r in relaxation_times: if r not in self._relaxation_time_list: self.add_relaxation_time(r) def add_connectivity(self, connectivity): """Add a connectivity to the scheme. 
:param connectivity: The connectivity to add to the scheme :type connectivity: :class:`Connectivity` """ if connectivity is None: return if connectivity not in self._connectivity_list: # Make sure that the connectivity has not been assigned to # another scheme if not connectivity._assigned_to_scheme: connectivity.set_assigned_to_scheme() self._connectivity_list.append(connectivity) # If the connectivity has been assigned to another scheme, # create a copy of the connectivity and use that else: old_connectivity = connectivity connectivity = \ ihm.multi_state_scheme.Connectivity( begin_state=old_connectivity.begin_state, end_state=old_connectivity.end_state, details=old_connectivity.details, dataset_group=old_connectivity.dataset_group, kinetic_rate=old_connectivity.kinetic_rate, relaxation_time=old_connectivity.relaxation_time ) connectivity.set_assigned_to_scheme() self._connectivity_list.append(connectivity) # Add the states that belong to the connectivity self._add_state(connectivity.begin_state) self._add_state(connectivity.end_state) def _add_state(self, state): """Add a state to the self._states list if it is not present yet. This function checks whether the state has optional properties, such as a name. If this is the case, the name is compared to the names already in the list. If the state does not have a name, it might only be a list of elements. Then only the contents of the list are checked This is important for empty states, i.e. those that do not have models associated. :param state: The state to add. :type state: :class:`ihm.model.State` """ if state is None: return for tmp_state in self._states: # Check whether both states have the name attributes if hasattr(state, 'name') and hasattr(tmp_state, 'name'): # compare the properties of the two states and the elements of # the lists if state.__dict__ == tmp_state.__dict__ \ and state == tmp_state: # state found return # If neither of the two states has the name attribute, only compare # the elements of the lists if not hasattr(state, 'name') and not hasattr(tmp_state, 'name'): # If the two states have the same elements if state == tmp_state: # state found return # If the state was not found in the list yet, add it self._states.append(state) def add_relaxation_time(self, relaxation_time): """Add a relaxation time to the scheme. This relaxation time is not assigned to a connectivity. :param relaxation_time: The relaxation time to add to the scheme. :type relaxation_time: :class:`RelaxationTime` """ if relaxation_time is not None: self._relaxation_time_list.append(relaxation_time) def get_connectivities(self): """Return the connectivities assigned to a scheme""" return self._connectivity_list def get_relaxation_times(self): """Return the relaxation times assigned to a scheme""" return self._relaxation_time_list def get_states(self): """Return the states involved in a scheme""" return self._states def __eq__(self, other): return ((self.__dict__ == other.__dict__) and (self._connectivity_list == other._connectivity_list) and (self._relaxation_time_list == other._relaxation_time_list)) class Connectivity: """A connectivity between states. Used to describe the directed edge of graph. If no end_state is given, the state is not connected to another state. This could be the case for states where no connection to other states could be resolved. :param begin_state: The start state of the connectivity. :type begin_state: :class:`ihm.model.State` :param end_state: The end state of the connectivity. 
Can be None in case of states that are not connected to others. :type end_state: :class:`ihm.model.State` :param details: Details to the connectivity. :param dataset_group: The DatasetGroup that was used to obtain information on the connectivity. :type dataset_group: :class:`ihm.dataset.DatasetGroup` :param kinetic_rate: A kinetic rate assigned to the connectivity. :type kinetic_rate: :class:`KineticRate` :param relaxation_time: A relaxation time assigned to the connectivity. :type relaxation_time: :class:`RelaxationTime` """ def __init__(self, begin_state, end_state=None, details=None, dataset_group=None, kinetic_rate=None, relaxation_time=None): self.begin_state = begin_state self.end_state = end_state self.details = details self.dataset_group = dataset_group self.kinetic_rate = kinetic_rate self.relaxation_time = relaxation_time # The _assigned_to_scheme variable tracks whether the connectivity # has been assigned to a scheme. This is to ensure that each # connectivity is only assigned to a single scheme. self._assigned_to_scheme = False def set_assigned_to_scheme(self): self._assigned_to_scheme = True def __eq__(self, other): return self.__dict__ == other.__dict__ class KineticRate: """A base class for a kinetic rate that can be assigned to a connectivity. The kinetic rate could be a transition_rate_constant or an equilibrium_constant. Alternatively, both could be provided. :param float transition_rate_constant: A transition rate constant describing the exchange between two states. Unit: per second. :param equilibrium_constant: An equilibrium constant describing the exchange between two states :type equilibrium_constant: :class:`EquilibriumConstant` or :class:`PopulationEquilibriumConstant` or :class:`KineticRateEquilibriumConstant` :param str details: Details on the kinetic rate. :param dataset_group: The DatasetGroup used to determine the kinetic rate. :type dataset_group: :class:`ihm.dataset.DatasetGroup` :param file: External file containing measurement data for the kinetic rate. :type file: :class:`ihm.location.OutputFileLocation` """ def __init__(self, transition_rate_constant=None, equilibrium_constant=None, details=None, dataset_group=None, file=None): self.transition_rate_constant = transition_rate_constant self.equilibrium_constant = equilibrium_constant self.details = details self.dataset_group = dataset_group self.external_file = file def __eq__(self, other): return self.__dict__ == other.__dict__ class EquilibriumConstant: """Base class for an equilibrium constant. This class handles the case that none of the derived classes is applicable. :param float value: The value of the equilibrium constant :param str unit: Unit of the equilibrium constant. 
Depending on what the process describes, a unit might be applicable or not""" def __init__(self, value, unit=None): self.method = 'equilibrium constant is determined from another ' \ 'method not listed' self.value = value self.unit = unit def __eq__(self, other): if other is None: return False return self.__dict__ == other.__dict__ class PopulationEquilibriumConstant(EquilibriumConstant): """An equilibrium constant determined from population""" def __init__(self, value, unit=None): super().__init__(value, unit) self.method = 'equilibrium constant is determined from population' class KineticRateEquilibriumConstant(EquilibriumConstant): """An equilibrium constant determined from kinetic rates as kAB/kBA""" def __init__(self, value, unit=None): super().__init__(value, unit) self.method = 'equilibrium constant is determined from kinetic ' \ 'rates, kAB/kBA' class RelaxationTime: """A relaxation time determined for a scheme. The relaxation time can either be connected to a specific connectivity in the scheme or to the scheme in general if no assignment is possible. :param float value: The relaxation time. :param str unit: The unit of the relaxation time. Options are ['seconds','milliseconds', microseconds'] :param float amplitude: The amplitude of the relaxation time if determined. :param str details: Details on the relaxation time. :param dataset_group: DatasetGroup used to determine the relaxation time. :type dataset_group: :class:`ihm.dataset.DatasetGroup` :param file: An external file containing measurement data for the relaxation time. :type file: :class:`ihm.location.OutputFileLocation` """ def __init__(self, value, unit, amplitude=None, details=None, dataset_group=None, file=None): self.value = value self.unit = unit self.amplitude = amplitude self.details = details self.dataset_group = dataset_group self.external_file = file def __eq__(self, other): return self.__dict__ == other.__dict__ # Check whether the given unit is within the allowed options allowed_relaxation_time_units = ['seconds', 'milliseconds', 'microseconds'] unit = _text_choice_property( "unit", allowed_relaxation_time_units, doc="The unit of the relaxation time.") python-ihm-2.7/ihm/protocol.py000066400000000000000000000067201503573337200164630ustar00rootroot00000000000000"""Classes for handling modeling protocols. """ class Step: """A single step in a :class:`Protocol`. :param assembly: The part of the system modeled in this step :type assembly: :class:`~ihm.Assembly` :param dataset_group: The collection of datasets used in this modeling :type dataset_group: :class:`~ihm.dataset.DatasetGroup` :param str method: Description of the method used (e.g. "Monte Carlo") :param str name: A descriptive name for the step :param int num_models_begin: The number of models at the beginning of the step :param int num_models_end: The number of models at the end of the step :param software: The software used in this step :type software: :class:`~ihm.Software` :param script_file: Reference to the external file containing the script used in this step (usually a :class:`~ihm.location.WorkflowFileLocation`). :type script_file: :class:`~ihm.location.Location` :param bool multi_scale: Indicates if the modeling is multi-scale :param bool multi_state: Indicates if the modeling is multi-state :param bool ordered: Indicates if the modeling is ordered :param bool ensemble: Indicates if the modeling involves an ensemble; the default if unspecified is True iff the system contains at least one :class:`~ihm.model.Ensemble`. 
:param str description: Additional text describing the step """ def __init__(self, assembly, dataset_group, method, num_models_begin=None, num_models_end=None, software=None, script_file=None, multi_scale=False, multi_state=False, ordered=False, ensemble='default', name=None, description=None): self.assembly = assembly self.dataset_group = dataset_group self.method = method self.num_models_begin = num_models_begin self.num_models_end = num_models_end self.multi_scale, self.multi_state = multi_scale, multi_state self.software, self.ordered, self.name = software, ordered, name self.ensemble = ensemble self.script_file = script_file self.description = description def _get_report(self): def _get_flags(): if self.multi_scale: yield "multi-scale" if self.multi_state: yield "multi-state" if self.ordered: yield "ordered" return ("%s (%s) (%s->%s models)" % (self.name or "Unnamed step", "; ".join([self.method] + list(_get_flags())), self.num_models_begin, self.num_models_end)) class Protocol: """A modeling protocol. Each protocol consists of a number of protocol steps (e.g. sampling, refinement) followed by a number of analyses. Normally a protocol is passed to one or more :class:`~ihm.model.Model` objects, although unused protocols can still be included in the file if desired by adding them to :attr:`~ihm.System.orphan_protocols`. :param str name: Optional name for the protocol :param str details: Additional text describing the protocol """ def __init__(self, name=None, details=None): self.name = name self.details = details #: All modeling steps (:class:`Step` objects) self.steps = [] #: All analyses (:class:`~ihm.analysis.Analysis` objects) self.analyses = [] python-ihm-2.7/ihm/reader.py000066400000000000000000005155501503573337200160720ustar00rootroot00000000000000"""Utility classes to read in information in mmCIF or BinaryCIF format""" import ihm.format import ihm.format_bcif import ihm.location import ihm.dataset import ihm.representation import ihm.reference import ihm.startmodel import ihm.protocol import ihm.analysis import ihm.model import ihm.restraint import ihm.geometry import ihm.source import ihm.cross_linkers import ihm.multi_state_scheme import ihm.flr import inspect import warnings import collections from . import util try: from . import _format except ImportError: _format = None class OldFileError(Exception): """Exception raised if a file conforms to too old a version of the IHM extension dictionary. See :func:`read`.""" pass def _make_new_entity(): """Make a new Entity object""" e = ihm.Entity([]) # make sequence mutable (see also SystemReader.finalize) e.sequence = list(e.sequence) # disable residue range checks during file read (see also # _finalize_entities) e._range_check = False return e def _finalize_entities(system): """Finalize all Entities in the given System. 
This is done here and not in SystemReader.finalize so that it happens both for python-ihm and for python-modelcif; it is also not done in _EntityHandler.finalize as we want to be sure all other finalization is done first.""" for e in system.entities: e._range_check = True def _get_vector3(d, key): """Return a 3D vector (as a list) from d[key+[1..3]] or leave as is if None or ihm.unknown""" if d[key + '1'] in (None, ihm.unknown): return d[key + '1'] else: # Assume if one element is present, all are return [float(d[key + "%d" % k]) for k in (1, 2, 3)] def _get_matrix33(d, key): """Return a 3x3 matrix (as a list of lists) from d[key+[1..3][1..3]]] or leave as is if None or ihm.unknown""" if d[key + '11'] in (None, ihm.unknown): return d[key + '11'] else: # Assume if one element is present, all are return [[float(d[key + "%d%d" % (i, j)]) for j in (1, 2, 3)] for i in (1, 2, 3)] class IDMapper: """Utility class to handle mapping from mmCIF IDs to Python objects. :param list system_list: The list in :class:`ihm.System` that keeps track of these objects. :param class cls: The base class for the Python objects. """ # The attribute in the class used to store the ID id_attr = '_id' def __init__(self, system_list, cls, *cls_args, **cls_keys): self.system_list = system_list self._obj_by_id = {} self._cls = cls self._cls_args = cls_args self._cls_keys = cls_keys # Fill in any existing IDs if available, so that we can add objects # to an existing system # todo: handle objects where system_list is None # e.g. some handlers use FLRListAdapter, which doesn't # support iteration if system_list and hasattr(system_list, '__iter__'): for obj in system_list: self._obj_by_id[getattr(obj, self.id_attr)] = obj def get_all(self): """Yield all objects seen so far (unordered)""" return self._obj_by_id.values() def _make_new_object(self, newcls=None): if newcls is None: newcls = self._cls return newcls(*self._cls_args, **self._cls_keys) def _update_old_object(self, obj, newcls=None): # If this object was referenced by another table before it was # created, it may have the wrong class - fix that retroactively # (need to be careful that old and new classes are compatible) if newcls: obj.__class__ = newcls def get_by_id(self, objid, newcls=None): """Get the object with given ID, creating it if it doesn't already exist. If `newcls` is specified, the object will be an instance of that class (this is commonly used when different subclasses are employed depending on a type specified in the mmCIF file, such as the various subclasses of :class:`ihm.dataset.Dataset`).""" if objid in self._obj_by_id: obj = self._obj_by_id[objid] self._update_old_object(obj, newcls) return obj else: newobj = self._make_new_object(newcls) self._set_object_id(newobj, objid) self._obj_by_id[objid] = newobj if self.system_list is not None: self.system_list.append(newobj) return newobj def _set_object_id(self, obj, objid): if self.id_attr is not None: setattr(obj, self.id_attr, objid) def get_by_id_or_none(self, objid, newcls=None): """Get the object with given ID, creating it if it doesn't already exist. 
If ID is None or ihm.unknown, return None instead.""" return (None if objid in (None, ihm.unknown) else self.get_by_id(objid, newcls)) class _ChemCompIDMapper(IDMapper): """Add extra handling to IDMapper for the chem_comp category""" id_attr = 'id' def __init__(self, *args, **keys): super().__init__(*args, **keys) # get standard residue types alphabets = [x[1] for x in inspect.getmembers(ihm, inspect.isclass) if issubclass(x[1], ihm.Alphabet) and x[1] is not ihm.Alphabet] self._standard_by_id = {} for alphabet in alphabets: self._standard_by_id.update((item[1].id, item[1]) for item in alphabet._comps.items()) def get_by_id(self, objid, newcls=None): # Don't modify class of standard residue types if objid in self._standard_by_id: obj = self._standard_by_id[objid] if objid not in self._obj_by_id: self._obj_by_id[objid] = obj self.system_list.append(obj) return obj else: # Assign nonpolymer class based on the ID if newcls is ihm.NonPolymerChemComp or newcls is ihm.WaterChemComp: newcls = (ihm.WaterChemComp if objid == 'HOH' else ihm.NonPolymerChemComp) return super().get_by_id(objid, newcls) def _make_new_object(self, newcls=None): if newcls is None: newcls = self._cls if newcls is ihm.NonPolymerChemComp: return newcls(None) elif newcls is ihm.WaterChemComp: return newcls() else: return newcls(*self._cls_args, **self._cls_keys) class RangeIDMapper: """Utility class to handle mapping from mmCIF IDs to :class:`ihm.AsymUnitRange` or :class:`ihm.EntityRange` objects.""" def __init__(self): self._id_map = {} def set(self, range_id, seq_id_begin, seq_id_end): """Add a range. :param str range_id: mmCIF ID :param int seq_id_begin: Index of the start of the range :param int seq_id_end: Index of the end of the range """ self._id_map[range_id] = (seq_id_begin, seq_id_end) def get(self, asym_or_entity, range_id): """Get a range from an ID. :param asym_or_entity: An :class:`ihm.Entity` or :class:`ihm.AsymUnit` object representing the part of the system to which the range will be applied. :param str range_id: mmCIF ID :return: A range as a :class:`ihm.Entity`, :class:`ihm.AsymUnit`, :class:`ihm.EntityRange` or :class:`ihm.AsymUnitRange` object. """ # range_id can be None if the entire asym/entity should be selected # (e.g. 
for a non-polymer) if range_id is None: return asym_or_entity else: # Allow reading out-of-range ranges return asym_or_entity(*self._id_map[range_id]) class _AnalysisIDMapper(IDMapper): """Add extra handling to IDMapper for Analysis objects""" def _set_object_id(self, obj, objid): # Analysis objects are referenced by (protocol_id, analysis_id) but # we only want to store analysis_id in the Analysis object itself if self.id_attr is not None: setattr(obj, self.id_attr, objid[1]) class _AnalysisStepIDMapper(IDMapper): """Add extra handling to IDMapper for the post processing category""" def _make_new_object(self, newcls=None): if newcls is None: newcls = self._cls if newcls is ihm.analysis.EmptyStep: return newcls() else: return newcls(*self._cls_args, **self._cls_keys) class _FeatureIDMapper(IDMapper): """Add extra handling to IDMapper for restraint features""" def _make_new_object(self, newcls=None): if newcls is None: # Make Feature base class (takes no args) return self._cls() elif newcls is ihm.restraint.PseudoSiteFeature: # Pseudo site constructor needs "site" argument return newcls(None) else: # Make subclass (takes one ranges/atoms argument) return newcls([]) def _update_old_object(self, obj, newcls=None): super()._update_old_object(obj, newcls) # Add missing members if the base class was originally instantianted if (newcls is ihm.restraint.ResidueFeature and not hasattr(obj, 'ranges')): obj.ranges = [] elif (newcls is ihm.restraint.AtomFeature and not hasattr(obj, 'atoms')): obj.atoms = [] elif (newcls is ihm.restraint.NonPolyFeature and not hasattr(obj, 'objs')): obj.objs = [] elif (newcls is ihm.restraint.PseudoSiteFeature and not hasattr(obj, 'site')): obj.site = None class _GeometryIDMapper(IDMapper): """Add extra handling to IDMapper for geometric objects""" _members = {ihm.geometry.Sphere: ('center', 'radius', 'transformation'), ihm.geometry.Torus: ('center', 'transformation', 'major_radius', 'minor_radius'), ihm.geometry.HalfTorus: ('center', 'transformation', 'major_radius', 'minor_radius', 'thickness'), ihm.geometry.XAxis: ('transformation',), ihm.geometry.YAxis: ('transformation',), ihm.geometry.ZAxis: ('transformation',), ihm.geometry.XYPlane: ('transformation',), ihm.geometry.YZPlane: ('transformation',), ihm.geometry.XZPlane: ('transformation',)} def _make_new_object(self, newcls=None): if newcls is None: # Make GeometricObject base class (takes no args) return self._cls() else: # Make subclass (takes variable number of args) len_args = {ihm.geometry.Sphere: 2, ihm.geometry.Torus: 3, ihm.geometry.HalfTorus: 4}.get(newcls, 0) return newcls(*(None,) * len_args) def _update_old_object(self, obj, newcls=None): # Don't revert a HalfTorus back to a Torus if newcls is ihm.geometry.Torus \ and isinstance(obj, ihm.geometry.HalfTorus): return # Don't revert a derived class back to a base class elif newcls and isinstance(obj, newcls): return super()._update_old_object(obj, newcls) # Add missing members if the base class was originally instantianted for member in self._members.get(newcls, ()): if not hasattr(obj, member): setattr(obj, member, None) class _CrossLinkIDMapper(IDMapper): """Add extra handling to IDMapper for cross links""" def _make_new_object(self, newcls=None): if newcls is None: # Make base class (takes no args) obj = self._cls() # Need fits in case we never decide on a type obj.fits = {} return obj elif newcls is ihm.restraint.AtomCrossLink: return newcls(*(None,) * 6) else: return newcls(*(None,) * 4) class _ReferenceIDMapper(IDMapper): """Add extra handling to 
IDMapper for ihm.reference.Reference objects""" def _make_new_object(self, newcls=None): if newcls is None or newcls is ihm.reference.Sequence: return self._cls(*(None,) * 4) else: return newcls(*(None,) * 3) class _FLRListAdapter: """Take objects from IDMapper and place them in objects in FLRData.""" def __init__(self, collection_dict, collection_list, flr_data): self.collection_dict, self.flr_data = collection_dict, flr_data self.collection_list = collection_list def append(self, obj): # We generally only have a single FLRData object, id=1 d = self.flr_data.get_by_id(1) # Store in collection dict (by ID) in FLRData rather than a # list in System collection_dict = getattr(d, self.collection_dict) collection_dict[obj._id] = obj # Also store in list in FLRData if applicable if self.collection_list is not None: collection_list = getattr(d, self.collection_list) collection_list.append(obj) class _FLRIDMapper(IDMapper): """Handle mapping from mmCIF IDs to FLR Python objects. This differs from the base IDMapper class in that created objects are stored in the FLRData object, not in the System.""" def __init__(self, collection_dict, collection_list, flr_data, cls, *args, **keys): system_list = _FLRListAdapter(collection_dict, collection_list, flr_data) super().__init__(system_list, cls, *args, **keys) class _DatasetAssemblyIDMapper: """Handle mapping from mmCIF dataset IDs to Python objects. This is similar to IDMapper but is intended for objects like restraints that don't have their own IDs but instead use the dataset ID. :param list system_list: The list in :class:`ihm.System` that keeps track of these objects. :param datasets: Mapping from IDs to Dataset objects. :param class cls: The base class for the Python objects. Its constructor is expected to take a Dataset object as the first argument. """ def __init__(self, system_list, datasets, cls, *cls_args, **cls_keys): self.system_list = system_list self.datasets = datasets self._obj_by_id = {} self._cls = cls self._cls_args = cls_args self._cls_keys = cls_keys def get_by_dataset(self, dataset_id, assembly_id): dataset = self.datasets.get_by_id(dataset_id) k = (dataset._id, assembly_id) if k not in self._obj_by_id: r = self._cls(dataset, *self._cls_args, **self._cls_keys) self.system_list.append(r) self._obj_by_id[k] = r else: r = self._obj_by_id[k] return r class _XLRestraintMapper: """Map entries to CrossLinkRestraint""" def __init__(self, system_list): self.system_list = system_list self._seen_rsrs = {} def get_by_attrs(self, dataset, linker): """Group all crosslinks with same dataset and linker in one CrossLinkRestraint object""" k = (dataset._id, linker) if k not in self._seen_rsrs: r = ihm.restraint.CrossLinkRestraint(dataset, linker) self.system_list.append(r) self._seen_rsrs[k] = r return self._seen_rsrs[k] def get_all(self): """Yield all objects seen so far (unordered)""" return self._seen_rsrs.values() class SystemReader: """Utility class to track global information for a :class:`ihm.System` being read from a file, such as the mapping from IDs to objects (as :class:`IDMapper` objects). 
This can be used by :class:`Handler` subclasses.""" def __init__(self, model_class, starting_model_class, system=None): #: The :class:`ihm.System` object being read in self.system = system or ihm.System() #: Mapping from ID to :class:`ihm.Software` objects self.software = IDMapper(self.system.software, ihm.Software, *(None,) * 4) #: Mapping from ID to :class:`ihm.Citation` objects self.citations = IDMapper(self.system.citations, ihm.Citation, *(None,) * 8) #: Mapping from ID to :class:`ihm.Revision` objects self.revisions = IDMapper(self.system.revisions, ihm.Revision, *(None,) * 4) #: Mapping from ID to :class:`ihm.Entity` objects self.entities = IDMapper(self.system.entities, _make_new_entity) #: Mapping from ID to :class:`ihm.source.Manipulated` objects self.src_gens = IDMapper(None, ihm.source.Manipulated) #: Mapping from ID to :class:`ihm.source.Natural` objects self.src_nats = IDMapper(None, ihm.source.Natural) #: Mapping from ID to :class:`ihm.source.Synthetic` objects self.src_syns = IDMapper(None, ihm.source.Synthetic) #: Mapping from ID to :class:`ihm.AsymUnit` objects self.asym_units = IDMapper(self.system.asym_units, ihm.AsymUnit, None) #: Mapping from ID to :class:`ihm.ChemComp` objects self.chem_comps = _ChemCompIDMapper(self.system._orphan_chem_comps, ihm.ChemComp, *(None,) * 3) #: Mapping from ID to :class:`ihm.reference.Alignment` objects self.alignments = IDMapper(None, ihm.reference.Alignment) #: Mapping from ID to :class:`ihm.reference.Reference` objects self.references = _ReferenceIDMapper(None, ihm.reference.Sequence) #: Mapping from ID to :class:`ihm.ChemDescriptor` objects self.chem_descriptors = IDMapper(self.system.orphan_chem_descriptors, ihm.ChemDescriptor, None) #: Mapping from ID to :class:`ihm.Assembly` objects self.assemblies = IDMapper(self.system.orphan_assemblies, ihm.Assembly) #: Mapping from ID to :class:`ihm.AsymUnitRange` #: or :class:`ihm.EntityRange` objects self.ranges = RangeIDMapper() #: Mapping from ID to :class:`ihm.location.Repository` objects self.repos = IDMapper(self.system._orphan_repos, ihm.location.Repository, None) #: Mapping from ID to :class:`ihm.location.FileLocation` objects self.external_files = IDMapper(self.system.locations, ihm.location.FileLocation, '/') # should always exist? 
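        # Illustrative note: handlers resolve mmCIF IDs through the IDMapper
        # attributes defined in this constructor, and forward references work
        # because get_by_id() creates a placeholder object the first time an
        # ID is seen and returns that same object on every later lookup.
        # For example, _ihm_external_files can be read before
        # _ihm_external_reference_info; _ExtFileHandler (later in this
        # module) simply does
        #
        #     f = self.sysr.external_files.get_by_id(id, ...)
        #     f.repo = self.sysr.repos.get_by_id(reference_id)
        #
        # and the Repository object is filled in when its own category is
        # eventually read.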
#: Mapping from ID to :class:`ihm.location.DatabaseLocation` objects self.db_locations = IDMapper(self.system.locations, ihm.location.DatabaseLocation, None, None) #: Mapping from ID to :class:`ihm.dataset.Dataset` objects self.datasets = IDMapper(self.system.orphan_datasets, ihm.dataset.Dataset, None) #: Mapping from ID to :class:`ihm.dataset.DatasetGroup` objects self.dataset_groups = IDMapper(self.system.orphan_dataset_groups, ihm.dataset.DatasetGroup) #: Mapping from ID to :class:`ihm.startmodel.StartingModel` objects self.starting_models = IDMapper(self.system.orphan_starting_models, starting_model_class, *(None,) * 3) #: Mapping from ID to :class:`ihm.representation.Representation` #: objects self.representations = IDMapper(self.system.orphan_representations, ihm.representation.Representation) #: Mapping from ID to :class:`ihm.protocol.Protocol` objects self.protocols = IDMapper(self.system.orphan_protocols, ihm.protocol.Protocol) #: Mapping from ID to :class:`ihm.analysis.Step` objects self.analysis_steps = _AnalysisStepIDMapper(None, ihm.analysis.Step, *(None,) * 3) #: Mapping from ID to :class:`ihm.analysis.Analysis` objects self.analyses = _AnalysisIDMapper(None, ihm.analysis.Analysis) #: Mapping from ID to :class:`ihm.model.Model` objects self.models = IDMapper(None, model_class, *(None,) * 3) #: Mapping from ID to :class:`ihm.model.ModelGroup` objects self.model_groups = IDMapper(None, ihm.model.ModelGroup) #: Mapping from ID to :class:`ihm.model.State` objects self.states = IDMapper(None, ihm.model.State) #: Mapping from ID to :class:`ihm.model.StateGroup` objects self.state_groups = IDMapper(self.system.state_groups, ihm.model.StateGroup) #: Mapping from ID to :class:`ihm.model.Ensemble` objects self.ensembles = IDMapper(self.system.ensembles, ihm.model.Ensemble, *(None,) * 2) #: Mapping from ID to :class:`ihm.model.LocalizationDensity` objects self.densities = IDMapper(None, ihm.model.LocalizationDensity, *(None,) * 2) #: Mapping from ID to :class:`ihm.restraint.EM3DRestraint` objects self.em3d_restraints = _DatasetAssemblyIDMapper( self.system.restraints, self.datasets, ihm.restraint.EM3DRestraint, None) #: Mapping from ID to :class:`ihm.restraint.EM2DRestraint` objects self.em2d_restraints = IDMapper(self.system.restraints, ihm.restraint.EM2DRestraint, *(None,) * 2) #: Mapping from ID to :class:`ihm.restraint.SASRestraint` objects self.sas_restraints = _DatasetAssemblyIDMapper( self.system.restraints, self.datasets, ihm.restraint.SASRestraint, None) #: Mapping from ID to :class:`ihm.restraint.Feature` objects self.features = _FeatureIDMapper(self.system.orphan_features, ihm.restraint.Feature) #: Mapping from ID to :class:`ihm.restraint.PseudoSite` objects self.pseudo_sites = IDMapper(self.system.orphan_pseudo_sites, ihm.restraint.PseudoSite, *(None,) * 3) #: Mapping from ID to :class:`ihm.restraint.DerivedDistanceRestraint` #: objects self.dist_restraints = IDMapper( self.system.restraints, ihm.restraint.DerivedDistanceRestraint, *(None,) * 4) #: Mapping from ID to :class:`ihm.restraint.HDXRestraint` objects self.hdx_restraints = IDMapper( self.system.restraints, ihm.restraint.HDXRestraint, *(None,) * 2) #: Mapping from ID to :class:`ihm.restraint.PredictedContactRestraint` #: objects self.pred_cont_restraints = IDMapper( self.system.restraints, ihm.restraint.PredictedContactRestraint, *(None,) * 5) #: Mapping from ID to :class:`ihm.restraint.RestraintGroup` of #: :class:`ihm.restraint.DerivedDistanceRestraint` objects self.dist_restraint_groups = IDMapper( 
self.system.restraint_groups, ihm.restraint.RestraintGroup) #: Mapping from ID to :class:`ihm.restraint.RestraintGroup` of #: :class:`ihm.restraint.PredictedContactRestraint` objects self.pred_cont_restraint_groups = IDMapper( self.system.restraint_groups, ihm.restraint.RestraintGroup) #: Mapping from ID to :class:`ihm.geometry.GeometricObject` objects self.geometries = _GeometryIDMapper( self.system.orphan_geometric_objects, ihm.geometry.GeometricObject) #: Mapping from ID to :class:`ihm.geometry.Center` objects self.centers = IDMapper(self.system._orphan_centers, ihm.geometry.Center, *(None,) * 3) #: Mapping from ID to :class:`ihm.geometry.Transformation` objects self.transformations = IDMapper( self.system._orphan_geometric_transforms, ihm.geometry.Transformation, *(None,) * 2) #: Mapping from ID to :class:`ihm.geometry.Transformation` objects #: used by :class:`ihm.dataset.TransformedDataset` objects (this is #: distinct from :attr:`transformations` since they are stored in #: separate tables, with different IDs, in the mmCIF file). self.data_transformations = IDMapper( self.system._orphan_dataset_transforms, ihm.geometry.Transformation, *(None,) * 2) #: Mapping from ID to :class:`ihm.restraint.GeometricRestraint` objects self.geom_restraints = IDMapper( self.system.restraints, ihm.restraint.GeometricRestraint, *(None,) * 4) #: Mapping from ID to :class:`ihm.restraint.CrossLinkRestraint` objects self.xl_restraints = _XLRestraintMapper(self.system.restraints) #: Mapping from ID to groups of #: :class:`ihm.restraint.ExperimentalCrossLink` objects self.experimental_xl_groups = IDMapper(None, list) self.experimental_xl_groups.id_attr = None #: Mapping from ID to :class:`ihm.restraint.ExperimentalCrossLink` #: objects self.experimental_xls = IDMapper( None, ihm.restraint.ExperimentalCrossLink, *(None,) * 2) #: Mapping from ID to :class:`ihm.restraint.CrossLink` self.cross_links = _CrossLinkIDMapper( None, ihm.restraint.CrossLink) #: Mapping from ID to :class:`ihm.restraint.CrossLinkPseudoSite` self.cross_link_pseudo_sites = IDMapper( None, ihm.restraint.CrossLinkPseudoSite, None) #: Mapping from ID to :class:`ihm.model.OrderedProcess` objects self.ordered_procs = IDMapper(self.system.ordered_processes, ihm.model.OrderedProcess, None) #: Mapping from ID to :class:`ihm.model.ProcessStep` objects self.ordered_steps = IDMapper(None, ihm.model.ProcessStep) #: Mapping from ID to :class:`ihm.multi_state_scheme.MultiStateScheme` #: objects self.multi_state_schemes = IDMapper( self.system.multi_state_schemes, ihm.multi_state_scheme.MultiStateScheme, None) #: Mapping from ID to #: :class:`ihm.multi_state_scheme.Connectivity` objects self.multi_state_scheme_connectivities = IDMapper( None, ihm.multi_state_scheme.Connectivity, None) #: Mapping from ID to :class:`ihm.multi_state_scheme.KineticRate` #: objects self.kinetic_rates = IDMapper( None, ihm.multi_state_scheme.KineticRate) #: Mapping from ID to #: :class:`ihm.multi_state_scheme.RelaxationTime` objects self.relaxation_times = IDMapper( self.system._orphan_relaxation_times, ihm.multi_state_scheme.RelaxationTime, *(None,) * 2) # FLR part #: Mapping from ID to :class:`ihm.flr.FLRData` objects self.flr_data = IDMapper(self.system.flr_data, ihm.flr.FLRData) #: Mapping from ID to :class:`ihm.flr.InstSetting` objects self.flr_inst_settings = _FLRIDMapper('_collection_flr_inst_setting', None, self.flr_data, ihm.flr.InstSetting) #: Mapping from ID to :class:`ihm.flr.ExpCondition` objects self.flr_exp_conditions = 
_FLRIDMapper('_collection_flr_exp_condition', None, self.flr_data, ihm.flr.ExpCondition) #: Mapping from ID to :class:`ihm.flr.Instrument` objects self.flr_instruments = _FLRIDMapper('_collection_flr_instrument', None, self.flr_data, ihm.flr.Instrument) #: Mapping from ID to :class:`ihm.flr.EntityAssembly` objects self.flr_entity_assemblies = _FLRIDMapper( '_collection_flr_entity_assembly', None, self.flr_data, ihm.flr.EntityAssembly) #: Mapping from ID to :class:`ihm.flr.SampleCondition` objects self.flr_sample_conditions = _FLRIDMapper( '_collection_flr_sample_condition', None, self.flr_data, ihm.flr.SampleCondition) #: Mapping from ID to :class:`ihm.flr.Sample` objects self.flr_samples = _FLRIDMapper('_collection_flr_sample', None, self.flr_data, ihm.flr.Sample, *(None,) * 6) #: Mapping from ID to :class:`ihm.flr.Experiment` objects self.flr_experiments = _FLRIDMapper('_collection_flr_experiment', None, self.flr_data, ihm.flr.Experiment) #: Mapping from ID to :class:`ihm.flr.Probe` objects self.flr_probes = _FLRIDMapper('_collection_flr_probe', None, self.flr_data, ihm.flr.Probe) #: Mapping from ID to :class:`ihm.flr.PolyProbePosition` objects self.flr_poly_probe_positions = _FLRIDMapper( '_collection_flr_poly_probe_position', None, self.flr_data, ihm.flr.PolyProbePosition, None) #: Mapping from ID to :class:`ihm.flr.SampleProbeDetails` objects self.flr_sample_probe_details = _FLRIDMapper( '_collection_flr_sample_probe_details', None, self.flr_data, ihm.flr.SampleProbeDetails, *(None,) * 5) #: Mapping from ID to :class:`ihm.flr.PolyProbeConjugate` objects self.flr_poly_probe_conjugates = _FLRIDMapper( '_collection_flr_poly_probe_conjugate', 'poly_probe_conjugates', self.flr_data, ihm.flr.PolyProbeConjugate, *(None,) * 4) #: Mapping from ID to :class:`ihm.flr.FRETForsterRadius` objects self.flr_fret_forster_radius = _FLRIDMapper( '_collection_flr_fret_forster_radius', None, self.flr_data, ihm.flr.FRETForsterRadius, *(None,) * 4) #: Mapping from ID to :class:`ihm.flr.FRETCalibrationParameters` #: objects self.flr_fret_calibration_parameters = _FLRIDMapper( '_collection_flr_fret_calibration_parameters', None, self.flr_data, ihm.flr.FRETCalibrationParameters, *(None,) * 8) #: Mapping from ID to :class:`ihm.flr.FRETAnalysis` objects self.flr_fret_analyses = _FLRIDMapper( '_collection_flr_fret_analysis', None, self.flr_data, ihm.flr.FRETAnalysis, *(None,) * 9) #: Mapping from ID to :class:`ihm.flr.LifetimeFitModel` objects self.flr_lifetime_fit_models = _FLRIDMapper( '_collection_flr_lifetime_fit_model', None, self.flr_data, ihm.flr.LifetimeFitModel, *(None,) * 4) #: Mapping from ID to :class:`ihm.flr.RefMeasurementGroup` objects self.flr_ref_measurement_groups = _FLRIDMapper( '_collection_flr_ref_measurement_group', None, self.flr_data, ihm.flr.RefMeasurementGroup, *(None,)) #: Mapping from ID to :class:`ihm.flr.RefMeasurement` objects self.flr_ref_measurements = _FLRIDMapper( '_collection_flr_ref_measurement', None, self.flr_data, ihm.flr.RefMeasurement, *(None,) * 3) #: Mapping from ID to :class:`ihm.flr.RefMeasurementLifetime` objects self.flr_ref_measurement_lifetimes = _FLRIDMapper( '_collection_flr_ref_measurement_lifetime', None, self.flr_data, ihm.flr.RefMeasurementLifetime, *(None,) * 3) #: Mapping from ID to :class:`ihm.flr.PeakAssignment` objects self.flr_peak_assignments = _FLRIDMapper( '_collection_flr_peak_assignment', None, self.flr_data, ihm.flr.PeakAssignment, *(None,) * 2) #: Mapping from ID to :class:`ihm.flr.FRETDistanceRestraint` objects self.flr_fret_distance_restraints 
= _FLRIDMapper( '_collection_flr_fret_distance_restraint', None, self.flr_data, ihm.flr.FRETDistanceRestraint, *(None,) * 10) #: Mapping from ID to :class:`ihm.flr.FRETDistanceRestraintGroup` #: objects self.flr_fret_distance_restraint_groups = _FLRIDMapper( '_collection_flr_fret_distance_restraint_group', 'distance_restraint_groups', self.flr_data, ihm.flr.FRETDistanceRestraintGroup) #: Mapping from ID to :class:`ihm.flr.FRETModelQuality` objects self.flr_fret_model_qualities = _FLRIDMapper( '_collection_flr_fret_model_quality', 'fret_model_qualities', self.flr_data, ihm.flr.FRETModelQuality, *(None,) * 5) #: Mapping from ID to :class:`ihm.flr.FRETModelDistance` objects self.flr_fret_model_distances = _FLRIDMapper( '_collection_flr_fret_model_distance', 'fret_model_distances', self.flr_data, ihm.flr.FRETModelDistance, *(None,) * 4) #: Mapping from ID to :class:`ihm.flr.FPSModeling` objects self.flr_fps_modeling = _FLRIDMapper( '_collection_flr_fps_modeling', None, self.flr_data, ihm.flr.FPSModeling, *(None,) * 5) #: Mapping from ID to :class:`ihm.flr.FPSGlobalParameters` objects self.flr_fps_global_parameters = _FLRIDMapper( '_collection_flr_fps_global_parameters', None, self.flr_data, ihm.flr.FPSGlobalParameters, *(None,) * 20) #: Mapping from ID to :class:`ihm.flr.FPSAVParameter` objects self.flr_fps_av_parameters = _FLRIDMapper( '_collection_flr_fps_av_parameter', None, self.flr_data, ihm.flr.FPSAVParameter, *(None,) * 6) #: Mapping from ID to :class:`ihm.flr.FPSAVModeling` objects self.flr_fps_av_modeling = _FLRIDMapper( '_collection_flr_fps_av_modeling', 'fps_modeling', self.flr_data, ihm.flr.FPSAVModeling, *(None,) * 3) #: Mapping from ID to :class:`ihm.flr.FPSMeanProbePosition` objects self.flr_fps_mean_probe_positions = _FLRIDMapper( '_collection_flr_fps_mean_probe_position', None, self.flr_data, ihm.flr.FPSMeanProbePosition, *(None,) * 4) #: Mapping from ID to :class:`ihm.flr.FPSMPPAtomPositionGroup` objects self.flr_fps_mpp_atom_position_groups = IDMapper( None, ihm.flr.FPSMPPAtomPositionGroup) #: Mapping from ID to :class:`ihm.flr.FPSMPPAtomPosition` objects self.flr_fps_mpp_atom_positions = _FLRIDMapper( '_collection_flr_fps_mpp_atom_position', None, self.flr_data, ihm.flr.FPSMPPAtomPosition, *(None,) * 4) #: Mapping from ID to :class:`ihm.flr.FPSMPPModeling` objects self.flr_fps_mpp_modeling = _FLRIDMapper( '_collection_flr_fps_mpp_modeling', 'fps_modeling', self.flr_data, ihm.flr.FPSMPPModeling, *(None,) * 3) #: Mapping from ID to #: :class:`ihm.flr.KineticRateFretAnalysisConnection` objects self.flr_kinetic_rate_fret_analysis_connection = _FLRIDMapper( '_collection_flr_kinetic_rate_fret_analysis_connection', 'kinetic_rate_fret_analysis_connections', self.flr_data, ihm.flr.KineticRateFretAnalysisConnection, *(None,) * 3) #: Mapping from ID to #: :class:`ihm.flr.RelaxationTimeFretAnalysisConnection` objects self.flr_relaxation_time_fret_analysis_connection = _FLRIDMapper( '_collection_flr_relaxation_time_fret_analysis_connection', 'relaxation_time_fret_analysis_connections', self.flr_data, ihm.flr.RelaxationTimeFretAnalysisConnection, *(None,) * 3) def finalize(self): # make sequence immutable (see also _make_new_entity) for e in self.system.entities: e.sequence = tuple(e.sequence) class Handler: """Base class for all handlers of mmCIF data. Each class handles a single category in the mmCIF or BinaryCIF file. To add a new handler (for example to handle a custom category) make a subclass and set the class attribute `category` to the mmCIF category name (e.g. `_struct`). 
Provide a `__call__` method. This will be called for each category (multiple times for loop constructs) with the parameters to `__call__` filled in with the same-named mmCIF keywords. For example the class:: class CustomHandler(Handler): category = "_custom" def __call__(self, key1, key2: int, key3: float): pass will be called with arguments `"x", 42, 1.0` when given the mmCIF input:: _custom.key1 x _custom.key2 42 _custom.key3 1.0 By default, the arguments will be passed as strings. Type annotations (as above) can be used to get arguments as integers, floating-point values, or booleans, using the annotations `int`, `float`, or `bool` respectively (no other type annotations are permitted). """ #: Value passed to `__call__` for keywords not in the file not_in_file = None #: Value passed to `__call__` for data marked as omitted ('.') in the file omitted = None #: Value passed to `__call__` for data marked as unknown ('?') in the file unknown = ihm.unknown #: Keywords which are explicitly ignored (read() will not warn about their #: presence in the file). These are usually things like ordinal fields #: which we don't use. ignored_keywords = [] def __init__(self, sysr): #: Utility class to map IDs to Python objects. self.sysr = sysr def get_int(self, val): """Return int(val) or leave as is if None or ihm.unknown""" return int(val) if val is not None and val is not ihm.unknown else val def get_int_or_string(self, val): """Return val as an int or str as appropriate, or leave as is if None or ihm.unknown""" if val is None or val is ihm.unknown: return val else: return int(val) if isinstance(val, int) or val.isdigit() else val def get_float(self, val): """Return float(val) or leave as is if None or ihm.unknown""" return (float(val) if val is not None and val is not ihm.unknown else val) _boolmap = {'YES': True, 'NO': False} def get_bool(self, val): """Convert val to bool and return, or leave as is if None or ihm.unknown""" return (self._boolmap.get(val.upper(), None) if val is not None and val is not ihm.unknown else val) def get_lower(self, val): """Return lowercase string val or leave as is if None or ihm.unknown""" return (val.lower() if val is not None and val is not ihm.unknown else val) def finalize(self): """Called at the end of each data block.""" pass def end_save_frame(self): """Called at the end of each save frame.""" pass def _get_asym_or_entity(self, asym_id, entity_id): """Return an :class:`AsymUnit`, or an :class:`Entity` if asym_id is omitted""" asym = self.sysr.asym_units.get_by_id_or_none(asym_id) return asym if asym else self.sysr.entities.get_by_id(entity_id) def copy_if_present(self, obj, data, keys=[], mapkeys={}): """Set obj.x from data['x'] for each x in keys if present in data. 
The dict mapkeys is handled similarly except that its keys are looked up in data and the corresponding value used to set obj.""" for key in keys: d = data.get(key) if d is not None: setattr(obj, key, d) for key, val in mapkeys.items(): d = data.get(key) if d is not None: setattr(obj, val, d) system = property(lambda self: self.sysr.system, doc="The :class:`ihm.System` object to read into") class _CollectionHandler(Handler): category = '_ihm_entry_collection' def __call__(self, id, name, details): c = ihm.Collection(id=id, name=name, details=details) self.system.collections.append(c) class _StructHandler(Handler): category = '_struct' def __call__(self, title, entry_id, pdbx_model_details): self.copy_if_present(self.system, locals(), keys=('title',), mapkeys={'entry_id': 'id', 'pdbx_model_details': 'model_details'}) class _AuditConformHandler(Handler): category = '_audit_conform' def __call__(self, dict_name, dict_version): # Reject old file versions if we can parse the version if dict_name == 'ihm-extension.dic': try: major, minor = [int(x) for x in dict_version.split('.')] if (major, minor) < (1, 0): raise OldFileError( "This version of python-ihm only supports reading " "files that conform to version 1.0 or later of the " "IHM extension dictionary. This file conforms to " "version %s." % dict_version) except ValueError: pass class _SoftwareHandler(Handler): category = '_software' def __call__(self, pdbx_ordinal, name, classification, description, version, type, location, citation_id): s = self.sysr.software.get_by_id(pdbx_ordinal) self.copy_if_present( s, locals(), keys=('name', 'classification', 'description', 'version', 'type', 'location')) s.citation = self.sysr.citations.get_by_id_or_none(citation_id) class _CitationHandler(Handler): category = '_citation' def __call__(self, id, title, year, pdbx_database_id_pubmed, journal_abbrev, journal_volume, pdbx_database_id_doi, page_first, page_last): s = self.sysr.citations.get_by_id(id) s.is_primary = (id == 'primary') self.copy_if_present( s, locals(), keys=('title', 'year'), mapkeys={'pdbx_database_id_pubmed': 'pmid', 'journal_abbrev': 'journal', 'journal_volume': 'volume', 'pdbx_database_id_doi': 'doi'}) if page_first is not None: if page_last is not None: s.page_range = (page_first, page_last) else: s.page_range = page_first class _AuditAuthorHandler(Handler): category = '_audit_author' ignored_keywords = ['pdbx_ordinal'] def __call__(self, name): self.system.authors.append(name) class _AuditRevisionHistoryHandler(Handler): category = '_pdbx_audit_revision_history' def __call__(self, ordinal, data_content_type, major_revision: int, minor_revision: int, revision_date): r = self.sysr.revisions.get_by_id(ordinal) r.data_content_type = data_content_type r.major = major_revision r.minor = minor_revision r.date = util._get_iso_date(revision_date) class _AuditRevisionDetailsHandler(Handler): category = '_pdbx_audit_revision_details' def __call__(self, revision_ordinal, provider, type, description): r = self.sysr.revisions.get_by_id(revision_ordinal) d = ihm.RevisionDetails(provider=provider, type=type, description=description) r.details.append(d) class _AuditRevisionGroupHandler(Handler): category = '_pdbx_audit_revision_group' def __call__(self, revision_ordinal, group): r = self.sysr.revisions.get_by_id(revision_ordinal) r.groups.append(group) class _AuditRevisionCategoryHandler(Handler): category = '_pdbx_audit_revision_category' def __call__(self, revision_ordinal, category): r = self.sysr.revisions.get_by_id(revision_ordinal) 
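        # Illustrative input (hypothetical values): this handler is invoked
        # once per row of a loop such as
        #
        #     loop_
        #     _pdbx_audit_revision_category.revision_ordinal
        #     _pdbx_audit_revision_category.category
        #     1 atom_site
        #     1 ihm_model_list
        #
        # with each category name appended to the matching Revision below.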
r.categories.append(category) class _AuditRevisionItemHandler(Handler): category = '_pdbx_audit_revision_item' def __call__(self, revision_ordinal, item): r = self.sysr.revisions.get_by_id(revision_ordinal) r.items.append(item) class _DataUsageHandler(Handler): category = '_pdbx_data_usage' # Map type to corresponding subclass of ihm.DataUsage _type_map = dict((x[1].type.lower(), x[1]) for x in inspect.getmembers(ihm, inspect.isclass) if issubclass(x[1], ihm.DataUsage)) def __call__(self, type, name, details, url): typ = type.lower() if type else 'other' cls = self._type_map.get(typ, ihm.DataUsage) self.system.data_usage.append(cls(details=details, name=name, url=url)) class _GrantHandler(Handler): category = '_pdbx_audit_support' def __call__(self, funding_organization, country, grant_number): g = ihm.Grant(funding_organization=funding_organization, country=country, grant_number=grant_number) self.system.grants.append(g) class _CitationAuthorHandler(Handler): category = '_citation_author' ignored_keywords = ['ordinal'] def __call__(self, citation_id, name): s = self.sysr.citations.get_by_id(citation_id) if name is not None: s.authors.append(name) class _DatabaseHandler(Handler): category = '_database_2' def __call__(self, database_code, database_id, pdbx_doi, pdbx_database_accession): d = ihm.Database(id=database_id, code=database_code, doi=pdbx_doi, accession=pdbx_database_accession) self.system.databases.append(d) class _DatabaseStatusHandler(Handler): category = '_pdbx_database_status' # placeholder; the reader will otherwise only return strings or None not_in_file = 0 _keys = ['entry_id', 'sg_entry', 'author_approval_type', 'author_release_status_code', 'date_author_approval', 'date_author_release_request', 'date_begin_deposition', 'date_begin_processing', 'date_begin_release_preparation', 'date_chemical_shifts', 'date_coordinates', 'date_deposition_form', 'date_end_processing', 'date_hold_chemical_shifts', 'date_hold_coordinates', 'date_hold_nmr_constraints', 'date_hold_struct_fact', 'date_manuscript', 'date_nmr_constraints', 'date_of_pdb_release', 'date_of_cs_release', 'date_of_mr_release', 'date_of_sf_release', 'date_struct_fact', 'date_submitted', 'dep_release_code_chemical_shifts', 'dep_release_code_coordinates', 'dep_release_code_nmr_constraints', 'dep_release_code_sequence', 'dep_release_code_struct_fact', 'deposit_site', 'hold_for_publication', 'methods_development_category', 'name_depositor', 'pdb_date_of_author_approval', 'pdb_format_compatible', 'process_site', 'rcsb_annotator', 'recvd_author_approval', 'recvd_chemical_shifts', 'recvd_coordinates', 'recvd_deposit_form', 'recvd_initial_deposition_date', 'recvd_internal_approval', 'recvd_manuscript', 'recvd_nmr_constraints', 'recvd_struct_fact', 'status_code', 'status_code_cs', 'status_code_mr', 'status_code_sf'] def __call__(self, *args): # Just pass through all data items present in the file, as a dict self.system.database_status._map = dict( (k, v) for (k, v) in zip(self._keys, args) if v != self.not_in_file) class _ChemCompHandler(Handler): category = '_chem_comp' def __init__(self, *args): super().__init__(*args) # Map _chem_comp.type to corresponding subclass of ihm.ChemComp self.type_map = dict((x[1].type.lower(), x[1]) for x in inspect.getmembers(ihm, inspect.isclass) if issubclass(x[1], ihm.ChemComp)) def __call__(self, type, id, name, formula): typ = 'other' if type is None else type.lower() s = self.sysr.chem_comps.get_by_id( id, self.type_map.get(typ, ihm.ChemComp)) self.copy_if_present(s, locals(), keys=('name', 
'formula')) class _ChemDescriptorHandler(Handler): category = '_ihm_chemical_component_descriptor' def __call__(self, id, auth_name, chemical_name, common_name, smiles, smiles_canonical, inchi, inchi_key): d = self.sysr.chem_descriptors.get_by_id(id) self.copy_if_present( d, locals(), keys=('auth_name', 'chemical_name', 'common_name', 'smiles', 'smiles_canonical', 'inchi', 'inchi_key')) class _EntityHandler(Handler): category = '_entity' def __init__(self, *args): super().__init__(*args) self.src_map = dict( (x[1].src_method.lower(), x[1]) for x in inspect.getmembers(ihm.source, inspect.isclass) if issubclass(x[1], ihm.source.Source) and x[1] is not ihm.source.Source) def __call__(self, id, details, type, src_method, formula_weight, pdbx_description, pdbx_number_of_molecules): s = self.sysr.entities.get_by_id(id) self.copy_if_present( s, locals(), keys=('details',), mapkeys={'pdbx_description': 'description', 'pdbx_number_of_molecules': 'number_of_molecules'}) if src_method: source_cls = self.src_map.get(src_method.lower(), None) if source_cls and s.source is None: s.source = source_cls() # Force polymer if _entity.type says so, even if it doesn't look like # one (e.g. just a single amino acid) if type and type.lower() == 'polymer': s._force_polymer = True # Encourage branched if _entity.type says so (otherwise empty entities # are assumed to be polymer) if type and type.lower() == 'branched': s._hint_branched = True class _EntitySrcNatHandler(Handler): category = '_entity_src_nat' def __call__(self, entity_id, pdbx_src_id, pdbx_ncbi_taxonomy_id, pdbx_organism_scientific, common_name, strain): e = self.sysr.entities.get_by_id(entity_id) s = self.sysr.src_nats.get_by_id(pdbx_src_id) s.ncbi_taxonomy_id = pdbx_ncbi_taxonomy_id s.scientific_name = pdbx_organism_scientific s.common_name = common_name s.strain = strain e.source = s class _EntitySrcSynHandler(Handler): category = '_pdbx_entity_src_syn' # Note that _pdbx_entity_src_syn.strain is not used in current PDB entries def __call__(self, entity_id, pdbx_src_id, ncbi_taxonomy_id, organism_scientific, organism_common_name): e = self.sysr.entities.get_by_id(entity_id) s = self.sysr.src_syns.get_by_id(pdbx_src_id) s.ncbi_taxonomy_id = ncbi_taxonomy_id s.scientific_name = organism_scientific s.common_name = organism_common_name e.source = s class _StructRefHandler(Handler): category = '_struct_ref' def __init__(self, *args): super().__init__(*args) # Map db_name to subclass of ihm.reference.Sequence self.type_map = dict( (x[1]._db_name.lower(), x[1]) for x in inspect.getmembers(ihm.reference, inspect.isclass) if issubclass(x[1], ihm.reference.Sequence) and x[1] is not ihm.reference.Sequence) def __call__(self, id, entity_id, db_name, db_code, pdbx_db_accession, pdbx_seq_one_letter_code, details): # todo: handle things that aren't sequences e = self.sysr.entities.get_by_id(entity_id) typ = self.type_map.get(db_name.lower()) ref = self.sysr.references.get_by_id(id, typ) # Strip newlines and whitespace from code if pdbx_seq_one_letter_code not in (None, ihm.unknown): pdbx_seq_one_letter_code \ = pdbx_seq_one_letter_code.replace('\n', '').replace(' ', '') self.copy_if_present( ref, locals(), keys=('db_name', 'db_code', 'details'), mapkeys={'pdbx_db_accession': 'accession', 'pdbx_seq_one_letter_code': '_partial_sequence'}) e.references.append(ref) def finalize(self): # The mmCIF file only contains the subset of the sequence that # overlaps with our entities, but we need the full sequence. 
Pad it # out with gaps if necessary so that indexing works correctly. for e in self.system.entities: for r in e.references: if hasattr(r, '_partial_sequence'): if r._partial_sequence in (None, ihm.unknown): r.sequence = r._partial_sequence else: db_begin = min(a.db_begin for a in r._get_alignments()) r.sequence = '-' * (db_begin - 1) + r._partial_sequence del r._partial_sequence class _StructRefSeqHandler(Handler): category = '_struct_ref_seq' def __call__(self, align_id, ref_id, seq_align_beg: int, seq_align_end: int, db_align_beg: int, db_align_end: int): ref = self.sysr.references.get_by_id(ref_id) align = self.sysr.alignments.get_by_id(align_id) align.db_begin = db_align_beg align.db_end = db_align_end align.entity_begin = seq_align_beg align.entity_end = seq_align_end ref.alignments.append(align) class _StructRefSeqDifHandler(Handler): category = '_struct_ref_seq_dif' def __call__(self, align_id, seq_num: int, db_mon_id, mon_id, details): align = self.sysr.alignments.get_by_id(align_id) db_monomer = self.sysr.chem_comps.get_by_id_or_none(db_mon_id) monomer = self.sysr.chem_comps.get_by_id_or_none(mon_id) sd = ihm.reference.SeqDif(seq_num, db_monomer, monomer, details) align.seq_dif.append(sd) class _EntitySrcGenHandler(Handler): category = '_entity_src_gen' def __call__(self, entity_id, pdbx_src_id, pdbx_gene_src_ncbi_taxonomy_id, pdbx_gene_src_scientific_name, gene_src_common_name, gene_src_strain, pdbx_host_org_ncbi_taxonomy_id, pdbx_host_org_scientific_name, host_org_common_name, pdbx_host_org_strain): e = self.sysr.entities.get_by_id(entity_id) s = self.sysr.src_gens.get_by_id(pdbx_src_id) s.gene = ihm.source.Details( ncbi_taxonomy_id=pdbx_gene_src_ncbi_taxonomy_id, scientific_name=pdbx_gene_src_scientific_name, common_name=gene_src_common_name, strain=gene_src_strain) s.host = ihm.source.Details( ncbi_taxonomy_id=pdbx_host_org_ncbi_taxonomy_id, scientific_name=pdbx_host_org_scientific_name, common_name=host_org_common_name, strain=pdbx_host_org_strain) e.source = s class _EntityPolySeqHandler(Handler): category = '_entity_poly_seq' def __call__(self, entity_id, num, mon_id): s = self.sysr.entities.get_by_id(entity_id) seq_id = int(num) if seq_id > len(s.sequence): s.sequence.extend([None] * (seq_id - len(s.sequence))) s.sequence[seq_id - 1] = self.sysr.chem_comps.get_by_id(mon_id) class _EntityPolyHandler(Handler): category = '_entity_poly' def __init__(self, *args): super().__init__(*args) self._entity_info = {} def __call__(self, entity_id, type, pdbx_seq_one_letter_code, pdbx_seq_one_letter_code_can): class EntityInfo: pass e = EntityInfo() e.one_letter = tuple(util._get_codes(pdbx_seq_one_letter_code)) e.one_letter_can = tuple(util._get_codes(pdbx_seq_one_letter_code_can)) e.sequence_type = type self._entity_info[entity_id] = e def finalize(self): for e in self.system.entities: ei = self._entity_info.get(e._id, None) if ei is None: continue # Fill in missing information (one-letter codes) for nonstandard # residues # todo: also add info for residues that aren't in entity_poly_seq # at all for i, comp in enumerate(e.sequence): if comp.code is None and i < len(ei.one_letter): comp.code = ei.one_letter[i] if (comp.code_canonical is None and i < len(ei.one_letter_can)): comp.code_canonical = ei.one_letter_can[i] class _EntityPolySegmentHandler(Handler): category = '_ihm_entity_poly_segment' def __call__(self, id, seq_id_begin: int, seq_id_end: int): self.sysr.ranges.set(id, seq_id_begin, seq_id_end) class _EntityNonPolyHandler(Handler): category = '_pdbx_entity_nonpoly' def 
__call__(self, entity_id, comp_id): s = self.sysr.entities.get_by_id(entity_id) s.sequence = (self.sysr.chem_comps.get_by_id(comp_id),) class _StructAsymHandler(Handler): category = '_struct_asym' def __call__(self, id, entity_id, details): s = self.sysr.asym_units.get_by_id(id) # Keep this ID (like a user-assigned ID); don't reassign it on output s.id = id s.entity = self.sysr.entities.get_by_id(entity_id) self.copy_if_present(s, locals(), keys=('details',)) class _AssemblyHandler(Handler): category = '_ihm_struct_assembly' def __call__(self, id, name, description): s = self.sysr.assemblies.get_by_id(id) self.copy_if_present(s, locals(), keys=('name', 'description')) class _AssemblyDetailsHandler(Handler): category = '_ihm_struct_assembly_details' ignored_keywords = ['ordinal_id', 'entity_description'] def __init__(self, *args): super().__init__(*args) self._read_args = [] def __call__(self, assembly_id, parent_assembly_id, entity_poly_segment_id, asym_id, entity_id): a_id = assembly_id a = self.sysr.assemblies.get_by_id(a_id) parent_id = parent_assembly_id if parent_id and parent_id != a_id and not a.parent: a.parent = self.sysr.assemblies.get_by_id(parent_id) if asym_id: obj = self.sysr.asym_units.get_by_id(asym_id) else: obj = self.sysr.entities.get_by_id(entity_id) # Postpone filling in range until finalize time, as we may not have # read segments yet self._read_args.append((a, obj, entity_poly_segment_id)) def finalize(self): for (a, obj, entity_poly_segment_id) in self._read_args: a.append(self.sysr.ranges.get(obj, entity_poly_segment_id)) self.system._make_complete_assembly() # The order of components should not matter, so put in a consistent # order so we can compare against other assemblies complete = sorted(self.system.complete_assembly, key=lambda x: id(x)) for a in self.system.orphan_assemblies: # Any EntityRange or AsymUnitRange which covers an entire entity, # replace with Entity or AsymUnit object a[:] = [self._handle_component(x) for x in a] # If the input file defines the complete assembly, transfer # user-provided info to system.complete_assembly if sorted(a, key=lambda x: id(x)) == complete: self.system.complete_assembly.name = a.name self.system.complete_assembly.description = a.description def _handle_component(self, comp): if isinstance(comp, ihm.EntityRange) \ and comp.seq_id_range == comp.entity.seq_id_range: return comp.entity if isinstance(comp, ihm.AsymUnitRange) \ and comp.seq_id_range == comp.asym.seq_id_range: return comp.asym else: return comp class _LocalFiles(ihm.location.Repository): """Placeholder for files stored locally""" reference_provider = None reference_type = 'Supplementary Files' reference = None refers_to = 'Other' url = None class _ExtRefHandler(Handler): category = '_ihm_external_reference_info' def __init__(self, *args): super().__init__(*args) self.type_map = {'doi': ihm.location.Repository, 'supplementary files': _LocalFiles} def __call__(self, reference_id, reference_type, reference, associated_url, details): ref_id = reference_id typ = 'doi' if reference_type is None else reference_type.lower() repo = self.sysr.repos.get_by_id( ref_id, self.type_map.get(typ, ihm.location.Repository)) self.copy_if_present( repo, locals(), keys=('details',), mapkeys={'reference': 'doi', 'associated_url': 'url'}) def finalize(self): # Map use of placeholder _LocalFiles repository to repo=None for location in self.system.locations: if hasattr(location, 'repo') \ and isinstance(location.repo, _LocalFiles): location.repo = None class _ExtFileHandler(Handler): 
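    # Illustrative note: like several handlers in this module, the __init__
    # below builds a map from the dictionary-defined content_type string to
    # the matching ihm.location.FileLocation subclass (e.g. a content type
    # describing input data selects ihm.location.InputFileLocation), with
    # plain FileLocation used as the fallback for values it does not
    # recognize.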
category = '_ihm_external_files' def __init__(self, *args): super().__init__(*args) # Map _ihm_external_files.content_type to corresponding # subclass of ihm.location.FileLocation self.type_map = dict( (x[1].content_type.lower(), x[1]) for x in inspect.getmembers(ihm.location, inspect.isclass) if issubclass(x[1], ihm.location.FileLocation) and x[1] is not ihm.location.FileLocation) def __call__(self, content_type, id, reference_id, details, file_path, file_format, file_size_bytes): typ = None if content_type is None else content_type.lower() f = self.sysr.external_files.get_by_id( id, self.type_map.get(typ, ihm.location.FileLocation)) f.repo = self.sysr.repos.get_by_id(reference_id) # IHMCIF dictionary defines file size as a float, although only int # values make sense, so allow for either ints or floats here try: f.file_size = self.get_int(file_size_bytes) except ValueError: f.file_size = self.get_float(file_size_bytes) self.copy_if_present( f, locals(), keys=['details', 'file_format'], mapkeys={'file_path': 'path'}) # Handle DOI that is itself a file if file_path is None: f.path = '.' class _DatasetListHandler(Handler): category = '_ihm_dataset_list' def __init__(self, *args): super().__init__(*args) # Map data_type to corresponding # subclass of ihm.dataset.Dataset self.type_map = dict( (x[1].data_type.lower(), x[1]) for x in inspect.getmembers(ihm.dataset, inspect.isclass) if issubclass(x[1], ihm.dataset.Dataset)) # Map old 'CX-MS' data to new class self.type_map['cx-ms data'] = ihm.dataset.CXMSDataset def __call__(self, data_type, id, details): typ = None if data_type is None else data_type.lower() f = self.sysr.datasets.get_by_id( id, self.type_map.get(typ, ihm.dataset.Dataset)) f.details = details f._allow_duplicates = True class _DatasetGroupHandler(Handler): category = '_ihm_dataset_group' ignored_keywords = ['ordinal_id'] def __call__(self, id, name, application, details): g = self.sysr.dataset_groups.get_by_id(id) self.copy_if_present(g, locals(), keys=('name', 'application', 'details')) class _DatasetGroupLinkHandler(Handler): category = '_ihm_dataset_group_link' def __call__(self, group_id, dataset_list_id): g = self.sysr.dataset_groups.get_by_id(group_id) ds = self.sysr.datasets.get_by_id(dataset_list_id) g.append(ds) class _DatasetExtRefHandler(Handler): category = '_ihm_dataset_external_reference' def __call__(self, file_id, dataset_list_id): ds = self.sysr.datasets.get_by_id(dataset_list_id) f = self.sysr.external_files.get_by_id(file_id) ds._add_location(f) class _DatasetDBRefHandler(Handler): category = '_ihm_dataset_related_db_reference' def __init__(self, *args): super().__init__(*args) # Map data_type to corresponding # subclass of ihm.location.DatabaseLocation # or ihm.location.DatabaseLocation itself self.type_map = dict( (x[1].db_name.lower(), x[1]) for x in inspect.getmembers(ihm.location, inspect.isclass) if issubclass(x[1], ihm.location.DatabaseLocation)) def __call__(self, dataset_list_id, db_name, id, version, details, accession_code): ds = self.sysr.datasets.get_by_id(dataset_list_id) typ = None if db_name is None else db_name.lower() dbloc = self.sysr.db_locations.get_by_id(id, self.type_map.get(typ, None)) # Preserve user-provided name for unknown databases if dbloc.db_name == 'Other' and db_name is not None: dbloc.db_name = db_name ds._add_location(dbloc) self.copy_if_present( dbloc, locals(), keys=['version', 'details'], mapkeys={'accession_code': 'access_code'}) class _DataTransformationHandler(Handler): category = '_ihm_data_transformation' def 
__call__(self, id, tr_vector1, tr_vector2, tr_vector3, rot_matrix11, rot_matrix21, rot_matrix31, rot_matrix12, rot_matrix22, rot_matrix32, rot_matrix13, rot_matrix23, rot_matrix33): t = self.sysr.data_transformations.get_by_id(id) t.rot_matrix = _get_matrix33(locals(), 'rot_matrix') t.tr_vector = _get_vector3(locals(), 'tr_vector') class _RelatedDatasetsHandler(Handler): category = '_ihm_related_datasets' ignored_keywords = ['ordinal_id'] def __call__(self, dataset_list_id_derived, dataset_list_id_primary, transformation_id): derived = self.sysr.datasets.get_by_id(dataset_list_id_derived) primary = self.sysr.datasets.get_by_id(dataset_list_id_primary) trans = self.sysr.data_transformations.get_by_id_or_none( transformation_id) if trans: primary = ihm.dataset.TransformedDataset( dataset=primary, transform=trans) derived.parents.append(primary) class _ModelRepresentationHandler(Handler): category = '_ihm_model_representation' def __call__(self, id, name, details): rep = self.sysr.representations.get_by_id(id) self.copy_if_present(rep, locals(), keys=('name', 'details')) def _make_atom_segment(asym, rigid, primitive, count, smodel, description): return ihm.representation.AtomicSegment( asym_unit=asym, rigid=rigid, starting_model=smodel, description=description) def _make_residue_segment(asym, rigid, primitive, count, smodel, description): return ihm.representation.ResidueSegment( asym_unit=asym, rigid=rigid, primitive=primitive, starting_model=smodel, description=description) def _make_multi_residue_segment(asym, rigid, primitive, count, smodel, description): return ihm.representation.MultiResidueSegment( asym_unit=asym, rigid=rigid, primitive=primitive, starting_model=smodel, description=description) def _make_feature_segment(asym, rigid, primitive, count, smodel, description): return ihm.representation.FeatureSegment( asym_unit=asym, rigid=rigid, primitive=primitive, count=count, starting_model=smodel, description=description) class _ModelRepresentationDetailsHandler(Handler): category = '_ihm_model_representation_details' ignored_keywords = ['entity_description'] _rigid_map = {'rigid': True, 'flexible': False, None: None} _segment_factory = {'by-atom': _make_atom_segment, 'by-residue': _make_residue_segment, 'multi-residue': _make_multi_residue_segment, 'by-feature': _make_feature_segment} def __init__(self, *args): super().__init__(*args) self._read_args = [] def __call__(self, entity_asym_id, entity_poly_segment_id, representation_id, starting_model_id, model_object_primitive, model_granularity, model_object_count: int, model_mode, description): # Postpone until finalize time as we may not have segments yet self._read_args.append( (entity_asym_id, entity_poly_segment_id, representation_id, starting_model_id, model_object_primitive, model_granularity, model_object_count, model_mode, description)) def finalize(self): for (entity_asym_id, entity_poly_segment_id, representation_id, starting_model_id, model_object_primitive, model_granularity, model_object_count, model_mode, description) in self._read_args: asym = self.sysr.ranges.get( self.sysr.asym_units.get_by_id(entity_asym_id), entity_poly_segment_id) rep = self.sysr.representations.get_by_id(representation_id) smodel = self.sysr.starting_models.get_by_id_or_none( starting_model_id) primitive = self.get_lower(model_object_primitive) gran = self.get_lower(model_granularity) primitive = self.get_lower(model_object_primitive) rigid = self._rigid_map[self.get_lower(model_mode)] segment = self._segment_factory[gran]( asym, rigid, primitive, 
model_object_count, smodel, description) rep.append(segment) # todo: support user subclass of StartingModel, pass it coordinates, seqdif class _StartingModelDetailsHandler(Handler): category = '_ihm_starting_model_details' ignored_keywords = ['entity_description'] def __call__(self, starting_model_id, asym_id, entity_poly_segment_id, dataset_list_id, starting_model_auth_asym_id, starting_model_sequence_offset, description): m = self.sysr.starting_models.get_by_id(starting_model_id) # We might not have a suitable range yet for this ID, so fill this # in at finalize time m.asym_unit = (asym_id, entity_poly_segment_id) m.dataset = self.sysr.datasets.get_by_id(dataset_list_id) self.copy_if_present( m, locals(), keys=('description',), mapkeys={'starting_model_auth_asym_id': 'asym_id'}) if starting_model_sequence_offset is not None: m.offset = int(starting_model_sequence_offset) def finalize(self): for m in self.sysr.system.orphan_starting_models: # Skip any auto-generated models without range info if m.asym_unit is None: continue # Replace tuple with real Asym/Entity range object (asym_id, entity_poly_segment_id) = m.asym_unit m.asym_unit = self.sysr.ranges.get( self.sysr.asym_units.get_by_id(asym_id), entity_poly_segment_id) class _StartingComputationalModelsHandler(Handler): category = '_ihm_starting_computational_models' def __call__(self, starting_model_id, script_file_id, software_id): m = self.sysr.starting_models.get_by_id(starting_model_id) if script_file_id is not None: m.script_file = self.sysr.external_files.get_by_id(script_file_id) if software_id is not None: m.software = self.sysr.software.get_by_id(software_id) class _StartingComparativeModelsHandler(Handler): category = '_ihm_starting_comparative_models' ignored_keywords = ['ordinal_id'] def __call__(self, starting_model_id, template_dataset_list_id, alignment_file_id, template_auth_asym_id, starting_model_seq_id_begin: int, starting_model_seq_id_end: int, template_seq_id_begin: int, template_seq_id_end: int, template_sequence_identity: float, template_sequence_identity_denominator: int): m = self.sysr.starting_models.get_by_id(starting_model_id) dataset = self.sysr.datasets.get_by_id(template_dataset_list_id) aln = self.sysr.external_files.get_by_id_or_none(alignment_file_id) asym_id = template_auth_asym_id seq_id_range = (starting_model_seq_id_begin, starting_model_seq_id_end) template_seq_id_range = (template_seq_id_begin, template_seq_id_end) identity = ihm.startmodel.SequenceIdentity( template_sequence_identity, template_sequence_identity_denominator) t = ihm.startmodel.Template( dataset, asym_id, seq_id_range, template_seq_id_range, identity, aln) m.templates.append(t) class _ProtocolHandler(Handler): category = '_ihm_modeling_protocol' ignored_keywords = ['ordinal_id', 'struct_assembly_description'] def __call__(self, id, protocol_name, num_steps, details): p = self.sysr.protocols.get_by_id(id) self.copy_if_present(p, locals(), mapkeys={'protocol_name': 'name'}, keys=['details']) class _ProtocolDetailsHandler(Handler): category = '_ihm_modeling_protocol_details' def __call__(self, protocol_id, step_id, num_models_begin: int, num_models_end: int, multi_scale_flag: bool, multi_state_flag: bool, ordered_flag: bool, ensemble_flag: bool, struct_assembly_id, dataset_group_id, software_id, script_file_id, step_name, step_method, description): p = self.sysr.protocols.get_by_id(protocol_id) assembly = self.sysr.assemblies.get_by_id_or_none(struct_assembly_id) dg = self.sysr.dataset_groups.get_by_id_or_none(dataset_group_id) 
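        # Illustrative note: the ": bool" annotations in __call__ mean the
        # flag values have already been converted from mmCIF 'YES'/'NO'
        # strings by the Handler machinery, so a hypothetical row with
        #
        #     _ihm_modeling_protocol_details.protocol_id       1
        #     _ihm_modeling_protocol_details.step_id           1
        #     _ihm_modeling_protocol_details.multi_scale_flag  YES
        #
        # arrives here with multi_scale_flag=True; the various *_id values
        # are turned into objects by the get_by_id_or_none() calls above
        # and below.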
software = self.sysr.software.get_by_id_or_none(software_id) script = self.sysr.external_files.get_by_id_or_none(script_file_id) s = ihm.protocol.Step(assembly=assembly, dataset_group=dg, method=None, num_models_begin=num_models_begin, num_models_end=num_models_end, multi_scale=multi_scale_flag, multi_state=multi_state_flag, ordered=ordered_flag, ensemble=ensemble_flag, software=software, script_file=script, description=description) s._id = step_id self.copy_if_present( s, locals(), mapkeys={'step_name': 'name', 'step_method': 'method'}) p.steps.append(s) class _PostProcessHandler(Handler): category = '_ihm_modeling_post_process' def __init__(self, *args): super().__init__(*args) # Map _ihm_modeling_post_process.type to corresponding subclass # of ihm.analysis.Step self.type_map = dict((x[1].type.lower(), x[1]) for x in inspect.getmembers(ihm.analysis, inspect.isclass) if issubclass(x[1], ihm.analysis.Step) and x[1] is not ihm.analysis.Step) def __call__(self, protocol_id, analysis_id, type, id, num_models_begin: int, num_models_end: int, struct_assembly_id, dataset_group_id, software_id, script_file_id, feature, details): protocol = self.sysr.protocols.get_by_id(protocol_id) analysis = self.sysr.analyses.get_by_id((protocol_id, analysis_id)) if analysis._id not in [a._id for a in protocol.analyses]: protocol.analyses.append(analysis) typ = type.lower() if type is not None else 'other' step = self.sysr.analysis_steps.get_by_id( id, self.type_map.get(typ, ihm.analysis.Step)) analysis.steps.append(step) step.details = details if typ == 'none': # If this step was forward referenced, feature will have been set # to Python None - set it to explicit 'none' instead step.feature = 'none' else: step.num_models_begin = num_models_begin step.num_models_end = num_models_end step.assembly = self.sysr.assemblies.get_by_id_or_none( struct_assembly_id) step.dataset_group = self.sysr.dataset_groups.get_by_id_or_none( dataset_group_id) step.software = self.sysr.software.get_by_id_or_none(software_id) step.script_file = self.sysr.external_files.get_by_id_or_none( script_file_id) # Default to "other" if invalid method/feature read try: self.copy_if_present(step, locals(), keys=['feature']) except ValueError: step.feature = "other" class _ModelListHandler(Handler): category = '_ihm_model_list' def __call__(self, model_id, model_name, assembly_id, representation_id, protocol_id): model = self.sysr.models.get_by_id(model_id) self.copy_if_present(model, locals(), mapkeys={'model_name': 'name'}) model.assembly = self.sysr.assemblies.get_by_id_or_none(assembly_id) model.representation = self.sysr.representations.get_by_id_or_none( representation_id) model.protocol = self.sysr.protocols.get_by_id_or_none(protocol_id) class _ModelGroupHandler(Handler): category = '_ihm_model_group' def __call__(self, id, name, details): model_group = self.sysr.model_groups.get_by_id(id) self.copy_if_present(model_group, locals(), keys=('name', 'details')) def finalize(self): # Put all model groups not assigned to a state in their own state model_groups_in_states = set() for sg in self.system.state_groups: for state in sg: for model_group in state: model_groups_in_states.add(model_group._id) mgs = [mg for mgid, mg in self.sysr.model_groups._obj_by_id.items() if mgid not in model_groups_in_states] if mgs: s = ihm.model.State(mgs) self.system.state_groups.append(ihm.model.StateGroup([s])) # Put all models not in a group in their own group in its own state # (e.g. 
this will catch models from a non-IHM file) models_in_groups = set() for mg in self.sysr.model_groups._obj_by_id.values(): for m in mg: models_in_groups.add(m._id) ms = [m for mid, m in self.sysr.models._obj_by_id.items() if mid not in models_in_groups] if ms: mg = ihm.model.ModelGroup(ms) s = ihm.model.State([mg]) self.system.state_groups.append(ihm.model.StateGroup([s])) class _ModelGroupLinkHandler(Handler): category = '_ihm_model_group_link' def __call__(self, group_id, model_id): model_group = self.sysr.model_groups.get_by_id(group_id) model = self.sysr.models.get_by_id(model_id) model_group.append(model) class _ModelRepresentativeHandler(Handler): category = '_ihm_model_representative' def __call__(self, model_group_id, model_id, selection_criteria): model_group = self.sysr.model_groups.get_by_id(model_group_id) model = self.sysr.models.get_by_id(model_id) # Default to "other" if invalid criteria read try: rep = ihm.model.ModelRepresentative(model, selection_criteria) except ValueError: rep = ihm.model.ModelRepresentative(model, "other selction criteria") model_group.representatives.append(rep) class _MultiStateHandler(Handler): category = '_ihm_multi_state_modeling' def __call__(self, state_group_id, state_id, population_fraction: float, experiment_type, details, state_name, state_type): state_group = self.sysr.state_groups.get_by_id(state_group_id) state = self.sysr.states.get_by_id(state_id) state_group.append(state) state.population_fraction = population_fraction self.copy_if_present( state, locals(), keys=['experiment_type', 'details'], mapkeys={'state_name': 'name', 'state_type': 'type'}) class _MultiStateLinkHandler(Handler): category = '_ihm_multi_state_model_group_link' def __call__(self, state_id, model_group_id): state = self.sysr.states.get_by_id(state_id) model_group = self.sysr.model_groups.get_by_id(model_group_id) state.append(model_group) class _EnsembleHandler(Handler): category = '_ihm_ensemble_info' # Map subsample type to corresponding subclass _type_map = dict((x[1].sub_sampling_type.lower(), x[1]) for x in inspect.getmembers(ihm.model, inspect.isclass) if issubclass(x[1], ihm.model.Subsample)) def __call__(self, ensemble_id, model_group_id, post_process_id, ensemble_file_id, num_ensemble_models: int, ensemble_precision_value: float, ensemble_name, ensemble_clustering_method, ensemble_clustering_feature, details, sub_sampling_type, num_ensemble_models_deposited: int, model_group_superimposed_flag: bool): ensemble = self.sysr.ensembles.get_by_id(ensemble_id) mg = self.sysr.model_groups.get_by_id_or_none(model_group_id) pp = self.sysr.analysis_steps.get_by_id_or_none(post_process_id) f = self.sysr.external_files.get_by_id_or_none(ensemble_file_id) ensemble.model_group = mg ensemble.num_models = num_ensemble_models ensemble._num_deposited = num_ensemble_models_deposited ensemble.precision = ensemble_precision_value if sub_sampling_type: ensemble._sub_sampling_type = sub_sampling_type.lower() # note that num_ensemble_models_deposited is ignored (should be size of # model group anyway) ensemble.post_process = pp ensemble.file = f ensemble.details = details ensemble.superimposed = model_group_superimposed_flag # Default to "other" if invalid method/feature read try: ensemble.clustering_method = ensemble_clustering_method except ValueError: ensemble.clustering_method = "Other" try: ensemble.clustering_feature = ensemble_clustering_feature except ValueError: ensemble.clustering_feature = "other" self.copy_if_present( ensemble, locals(), mapkeys={'ensemble_name': 
'name'}) def finalize(self): for e in self.sysr.system.ensembles: if hasattr(e, '_sub_sampling_type'): t = self._type_map.get(e._sub_sampling_type, ihm.model.Subsample) for s in e.subsamples: s.__class__ = t del e._sub_sampling_type class _NotModeledResidueRangeHandler(Handler): category = '_ihm_residues_not_modeled' def __call__(self, model_id, asym_id, seq_id_begin, seq_id_end, reason): model = self.sysr.models.get_by_id(model_id) asym = self.sysr.asym_units.get_by_id(asym_id) # Allow for out-of-range seq_ids for now rr = ihm.model.NotModeledResidueRange( asym, int(seq_id_begin), int(seq_id_end)) # Default to "Other" if invalid reason read try: rr.reason = reason except ValueError: rr.reason = "Other" model.not_modeled_residue_ranges.append(rr) class _SubsampleHandler(Handler): category = '_ihm_ensemble_sub_sample' def __call__(self, name, ensemble_id, num_models: int, model_group_id, file_id): ensemble = self.sysr.ensembles.get_by_id(ensemble_id) mg = self.sysr.model_groups.get_by_id_or_none(model_group_id) f = self.sysr.external_files.get_by_id_or_none(file_id) # We don't know the type yet (not until ensemble is read); this # will be corrected by EnsembleHandler.finalize() ss = ihm.model.Subsample( name=name, num_models=num_models, model_group=mg, file=f) ensemble.subsamples.append(ss) class _DensityHandler(Handler): category = '_ihm_localization_density_files' def __init__(self, *args): super().__init__(*args) self._read_args = [] def __call__(self, id, ensemble_id, file_id, asym_id, entity_poly_segment_id): # Postpone handling until finalize time, since we might not have # ranges to resolve entity_poly_segment_id yet self._read_args.append((id, ensemble_id, file_id, asym_id, entity_poly_segment_id)) def finalize(self): for (id, ensemble_id, file_id, asym_id, entity_poly_segment_id) in self._read_args: density = self.sysr.densities.get_by_id(id) ensemble = self.sysr.ensembles.get_by_id(ensemble_id) f = self.sysr.external_files.get_by_id(file_id) asym = self.sysr.ranges.get( self.sysr.asym_units.get_by_id(asym_id), entity_poly_segment_id) density.asym_unit = asym density.file = f ensemble.densities.append(density) class _EM3DRestraintHandler(Handler): category = '_ihm_3dem_restraint' def __call__(self, dataset_list_id, struct_assembly_id, fitting_method_citation_id, map_segment_flag: bool, fitting_method, number_of_gaussians: int, model_id, cross_correlation_coefficient: float, details): # EM3D restraints don't have their own IDs - they use the dataset # and assembly IDs r = self.sysr.em3d_restraints.get_by_dataset(dataset_list_id, struct_assembly_id) r.assembly = self.sysr.assemblies.get_by_id_or_none(struct_assembly_id) r.fitting_method_citation = self.sysr.citations.get_by_id_or_none( fitting_method_citation_id) self.copy_if_present(r, locals(), keys=('fitting_method', 'details')) r.segment = map_segment_flag r.number_of_gaussians = number_of_gaussians model = self.sysr.models.get_by_id(model_id) ccc = cross_correlation_coefficient r.fits[model] = ihm.restraint.EM3DRestraintFit(ccc) class _EM2DRestraintHandler(Handler): category = '_ihm_2dem_class_average_restraint' def __call__(self, id, dataset_list_id, number_raw_micrographs: int, pixel_size_width: float, pixel_size_height: float, image_resolution: float, image_segment_flag: bool, number_of_projections: int, struct_assembly_id, details): r = self.sysr.em2d_restraints.get_by_id(id) r.dataset = self.sysr.datasets.get_by_id(dataset_list_id) r.number_raw_micrographs = number_raw_micrographs r.pixel_size_width = pixel_size_width 
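# Illustrative usage sketch (not exercised here; attribute names assumed
# from the constructors used in these handlers): once a file has been read,
# per-model fit statistics live in each restraint's ``fits`` mapping,
# keyed by Model object, e.g.
#
#   import ihm.reader
#   import ihm.restraint
#   with open('output.cif') as fh:
#       system, = ihm.reader.read(fh)
#   for r in system.restraints:
#       if isinstance(r, ihm.restraint.EM3DRestraint):
#           for model, fit in r.fits.items():
#               print(fit.cross_correlation_coefficient)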
r.pixel_size_height = pixel_size_height r.image_resolution = image_resolution r.segment = image_segment_flag r.number_of_projections = number_of_projections r.assembly = self.sysr.assemblies.get_by_id_or_none( struct_assembly_id) self.copy_if_present(r, locals(), keys=('details',)) class _EM2DFittingHandler(Handler): category = '_ihm_2dem_class_average_fitting' def __call__(self, restraint_id, model_id, cross_correlation_coefficient: float, tr_vector1, tr_vector2, tr_vector3, rot_matrix11, rot_matrix21, rot_matrix31, rot_matrix12, rot_matrix22, rot_matrix32, rot_matrix13, rot_matrix23, rot_matrix33): r = self.sysr.em2d_restraints.get_by_id(restraint_id) model = self.sysr.models.get_by_id(model_id) ccc = cross_correlation_coefficient tr_vector = _get_vector3(locals(), 'tr_vector') rot_matrix = _get_matrix33(locals(), 'rot_matrix') r.fits[model] = ihm.restraint.EM2DRestraintFit( cross_correlation_coefficient=ccc, rot_matrix=rot_matrix, tr_vector=tr_vector) class _SASRestraintHandler(Handler): category = '_ihm_sas_restraint' def __call__(self, dataset_list_id, struct_assembly_id, profile_segment_flag: bool, fitting_atom_type, fitting_method, details, fitting_state, radius_of_gyration: float, number_of_gaussians: int, model_id, chi_value: float): # SAS restraints don't have their own IDs - they use the dataset and # assembly IDs r = self.sysr.sas_restraints.get_by_dataset(dataset_list_id, struct_assembly_id) r.assembly = self.sysr.assemblies.get_by_id_or_none( struct_assembly_id) r.segment = profile_segment_flag self.copy_if_present( r, locals(), keys=('fitting_atom_type', 'fitting_method', 'details')) fs = (fitting_state if fitting_state not in (None, ihm.unknown) else 'Single') r.multi_state = fs.lower() != 'single' r.radius_of_gyration = radius_of_gyration r.number_of_gaussians = number_of_gaussians model = self.sysr.models.get_by_id(model_id) r.fits[model] = ihm.restraint.SASRestraintFit(chi_value=chi_value) class _SphereObjSiteHandler(Handler): category = '_ihm_sphere_obj_site' ignored_keywords = ['ordinal_id'] def __call__(self, model_id, asym_id, rmsf: float, seq_id_begin, seq_id_end, cartn_x, cartn_y, cartn_z, object_radius): model = self.sysr.models.get_by_id(model_id) asym = self.sysr.asym_units.get_by_id(asym_id) s = ihm.model.Sphere( asym_unit=asym, seq_id_range=(int(seq_id_begin), int(seq_id_end)), x=float(cartn_x), y=float(cartn_y), z=float(cartn_z), radius=float(object_radius), rmsf=rmsf) model.add_sphere(s) class _AtomSiteHandler(Handler): category = '_atom_site' def __init__(self, *args): super().__init__(*args) self._missing_sequence = collections.defaultdict(dict) # Mapping from asym+auth_seq_id to internal ID self._seq_id_map = {} def _get_seq_id_from_auth(self, auth_seq_id, pdbx_pdb_ins_code, asym): """Get an internal seq_id for something not a polymer (nonpolymer, water, branched), given author-provided info""" if asym._id not in self._seq_id_map: self._seq_id_map[asym._id] = {} m = self._seq_id_map[asym._id] # Treat ? and . 
missing insertion codes equivalently if pdbx_pdb_ins_code is ihm.unknown: pdbx_pdb_ins_code = None auth = (auth_seq_id, pdbx_pdb_ins_code) if auth not in m: # Assign a new ID starting from 1 seq_id = len(m) + 1 m[auth] = seq_id # Add this info to the seq_id -> auth_seq_id mapping too if asym.auth_seq_id_map == 0: asym.auth_seq_id_map = {} asym.auth_seq_id_map[seq_id] = (auth_seq_id, pdbx_pdb_ins_code) return m[auth] def __call__(self, pdbx_pdb_model_num, label_asym_id, b_iso_or_equiv: float, label_seq_id: int, label_atom_id, type_symbol, cartn_x: float, cartn_y: float, cartn_z: float, occupancy: float, group_pdb, auth_seq_id, pdbx_pdb_ins_code, auth_asym_id, label_comp_id, label_alt_id): # seq_id can be None for non-polymers (HETATM) seq_id = label_seq_id # todo: handle fields other than those output by us model = self.sysr.models.get_by_id(pdbx_pdb_model_num) if label_asym_id is None: # If no asym_id is provided (e.g. minimal PyMOL output) then # use the author-provided ID instead asym = self.sysr.asym_units.get_by_id(auth_asym_id) # Chances are the entity_poly table is missing too, so remember # the comp_id to help us construct missing sequence info self._missing_sequence[asym][seq_id] = label_comp_id else: asym = self.sysr.asym_units.get_by_id(label_asym_id) auth_seq_id = self.get_int_or_string(auth_seq_id) if seq_id is None: # Fill in our internal seq_id using author-provided info our_seq_id = self._get_seq_id_from_auth( auth_seq_id, pdbx_pdb_ins_code, asym) else: our_seq_id = seq_id group = 'ATOM' if group_pdb is None else group_pdb a = ihm.model.Atom( asym_unit=asym, seq_id=our_seq_id, atom_id=label_atom_id, type_symbol=type_symbol, x=cartn_x, y=cartn_y, z=cartn_z, het=group != 'ATOM', biso=b_iso_or_equiv, occupancy=occupancy, alt_id=label_alt_id) model.add_atom(a) # Note any residues that have different seq_id and auth_seq_id if (auth_seq_id is not None and seq_id is not None and (seq_id != auth_seq_id or pdbx_pdb_ins_code not in (None, ihm.unknown))): if asym.auth_seq_id_map == 0: asym.auth_seq_id_map = {} asym.auth_seq_id_map[seq_id] = auth_seq_id, pdbx_pdb_ins_code def finalize(self): # Fill in missing Entity information from comp_ids entity_from_seq = {} for asym, comp_from_seq_id in self._missing_sequence.items(): if asym.entity is None: # Fill in gaps in seq_id with UNK residues seq_len = max(comp_from_seq_id.keys()) unk = ihm.LPeptideAlphabet()['UNK'] seq = [unk] * seq_len for seq_id, comp_id in comp_from_seq_id.items(): seq[seq_id - 1] = self.sysr.chem_comps.get_by_id(comp_id) seq = tuple(seq) # Lists are not hashable if seq in entity_from_seq: asym.entity = entity_from_seq[seq] else: asym.entity = ihm.Entity(seq) entity_from_seq[seq] = asym.entity self.system.entities.append(asym.entity) class _StartingModelCoordHandler(Handler): category = '_ihm_starting_model_coord' def __call__(self, starting_model_id, group_pdb, type_symbol, atom_id, asym_id, seq_id: int, cartn_x: float, cartn_y: float, cartn_z: float, b_iso_or_equiv: float): model = self.sysr.starting_models.get_by_id(starting_model_id) asym = self.sysr.asym_units.get_by_id(asym_id) # seq_id can be None for non-polymers (HETATM) group = 'ATOM' if group_pdb is None else group_pdb a = ihm.model.Atom( asym_unit=asym, seq_id=seq_id, atom_id=atom_id, type_symbol=type_symbol, x=cartn_x, y=cartn_y, z=cartn_z, het=group != 'ATOM', biso=b_iso_or_equiv) model.add_atom(a) class _StartingModelSeqDifHandler(Handler): category = '_ihm_starting_model_seq_dif' def __call__(self, starting_model_id, db_seq_id: int, seq_id: int, 
db_comp_id, details): model = self.sysr.starting_models.get_by_id(starting_model_id) sd = ihm.startmodel.SeqDif(db_seq_id=db_seq_id, seq_id=seq_id, db_comp_id=db_comp_id, details=details) model.add_seq_dif(sd) class _PolyResidueFeatureHandler(Handler): category = '_ihm_poly_residue_feature' def __call__(self, feature_id, entity_id, asym_id, seq_id_begin, seq_id_end): f = self.sysr.features.get_by_id( feature_id, ihm.restraint.ResidueFeature) asym_or_entity = self._get_asym_or_entity(asym_id, entity_id) r1 = int(seq_id_begin) r2 = int(seq_id_end) # allow out-of-range ranges f.ranges.append(asym_or_entity(r1, r2)) class _FeatureListHandler(Handler): category = '_ihm_feature_list' def __call__(self, feature_id, details): if details: f = self.sysr.features.get_by_id(feature_id) f.details = details class _PolyAtomFeatureHandler(Handler): category = '_ihm_poly_atom_feature' def __call__(self, feature_id, entity_id, asym_id, seq_id, atom_id): f = self.sysr.features.get_by_id( feature_id, ihm.restraint.AtomFeature) asym_or_entity = self._get_asym_or_entity(asym_id, entity_id) seq_id = int(seq_id) atom = asym_or_entity.residue(seq_id).atom(atom_id) f.atoms.append(atom) class _NonPolyFeatureHandler(Handler): category = '_ihm_non_poly_feature' def __call__(self, feature_id, entity_id, asym_id, atom_id): asym_or_entity = self._get_asym_or_entity(asym_id, entity_id) if atom_id is None: f = self.sysr.features.get_by_id( feature_id, ihm.restraint.NonPolyFeature) f.objs.append(asym_or_entity) else: f = self.sysr.features.get_by_id( feature_id, ihm.restraint.AtomFeature) # todo: handle multiple copies, e.g. waters? atom = asym_or_entity.residue(1).atom(atom_id) f.atoms.append(atom) class _PseudoSiteFeatureHandler(Handler): category = '_ihm_pseudo_site_feature' def __call__(self, feature_id, pseudo_site_id): f = self.sysr.features.get_by_id(feature_id, ihm.restraint.PseudoSiteFeature) p = self.sysr.pseudo_sites.get_by_id(pseudo_site_id) f.site = p class _PseudoSiteHandler(Handler): category = '_ihm_pseudo_site' def __call__(self, id, cartn_x: float, cartn_y: float, cartn_z: float, radius: float, description): p = self.sysr.pseudo_sites.get_by_id(id) p.x = cartn_x p.y = cartn_y p.z = cartn_z p.radius = radius p.description = description def _make_harmonic(low, up): return ihm.restraint.HarmonicDistanceRestraint(up if low is None else low) def _make_upper_bound(low, up): return ihm.restraint.UpperBoundDistanceRestraint(up) def _make_lower_bound(low, up): return ihm.restraint.LowerBoundDistanceRestraint(low) def _make_lower_upper_bound(low, up): return ihm.restraint.LowerUpperBoundDistanceRestraint( distance_lower_limit=low, distance_upper_limit=up) def _make_unknown_distance(low, up): return ihm.restraint.DistanceRestraint() _handle_distance = {'harmonic': _make_harmonic, 'upper bound': _make_upper_bound, 'lower bound': _make_lower_bound, 'lower and upper bound': _make_lower_upper_bound, None: _make_unknown_distance} class _DerivedDistanceRestraintHandler(Handler): category = '_ihm_derived_distance_restraint' _cond_map = {'ALL': True, 'ANY': False, None: None} def __call__(self, id, group_id, dataset_list_id, feature_id_1, feature_id_2, restraint_type, group_conditionality, probability: float, mic_value: float, distance_lower_limit: float, distance_upper_limit: float): r = self.sysr.dist_restraints.get_by_id(id) if group_id is not None: rg = self.sysr.dist_restraint_groups.get_by_id(group_id) rg.append(r) r.dataset = self.sysr.datasets.get_by_id_or_none(dataset_list_id) r.feature1 = 
self.sysr.features.get_by_id(feature_id_1) r.feature2 = self.sysr.features.get_by_id(feature_id_2) r.distance = _handle_distance[restraint_type](distance_lower_limit, distance_upper_limit) r.restrain_all = self._cond_map[group_conditionality] r.probability = probability r.mic_value = mic_value class _HDXRestraintHandler(Handler): category = '_ihm_hdx_restraint' def __call__(self, id, dataset_list_id, feature_id, protection_factor: float, details): r = self.sysr.hdx_restraints.get_by_id(id) r.dataset = self.sysr.datasets.get_by_id_or_none(dataset_list_id) r.feature = self.sysr.features.get_by_id(feature_id) r.protection_factor = protection_factor r.details = details class _PredictedContactRestraintHandler(Handler): category = '_ihm_predicted_contact_restraint' def _get_resatom(self, asym_id, seq_id, atom_id): asym = self.sysr.asym_units.get_by_id(asym_id) resatom = asym.residue(seq_id) if atom_id: resatom = resatom.atom(atom_id) return resatom def __call__(self, id, group_id, dataset_list_id, asym_id_1, seq_id_1: int, rep_atom_1, asym_id_2, seq_id_2: int, rep_atom_2, restraint_type, probability: float, distance_lower_limit: float, distance_upper_limit: float, model_granularity, software_id): r = self.sysr.pred_cont_restraints.get_by_id(id) if group_id is not None: rg = self.sysr.pred_cont_restraint_groups.get_by_id(group_id) rg.append(r) r.dataset = self.sysr.datasets.get_by_id_or_none(dataset_list_id) r.resatom1 = self._get_resatom(asym_id_1, seq_id_1, rep_atom_1) r.resatom2 = self._get_resatom(asym_id_2, seq_id_2, rep_atom_2) r.distance = _handle_distance[restraint_type](distance_lower_limit, distance_upper_limit) r.by_residue = self.get_lower(model_granularity) == 'by-residue' r.probability = probability r.software = self.sysr.software.get_by_id_or_none(software_id) class _CenterHandler(Handler): category = '_ihm_geometric_object_center' def __call__(self, id, xcoord: float, ycoord: float, zcoord: float): c = self.sysr.centers.get_by_id(id) c.x = xcoord c.y = ycoord c.z = zcoord class _TransformationHandler(Handler): category = '_ihm_geometric_object_transformation' def __call__(self, id, tr_vector1, tr_vector2, tr_vector3, rot_matrix11, rot_matrix21, rot_matrix31, rot_matrix12, rot_matrix22, rot_matrix32, rot_matrix13, rot_matrix23, rot_matrix33): t = self.sysr.transformations.get_by_id(id) t.rot_matrix = _get_matrix33(locals(), 'rot_matrix') t.tr_vector = _get_vector3(locals(), 'tr_vector') class _GeometricObjectHandler(Handler): category = '_ihm_geometric_object_list' # Map object_type to corresponding subclass (but not subsubclasses such # as XYPlane) _type_map = dict((x[1].type.lower(), x[1]) for x in inspect.getmembers(ihm.geometry, inspect.isclass) if issubclass(x[1], ihm.geometry.GeometricObject) and ihm.geometry.GeometricObject in x[1].__bases__) def __call__(self, object_type, object_id, object_name, object_description): typ = object_type.lower() if object_type is not None else 'other' g = self.sysr.geometries.get_by_id( object_id, self._type_map.get(typ, ihm.geometry.GeometricObject)) self.copy_if_present(g, locals(), mapkeys={'object_name': 'name', 'object_description': 'description'}) class _SphereHandler(Handler): category = '_ihm_geometric_object_sphere' def __call__(self, object_id, center_id, transformation_id, radius_r: float): s = self.sysr.geometries.get_by_id(object_id, ihm.geometry.Sphere) s.center = self.sysr.centers.get_by_id_or_none(center_id) s.transformation = self.sysr.transformations.get_by_id_or_none( transformation_id) s.radius = radius_r class 
_TorusHandler(Handler): category = '_ihm_geometric_object_torus' def __call__(self, object_id, center_id, transformation_id, major_radius_r: float, minor_radius_r: float): t = self.sysr.geometries.get_by_id(object_id, ihm.geometry.Torus) t.center = self.sysr.centers.get_by_id_or_none(center_id) t.transformation = self.sysr.transformations.get_by_id_or_none( transformation_id) t.major_radius = major_radius_r t.minor_radius = minor_radius_r class _HalfTorusHandler(Handler): category = '_ihm_geometric_object_half_torus' _inner_map = {'inner half': True, 'outer half': False} def __call__(self, object_id, thickness_th: float, section): t = self.sysr.geometries.get_by_id(object_id, ihm.geometry.HalfTorus) t.thickness = thickness_th section = section.lower() if section is not None else '' t.inner = self._inner_map.get(section, None) class _AxisHandler(Handler): category = '_ihm_geometric_object_axis' # Map axis_type to corresponding subclass _type_map = dict((x[1].axis_type.lower(), x[1]) for x in inspect.getmembers(ihm.geometry, inspect.isclass) if issubclass(x[1], ihm.geometry.Axis) and x[1] is not ihm.geometry.Axis) def __call__(self, axis_type, object_id, transformation_id): typ = axis_type.lower() if axis_type is not None else 'other' a = self.sysr.geometries.get_by_id( object_id, self._type_map.get(typ, ihm.geometry.Axis)) a.transformation = self.sysr.transformations.get_by_id_or_none( transformation_id) class _PlaneHandler(Handler): category = '_ihm_geometric_object_plane' # Map plane_type to corresponding subclass _type_map = dict((x[1].plane_type.lower(), x[1]) for x in inspect.getmembers(ihm.geometry, inspect.isclass) if issubclass(x[1], ihm.geometry.Plane) and x[1] is not ihm.geometry.Plane) def __call__(self, plane_type, object_id, transformation_id): typ = plane_type.lower() if plane_type is not None else 'other' a = self.sysr.geometries.get_by_id( object_id, self._type_map.get(typ, ihm.geometry.Plane)) a.transformation = self.sysr.transformations.get_by_id_or_none( transformation_id) class _GeometricRestraintHandler(Handler): category = '_ihm_geometric_object_distance_restraint' _cond_map = {'ALL': True, 'ANY': False, None: None} # Map object_characteristic to corresponding subclass _type_map = dict((x[1].object_characteristic.lower(), x[1]) for x in inspect.getmembers(ihm.restraint, inspect.isclass) if issubclass(x[1], ihm.restraint.GeometricRestraint)) def __call__(self, object_characteristic, id, dataset_list_id, object_id, feature_id, restraint_type, harmonic_force_constant: float, group_conditionality, distance_lower_limit: float, distance_upper_limit: float): typ = (object_characteristic or 'other').lower() r = self.sysr.geom_restraints.get_by_id( id, self._type_map.get(typ, ihm.restraint.GeometricRestraint)) r.dataset = self.sysr.datasets.get_by_id_or_none(dataset_list_id) r.geometric_object = self.sysr.geometries.get_by_id(object_id) r.feature = self.sysr.features.get_by_id(feature_id) r.distance = _handle_distance[restraint_type](distance_lower_limit, distance_upper_limit) r.harmonic_force_constant = harmonic_force_constant r.restrain_all = self._cond_map[group_conditionality] class _PolySeqSchemeHandler(Handler): category = '_pdbx_poly_seq_scheme' if _format is not None: _add_c_handler = _format.add_poly_seq_scheme_handler # Note: do not change the ordering of the first 6 parameters to this # function; the C parser expects them in this order def __call__(self, asym_id, seq_id, pdb_seq_num, auth_seq_num, pdb_ins_code, pdb_strand_id): asym = 
self.sysr.asym_units.get_by_id(asym_id) seq_id = self.get_int(seq_id) if pdb_strand_id not in (None, ihm.unknown, asym_id): asym._strand_id = pdb_strand_id pdb_seq_num = self.get_int_or_string(pdb_seq_num) auth_seq_num = self.get_int_or_string(auth_seq_num) # Note any residues that have different seq_id and pdb_seq_num if seq_id is not None and pdb_seq_num is not None \ and (seq_id != pdb_seq_num or pdb_ins_code not in (None, ihm.unknown)): if asym.auth_seq_id_map == 0: asym.auth_seq_id_map = {} asym.auth_seq_id_map[seq_id] = pdb_seq_num, pdb_ins_code # Note any residues that have different pdb_seq_num and auth_seq_num if (seq_id is not None and auth_seq_num is not None and pdb_seq_num is not None and auth_seq_num != pdb_seq_num): if asym.orig_auth_seq_id_map is None: asym.orig_auth_seq_id_map = {} asym.orig_auth_seq_id_map[seq_id] = auth_seq_num def finalize(self): for asym in self.sysr.system.asym_units: # If every residue in auth_seq_id_map is offset by the same # amount, and no insertion codes, replace the map with a # simple offset offset = self._get_auth_seq_id_offset(asym) if offset is not None: asym.auth_seq_id_map = offset def _get_auth_seq_id_offset(self, asym): """Get the offset from seq_id to auth_seq_id. Return None if no consistent offset exists.""" # Do nothing if the entity is not polymeric if asym.entity is None or not asym.entity.is_polymeric(): return # Do nothing if no map exists if asym.auth_seq_id_map == 0: return rng = asym.seq_id_range offset = None for seq_id in range(rng[0], rng[1] + 1): # If a residue isn't in the map, it has an effective offset of 0, # which has to be inconsistent (since everything in the map has # a nonzero offset by construction) if seq_id not in asym.auth_seq_id_map: return auth_seq_id, ins_code = asym.auth_seq_id_map[seq_id] # If auth_seq_id is a string, we can't use any offset if not isinstance(auth_seq_id, int): return # If insertion codes are provided, we can't use any offset if ins_code not in (None, ihm.unknown): return this_offset = auth_seq_id - seq_id if offset is None: offset = this_offset elif offset != this_offset: # Offset is inconsistent return return offset class _NonPolySchemeHandler(Handler): category = '_pdbx_nonpoly_scheme' def __init__(self, *args): super().__init__(*args) self._scheme = {} def __call__(self, asym_id, entity_id, pdb_seq_num, mon_id, pdb_ins_code, pdb_strand_id, ndb_seq_num: int, auth_seq_num): entity = self.sysr.entities.get_by_id(entity_id) # nonpolymer entities generally have information on their chemical # component in pdbx_entity_nonpoly, but if that's missing, at least # get the name from mon_id here, so that we don't end up with an # empty sequence if len(entity.sequence) == 0 and mon_id: if mon_id == 'HOH': s = ihm.WaterChemComp() else: s = ihm.NonPolymerChemComp( mon_id, name=entity.description) entity.sequence.append(s) asym = self.sysr.asym_units.get_by_id(asym_id) if pdb_strand_id not in (None, ihm.unknown, asym_id): asym._strand_id = pdb_strand_id pdb_seq_num = self.get_int_or_string(pdb_seq_num) auth_seq_num = self.get_int_or_string(auth_seq_num) # Make mapping from author-provided numbering (*pdb_seq_num*, not # auth_seq_num) to original and NDB numbering. We will use this at # finalize time to map internal ID ("seq_id") to auth, orig_auth, # and NDB numbering. 
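# Illustrative sketch of the bookkeeping below (values invented): a
# _pdbx_nonpoly_scheme row such as
#
#   _pdbx_nonpoly_scheme.asym_id      B
#   _pdbx_nonpoly_scheme.pdb_seq_num  101
#   _pdbx_nonpoly_scheme.pdb_ins_code .
#   _pdbx_nonpoly_scheme.auth_seq_num 1
#
# is stored under key 'B' as the tuple (101, None, 1, ndb_seq_num) and is
# only converted into auth_seq_id_map / orig_auth_seq_id_map entries in
# _finalize_asym(), after the atom_site records for the asym are known.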
if asym_id not in self._scheme: self._scheme[asym_id] = [] self._scheme[asym_id].append((pdb_seq_num, pdb_ins_code, auth_seq_num, ndb_seq_num)) def finalize(self): for asym in self.system.asym_units: entity = asym.entity if entity is None or entity.is_polymeric() or entity.is_branched(): continue self._finalize_asym(asym) def _finalize_asym(self, asym): # Add mapping info from scheme tables (to that already extracted # from atom_site); if a mismatch we use atom_site info scheme = self._scheme.get(asym._id) if scheme: if not asym.auth_seq_id_map: asym.auth_seq_id_map = {} if not asym.orig_auth_seq_id_map: asym.orig_auth_seq_id_map = {} # Make reverse mapping from author-provided info to internal ID auth_map = {} for key, val in asym.auth_seq_id_map.items(): auth_map[val] = key for pdb_seq_num, pdb_ins_code, auth_seq_num, ndb_seq_num in scheme: auth = (pdb_seq_num, pdb_ins_code) seq_id = auth_map.get(auth) if seq_id is None: seq_id = len(asym.auth_seq_id_map) + 1 asym.auth_seq_id_map[seq_id] = auth if pdb_seq_num != auth_seq_num: asym.orig_auth_seq_id_map[seq_id] = auth_seq_num if not asym.orig_auth_seq_id_map: asym.orig_auth_seq_id_map = None if asym.entity.type == 'water': # Replace AsymUnit with WaterAsymUnit if necessary if not isinstance(asym, ihm.WaterAsymUnit): asym.__class__ = ihm.WaterAsymUnit asym.number = len(asym.auth_seq_id_map) asym._water_sequence = [asym.entity.sequence[0]] * asym.number # todo: add mapping from seq_id to ndb numbering? class _BranchSchemeHandler(Handler): category = '_pdbx_branch_scheme' def __init__(self, *args): super().__init__(*args) self._scheme = {} def __call__(self, asym_id, num: int, pdb_seq_num, auth_seq_num, pdb_asym_id, pdb_ins_code): asym = self.sysr.asym_units.get_by_id(asym_id) if pdb_asym_id not in (None, ihm.unknown, asym_id): asym._strand_id = pdb_asym_id pdb_seq_num = self.get_int_or_string(pdb_seq_num) auth_seq_num = self.get_int_or_string(auth_seq_num) # Make mapping from author-provided numbering (*pdb_seq_num*, not # auth_seq_num) to original and "num" numbering. We will use this at # finalize time to map internal ID ("seq_id") to auth, orig_auth, # and "num" numbering. 
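# Illustrative sketch (values invented): if atom_site assigned this residue
# a provisional internal seq_id of 1, but _pdbx_branch_scheme lists it as
#
#   _pdbx_branch_scheme.asym_id     C
#   _pdbx_branch_scheme.num         4
#   _pdbx_branch_scheme.pdb_seq_num 4
#
# then _finalize_asym() records num_map[1] = 4 for that asym and
# _reassign_seq_ids() rewrites the seq_ids of that asym's atoms to match
# the scheme's "num" value.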
if asym_id not in self._scheme: self._scheme[asym_id] = [] self._scheme[asym_id].append((pdb_seq_num, pdb_ins_code, auth_seq_num, num)) def finalize(self): need_map_num = False for asym in self.system.asym_units: entity = asym.entity if entity is None or not entity.is_branched(): continue self._finalize_asym(asym) if asym.num_map: need_map_num = True if need_map_num: self._reassign_seq_ids() def _reassign_seq_ids(self): """Change provisional seq_ids so that they match _pdbx_branch_scheme.num""" for m in self.sysr.models.get_all(): for atom in m._atoms: if atom.asym_unit.num_map: atom.seq_id = atom.asym_unit.num_map[atom.seq_id] def _finalize_asym(self, asym): # Populate auth_seq_id mapping from scheme tables, and correct # any incorrect seq_ids assigned in atom_site to use num scheme = self._scheme.get(asym._id, []) # Make reverse mapping from atom_site author-provided info # to internal ID auth_map = {} if asym.auth_seq_id_map: for key, val in asym.auth_seq_id_map.items(): auth_map[val] = key asym.auth_seq_id_map = {} asym.orig_auth_seq_id_map = {} asym.num_map = {} for pdb_seq_num, pdb_ins_code, auth_seq_num, num in scheme: asym.auth_seq_id_map[num] = (pdb_seq_num, pdb_ins_code) if pdb_seq_num != auth_seq_num: asym.orig_auth_seq_id_map[num] = auth_seq_num as_seq_id = auth_map.get((pdb_seq_num, pdb_ins_code)) if as_seq_id is not None: if as_seq_id != num: asym.num_map[as_seq_id] = num del auth_map[(pdb_seq_num, pdb_ins_code)] if not asym.orig_auth_seq_id_map: asym.orig_auth_seq_id_map = None if not asym.num_map: asym.num_map = None # If any residues from atom_site are left, we can't assign a num # for them, so raise an error if auth_map: raise ValueError( "For branched asym %s, the following author-provided " "residue numbers (atom_site.auth_seq_id) are not present in " "the pdbx_branch_scheme table: %s" % (asym._id, ", ".join(repr(x[0]) for x in auth_map.keys()))) class _EntityBranchListHandler(Handler): category = '_pdbx_entity_branch_list' def __call__(self, entity_id, comp_id, num): s = self.sysr.entities.get_by_id(entity_id) # Assume num is 1-based (appears to be) seq_id = int(num) if seq_id > len(s.sequence): s.sequence.extend([None] * (seq_id - len(s.sequence))) s.sequence[seq_id - 1] = self.sysr.chem_comps.get_by_id(comp_id) class _BranchDescriptorHandler(Handler): category = '_pdbx_entity_branch_descriptor' def __call__(self, entity_id, descriptor, type, program, program_version): e = self.sysr.entities.get_by_id(entity_id) d = ihm.BranchDescriptor(text=descriptor, type=type, program=program, program_version=program_version) e.branch_descriptors.append(d) class _BranchLinkHandler(Handler): category = '_pdbx_entity_branch_link' def __call__(self, entity_id, entity_branch_list_num_1: int, atom_id_1, leaving_atom_id_1, entity_branch_list_num_2: int, atom_id_2, leaving_atom_id_2, value_order, details): e = self.sysr.entities.get_by_id(entity_id) lnk = ihm.BranchLink(num1=entity_branch_list_num_1, atom_id1=atom_id_1, leaving_atom_id1=leaving_atom_id_1, num2=entity_branch_list_num_2, atom_id2=atom_id_2, leaving_atom_id2=leaving_atom_id_2, order=value_order, details=details) e.branch_links.append(lnk) class _CrossLinkListHandler(Handler): category = '_ihm_cross_link_list' ignored_keywords = ['entity_description_1', 'entity_description_2', 'comp_id_1', 'comp_id_2'] _linkers_by_name = None def __init__(self, *args): super().__init__(*args) self._seen_group_ids = set() self._linker_type = {} def _get_linker_by_name(self, name): """Look up old-style linker, by name rather than descriptor""" 
if self._linkers_by_name is None: self._linkers_by_name \ = dict((x[1].auth_name, x[1]) for x in inspect.getmembers(ihm.cross_linkers) if isinstance(x[1], ihm.ChemDescriptor)) if name not in self._linkers_by_name: self._linkers_by_name[name] = ihm.ChemDescriptor(name) return self._linkers_by_name[name] def __call__(self, dataset_list_id, linker_chem_comp_descriptor_id, group_id, id, entity_id_1, entity_id_2, seq_id_1, seq_id_2, linker_type, details): dataset = self.sysr.datasets.get_by_id_or_none(dataset_list_id) if linker_chem_comp_descriptor_id is None and linker_type is not None: linker = self._get_linker_by_name(linker_type) else: linker = self.sysr.chem_descriptors.get_by_id( linker_chem_comp_descriptor_id) if linker_type: self._linker_type[linker] = linker_type # Group all crosslinks with same dataset and linker in one # CrossLinkRestraint object r = self.sysr.xl_restraints.get_by_attrs(dataset, linker) xl_group = self.sysr.experimental_xl_groups.get_by_id(group_id) xl = self.sysr.experimental_xls.get_by_id(id) if group_id not in self._seen_group_ids: self._seen_group_ids.add(group_id) r.experimental_cross_links.append(xl_group) xl_group.append(xl) xl.residue1 = self._get_entity_residue(entity_id_1, seq_id_1) xl.residue2 = self._get_entity_residue(entity_id_2, seq_id_2) xl.details = details def _get_entity_residue(self, entity_id, seq_id): entity = self.sysr.entities.get_by_id(entity_id) return entity.residue(int(seq_id)) def finalize(self): # If any ChemDescriptor has an empty name, fill it in using linker_type for d in self.system.orphan_chem_descriptors: if d.auth_name is None: d.auth_name = self._linker_type.get(d) class _CrossLinkRestraintHandler(Handler): category = '_ihm_cross_link_restraint' _cond_map = {'ALL': True, 'ANY': False, None: None} _distance_map = {'harmonic': ihm.restraint.HarmonicDistanceRestraint, 'lower bound': ihm.restraint.LowerBoundDistanceRestraint, 'upper bound': ihm.restraint.UpperBoundDistanceRestraint} # Map granularity to corresponding subclass _type_map = dict((x[1].granularity.lower(), x[1]) for x in inspect.getmembers(ihm.restraint, inspect.isclass) if issubclass(x[1], ihm.restraint.CrossLink) and x[1] is not ihm.restraint.CrossLink) def __call__(self, model_granularity, id, group_id, asym_id_1, asym_id_2, restraint_type, distance_threshold: float, conditional_crosslink_flag, atom_id_1, atom_id_2, psi: float, sigma_1: float, sigma_2: float): typ = (model_granularity or 'other').lower() xl = self.sysr.cross_links.get_by_id( id, self._type_map.get(typ, ihm.restraint.ResidueCrossLink)) ex_xl = self.sysr.experimental_xls.get_by_id(group_id) xl.experimental_cross_link = ex_xl xl.asym1 = self.sysr.asym_units.get_by_id(asym_id_1) xl.asym2 = self.sysr.asym_units.get_by_id(asym_id_2) # todo: handle unknown restraint type _distcls = self._distance_map[restraint_type.lower()] xl.distance = _distcls(distance_threshold) xl.restrain_all = self._cond_map[conditional_crosslink_flag] if isinstance(xl, ihm.restraint.AtomCrossLink): xl.atom1 = atom_id_1 xl.atom2 = atom_id_2 xl.psi = psi xl.sigma1 = sigma_1 xl.sigma2 = sigma_2 def finalize(self): # Put each cross link in the restraint that owns its experimental xl rsr_for_ex_xl = {} for r in self.sysr.xl_restraints.get_all(): for ex_xl_group in r.experimental_cross_links: for ex_xl in ex_xl_group: rsr_for_ex_xl[ex_xl] = r for xl in self.sysr.cross_links.get_all(): r = rsr_for_ex_xl[xl.experimental_cross_link] r.cross_links.append(xl) class _CrossLinkPseudoSiteHandler(Handler): category = '_ihm_cross_link_pseudo_site' 
def __call__(self, id, restraint_id, cross_link_partner: int, pseudo_site_id, model_id): xlps = self.sysr.cross_link_pseudo_sites.get_by_id(id) xlps.site = self.sysr.pseudo_sites.get_by_id(pseudo_site_id) xlps.model = self.sysr.models.get_by_id_or_none(model_id) xl = self.sysr.cross_links.get_by_id(restraint_id) if cross_link_partner == 2: if getattr(xl, 'pseudo2', None) is None: xl.pseudo2 = [] xl.pseudo2.append(xlps) else: if getattr(xl, 'pseudo1', None) is None: xl.pseudo1 = [] xl.pseudo1.append(xlps) class _CrossLinkResultHandler(Handler): category = '_ihm_cross_link_result' def __call__(self, restraint_id, ensemble_id, model_group_id, num_models: int, median_distance: float, details): if ensemble_id: g = self.sysr.ensembles.get_by_id(ensemble_id) else: g = self.sysr.model_groups.get_by_id(model_group_id) xl = self.sysr.cross_links.get_by_id(restraint_id) xl.fits[g] = ihm.restraint.CrossLinkGroupFit( num_models=num_models, median_distance=median_distance, details=details) class _CrossLinkResultParametersHandler(Handler): category = '_ihm_cross_link_result_parameters' ignored_keywords = ['ordinal_id'] def __call__(self, restraint_id, model_id, psi: float, sigma_1: float, sigma_2: float): xl = self.sysr.cross_links.get_by_id(restraint_id) model = self.sysr.models.get_by_id(model_id) xl.fits[model] = ihm.restraint.CrossLinkFit( psi=psi, sigma1=sigma_1, sigma2=sigma_2) class _OrderedModelHandler(Handler): category = '_ihm_ordered_model' def __call__(self, process_id, step_id, model_group_id_begin, model_group_id_end, edge_description, ordered_by, process_description, step_description): proc = self.sysr.ordered_procs.get_by_id(process_id) # todo: will this work with multiple processes? step = self.sysr.ordered_steps.get_by_id(step_id) edge = ihm.model.ProcessEdge( self.sysr.model_groups.get_by_id(model_group_id_begin), self.sysr.model_groups.get_by_id(model_group_id_end)) self.copy_if_present( edge, locals(), mapkeys={'edge_description': 'description'}) step.append(edge) if step_id not in [s._id for s in proc.steps]: proc.steps.append(step) self.copy_if_present( proc, locals(), keys=('ordered_by',), mapkeys={'process_description': 'description'}) self.copy_if_present( step, locals(), mapkeys={'step_description': 'description'}) # Handle the old name for the ihm_ordered_model category. This is a separate # object so relies on _OrderedModelHandler not storing any state. 
class _OrderedEnsembleHandler(_OrderedModelHandler): category = '_ihm_ordered_ensemble' class UnknownCategoryWarning(Warning): """Warning for unknown categories encountered in the file by :func:`read`""" pass class UnknownKeywordWarning(Warning): """Warning for unknown keywords encountered in the file by :func:`read`""" pass class _UnknownCategoryHandler: def __init__(self): self.reset() def reset(self): self._seen_categories = set() def __call__(self, catname, line): # Only warn about a given category once if catname in self._seen_categories: return self._seen_categories.add(catname) warnings.warn("Unknown category %s encountered%s - will be ignored" % (catname, " on line %d" % line if line else ""), UnknownCategoryWarning, stacklevel=2) class _UnknownKeywordHandler: def add_category_handlers(self, handlers): self._ignored_keywords = dict((h.category, frozenset(h.ignored_keywords)) for h in handlers) def __call__(self, catname, keyname, line): if keyname in self._ignored_keywords[catname]: return warnings.warn("Unknown keyword %s.%s encountered%s - will be ignored" % (catname, keyname, " on line %d" % line if line else ""), UnknownKeywordWarning, stacklevel=2) class _MultiStateSchemeHandler(Handler): category = '_ihm_multi_state_scheme' def __call__(self, id, name, details): # Get the object or create the object cur_mss = self.sysr.multi_state_schemes.get_by_id(id) # Set the variables self.copy_if_present(cur_mss, locals(), keys=('name', 'details')) class _MultiStateSchemeConnectivityHandler(Handler): category = '_ihm_multi_state_scheme_connectivity' def __call__(self, id, scheme_id, begin_state_id, end_state_id, dataset_group_id, details): # Get the object or create the object mssc = self.sysr.multi_state_scheme_connectivities.get_by_id(id) # Add the content mssc.begin_state = self.sysr.states.get_by_id(begin_state_id) mssc.end_state = self.sysr.states.get_by_id_or_none(end_state_id) mssc.dataset_group = \ self.sysr.dataset_groups.get_by_id_or_none(dataset_group_id) mssc.details = details # Get the MultiStateScheme mss = self.sysr.multi_state_schemes.get_by_id(scheme_id) # Add the connectivity to the scheme mss.add_connectivity(mssc) class _KineticRateHandler(Handler): category = '_ihm_kinetic_rate' def __call__(self, id, transition_rate_constant, equilibrium_constant, equilibrium_constant_determination_method, equilibrium_constant_unit, details, scheme_connectivity_id, dataset_group_id, external_file_id): # Get the object or create the object k = self.sysr.kinetic_rates.get_by_id(id) # if information for an equilibrium is given, create an object eq_const = None if (equilibrium_constant is not None) \ and (equilibrium_constant_determination_method is not None): if equilibrium_constant_determination_method == 'equilibrium ' \ 'constant is ' \ 'determined from '\ 'population': eq_const = \ ihm.multi_state_scheme.PopulationEquilibriumConstant( value=equilibrium_constant, unit=equilibrium_constant_unit) elif equilibrium_constant_determination_method == 'equilibrium ' \ 'constant is ' \ 'determined ' \ 'from kinetic ' \ 'rates, ' \ 'kAB/kBA': eq_const = \ ihm.multi_state_scheme.KineticRateEquilibriumConstant( value=equilibrium_constant, unit=equilibrium_constant_unit) else: eq_const = \ ihm.multi_state_scheme.EquilibriumConstant( value=equilibrium_constant, unit=equilibrium_constant_unit) # Add the content k.transition_rate_constant = transition_rate_constant k.equilibrium_constant = eq_const k.details = details k.dataset_group = \ self.sysr.dataset_groups.get_by_id_or_none(dataset_group_id) 
k.external_file = \ self.sysr.external_files.get_by_id_or_none(external_file_id) tmp_connectivities = self.sysr.multi_state_scheme_connectivities mssc = tmp_connectivities.get_by_id(scheme_connectivity_id) # Add the kinetic rate to the connectivity mssc.kinetic_rate = k class _RelaxationTimeHandler(Handler): category = '_ihm_relaxation_time' def __call__(self, id, value, unit, amplitude, dataset_group_id, external_file_id, details): # Get the object or create the object r = self.sysr.relaxation_times.get_by_id(id) # Add the content r.value = value r.unit = unit r.amplitude = amplitude r.dataset_group = \ self.sysr.dataset_groups.get_by_id_or_none(dataset_group_id) r.external_file = \ self.sysr.external_files.get_by_id_or_none(external_file_id) r.details = details class _RelaxationTimeMultiStateSchemeHandler(Handler): category = '_ihm_relaxation_time_multi_state_scheme' def __init__(self, *args): super().__init__(*args) self._read_args = [] def __call__(self, id, relaxation_time_id, scheme_id, scheme_connectivity_id, details): r = self.sysr.relaxation_times.get_by_id(relaxation_time_id) mss = self.sysr.multi_state_schemes.get_by_id(scheme_id) self._read_args.append((r, mss, scheme_connectivity_id, details)) def finalize(self): for (r, mss, scheme_connectivity_id, details) in self._read_args: tmp_connectivities = self.sysr.multi_state_scheme_connectivities mssc = tmp_connectivities.get_by_id_or_none(scheme_connectivity_id) # If the relaxation time is assigned to a connectivity, # add it there if mssc is not None: mssc.relaxation_time = r # Otherwise, add it to the multi-state scheme else: mss.add_relaxation_time(r) # FLR part # Note: This Handler is only here, because the category is officially # still in the flr dictionary. class _FLRChemDescriptorHandler(_ChemDescriptorHandler): category = '_flr_chemical_descriptor' class _FLRExperimentHandler(Handler): category = '_flr_experiment' def __call__(self, ordinal_id, id, instrument_id, inst_setting_id, exp_condition_id, sample_id, details): # Get the object or create the object experiment = self.sysr.flr_experiments.get_by_id(id) # Fill the object instrument = self.sysr.flr_instruments.get_by_id(instrument_id) inst_setting = self.sysr.flr_inst_settings.get_by_id(inst_setting_id) exp_condition = self.sysr.flr_exp_conditions.get_by_id( exp_condition_id) sample = self.sysr.flr_samples.get_by_id(sample_id) experiment.add_entry(instrument=instrument, inst_setting=inst_setting, exp_condition=exp_condition, sample=sample, details=details) class _FLRInstSettingHandler(Handler): category = '_flr_inst_setting' def __call__(self, id, details): # Get the object or create the object cur_inst_setting = self.sysr.flr_inst_settings.get_by_id(id) # Set the variables self.copy_if_present(cur_inst_setting, locals(), keys=('details',)) class _FLRExpConditionHandler(Handler): category = '_flr_exp_condition' def __call__(self, id, details): # Get the object or create the object cur_exp_condition = self.sysr.flr_exp_conditions.get_by_id(id) # Set the variables self.copy_if_present(cur_exp_condition, locals(), keys=('details',)) class _FLRInstrumentHandler(Handler): category = '_flr_instrument' def __call__(self, id, details): # Get the object or create the object cur_instrument = self.sysr.flr_instruments.get_by_id(id) # Set the variables self.copy_if_present(cur_instrument, locals(), keys=('details',)) class _FLREntityAssemblyHandler(Handler): category = '_flr_entity_assembly' def __call__(self, ordinal_id, assembly_id, entity_id, num_copies: int): # Get the object 
or create the object a = self.sysr.flr_entity_assemblies.get_by_id(assembly_id) # Get the entity entity = self.sysr.entities.get_by_id(entity_id) # Add the entity to the entity assembly a.add_entity(entity=entity, num_copies=num_copies) class _FLRSampleConditionHandler(Handler): category = '_flr_sample_condition' def __call__(self, id, details): # Get the object or create the object cur_sample_condition = self.sysr.flr_sample_conditions.get_by_id(id) # Set the variables self.copy_if_present(cur_sample_condition, locals(), keys=('details',)) class _FLRSampleHandler(Handler): category = '_flr_sample' def __call__(self, id, entity_assembly_id, num_of_probes: int, sample_condition_id, sample_description, sample_details, solvent_phase): sample = self.sysr.flr_samples.get_by_id(id) sample.entity_assembly \ = self.sysr.flr_entity_assemblies.get_by_id(entity_assembly_id) sample.num_of_probes = num_of_probes sample.condition = cond \ = self.sysr.flr_sample_conditions.get_by_id(sample_condition_id) self.copy_if_present(sample, locals(), keys=('solvent_phase',), mapkeys={'sample_description': 'description', 'sample_details': 'details'}) class _FLRProbeListHandler(Handler): category = '_flr_probe_list' def __call__(self, probe_id, chromophore_name, reactive_probe_flag: bool, reactive_probe_name, probe_origin, probe_link_type): cur_probe = self.sysr.flr_probes.get_by_id(probe_id) cur_probe.probe_list_entry = ihm.flr.ProbeList( chromophore_name=chromophore_name, reactive_probe_flag=reactive_probe_flag, reactive_probe_name=reactive_probe_name, probe_origin=probe_origin, probe_link_type=probe_link_type) class _FLRSampleProbeDetailsHandler(Handler): category = '_flr_sample_probe_details' def __call__(self, sample_probe_id, sample_id, probe_id, fluorophore_type, description, poly_probe_position_id): spd = self.sysr.flr_sample_probe_details.get_by_id(sample_probe_id) spd.sample = self.sysr.flr_samples.get_by_id(sample_id) spd.probe = self.sysr.flr_probes.get_by_id(probe_id) spd.poly_probe_position = self.sysr.flr_poly_probe_positions.get_by_id( poly_probe_position_id) spd.fluorophore_type = fluorophore_type spd.description = description class _FLRProbeDescriptorHandler(Handler): category = '_flr_probe_descriptor' def __call__(self, probe_id, reactive_probe_chem_descriptor_id, chromophore_chem_descriptor_id, chromophore_center_atom): react_cd = self.sysr.chem_descriptors.get_by_id_or_none( reactive_probe_chem_descriptor_id) chrom_cd = self.sysr.chem_descriptors.get_by_id_or_none( chromophore_chem_descriptor_id) cur_probe = self.sysr.flr_probes.get_by_id(probe_id) cur_probe.probe_descriptor = ihm.flr.ProbeDescriptor( reactive_probe_chem_descriptor=react_cd, chromophore_chem_descriptor=chrom_cd, chromophore_center_atom=chromophore_center_atom) class _FLRPolyProbePositionHandler(Handler): category = '_flr_poly_probe_position' def _get_resatom(self, entity_id, asym_id, seq_id, atom_id): entity = self.sysr.entities.get_by_id(entity_id) asym = self.sysr.asym_units.get_by_id_or_none(asym_id) if asym is not None: asym.entity = entity asym.id = asym_id resatom = entity.residue(seq_id) if asym is not None: resatom.asym = asym if atom_id: resatom = resatom.atom(atom_id) return resatom def __call__(self, id, entity_id, asym_id, seq_id: int, atom_id, mutation_flag: bool, modification_flag: bool, auth_name): ppos = self.sysr.flr_poly_probe_positions.get_by_id(id) ppos.resatom = self._get_resatom(entity_id, asym_id, seq_id, atom_id) ppos.mutation_flag = mutation_flag ppos.modification_flag = modification_flag ppos.auth_name 
= auth_name class _FLRPolyProbePositionModifiedHandler(Handler): category = '_flr_poly_probe_position_modified' def __call__(self, id, chem_descriptor_id): ppos = self.sysr.flr_poly_probe_positions.get_by_id(id) ppos.modified_chem_descriptor = \ self.sysr.chem_descriptors.get_by_id_or_none(chem_descriptor_id) class _FLRPolyProbePositionMutatedHandler(Handler): category = '_flr_poly_probe_position_mutated' def __call__(self, id, chem_comp_id, atom_id): ppos = self.sysr.flr_poly_probe_positions.get_by_id(id) ppos.mutated_chem_comp_id = \ self.sysr.chem_comps.get_by_id(chem_comp_id) class _FLRPolyProbeConjugateHandler(Handler): category = '_flr_poly_probe_conjugate' def __call__(self, id, sample_probe_id, chem_descriptor_id, ambiguous_stoichiometry_flag: bool, probe_stoichiometry: float): ppc = self.sysr.flr_poly_probe_conjugates.get_by_id(id) ppc.sample_probe = self.sysr.flr_sample_probe_details.get_by_id( sample_probe_id) ppc.chem_descriptor = self.sysr.chem_descriptors.get_by_id( chem_descriptor_id) ppc.ambiguous_stoichiometry = ambiguous_stoichiometry_flag ppc.probe_stoichiometry = probe_stoichiometry class _FLRFretForsterRadiusHandler(Handler): category = '_flr_fret_forster_radius' def __call__(self, id, donor_probe_id, acceptor_probe_id, forster_radius: float, reduced_forster_radius: float): ffr = self.sysr.flr_fret_forster_radius.get_by_id(id) ffr.donor_probe = self.sysr.flr_probes.get_by_id(donor_probe_id) ffr.acceptor_probe = self.sysr.flr_probes.get_by_id(acceptor_probe_id) ffr.forster_radius = forster_radius ffr.reduced_forster_radius = reduced_forster_radius class _FLRFretCalibrationParametersHandler(Handler): category = '_flr_fret_calibration_parameters' def __call__(self, id, phi_acceptor: float, alpha: float, alpha_sd: float, gg_gr_ratio: float, beta: float, gamma: float, delta: float, a_b: float): p = self.sysr.flr_fret_calibration_parameters.get_by_id(id) p.phi_acceptor = phi_acceptor p.alpha = alpha p.alpha_sd = alpha_sd p.gg_gr_ratio = gg_gr_ratio p.beta = beta p.gamma = gamma p.delta = delta p.a_b = a_b class _FLRFretAnalysisHandler(Handler): category = '_flr_fret_analysis' def __call__(self, id, experiment_id, type, sample_probe_id_1, sample_probe_id_2, forster_radius_id, dataset_list_id, external_file_id, software_id): f = self.sysr.flr_fret_analyses.get_by_id(id) f.experiment = self.sysr.flr_experiments.get_by_id(experiment_id) f.type = type f.sample_probe_1 = self.sysr.flr_sample_probe_details.get_by_id( sample_probe_id_1) f.sample_probe_2 = self.sysr.flr_sample_probe_details.get_by_id( sample_probe_id_2) f.forster_radius = self.sysr.flr_fret_forster_radius.get_by_id( forster_radius_id) f.dataset = self.sysr.datasets.get_by_id(dataset_list_id) f.external_file = \ self.sysr.external_files.get_by_id_or_none(external_file_id) f.software = self.sysr.software.get_by_id_or_none(software_id) class _FLRFretAnalysisIntensityHandler(Handler): category = '_flr_fret_analysis_intensity' def __call__(self, ordinal_id, analysis_id, calibration_parameters_id, donor_only_fraction: float, chi_square_reduced: float, method_name, details): f = self.sysr.flr_fret_analyses.get_by_id(analysis_id) f.type = 'intensity-based' f.calibration_parameters = \ self.sysr.flr_fret_calibration_parameters.get_by_id( calibration_parameters_id) f.donor_only_fraction = donor_only_fraction f.chi_square_reduced = chi_square_reduced f.method_name = method_name f.details = details class _FLRFretAnalysisLifetimeHandler(Handler): category = '_flr_fret_analysis_lifetime' def __call__(self, ordinal_id, analysis_id, 
reference_measurement_group_id, lifetime_fit_model_id, donor_only_fraction: float, chi_square_reduced: float, method_name, details): f = self.sysr.flr_fret_analyses.get_by_id(analysis_id) f.type = 'lifetime-based' f.ref_measurement_group \ = self.sysr.flr_ref_measurement_groups.get_by_id( reference_measurement_group_id) f.lifetime_fit_model = self.sysr.flr_lifetime_fit_models.get_by_id( lifetime_fit_model_id) f.donor_only_fraction = donor_only_fraction f.chi_square_reduced = chi_square_reduced f.method_name = method_name f.details = details class _FLRLifetimeFitModelHandler(Handler): category = '_flr_lifetime_fit_model' def __call__(self, id, name, description, external_file_id, citation_id): f = self.sysr.flr_lifetime_fit_models.get_by_id(id) f.name = name f.description = description f.external_file = \ self.sysr.external_files.get_by_id_or_none(external_file_id) f.citation = \ self.sysr.citations.get_by_id_or_none(citation_id) class _FLRRefMeasurementHandler(Handler): category = '_flr_reference_measurement' def __call__(self, id, reference_sample_probe_id, num_species, details): r = self.sysr.flr_ref_measurements.get_by_id(id) r.ref_sample_probe = self.sysr.flr_sample_probe_details.get_by_id( reference_sample_probe_id) r.details = details class _FLRRefMeasurementGroupHandler(Handler): category = '_flr_reference_measurement_group' def __call__(self, id, num_measurements, details): g = self.sysr.flr_ref_measurement_groups.get_by_id(id) g.details = details class _FLRRefMeasurementGroupLinkHandler(Handler): category = '_flr_reference_measurement_group_link' def __call__(self, group_id, reference_measurement_id): g = self.sysr.flr_ref_measurement_groups.get_by_id(group_id) r = self.sysr.flr_ref_measurements.get_by_id(reference_measurement_id) g.add_ref_measurement(r) class _FLRRefMeasurementLifetimeHandler(Handler): category = '_flr_reference_measurement_lifetime' def __call__(self, ordinal_id, reference_measurement_id, species_name, species_fraction: float, lifetime: float): lf = self.sysr.flr_ref_measurement_lifetimes.get_by_id(ordinal_id) lf.species_name = species_name lf.species_fraction = species_fraction lf.lifetime = lifetime # Add the lifetime to the reference measurement r = self.sysr.flr_ref_measurements.get_by_id(reference_measurement_id) r.add_lifetime(lf) class _FLRPeakAssignmentHandler(Handler): category = '_flr_peak_assignment' def __call__(self, id, method_name, details): p = self.sysr.flr_peak_assignments.get_by_id(id) self.copy_if_present(p, locals(), keys=('method_name', 'details')) class _FLRFretDistanceRestraintHandler(Handler): category = '_flr_fret_distance_restraint' def __call__(self, ordinal_id, id, group_id, sample_probe_id_1, sample_probe_id_2, state_id, analysis_id, distance: float, distance_error_plus: float, distance_error_minus: float, distance_type, population_fraction: float, peak_assignment_id): r = self.sysr.flr_fret_distance_restraints.get_by_id(id) r.sample_probe_1 = self.sysr.flr_sample_probe_details.get_by_id( sample_probe_id_1) r.sample_probe_2 = self.sysr.flr_sample_probe_details.get_by_id( sample_probe_id_2) r.state = self.sysr.states.get_by_id_or_none(state_id) r.analysis = self.sysr.flr_fret_analyses.get_by_id(analysis_id) r.peak_assignment = self.sysr.flr_peak_assignments.get_by_id( peak_assignment_id) r.distance = distance r.distance_error_plus = distance_error_plus r.distance_error_minus = distance_error_minus r.distance_type = distance_type r.population_fraction = population_fraction # also create the fret_distance_restraint_group rg = 
self.sysr.flr_fret_distance_restraint_groups.get_by_id(group_id) rg.add_distance_restraint(r) class _FLRFretModelQualityHandler(Handler): category = '_flr_fret_model_quality' def __call__(self, model_id, chi_square_reduced: float, dataset_group_id, method, details): q = self.sysr.flr_fret_model_qualities.get_by_id(model_id) q.model = self.sysr.models.get_by_id(model_id) q.chi_square_reduced = chi_square_reduced q.dataset_group = self.sysr.dataset_groups.get_by_id(dataset_group_id) self.copy_if_present(q, locals(), keys=('method', 'details')) class _FLRFretModelDistanceHandler(Handler): category = '_flr_fret_model_distance' def __init__(self, *args): super().__init__(*args) self._read_args = [] def __call__(self, id, restraint_id, model_id, distance: float, distance_deviation: float): md = self.sysr.flr_fret_model_distances.get_by_id(id) md.restraint = self.sysr.flr_fret_distance_restraints.get_by_id( restraint_id) md.model = self.sysr.models.get_by_id(model_id) md.distance = distance md.distance_deviation = distance_deviation self._read_args.append(md) def finalize(self): for md in self._read_args: md.calculate_deviation() class _FLRFPSGlobalParameterHandler(Handler): category = '_flr_fps_global_parameter' def __call__(self, id, forster_radius_value: float, conversion_function_polynom_order: int, repetition: int, av_grid_rel: float, av_min_grid_a: float, av_allowed_sphere: float, av_search_nodes: int, av_e_samples_k: float, sim_viscosity_adjustment: float, sim_dt_adjustment: float, sim_max_iter_k: float, sim_max_force: float, sim_clash_tolerance_a: float, sim_reciprocal_kt: float, sim_clash_potential, convergence_e: float, convergence_k: float, convergence_f: float, convergence_t: float): p = self.sysr.flr_fps_global_parameters.get_by_id(id) p.forster_radius = forster_radius_value p.conversion_function_polynom_order = conversion_function_polynom_order p.repetition = repetition p.av_grid_rel = av_grid_rel p.av_min_grid_a = av_min_grid_a p.av_allowed_sphere = av_allowed_sphere p.av_search_nodes = av_search_nodes p.av_e_samples_k = av_e_samples_k p.sim_viscosity_adjustment = sim_viscosity_adjustment p.sim_dt_adjustment = sim_dt_adjustment p.sim_max_iter_k = sim_max_iter_k p.sim_max_force = sim_max_force p.sim_clash_tolerance_a = sim_clash_tolerance_a p.sim_reciprocal_kt = sim_reciprocal_kt p.sim_clash_potential = sim_clash_potential p.convergence_e = convergence_e p.convergence_k = convergence_k p.convergence_f = convergence_f p.convergence_t = convergence_t class _FLRFPSModelingHandler(Handler): category = '_flr_fps_modeling' def __call__(self, id, ihm_modeling_protocol_ordinal_id, restraint_group_id, global_parameter_id, probe_modeling_method, details): m = self.sysr.flr_fps_modeling.get_by_id(id) m.protocol = self.sysr.protocols.get_by_id( ihm_modeling_protocol_ordinal_id) m.restraint_group = \ self.sysr.flr_fret_distance_restraint_groups.get_by_id( restraint_group_id) m.global_parameter = self.sysr.flr_fps_global_parameters.get_by_id( global_parameter_id) self.copy_if_present(m, locals(), keys=('probe_modeling_method', 'details')) class _FLRFPSAVParameterHandler(Handler): category = '_flr_fps_av_parameter' def __call__(self, id, num_linker_atoms: int, linker_length: float, linker_width: float, probe_radius_1: float, probe_radius_2: float, probe_radius_3: float): p = self.sysr.flr_fps_av_parameters.get_by_id(id) p.num_linker_atoms = num_linker_atoms p.linker_length = linker_length p.linker_width = linker_width p.probe_radius_1 = probe_radius_1 p.probe_radius_2 = probe_radius_2 
p.probe_radius_3 = probe_radius_3 class _FLRFPSAVModelingHandler(Handler): category = '_flr_fps_av_modeling' def __call__(self, id, sample_probe_id, fps_modeling_id, parameter_id): m = self.sysr.flr_fps_av_modeling.get_by_id(id) m.fps_modeling = self.sysr.flr_fps_modeling.get_by_id(fps_modeling_id) m.sample_probe = self.sysr.flr_sample_probe_details.get_by_id( sample_probe_id) m.parameter = self.sysr.flr_fps_av_parameters.get_by_id(parameter_id) class _FLRFPSMPPHandler(Handler): category = '_flr_fps_mean_probe_position' def __call__(self, id, sample_probe_id, mpp_xcoord: float, mpp_ycoord: float, mpp_zcoord: float): p = self.sysr.flr_fps_mean_probe_positions.get_by_id(id) p.sample_probe = self.sysr.flr_sample_probe_details.get_by_id( sample_probe_id) p.x = mpp_xcoord p.y = mpp_ycoord p.z = mpp_zcoord class _FLRFPSMPPAtomPositionHandler(Handler): category = '_flr_fps_mpp_atom_position' def __call__(self, id, group_id, seq_id: int, atom_id, asym_id, xcoord: float, ycoord: float, zcoord: float): asym = self.sysr.asym_units.get_by_id(asym_id) p = self.sysr.flr_fps_mpp_atom_positions.get_by_id(id) p.atom = asym.residue(seq_id).atom(atom_id) p.x = xcoord p.y = ycoord p.z = zcoord g = self.sysr.flr_fps_mpp_atom_position_groups.get_by_id(group_id) g.add_atom_position(p) class _FLRFPSMPPModelingHandler(Handler): category = '_flr_fps_mpp_modeling' def __call__(self, ordinal_id, fps_modeling_id, mpp_id, mpp_atom_position_group_id): m = self.sysr.flr_fps_mpp_modeling.get_by_id(ordinal_id) m.fps_modeling = self.sysr.flr_fps_modeling.get_by_id(fps_modeling_id) m.mpp = self.sysr.flr_fps_mean_probe_positions.get_by_id(mpp_id) m.mpp_atom_position_group = \ self.sysr.flr_fps_mpp_atom_position_groups.get_by_id( mpp_atom_position_group_id) class _FLRKineticRateFretAnalysisConnectionHandler(Handler): category = '_flr_kinetic_rate_analysis' def __call__(self, id, fret_analysis_id, kinetic_rate_id, details): f = self.sysr.flr_fret_analyses.get_by_id(fret_analysis_id) k = self.sysr.kinetic_rates.get_by_id(kinetic_rate_id) c = self.sysr.flr_kinetic_rate_fret_analysis_connection.get_by_id(id) c.fret_analysis = f c.kinetic_rate = k c.details = details class _FLRRelaxationTimeFretAnalysisConnectionHandler(Handler): category = '_flr_relaxation_time_analysis' def __init__(self, *args): super().__init__(*args) self._read_args = [] def __call__(self, id, fret_analysis_id, relaxation_time_id, details): f = self.sysr.flr_fret_analyses.get_by_id(fret_analysis_id) r = self.sysr.relaxation_times.get_by_id(relaxation_time_id) self._read_args.append((id, f, r, details)) def finalize(self): for (id, f, r, details) in self._read_args: tmp_connection = \ self.sysr.flr_relaxation_time_fret_analysis_connection c = tmp_connection.get_by_id(id) c.fret_analysis = f c.relaxation_time = r c.details = details _flr_handlers = [_FLRChemDescriptorHandler, _FLRInstSettingHandler, _FLRExpConditionHandler, _FLRInstrumentHandler, _FLRSampleConditionHandler, _FLREntityAssemblyHandler, _FLRSampleHandler, _FLRExperimentHandler, _FLRProbeListHandler, _FLRProbeDescriptorHandler, _FLRPolyProbePositionHandler, _FLRPolyProbePositionModifiedHandler, _FLRPolyProbePositionMutatedHandler, _FLRSampleProbeDetailsHandler, _FLRPolyProbeConjugateHandler, _FLRFretForsterRadiusHandler, _FLRFretCalibrationParametersHandler, _FLRFretAnalysisHandler, _FLRFretAnalysisIntensityHandler, _FLRFretAnalysisLifetimeHandler, _FLRLifetimeFitModelHandler, _FLRRefMeasurementHandler, _FLRRefMeasurementGroupHandler, _FLRRefMeasurementGroupLinkHandler, 
_FLRRefMeasurementLifetimeHandler, _FLRPeakAssignmentHandler, _FLRFretDistanceRestraintHandler, _FLRFretModelQualityHandler, _FLRFretModelDistanceHandler, _FLRFPSGlobalParameterHandler, _FLRFPSModelingHandler, _FLRFPSAVParameterHandler, _FLRFPSAVModelingHandler, _FLRFPSMPPHandler, _FLRFPSMPPAtomPositionHandler, _FLRFPSMPPModelingHandler, _FLRKineticRateFretAnalysisConnectionHandler, _FLRRelaxationTimeFretAnalysisConnectionHandler] class Variant: """Utility class to select the type of file to read with :func:`read`.""" #: Class to track global file information, e.g. :class:`SystemReader` system_reader = None def get_handlers(self, sysr): """Get the :class:`Handler` objects to use to parse input. :param sysr: class to track global file information. :type sysr: :class:`SystemReader` :return: a list of :class:`Handler` objects. """ pass def get_audit_conform_handler(self, sysr): """Get a :class:`Handler` to check the audit_conform table. If :func:`read` is called with ``reject_old_file=True``, this handler is used to check the audit_conform table and reject the file if it is deemed to be too old. :param sysr: class to track global file information. :type sysr: :class:`SystemReader` :return: a suitable handler. :rtype: :class:`Handler` """ pass class IHMVariant(Variant): """Used to select typical PDBx/IHM file input. See :func:`read`.""" system_reader = SystemReader _handlers = [ _CollectionHandler, _StructHandler, _SoftwareHandler, _CitationHandler, _DatabaseHandler, _DatabaseStatusHandler, _AuditAuthorHandler, _AuditRevisionHistoryHandler, _AuditRevisionDetailsHandler, _AuditRevisionGroupHandler, _AuditRevisionCategoryHandler, _AuditRevisionItemHandler, _DataUsageHandler, _GrantHandler, _CitationAuthorHandler, _ChemCompHandler, _ChemDescriptorHandler, _EntityHandler, _EntitySrcNatHandler, _EntitySrcGenHandler, _EntitySrcSynHandler, _StructRefHandler, _StructRefSeqHandler, _StructRefSeqDifHandler, _EntityPolyHandler, _EntityPolySeqHandler, _EntityNonPolyHandler, _EntityPolySegmentHandler, _StructAsymHandler, _AssemblyDetailsHandler, _AssemblyHandler, _ExtRefHandler, _ExtFileHandler, _DatasetListHandler, _DatasetGroupHandler, _DatasetGroupLinkHandler, _DatasetExtRefHandler, _DatasetDBRefHandler, _DataTransformationHandler, _RelatedDatasetsHandler, _ModelRepresentationHandler, _ModelRepresentationDetailsHandler, _StartingModelDetailsHandler, _StartingComputationalModelsHandler, _StartingComparativeModelsHandler, _ProtocolHandler, _ProtocolDetailsHandler, _PostProcessHandler, _ModelListHandler, _ModelGroupHandler, _ModelGroupLinkHandler, _ModelRepresentativeHandler, _MultiStateHandler, _MultiStateLinkHandler, _EnsembleHandler, _NotModeledResidueRangeHandler, _DensityHandler, _SubsampleHandler, _EM3DRestraintHandler, _EM2DRestraintHandler, _EM2DFittingHandler, _SASRestraintHandler, _SphereObjSiteHandler, _AtomSiteHandler, _FeatureListHandler, _PolyResidueFeatureHandler, _PolyAtomFeatureHandler, _NonPolyFeatureHandler, _PseudoSiteFeatureHandler, _PseudoSiteHandler, _DerivedDistanceRestraintHandler, _HDXRestraintHandler, _PredictedContactRestraintHandler, _CenterHandler, _TransformationHandler, _GeometricObjectHandler, _SphereHandler, _TorusHandler, _HalfTorusHandler, _AxisHandler, _PlaneHandler, _GeometricRestraintHandler, _PolySeqSchemeHandler, _NonPolySchemeHandler, _BranchSchemeHandler, _EntityBranchListHandler, _BranchDescriptorHandler, _BranchLinkHandler, _CrossLinkListHandler, _CrossLinkRestraintHandler, _CrossLinkPseudoSiteHandler, _CrossLinkResultParametersHandler, _CrossLinkResultHandler, 
_StartingModelSeqDifHandler, _OrderedModelHandler, _OrderedEnsembleHandler, _MultiStateSchemeHandler, _MultiStateSchemeConnectivityHandler, _KineticRateHandler, _RelaxationTimeHandler, _RelaxationTimeMultiStateSchemeHandler ] def get_handlers(self, sysr): return [h(sysr) for h in self._handlers + _flr_handlers] def get_audit_conform_handler(self, sysr): return _AuditConformHandler(sysr) def read(fh, model_class=ihm.model.Model, format='mmCIF', handlers=[], warn_unknown_category=False, warn_unknown_keyword=False, read_starting_model_coord=True, starting_model_class=ihm.startmodel.StartingModel, reject_old_file=False, variant=IHMVariant, add_to_system=None): """Read data from the file handle `fh`. Note that the reader currently expects to see a file compliant with the PDBx and/or IHM dictionaries. It is not particularly tolerant of noncompliant or incomplete files, and will probably throw an exception rather than warning about and trying to handle such files. Please `open an issue `_ if you encounter such a problem. Files can be read in either the text-based mmCIF format or the BinaryCIF format. The mmCIF reader works by breaking the file into tokens, and using this stream of tokens to populate Python data structures. Two tokenizers are available: a pure Python implementation and a C-accelerated version. The C-accelerated version is much faster and so is used if built. The BinaryCIF reader needs the msgpack Python module to function. The file handle should be opened in text mode for mmCIF files. Traditionally, mmCIF files used ASCII encoding. More and more recent files are UTF-8 encoded instead, but some use other encodings such as latin-1. To handle most current files use something like:: try: with open('input.cif', encoding='utf-8') as fh: systems = ihm.reader.read(fh) except UnicodeDecodeError: with open('input.cif', encoding='latin-1') as fh: systems = ihm.reader.read(fh) The file handle should be opened in binary mode for BinaryCIF files:: with open('input.bcif', 'rb') as fh: systems = ihm.reader.read(fh, format='BCIF') :param file fh: The file handle to read from. (For BinaryCIF files, the file should be opened in binary mode. For mmCIF files, files opened in binary mode with Python 3 will be treated as if they are Latin-1-encoded.) :param model_class: The class to use to store model information (such as coordinates). For use with other software, it is recommended to subclass :class:`ihm.model.Model` and override :meth:`~ihm.model.Model.add_sphere` and/or :meth:`~ihm.model.Model.add_atom`, and provide that subclass here. See :meth:`ihm.model.Model.get_spheres` for more information. :param str format: The format of the file. This can be 'mmCIF' (the default) for the (text-based) mmCIF format or 'BCIF' for BinaryCIF. :param list handlers: A list of :class:`Handler` classes (not objects). These can be used to read extra categories from the file. :param bool warn_unknown_category: if set, emit an :exc:`UnknownCategoryWarning` for each unknown category encountered in the file. :param bool warn_unknown_keyword: if set, emit an :exc:`UnknownKeywordWarning` for each unknown keyword (within an otherwise-handled category) encountered in the file. :param bool read_starting_model_coord: if set, read coordinates for starting models, if provided in the file. :param starting_model_class: The class to use to store starting model information. 
If `read_starting_model_coord` is also set, it is recommended to subclass :class:`ihm.startmodel.StartingModel` and override :meth:`~ihm.startmodel.StartingModel.add_atom` and/or :meth:`~ihm.startmodel.StartingModel.add_seq_dif`. :param bool reject_old_file: If True, raise an :exc:`ihm.reader.OldFileError` if the file conforms to an older version of the dictionary than this library supports (by default the library will read what it can from the file). :param variant: A class or object that selects the type of file to read. This primarily controls the set of tables that are read from the file. In most cases the default :class:`IHMVariant` should be used. :type variant: :class:`Variant` :param add_to_system: If provided, all data read from the file are added to the existing System, rather than being placed in new System objects. This System must itself have previously been read from a file (so that objects have IDs, which can be used to map data in the new file to the existing System). Note however that this will not handle duplicate IDs (it is intended for depositions where the data are split between multiple files) so cannot be used to combine two disparate mmCIF files into one. :type add_to_system: :class:`ihm.System` :return: A list of :class:`ihm.System` objects. """ if isinstance(variant, type): variant = variant() systems = [] reader_map = {'mmCIF': ihm.format.CifReader, 'BCIF': ihm.format_bcif.BinaryCifReader} uchandler = _UnknownCategoryHandler() if warn_unknown_category else None ukhandler = _UnknownKeywordHandler() if warn_unknown_keyword else None r = reader_map[format](fh, {}, unknown_category_handler=uchandler, unknown_keyword_handler=ukhandler) while True: if add_to_system: s = variant.system_reader(model_class, starting_model_class, system=add_to_system) else: # e.g. older ModelCIF's SystemReader doesn't support add_to_system s = variant.system_reader(model_class, starting_model_class) hs = variant.get_handlers(s) + [h(s) for h in handlers] if reject_old_file: hs.append(variant.get_audit_conform_handler(s)) if read_starting_model_coord: hs.append(_StartingModelCoordHandler(s)) if uchandler: uchandler.reset() if ukhandler: ukhandler.add_category_handlers(hs) r.category_handler = dict((h.category, h) for h in hs) more_data = r.read_file() for h in hs: h.finalize() s.finalize() _finalize_entities(s.system) systems.append(s.system) if not more_data: break return systems python-ihm-2.7/ihm/reference.py000066400000000000000000000142731503573337200165620ustar00rootroot00000000000000"""Classes for providing extra information about an :class:`ihm.Entity`""" import urllib.request class Reference: """Base class for extra information about an :class:`ihm.Entity`. This class is not used directly; instead, use a subclass such as :class:`Sequence` or :class:`UniProtSequence`. These objects are then typically passed to the :class:`ihm.Entity` constructor.""" pass class Sequence(Reference): """Point to the sequence of an :class:`ihm.Entity` in a sequence database; convenience subclasses are provided for common sequence databases such as :class:`UniProtSequence`. These objects are typically passed to the :class:`ihm.Entity` constructor. See also :attr:`alignments` to describe the correspondence between the database and entity sequences. :param str db_name: The name of the database. :param str db_code: The name of the sequence in the database. :param str accession: The database accession. :param str sequence: The complete sequence, as a string of one-letter codes. 
:param str details: Longer text describing the sequence. """ def __init__(self, db_name, db_code, accession, sequence, details=None): self.db_name, self.db_code = db_name, db_code self.accession = accession self.sequence, self.details = sequence, details #: All alignments between the reference and entity sequences, as #: :class:`Alignment` objects. If none are provided, a simple 1:1 #: alignment is assumed. self.alignments = [] def _signature(self): # Ignore "details" return ((self.db_name, self.db_code, self.accession, self.sequence) + tuple(a._signature() for a in self.alignments)) def _get_alignments(self): if self.alignments: return self.alignments elif not hasattr(self, '_default_alignment'): self._default_alignment = Alignment() return [self._default_alignment] class UniProtSequence(Sequence): """Point to the sequence of an :class:`ihm.Entity` in UniProt. These objects are typically passed to the :class:`ihm.Entity` constructor. :param str db_code: The UniProt name (e.g. NUP84_YEAST) :param str accession: The UniProt accession (e.g. P52891) See :class:`Sequence` for a description of the remaining parameters. """ _db_name = 'UNP' def __init__(self, db_code, accession, sequence, details=None): super().__init__(self._db_name, db_code, accession, sequence, details) def __str__(self): return "" % self.accession @classmethod def from_accession(cls, accession): """Create :class:`UniProtSequence` from just an accession. This is done by querying the UniProt web API, so requires network access. :param str accession: The UniProt accession (e.g. P52891) """ # urlopen returns bytes def decode(t): return t.decode('ascii') url = 'https://www.uniprot.org/uniprot/%s.fasta' % accession with urllib.request.urlopen(url) as fh: header = decode(fh.readline()) spl = header.split('|') if len(spl) < 3 or spl[0] not in ('>sp', '>tr'): raise ValueError("Cannot parse UniProt header %s" % header) cd = spl[2].split(None, 1) code = cd[0] details = cd[1].rstrip('\r\n') if len(cd) > 1 else None seq = decode(fh.read()).replace('\n', '') return cls(code, accession, seq, details) class Alignment: """A sequence range that aligns between the database and the entity. This describes part of the sequence in the sequence database (:class:`Sequence`) and in the :class:`ihm.Entity`. The two ranges must be the same length and have the same primary sequence (any differences must be described with :class:`SeqDif` objects). :param int db_begin: The first residue in the database sequence that is used (defaults to the entire sequence). :param int db_end: The last residue in the database sequence that is used (or None, the default, to use the entire sequence). :param int entity_begin: The first residue in the :class:`~ihm.Entity` sequence that is taken from the reference (defaults to the entire entity sequence). :param int entity_end: The last residue in the :class:`~ihm.Entity` sequence that is taken from the reference (or None, the default, to use the entire sequence). :param seq_dif: Single-point mutations made to the sequence. :type seq_dif: Sequence of :class:`SeqDif` objects. 
""" def __init__(self, db_begin=1, db_end=None, entity_begin=1, entity_end=None, seq_dif=[]): self.db_begin, self.db_end = db_begin, db_end self.entity_begin, self.entity_end = entity_begin, entity_end self.seq_dif = [] self.seq_dif.extend(seq_dif) def _signature(self): return ((self.db_begin, self.db_end, self.entity_begin, self.entity_end) + tuple(s._signature() for s in self.seq_dif)) class SeqDif: """Annotate a sequence difference between a reference and entity sequence. See :class:`Alignment`. :param int seq_id: The residue index in the entity sequence. :param db_monomer: The monomer type (as a :class:`~ihm.ChemComp` object) in the reference sequence. :type db_monomer: :class:`ihm.ChemComp` :param monomer: The monomer type (as a :class:`~ihm.ChemComp` object) in the entity sequence. :type monomer: :class:`ihm.ChemComp` :param str details: Descriptive text for the sequence difference. """ def __init__(self, seq_id, db_monomer, monomer, details=None): self.seq_id, self.db_monomer = seq_id, db_monomer self.monomer, self.details = monomer, details def _signature(self): # Don't ignore "details", as these distinguish insertions from # deletions return (self.seq_id, self.db_monomer, self.monomer, self.details) python-ihm-2.7/ihm/report.py000066400000000000000000000150031503573337200161270ustar00rootroot00000000000000"""Helper classes to provide a summary report of an :class:`ihm.System`""" import ihm import sys import warnings import collections class MissingDataWarning(UserWarning): pass class LocalFilesWarning(UserWarning): pass class MissingFileWarning(UserWarning): pass def _get_name(name): if name: return repr(name) else: return "(unnamed)" class _SectionReporter: def __init__(self, title, fh): self.fh = fh print("\n\n# " + title, file=self.fh) def report(self, txt): print(" " + str(txt), file=self.fh) class Reporter: def __init__(self, system, fh=sys.stdout): self.system = system self.fh = fh def report(self): print("Title: %s" % self.system.title, file=self.fh) self.report_entities() self.report_asyms() self.report_representations() self.report_databases() self.report_files() self.report_citations() self.report_software() self.report_protocols() self.report_restraints() self.report_models() self.report_ensembles() def _section(self, title): return _SectionReporter(title, self.fh) def report_entities(self): r = self._section("Entities (unique sequences)") asyms_for_entity = collections.defaultdict(list) for a in self.system.asym_units: asyms_for_entity[a.entity].append(a) for e in self.system.entities: asyms = asyms_for_entity[e] r.report("- %s (length %d, %d instances, chain IDs %s)" % (e.description, len(e.sequence), len(asyms), ", ".join(a.id for a in asyms))) if len(e.references) == 0: warnings.warn( "No reference sequence (e.g. 
from UniProt) provided " "for %s" % e, MissingDataWarning) for ref in e.references: r.report(" - from %s" % str(ref)) def report_asyms(self): r = self._section("Asyms/chains") for a in self.system.asym_units: r.report("- %s (chain ID %s)" % (a.details, a.id)) def report_citations(self): r = self._section("Publications cited") for c in self.system._all_citations(): r.report('- "%s", %s, %s' % (c.title, c.journal, c.year)) def report_software(self): r = self._section("Software used") for s in ihm._remove_identical(self.system._all_software()): if s.version is None: r.report("- %s (no version given)" % s.name) else: r.report("- %s (version %s)" % (s.name, s.version)) if not s.citation: warnings.warn( "No citation provided for %s" % s, MissingDataWarning) def report_databases(self): r = self._section("External databases referenced") for loc in ihm._remove_identical(self.system._all_locations()): if isinstance(loc, ihm.location.DatabaseLocation): r.report(" - %s accession %s" % (loc.db_name, loc.access_code)) def report_files(self): r = self._section("Additional files referenced") locs_by_repo = collections.defaultdict(list) for loc in ihm._remove_identical(self.system._all_locations()): if not isinstance(loc, ihm.location.DatabaseLocation): locs_by_repo[loc.repo].append(loc) for repo, locs in locs_by_repo.items(): r.report("- %s" % ("DOI: " + repo.doi if repo else "Local files")) for loc in locs: r.report(" - %r, %s" % (loc.path, loc.details)) if None in locs_by_repo: warnings.warn( "The following local files are referenced (they will need to " "be deposited in a database or with a DOI): %s" % [loc.path for loc in locs_by_repo[None]], LocalFilesWarning) def report_representations(self): r = self._section("Model representation") for rep in self.system._all_representations(): if hasattr(rep, '_id'): r.report("- Representation %s" % rep._id) else: r.report("- Representation") for segment in rep: r.report(" - " + segment._get_report()) def report_protocols(self): r = self._section("Modeling protocols") for prot in self.system._all_protocols(): r.report("- " + (prot.name if prot.name else "Unnamed protocol")) for step in prot.steps: r.report(" - " + step._get_report()) for analysis in prot.analyses: r.report(" - Analysis") for step in analysis.steps: r.report(" - " + step._get_report()) def report_restraints(self): r = self._section("Restraints") for rsr in ihm._remove_identical(self.system._all_restraints()): r.report("- " + rsr._get_report()) def report_models(self): r = self._section("Models") for sg in self.system.state_groups: r.report("- State group") for state in sg: r.report(" - State %s" % _get_name(state.name)) for mg in state: r.report(" - Model group %s containing %d models" % (_get_name(mg.name), len(mg))) def report_ensembles(self): r = self._section("Ensembles") for e in self.system.ensembles: r.report("- Ensemble %s containing %d models" % (_get_name(e.name), e.num_models)) if e.model_group is not None: r.report(" - From model group %s" % _get_name(e.model_group.name)) if e.precision is not None: r.report(" - Precision %.1f" % e.precision) if e.file: r.report(" - In external file %s" % e.file) if (e.model_group is not None and not e.file and e.num_models > len(e.model_group)): warnings.warn( "%s references more models (%d) than are deposited in " "its model group, but does not reference an external file" % (e, e.num_models), MissingFileWarning) for d in e.densities: asym = d.asym_unit r.report(" - Localization density for %s %d-%d" % (asym.details, asym.seq_id_range[0], 
asym.seq_id_range[1])) python-ihm-2.7/ihm/representation.py000066400000000000000000000156421503573337200176670ustar00rootroot00000000000000"""Classes for handling representation of the system during modeling. """ def _starting_model_report(seg): if seg.starting_model: if hasattr(seg.starting_model, '_id'): return " (from starting model %s)" % seg.starting_model._id else: return " (from starting model)" else: return "" class Segment: """Base class for part of a :class:`Representation`. See :class:`AtomicSegment`, :class:`ResidueSegment`, :class:`MultiResidueSegment`, and :class:`FeatureSegment`. """ def _get_report(self): """Return a textual description of the object, used by :meth:`ihm.System.report`""" return str(self) class AtomicSegment(Segment): """Part of the system modeled atomistically, stored in a :class:`Representation`. :param asym_unit: The asymmetric unit (or part of one) that this segment represents. :type asym_unit: :class:`~ihm.AsymUnit` or :class:`~ihm.AsymUnitRange` :param bool rigid: Whether internal coordinates of the segment were fixed during modeling. :param starting_model: initial coordinates used for the segment (or None). :type starting_model: :class:`~ihm.startmodel.StartingModel` :param str description: Additional text describing this segment. """ primitive = 'atomistic' count = None granularity = 'by-atom' def _get_report(self): asym = self.asym_unit return ("%s %d-%d as %s atoms%s" % (asym.details, asym.seq_id_range[0], asym.seq_id_range[1], "rigid" if self.rigid else "flexible", _starting_model_report(self))) def __init__(self, asym_unit, rigid, starting_model=None, description=None): self.asym_unit = asym_unit self.starting_model, self.rigid = starting_model, rigid self.description = description class ResidueSegment(Segment): """Part of the system modeled as a set of residues, stored in a :class:`Representation`. :param asym_unit: The asymmetric unit (or part of one) that this segment represents. :type asym_unit: :class:`~ihm.AsymUnit` or :class:`~ihm.AsymUnitRange` :param bool rigid: Whether internal coordinates of the segment were fixed during modeling. :param str primitive: The type of object used to represent this segment (sphere/gaussian/other). :param starting_model: initial coordinates used for the segment (or None). :type starting_model: :class:`~ihm.startmodel.StartingModel` :param str description: Additional text describing this segment. """ count = None granularity = 'by-residue' def _get_report(self): asym = self.asym_unit return ("%s %d-%d as %s residues%s" % (asym.details, asym.seq_id_range[0], asym.seq_id_range[1], "rigid" if self.rigid else "flexible", _starting_model_report(self))) def __init__(self, asym_unit, rigid, primitive, starting_model=None, description=None): self.asym_unit = asym_unit self.primitive = primitive self.starting_model, self.rigid = starting_model, rigid self.description = description class MultiResidueSegment(Segment): """Part of the system modeled as a single object representing a range of residues, stored in a :class:`Representation`. :param asym_unit: The asymmetric unit (or part of one) that this segment represents. :type asym_unit: :class:`~ihm.AsymUnit` or :class:`~ihm.AsymUnitRange` :param bool rigid: Whether internal coordinates of the segment were fixed during modeling. :param str primitive: The type of object used to represent this segment (sphere/gaussian/other). :param starting_model: initial coordinates used for the segment (or None). 
:type starting_model: :class:`~ihm.startmodel.StartingModel` :param str description: Additional text describing this segment. """ count = None granularity = 'multi-residue' def __init__(self, asym_unit, rigid, primitive, starting_model=None, description=None): self.asym_unit = asym_unit self.primitive = primitive self.starting_model, self.rigid = starting_model, rigid self.description = description class FeatureSegment(Segment): """Part of the system modeled as a number of geometric features, stored in a :class:`Representation`. :param asym_unit: The asymmetric unit (or part of one) that this segment represents. :type asym_unit: :class:`~ihm.AsymUnit` or :class:`~ihm.AsymUnitRange` :param bool rigid: Whether internal coordinates of the segment were fixed during modeling. :param str primitive: The type of object used to represent this segment (sphere/gaussian/other). :param int count: The number of objects used to represent this segment. :param starting_model: initial coordinates used for the segment (or None). :type starting_model: :class:`~ihm.startmodel.StartingModel` :param str description: Additional text describing this segment. """ granularity = 'by-feature' def _get_report(self): asym = self.asym_unit return ("%s %d-%d as %d %s feature%s (%s)%s" % (asym.details, asym.seq_id_range[0], asym.seq_id_range[1], self.count, "rigid" if self.rigid else "flexible", "" if self.count == 1 else "s", self.primitive, _starting_model_report(self))) def __init__(self, asym_unit, rigid, primitive, count, starting_model=None, description=None): self.asym_unit = asym_unit self.primitive, self.count = primitive, count self.starting_model, self.rigid = starting_model, rigid self.description = description class Representation(list): """Part of the system modeled as a set of geometric objects, such as spheres or atoms. This is implemented as a simple list of :class:`Segment` objects. :param sequence elements: Initial set of segments. :param str name: A short descriptive name. :param str details: A longer description of the representation. Typically a Representation is assigned to a :class:`~ihm.model.Model`. See also :attr:`ihm.System.orphan_representations`. Multiple representations of the same system are possible (multi-scale). """ # For backwards compatibility with earlier versions of this class which # didn't specify name/details name = details = None # todo: use set rather than list? def __init__(self, elements=(), name=None, details=None): super().__init__(elements) self.name, self.details = name, details python-ihm-2.7/ihm/restraint.py000066400000000000000000001104411503573337200166310ustar00rootroot00000000000000"""Classes for handling restraints on the system. """ import ihm class PseudoSite: """Selection of a pseudo position in the system. Pseudo positions are typically used to reference a point or sphere that is not explicitly represented, in a :class:`PseudoSiteFeature` or :class:`CrossLinkPseudoSite`. :param float x: Cartesian X coordinate of this site. :param float y: Cartesian Y coordinate of this site. :param float z: Cartesian Z coordinate of this site. :param float radius: Radius of the site, if applicable. :param str description: Additional text describing this feature. """ def __init__(self, x, y, z, radius=None, description=None): self.x, self.y, self.z = x, y, z self.radius = radius self.description = description def _signature(self): return tuple("%.3f" % v if v else None for v in (self.x, self.y, self.z, self.radius)) class Restraint: """Base class for all restraints. 
See :attr:`ihm.System.restraints`. """ def _get_report(self): return str(self) class RestraintGroup(list): """A set of related :class:`Restraint` objects. This is implemented as a simple list. Note that due to limitations of the underlying dictionary, only certain combinations of restraints can be placed in groups. In particular, all objects in a group must be of the same type, and only certain types (currently only :class:`DerivedDistanceRestraint` and :class:`PredictedContactRestraint`) can be grouped. Empty groups can be created, but will be ignored on output as the dictionary does not support them. Restraint groups should be stored in the system by adding them to :attr:`ihm.System.restraint_groups`. """ pass class EM3DRestraint(Restraint): """Restrain part of the system to match an electron microscopy density map. :param dataset: Reference to the density map data (usually an :class:`~ihm.dataset.EMDensityDataset`). :type dataset: :class:`~ihm.dataset.Dataset` :param assembly: The part of the system that is fit into the map. :type assembly: :class:`~ihm.Assembly` :param bool segment: True iff the map has been segmented. :param str fitting_method: The method used to fit the model into the map. :param fitting_method_citation: The publication describing the fitting method. :type fitting_method_citation: :class:`~ihm.Citation` :param int number_of_gaussians: Number of Gaussians used to represent the map as a Gaussian Mixture Model (GMM), if applicable. :param str details: Additional details regarding the fitting. """ def _get_report(self): ret = "Fit to 3D electron microscopy density map" if self.fitting_method: ret += " using " + self.fitting_method return ret def __init__(self, dataset, assembly, segment=None, fitting_method=None, fitting_method_citation=None, number_of_gaussians=None, details=None): self.dataset, self.assembly = dataset, assembly self.segment, self.fitting_method = segment, fitting_method self.fitting_method_citation = fitting_method_citation self.number_of_gaussians = number_of_gaussians self.details = details #: Information about the fit of each model to this restraint's data. #: This is a Python dict where keys are :class:`~ihm.model.Model` #: objects and values are :class:`EM3DRestraintFit` objects. self.fits = {} class EM3DRestraintFit: """Information on the fit of a model to an :class:`EM3DRestraint`. See :attr:`EM3DRestaint.fits`. :param float cross_correlation_coefficient: The fit between the model and the map. """ __slots__ = ["cross_correlation_coefficient"] # Reduce memory usage def __init__(self, cross_correlation_coefficient=None): self.cross_correlation_coefficient = cross_correlation_coefficient class SASRestraint(Restraint): """Restrain part of the system to match small angle scattering (SAS) data. :param dataset: Reference to the SAS data (usually an :class:`~ihm.dataset.SASDataset`). :type dataset: :class:`~ihm.dataset.Dataset` :param assembly: The part of the system that is fit against SAS data. :type assembly: :class:`~ihm.Assembly` :param bool segment: True iff the SAS profile has been segmented. :param str fitting_method: The method used to fit the model against the SAS data (e.g. FoXS, DAMMIF). :param str fitting_atom_type: The set of atoms fit against the data (e.g. "Heavy atoms", "All atoms"). :param bool multi_state: Whether multiple state fitting was done. :param float radius_of_gyration: Radius of gyration obtained from the SAS profile, if used as part of the restraint. :param str details: Additional details regarding the fitting. 
""" def _get_report(self): state_map = {True: "Multi-state ", False: "Single-state "} ret = "%sSAS restraint" % state_map.get(self.multi_state, "") if self.fitting_atom_type: ret += " on " + self.fitting_atom_type return ret def __init__(self, dataset, assembly, segment=None, fitting_method=None, fitting_atom_type=None, multi_state=None, radius_of_gyration=None, details=None): self.dataset, self.assembly = dataset, assembly self.segment, self.fitting_method = segment, fitting_method self.fitting_atom_type = fitting_atom_type self.multi_state = multi_state self.radius_of_gyration = radius_of_gyration self.details = details #: Information about the fit of each model to this restraint's data. #: This is a Python dict where keys are :class:`~ihm.model.Model` #: objects and values are :class:`SASRestraintFit` objects. self.fits = {} class SASRestraintFit: """Information on the fit of a model to a :class:`SASRestraint`. See :attr:`SASRestaint.fits`. :param float chi_value: The fit between the model and the SAS data. """ __slots__ = ["chi_value"] # Reduce memory usage def __init__(self, chi_value=None): self.chi_value = chi_value class EM2DRestraint(Restraint): """Restrain part of the system to match an electron microscopy class average. :param dataset: Reference to the class average data (usually an :class:`~ihm.dataset.EM2DClassDataset`). :type dataset: :class:`~ihm.dataset.Dataset` :param assembly: The part of the system that is fit against the class. :type assembly: :class:`~ihm.Assembly` :param bool segment: True iff the image has been segmented. :param int number_raw_micrographs: The number of particles picked from the original raw micrographs that were used to create the class average. :param float pixel_size_width: Width of each pixel in the image, in angstroms. :param float pixel_size_height: Height of each pixel in the image, in angstroms. :param float image_resolution: Resolution of the image, in angstroms. :param int number_of_projections: Number of projections of the assembly used to fit against the image, if applicable. :param str details: Additional details regarding the fitting. """ def _get_report(self): return "Fit to 2D electron microscopy class average" def __init__(self, dataset, assembly, segment=None, number_raw_micrographs=None, pixel_size_width=None, pixel_size_height=None, image_resolution=None, number_of_projections=None, details=None): self.dataset, self.assembly = dataset, assembly self.segment = segment self.number_raw_micrographs = number_raw_micrographs self.pixel_size_width = pixel_size_width self.pixel_size_height = pixel_size_height self.image_resolution = image_resolution self.number_of_projections = number_of_projections self.details = details #: Information about the fit of each model to this restraint's data. #: This is a Python dict where keys are :class:`~ihm.model.Model` #: objects and values are :class:`EM2DRestraintFit` objects. self.fits = {} class EM2DRestraintFit: """Information on the fit of a model to an :class:`EM2DRestraint`. See :attr:`EM2DRestaint.fits`. :param float cross_correlation_coefficient: The fit between the model and the class average. :param rot_matrix: Rotation matrix (as a 3x3 array of floats) that places the model on the image. :param tr_vector: Translation vector (as a 3-element float list) that places the model on the image. 
""" __slots__ = ["cross_correlation_coefficient", "rot_matrix", "tr_vector"] # Reduce memory usage def __init__(self, cross_correlation_coefficient=None, rot_matrix=None, tr_vector=None): self.cross_correlation_coefficient = cross_correlation_coefficient self.rot_matrix, self.tr_vector = rot_matrix, tr_vector class CrossLinkRestraint(Restraint): """Restrain part of the system to match a set of cross-links. :param dataset: Reference to the cross-link data (usually a :class:`~ihm.dataset.CXMSDataset`). :type dataset: :class:`~ihm.dataset.Dataset` :param linker: The type of chemical linker used. :type linker: :class:`ihm.ChemDescriptor` """ assembly = None # no struct_assembly_id for XL restraints def _get_report(self): return ("%d %s cross-links from %d experimental identifications" % (len(self.cross_links), self.linker.auth_name, sum(len(x) for x in self.experimental_cross_links))) def __init__(self, dataset, linker): self.dataset, self.linker = dataset, linker #: All cross-links identified in the experiment, as a simple list #: of lists of :class:`ExperimentalCrossLink` objects. All cross-links #: in the same sublist are treated as experimentally ambiguous. For #: example, xl2 and xl3 here are considered ambiguous:: #: #: restraint.experimental_cross_links.append([xl1]) #: restraint.experimental_cross_links.append([xl2, xl3]) self.experimental_cross_links = [] #: All cross-links used in the modeling, as a list of #: :class:`CrossLink` objects. self.cross_links = [] class ExperimentalCrossLink: """A cross-link identified in the experiment. These objects, once created, should be added to the :attr:`CrossLinkRestraint.experimental_cross_links` list. :param residue1: The first residue linked by the cross-link. :type residue1: :class:`ihm.Residue` :param residue2: The second residue linked by the cross-link. :type residue2: :class:`ihm.Residue` :param str details: Additional text describing the cross-link. """ def __init__(self, residue1, residue2, details=None): self.residue1, self.residue2 = residue1, residue2 self.details = details class DistanceRestraint: """Base class for all distance restraints. These are typically used in a :class:`DerivedDistanceRestraint`. Do not use this class directly but instead use a derived class such as :class:`HarmonicDistanceRestraint`, :class:`UpperBoundDistanceRestraint`, :class:`LowerBoundDistanceRestraint`, or :class:`LowerUpperBoundDistanceRestraint`. """ restraint_type = None #: The minimum distance allowed for this restraint, #: or None if unconstrained distance_lower_limit = None #: The maximum distance allowed for this restraint, #: or None if unconstrained distance_upper_limit = None class HarmonicDistanceRestraint(DistanceRestraint): """Harmonically restrain two objects to be close to a given distance apart. These objects are typically used in a :class:`DerivedDistanceRestraint`. :param float distance: Equilibrium distance """ restraint_type = 'harmonic' def __init__(self, distance): self.distance = distance #: The equilibrium distance distance_lower_limit = property(lambda self: self.distance) #: The equilibrium distance distance_upper_limit = distance_lower_limit class UpperBoundDistanceRestraint(DistanceRestraint): """Harmonically restrain two objects to be below a given distance apart. These objects are typically used in a :class:`DerivedDistanceRestraint`. 
:param float distance: Distance threshold """ restraint_type = 'upper bound' def __init__(self, distance): self.distance = distance #: The maximum distance allowed by this restraint distance_upper_limit = property(lambda self: self.distance) #: Minimum distance (unconstrained, so always None) distance_lower_limit = None class LowerBoundDistanceRestraint(DistanceRestraint): """Harmonically restrain two objects to be above a given distance apart. These objects are typically used in a :class:`DerivedDistanceRestraint`. :param float distance: Distance threshold """ restraint_type = 'lower bound' def __init__(self, distance): self.distance = distance #: The minimum distance allowed by this restraint distance_lower_limit = property(lambda self: self.distance) #: Maximum distance (unconstrained, so always None) distance_upper_limit = None class LowerUpperBoundDistanceRestraint(DistanceRestraint): """Harmonically restrain two objects to be above a given distance and below another distance apart. These objects are typically used in a :class:`DerivedDistanceRestraint`. :param float distance_lower_limit: Lower bound on the distance. :param float distance_upper_limit: Upper bound on the distance. """ restraint_type = 'lower and upper bound' def __init__(self, distance_lower_limit, distance_upper_limit): #: The minimum distance allowed by this restraint self.distance_lower_limit = distance_lower_limit #: The maximum distance allowed by this restraint self.distance_upper_limit = distance_upper_limit class CrossLink: """Base class for all cross-links used in the modeling. Do not use this class directly, but instead use a subclass: :class:`ResidueCrossLink`, :class:`AtomCrossLink`, or :class:`FeatureCrossLink`.""" pass class CrossLinkPseudoSite: """Pseudo site corresponding to one end of a cross-link. These objects are used when the end of a cross-link is not represented in the model but its position is known (e.g. it may have been approximated given the position of nearby residues). They are passed as the ``pseudo1`` or ``pseudo2`` arguments to :class:`CrossLink` subclasses. :param site: The pseudo site coordinates :type site: :class:`PseudoSite` :param model: The model in whose coordinate system the pseudo site is active (if not specified, the coordinates are assumed to be valid for all models using this cross-link). :type model: :class:`ihm.model.Model` """ def __init__(self, site, model=None): self.site, self.model = site, model class ResidueCrossLink(CrossLink): """A cross-link used in the modeling, applied to residue alpha carbon atoms. These objects, once created, should be added to the :attr:`CrossLinkRestraint.cross_links` list. :param experimental_cross_link: The corresponding cross-link identified by experiment. Multiple cross-links can map to a single experimental identification. :type experimental_cross_link: :class:`ExperimentalCrossLink` :param asym1: The asymmetric unit containing the first linked residue. :type asym1: :class:`ihm.AsymUnit` :param asym2: The asymmetric unit containing the second linked residue. :type asym2: :class:`ihm.AsymUnit` :param distance: Restraint on the distance. :type distance: :class:`DistanceRestraint` :param float psi: Initial uncertainty in the experimental data. :param float sigma1: Initial uncertainty in the position of the first residue. :param float sigma2: Initial uncertainty in the position of the second residue. :param bool restrain_all: If True, all cross-links are restrained. 
:param pseudo1: List of pseudo sites representing the position of the first residue (if applicable). :type pseudo1: List of :class:`CrossLinkPseudoSite` :param pseudo2: List of pseudo sites representing the position of the second residue (if applicable). :type pseudo2: List of :class:`CrossLinkPseudoSite` """ granularity = 'by-residue' atom1 = atom2 = None def __init__(self, experimental_cross_link, asym1, asym2, distance, psi=None, sigma1=None, sigma2=None, restrain_all=None, pseudo1=None, pseudo2=None): self.experimental_cross_link = experimental_cross_link self.asym1, self.asym2 = asym1, asym2 self.psi, self.sigma1, self.sigma2 = psi, sigma1, sigma2 self.distance, self.restrain_all = distance, restrain_all self.pseudo1, self.pseudo2 = pseudo1, pseudo2 #: Information about the fit of models or groups to this cross-link. #: This is a Python dict where keys can be :class:`~ihm.model.Model` #: objects (with corresponding values as :class:`CrossLinkFit` objects) #: or :class:`~ihm.model.ModelGroup` or class:`~ihm.model.Ensemble` #: objects (with corresponding values as :class:`CrossLinkGroupFit` #: objects). self.fits = {} def _get_residue1(self): seq_id = self.experimental_cross_link.residue1.seq_id return self.asym1.residue(seq_id) residue1 = property(_get_residue1, doc="Residue object representing one end " "of the cross-link") def _get_residue2(self): seq_id = self.experimental_cross_link.residue2.seq_id return self.asym2.residue(seq_id) residue2 = property(_get_residue2, doc="Residue object representing one end " "of the cross-link") class FeatureCrossLink(CrossLink): """A cross-link used in the modeling, applied to the closest primitive object with the highest resolution. These objects, once created, should be added to the :attr:`CrossLinkRestraint.cross_links` list. :param experimental_cross_link: The corresponding cross-link identified by experiment. Multiple cross-links can map to a single experimental identification. :type experimental_cross_link: :class:`ExperimentalCrossLink` :param asym1: The asymmetric unit containing the first linked residue. :type asym1: :class:`ihm.AsymUnit` :param asym2: The asymmetric unit containing the second linked residue. :type asym2: :class:`ihm.AsymUnit` :param distance: Restraint on the distance. :type distance: :class:`DistanceRestraint` :param float psi: Initial uncertainty in the experimental data. :param float sigma1: Initial uncertainty in the position of the first residue. :param float sigma2: Initial uncertainty in the position of the second residue. :param bool restrain_all: If True, all cross-links are restrained. :param pseudo1: List of pseudo sites representing the position of the first residue (if applicable). :type pseudo1: List of :class:`CrossLinkPseudoSite` :param pseudo2: List of pseudo sites representing the position of the second residue (if applicable). :type pseudo2: List of :class:`CrossLinkPseudoSite` """ granularity = 'by-feature' atom1 = atom2 = None def __init__(self, experimental_cross_link, asym1, asym2, distance, psi=None, sigma1=None, sigma2=None, restrain_all=None, pseudo1=None, pseudo2=None): self.experimental_cross_link = experimental_cross_link self.asym1, self.asym2 = asym1, asym2 self.psi, self.sigma1, self.sigma2 = psi, sigma1, sigma2 self.distance, self.restrain_all = distance, restrain_all self.pseudo1, self.pseudo2 = pseudo1, pseudo2 #: Information about the fit of models or groups to this cross-link. 
#: This is a Python dict where keys can be :class:`~ihm.model.Model` #: objects (with corresponding values as :class:`CrossLinkFit` objects) #: or :class:`~ihm.model.ModelGroup` or class:`~ihm.model.Ensemble` #: objects (with corresponding values as :class:`CrossLinkGroupFit` #: objects). self.fits = {} class AtomCrossLink(CrossLink): """A cross-link used in the modeling, applied to the specified atoms. These objects, once created, should be added to the :attr:`CrossLinkRestraint.cross_links` list. :param experimental_cross_link: The corresponding cross-link identified by experiment. Multiple cross-links can map to a single experimental identification. :type experimental_cross_link: :class:`ExperimentalCrossLink` :param asym1: The asymmetric unit containing the first linked residue. :type asym1: :class:`ihm.AsymUnit` :param asym2: The asymmetric unit containing the second linked residue. :type asym2: :class:`ihm.AsymUnit` :param str atom1: The name of the first linked atom. :param str atom2: The name of the second linked atom. :param distance: Restraint on the distance. :type distance: :class:`DistanceRestraint` :param float psi: Initial uncertainty in the experimental data. :param float sigma1: Initial uncertainty in the position of the first residue. :param float sigma2: Initial uncertainty in the position of the second residue. :param bool restrain_all: If True, all cross-links are restrained. :param pseudo1: List of pseudo sites representing the position of the first residue (if applicable). :type pseudo1: List of :class:`CrossLinkPseudoSite` :param pseudo2: List of pseudo sites representing the position of the second residue (if applicable). :type pseudo2: List of :class:`CrossLinkPseudoSite` """ granularity = 'by-atom' def __init__(self, experimental_cross_link, asym1, asym2, atom1, atom2, distance, psi=None, sigma1=None, sigma2=None, restrain_all=None, pseudo1=None, pseudo2=None): self.experimental_cross_link = experimental_cross_link self.asym1, self.asym2 = asym1, asym2 self.atom1, self.atom2 = atom1, atom2 self.psi, self.sigma1, self.sigma2 = psi, sigma1, sigma2 self.distance, self.restrain_all = distance, restrain_all self.pseudo1, self.pseudo2 = pseudo1, pseudo2 #: Information about the fit of models or groups to this cross-link. #: This is a Python dict where keys can be :class:`~ihm.model.Model` #: objects (with corresponding values as :class:`CrossLinkFit` objects) #: or :class:`~ihm.model.ModelGroup` or class:`~ihm.model.Ensemble` #: objects (with corresponding values as :class:`CrossLinkGroupFit` #: objects). self.fits = {} class CrossLinkFit: """Information on the fit of a single model to a :class:`CrossLink`. See :attr:`ResidueCrossLink.fits`, :attr:`AtomCrossLink.fits`, or :attr:`FeatureCrossLink.fits`. See also :class:`CrossLinkGroupFit` for information on the fit of a model group or ensemble in aggregate to the cross-link. :param float psi: Uncertainty in the experimental data. :param float sigma1: Uncertainty in the position of the first residue. :param float sigma2: Uncertainty in the position of the second residue. """ __slots__ = ["psi", "sigma1", "sigma2"] # Reduce memory usage def __init__(self, psi=None, sigma1=None, sigma2=None): self.psi, self.sigma1, self.sigma2 = psi, sigma1, sigma2 class CrossLinkGroupFit: """Information on the fit of a :class:`~ihm.model.ModelGroup` or :class:`~ihm.model.Ensemble` in aggregate to a :class:`CrossLink`. See :attr:`ResidueCrossLink.fits`, :attr:`AtomCrossLink.fits`, or :attr:`FeatureCrossLink.fits`. 
See also :class:`CrossLinkFit` for information on the fit of a single model to the cross-link. :param float median_distance: Actual median cross-link distance in the sampled models. :param int num_models: Number of models sampled, for which the median distance is provided. :param str details: More information on the fit. """ __slots__ = ["median_distance", "num_models", "details"] # Reduce memory usage def __init__(self, median_distance, num_models=None, details=None): self.median_distance = median_distance self.num_models, self.details = num_models, details class Feature: """Base class for selecting parts of the system that a restraint acts on. This class should not be used itself; instead, see :class:`ResidueFeature`, :class:`AtomFeature`, :class:`NonPolyFeature`, and :class:`PseudoSiteFeature`. Features are typically assigned to one or more :class:`~ihm.restraint.GeometricRestraint` or :class:`~ihm.restraint.DerivedDistanceRestraint` objects. """ details = None type = ihm.unknown def _all_entities_or_asyms(self): # Get all Entities or AsymUnits referenced by this object return [] def _get_entity_type(self): return ihm.unknown class ResidueFeature(Feature): """Selection of one or more residues from the system. Residues can be selected from both :class:`ihm.AsymUnit` and :class:`ihm.Entity` (the latter implies that it selects residues in all instances of that entity). Individual residues can also be selected by passing :class:`ihm.Residue` objects. :param sequence ranges: A list of :class:`ihm.AsymUnitRange`, :class:`ihm.AsymUnit`, :class:`ihm.EntityRange`, :class:`ihm.Residue`, and/or :class:`ihm.Entity` objects. :param str details: Additional text describing this feature. """ # Type is 'residue' if each range selects a single residue, otherwise # it is 'residue range' def __get_type(self): for r in self.ranges: if r.seq_id_range[0] != r.seq_id_range[1]: return 'residue range' return 'residue' type = property(__get_type) def __init__(self, ranges, details=None): self.ranges, self.details = ranges, details _ = self._get_entity_type() def _signature(self): return tuple(self.ranges) def _all_entities_or_asyms(self): return self.ranges def _get_entity_type(self): def _get_entity(x): if isinstance(x, ihm.Entity): return x return x.entity if x.entity else x.asym.entity if any(not _get_entity(r).is_polymeric() for r in self.ranges): raise ValueError("%s cannot select non-polymeric entities" % self) else: return _get_entity(self.ranges[0]).type if self.ranges else None class AtomFeature(Feature): """Selection of one or more atoms from the system. Atoms can be selected from polymers or non-polymers (but not both). Atoms can also be selected from both :class:`ihm.AsymUnit` and :class:`ihm.Entity` (the latter implies that it selects atoms in all instances of that entity). For selecting an entire polymer or residue(s), see :class:`ResidueFeature`. For selecting an entire non-polymer, see :class:`NonPolyFeature`. :param sequence atoms: A list of :class:`ihm.Atom` objects. :param str details: Additional text describing this feature. 
""" type = 'atom' def __init__(self, atoms, details=None): self.atoms, self.details = atoms, details _ = self._get_entity_type() def _get_entity_type(self): def _get_entity(residue): return residue.entity if residue.entity else residue.asym.entity types = frozenset(_get_entity(a.residue).type for a in self.atoms) if len(types) > 1: raise ValueError("%s cannot span both polymeric and " "non-polymeric entities" % self) elif types: return tuple(types)[0] class NonPolyFeature(Feature): """Selection of one or more non-polymers from the system. To select individual atoms from a non-polymer, see :class:`AtomFeature`. Features can include both :class:`ihm.AsymUnit` and :class:`ihm.Entity` (the latter implies that it selects non-polymers in all instances of that entity). :param sequence objs: A list of :class:`ihm.AsymUnit` and/or :class:`ihm.Entity` objects. :param str details: Additional text describing this feature. """ type = 'ligand' def __init__(self, objs, details=None): self.objs, self.details = objs, details _ = self._get_entity_type() def _all_entities_or_asyms(self): return self.objs def _get_entity_type(self): def _get_entity(x): return x if isinstance(x, ihm.Entity) else x.entity if any(_get_entity(r).is_polymeric() for r in self.objs): raise ValueError( "%s can only select non-polymeric entities" % self) else: return _get_entity(self.objs[0]).type if self.objs else None class PseudoSiteFeature(Feature): """Selection of a pseudo position in the system. :param site: The pseudo site to use for the feature. :type site: :class:`PseudoSite` """ type = 'pseudo site' def __init__(self, site): self.site = site def _get_entity_type(self): return 'other' def _signature(self): return self.site._signature() class GeometricRestraint(Restraint): """A restraint between part of the system and some part of a geometric object. See :class:`CenterGeometricRestraint`, :class:`InnerSurfaceGeometricRestraint`, :class:`OuterSurfaceGeometricRestraint`. :param dataset: Reference to the data from which the restraint is derived. :type dataset: :class:`~ihm.dataset.Dataset` :param geometric_object: The geometric object to restrain against. :type geometric_object: :class:`ihm.geometry.GeometricObject` :param feature: The part of the system to restrain. :type feature: :class:`Feature` :param distance: Restraint on the distance. :type distance: :class:`DistanceRestraint` :param float harmonic_force_constant: Force constant, if applicable. :param bool restrain_all: If True, all distances are restrained. """ object_characteristic = 'other' assembly = None # no struct_assembly_id for geometric restraints def _get_report(self): return ("Distance (%s) to %s" % (self.distance.restraint_type, self.geometric_object.type)) def __init__(self, dataset, geometric_object, feature, distance, harmonic_force_constant=None, restrain_all=None, pseudo1=None, pseudo2=None): self.dataset = dataset self.geometric_object, self.feature = geometric_object, feature self.distance, self.restrain_all = distance, restrain_all self.harmonic_force_constant = harmonic_force_constant _all_features = property(lambda self: (self.feature,)) class CenterGeometricRestraint(GeometricRestraint): """A restraint between part of the system and the center of a geometric object. See :class:`GeometricRestraint` for a description of the parameters. """ object_characteristic = 'center' class InnerSurfaceGeometricRestraint(GeometricRestraint): """A restraint between part of the system and the inner surface of a geometric object. 
See :class:`GeometricRestraint` for a description of the parameters. """ object_characteristic = 'inner surface' class OuterSurfaceGeometricRestraint(GeometricRestraint): """A restraint between part of the system and the outer surface of a geometric object. See :class:`GeometricRestraint` for a description of the parameters. """ object_characteristic = 'outer surface' class DerivedDistanceRestraint(Restraint): """A restraint between two parts of the system, derived from experimental data. :param dataset: Reference to the data from which the restraint is derived. :type dataset: :class:`~ihm.dataset.Dataset` :param feature1: The first part of the system to restrain. :type feature1: :class:`Feature` :param feature2: The second part of the system to restrain. :type feature2: :class:`Feature` :param distance: Restraint on the distance. :type distance: :class:`DistanceRestraint` :param float probability: Likelihood that restraint is correct (0. - 1.) :param bool restrain_all: If True, all distances are restrained. :param float mic_value: Value of the Maximal Information Coefficient (MIC) for this interaction, if applicable. """ assembly = None # no struct_assembly_id for derived distance restraints def __init__(self, dataset, feature1, feature2, distance, probability=None, restrain_all=None, mic_value=None): self.dataset = dataset self.feature1, self.feature2 = feature1, feature2 self.distance, self.restrain_all = distance, restrain_all self.probability = probability self.mic_value = mic_value _all_features = property(lambda self: (self.feature1, self.feature2)) class PredictedContactRestraint(Restraint): """A predicted contact between two parts of the system, derived from various computational tools. :param dataset: Reference to the data from which the restraint is derived. :type dataset: :class:`~ihm.dataset.Dataset` :param resatom1: The first residue or atom to restrain. :type resatom1: :class:`ihm.Residue` or :class:`ihm.Atom` :param resatom2: The second residue or atom to restrain. :type resatom2: :class:`ihm.Residue` or :class:`ihm.Atom` :param distance: Restraint on the distance. :type distance: :class:`DistanceRestraint` :param bool by_residue: If True, the restraint is applied to specific residues; otherwise, it is applied to the closest primitive object with the highest resolution. :param float probability: Likelihood that restraint is correct (0. - 1.) :param software: The software used to generate the contact. :type software: :class:`~ihm.Software` """ assembly = None # no struct_assembly_id for predicted contact restraints def __init__(self, dataset, resatom1, resatom2, distance, by_residue, probability=None, software=None): self.dataset = dataset self.resatom1, self.resatom2 = resatom1, resatom2 self.distance, self.by_residue = distance, by_residue self.probability, self.software = probability, software class HDXRestraint(Restraint): """A restraint derived from Hydrogen-Deuterium Exchange experiments. :param dataset: Reference to the data from which the restraint is derived. :type dataset: :class:`~ihm.dataset.Dataset` :param feature: The part of the system to restrain. :type feature: :class:`Feature` :param float protection_factor: Unitless scaling factor. :param str details: Additional details regarding the restraint. 
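A short sketch of a derived distance restraint between two features (for example, one derived from a predicted interaction); `dataset`, `feat1`, `feat2` and `system` are assumed to already exist:

    import ihm.restraint

    r = ihm.restraint.DerivedDistanceRestraint(
        dataset=dataset, feature1=feat1, feature2=feat2,
        distance=ihm.restraint.UpperBoundDistanceRestraint(25.),
        probability=0.8)
    system.restraints.append(r)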
""" assembly = None # no struct_assembly_id for HDX restraints def __init__(self, dataset, feature, protection_factor=None, details=None): self.dataset, self.feature = dataset, feature self.protection_factor = protection_factor self.details = details _all_features = property(lambda self: (self.feature,)) python-ihm-2.7/ihm/source.py000066400000000000000000000032771503573337200161260ustar00rootroot00000000000000"""Classes for describing the source of an entity. """ class Source: """Base class to describe the source of an :class:`ihm.Entity`. See :class:`Manipulated`, :class:`Natural` and :class:`Synthetic`. """ src_method = None class Details: """Identifying information for an entity source. See :class:`Manipulated`, :class:`Natural` or :class:`Synthetic`. :param ncbi_taxonomy_id: NCBI taxonomy identifier, e.g. "469008" :param scientific_name: Scientific name, e.g. "Escherichia coli" :param common_name: Common name :param strain: Strain, e.g. "BL21(DE3)PLYSS" """ def __init__(self, ncbi_taxonomy_id=None, scientific_name=None, common_name=None, strain=None): self.ncbi_taxonomy_id = ncbi_taxonomy_id self.scientific_name = scientific_name self.common_name = common_name self.strain = strain class Manipulated(Source): """An entity isolated from a genetically manipulated source. See :class:`ihm.Entity`. :param gene: Details about the gene source. :type gene: :class:`Details` :param host: Details about the host organism. :type host: :class:`Details` """ src_method = 'man' def __init__(self, gene=None, host=None): self.gene, self.host = gene, host class Natural(Source, Details): """An entity isolated from a natural source. See :class:`ihm.Entity`. See :class:`Details` for a description of the parameters.""" src_method = 'nat' class Synthetic(Source, Details): """An entity obtained synthetically. See :class:`ihm.Entity`. See :class:`Details` for a description of the parameters.""" src_method = 'syn' python-ihm-2.7/ihm/startmodel.py000066400000000000000000000204421503573337200167750ustar00rootroot00000000000000"""Classes to handle starting models.""" import enum class SequenceIdentityDenominator(enum.IntEnum): """The denominator used while calculating the sequence identity. One of these constants can be passed to :class:`SequenceIdentity`.""" #: Length of the shorter sequence SHORTER_LENGTH = 1 #: Number of aligned positions (including gaps) NUM_ALIGNED_WITH_GAPS = 2 #: Number of aligned residue pairs (not including the gaps) NUM_ALIGNED_WITHOUT_GAPS = 3 #: Arithmetic mean sequence length MEAN_LENGTH = 4 #: Another method not covered here OTHER = 5 class SequenceIdentity: """Describe the identity between template and target sequences. See :class:`Template`. :param value: Percentage sequence identity. :param denominator: Way in which sequence identity was calculated - see :class:`SequenceIdentityDenominator`. """ def __init__(self, value, denominator=SequenceIdentityDenominator.SHORTER_LENGTH): self.value = value self.denominator = denominator class Template: """A PDB file used as a comparative modeling template for part of a starting model. See :class:`StartingModel`. :param dataset: Pointer to where this template is stored. :type dataset: :class:`~ihm.dataset.Dataset` :param str asym_id: The author-provided asymmetric unit (chain) to use from the template dataset (not necessarily the same as the starting model's asym_id or the ID of the asym_unit in the final IHM model). :param tuple seq_id_range: The sequence range in the dataset that is modeled by this template. 
Note that this numbering may differ from the IHM numbering. See `offset` in :class:`StartingModel`. :param tuple template_seq_id_range: The sequence range of the template that is used in comparative modeling. :param sequence_identity: Sequence identity between template and the target sequence. :type sequence_identity: :class:`SequenceIdentity` or `float` :param alignment_file: Reference to the external file containing the template-target alignment. :type alignment_file: :class:`~ihm.location.Location` """ # todo: handle sequence_identity_denominator as an enum, not int def __init__(self, dataset, asym_id, seq_id_range, template_seq_id_range, sequence_identity, alignment_file=None): self.dataset, self.asym_id = dataset, asym_id self.seq_id_range = seq_id_range self.template_seq_id_range = template_seq_id_range if isinstance(sequence_identity, float): sequence_identity = SequenceIdentity(sequence_identity) self.sequence_identity = sequence_identity self.alignment_file = alignment_file class StartingModel: """A starting guess for modeling of an asymmetric unit See :class:`ihm.representation.Segment` and :attr:`ihm.System.orphan_starting_models`. :param asym_unit: The asymmetric unit (or part of one) this starting model represents. :type asym_unit: :class:`~ihm.AsymUnit` or :class:`~ihm.AsymUnitRange` :param dataset: Pointer to where this model is stored. :type dataset: :class:`~ihm.dataset.Dataset` :param str asym_id: The asymmetric unit (chain) to use from the starting model's dataset (not necessarily the same as the ID of the asym_unit in the final model). :param list templates: A list of :class:`Template` objects, if this is a comparative model. :param int offset: Offset between the residue numbering in the dataset and the IHM model (the offset is added to the starting model numbering to give the IHM model numbering). :param list metadata: List of PDB metadata, such as HELIX records. :param software: The software used to generate the starting model. :type software: :class:`~ihm.Software` :param script_file: Reference to the external file containing the script used to generate the starting model (usually a :class:`~ihm.location.WorkflowFileLocation`). :type script_file: :class:`~ihm.location.Location` :param str description: Additional text describing the starting model. """ def __init__(self, asym_unit, dataset, asym_id, templates=None, offset=0, metadata=None, software=None, script_file=None, description=None): self.templates = templates if templates is not None else [] self.metadata = metadata if metadata is not None else [] self.asym_unit = asym_unit self.dataset, self.asym_id, self.offset = dataset, asym_id, offset self.software, self.script_file = software, script_file self.description = description self._atoms = [] self._seq_difs = [] def get_atoms(self): """Yield :class:`~ihm.model.Atom` objects that represent this starting model. This allows the starting model coordinates to be embedded in the mmCIF file, which is useful if the starting model is not available elsewhere (or it has been modified). The default implementation returns an internal list of atoms; it is usually necessary to subclass and override this method. See :meth:`ihm.model.Model.get_spheres` for more details. Note that the returned atoms should be those used in modeling, not those stored in the file. In particular, the numbering scheme should be that used in the IHM model (add `offset` to the dataset numbering). 
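A sketch of describing a comparative starting model built from one template; `asym`, `template_dataset`, `model_dataset` and `system` are assumed to exist:

    import ihm.startmodel

    template = ihm.startmodel.Template(
        dataset=template_dataset, asym_id='A',
        seq_id_range=(1, 100), template_seq_id_range=(30, 129),
        sequence_identity=ihm.startmodel.SequenceIdentity(
            42.0, ihm.startmodel.SequenceIdentityDenominator.SHORTER_LENGTH))
    sm = ihm.startmodel.StartingModel(
        asym_unit=asym(1, 100), dataset=model_dataset, asym_id='A',
        templates=[template], offset=0)
    system.orphan_starting_models.append(sm)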
If any residues were changed (for example it is common to mutate MSE in the dataset to MET in the modeling) the final mutated name should be used (MET in this case) and :meth:`get_seq_dif` overridden to note the change. """ return self._atoms def get_seq_dif(self): """Yield :class:`SeqDif` objects for any sequence changes between the dataset and the starting model. See :meth:`get_atoms`. The default implementation returns an internal list of objects; it is usually necessary to subclass and override this method. Note that this is always called *after* :meth:`get_atoms`. """ return self._seq_difs def add_atom(self, atom): """Add to the model's set of :class:`~ihm.model.Atom` objects. See :meth:`get_atoms` for more details. """ self._atoms.append(atom) def add_seq_dif(self, seq_dif): """Add to the model's set of :class:`SeqDif` objects. See :meth:`get_atoms` for more details. """ self._seq_difs.append(seq_dif) class PDBHelix: """Represent a HELIX record from a PDB file.""" def __init__(self, line): self.helix_id = line[11:14].strip() self.start_resnam = line[14:18].strip() self.start_asym = line[19] self.start_resnum = int(line[21:25]) self.end_resnam = line[27:30].strip() self.end_asym = line[31] self.end_resnum = int(line[33:37]) self.helix_class = int(line[38:40]) self.length = int(line[71:76]) class SeqDif: """Annotate a sequence difference between a dataset and starting model. See :meth:`StartingModel.get_seq_dif` and :class:`MSESeqDif`. :param int db_seq_id: The residue index in the dataset. :param int seq_id: The residue index in the starting model. This should normally be `db_seq_id + offset`. :param str db_comp_id: The name of the residue in the dataset. :param str details: Descriptive text for the sequence difference. """ def __init__(self, db_seq_id, seq_id, db_comp_id, details=None): self.db_seq_id, self.seq_id = db_seq_id, seq_id self.db_comp_id, self.details = db_comp_id, details class MSESeqDif: """Denote that a residue was mutated from MSE to MET. See :class:`SeqDif` for a description of the parameters. 
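A sketch of embedding starting-model coordinates and noting an MSE-to-MET change, continuing from the previous sketch (`sm` and `asym` are assumed to exist); the ihm.model.Atom keyword names used here are assumptions:

    import ihm.model
    import ihm.startmodel

    sm.add_atom(ihm.model.Atom(asym_unit=asym, seq_id=1, atom_id='CA',
                               type_symbol='C', x=1.0, y=2.0, z=3.0))
    # Record that residue 1, MSE in the dataset, was modeled as MET
    sm.add_seq_dif(ihm.startmodel.MSESeqDif(db_seq_id=1, seq_id=1))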
""" def __init__(self, db_seq_id, seq_id, details="Conversion of modified residue MSE to MET"): self.db_seq_id, self.seq_id = db_seq_id, seq_id self.db_comp_id, self.details = 'MSE', details python-ihm-2.7/ihm/test.py000066400000000000000000000021531503573337200155750ustar00rootroot00000000000000import ihm import ihm.dumper import ihm.reader import os import unittest try: import msgpack except ImportError: msgpack = None class Tests(unittest.TestCase): def test_basic(self): """Basic install test""" system = ihm.System(title='test system') entity_a = ihm.Entity('AAA', description='Subunit A') entity_b = ihm.Entity('AAAAAA', description='Subunit B') system.entities.extend((entity_a, entity_b)) with open('output.cif', 'w') as fh: ihm.dumper.write(fh, [system]) with open('output.cif') as fh: sys2, = ihm.reader.read(fh) self.assertEqual(sys2.title, 'test system') os.unlink('output.cif') # Also test with BinaryCIF if msgpack: with open('output.bcif', 'wb') as fh: ihm.dumper.write(fh, [system], format='BCIF') with open('output.bcif', 'rb') as fh: sys2, = ihm.reader.read(fh, format='BCIF') self.assertEqual(sys2.title, 'test system') os.unlink('output.bcif') if __name__ == '__main__': unittest.main() python-ihm-2.7/ihm/util/000077500000000000000000000000001503573337200152205ustar00rootroot00000000000000python-ihm-2.7/ihm/util/__init__.py000066400000000000000000000135261503573337200173400ustar00rootroot00000000000000"""Utility classes""" import string import os import ihm import datetime class _AsymIDs: """Map indices to multi-character asym (chain) IDs. We label the first 26 chains A-Z, then we move to two-letter chain IDs: AA through AZ, then BA through BZ, through to ZZ. This continues with longer chain IDs.""" def __getitem__(self, ind): chars = string.ascii_uppercase lc = len(chars) ids = [] while ind >= lc: ids.append(chars[ind % lc]) ind = ind // lc - 1 ids.append(chars[ind]) return "".join(reversed(ids)) def _remove_id(obj, attr='_id'): """Remove any unique ID from obj""" if hasattr(obj, attr): delattr(obj, attr) def _assign_id(obj, seen_objs, obj_by_id, attr='_id', seen_obj=None, by_id_obj=None): """Assign a unique ID to obj, and track all ids in obj_by_id.""" if seen_obj is None: seen_obj = obj if by_id_obj is None: by_id_obj = obj if seen_obj not in seen_objs: if not hasattr(obj, attr): obj_by_id.append(by_id_obj) setattr(obj, attr, len(obj_by_id)) seen_objs[seen_obj] = getattr(obj, attr) else: setattr(obj, attr, seen_objs[seen_obj]) def _get_relative_path(reference, path): """Return `path` interpreted relative to `reference`""" if os.path.isabs(path): return path else: return os.path.join(os.path.dirname(reference), path) def _text_choice_property(attr, choices, doc=None): """Like `property` but requires that the value be one of the set choices""" schoices = frozenset(choices) def getfunc(obj): return getattr(obj, "_" + attr) def setfunc(obj, val): if val is not None and val is not ihm.unknown and val not in schoices: raise ValueError( "Invalid choice %s for %s; valid values are %s, " "None, ihm.unknown" % (repr(val), attr, ", ".join(repr(x) for x in choices))) setattr(obj, "_" + attr, val) return property(getfunc, setfunc, doc=doc) def _check_residue_range(seq_id_range, entity): """Make sure that a residue range is not out of range of its Entity""" if not entity or not entity._range_check: return if seq_id_range[1] < seq_id_range[0]: raise ValueError("Range %d-%d is invalid; end is before start" % seq_id_range) if (seq_id_range[1] > len(entity.sequence) or seq_id_range[0] < 1): raise 
IndexError("Range %d-%d out of range for %s (1-%d)" % (seq_id_range[0], seq_id_range[1], entity, len(entity.sequence))) def _check_residue(r): """Make sure that a residue is not out of range of its Entity""" if not r.entity or not r.entity._range_check: return if r.seq_id > len(r.entity.sequence) or r.seq_id < 1: raise IndexError("Residue %d out of range for %s (1-%d)" % (r.seq_id, r.entity, len(r.entity.sequence))) def _check_transform(t): if t.rot_matrix in (None, ihm.unknown): raise ValueError("Transformation %s is missing rotation" % t) if t.tr_vector in (None, ihm.unknown): raise ValueError("Transformation %s is missing translation" % t) def _invert_ranges(ranges, end, start=1): """Given a sorted list of non-overlapping ranges, yield a new list which contains every range in the range start-end which was not in the original list. For example, if end=4, [(2, 3)] -> [(1, 1), (4, 4)]""" for r in ranges: if r[0] > start: yield (start, r[0] - 1) start = r[1] + 1 if end >= start: yield (start, end) def _pred_ranges(ranges, end): """Given a sorted list of non-overlapping ranges, yield a new list which covers the range 1-end. Each element in the new list contains a new third bool member which is True iff the element was in the original list. For example, if end=4, [(2, 3)] -> [(1, 1, False), (2, 3, True), (4, 4, False)]""" start = 1 for r in ranges: if r[0] > start: yield (start, r[0] - 1, False) yield (r[0], r[1], True) start = r[1] + 1 if end >= start: yield (start, end, False) def _combine_ranges(ranges): """Sort the input ranges and remove any overlaps; yield the result. For example, [(8, 10), (1, 2), (3, 4)] -> [(1, 4), (8, 10)]""" ranges = sorted(ranges) if not ranges: return current = ranges[0] for r in ranges[1:]: if current[1] + 1 >= r[0]: current = (current[0], max(r[1], current[1])) else: yield current current = r yield current def _make_range_from_list(rr): """Yield a list of ranges given a sorted list of values. For example, [1, 2, 5, 6] -> [[1, 2], [5, 6]]""" if not rr: return current = [rr[0], rr[0]] for r in rr[1:]: if current[1] + 1 == r: current[1] = r else: yield current current = [r, r] yield current def _get_codes(codestr): """Convert a one-letter-code string into a sequence of individual codes""" if codestr is None or codestr is ihm.unknown: return i = 0 while i < len(codestr): # Strip out linebreaks if codestr[i] == '\n': pass elif codestr[i] == '(': end = codestr.index(')', i) yield codestr[i + 1:end] i = end else: yield codestr[i] i += 1 def _get_iso_date(iso_date_str): """Get a datetime.date obj for a string in isoformat.""" if not iso_date_str: return iso_date_str return datetime.date(int(iso_date_str[0:4]), int(iso_date_str[5:7]), int(iso_date_str[8:10])) python-ihm-2.7/ihm/util/make_mmcif.py000066400000000000000000000473541503573337200176770ustar00rootroot00000000000000#!/usr/bin/env python3 """ Add minimal IHM-related tables to an mmCIF file. Given any mmCIF file as input, this script will add any missing IHM-related tables and write out a new file that is minimally compliant with the IHM dictionary. This is done by simply reading in the original file with python-ihm and then writing it out again, so a) any data in the input file that is not understood by python-ihm will be lost on output; and b) input files that aren't compliant with the PDBx dictionary, or that contain syntax errors or other problems, may crash or otherwise confuse python-ihm. The --add option can also be used to combine multiple input mmCIF files into one. 
This is typically used when the mmCIF files contain models with differing composition. Only model (coordinate) information is combined, not other IHM information such as starting models or restraints. """ import ihm.reader import ihm.dumper import ihm.model import ihm.protocol import ihm.util import ihm.format import urllib.request import os import argparse import collections import operator import warnings # All canonical atom names for each standard residue type, as per CCD. # This is generated using the util/get_ccd_atoms.py script. KNOWN_ATOM_NAMES = { 'A': {"C4'", "C2'", 'C2', "C1'", 'N7', 'H62', 'OP2', 'N3', 'C5', 'P', "H5''", 'H2', "C5'", 'H61', "H3'", 'C4', 'N1', 'H8', "H1'", 'C8', 'N9', 'HOP3', 'OP1', "O4'", "H2'", "HO2'", 'OP3', "O3'", 'N6', 'HOP2', "O5'", "O2'", "HO3'", "H5'", "C3'", 'C6', "H4'"}, 'ALA': {'H2', 'HB1', 'HB3', 'HB2', 'N', 'HXT', 'O', 'CB', 'C', 'HA', 'CA', 'H', 'OXT'}, 'ARG': {'HB2', 'CG', 'NE', 'H', 'H2', 'HH22', 'N', 'HG2', 'CA', 'NH2', 'HH11', 'HG3', 'HH21', 'CZ', 'HB3', 'HXT', 'O', 'C', 'HD3', 'HH12', 'CB', 'NH1', 'CD', 'HA', 'HD2', 'HE', 'OXT'}, 'ASN': {'H2', 'HB3', 'HD22', 'HB2', 'N', 'CG', 'O', 'CB', 'ND2', 'HXT', 'C', 'HA', 'HD21', 'CA', 'OD1', 'H', 'OXT'}, 'ASP': {'H2', 'HB3', 'HB2', 'N', 'CG', 'O', 'CB', 'HXT', 'C', 'HA', 'OD2', 'CA', 'OD1', 'HD2', 'H', 'OXT'}, 'C': {"C4'", "C2'", 'C2', 'O2', 'H42', 'H5', "C1'", 'OP2', 'N3', 'C5', 'P', "H5''", 'H41', 'H6', "C5'", "H3'", 'C4', 'N1', 'N4', "H1'", 'HOP3', 'OP1', "O4'", "H2'", "HO2'", 'OP3', "O3'", 'HOP2', "O5'", "O2'", "HO3'", "H5'", "C3'", 'C6', "H4'"}, 'CYS': {'H2', 'HB3', 'HB2', 'N', 'SG', 'O', 'CB', 'HXT', 'C', 'HA', 'HG', 'CA', 'H', 'OXT'}, 'DA': {"C4'", "C2'", 'C2', "C1'", 'N7', 'H62', 'OP2', 'N3', 'C5', 'P', "H5''", 'H2', "C5'", 'H61', "H3'", 'C4', 'N1', 'H8', "H1'", 'C8', 'N9', 'HOP3', 'OP1', "O4'", "H2'", 'OP3', "O3'", 'N6', 'HOP2', "O5'", "H2''", "HO3'", "H5'", "C3'", 'C6', "H4'"}, 'DC': {"C4'", "C2'", 'C2', 'O2', 'H42', 'H5', "C1'", 'OP2', 'N3', 'C5', 'P', "H5''", 'H41', 'H6', "C5'", "H3'", 'C4', 'N1', 'N4', "H1'", 'HOP3', 'OP1', "O4'", "H2'", 'OP3', "O3'", 'HOP2', "O5'", "H2''", "HO3'", "H5'", "C3'", 'C6', "H4'"}, 'DG': {"C4'", "C2'", 'C2', "C1'", 'N7', 'OP2', 'N3', 'C5', 'P', "H5''", "C5'", 'O6', 'H1', "H3'", 'C4', 'N1', 'H8', "H1'", 'C8', 'N9', 'HOP3', 'OP1', "O4'", "H2'", 'OP3', "O3'", 'HOP2', "O5'", "H2''", 'H21', 'H22', "HO3'", "H5'", "C3'", 'N2', 'C6', "H4'"}, 'DT': {"C4'", "C2'", 'C2', 'O2', 'O4', "C1'", 'OP2', 'N3', 'C5', 'P', "H5''", 'H6', "C5'", "H3'", 'C4', 'N1', 'C7', "H1'", 'H73', 'HOP3', 'H3', 'OP1', "O4'", "H2'", 'OP3', "O3'", 'HOP2', "O5'", "H2''", 'H71', "HO3'", "H5'", "C3'", 'H72', 'C6', "H4'"}, 'G': {"C4'", "C2'", 'C2', "C1'", 'N7', 'OP2', 'N3', 'C5', 'P', "H5''", "C5'", 'O6', 'H1', "H3'", 'C4', 'N1', 'H8', "H1'", 'C8', 'N9', 'HOP3', 'OP1', "O4'", "H2'", "HO2'", 'OP3', "O3'", 'HOP2', "O5'", "O2'", 'H21', 'H22', "HO3'", "H5'", "C3'", 'N2', 'C6', "H4'"}, 'GLN': {'HB2', 'CG', 'H', 'H2', 'N', 'HG2', 'HE22', 'CA', 'HG3', 'HE21', 'HB3', 'HXT', 'O', 'NE2', 'C', 'OE1', 'CB', 'CD', 'HA', 'OXT'}, 'GLU': {'HB2', 'CG', 'H', 'H2', 'N', 'HG2', 'CA', 'HG3', 'HB3', 'HXT', 'O', 'HE2', 'C', 'OE2', 'OE1', 'CB', 'CD', 'HA', 'OXT'}, 'GLY': {'HA3', 'HXT', 'CA', 'O', 'HA2', 'H', 'N', 'C', 'H2', 'OXT'}, 'HIS': {'HB2', 'CG', 'CE1', 'HE1', 'H', 'ND1', 'H2', 'N', 'CA', 'HD1', 'HB3', 'HXT', 'O', 'HE2', 'NE2', 'C', 'CD2', 'CB', 'HA', 'HD2', 'OXT'}, 'ILE': {'HD11', 'CG1', 'H', 'HD12', 'H2', 'N', 'CA', 'HD13', 'HG13', 'HXT', 'O', 'HB', 'C', 'CD1', 'HG23', 'HG22', 'HG21', 'HG12', 'CB', 'CG2', 'HA', 'OXT'}, 
'LEU': {'HD11', 'HB2', 'HD22', 'CG', 'HD21', 'H', 'HD12', 'H2', 'N', 'HD23', 'CA', 'HD13', 'HB3', 'HXT', 'O', 'C', 'CD2', 'CD1', 'CB', 'HA', 'HG', 'OXT'}, 'LYS': {'HB2', 'CG', 'CE', 'H', 'H2', 'N', 'HG2', 'HE3', 'CA', 'HG3', 'HB3', 'HXT', 'O', 'HE2', 'HZ1', 'HZ3', 'C', 'HD3', 'CB', 'CD', 'HA', 'HZ2', 'HD2', 'NZ', 'OXT'}, 'MET': {'HB2', 'CG', 'HE1', 'CE', 'H', 'H2', 'N', 'HG2', 'HE3', 'CA', 'HG3', 'SD', 'HB3', 'HXT', 'O', 'HE2', 'C', 'CB', 'HA', 'OXT'}, 'PHE': {'HB2', 'CG', 'CE1', 'HE1', 'H', 'H2', 'N', 'HZ', 'CA', 'HD1', 'CZ', 'HB3', 'HXT', 'O', 'HE2', 'C', 'CD2', 'CD1', 'CB', 'CE2', 'HA', 'HD2', 'OXT'}, 'PRO': {'HB3', 'HB2', 'N', 'CG', 'O', 'CB', 'HG2', 'HXT', 'CD', 'C', 'HA', 'CA', 'HD2', 'H', 'HG3', 'HD3', 'OXT'}, 'SER': {'H2', 'HB3', 'HB2', 'N', 'HXT', 'O', 'CB', 'C', 'HA', 'HG', 'CA', 'H', 'OG', 'OXT'}, 'THR': {'H2', 'HXT', 'N', 'HG23', 'O', 'CB', 'CG2', 'OG1', 'HB', 'C', 'HA', 'CA', 'HG22', 'H', 'HG1', 'HG21', 'OXT'}, 'TRP': {'HB2', 'CG', 'CE3', 'CZ3', 'HE1', 'H', 'H2', 'N', 'HE3', 'CA', 'CZ2', 'HD1', 'HB3', 'HXT', 'O', 'HZ3', 'C', 'CD2', 'CD1', 'NE1', 'CB', 'HH2', 'CE2', 'HA', 'CH2', 'HZ2', 'OXT'}, 'U': {"C4'", "C2'", 'C2', 'O2', 'H5', 'O4', "C1'", 'OP2', 'N3', 'C5', 'P', "H5''", 'H6', "C5'", "H3'", 'C4', 'N1', "H1'", 'HOP3', 'H3', 'OP1', "O4'", "H2'", "HO2'", 'OP3', "O3'", 'HOP2', "O5'", "O2'", "HO3'", "H5'", "C3'", 'C6', "H4'"}, 'VAL': {'CG1', 'H', 'H2', 'N', 'CA', 'HG13', 'HXT', 'O', 'HB', 'C', 'HG23', 'HG22', 'HG21', 'HG12', 'CB', 'CG2', 'HA', 'OXT', 'HG11'} } def add_ihm_info(s, fix_histidines, check_atom_names): # Non-standard histidine names (protonation states) histidines = frozenset(('HIP', 'HID', 'HIE')) if not s.title: s.title = 'Auto-generated system' # Simple default assembly containing all chains default_assembly = ihm.Assembly(s.asym_units, name='Modeled assembly') # Simple default atomic representation for everything default_representation = ihm.representation.Representation( [ihm.representation.AtomicSegment(asym, rigid=False) for asym in s.asym_units]) # Simple default modeling protocol default_protocol = ihm.protocol.Protocol(name='modeling') for state_group in s.state_groups: for state in state_group: for model_group in state: for model in model_group: if not model.assembly: model.assembly = default_assembly if not model.representation: model.representation = default_representation if not model.protocol: model.protocol = default_protocol model.not_modeled_residue_ranges.extend( _get_not_modeled_residues(model)) if fix_histidines: _fix_histidine_het_atoms(model, histidines) if check_atom_names != 'no': _check_atom_names(model, check_atom_names == 'all') if fix_histidines: _fix_histidine_chem_comps(s, histidines) return s def _fix_histidine_het_atoms(model, histidines): """Fix any non-standard histidine atoms in atom_site that are marked HETATM to instead use ATOM""" for atom in model._atoms: if atom.seq_id is None or not atom.het: continue comp = atom.asym_unit.sequence[atom.seq_id - 1] if comp.id in histidines: atom.het = False class _ChemCompAtomHandler: not_in_file = omitted = unknown = None def __init__(self): super().__init__() self.atoms = collections.defaultdict(set) def __call__(self, comp_id, atom_id): self.atoms[comp_id].add(atom_id) def _get_non_std_restyp(restyp): """Return CCD info for the given residue type""" url_top = 'https://files.rcsb.org' url_pattern = url_top + '/pub/pdb/refdata/chem_comp/%s/%s/%s.cif' url = url_pattern % (restyp[-1], restyp, restyp) cca = _ChemCompAtomHandler() try: with urllib.request.urlopen(url) as fh: c = 
ihm.format.CifReader(fh, category_handler={'_chem_comp_atom': cca}) c.read_file() except urllib.error.URLError as exc: warnings.warn( "Component %s could not be found in CCD: %s" % (restyp, exc)) return cca.atoms def _get_non_canon(seen_atom_names, check_all): """Get all non-canonical atom names for each residue type""" for restyp, atoms in seen_atom_names.items(): if check_all and restyp not in KNOWN_ATOM_NAMES: KNOWN_ATOM_NAMES.update(_get_non_std_restyp(restyp)) if restyp in KNOWN_ATOM_NAMES: non_canon_atoms = atoms - KNOWN_ATOM_NAMES[restyp] if non_canon_atoms: yield restyp, non_canon_atoms def _check_atom_names(model, check_all): """Check that only standard atom names are used for known residue types""" seen_atom_names = collections.defaultdict(set) for atom in model._atoms: seq_id = 1 if atom.seq_id is None else atom.seq_id comp = atom.asym_unit.sequence[seq_id - 1] seen_atom_names[comp.id].add(atom.atom_id) non_canon = sorted(_get_non_canon(seen_atom_names, check_all), key=operator.itemgetter(0)) if non_canon: raise ValueError( "Non-canonical atom names found in the following residues: " + "; ".join("%s: %r" % (restyp, sorted(atoms)) for (restyp, atoms) in non_canon)) def _fix_histidine_chem_comps(s, histidines): """Change any non-standard histidine chemical components to normal HIS""" his = ihm.LPeptideAlphabet()['H'] for e in s.entities: for c in e.sequence: if c.id in histidines: # Change the ChemComp to HIS in place, as there may be # references to this ChemComp elsewhere. Duplicate HIS # components will be combined into one at output time. c.id = his.id c.code = his.code c.code_canonical = his.code_canonical c.name = his.name c.formula = his.formula c.__class__ = his.__class__ def _get_not_modeled_residues(model): """Yield NotModeledResidueRange objects for all residue ranges in the Model that are not referenced by Atom, Sphere, or pre-existing NotModeledResidueRange objects""" for assem in model.assembly: asym = assem.asym if hasattr(assem, 'asym') else assem if not asym.entity.is_polymeric(): continue # Make a set of all residue indices of this asym "handled" either # by being modeled (with Atom or Sphere objects) or by being # explicitly marked as not-modeled handled_residues = set() for rr in model.not_modeled_residue_ranges: if rr.asym_unit is asym: for seq_id in range(rr.seq_id_begin, rr.seq_id_end + 1): handled_residues.add(seq_id) for atom in model._atoms: if atom.asym_unit is asym: handled_residues.add(atom.seq_id) for sphere in model._spheres: if sphere.asym_unit is asym: for seq_id in range(sphere.seq_id_range[0], sphere.seq_id_range[1] + 1): handled_residues.add(seq_id) # Convert set to a list of residue ranges handled_residues = ihm.util._make_range_from_list( sorted(handled_residues)) # Return not-modeled for each non-handled range for r in ihm.util._invert_ranges(handled_residues, end=assem.seq_id_range[1], start=assem.seq_id_range[0]): yield ihm.model.NotModeledResidueRange(asym, r[0], r[1]) def add_ihm_info_one_system(fname, fix_histidines, check_atom_names): """Read mmCIF file `fname`, which must contain a single System, and return it with any missing IHM data added.""" with open(fname) as fh: systems = ihm.reader.read(fh) if len(systems) != 1: raise ValueError("mmCIF file %s must contain exactly 1 data block " "(%d found)" % (fname, len(systems))) return add_ihm_info(systems[0], fix_histidines, check_atom_names) def combine(s, other_s): """Add models from the System `other_s` into the System `s`. 
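For illustration, a sketch of the intended merge workflow using the helpers in this module (file names are placeholders):

    from ihm.util.make_mmcif import add_ihm_info_one_system, combine
    import ihm.dumper

    s = add_ihm_info_one_system('model1.cif', fix_histidines=False,
                                check_atom_names='no')
    other = add_ihm_info_one_system('model2.cif', fix_histidines=False,
                                    check_atom_names='no')
    combine(s, other)   # `other` is modified in place; do not reuse it
    with open('combined.cif', 'w') as fh:
        ihm.dumper.write(fh, [s],
                         variant=ihm.dumper.IgnoreVariant(['_audit_conform']))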
After running this function, `s` will contain all Models from both systems. The models are added to new StateGroup(s) in `s`. Note that this function also modifies `other_s` in place, so that System should no longer be used after calling this function.""" # First map all Entity and AsymUnit objects in `other_s` to equivalent # objects in `s` entity_map = combine_entities(s, other_s) asym_map = combine_asyms(s, other_s, entity_map) # Now handle the Models themselves combine_atoms(s, other_s, asym_map) def combine_entities(s, other_s): """Add `other_s` entities into `s`. Returns a dict that maps Entities in `other_s` to equivalent objects in `s`.""" entity_map = {} sequences = dict((e.sequence, e) for e in s.entities) for e in other_s.entities: if e.sequence in sequences: # If the `other_s` Entity already exists in `s`, map to it entity_map[e] = sequences[e.sequence] else: # Otherwise, add the `other_s` Entity to `s` s.entities.append(e) entity_map[e] = e return entity_map def combine_asyms(s, other_s, entity_map): """Add `other_s` asyms into `s`. Returns a dict that maps AsymUnits in `other_s` to equivalent objects in `s`.""" asym_map = {} # Collect author-provided information for existing asyms. For polymers, # we use the author-provided chain ID; for non-polymers, we also use # the author-provided residue number of the first (only) residue poly_asyms = dict(((a.entity, a.strand_id), a) for a in s.asym_units if a.entity.is_polymeric()) nonpoly_asyms = dict(((a.entity, a.strand_id, a.auth_seq_id_map[1]), a) for a in s.asym_units if a.entity.type == 'non-polymer') def map_asym(asym, orig_asym): if orig_asym: # If an equivalent asym already exists, use it (and its asym_id) asym_map[asym] = orig_asym else: # Otherwise, add a new asym asym_map[asym] = asym asym.id = None # Assign new ID s.asym_units.append(asym) for asym in other_s.asym_units: # Point to Entity in `s`, not `other_s` asym.entity = entity_map[asym.entity] # For polymers and non-polymers, if an asym in `other_s` has the # same author-provided information and entity_id as an asym in `s`, # reuse the asym_id if asym.entity.is_polymeric(): map_asym(asym, poly_asyms.get((asym.entity, asym.strand_id))) elif asym.entity.type == 'non-polymer': map_asym(asym, nonpoly_asyms.get((asym.entity, asym.strand_id, asym.auth_seq_id_map[1]))) else: # For waters and branched entities, always assign a new asym_id asym_map[asym] = asym asym.id = None # Assign new ID s.asym_units.append(asym) return asym_map def combine_atoms(s, other_s, asym_map): """Add `other_s` atoms into `s`""" seen_asmb = set() seen_rep = set() for state_group in other_s.state_groups: for state in state_group: for model_group in state: for model in model_group: # Assembly, Representation and Atom and Sphere objects # all reference `other_s` asyms. We must map these to # asyms in `s`. 
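                    # Editor's note (hypothetical example): if `other_s` had an
                    # asym with the same entity and strand_id 'B' as an asym
                    # already in `s`, asym_map points both at the one object in
                    # `s`, so the rewritten assembly, representation, atoms and
                    # spheres below all reference the asym already present in
                    # `s` rather than adding a duplicate chain.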
asmb = model.assembly if id(asmb) not in seen_asmb: seen_asmb.add(id(asmb)) # todo: also handle AsymUnitRange asmb[:] = [asym_map[asym] for asym in asmb] rep = model.representation if id(rep) not in seen_rep: seen_rep.add(id(rep)) for seg in rep: seg.asym_unit = asym_map[seg.asym_unit] for atom in model._atoms: atom.asym_unit = asym_map[atom.asym_unit] for sphere in model._spheres: sphere.asym_unit = asym_map[sphere.asym_unit] # Add all models as new state groups s.state_groups.extend(other_s.state_groups) def get_args(): p = argparse.ArgumentParser( description="Add minimal IHM-related tables to an mmCIF file.") p.add_argument("input", metavar="input.cif", help="input mmCIF file name") p.add_argument("output", metavar="output.cif", help="output mmCIF file name", default="output.cif", nargs="?") p.add_argument("--add", "-a", action='append', metavar="add.cif", help="also add model information from the named mmCIF " "file to the output file") p.add_argument("--histidines", action='store_true', dest="fix_histidines", help="Convert any non-standard histidine names (HIP, HID, " "HIE, for different protonation states) to HIS") p.add_argument('--check_atom_names', choices=['no', 'standard', 'all'], dest="check_atom_names", default='no', help="If 'standard', check for non-canonical atom names " "in standard amino acid and nucleic acid chemical " "components; if 'all', also check non-standard " "residue types by querying CCD (needs network access)") return p.parse_args() def main(): args = get_args() if (os.path.exists(args.input) and os.path.exists(args.output) and os.path.samefile(args.input, args.output)): raise ValueError("Input and output are the same file") if args.add: s = add_ihm_info_one_system(args.input, args.fix_histidines, args.check_atom_names) for other in args.add: other_s = add_ihm_info_one_system(other, args.fix_histidines, args.check_atom_names) combine(s, other_s) with open(args.output, 'w') as fhout: ihm.dumper.write( fhout, [s], variant=ihm.dumper.IgnoreVariant(['_audit_conform'])) else: with open(args.input) as fh: with open(args.output, 'w') as fhout: ihm.dumper.write( fhout, [add_ihm_info(s, args.fix_histidines, args.check_atom_names) for s in ihm.reader.read(fh)], variant=ihm.dumper.IgnoreVariant(['_audit_conform'])) if __name__ == '__main__': main() python-ihm-2.7/make-release.sh000077500000000000000000000031731503573337200163640ustar00rootroot00000000000000#!/bin/bash -e # First, do # - Check spelling with # codespell . --skip '*.cif' -L assertIn # - Update AuditConformDumper to match latest IHM dictionary if necessary # - Run util/validate-outputs.py to make sure all example outputs validate # (cd util; PYTHONPATH=.. python3 validate-outputs.py) # - Run util/check-db-entries.py to check against some real archive structures # (cd util; PYTHONPATH=.. 
python3 check-db-entries.py) # - Make sure all python-modelcif tests work using new IHM version # - Make sure the self-test script in each package (Homebrew, conda) works # - Update ChangeLog.rst, util/debian/changelog, and util/python-ihm.spec # with the release number and date # - Update release number in ihm/__init__.py, MANIFEST.in, and setup.py # - Commit, tag, and push # - Make release on GitHub # - Upload the release tarball from # https://github.com/ihmwg/python-ihm/releases to Zenodo as a new release # - Make sure there are no extraneous .py files (setup.py will include them # in the pypi package) # Make SWIG wrapper so users don't need SWIG rm -rf build src/ihm_format_wrap.c python3 setup.py build_ext --inplace VERSION=$(python3 setup.py --version) mv src/ihm_format_wrap.c "src/ihm_format_wrap_${VERSION}.c" python3 setup.py sdist rm -f "src/ihm_format_wrap_${VERSION}.c" echo "Now use 'twine upload dist/ihm-${VERSION}.tar.gz' to publish the release on PyPi." echo "Then, update the conda-forge, COPR, PPA, and Homebrew packages to match." echo "For COPR, use dist/ihm-${VERSION}.tar.gz together with util/python-ihm.spec" echo "For the PPA, use the GitHub release tarball, renamed to python-ihm_${VERSION}.orig.tar.gz" python-ihm-2.7/setup.py000077500000000000000000000037161503573337200152120ustar00rootroot00000000000000#!/usr/bin/env python try: from setuptools import setup, Extension except ImportError: from distutils.core import setup, Extension import sys import os VERSION = "2.7" copy_args = sys.argv[1:] # Allow building without the C extension build_ext = True if '--without-ext' in copy_args: build_ext = False copy_args.remove('--without-ext') if sys.platform == 'win32': # Our use of strdup, strerror should be safe - no need for the Windows # compiler to warn about it; we want to use the POSIX name for strdup too cargs = ['-D_CRT_SECURE_NO_WARNINGS', '-D_CRT_NONSTDC_NO_WARNINGS'] else: cargs = [] if build_ext: # Use pre-built SWIG wrappers for stable releases so that end users # don't need SWIG installed wrap = "src/ihm_format_wrap_%s.c" % VERSION if not os.path.exists(wrap): wrap = "src/ihm_format.i" mod = [Extension("ihm._format", sources=["src/ihm_format.c", "src/cmp.c", wrap], include_dirs=['src'], extra_compile_args=cargs, swig_opts=['-keyword', '-nodefaultctor', '-nodefaultdtor', '-noproxy'], optional=True)] else: mod = [] with open("README.md", "r") as fh: long_description = fh.read() setup(name='ihm', version=VERSION, script_args=copy_args, description='Package for handling IHM mmCIF and BinaryCIF files', long_description=long_description, long_description_content_type="text/markdown", author='Ben Webb', author_email='ben@salilab.org', url='https://github.com/ihmwg/python-ihm', ext_modules=mod, packages=['ihm', 'ihm.util'], install_requires=['msgpack'], license='MIT', classifiers=[ "Programming Language :: Python :: 3", "Operating System :: OS Independent", "Intended Audience :: Science/Research", "Topic :: Scientific/Engineering", ]) python-ihm-2.7/src/000077500000000000000000000000001503573337200142555ustar00rootroot00000000000000python-ihm-2.7/src/.gitignore000066400000000000000000000000331503573337200162410ustar00rootroot00000000000000*.o test ihm_format_wrap.c python-ihm-2.7/src/cmp.c000066400000000000000000002442351503573337200152120ustar00rootroot00000000000000/* The MIT License (MIT) Copyright (c) 2020 Charles Gunyon Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to 
deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ #include "cmp.h" static const uint32_t cmp_version_ = 20; static const uint32_t cmp_mp_version_ = 5; enum { POSITIVE_FIXNUM_MARKER = 0x00, FIXMAP_MARKER = 0x80, FIXARRAY_MARKER = 0x90, FIXSTR_MARKER = 0xA0, NIL_MARKER = 0xC0, FALSE_MARKER = 0xC2, TRUE_MARKER = 0xC3, BIN8_MARKER = 0xC4, BIN16_MARKER = 0xC5, BIN32_MARKER = 0xC6, EXT8_MARKER = 0xC7, EXT16_MARKER = 0xC8, EXT32_MARKER = 0xC9, FLOAT_MARKER = 0xCA, DOUBLE_MARKER = 0xCB, U8_MARKER = 0xCC, U16_MARKER = 0xCD, U32_MARKER = 0xCE, U64_MARKER = 0xCF, S8_MARKER = 0xD0, S16_MARKER = 0xD1, S32_MARKER = 0xD2, S64_MARKER = 0xD3, FIXEXT1_MARKER = 0xD4, FIXEXT2_MARKER = 0xD5, FIXEXT4_MARKER = 0xD6, FIXEXT8_MARKER = 0xD7, FIXEXT16_MARKER = 0xD8, STR8_MARKER = 0xD9, STR16_MARKER = 0xDA, STR32_MARKER = 0xDB, ARRAY16_MARKER = 0xDC, ARRAY32_MARKER = 0xDD, MAP16_MARKER = 0xDE, MAP32_MARKER = 0xDF, NEGATIVE_FIXNUM_MARKER = 0xE0 }; enum { FIXARRAY_SIZE = 0xF, FIXMAP_SIZE = 0xF, FIXSTR_SIZE = 0x1F }; typedef enum cmp_error_e { CMP_ERROR_NONE, CMP_ERROR_STR_DATA_LENGTH_TOO_LONG, CMP_ERROR_BIN_DATA_LENGTH_TOO_LONG, CMP_ERROR_ARRAY_LENGTH_TOO_LONG, CMP_ERROR_MAP_LENGTH_TOO_LONG, CMP_ERROR_INPUT_VALUE_TOO_LARGE, CMP_ERROR_FIXED_VALUE_WRITING, CMP_ERROR_TYPE_MARKER_READING, CMP_ERROR_TYPE_MARKER_WRITING, CMP_ERROR_DATA_READING, CMP_ERROR_DATA_WRITING, CMP_ERROR_EXT_TYPE_READING, CMP_ERROR_EXT_TYPE_WRITING, CMP_ERROR_INVALID_TYPE, CMP_ERROR_LENGTH_READING, CMP_ERROR_LENGTH_WRITING, CMP_ERROR_SKIP_DEPTH_LIMIT_EXCEEDED, CMP_ERROR_INTERNAL, CMP_ERROR_DISABLED_FLOATING_POINT, CMP_ERROR_MAX } cmp_error_t; static const char *cmp_error_message(cmp_error_t error) { switch (error) { case CMP_ERROR_NONE: return "No Error"; case CMP_ERROR_STR_DATA_LENGTH_TOO_LONG: return "Specified string data length is too long (> 0xFFFFFFFF)"; case CMP_ERROR_BIN_DATA_LENGTH_TOO_LONG: return "Specified binary data length is too long (> 0xFFFFFFFF)"; case CMP_ERROR_ARRAY_LENGTH_TOO_LONG: return "Specified array length is too long (> 0xFFFFFFFF)"; case CMP_ERROR_MAP_LENGTH_TOO_LONG: return "Specified map length is too long (> 0xFFFFFFFF)"; case CMP_ERROR_INPUT_VALUE_TOO_LARGE: return "Input value is too large"; case CMP_ERROR_FIXED_VALUE_WRITING: return "Error writing fixed value"; case CMP_ERROR_TYPE_MARKER_READING: return "Error reading type marker"; case CMP_ERROR_TYPE_MARKER_WRITING: return "Error writing type marker"; case CMP_ERROR_DATA_READING: return "Error reading packed data"; case CMP_ERROR_DATA_WRITING: return "Error writing packed data"; case CMP_ERROR_EXT_TYPE_READING: return "Error reading ext type"; case CMP_ERROR_EXT_TYPE_WRITING: return "Error writing ext type"; case CMP_ERROR_INVALID_TYPE: return "Invalid 
type"; case CMP_ERROR_LENGTH_READING: return "Error reading size"; case CMP_ERROR_LENGTH_WRITING: return "Error writing size"; case CMP_ERROR_SKIP_DEPTH_LIMIT_EXCEEDED: return "Depth limit exceeded while skipping"; case CMP_ERROR_INTERNAL: return "Internal error"; case CMP_ERROR_DISABLED_FLOATING_POINT: return "Floating point operations disabled"; case CMP_ERROR_MAX: return "Max Error"; } return ""; } static bool is_bigendian(void) { #ifdef WORDS_BIGENDIAN return WORDS_BIGENDIAN; #else const int32_t one = 1; const char *one_bytes = (const char *)&one; return *one_bytes == 0; #endif } static uint16_t be16(uint16_t x) { if (!is_bigendian()) return ((x >> 8) & 0x00ff) | ((x << 8) & 0xff00); return x; } static int16_t sbe16(int16_t x) { return (int16_t)be16((uint16_t)x); } static uint32_t be32(uint32_t x) { if (!is_bigendian()) return ((uint32_t)be16((uint16_t)(x >> 16))) | ((uint32_t)be16((uint16_t)(x & 0xffff)) << 16); return x; } static int32_t sbe32(int32_t x) { return (int32_t)be32((uint32_t)x); } static uint64_t be64(uint64_t x) { if (!is_bigendian()) return ((uint64_t)be32((uint32_t)(x >> 32))) | ((uint64_t)be32((uint32_t)(x & 0xffffffff)) << 32); return x; } static int64_t sbe64(int64_t x) { return (int64_t)be64((uint64_t)x); } #ifndef CMP_NO_FLOAT static float decode_befloat(const char *b) { float f = 0.; char *fb = (char *)&f; if (!is_bigendian()) { fb[0] = b[3]; fb[1] = b[2]; fb[2] = b[1]; fb[3] = b[0]; } else { fb[0] = b[0]; fb[1] = b[1]; fb[2] = b[2]; fb[3] = b[3]; } return f; } static double decode_bedouble(const char *b) { double d = 0.; char *db = (char *)&d; if (!is_bigendian()) { db[0] = b[7]; db[1] = b[6]; db[2] = b[5]; db[3] = b[4]; db[4] = b[3]; db[5] = b[2]; db[6] = b[1]; db[7] = b[0]; } else { db[0] = b[0]; db[1] = b[1]; db[2] = b[2]; db[3] = b[3]; db[4] = b[4]; db[5] = b[5]; db[6] = b[6]; db[7] = b[7]; } return d; } #endif /* CMP_NO_FLOAT */ static bool read_byte(cmp_ctx_t *ctx, uint8_t *x) { return ctx->read(ctx, x, sizeof(uint8_t)); } static bool write_byte(cmp_ctx_t *ctx, uint8_t x) { return ctx->write(ctx, &x, sizeof(uint8_t)) == sizeof(uint8_t); } static bool skip_bytes(cmp_ctx_t *ctx, size_t count) { if (ctx->skip) { return ctx->skip(ctx, count); } else { size_t i; for (i = 0; i < count; ++i) { uint8_t floor; if (!ctx->read(ctx, &floor, sizeof(uint8_t))) { return false; } } return true; } } static bool read_type_marker(cmp_ctx_t *ctx, uint8_t *marker) { if (read_byte(ctx, marker)) { return true; } ctx->error = CMP_ERROR_TYPE_MARKER_READING; return false; } static bool write_type_marker(cmp_ctx_t *ctx, uint8_t marker) { if (write_byte(ctx, marker)) return true; ctx->error = CMP_ERROR_TYPE_MARKER_WRITING; return false; } static bool write_fixed_value(cmp_ctx_t *ctx, uint8_t value) { if (write_byte(ctx, value)) return true; ctx->error = CMP_ERROR_FIXED_VALUE_WRITING; return false; } static bool type_marker_to_cmp_type(uint8_t type_marker, uint8_t *cmp_type) { if (type_marker <= 0x7F) { *cmp_type = CMP_TYPE_POSITIVE_FIXNUM; return true; } if (type_marker <= 0x8F) { *cmp_type = CMP_TYPE_FIXMAP; return true; } if (type_marker <= 0x9F) { *cmp_type = CMP_TYPE_FIXARRAY; return true; } if (type_marker <= 0xBF) { *cmp_type = CMP_TYPE_FIXSTR; return true; } if (type_marker >= 0xE0) { *cmp_type = CMP_TYPE_NEGATIVE_FIXNUM; return true; } switch (type_marker) { case NIL_MARKER: *cmp_type = CMP_TYPE_NIL; return true; case FALSE_MARKER: *cmp_type = CMP_TYPE_BOOLEAN; return true; case TRUE_MARKER: *cmp_type = CMP_TYPE_BOOLEAN; return true; case BIN8_MARKER: *cmp_type = 
CMP_TYPE_BIN8; return true; case BIN16_MARKER: *cmp_type = CMP_TYPE_BIN16; return true; case BIN32_MARKER: *cmp_type = CMP_TYPE_BIN32; return true; case EXT8_MARKER: *cmp_type = CMP_TYPE_EXT8; return true; case EXT16_MARKER: *cmp_type = CMP_TYPE_EXT16; return true; case EXT32_MARKER: *cmp_type = CMP_TYPE_EXT32; return true; case FLOAT_MARKER: *cmp_type = CMP_TYPE_FLOAT; return true; case DOUBLE_MARKER: *cmp_type = CMP_TYPE_DOUBLE; return true; case U8_MARKER: *cmp_type = CMP_TYPE_UINT8; return true; case U16_MARKER: *cmp_type = CMP_TYPE_UINT16; return true; case U32_MARKER: *cmp_type = CMP_TYPE_UINT32; return true; case U64_MARKER: *cmp_type = CMP_TYPE_UINT64; return true; case S8_MARKER: *cmp_type = CMP_TYPE_SINT8; return true; case S16_MARKER: *cmp_type = CMP_TYPE_SINT16; return true; case S32_MARKER: *cmp_type = CMP_TYPE_SINT32; return true; case S64_MARKER: *cmp_type = CMP_TYPE_SINT64; return true; case FIXEXT1_MARKER: *cmp_type = CMP_TYPE_FIXEXT1; return true; case FIXEXT2_MARKER: *cmp_type = CMP_TYPE_FIXEXT2; return true; case FIXEXT4_MARKER: *cmp_type = CMP_TYPE_FIXEXT4; return true; case FIXEXT8_MARKER: *cmp_type = CMP_TYPE_FIXEXT8; return true; case FIXEXT16_MARKER: *cmp_type = CMP_TYPE_FIXEXT16; return true; case STR8_MARKER: *cmp_type = CMP_TYPE_STR8; return true; case STR16_MARKER: *cmp_type = CMP_TYPE_STR16; return true; case STR32_MARKER: *cmp_type = CMP_TYPE_STR32; return true; case ARRAY16_MARKER: *cmp_type = CMP_TYPE_ARRAY16; return true; case ARRAY32_MARKER: *cmp_type = CMP_TYPE_ARRAY32; return true; case MAP16_MARKER: *cmp_type = CMP_TYPE_MAP16; return true; case MAP32_MARKER: *cmp_type = CMP_TYPE_MAP32; return true; default: return false; } } static bool read_type_size(cmp_ctx_t *ctx, uint8_t type_marker, uint8_t cmp_type, uint32_t *size) { uint8_t u8temp = 0; uint16_t u16temp = 0; uint32_t u32temp = 0; switch (cmp_type) { case CMP_TYPE_POSITIVE_FIXNUM: *size = 0; return true; case CMP_TYPE_FIXMAP: *size = (type_marker & FIXMAP_SIZE); return true; case CMP_TYPE_FIXARRAY: *size = (type_marker & FIXARRAY_SIZE); return true; case CMP_TYPE_FIXSTR: *size = (type_marker & FIXSTR_SIZE); return true; case CMP_TYPE_NIL: *size = 0; return true; case CMP_TYPE_BOOLEAN: *size = 0; return true; case CMP_TYPE_BIN8: if (!ctx->read(ctx, &u8temp, sizeof(uint8_t))) { ctx->error = CMP_ERROR_LENGTH_READING; return false; } *size = u8temp; return true; case CMP_TYPE_BIN16: if (!ctx->read(ctx, &u16temp, sizeof(uint16_t))) { ctx->error = CMP_ERROR_LENGTH_READING; return false; } *size = be16(u16temp); return true; case CMP_TYPE_BIN32: if (!ctx->read(ctx, &u32temp, sizeof(uint32_t))) { ctx->error = CMP_ERROR_LENGTH_READING; return false; } *size = be32(u32temp); return true; case CMP_TYPE_EXT8: if (!ctx->read(ctx, &u8temp, sizeof(uint8_t))) { ctx->error = CMP_ERROR_LENGTH_READING; return false; } *size = u8temp; return true; case CMP_TYPE_EXT16: if (!ctx->read(ctx, &u16temp, sizeof(uint16_t))) { ctx->error = CMP_ERROR_LENGTH_READING; return false; } *size = be16(u16temp); return true; case CMP_TYPE_EXT32: if (!ctx->read(ctx, &u32temp, sizeof(uint32_t))) { ctx->error = CMP_ERROR_LENGTH_READING; return false; } *size = be32(u32temp); return true; case CMP_TYPE_FLOAT: *size = 4; return true; case CMP_TYPE_DOUBLE: *size = 8; return true; case CMP_TYPE_UINT8: *size = 1; return true; case CMP_TYPE_UINT16: *size = 2; return true; case CMP_TYPE_UINT32: *size = 4; return true; case CMP_TYPE_UINT64: *size = 8; return true; case CMP_TYPE_SINT8: *size = 1; return true; case CMP_TYPE_SINT16: *size = 2; 
return true; case CMP_TYPE_SINT32: *size = 4; return true; case CMP_TYPE_SINT64: *size = 8; return true; case CMP_TYPE_FIXEXT1: *size = 1; return true; case CMP_TYPE_FIXEXT2: *size = 2; return true; case CMP_TYPE_FIXEXT4: *size = 4; return true; case CMP_TYPE_FIXEXT8: *size = 8; return true; case CMP_TYPE_FIXEXT16: *size = 16; return true; case CMP_TYPE_STR8: if (!ctx->read(ctx, &u8temp, sizeof(uint8_t))) { ctx->error = CMP_ERROR_DATA_READING; return false; } *size = u8temp; return true; case CMP_TYPE_STR16: if (!ctx->read(ctx, &u16temp, sizeof(uint16_t))) { ctx->error = CMP_ERROR_DATA_READING; return false; } *size = be16(u16temp); return true; case CMP_TYPE_STR32: if (!ctx->read(ctx, &u32temp, sizeof(uint32_t))) { ctx->error = CMP_ERROR_DATA_READING; return false; } *size = be32(u32temp); return true; case CMP_TYPE_ARRAY16: if (!ctx->read(ctx, &u16temp, sizeof(uint16_t))) { ctx->error = CMP_ERROR_DATA_READING; return false; } *size = be16(u16temp); return true; case CMP_TYPE_ARRAY32: if (!ctx->read(ctx, &u32temp, sizeof(uint32_t))) { ctx->error = CMP_ERROR_DATA_READING; return false; } *size = be32(u32temp); return true; case CMP_TYPE_MAP16: if (!ctx->read(ctx, &u16temp, sizeof(uint16_t))) { ctx->error = CMP_ERROR_DATA_READING; return false; } *size = be16(u16temp); return true; case CMP_TYPE_MAP32: if (!ctx->read(ctx, &u32temp, sizeof(uint32_t))) { ctx->error = CMP_ERROR_DATA_READING; return false; } *size = be32(u32temp); return true; case CMP_TYPE_NEGATIVE_FIXNUM: *size = 0; return true; default: ctx->error = CMP_ERROR_INVALID_TYPE; return false; } } static bool read_obj_data(cmp_ctx_t *ctx, uint8_t type_marker, cmp_object_t *obj) { switch (obj->type) { case CMP_TYPE_POSITIVE_FIXNUM: obj->as.u8 = type_marker; return true; case CMP_TYPE_NEGATIVE_FIXNUM: obj->as.s8 = (int8_t)type_marker; return true; case CMP_TYPE_NIL: obj->as.u8 = 0; return true; case CMP_TYPE_BOOLEAN: switch (type_marker) { case TRUE_MARKER: obj->as.boolean = true; return true; case FALSE_MARKER: obj->as.boolean = false; return true; default: break; } ctx->error = CMP_ERROR_INTERNAL; return false; case CMP_TYPE_UINT8: if (!ctx->read(ctx, &obj->as.u8, sizeof(uint8_t))) { ctx->error = CMP_ERROR_DATA_READING; return false; } return true; case CMP_TYPE_UINT16: if (!ctx->read(ctx, &obj->as.u16, sizeof(uint16_t))) { ctx->error = CMP_ERROR_DATA_READING; return false; } obj->as.u16 = be16(obj->as.u16); return true; case CMP_TYPE_UINT32: if (!ctx->read(ctx, &obj->as.u32, sizeof(uint32_t))) { ctx->error = CMP_ERROR_DATA_READING; return false; } obj->as.u32 = be32(obj->as.u32); return true; case CMP_TYPE_UINT64: if (!ctx->read(ctx, &obj->as.u64, sizeof(uint64_t))) { ctx->error = CMP_ERROR_DATA_READING; return false; } obj->as.u64 = be64(obj->as.u64); return true; case CMP_TYPE_SINT8: if (!ctx->read(ctx, &obj->as.s8, sizeof(int8_t))) { ctx->error = CMP_ERROR_DATA_READING; return false; } return true; case CMP_TYPE_SINT16: if (!ctx->read(ctx, &obj->as.s16, sizeof(int16_t))) { ctx->error = CMP_ERROR_DATA_READING; return false; } obj->as.s16 = sbe16(obj->as.s16); return true; case CMP_TYPE_SINT32: if (!ctx->read(ctx, &obj->as.s32, sizeof(int32_t))) { ctx->error = CMP_ERROR_DATA_READING; return false; } obj->as.s32 = sbe32(obj->as.s32); return true; case CMP_TYPE_SINT64: if (!ctx->read(ctx, &obj->as.s64, sizeof(int64_t))) { ctx->error = CMP_ERROR_DATA_READING; return false; } obj->as.s64 = sbe64(obj->as.s64); return true; case CMP_TYPE_FLOAT: { #ifndef CMP_NO_FLOAT char bytes[4]; if (!ctx->read(ctx, bytes, 4)) { ctx->error = 
CMP_ERROR_DATA_READING; return false; } obj->as.flt = decode_befloat(bytes); return true; #else /* CMP_NO_FLOAT */ ctx->error = CMP_ERROR_DISABLED_FLOATING_POINT; return false; #endif /* CMP_NO_FLOAT */ } case CMP_TYPE_DOUBLE: { #ifndef CMP_NO_FLOAT char bytes[8]; if (!ctx->read(ctx, bytes, 8)) { ctx->error = CMP_ERROR_DATA_READING; return false; } obj->as.dbl = decode_bedouble(bytes); return true; #else /* CMP_NO_FLOAT */ ctx->error = CMP_ERROR_DISABLED_FLOATING_POINT; return false; #endif /* CMP_NO_FLOAT */ } case CMP_TYPE_BIN8: case CMP_TYPE_BIN16: case CMP_TYPE_BIN32: return read_type_size(ctx, type_marker, obj->type, &obj->as.bin_size); case CMP_TYPE_FIXSTR: case CMP_TYPE_STR8: case CMP_TYPE_STR16: case CMP_TYPE_STR32: return read_type_size(ctx, type_marker, obj->type, &obj->as.str_size); case CMP_TYPE_FIXARRAY: case CMP_TYPE_ARRAY16: case CMP_TYPE_ARRAY32: return read_type_size(ctx, type_marker, obj->type, &obj->as.array_size); case CMP_TYPE_FIXMAP: case CMP_TYPE_MAP16: case CMP_TYPE_MAP32: return read_type_size(ctx, type_marker, obj->type, &obj->as.map_size); case CMP_TYPE_FIXEXT1: if (!ctx->read(ctx, &obj->as.ext.type, sizeof(int8_t))) { ctx->error = CMP_ERROR_EXT_TYPE_READING; return false; } obj->as.ext.size = 1; return true; case CMP_TYPE_FIXEXT2: if (!ctx->read(ctx, &obj->as.ext.type, sizeof(int8_t))) { ctx->error = CMP_ERROR_EXT_TYPE_READING; return false; } obj->as.ext.size = 2; return true; case CMP_TYPE_FIXEXT4: if (!ctx->read(ctx, &obj->as.ext.type, sizeof(int8_t))) { ctx->error = CMP_ERROR_EXT_TYPE_READING; return false; } obj->as.ext.size = 4; return true; case CMP_TYPE_FIXEXT8: if (!ctx->read(ctx, &obj->as.ext.type, sizeof(int8_t))) { ctx->error = CMP_ERROR_EXT_TYPE_READING; return false; } obj->as.ext.size = 8; return true; case CMP_TYPE_FIXEXT16: if (!ctx->read(ctx, &obj->as.ext.type, sizeof(int8_t))) { ctx->error = CMP_ERROR_EXT_TYPE_READING; return false; } obj->as.ext.size = 16; return true; case CMP_TYPE_EXT8: if (!read_type_size(ctx, type_marker, obj->type, &obj->as.ext.size)) { return false; } if (!ctx->read(ctx, &obj->as.ext.type, sizeof(int8_t))) { ctx->error = CMP_ERROR_EXT_TYPE_READING; return false; } return true; case CMP_TYPE_EXT16: if (!read_type_size(ctx, type_marker, obj->type, &obj->as.ext.size)) { return false; } if (!ctx->read(ctx, &obj->as.ext.type, sizeof(int8_t))) { ctx->error = CMP_ERROR_EXT_TYPE_READING; return false; } return true; case CMP_TYPE_EXT32: if (!read_type_size(ctx, type_marker, obj->type, &obj->as.ext.size)) { return false; } if (!ctx->read(ctx, &obj->as.ext.type, sizeof(int8_t))) { ctx->error = CMP_ERROR_EXT_TYPE_READING; return false; } return true; default: break; } ctx->error = CMP_ERROR_INVALID_TYPE; return false; } void cmp_init(cmp_ctx_t *ctx, void *buf, cmp_reader read, cmp_skipper skip, cmp_writer write) { ctx->error = CMP_ERROR_NONE; ctx->buf = buf; ctx->read = read; ctx->skip = skip; ctx->write = write; } uint32_t cmp_version(void) { return cmp_version_; } uint32_t cmp_mp_version(void) { return cmp_mp_version_; } const char* cmp_strerror(const cmp_ctx_t *ctx) { if (ctx->error > CMP_ERROR_NONE && ctx->error < CMP_ERROR_MAX) return cmp_error_message((cmp_error_t)ctx->error); return ""; } bool cmp_write_pfix(cmp_ctx_t *ctx, uint8_t c) { if (c <= 0x7F) return write_fixed_value(ctx, c); ctx->error = CMP_ERROR_INPUT_VALUE_TOO_LARGE; return false; } bool cmp_write_nfix(cmp_ctx_t *ctx, int8_t c) { if (c >= -0x20 && c <= -1) return write_fixed_value(ctx, (uint8_t)c); ctx->error = CMP_ERROR_INPUT_VALUE_TOO_LARGE; return false; } 
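/*
 * Fixnum writers: MessagePack encodes small integers directly in the type
 * marker byte. cmp_write_pfix() above accepts 0x00..0x7f, cmp_write_nfix()
 * accepts -32..-1, and cmp_write_sfix() below dispatches to whichever of the
 * two ranges fits, failing with CMP_ERROR_INPUT_VALUE_TOO_LARGE otherwise.
 */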
bool cmp_write_sfix(cmp_ctx_t *ctx, int8_t c) { if (c >= 0) return cmp_write_pfix(ctx, (uint8_t)c); if (c >= -0x20 && c <= -1) return cmp_write_nfix(ctx, c); ctx->error = CMP_ERROR_INPUT_VALUE_TOO_LARGE; return false; } bool cmp_write_s8(cmp_ctx_t *ctx, int8_t c) { if (!write_type_marker(ctx, S8_MARKER)) return false; return ctx->write(ctx, &c, sizeof(int8_t)) == sizeof(int8_t); } bool cmp_write_s16(cmp_ctx_t *ctx, int16_t s) { if (!write_type_marker(ctx, S16_MARKER)) return false; s = sbe16(s); return ctx->write(ctx, &s, sizeof(int16_t)) == sizeof(int16_t); } bool cmp_write_s32(cmp_ctx_t *ctx, int32_t i) { if (!write_type_marker(ctx, S32_MARKER)) return false; i = sbe32(i); return ctx->write(ctx, &i, sizeof(int32_t)) == sizeof(int32_t); } bool cmp_write_s64(cmp_ctx_t *ctx, int64_t l) { if (!write_type_marker(ctx, S64_MARKER)) return false; l = sbe64(l); return ctx->write(ctx, &l, sizeof(int64_t)) == sizeof(int64_t); } bool cmp_write_integer(cmp_ctx_t *ctx, int64_t d) { if (d >= 0) return cmp_write_uinteger(ctx, (uint64_t)d); if (d >= -0x20) return cmp_write_nfix(ctx, (int8_t)d); if (d >= -0x80) return cmp_write_s8(ctx, (int8_t)d); if (d >= -0x8000) return cmp_write_s16(ctx, (int16_t)d); if (d >= -INT64_C(0x80000000)) return cmp_write_s32(ctx, (int32_t)d); return cmp_write_s64(ctx, d); } bool cmp_write_ufix(cmp_ctx_t *ctx, uint8_t c) { return cmp_write_pfix(ctx, c); } bool cmp_write_u8(cmp_ctx_t *ctx, uint8_t c) { if (!write_type_marker(ctx, U8_MARKER)) return false; return ctx->write(ctx, &c, sizeof(uint8_t)) == sizeof(uint8_t); } bool cmp_write_u16(cmp_ctx_t *ctx, uint16_t s) { if (!write_type_marker(ctx, U16_MARKER)) return false; s = be16(s); return ctx->write(ctx, &s, sizeof(uint16_t)) == sizeof(uint16_t); } bool cmp_write_u32(cmp_ctx_t *ctx, uint32_t i) { if (!write_type_marker(ctx, U32_MARKER)) return false; i = be32(i); return ctx->write(ctx, &i, sizeof(uint32_t)) == sizeof(uint32_t); } bool cmp_write_u64(cmp_ctx_t *ctx, uint64_t l) { if (!write_type_marker(ctx, U64_MARKER)) return false; l = be64(l); return ctx->write(ctx, &l, sizeof(uint64_t)) == sizeof(uint64_t); } bool cmp_write_uinteger(cmp_ctx_t *ctx, uint64_t u) { if (u <= 0x7F) return cmp_write_pfix(ctx, (uint8_t)u); if (u <= 0xFF) return cmp_write_u8(ctx, (uint8_t)u); if (u <= 0xFFFF) return cmp_write_u16(ctx, (uint16_t)u); if (u <= 0xFFFFFFFF) return cmp_write_u32(ctx, (uint32_t)u); return cmp_write_u64(ctx, u); } #ifndef CMP_NO_FLOAT bool cmp_write_float(cmp_ctx_t *ctx, float f) { if (!write_type_marker(ctx, FLOAT_MARKER)) return false; /* * We may need to swap the float's bytes, but we can't just swap them inside * the float because the swapped bytes may not constitute a valid float. * Therefore, we have to create a buffer and swap the bytes there. 
*/ if (!is_bigendian()) { char swapped[sizeof(float)]; char *fbuf = (char *)&f; size_t i; for (i = 0; i < sizeof(float); ++i) swapped[i] = fbuf[sizeof(float) - i - 1]; return ctx->write(ctx, swapped, sizeof(float)) == sizeof(float); } return ctx->write(ctx, &f, sizeof(float)) == sizeof(float); } bool cmp_write_double(cmp_ctx_t *ctx, double d) { if (!write_type_marker(ctx, DOUBLE_MARKER)) return false; /* Same deal for doubles */ if (!is_bigendian()) { char swapped[sizeof(double)]; char *dbuf = (char *)&d; size_t i; for (i = 0; i < sizeof(double); ++i) swapped[i] = dbuf[sizeof(double) - i - 1]; return ctx->write(ctx, swapped, sizeof(double)) == sizeof(double); } return ctx->write(ctx, &d, sizeof(double)) == sizeof(double); } bool cmp_write_decimal(cmp_ctx_t *ctx, double d) { float f = (float)d; double df = (double)f; if (df == d) return cmp_write_float(ctx, f); else return cmp_write_double(ctx, d); } #endif /* CMP_NO_FLOAT */ bool cmp_write_nil(cmp_ctx_t *ctx) { return write_type_marker(ctx, NIL_MARKER); } bool cmp_write_true(cmp_ctx_t *ctx) { return write_type_marker(ctx, TRUE_MARKER); } bool cmp_write_false(cmp_ctx_t *ctx) { return write_type_marker(ctx, FALSE_MARKER); } bool cmp_write_bool(cmp_ctx_t *ctx, bool b) { if (b) return cmp_write_true(ctx); return cmp_write_false(ctx); } bool cmp_write_u8_as_bool(cmp_ctx_t *ctx, uint8_t b) { if (b) return cmp_write_true(ctx); return cmp_write_false(ctx); } bool cmp_write_fixstr_marker(cmp_ctx_t *ctx, uint8_t size) { if (size <= FIXSTR_SIZE) return write_fixed_value(ctx, FIXSTR_MARKER | size); ctx->error = CMP_ERROR_INPUT_VALUE_TOO_LARGE; return false; } bool cmp_write_fixstr(cmp_ctx_t *ctx, const char *data, uint8_t size) { if (!cmp_write_fixstr_marker(ctx, size)) return false; if (size == 0) return true; if (ctx->write(ctx, data, size) == size) return true; ctx->error = CMP_ERROR_DATA_WRITING; return false; } bool cmp_write_str8_marker(cmp_ctx_t *ctx, uint8_t size) { if (!write_type_marker(ctx, STR8_MARKER)) return false; if (ctx->write(ctx, &size, sizeof(uint8_t)) == sizeof(uint8_t)) return true; ctx->error = CMP_ERROR_LENGTH_WRITING; return false; } bool cmp_write_str8(cmp_ctx_t *ctx, const char *data, uint8_t size) { if (!cmp_write_str8_marker(ctx, size)) return false; if (size == 0) return true; if (ctx->write(ctx, data, size) == size) return true; ctx->error = CMP_ERROR_DATA_WRITING; return false; } bool cmp_write_str16_marker(cmp_ctx_t *ctx, uint16_t size) { if (!write_type_marker(ctx, STR16_MARKER)) return false; size = be16(size); if (ctx->write(ctx, &size, sizeof(uint16_t)) == sizeof(uint16_t)) return true; ctx->error = CMP_ERROR_LENGTH_WRITING; return false; } bool cmp_write_str16(cmp_ctx_t *ctx, const char *data, uint16_t size) { if (!cmp_write_str16_marker(ctx, size)) return false; if (size == 0) return true; if (ctx->write(ctx, data, size) == size) return true; ctx->error = CMP_ERROR_DATA_WRITING; return false; } bool cmp_write_str32_marker(cmp_ctx_t *ctx, uint32_t size) { if (!write_type_marker(ctx, STR32_MARKER)) return false; size = be32(size); if (ctx->write(ctx, &size, sizeof(uint32_t)) == sizeof(uint32_t)) return true; ctx->error = CMP_ERROR_LENGTH_WRITING; return false; } bool cmp_write_str32(cmp_ctx_t *ctx, const char *data, uint32_t size) { if (!cmp_write_str32_marker(ctx, size)) return false; if (size == 0) return true; if (ctx->write(ctx, data, size) == size) return true; ctx->error = CMP_ERROR_DATA_WRITING; return false; } bool cmp_write_str_marker(cmp_ctx_t *ctx, uint32_t size) { if (size <= FIXSTR_SIZE) return 
cmp_write_fixstr_marker(ctx, (uint8_t)size); if (size <= 0xFF) return cmp_write_str8_marker(ctx, (uint8_t)size); if (size <= 0xFFFF) return cmp_write_str16_marker(ctx, (uint16_t)size); return cmp_write_str32_marker(ctx, size); } bool cmp_write_str_marker_v4(cmp_ctx_t *ctx, uint32_t size) { if (size <= FIXSTR_SIZE) return cmp_write_fixstr_marker(ctx, (uint8_t)size); if (size <= 0xFFFF) return cmp_write_str16_marker(ctx, (uint16_t)size); return cmp_write_str32_marker(ctx, size); } bool cmp_write_str(cmp_ctx_t *ctx, const char *data, uint32_t size) { if (size <= FIXSTR_SIZE) return cmp_write_fixstr(ctx, data, (uint8_t)size); if (size <= 0xFF) return cmp_write_str8(ctx, data, (uint8_t)size); if (size <= 0xFFFF) return cmp_write_str16(ctx, data, (uint16_t)size); return cmp_write_str32(ctx, data, size); } bool cmp_write_str_v4(cmp_ctx_t *ctx, const char *data, uint32_t size) { if (size <= FIXSTR_SIZE) return cmp_write_fixstr(ctx, data, (uint8_t)size); if (size <= 0xFFFF) return cmp_write_str16(ctx, data, (uint16_t)size); return cmp_write_str32(ctx, data, size); } bool cmp_write_bin8_marker(cmp_ctx_t *ctx, uint8_t size) { if (!write_type_marker(ctx, BIN8_MARKER)) return false; if (ctx->write(ctx, &size, sizeof(uint8_t)) == sizeof(uint8_t)) return true; ctx->error = CMP_ERROR_LENGTH_WRITING; return false; } bool cmp_write_bin8(cmp_ctx_t *ctx, const void *data, uint8_t size) { if (!cmp_write_bin8_marker(ctx, size)) return false; if (size == 0) return true; if (ctx->write(ctx, data, size) == size) return true; ctx->error = CMP_ERROR_DATA_WRITING; return false; } bool cmp_write_bin16_marker(cmp_ctx_t *ctx, uint16_t size) { if (!write_type_marker(ctx, BIN16_MARKER)) return false; size = be16(size); if (ctx->write(ctx, &size, sizeof(uint16_t)) == sizeof(uint16_t)) return true; ctx->error = CMP_ERROR_LENGTH_WRITING; return false; } bool cmp_write_bin16(cmp_ctx_t *ctx, const void *data, uint16_t size) { if (!cmp_write_bin16_marker(ctx, size)) return false; if (size == 0) return true; if (ctx->write(ctx, data, size) == size) return true; ctx->error = CMP_ERROR_DATA_WRITING; return false; } bool cmp_write_bin32_marker(cmp_ctx_t *ctx, uint32_t size) { if (!write_type_marker(ctx, BIN32_MARKER)) return false; size = be32(size); if (ctx->write(ctx, &size, sizeof(uint32_t)) == sizeof(uint32_t)) return true; ctx->error = CMP_ERROR_LENGTH_WRITING; return false; } bool cmp_write_bin32(cmp_ctx_t *ctx, const void *data, uint32_t size) { if (!cmp_write_bin32_marker(ctx, size)) return false; if (size == 0) return true; if (ctx->write(ctx, data, size) == size) return true; ctx->error = CMP_ERROR_DATA_WRITING; return false; } bool cmp_write_bin_marker(cmp_ctx_t *ctx, uint32_t size) { if (size <= 0xFF) return cmp_write_bin8_marker(ctx, (uint8_t)size); if (size <= 0xFFFF) return cmp_write_bin16_marker(ctx, (uint16_t)size); return cmp_write_bin32_marker(ctx, size); } bool cmp_write_bin(cmp_ctx_t *ctx, const void *data, uint32_t size) { if (size <= 0xFF) return cmp_write_bin8(ctx, data, (uint8_t)size); if (size <= 0xFFFF) return cmp_write_bin16(ctx, data, (uint16_t)size); return cmp_write_bin32(ctx, data, size); } bool cmp_write_fixarray(cmp_ctx_t *ctx, uint8_t size) { if (size <= FIXARRAY_SIZE) return write_fixed_value(ctx, FIXARRAY_MARKER | size); ctx->error = CMP_ERROR_INPUT_VALUE_TOO_LARGE; return false; } bool cmp_write_array16(cmp_ctx_t *ctx, uint16_t size) { if (!write_type_marker(ctx, ARRAY16_MARKER)) return false; size = be16(size); if (ctx->write(ctx, &size, sizeof(uint16_t)) == sizeof(uint16_t)) return true; 
ctx->error = CMP_ERROR_LENGTH_WRITING; return false; } bool cmp_write_array32(cmp_ctx_t *ctx, uint32_t size) { if (!write_type_marker(ctx, ARRAY32_MARKER)) return false; size = be32(size); if (ctx->write(ctx, &size, sizeof(uint32_t)) == sizeof(uint32_t)) return true; ctx->error = CMP_ERROR_LENGTH_WRITING; return false; } bool cmp_write_array(cmp_ctx_t *ctx, uint32_t size) { if (size <= FIXARRAY_SIZE) return cmp_write_fixarray(ctx, (uint8_t)size); if (size <= 0xFFFF) return cmp_write_array16(ctx, (uint16_t)size); return cmp_write_array32(ctx, size); } bool cmp_write_fixmap(cmp_ctx_t *ctx, uint8_t size) { if (size <= FIXMAP_SIZE) return write_fixed_value(ctx, FIXMAP_MARKER | size); ctx->error = CMP_ERROR_INPUT_VALUE_TOO_LARGE; return false; } bool cmp_write_map16(cmp_ctx_t *ctx, uint16_t size) { if (!write_type_marker(ctx, MAP16_MARKER)) return false; size = be16(size); if (ctx->write(ctx, &size, sizeof(uint16_t)) == sizeof(uint16_t)) return true; ctx->error = CMP_ERROR_LENGTH_WRITING; return false; } bool cmp_write_map32(cmp_ctx_t *ctx, uint32_t size) { if (!write_type_marker(ctx, MAP32_MARKER)) return false; size = be32(size); if (ctx->write(ctx, &size, sizeof(uint32_t)) == sizeof(uint32_t)) return true; ctx->error = CMP_ERROR_LENGTH_WRITING; return false; } bool cmp_write_map(cmp_ctx_t *ctx, uint32_t size) { if (size <= FIXMAP_SIZE) return cmp_write_fixmap(ctx, (uint8_t)size); if (size <= 0xFFFF) return cmp_write_map16(ctx, (uint16_t)size); return cmp_write_map32(ctx, size); } bool cmp_write_fixext1_marker(cmp_ctx_t *ctx, int8_t type) { if (!write_type_marker(ctx, FIXEXT1_MARKER)) return false; if (ctx->write(ctx, &type, sizeof(int8_t)) == sizeof(int8_t)) return true; ctx->error = CMP_ERROR_EXT_TYPE_WRITING; return false; } bool cmp_write_fixext1(cmp_ctx_t *ctx, int8_t type, const void *data) { if (!cmp_write_fixext1_marker(ctx, type)) return false; if (ctx->write(ctx, data, 1) == 1) return true; ctx->error = CMP_ERROR_DATA_WRITING; return false; } bool cmp_write_fixext2_marker(cmp_ctx_t *ctx, int8_t type) { if (!write_type_marker(ctx, FIXEXT2_MARKER)) return false; if (ctx->write(ctx, &type, sizeof(int8_t)) == sizeof(int8_t)) return true; ctx->error = CMP_ERROR_EXT_TYPE_WRITING; return false; } bool cmp_write_fixext2(cmp_ctx_t *ctx, int8_t type, const void *data) { if (!cmp_write_fixext2_marker(ctx, type)) return false; if (ctx->write(ctx, data, 2) == 2) return true; ctx->error = CMP_ERROR_DATA_WRITING; return false; } bool cmp_write_fixext4_marker(cmp_ctx_t *ctx, int8_t type) { if (!write_type_marker(ctx, FIXEXT4_MARKER)) return false; if (ctx->write(ctx, &type, sizeof(int8_t)) == sizeof(int8_t)) return true; ctx->error = CMP_ERROR_EXT_TYPE_WRITING; return false; } bool cmp_write_fixext4(cmp_ctx_t *ctx, int8_t type, const void *data) { if (!cmp_write_fixext4_marker(ctx, type)) return false; if (ctx->write(ctx, data, 4) == 4) return true; ctx->error = CMP_ERROR_DATA_WRITING; return false; } bool cmp_write_fixext8_marker(cmp_ctx_t *ctx, int8_t type) { if (!write_type_marker(ctx, FIXEXT8_MARKER)) return false; if (ctx->write(ctx, &type, sizeof(int8_t)) == sizeof(int8_t)) return true; ctx->error = CMP_ERROR_EXT_TYPE_WRITING; return false; } bool cmp_write_fixext8(cmp_ctx_t *ctx, int8_t type, const void *data) { if (!cmp_write_fixext8_marker(ctx, type)) return false; if (ctx->write(ctx, data, 8) == 8) return true; ctx->error = CMP_ERROR_DATA_WRITING; return false; } bool cmp_write_fixext16_marker(cmp_ctx_t *ctx, int8_t type) { if (!write_type_marker(ctx, FIXEXT16_MARKER)) return false; if 
(ctx->write(ctx, &type, sizeof(int8_t)) == sizeof(int8_t)) return true; ctx->error = CMP_ERROR_EXT_TYPE_WRITING; return false; } bool cmp_write_fixext16(cmp_ctx_t *ctx, int8_t type, const void *data) { if (!cmp_write_fixext16_marker(ctx, type)) return false; if (ctx->write(ctx, data, 16) == 16) return true; ctx->error = CMP_ERROR_DATA_WRITING; return false; } bool cmp_write_ext8_marker(cmp_ctx_t *ctx, int8_t type, uint8_t size) { if (!write_type_marker(ctx, EXT8_MARKER)) return false; if (ctx->write(ctx, &size, sizeof(uint8_t)) != sizeof(uint8_t)) { ctx->error = CMP_ERROR_LENGTH_WRITING; return false; } if (ctx->write(ctx, &type, sizeof(int8_t)) == sizeof(int8_t)) return true; ctx->error = CMP_ERROR_EXT_TYPE_WRITING; return false; } bool cmp_write_ext8(cmp_ctx_t *ctx, int8_t type, uint8_t size, const void *data) { if (!cmp_write_ext8_marker(ctx, type, size)) return false; if (ctx->write(ctx, data, size) == size) return true; ctx->error = CMP_ERROR_DATA_WRITING; return false; } bool cmp_write_ext16_marker(cmp_ctx_t *ctx, int8_t type, uint16_t size) { if (!write_type_marker(ctx, EXT16_MARKER)) return false; size = be16(size); if (ctx->write(ctx, &size, sizeof(uint16_t)) != sizeof(uint16_t)) { ctx->error = CMP_ERROR_LENGTH_WRITING; return false; } if (ctx->write(ctx, &type, sizeof(int8_t)) == sizeof(int8_t)) return true; ctx->error = CMP_ERROR_EXT_TYPE_WRITING; return false; } bool cmp_write_ext16(cmp_ctx_t *ctx, int8_t type, uint16_t size, const void *data) { if (!cmp_write_ext16_marker(ctx, type, size)) return false; if (ctx->write(ctx, data, size) == size) return true; ctx->error = CMP_ERROR_DATA_WRITING; return false; } bool cmp_write_ext32_marker(cmp_ctx_t *ctx, int8_t type, uint32_t size) { if (!write_type_marker(ctx, EXT32_MARKER)) return false; size = be32(size); if (ctx->write(ctx, &size, sizeof(uint32_t)) != sizeof(uint32_t)) { ctx->error = CMP_ERROR_LENGTH_WRITING; return false; } if (ctx->write(ctx, &type, sizeof(int8_t)) == sizeof(int8_t)) return true; ctx->error = CMP_ERROR_EXT_TYPE_WRITING; return false; } bool cmp_write_ext32(cmp_ctx_t *ctx, int8_t type, uint32_t size, const void *data) { if (!cmp_write_ext32_marker(ctx, type, size)) return false; if (ctx->write(ctx, data, size) == size) return true; ctx->error = CMP_ERROR_DATA_WRITING; return false; } bool cmp_write_ext_marker(cmp_ctx_t *ctx, int8_t type, uint32_t size) { if (size == 1) return cmp_write_fixext1_marker(ctx, type); if (size == 2) return cmp_write_fixext2_marker(ctx, type); if (size == 4) return cmp_write_fixext4_marker(ctx, type); if (size == 8) return cmp_write_fixext8_marker(ctx, type); if (size == 16) return cmp_write_fixext16_marker(ctx, type); if (size <= 0xFF) return cmp_write_ext8_marker(ctx, type, (uint8_t)size); if (size <= 0xFFFF) return cmp_write_ext16_marker(ctx, type, (uint16_t)size); return cmp_write_ext32_marker(ctx, type, size); } bool cmp_write_ext(cmp_ctx_t *ctx, int8_t type, uint32_t size, const void *data) { if (size == 1) return cmp_write_fixext1(ctx, type, data); if (size == 2) return cmp_write_fixext2(ctx, type, data); if (size == 4) return cmp_write_fixext4(ctx, type, data); if (size == 8) return cmp_write_fixext8(ctx, type, data); if (size == 16) return cmp_write_fixext16(ctx, type, data); if (size <= 0xFF) return cmp_write_ext8(ctx, type, (uint8_t)size, data); if (size <= 0xFFFF) return cmp_write_ext16(ctx, type, (uint16_t)size, data); return cmp_write_ext32(ctx, type, size, data); } bool cmp_write_object(cmp_ctx_t *ctx, const cmp_object_t *obj) { switch(obj->type) { case 
CMP_TYPE_POSITIVE_FIXNUM: return cmp_write_pfix(ctx, obj->as.u8); case CMP_TYPE_FIXMAP: return cmp_write_fixmap(ctx, (uint8_t)obj->as.map_size); case CMP_TYPE_FIXARRAY: return cmp_write_fixarray(ctx, (uint8_t)obj->as.array_size); case CMP_TYPE_FIXSTR: return cmp_write_fixstr_marker(ctx, (uint8_t)obj->as.str_size); case CMP_TYPE_NIL: return cmp_write_nil(ctx); case CMP_TYPE_BOOLEAN: if (obj->as.boolean) return cmp_write_true(ctx); return cmp_write_false(ctx); case CMP_TYPE_BIN8: return cmp_write_bin8_marker(ctx, (uint8_t)obj->as.bin_size); case CMP_TYPE_BIN16: return cmp_write_bin16_marker(ctx, (uint16_t)obj->as.bin_size); case CMP_TYPE_BIN32: return cmp_write_bin32_marker(ctx, obj->as.bin_size); case CMP_TYPE_EXT8: return cmp_write_ext8_marker( ctx, obj->as.ext.type, (uint8_t)obj->as.ext.size ); case CMP_TYPE_EXT16: return cmp_write_ext16_marker( ctx, obj->as.ext.type, (uint16_t)obj->as.ext.size ); case CMP_TYPE_EXT32: return cmp_write_ext32_marker(ctx, obj->as.ext.type, obj->as.ext.size); case CMP_TYPE_FLOAT: #ifndef CMP_NO_FLOAT return cmp_write_float(ctx, obj->as.flt); #else /* CMP_NO_FLOAT */ ctx->error = CMP_ERROR_DISABLED_FLOATING_POINT; return false; #endif /* CMP_NO_FLOAT */ case CMP_TYPE_DOUBLE: #ifndef CMP_NO_FLOAT return cmp_write_double(ctx, obj->as.dbl); #else /* CMP_NO_FLOAT */ ctx->error = CMP_ERROR_DISABLED_FLOATING_POINT; return false; #endif case CMP_TYPE_UINT8: return cmp_write_u8(ctx, obj->as.u8); case CMP_TYPE_UINT16: return cmp_write_u16(ctx, obj->as.u16); case CMP_TYPE_UINT32: return cmp_write_u32(ctx, obj->as.u32); case CMP_TYPE_UINT64: return cmp_write_u64(ctx, obj->as.u64); case CMP_TYPE_SINT8: return cmp_write_s8(ctx, obj->as.s8); case CMP_TYPE_SINT16: return cmp_write_s16(ctx, obj->as.s16); case CMP_TYPE_SINT32: return cmp_write_s32(ctx, obj->as.s32); case CMP_TYPE_SINT64: return cmp_write_s64(ctx, obj->as.s64); case CMP_TYPE_FIXEXT1: return cmp_write_fixext1_marker(ctx, obj->as.ext.type); case CMP_TYPE_FIXEXT2: return cmp_write_fixext2_marker(ctx, obj->as.ext.type); case CMP_TYPE_FIXEXT4: return cmp_write_fixext4_marker(ctx, obj->as.ext.type); case CMP_TYPE_FIXEXT8: return cmp_write_fixext8_marker(ctx, obj->as.ext.type); case CMP_TYPE_FIXEXT16: return cmp_write_fixext16_marker(ctx, obj->as.ext.type); case CMP_TYPE_STR8: return cmp_write_str8_marker(ctx, (uint8_t)obj->as.str_size); case CMP_TYPE_STR16: return cmp_write_str16_marker(ctx, (uint16_t)obj->as.str_size); case CMP_TYPE_STR32: return cmp_write_str32_marker(ctx, obj->as.str_size); case CMP_TYPE_ARRAY16: return cmp_write_array16(ctx, (uint16_t)obj->as.array_size); case CMP_TYPE_ARRAY32: return cmp_write_array32(ctx, obj->as.array_size); case CMP_TYPE_MAP16: return cmp_write_map16(ctx, (uint16_t)obj->as.map_size); case CMP_TYPE_MAP32: return cmp_write_map32(ctx, obj->as.map_size); case CMP_TYPE_NEGATIVE_FIXNUM: return cmp_write_nfix(ctx, obj->as.s8); default: ctx->error = CMP_ERROR_INVALID_TYPE; return false; } } bool cmp_write_object_v4(cmp_ctx_t *ctx, const cmp_object_t *obj) { switch(obj->type) { case CMP_TYPE_POSITIVE_FIXNUM: return cmp_write_pfix(ctx, obj->as.u8); case CMP_TYPE_FIXMAP: return cmp_write_fixmap(ctx, (uint8_t)obj->as.map_size); case CMP_TYPE_FIXARRAY: return cmp_write_fixarray(ctx, (uint8_t)obj->as.array_size); case CMP_TYPE_FIXSTR: return cmp_write_fixstr_marker(ctx, (uint8_t)obj->as.str_size); case CMP_TYPE_NIL: return cmp_write_nil(ctx); case CMP_TYPE_BOOLEAN: if (obj->as.boolean) return cmp_write_true(ctx); return cmp_write_false(ctx); case CMP_TYPE_EXT8: return 
cmp_write_ext8_marker(ctx, obj->as.ext.type, (uint8_t)obj->as.ext.size); case CMP_TYPE_EXT16: return cmp_write_ext16_marker( ctx, obj->as.ext.type, (uint16_t)obj->as.ext.size ); case CMP_TYPE_EXT32: return cmp_write_ext32_marker(ctx, obj->as.ext.type, obj->as.ext.size); case CMP_TYPE_FLOAT: #ifndef CMP_NO_FLOAT return cmp_write_float(ctx, obj->as.flt); #else /* CMP_NO_FLOAT */ ctx->error = CMP_ERROR_DISABLED_FLOATING_POINT; return false; #endif case CMP_TYPE_DOUBLE: #ifndef CMP_NO_FLOAT return cmp_write_double(ctx, obj->as.dbl); #else ctx->error = CMP_ERROR_DISABLED_FLOATING_POINT; return false; #endif case CMP_TYPE_UINT8: return cmp_write_u8(ctx, obj->as.u8); case CMP_TYPE_UINT16: return cmp_write_u16(ctx, obj->as.u16); case CMP_TYPE_UINT32: return cmp_write_u32(ctx, obj->as.u32); case CMP_TYPE_UINT64: return cmp_write_u64(ctx, obj->as.u64); case CMP_TYPE_SINT8: return cmp_write_s8(ctx, obj->as.s8); case CMP_TYPE_SINT16: return cmp_write_s16(ctx, obj->as.s16); case CMP_TYPE_SINT32: return cmp_write_s32(ctx, obj->as.s32); case CMP_TYPE_SINT64: return cmp_write_s64(ctx, obj->as.s64); case CMP_TYPE_FIXEXT1: return cmp_write_fixext1_marker(ctx, obj->as.ext.type); case CMP_TYPE_FIXEXT2: return cmp_write_fixext2_marker(ctx, obj->as.ext.type); case CMP_TYPE_FIXEXT4: return cmp_write_fixext4_marker(ctx, obj->as.ext.type); case CMP_TYPE_FIXEXT8: return cmp_write_fixext8_marker(ctx, obj->as.ext.type); case CMP_TYPE_FIXEXT16: return cmp_write_fixext16_marker(ctx, obj->as.ext.type); case CMP_TYPE_STR16: return cmp_write_str16_marker(ctx, (uint16_t)obj->as.str_size); case CMP_TYPE_STR32: return cmp_write_str32_marker(ctx, obj->as.str_size); case CMP_TYPE_ARRAY16: return cmp_write_array16(ctx, (uint16_t)obj->as.array_size); case CMP_TYPE_ARRAY32: return cmp_write_array32(ctx, obj->as.array_size); case CMP_TYPE_MAP16: return cmp_write_map16(ctx, (uint16_t)obj->as.map_size); case CMP_TYPE_MAP32: return cmp_write_map32(ctx, obj->as.map_size); case CMP_TYPE_NEGATIVE_FIXNUM: return cmp_write_nfix(ctx, obj->as.s8); default: ctx->error = CMP_ERROR_INVALID_TYPE; return false; } } bool cmp_read_pfix(cmp_ctx_t *ctx, uint8_t *c) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; if (obj.type != CMP_TYPE_POSITIVE_FIXNUM) { ctx->error = CMP_ERROR_INVALID_TYPE; return false; } *c = obj.as.u8; return true; } bool cmp_read_nfix(cmp_ctx_t *ctx, int8_t *c) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; if (obj.type != CMP_TYPE_NEGATIVE_FIXNUM) { ctx->error = CMP_ERROR_INVALID_TYPE; return false; } *c = obj.as.s8; return true; } bool cmp_read_sfix(cmp_ctx_t *ctx, int8_t *c) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; switch (obj.type) { case CMP_TYPE_POSITIVE_FIXNUM: case CMP_TYPE_NEGATIVE_FIXNUM: *c = obj.as.s8; return true; default: ctx->error = CMP_ERROR_INVALID_TYPE; return false; } } bool cmp_read_s8(cmp_ctx_t *ctx, int8_t *c) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; if (obj.type != CMP_TYPE_SINT8) { ctx->error = CMP_ERROR_INVALID_TYPE; return false; } *c = obj.as.s8; return true; } bool cmp_read_s16(cmp_ctx_t *ctx, int16_t *s) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; if (obj.type != CMP_TYPE_SINT16) { ctx->error = CMP_ERROR_INVALID_TYPE; return false; } *s = obj.as.s16; return true; } bool cmp_read_s32(cmp_ctx_t *ctx, int32_t *i) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; if (obj.type != CMP_TYPE_SINT32) { ctx->error = CMP_ERROR_INVALID_TYPE; return false; } *i = obj.as.s32; return 
true; } bool cmp_read_s64(cmp_ctx_t *ctx, int64_t *l) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; if (obj.type != CMP_TYPE_SINT64) { ctx->error = CMP_ERROR_INVALID_TYPE; return false; } *l = obj.as.s64; return true; } bool cmp_read_char(cmp_ctx_t *ctx, int8_t *c) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; switch (obj.type) { case CMP_TYPE_POSITIVE_FIXNUM: case CMP_TYPE_NEGATIVE_FIXNUM: case CMP_TYPE_SINT8: *c = obj.as.s8; return true; case CMP_TYPE_UINT8: if (obj.as.u8 <= 0x7F) { *c = (int8_t)obj.as.u8; return true; } break; default: break; } ctx->error = CMP_ERROR_INVALID_TYPE; return false; } bool cmp_read_short(cmp_ctx_t *ctx, int16_t *s) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; switch (obj.type) { case CMP_TYPE_POSITIVE_FIXNUM: case CMP_TYPE_NEGATIVE_FIXNUM: case CMP_TYPE_SINT8: *s = obj.as.s8; return true; case CMP_TYPE_UINT8: *s = obj.as.u8; return true; case CMP_TYPE_SINT16: *s = obj.as.s16; return true; case CMP_TYPE_UINT16: if (obj.as.u16 <= 0x7FFF) { *s = (int16_t)obj.as.u16; return true; } break; default: break; } ctx->error = CMP_ERROR_INVALID_TYPE; return false; } bool cmp_read_int(cmp_ctx_t *ctx, int32_t *i) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; switch (obj.type) { case CMP_TYPE_POSITIVE_FIXNUM: case CMP_TYPE_NEGATIVE_FIXNUM: case CMP_TYPE_SINT8: *i = obj.as.s8; return true; case CMP_TYPE_UINT8: *i = obj.as.u8; return true; case CMP_TYPE_SINT16: *i = obj.as.s16; return true; case CMP_TYPE_UINT16: *i = obj.as.u16; return true; case CMP_TYPE_SINT32: *i = obj.as.s32; return true; case CMP_TYPE_UINT32: if (obj.as.u32 <= 0x7FFFFFFF) { *i = (int32_t)obj.as.u32; return true; } break; default: break; } ctx->error = CMP_ERROR_INVALID_TYPE; return false; } bool cmp_read_long(cmp_ctx_t *ctx, int64_t *d) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; switch (obj.type) { case CMP_TYPE_POSITIVE_FIXNUM: case CMP_TYPE_NEGATIVE_FIXNUM: case CMP_TYPE_SINT8: *d = obj.as.s8; return true; case CMP_TYPE_UINT8: *d = obj.as.u8; return true; case CMP_TYPE_SINT16: *d = obj.as.s16; return true; case CMP_TYPE_UINT16: *d = obj.as.u16; return true; case CMP_TYPE_SINT32: *d = obj.as.s32; return true; case CMP_TYPE_UINT32: *d = obj.as.u32; return true; case CMP_TYPE_SINT64: *d = obj.as.s64; return true; case CMP_TYPE_UINT64: if (obj.as.u64 <= UINT64_C(0x7FFFFFFFFFFFFFFF)) { *d = (int64_t)obj.as.u64; return true; } break; default: break; } ctx->error = CMP_ERROR_INVALID_TYPE; return false; } bool cmp_read_integer(cmp_ctx_t *ctx, int64_t *d) { return cmp_read_long(ctx, d); } bool cmp_read_ufix(cmp_ctx_t *ctx, uint8_t *c) { return cmp_read_pfix(ctx, c); } bool cmp_read_u8(cmp_ctx_t *ctx, uint8_t *c) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; if (obj.type != CMP_TYPE_UINT8) { ctx->error = CMP_ERROR_INVALID_TYPE; return false; } *c = obj.as.u8; return true; } bool cmp_read_u16(cmp_ctx_t *ctx, uint16_t *s) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; if (obj.type != CMP_TYPE_UINT16) { ctx->error = CMP_ERROR_INVALID_TYPE; return false; } *s = obj.as.u16; return true; } bool cmp_read_u32(cmp_ctx_t *ctx, uint32_t *i) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; if (obj.type != CMP_TYPE_UINT32) { ctx->error = CMP_ERROR_INVALID_TYPE; return false; } *i = obj.as.u32; return true; } bool cmp_read_u64(cmp_ctx_t *ctx, uint64_t *l) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; if (obj.type != CMP_TYPE_UINT64) { 
ctx->error = CMP_ERROR_INVALID_TYPE; return false; } *l = obj.as.u64; return true; } bool cmp_read_uchar(cmp_ctx_t *ctx, uint8_t *c) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; switch (obj.type) { case CMP_TYPE_POSITIVE_FIXNUM: case CMP_TYPE_UINT8: *c = obj.as.u8; return true; case CMP_TYPE_NEGATIVE_FIXNUM: case CMP_TYPE_SINT8: if (obj.as.s8 >= 0) { *c = (uint8_t)obj.as.s8; return true; } break; default: break; } ctx->error = CMP_ERROR_INVALID_TYPE; return false; } bool cmp_read_ushort(cmp_ctx_t *ctx, uint16_t *s) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; switch (obj.type) { case CMP_TYPE_POSITIVE_FIXNUM: case CMP_TYPE_UINT8: *s = obj.as.u8; return true; case CMP_TYPE_UINT16: *s = obj.as.u16; return true; case CMP_TYPE_NEGATIVE_FIXNUM: case CMP_TYPE_SINT8: if (obj.as.s8 >= 0) { *s = (uint8_t)obj.as.s8; return true; } break; case CMP_TYPE_SINT16: if (obj.as.s16 >= 0) { *s = (uint16_t)obj.as.s16; return true; } break; default: break; } ctx->error = CMP_ERROR_INVALID_TYPE; return false; } bool cmp_read_uint(cmp_ctx_t *ctx, uint32_t *i) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; switch (obj.type) { case CMP_TYPE_POSITIVE_FIXNUM: case CMP_TYPE_UINT8: *i = obj.as.u8; return true; case CMP_TYPE_UINT16: *i = obj.as.u16; return true; case CMP_TYPE_UINT32: *i = obj.as.u32; return true; case CMP_TYPE_NEGATIVE_FIXNUM: case CMP_TYPE_SINT8: if (obj.as.s8 >= 0) { *i = (uint8_t)obj.as.s8; return true; } break; case CMP_TYPE_SINT16: if (obj.as.s16 >= 0) { *i = (uint16_t)obj.as.s16; return true; } break; case CMP_TYPE_SINT32: if (obj.as.s32 >= 0) { *i = (uint32_t)obj.as.s32; return true; } break; default: break; } ctx->error = CMP_ERROR_INVALID_TYPE; return false; } bool cmp_read_ulong(cmp_ctx_t *ctx, uint64_t *u) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; switch (obj.type) { case CMP_TYPE_POSITIVE_FIXNUM: case CMP_TYPE_UINT8: *u = obj.as.u8; return true; case CMP_TYPE_UINT16: *u = obj.as.u16; return true; case CMP_TYPE_UINT32: *u = obj.as.u32; return true; case CMP_TYPE_UINT64: *u = obj.as.u64; return true; case CMP_TYPE_NEGATIVE_FIXNUM: case CMP_TYPE_SINT8: if (obj.as.s8 >= 0) { *u = (uint8_t)obj.as.s8; return true; } break; case CMP_TYPE_SINT16: if (obj.as.s16 >= 0) { *u = (uint16_t)obj.as.s16; return true; } break; case CMP_TYPE_SINT32: if (obj.as.s32 >= 0) { *u = (uint32_t)obj.as.s32; return true; } break; case CMP_TYPE_SINT64: if (obj.as.s64 >= 0) { *u = (uint64_t)obj.as.s64; return true; } break; default: break; } ctx->error = CMP_ERROR_INVALID_TYPE; return false; } bool cmp_read_uinteger(cmp_ctx_t *ctx, uint64_t *u) { return cmp_read_ulong(ctx, u); } #ifndef CMP_NO_FLOAT bool cmp_read_float(cmp_ctx_t *ctx, float *f) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; if (obj.type != CMP_TYPE_FLOAT) { ctx->error = CMP_ERROR_INVALID_TYPE; return false; } *f = obj.as.flt; return true; } bool cmp_read_double(cmp_ctx_t *ctx, double *d) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; if (obj.type != CMP_TYPE_DOUBLE) { ctx->error = CMP_ERROR_INVALID_TYPE; return false; } *d = obj.as.dbl; return true; } bool cmp_read_decimal(cmp_ctx_t *ctx, double *d) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; switch (obj.type) { case CMP_TYPE_FLOAT: *d = (double)obj.as.flt; return true; case CMP_TYPE_DOUBLE: *d = obj.as.dbl; return true; default: ctx->error = CMP_ERROR_INVALID_TYPE; return false; } } #endif /* CMP_NO_FLOAT */ bool cmp_read_nil(cmp_ctx_t *ctx) { 
cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; if (obj.type == CMP_TYPE_NIL) return true; ctx->error = CMP_ERROR_INVALID_TYPE; return false; } bool cmp_read_bool(cmp_ctx_t *ctx, bool *b) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; if (obj.type != CMP_TYPE_BOOLEAN) { ctx->error = CMP_ERROR_INVALID_TYPE; return false; } if (obj.as.boolean) *b = true; else *b = false; return true; } bool cmp_read_bool_as_u8(cmp_ctx_t *ctx, uint8_t *b) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; if (obj.type != CMP_TYPE_BOOLEAN) { ctx->error = CMP_ERROR_INVALID_TYPE; return false; } if (obj.as.boolean) *b = 1; else *b = 0; return true; } bool cmp_read_str_size(cmp_ctx_t *ctx, uint32_t *size) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; switch (obj.type) { case CMP_TYPE_FIXSTR: case CMP_TYPE_STR8: case CMP_TYPE_STR16: case CMP_TYPE_STR32: *size = obj.as.str_size; return true; default: ctx->error = CMP_ERROR_INVALID_TYPE; return false; } } bool cmp_read_str(cmp_ctx_t *ctx, char *data, uint32_t *size) { uint32_t str_size = 0; if (!cmp_read_str_size(ctx, &str_size)) return false; if (str_size >= *size) { *size = str_size; ctx->error = CMP_ERROR_STR_DATA_LENGTH_TOO_LONG; return false; } if (!ctx->read(ctx, data, str_size)) { ctx->error = CMP_ERROR_DATA_READING; return false; } data[str_size] = 0; *size = str_size; return true; } bool cmp_read_bin_size(cmp_ctx_t *ctx, uint32_t *size) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; switch (obj.type) { case CMP_TYPE_BIN8: case CMP_TYPE_BIN16: case CMP_TYPE_BIN32: *size = obj.as.bin_size; return true; default: ctx->error = CMP_ERROR_INVALID_TYPE; return false; } } bool cmp_read_bin(cmp_ctx_t *ctx, void *data, uint32_t *size) { uint32_t bin_size = 0; if (!cmp_read_bin_size(ctx, &bin_size)) return false; if (bin_size > *size) { ctx->error = CMP_ERROR_BIN_DATA_LENGTH_TOO_LONG; return false; } if (!ctx->read(ctx, data, bin_size)) { ctx->error = CMP_ERROR_DATA_READING; return false; } *size = bin_size; return true; } bool cmp_read_array(cmp_ctx_t *ctx, uint32_t *size) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; switch (obj.type) { case CMP_TYPE_FIXARRAY: case CMP_TYPE_ARRAY16: case CMP_TYPE_ARRAY32: *size = obj.as.array_size; return true; default: ctx->error = CMP_ERROR_INVALID_TYPE; return false; } } bool cmp_read_map(cmp_ctx_t *ctx, uint32_t *size) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; switch (obj.type) { case CMP_TYPE_FIXMAP: case CMP_TYPE_MAP16: case CMP_TYPE_MAP32: *size = obj.as.map_size; return true; default: ctx->error = CMP_ERROR_INVALID_TYPE; return false; } } bool cmp_read_fixext1_marker(cmp_ctx_t *ctx, int8_t *type) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; if (obj.type != CMP_TYPE_FIXEXT1) { ctx->error = CMP_ERROR_INVALID_TYPE; return false; } *type = obj.as.ext.type; return true; } bool cmp_read_fixext1(cmp_ctx_t *ctx, int8_t *type, void *data) { if (!cmp_read_fixext1_marker(ctx, type)) return false; if (ctx->read(ctx, data, 1)) return true; ctx->error = CMP_ERROR_DATA_READING; return false; } bool cmp_read_fixext2_marker(cmp_ctx_t *ctx, int8_t *type) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; if (obj.type != CMP_TYPE_FIXEXT2) { ctx->error = CMP_ERROR_INVALID_TYPE; return false; } *type = obj.as.ext.type; return true; } bool cmp_read_fixext2(cmp_ctx_t *ctx, int8_t *type, void *data) { if (!cmp_read_fixext2_marker(ctx, type)) return false; if (ctx->read(ctx, 
data, 2)) return true; ctx->error = CMP_ERROR_DATA_READING; return false; } bool cmp_read_fixext4_marker(cmp_ctx_t *ctx, int8_t *type) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; if (obj.type != CMP_TYPE_FIXEXT4) { ctx->error = CMP_ERROR_INVALID_TYPE; return false; } *type = obj.as.ext.type; return true; } bool cmp_read_fixext4(cmp_ctx_t *ctx, int8_t *type, void *data) { if (!cmp_read_fixext4_marker(ctx, type)) return false; if (ctx->read(ctx, data, 4)) return true; ctx->error = CMP_ERROR_DATA_READING; return false; } bool cmp_read_fixext8_marker(cmp_ctx_t *ctx, int8_t *type) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; if (obj.type != CMP_TYPE_FIXEXT8) { ctx->error = CMP_ERROR_INVALID_TYPE; return false; } *type = obj.as.ext.type; return true; } bool cmp_read_fixext8(cmp_ctx_t *ctx, int8_t *type, void *data) { if (!cmp_read_fixext8_marker(ctx, type)) return false; if (ctx->read(ctx, data, 8)) return true; ctx->error = CMP_ERROR_DATA_READING; return false; } bool cmp_read_fixext16_marker(cmp_ctx_t *ctx, int8_t *type) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; if (obj.type != CMP_TYPE_FIXEXT16) { ctx->error = CMP_ERROR_INVALID_TYPE; return false; } *type = obj.as.ext.type; return true; } bool cmp_read_fixext16(cmp_ctx_t *ctx, int8_t *type, void *data) { if (!cmp_read_fixext16_marker(ctx, type)) return false; if (ctx->read(ctx, data, 16)) return true; ctx->error = CMP_ERROR_DATA_READING; return false; } bool cmp_read_ext8_marker(cmp_ctx_t *ctx, int8_t *type, uint8_t *size) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; if (obj.type != CMP_TYPE_EXT8) { ctx->error = CMP_ERROR_INVALID_TYPE; return false; } *type = obj.as.ext.type; *size = (uint8_t)obj.as.ext.size; return true; } bool cmp_read_ext8(cmp_ctx_t *ctx, int8_t *type, uint8_t *size, void *data) { if (!cmp_read_ext8_marker(ctx, type, size)) return false; if (ctx->read(ctx, data, *size)) return true; ctx->error = CMP_ERROR_DATA_READING; return false; } bool cmp_read_ext16_marker(cmp_ctx_t *ctx, int8_t *type, uint16_t *size) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; if (obj.type != CMP_TYPE_EXT16) { ctx->error = CMP_ERROR_INVALID_TYPE; return false; } *type = obj.as.ext.type; *size = (uint16_t)obj.as.ext.size; return true; } bool cmp_read_ext16(cmp_ctx_t *ctx, int8_t *type, uint16_t *size, void *data) { if (!cmp_read_ext16_marker(ctx, type, size)) return false; if (ctx->read(ctx, data, *size)) return true; ctx->error = CMP_ERROR_DATA_READING; return false; } bool cmp_read_ext32_marker(cmp_ctx_t *ctx, int8_t *type, uint32_t *size) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; if (obj.type != CMP_TYPE_EXT32) { ctx->error = CMP_ERROR_INVALID_TYPE; return false; } *type = obj.as.ext.type; *size = obj.as.ext.size; return true; } bool cmp_read_ext32(cmp_ctx_t *ctx, int8_t *type, uint32_t *size, void *data) { if (!cmp_read_ext32_marker(ctx, type, size)) return false; if (ctx->read(ctx, data, *size)) return true; ctx->error = CMP_ERROR_DATA_READING; return false; } bool cmp_read_ext_marker(cmp_ctx_t *ctx, int8_t *type, uint32_t *size) { cmp_object_t obj; if (!cmp_read_object(ctx, &obj)) return false; switch (obj.type) { case CMP_TYPE_FIXEXT1: case CMP_TYPE_FIXEXT2: case CMP_TYPE_FIXEXT4: case CMP_TYPE_FIXEXT8: case CMP_TYPE_FIXEXT16: case CMP_TYPE_EXT8: case CMP_TYPE_EXT16: case CMP_TYPE_EXT32: *type = obj.as.ext.type; *size = obj.as.ext.size; return true; default: ctx->error = CMP_ERROR_INVALID_TYPE; return 
false; } } bool cmp_read_ext(cmp_ctx_t *ctx, int8_t *type, uint32_t *size, void *data) { if (!cmp_read_ext_marker(ctx, type, size)) return false; if (ctx->read(ctx, data, *size)) return true; ctx->error = CMP_ERROR_DATA_READING; return false; } bool cmp_read_object(cmp_ctx_t *ctx, cmp_object_t *obj) { uint8_t type_marker = 0; if (!read_type_marker(ctx, &type_marker)) return false; if (!type_marker_to_cmp_type(type_marker, &obj->type)) { ctx->error = CMP_ERROR_INVALID_TYPE; return false; } return read_obj_data(ctx, type_marker, obj); } bool cmp_skip_object(cmp_ctx_t *ctx, cmp_object_t *obj) { uint8_t type_marker = 0; uint8_t cmp_type; uint32_t size = 0; if (!read_type_marker(ctx, &type_marker)) { return false; } if (!type_marker_to_cmp_type(type_marker, &cmp_type)) { ctx->error = CMP_ERROR_INVALID_TYPE; return false; } switch (cmp_type) { case CMP_TYPE_FIXARRAY: case CMP_TYPE_ARRAY16: case CMP_TYPE_ARRAY32: case CMP_TYPE_FIXMAP: case CMP_TYPE_MAP16: case CMP_TYPE_MAP32: obj->type = cmp_type; if (!read_obj_data(ctx, type_marker, obj)) { return false; } ctx->error = CMP_ERROR_SKIP_DEPTH_LIMIT_EXCEEDED; return false; default: if (!read_type_size(ctx, type_marker, cmp_type, &size)) { return false; } if (size) { switch (cmp_type) { case CMP_TYPE_FIXEXT1: case CMP_TYPE_FIXEXT2: case CMP_TYPE_FIXEXT4: case CMP_TYPE_FIXEXT8: case CMP_TYPE_FIXEXT16: case CMP_TYPE_EXT8: case CMP_TYPE_EXT16: case CMP_TYPE_EXT32: ++size; break; default: break; } if (!skip_bytes(ctx, size)) { return false; } } } return true; } bool cmp_skip_object_flat(cmp_ctx_t *ctx, cmp_object_t *obj) { size_t element_count = 1; bool in_container = false; while (element_count) { uint8_t type_marker = 0; uint8_t cmp_type; uint32_t size = 0; if (!read_type_marker(ctx, &type_marker)) { return false; } if (!type_marker_to_cmp_type(type_marker, &cmp_type)) { ctx->error = CMP_ERROR_INVALID_TYPE; return false; } switch (cmp_type) { case CMP_TYPE_FIXARRAY: case CMP_TYPE_ARRAY16: case CMP_TYPE_ARRAY32: case CMP_TYPE_FIXMAP: case CMP_TYPE_MAP16: case CMP_TYPE_MAP32: if (in_container) { obj->type = cmp_type; if (!read_obj_data(ctx, type_marker, obj)) { return false; } ctx->error = CMP_ERROR_SKIP_DEPTH_LIMIT_EXCEEDED; return false; } in_container = true; break; default: if (!read_type_size(ctx, type_marker, cmp_type, &size)) { return false; } if (size) { switch (cmp_type) { case CMP_TYPE_FIXEXT1: case CMP_TYPE_FIXEXT2: case CMP_TYPE_FIXEXT4: case CMP_TYPE_FIXEXT8: case CMP_TYPE_FIXEXT16: case CMP_TYPE_EXT8: case CMP_TYPE_EXT16: case CMP_TYPE_EXT32: ++size; break; default: break; } if (!skip_bytes(ctx, size)) { return false; } } } element_count--; switch (cmp_type) { case CMP_TYPE_FIXARRAY: case CMP_TYPE_ARRAY16: case CMP_TYPE_ARRAY32: if (!read_type_size(ctx, type_marker, cmp_type, &size)) { return false; } element_count += size; break; case CMP_TYPE_FIXMAP: case CMP_TYPE_MAP16: case CMP_TYPE_MAP32: if (!read_type_size(ctx, type_marker, cmp_type, &size)) { return false; } element_count += ((size_t)size) * 2; break; default: break; } } return true; } bool cmp_skip_object_no_limit(cmp_ctx_t *ctx) { size_t element_count = 1; while (element_count) { uint8_t type_marker = 0; uint8_t cmp_type = 0; uint32_t size = 0; if (!read_type_marker(ctx, &type_marker)) { return false; } if (!type_marker_to_cmp_type(type_marker, &cmp_type)) { ctx->error = CMP_ERROR_INVALID_TYPE; return false; } switch (cmp_type) { case CMP_TYPE_FIXARRAY: case CMP_TYPE_ARRAY16: case CMP_TYPE_ARRAY32: case CMP_TYPE_FIXMAP: case CMP_TYPE_MAP16: case CMP_TYPE_MAP32: break; default: if 
(!read_type_size(ctx, type_marker, cmp_type, &size)) { return false; } if (size) { switch (cmp_type) { case CMP_TYPE_FIXEXT1: case CMP_TYPE_FIXEXT2: case CMP_TYPE_FIXEXT4: case CMP_TYPE_FIXEXT8: case CMP_TYPE_FIXEXT16: case CMP_TYPE_EXT8: case CMP_TYPE_EXT16: case CMP_TYPE_EXT32: ++size; break; default: break; } if (!skip_bytes(ctx, size)) { return false; } } } element_count--; switch (cmp_type) { case CMP_TYPE_FIXARRAY: case CMP_TYPE_ARRAY16: case CMP_TYPE_ARRAY32: if (!read_type_size(ctx, type_marker, cmp_type, &size)) { return false; } element_count += size; break; case CMP_TYPE_FIXMAP: case CMP_TYPE_MAP16: case CMP_TYPE_MAP32: if (!read_type_size(ctx, type_marker, cmp_type, &size)) { return false; } element_count += ((size_t)size) * 2; break; default: break; } } return true; } bool cmp_skip_object_limit(cmp_ctx_t *ctx, cmp_object_t *obj, uint32_t limit) { size_t element_count = 1; uint32_t depth = 0; while (element_count) { uint8_t type_marker = 0; uint8_t cmp_type; uint32_t size = 0; if (!read_type_marker(ctx, &type_marker)) { return false; } if (!type_marker_to_cmp_type(type_marker, &cmp_type)) { ctx->error = CMP_ERROR_INVALID_TYPE; return false; } switch (cmp_type) { case CMP_TYPE_FIXARRAY: case CMP_TYPE_ARRAY16: case CMP_TYPE_ARRAY32: case CMP_TYPE_FIXMAP: case CMP_TYPE_MAP16: case CMP_TYPE_MAP32: ++depth; if (depth > limit) { obj->type = cmp_type; if (!read_obj_data(ctx, type_marker, obj)) { return false; } ctx->error = CMP_ERROR_SKIP_DEPTH_LIMIT_EXCEEDED; return false; } break; default: if (!read_type_size(ctx, type_marker, cmp_type, &size)) { return false; } if (size) { switch (cmp_type) { case CMP_TYPE_FIXEXT1: case CMP_TYPE_FIXEXT2: case CMP_TYPE_FIXEXT4: case CMP_TYPE_FIXEXT8: case CMP_TYPE_FIXEXT16: case CMP_TYPE_EXT8: case CMP_TYPE_EXT16: case CMP_TYPE_EXT32: ++size; break; default: break; } if (!skip_bytes(ctx, size)) { return false; } } } element_count--; switch (cmp_type) { case CMP_TYPE_FIXARRAY: case CMP_TYPE_ARRAY16: case CMP_TYPE_ARRAY32: if (!read_type_size(ctx, type_marker, cmp_type, &size)) { return false; } element_count += size; break; case CMP_TYPE_FIXMAP: case CMP_TYPE_MAP16: case CMP_TYPE_MAP32: if (!read_type_size(ctx, type_marker, cmp_type, &size)) { return false; } element_count += ((size_t)size) * 2; break; default: break; } } return true; } bool cmp_object_is_char(const cmp_object_t *obj) { switch (obj->type) { case CMP_TYPE_NEGATIVE_FIXNUM: case CMP_TYPE_SINT8: return true; default: return false; } } bool cmp_object_is_short(const cmp_object_t *obj) { switch (obj->type) { case CMP_TYPE_NEGATIVE_FIXNUM: case CMP_TYPE_SINT8: case CMP_TYPE_SINT16: return true; default: return false; } } bool cmp_object_is_int(const cmp_object_t *obj) { switch (obj->type) { case CMP_TYPE_NEGATIVE_FIXNUM: case CMP_TYPE_SINT8: case CMP_TYPE_SINT16: case CMP_TYPE_SINT32: return true; default: return false; } } bool cmp_object_is_long(const cmp_object_t *obj) { switch (obj->type) { case CMP_TYPE_NEGATIVE_FIXNUM: case CMP_TYPE_SINT8: case CMP_TYPE_SINT16: case CMP_TYPE_SINT32: case CMP_TYPE_SINT64: return true; default: return false; } } bool cmp_object_is_sinteger(const cmp_object_t *obj) { return cmp_object_is_long(obj); } bool cmp_object_is_uchar(const cmp_object_t *obj) { switch (obj->type) { case CMP_TYPE_POSITIVE_FIXNUM: case CMP_TYPE_UINT8: return true; default: return false; } } bool cmp_object_is_ushort(const cmp_object_t *obj) { switch (obj->type) { case CMP_TYPE_POSITIVE_FIXNUM: case CMP_TYPE_UINT8: return true; case CMP_TYPE_UINT16: return true; default: return 
false; } } bool cmp_object_is_uint(const cmp_object_t *obj) { switch (obj->type) { case CMP_TYPE_POSITIVE_FIXNUM: case CMP_TYPE_UINT8: case CMP_TYPE_UINT16: case CMP_TYPE_UINT32: return true; default: return false; } } bool cmp_object_is_ulong(const cmp_object_t *obj) { switch (obj->type) { case CMP_TYPE_POSITIVE_FIXNUM: case CMP_TYPE_UINT8: case CMP_TYPE_UINT16: case CMP_TYPE_UINT32: case CMP_TYPE_UINT64: return true; default: return false; } } bool cmp_object_is_uinteger(const cmp_object_t *obj) { return cmp_object_is_ulong(obj); } bool cmp_object_is_float(const cmp_object_t *obj) { if (obj->type == CMP_TYPE_FLOAT) return true; return false; } bool cmp_object_is_double(const cmp_object_t *obj) { if (obj->type == CMP_TYPE_DOUBLE) return true; return false; } bool cmp_object_is_nil(const cmp_object_t *obj) { if (obj->type == CMP_TYPE_NIL) return true; return false; } bool cmp_object_is_bool(const cmp_object_t *obj) { if (obj->type == CMP_TYPE_BOOLEAN) return true; return false; } bool cmp_object_is_str(const cmp_object_t *obj) { switch (obj->type) { case CMP_TYPE_FIXSTR: case CMP_TYPE_STR8: case CMP_TYPE_STR16: case CMP_TYPE_STR32: return true; default: return false; } } bool cmp_object_is_bin(const cmp_object_t *obj) { switch (obj->type) { case CMP_TYPE_BIN8: case CMP_TYPE_BIN16: case CMP_TYPE_BIN32: return true; default: return false; } } bool cmp_object_is_array(const cmp_object_t *obj) { switch (obj->type) { case CMP_TYPE_FIXARRAY: case CMP_TYPE_ARRAY16: case CMP_TYPE_ARRAY32: return true; default: return false; } } bool cmp_object_is_map(const cmp_object_t *obj) { switch (obj->type) { case CMP_TYPE_FIXMAP: case CMP_TYPE_MAP16: case CMP_TYPE_MAP32: return true; default: return false; } } bool cmp_object_is_ext(const cmp_object_t *obj) { switch (obj->type) { case CMP_TYPE_FIXEXT1: case CMP_TYPE_FIXEXT2: case CMP_TYPE_FIXEXT4: case CMP_TYPE_FIXEXT8: case CMP_TYPE_FIXEXT16: case CMP_TYPE_EXT8: case CMP_TYPE_EXT16: case CMP_TYPE_EXT32: return true; default: return false; } } bool cmp_object_as_char(const cmp_object_t *obj, int8_t *c) { switch (obj->type) { case CMP_TYPE_POSITIVE_FIXNUM: case CMP_TYPE_NEGATIVE_FIXNUM: case CMP_TYPE_SINT8: *c = obj->as.s8; return true; case CMP_TYPE_UINT8: if (obj->as.u8 <= 0x7F) { *c = obj->as.s8; return true; } else { return false; } default: return false; } } bool cmp_object_as_short(const cmp_object_t *obj, int16_t *s) { switch (obj->type) { case CMP_TYPE_POSITIVE_FIXNUM: case CMP_TYPE_NEGATIVE_FIXNUM: case CMP_TYPE_SINT8: *s = obj->as.s8; return true; case CMP_TYPE_UINT8: *s = obj->as.u8; return true; case CMP_TYPE_SINT16: *s = obj->as.s16; return true; case CMP_TYPE_UINT16: if (obj->as.u16 <= 0x7FFF) { *s = (int16_t)obj->as.u16; return true; } else { return false; } default: return false; } } bool cmp_object_as_int(const cmp_object_t *obj, int32_t *i) { switch (obj->type) { case CMP_TYPE_POSITIVE_FIXNUM: case CMP_TYPE_NEGATIVE_FIXNUM: case CMP_TYPE_SINT8: *i = obj->as.s8; return true; case CMP_TYPE_UINT8: *i = obj->as.u8; return true; case CMP_TYPE_SINT16: *i = obj->as.s16; return true; case CMP_TYPE_UINT16: *i = obj->as.u16; return true; case CMP_TYPE_SINT32: *i = obj->as.s32; return true; case CMP_TYPE_UINT32: if (obj->as.u32 <= 0x7FFFFFFF) { *i = (int32_t)obj->as.u32; return true; } else { return false; } default: return false; } } bool cmp_object_as_long(const cmp_object_t *obj, int64_t *d) { switch (obj->type) { case CMP_TYPE_POSITIVE_FIXNUM: case CMP_TYPE_NEGATIVE_FIXNUM: case CMP_TYPE_SINT8: *d = obj->as.s8; return true; case CMP_TYPE_UINT8: *d 
= obj->as.u8; return true; case CMP_TYPE_SINT16: *d = obj->as.s16; return true; case CMP_TYPE_UINT16: *d = obj->as.u16; return true; case CMP_TYPE_SINT32: *d = obj->as.s32; return true; case CMP_TYPE_UINT32: *d = obj->as.u32; return true; case CMP_TYPE_SINT64: *d = obj->as.s64; return true; case CMP_TYPE_UINT64: if (obj->as.u64 <= UINT64_C(0x7FFFFFFFFFFFFFFF)) { *d = (int64_t)obj->as.u64; return true; } else { return false; } default: return false; } } bool cmp_object_as_sinteger(const cmp_object_t *obj, int64_t *d) { return cmp_object_as_long(obj, d); } bool cmp_object_as_uchar(const cmp_object_t *obj, uint8_t *c) { switch (obj->type) { case CMP_TYPE_POSITIVE_FIXNUM: case CMP_TYPE_UINT8: *c = obj->as.u8; return true; default: return false; } } bool cmp_object_as_ushort(const cmp_object_t *obj, uint16_t *s) { switch (obj->type) { case CMP_TYPE_POSITIVE_FIXNUM: case CMP_TYPE_UINT8: *s = obj->as.u8; return true; case CMP_TYPE_UINT16: *s = obj->as.u16; return true; default: return false; } } bool cmp_object_as_uint(const cmp_object_t *obj, uint32_t *i) { switch (obj->type) { case CMP_TYPE_POSITIVE_FIXNUM: case CMP_TYPE_UINT8: *i = obj->as.u8; return true; case CMP_TYPE_UINT16: *i = obj->as.u16; return true; case CMP_TYPE_UINT32: *i = obj->as.u32; return true; default: return false; } } bool cmp_object_as_ulong(const cmp_object_t *obj, uint64_t *u) { switch (obj->type) { case CMP_TYPE_POSITIVE_FIXNUM: case CMP_TYPE_UINT8: *u = obj->as.u8; return true; case CMP_TYPE_UINT16: *u = obj->as.u16; return true; case CMP_TYPE_UINT32: *u = obj->as.u32; return true; case CMP_TYPE_UINT64: *u = obj->as.u64; return true; default: return false; } } bool cmp_object_as_uinteger(const cmp_object_t *obj, uint64_t *u) { return cmp_object_as_ulong(obj, u); } #ifndef CMP_NO_FLOAT bool cmp_object_as_float(const cmp_object_t *obj, float *f) { if (obj->type == CMP_TYPE_FLOAT) { *f = obj->as.flt; return true; } return false; } bool cmp_object_as_double(const cmp_object_t *obj, double *d) { if (obj->type == CMP_TYPE_DOUBLE) { *d = obj->as.dbl; return true; } return false; } #endif /* CMP_NO_FLOAT */ bool cmp_object_as_bool(const cmp_object_t *obj, bool *b) { if (obj->type == CMP_TYPE_BOOLEAN) { if (obj->as.boolean) *b = true; else *b = false; return true; } return false; } bool cmp_object_as_str(const cmp_object_t *obj, uint32_t *size) { switch (obj->type) { case CMP_TYPE_FIXSTR: case CMP_TYPE_STR8: case CMP_TYPE_STR16: case CMP_TYPE_STR32: *size = obj->as.str_size; return true; default: return false; } } bool cmp_object_as_bin(const cmp_object_t *obj, uint32_t *size) { switch (obj->type) { case CMP_TYPE_BIN8: case CMP_TYPE_BIN16: case CMP_TYPE_BIN32: *size = obj->as.bin_size; return true; default: return false; } } bool cmp_object_as_array(const cmp_object_t *obj, uint32_t *size) { switch (obj->type) { case CMP_TYPE_FIXARRAY: case CMP_TYPE_ARRAY16: case CMP_TYPE_ARRAY32: *size = obj->as.array_size; return true; default: return false; } } bool cmp_object_as_map(const cmp_object_t *obj, uint32_t *size) { switch (obj->type) { case CMP_TYPE_FIXMAP: case CMP_TYPE_MAP16: case CMP_TYPE_MAP32: *size = obj->as.map_size; return true; default: return false; } } bool cmp_object_as_ext(const cmp_object_t *obj, int8_t *type, uint32_t *size) { switch (obj->type) { case CMP_TYPE_FIXEXT1: case CMP_TYPE_FIXEXT2: case CMP_TYPE_FIXEXT4: case CMP_TYPE_FIXEXT8: case CMP_TYPE_FIXEXT16: case CMP_TYPE_EXT8: case CMP_TYPE_EXT16: case CMP_TYPE_EXT32: *type = obj->as.ext.type; *size = obj->as.ext.size; return true; default: return false; } } bool 
cmp_object_to_str(cmp_ctx_t *ctx, const cmp_object_t *obj, char *data, uint32_t buf_size) { uint32_t str_size = 0; switch (obj->type) { case CMP_TYPE_FIXSTR: case CMP_TYPE_STR8: case CMP_TYPE_STR16: case CMP_TYPE_STR32: str_size = obj->as.str_size; if (str_size >= buf_size) { ctx->error = CMP_ERROR_STR_DATA_LENGTH_TOO_LONG; return false; } if (!ctx->read(ctx, data, str_size)) { ctx->error = CMP_ERROR_DATA_READING; return false; } data[str_size] = 0; return true; default: return false; } } bool cmp_object_to_bin(cmp_ctx_t *ctx, const cmp_object_t *obj, void *data, uint32_t buf_size) { uint32_t bin_size = 0; switch (obj->type) { case CMP_TYPE_BIN8: case CMP_TYPE_BIN16: case CMP_TYPE_BIN32: bin_size = obj->as.bin_size; if (bin_size > buf_size) { ctx->error = CMP_ERROR_BIN_DATA_LENGTH_TOO_LONG; return false; } if (!ctx->read(ctx, data, bin_size)) { ctx->error = CMP_ERROR_DATA_READING; return false; } return true; default: return false; } } /* vi: set et ts=2 sw=2: */ python-ihm-2.7/src/cmp.h000066400000000000000000000525221503573337200152130ustar00rootroot00000000000000/* The MIT License (MIT) Copyright (c) 2020 Charles Gunyon Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ #ifndef CMP_H_INCLUDED #define CMP_H_INCLUDED #if defined(_MSC_VER) && _MSC_VER <= 1800 typedef int bool; #define true 1 #define false 0 typedef unsigned char uint8_t; typedef signed char int8_t; typedef unsigned short uint16_t; typedef short int16_t; typedef unsigned int uint32_t; typedef int int32_t; typedef unsigned long long uint64_t; typedef long long int64_t; #define INT64_C(c) c ## LL #define UINT64_C(c) c ## ULL #else #include #include #endif #include struct cmp_ctx_s; typedef bool (*cmp_reader)(struct cmp_ctx_s *ctx, void *data, size_t limit); typedef bool (*cmp_skipper)(struct cmp_ctx_s *ctx, size_t count); typedef size_t (*cmp_writer)(struct cmp_ctx_s *ctx, const void *data, size_t count); enum { CMP_TYPE_POSITIVE_FIXNUM, /* 0 */ CMP_TYPE_FIXMAP, /* 1 */ CMP_TYPE_FIXARRAY, /* 2 */ CMP_TYPE_FIXSTR, /* 3 */ CMP_TYPE_NIL, /* 4 */ CMP_TYPE_BOOLEAN, /* 5 */ CMP_TYPE_BIN8, /* 6 */ CMP_TYPE_BIN16, /* 7 */ CMP_TYPE_BIN32, /* 8 */ CMP_TYPE_EXT8, /* 9 */ CMP_TYPE_EXT16, /* 10 */ CMP_TYPE_EXT32, /* 11 */ CMP_TYPE_FLOAT, /* 12 */ CMP_TYPE_DOUBLE, /* 13 */ CMP_TYPE_UINT8, /* 14 */ CMP_TYPE_UINT16, /* 15 */ CMP_TYPE_UINT32, /* 16 */ CMP_TYPE_UINT64, /* 17 */ CMP_TYPE_SINT8, /* 18 */ CMP_TYPE_SINT16, /* 19 */ CMP_TYPE_SINT32, /* 20 */ CMP_TYPE_SINT64, /* 21 */ CMP_TYPE_FIXEXT1, /* 22 */ CMP_TYPE_FIXEXT2, /* 23 */ CMP_TYPE_FIXEXT4, /* 24 */ CMP_TYPE_FIXEXT8, /* 25 */ CMP_TYPE_FIXEXT16, /* 26 */ CMP_TYPE_STR8, /* 27 */ CMP_TYPE_STR16, /* 28 */ CMP_TYPE_STR32, /* 29 */ CMP_TYPE_ARRAY16, /* 30 */ CMP_TYPE_ARRAY32, /* 31 */ CMP_TYPE_MAP16, /* 32 */ CMP_TYPE_MAP32, /* 33 */ CMP_TYPE_NEGATIVE_FIXNUM /* 34 */ }; typedef struct cmp_ext_s { int8_t type; uint32_t size; } cmp_ext_t; union cmp_object_data_u { bool boolean; uint8_t u8; uint16_t u16; uint32_t u32; uint64_t u64; int8_t s8; int16_t s16; int32_t s32; int64_t s64; #ifndef CMP_NO_FLOAT float flt; double dbl; #endif /* CMP_NO_FLOAT */ uint32_t array_size; uint32_t map_size; uint32_t str_size; uint32_t bin_size; cmp_ext_t ext; }; typedef struct cmp_ctx_s { uint8_t error; void *buf; cmp_reader read; cmp_skipper skip; cmp_writer write; } cmp_ctx_t; typedef struct cmp_object_s { uint8_t type; union cmp_object_data_u as; } cmp_object_t; #ifdef __cplusplus extern "C" { #endif /* * ============================================================================ * === Main API * ============================================================================ */ /* * Initializes a CMP context * * If you don't intend to read, `read` may be NULL, but calling `*read*` * functions will crash; there is no check. * * `skip` may be NULL, in which case skipping functions will use `read`. * * If you don't intend to write, `write` may be NULL, but calling `*write*` * functions will crash; there is no check. 
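 *
 * A minimal sketch of setting up a context over an in-memory buffer. The
 * `mem_buf` type, the callbacks and the `storage` array below are
 * illustrative, not part of CMP (error handling and <string.h> omitted
 * for brevity):
 *
 *   struct mem_buf { uint8_t *data; size_t pos, size; };
 *
 *   static bool mem_read(cmp_ctx_t *ctx, void *data, size_t limit) {
 *     struct mem_buf *m = (struct mem_buf *)ctx->buf;
 *     if (m->pos + limit > m->size) return false;
 *     memcpy(data, m->data + m->pos, limit);
 *     m->pos += limit;
 *     return true;
 *   }
 *
 *   static size_t mem_write(cmp_ctx_t *ctx, const void *data, size_t count) {
 *     struct mem_buf *m = (struct mem_buf *)ctx->buf;
 *     if (m->pos + count > m->size) return 0;
 *     memcpy(m->data + m->pos, data, count);
 *     m->pos += count;
 *     return count;
 *   }
 *
 *   struct mem_buf buf = { storage, 0, sizeof(storage) };
 *   cmp_ctx_t cmp;
 *   cmp_init(&cmp, &buf, mem_read, NULL, mem_write);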
*/ void cmp_init(cmp_ctx_t *ctx, void *buf, cmp_reader read, cmp_skipper skip, cmp_writer write); /* Returns CMP's version */ uint32_t cmp_version(void); /* Returns the MessagePack version employed by CMP */ uint32_t cmp_mp_version(void); /* Returns a string description of a CMP context's error */ const char* cmp_strerror(const cmp_ctx_t *ctx); /* Writes a signed integer to the backend */ bool cmp_write_integer(cmp_ctx_t *ctx, int64_t d); /* Writes an unsigned integer to the backend */ bool cmp_write_uinteger(cmp_ctx_t *ctx, uint64_t u); /* * Writes a floating-point value (either single or double-precision) to the * backend */ #ifndef CMP_NO_FLOAT bool cmp_write_decimal(cmp_ctx_t *ctx, double d); #endif /* CMP_NO_FLOAT */ /* Writes NULL to the backend */ bool cmp_write_nil(cmp_ctx_t *ctx); /* Writes true to the backend */ bool cmp_write_true(cmp_ctx_t *ctx); /* Writes false to the backend */ bool cmp_write_false(cmp_ctx_t *ctx); /* Writes a boolean value to the backend */ bool cmp_write_bool(cmp_ctx_t *ctx, bool b); /* * Writes an unsigned char's value to the backend as a boolean. This is useful * if you are using a different boolean type in your application. */ bool cmp_write_u8_as_bool(cmp_ctx_t *ctx, uint8_t b); /* * Writes a string to the backend; according to the MessagePack spec, this must * be encoded using UTF-8, but CMP leaves that job up to the programmer. */ bool cmp_write_str(cmp_ctx_t *ctx, const char *data, uint32_t size); /* * Writes a string to the backend. This avoids using the STR8 marker, which * is unsupported by MessagePack v4, the version implemented by many other * MessagePack libraries. No encoding is assumed in this case, not that it * matters. */ bool cmp_write_str_v4(cmp_ctx_t *ctx, const char *data, uint32_t size); /* * Writes the string marker to the backend. This is useful if you are writing * data in chunks instead of a single shot. */ bool cmp_write_str_marker(cmp_ctx_t *ctx, uint32_t size); /* * Writes the string marker to the backend. This is useful if you are writing * data in chunks instead of a single shot. This avoids using the STR8 * marker, which is unsupported by MessagePack v4, the version implemented by * many other MessagePack libraries. No encoding is assumed in this case, not * that it matters. */ bool cmp_write_str_marker_v4(cmp_ctx_t *ctx, uint32_t size); /* Writes binary data to the backend */ bool cmp_write_bin(cmp_ctx_t *ctx, const void *data, uint32_t size); /* * Writes the binary data marker to the backend. This is useful if you are * writing data in chunks instead of a single shot. */ bool cmp_write_bin_marker(cmp_ctx_t *ctx, uint32_t size); /* Writes an array to the backend. */ bool cmp_write_array(cmp_ctx_t *ctx, uint32_t size); /* Writes a map to the backend. */ bool cmp_write_map(cmp_ctx_t *ctx, uint32_t size); /* Writes an extended type to the backend */ bool cmp_write_ext(cmp_ctx_t *ctx, int8_t type, uint32_t size, const void *data); /* * Writes the extended type marker to the backend. This is useful if you want * to write the type's data in chunks instead of a single shot. */ bool cmp_write_ext_marker(cmp_ctx_t *ctx, int8_t type, uint32_t size); /* Writes an object to the backend */ bool cmp_write_object(cmp_ctx_t *ctx, const cmp_object_t *obj); /* * Writes an object to the backend. This avoids using the STR8 marker, which * is unsupported by MessagePack v4, the version implemented by many other * MessagePack libraries. 
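 *
 * As a rough illustration of the write API above, a two-entry map could be
 * emitted with the following calls (sketch; return values should be checked
 * in real code):
 *
 *   cmp_write_map(&cmp, 2);
 *   cmp_write_str(&cmp, "id", 2);
 *   cmp_write_integer(&cmp, 42);
 *   cmp_write_str(&cmp, "name", 4);
 *   cmp_write_str(&cmp, "model", 5);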
*/ bool cmp_write_object_v4(cmp_ctx_t *ctx, const cmp_object_t *obj); /* Reads a signed integer that fits inside a signed char */ bool cmp_read_char(cmp_ctx_t *ctx, int8_t *c); /* Reads a signed integer that fits inside a signed short */ bool cmp_read_short(cmp_ctx_t *ctx, int16_t *s); /* Reads a signed integer that fits inside a signed int */ bool cmp_read_int(cmp_ctx_t *ctx, int32_t *i); /* Reads a signed integer that fits inside a signed long */ bool cmp_read_long(cmp_ctx_t *ctx, int64_t *d); /* Reads a signed integer */ bool cmp_read_integer(cmp_ctx_t *ctx, int64_t *d); /* Reads an unsigned integer that fits inside an unsigned char */ bool cmp_read_uchar(cmp_ctx_t *ctx, uint8_t *c); /* Reads an unsigned integer that fits inside an unsigned short */ bool cmp_read_ushort(cmp_ctx_t *ctx, uint16_t *s); /* Reads an unsigned integer that fits inside an unsigned int */ bool cmp_read_uint(cmp_ctx_t *ctx, uint32_t *i); /* Reads an unsigned integer that fits inside an unsigned long */ bool cmp_read_ulong(cmp_ctx_t *ctx, uint64_t *u); /* Reads an unsigned integer */ bool cmp_read_uinteger(cmp_ctx_t *ctx, uint64_t *u); /* * Reads a floating point value (either single or double-precision) from the * backend */ #ifndef CMP_NO_FLOAT bool cmp_read_decimal(cmp_ctx_t *ctx, double *d); #endif /* CMP_NO_FLOAT */ /* "Reads" (more like "skips") a NULL value from the backend */ bool cmp_read_nil(cmp_ctx_t *ctx); /* Reads a boolean from the backend */ bool cmp_read_bool(cmp_ctx_t *ctx, bool *b); /* * Reads a boolean as an unsigned char from the backend; this is useful if your * application uses a different boolean type. */ bool cmp_read_bool_as_u8(cmp_ctx_t *ctx, uint8_t *b); /* Reads a string's size from the backend */ bool cmp_read_str_size(cmp_ctx_t *ctx, uint32_t *size); /* * Reads a string from the backend; according to the spec, the string's data * ought to be encoded using UTF-8, but CMP leaves that job up to the programmer. */ bool cmp_read_str(cmp_ctx_t *ctx, char *data, uint32_t *size); /* Reads the size of packed binary data from the backend */ bool cmp_read_bin_size(cmp_ctx_t *ctx, uint32_t *size); /* Reads packed binary data from the backend */ bool cmp_read_bin(cmp_ctx_t *ctx, void *data, uint32_t *size); /* Reads an array from the backend */ bool cmp_read_array(cmp_ctx_t *ctx, uint32_t *size); /* Reads a map from the backend */ bool cmp_read_map(cmp_ctx_t *ctx, uint32_t *size); /* Reads the extended type's marker from the backend */ bool cmp_read_ext_marker(cmp_ctx_t *ctx, int8_t *type, uint32_t *size); /* Reads an extended type from the backend */ bool cmp_read_ext(cmp_ctx_t *ctx, int8_t *type, uint32_t *size, void *data); /* Reads an object from the backend */ bool cmp_read_object(cmp_ctx_t *ctx, cmp_object_t *obj); /* * Skips the next object from the backend. If that object is an array or map, * this function will: * - If `obj` is not `NULL`, fill in `obj` with that object * - Set `ctx->error` to `SKIP_DEPTH_LIMIT_EXCEEDED_ERROR` * - Return `false` * Otherwise: * - (Don't touch `obj`) * - Return `true` */ bool cmp_skip_object(cmp_ctx_t *ctx, cmp_object_t *obj); /* * This is similar to `cmp_skip_object`, except it tolerates flat arrays and * maps. 
If when skipping such an array or map this function encounters * another array/map, it will: * - If `obj` is not `NULL`, fill in `obj` with that (nested) object * - Set `ctx->error` to `SKIP_DEPTH_LIMIT_EXCEEDED_ERROR` * - Return `false` * Otherwise: * - (Don't touch `obj`) * - Return `true` * * WARNING: This can cause your application to spend an unbounded amount of * time reading nested data structures. Unless you completely trust * the data source, you should use `cmp_skip_object`. */ bool cmp_skip_object_flat(cmp_ctx_t *ctx, cmp_object_t *obj); /* * This is similar to `cmp_skip_object`, except it will continually skip * nested data structures. * * WARNING: This can cause your application to spend an unbounded amount of * time reading nested data structures. Unless you completely trust * the data source, you should use `cmp_skip_object`. */ bool cmp_skip_object_no_limit(cmp_ctx_t *ctx); /* * WARNING: THIS FUNCTION IS DEPRECATED AND WILL BE REMOVED IN A FUTURE RELEASE * * There is no way to track depths across elements without allocation. For * example, an array constructed as: `[ [] [] [] [] [] [] [] [] [] [] ]` * should be able to be skipped with `cmp_skip_object_limit(&cmp, &obj, 2)`. * However, because we cannot track depth across the elements, there's no way * to reset it after descending down into each element. * * This is similar to `cmp_skip_object`, except it tolerates up to `limit` * levels of nesting. For example, in order to skip an array that contains a * map, call `cmp_skip_object_limit(ctx, &obj, 2)`. Or in other words, * `cmp_skip_object(ctx, &obj)` acts similarly to `cmp_skip_object_limit(ctx, * &obj, 0)` * * Specifically, `limit` refers to depth, not breadth. So in order to skip an * array that contains two arrays that each contain 3 strings, you would call * `cmp_skip_object_limit(ctx, &obj, 2). In order to skip an array that * contains 4 arrays that each contain 1 string, you would still call * `cmp_skip_object_limit(ctx, &obj, 2). 
*/ bool cmp_skip_object_limit(cmp_ctx_t *ctx, cmp_object_t *obj, uint32_t limit) #ifdef __GNUC__ __attribute__((deprecated)) #endif ; #ifdef _MSC_VER #pragma deprecated(cmp_skip_object_limit) #endif /* * ============================================================================ * === Specific API * ============================================================================ */ bool cmp_write_pfix(cmp_ctx_t *ctx, uint8_t c); bool cmp_write_nfix(cmp_ctx_t *ctx, int8_t c); bool cmp_write_sfix(cmp_ctx_t *ctx, int8_t c); bool cmp_write_s8(cmp_ctx_t *ctx, int8_t c); bool cmp_write_s16(cmp_ctx_t *ctx, int16_t s); bool cmp_write_s32(cmp_ctx_t *ctx, int32_t i); bool cmp_write_s64(cmp_ctx_t *ctx, int64_t l); bool cmp_write_ufix(cmp_ctx_t *ctx, uint8_t c); bool cmp_write_u8(cmp_ctx_t *ctx, uint8_t c); bool cmp_write_u16(cmp_ctx_t *ctx, uint16_t s); bool cmp_write_u32(cmp_ctx_t *ctx, uint32_t i); bool cmp_write_u64(cmp_ctx_t *ctx, uint64_t l); #ifndef CMP_NO_FLOAT bool cmp_write_float(cmp_ctx_t *ctx, float f); bool cmp_write_double(cmp_ctx_t *ctx, double d); #endif /* CMP_NO_FLOAT */ bool cmp_write_fixstr_marker(cmp_ctx_t *ctx, uint8_t size); bool cmp_write_fixstr(cmp_ctx_t *ctx, const char *data, uint8_t size); bool cmp_write_str8_marker(cmp_ctx_t *ctx, uint8_t size); bool cmp_write_str8(cmp_ctx_t *ctx, const char *data, uint8_t size); bool cmp_write_str16_marker(cmp_ctx_t *ctx, uint16_t size); bool cmp_write_str16(cmp_ctx_t *ctx, const char *data, uint16_t size); bool cmp_write_str32_marker(cmp_ctx_t *ctx, uint32_t size); bool cmp_write_str32(cmp_ctx_t *ctx, const char *data, uint32_t size); bool cmp_write_bin8_marker(cmp_ctx_t *ctx, uint8_t size); bool cmp_write_bin8(cmp_ctx_t *ctx, const void *data, uint8_t size); bool cmp_write_bin16_marker(cmp_ctx_t *ctx, uint16_t size); bool cmp_write_bin16(cmp_ctx_t *ctx, const void *data, uint16_t size); bool cmp_write_bin32_marker(cmp_ctx_t *ctx, uint32_t size); bool cmp_write_bin32(cmp_ctx_t *ctx, const void *data, uint32_t size); bool cmp_write_fixarray(cmp_ctx_t *ctx, uint8_t size); bool cmp_write_array16(cmp_ctx_t *ctx, uint16_t size); bool cmp_write_array32(cmp_ctx_t *ctx, uint32_t size); bool cmp_write_fixmap(cmp_ctx_t *ctx, uint8_t size); bool cmp_write_map16(cmp_ctx_t *ctx, uint16_t size); bool cmp_write_map32(cmp_ctx_t *ctx, uint32_t size); bool cmp_write_fixext1_marker(cmp_ctx_t *ctx, int8_t type); bool cmp_write_fixext1(cmp_ctx_t *ctx, int8_t type, const void *data); bool cmp_write_fixext2_marker(cmp_ctx_t *ctx, int8_t type); bool cmp_write_fixext2(cmp_ctx_t *ctx, int8_t type, const void *data); bool cmp_write_fixext4_marker(cmp_ctx_t *ctx, int8_t type); bool cmp_write_fixext4(cmp_ctx_t *ctx, int8_t type, const void *data); bool cmp_write_fixext8_marker(cmp_ctx_t *ctx, int8_t type); bool cmp_write_fixext8(cmp_ctx_t *ctx, int8_t type, const void *data); bool cmp_write_fixext16_marker(cmp_ctx_t *ctx, int8_t type); bool cmp_write_fixext16(cmp_ctx_t *ctx, int8_t type, const void *data); bool cmp_write_ext8_marker(cmp_ctx_t *ctx, int8_t type, uint8_t size); bool cmp_write_ext8(cmp_ctx_t *ctx, int8_t type, uint8_t size, const void *data); bool cmp_write_ext16_marker(cmp_ctx_t *ctx, int8_t type, uint16_t size); bool cmp_write_ext16(cmp_ctx_t *ctx, int8_t type, uint16_t size, const void *data); bool cmp_write_ext32_marker(cmp_ctx_t *ctx, int8_t type, uint32_t size); bool cmp_write_ext32(cmp_ctx_t *ctx, int8_t type, uint32_t size, const void *data); bool cmp_read_pfix(cmp_ctx_t *ctx, uint8_t *c); bool cmp_read_nfix(cmp_ctx_t *ctx, int8_t *c); bool 
cmp_read_sfix(cmp_ctx_t *ctx, int8_t *c); bool cmp_read_s8(cmp_ctx_t *ctx, int8_t *c); bool cmp_read_s16(cmp_ctx_t *ctx, int16_t *s); bool cmp_read_s32(cmp_ctx_t *ctx, int32_t *i); bool cmp_read_s64(cmp_ctx_t *ctx, int64_t *l); bool cmp_read_ufix(cmp_ctx_t *ctx, uint8_t *c); bool cmp_read_u8(cmp_ctx_t *ctx, uint8_t *c); bool cmp_read_u16(cmp_ctx_t *ctx, uint16_t *s); bool cmp_read_u32(cmp_ctx_t *ctx, uint32_t *i); bool cmp_read_u64(cmp_ctx_t *ctx, uint64_t *l); #ifndef CMP_NO_FLOAT bool cmp_read_float(cmp_ctx_t *ctx, float *f); bool cmp_read_double(cmp_ctx_t *ctx, double *d); #endif /* CMP_NO_FLOAT */ bool cmp_read_fixext1_marker(cmp_ctx_t *ctx, int8_t *type); bool cmp_read_fixext1(cmp_ctx_t *ctx, int8_t *type, void *data); bool cmp_read_fixext2_marker(cmp_ctx_t *ctx, int8_t *type); bool cmp_read_fixext2(cmp_ctx_t *ctx, int8_t *type, void *data); bool cmp_read_fixext4_marker(cmp_ctx_t *ctx, int8_t *type); bool cmp_read_fixext4(cmp_ctx_t *ctx, int8_t *type, void *data); bool cmp_read_fixext8_marker(cmp_ctx_t *ctx, int8_t *type); bool cmp_read_fixext8(cmp_ctx_t *ctx, int8_t *type, void *data); bool cmp_read_fixext16_marker(cmp_ctx_t *ctx, int8_t *type); bool cmp_read_fixext16(cmp_ctx_t *ctx, int8_t *type, void *data); bool cmp_read_ext8_marker(cmp_ctx_t *ctx, int8_t *type, uint8_t *size); bool cmp_read_ext8(cmp_ctx_t *ctx, int8_t *type, uint8_t *size, void *data); bool cmp_read_ext16_marker(cmp_ctx_t *ctx, int8_t *type, uint16_t *size); bool cmp_read_ext16(cmp_ctx_t *ctx, int8_t *type, uint16_t *size, void *data); bool cmp_read_ext32_marker(cmp_ctx_t *ctx, int8_t *type, uint32_t *size); bool cmp_read_ext32(cmp_ctx_t *ctx, int8_t *type, uint32_t *size, void *data); /* * ============================================================================ * === Object API * ============================================================================ */ bool cmp_object_is_char(const cmp_object_t *obj); bool cmp_object_is_short(const cmp_object_t *obj); bool cmp_object_is_int(const cmp_object_t *obj); bool cmp_object_is_long(const cmp_object_t *obj); bool cmp_object_is_sinteger(const cmp_object_t *obj); bool cmp_object_is_uchar(const cmp_object_t *obj); bool cmp_object_is_ushort(const cmp_object_t *obj); bool cmp_object_is_uint(const cmp_object_t *obj); bool cmp_object_is_ulong(const cmp_object_t *obj); bool cmp_object_is_uinteger(const cmp_object_t *obj); bool cmp_object_is_float(const cmp_object_t *obj); bool cmp_object_is_double(const cmp_object_t *obj); bool cmp_object_is_nil(const cmp_object_t *obj); bool cmp_object_is_bool(const cmp_object_t *obj); bool cmp_object_is_str(const cmp_object_t *obj); bool cmp_object_is_bin(const cmp_object_t *obj); bool cmp_object_is_array(const cmp_object_t *obj); bool cmp_object_is_map(const cmp_object_t *obj); bool cmp_object_is_ext(const cmp_object_t *obj); bool cmp_object_as_char(const cmp_object_t *obj, int8_t *c); bool cmp_object_as_short(const cmp_object_t *obj, int16_t *s); bool cmp_object_as_int(const cmp_object_t *obj, int32_t *i); bool cmp_object_as_long(const cmp_object_t *obj, int64_t *d); bool cmp_object_as_sinteger(const cmp_object_t *obj, int64_t *d); bool cmp_object_as_uchar(const cmp_object_t *obj, uint8_t *c); bool cmp_object_as_ushort(const cmp_object_t *obj, uint16_t *s); bool cmp_object_as_uint(const cmp_object_t *obj, uint32_t *i); bool cmp_object_as_ulong(const cmp_object_t *obj, uint64_t *u); bool cmp_object_as_uinteger(const cmp_object_t *obj, uint64_t *u); bool cmp_object_as_float(const cmp_object_t *obj, float *f); bool 
cmp_object_as_double(const cmp_object_t *obj, double *d); bool cmp_object_as_bool(const cmp_object_t *obj, bool *b); bool cmp_object_as_str(const cmp_object_t *obj, uint32_t *size); bool cmp_object_as_bin(const cmp_object_t *obj, uint32_t *size); bool cmp_object_as_array(const cmp_object_t *obj, uint32_t *size); bool cmp_object_as_map(const cmp_object_t *obj, uint32_t *size); bool cmp_object_as_ext(const cmp_object_t *obj, int8_t *type, uint32_t *size); bool cmp_object_to_str(cmp_ctx_t *ctx, const cmp_object_t *obj, char *data, uint32_t buf_size); bool cmp_object_to_bin(cmp_ctx_t *ctx, const cmp_object_t *obj, void *data, uint32_t buf_size); #ifdef __cplusplus } /* extern "C" */ #endif /* * ============================================================================ * === Backwards compatibility defines * ============================================================================ */ #define cmp_write_int cmp_write_integer #define cmp_write_sint cmp_write_integer #define cmp_write_sinteger cmp_write_integer #define cmp_write_uint cmp_write_uinteger #define cmp_read_sinteger cmp_read_integer #endif /* CMP_H_INCLUDED */ /* vi: set et ts=2 sw=2: */ python-ihm-2.7/src/ihm_format.c000066400000000000000000003105061503573337200165530ustar00rootroot00000000000000/** \file ihm_format.c Routines for handling mmCIF or BinaryCIF format files. * * The file is read sequentially. All values for desired keywords in * desired categories are collected (other parts of the file are ignored). * * For mmCIF, at the end of the file and each save frame a callback function * for each category is called to process the data. In the case of mmCIF * loops, this callback will be called multiple times, once for each entry * in the loop. * * For BinaryCIF, the category callback will be called as each category * is encountered in the file, once per row. */ #include "ihm_format.h" #include #include #include #include #if defined(_WIN32) || defined(_WIN64) # include # include #else # include #endif #include #include #include "cmp.h" #define INT_TO_POINTER(i) ((void *) (long) (i)) #define POINTER_TO_INT(p) ((int) (long) (p)) #if defined(_WIN32) || defined(_WIN64) # define strcasecmp _stricmp # define usleep Sleep #endif /* Allocate memory; unlike malloc() this never returns NULL (a failure will terminate the program) */ static void *ihm_malloc(size_t size) { void *ret = malloc(size); if (ret) { return ret; } else { fprintf(stderr, "Memory allocation failed\n"); exit(1); } } /* Allocate memory; unlike realloc() this never returns NULL (a failure will terminate the program) */ static void *ihm_realloc(void *ptr, size_t size) { void *ret = realloc(ptr, size); if (ret) { return ret; } else { fprintf(stderr, "Memory allocation failed\n"); exit(1); } } /* Free the memory used by an ihm_error */ void ihm_error_free(struct ihm_error *err) { free(err->msg); free(err); } /* Set the error indicator */ void ihm_error_set(struct ihm_error **err, IHMErrorCode code, const char *format, ...) { va_list ap; int len; char *msg = NULL; assert(err && !*err); /* First, determine length needed for complete string */ va_start(ap, format); len = vsnprintf(msg, 0, format, ap); va_end(ap); msg = (char *)ihm_realloc(msg, len + 1); va_start(ap, format); vsnprintf(msg, len + 1, format, ap); va_end(ap); *err = (struct ihm_error *)ihm_malloc(sizeof(struct ihm_error)); (*err)->code = code; (*err)->msg = msg; } /* Move error info from `from_err` to `to_err`, if `from_err` is set. `to_err` must not have already been set with an error. 
Return true iff info was moved. */ static bool ihm_error_move(struct ihm_error **to_err, struct ihm_error **from_err) { assert(to_err && !*to_err); assert(from_err); if (*from_err) { *to_err = *from_err; *from_err = NULL; return true; } else { return false; } } /* A variable-sized array of elements */ struct ihm_array { /* The array data itself */ void *data; /* The number of elements in the array */ size_t len; /* The size in bytes of each element */ size_t element_size; /* The currently-allocated number of elements in the array (>= len) */ size_t capacity; }; /* Make a new empty ihm_array */ static struct ihm_array *ihm_array_new(size_t element_size) { struct ihm_array *a = (struct ihm_array *)ihm_malloc( sizeof(struct ihm_array)); a->len = 0; a->element_size = element_size; a->capacity = 8; a->data = ihm_malloc(a->capacity * a->element_size); return a; } /* Release the memory used by an ihm_array */ static void ihm_array_free(struct ihm_array *a) { free(a->data); free(a); } /* Set the number of elements in the array to zero */ static void ihm_array_clear(struct ihm_array *a) { a->len = 0; } /* Return a reference to the ith element in the array, cast to the given type */ #define ihm_array_index(a, t, i) (((t*)(a)->data)[(i)]) /* Add a new element to the end of the array */ static void ihm_array_append(struct ihm_array *a, void *element) { a->len++; if (a->len > a->capacity) { a->capacity *= 2; a->data = ihm_realloc(a->data, a->capacity * a->element_size); } memcpy((char *)a->data + (a->len - 1) * a->element_size, element, a->element_size); } /* A variable-length string buffer */ struct ihm_string { /* The string buffer itself */ char *str; /* The length of the string (may be different from strlen(str) if str contains embedded nulls); str[len] is always a null byte */ size_t len; /* The allocated size of str; never less than len+1 (to allow for null terminator) */ size_t capacity; }; /* Make a new ihm_string of zero length */ static struct ihm_string *ihm_string_new(void) { struct ihm_string *s = (struct ihm_string *)ihm_malloc( sizeof(struct ihm_string)); s->len = 0; s->capacity = 64; s->str = (char *)ihm_malloc(s->capacity); /* Ensure string is null terminated */ s->str[0] = '\0'; return s; } /* Free the memory used by an ihm_string */ static void ihm_string_free(struct ihm_string *s) { free(s->str); free(s); } /* Erase len characters starting at pos from an ihm_string */ static void ihm_string_erase(struct ihm_string *s, size_t pos, size_t len) { memmove(s->str + pos, s->str + pos + len, s->len + 1 - pos - len); s->len -= len; } /* Set the size of the string to len. If shorter than the current length, the string is truncated. 
If longer, memory (with undefined contents) is added to the end of the string */ static void ihm_string_set_size(struct ihm_string *s, size_t len) { if (len >= s->capacity) { s->capacity *= 2; if (len >= s->capacity) { s->capacity = len + 1; } s->str = (char *)ihm_realloc(s->str, s->capacity); } s->len = len; s->str[s->len] = '\0'; } /* Set the ihm_string contents to be equal to (null-terminated) str */ static void ihm_string_assign(struct ihm_string *s, const char *str) { size_t len = strlen(str); ihm_string_set_size(s, len); memcpy(s->str, str, len); } /* Set the ihm_string contents to be equal to str of given size */ static void ihm_string_assign_n(struct ihm_string *s, const char *str, size_t strsz) { ihm_string_set_size(s, strsz); memcpy(s->str, str, strsz); } /* Append str to the end of the ihm_string */ static void ihm_string_append(struct ihm_string *s, const char *str) { size_t len = strlen(str); size_t oldlen = s->len; ihm_string_set_size(s, s->len + len); memcpy(s->str + oldlen, str, len); } struct ihm_key_value { char *key; void *value; }; /* Function to free mapping values */ typedef void (*ihm_destroy_callback)(void *data); /* Simple case-insensitive string to struct* mapping using a binary search */ struct ihm_mapping { /* Array of struct ihm_key_value */ struct ihm_array *keyvalues; /* Function to free mapping values */ ihm_destroy_callback value_destroy_func; }; /* Make a new mapping from case-insensitive strings to arbitrary pointers. The mapping uses a simple binary search (more memory efficient than a hash table and generally faster too since the number of keys is quite small). */ struct ihm_mapping *ihm_mapping_new(ihm_destroy_callback value_destroy_func) { struct ihm_mapping *m = (struct ihm_mapping *)ihm_malloc( sizeof(struct ihm_mapping)); m->keyvalues = ihm_array_new(sizeof(struct ihm_key_value)); m->value_destroy_func = value_destroy_func; return m; } /* Clear all key:value pairs from the mapping */ static void ihm_mapping_remove_all(struct ihm_mapping *m) { unsigned i; for (i = 0; i < m->keyvalues->len; ++i) { (*m->value_destroy_func)(ihm_array_index(m->keyvalues, struct ihm_key_value, i).value); } ihm_array_clear(m->keyvalues); } /* Free memory used by a mapping */ static void ihm_mapping_free(struct ihm_mapping *m) { ihm_mapping_remove_all(m); ihm_array_free(m->keyvalues); free(m); } /* Add a new key:value pair to the mapping. key is assumed to point to memory that is managed elsewhere (and must be valid as long as the mapping exists) while value is freed using value_destroy_func when the mapping is freed. Neither keys or nor values should ever be NULL. */ static void ihm_mapping_insert(struct ihm_mapping *m, char *key, void *value) { struct ihm_key_value kv; kv.key = key; kv.value = value; ihm_array_append(m->keyvalues, &kv); } static int mapping_compare(const void *a, const void *b) { const struct ihm_key_value *kv1, *kv2; kv1 = (const struct ihm_key_value *)a; kv2 = (const struct ihm_key_value *)b; return strcasecmp(kv1->key, kv2->key); } /* Put a mapping's key:value pairs in sorted order. This must be done before ihm_mapping_lookup is used. */ static void ihm_mapping_sort(struct ihm_mapping *m) { qsort(m->keyvalues->data, m->keyvalues->len, m->keyvalues->element_size, mapping_compare); } /* Look up key in the mapping and return the corresponding value, or NULL if not present. This uses a simple binary search so requires that ihm_mapping_sort() has been called first. 
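   An illustrative sequence (lookups are case-insensitive, matching
   mapping_compare above):

     ihm_mapping_insert(m, "atom_site", category);
     ihm_mapping_sort(m);
     category = (struct ihm_category *)ihm_mapping_lookup(m, "ATOM_SITE");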
*/ static void *ihm_mapping_lookup(struct ihm_mapping *m, char *key) { int left = 0, right = m->keyvalues->len - 1; while (left <= right) { int mid = (left + right) / 2; int cmp = strcasecmp(ihm_array_index(m->keyvalues, struct ihm_key_value, mid).key, key); if (cmp < 0) { left = mid + 1; } else if (cmp > 0) { right = mid - 1; } else { return ihm_array_index(m->keyvalues, struct ihm_key_value, mid).value; } } return NULL; } /* Callback passed to ihm_mapping_foreach */ typedef void (*ihm_foreach_callback)(void *key, void *value, void *user_data); /* Call the given function, passing it key, value, and data, for each key:value pair in the mapping. */ static void ihm_mapping_foreach(struct ihm_mapping *m, ihm_foreach_callback func, void *data) { unsigned i; for (i = 0; i < m->keyvalues->len; ++i) { struct ihm_key_value *kv = &ihm_array_index(m->keyvalues, struct ihm_key_value, i); (*func)(kv->key, kv->value, data); } } /* Free the memory used by a struct ihm_keyword */ static void ihm_keyword_free(void *value) { struct ihm_keyword *key = (struct ihm_keyword *)value; free(key->name); if (key->own_data && key->in_file && key->type == IHM_STRING) { free(key->data.str); } free(key); } /* A category in an mmCIF file. */ struct ihm_category { char *name; /* All keywords that we want to extract in this category */ struct ihm_mapping *keyword_map; /* Function called when we have all data for this category */ ihm_category_callback data_callback; /* Function called at the end of each save frame */ ihm_category_callback end_frame_callback; /* Function called at the very end of the data block */ ihm_category_callback finalize_callback; /* Data passed to callbacks */ void *data; /* Function to release data */ ihm_free_callback free_func; }; /* Keep track of data used while reading an mmCIF or BinaryCIF file. */ struct ihm_reader { /* The file handle to read from */ struct ihm_file *fh; /* true for BinaryCIF, false for mmCIF */ bool binary; /* The current line number in the file */ int linenum; /* Temporary buffer for string data. For mmCIF, this is used for multiline tokens, to contain the entire contents of the lines */ struct ihm_string *tmp_str; /* All tokens parsed from the last line */ struct ihm_array *tokens; /* The next token to be returned */ unsigned token_index; /* All categories that we want to extract from the file */ struct ihm_mapping *category_map; /* Handler for unknown categories */ ihm_unknown_category_callback unknown_category_callback; /* Data passed to unknown category callback */ void *unknown_category_data; /* Function to release unknown category data */ ihm_free_callback unknown_category_free_func; /* Handler for unknown keywords */ ihm_unknown_keyword_callback unknown_keyword_callback; /* Data passed to unknown keyword callback */ void *unknown_keyword_data; /* Function to release unknown keyword data */ ihm_free_callback unknown_keyword_free_func; /* msgpack context for reading BinaryCIF file */ cmp_ctx_t cmp; /* Number of BinaryCIF data blocks left to read, or -1 if header not read yet */ int num_blocks_left; /* Any errors raised in the CMP read callback */ struct ihm_error *cmp_read_err; }; typedef enum { MMCIF_TOKEN_VALUE = 1, MMCIF_TOKEN_OMITTED, MMCIF_TOKEN_UNKNOWN, MMCIF_TOKEN_LOOP, MMCIF_TOKEN_DATA, MMCIF_TOKEN_SAVE, MMCIF_TOKEN_VARIABLE } ihm_token_type; /* Part of a string that corresponds to an mmCIF token. The memory pointed to by str is valid only until the next line is read from the file. 
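   For example, the mmCIF line "_entry.id 1YTI" is broken into two tokens: a
   MMCIF_TOKEN_VARIABLE ("_entry.id") followed by a MMCIF_TOKEN_VALUE ("1YTI").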
*/ struct ihm_token { ihm_token_type type; char *str; }; /* Free memory used by a struct ihm_category */ static void ihm_category_free(void *value) { struct ihm_category *cat = (struct ihm_category *)value; ihm_mapping_free(cat->keyword_map); free(cat->name); if (cat->free_func) { (*cat->free_func) (cat->data); } free(cat); } /* Make a new struct ihm_category */ struct ihm_category *ihm_category_new(struct ihm_reader *reader, const char *name, ihm_category_callback data_callback, ihm_category_callback end_frame_callback, ihm_category_callback finalize_callback, void *data, ihm_free_callback free_func) { struct ihm_category *category = (struct ihm_category *)ihm_malloc(sizeof(struct ihm_category)); category->name = strdup(name); category->data_callback = data_callback; category->end_frame_callback = end_frame_callback; category->finalize_callback = finalize_callback; category->data = data; category->free_func = free_func; category->keyword_map = ihm_mapping_new(ihm_keyword_free); ihm_mapping_insert(reader->category_map, category->name, category); return category; } /* Add a new struct ihm_keyword (of undefined type) to a category. */ static struct ihm_keyword *ihm_keyword_new(struct ihm_category *category, const char *name) { struct ihm_keyword *key = (struct ihm_keyword *)ihm_malloc(sizeof(struct ihm_keyword)); key->name = strdup(name); key->own_data = false; key->in_file = false; ihm_mapping_insert(category->keyword_map, key->name, key); key->own_data = false; return key; } /* Add a new integer ihm_keyword to a category. */ struct ihm_keyword *ihm_keyword_int_new(struct ihm_category *category, const char *name) { struct ihm_keyword *key = ihm_keyword_new(category, name); key->type = IHM_INT; return key; } /* Add a new floating-point ihm_keyword to a category. */ struct ihm_keyword *ihm_keyword_float_new(struct ihm_category *category, const char *name) { struct ihm_keyword *key = ihm_keyword_new(category, name); key->type = IHM_FLOAT; return key; } /* Add a new boolean ihm_keyword to a category. */ struct ihm_keyword *ihm_keyword_bool_new(struct ihm_category *category, const char *name) { struct ihm_keyword *key = ihm_keyword_new(category, name); key->type = IHM_BOOL; return key; } /* Add a new string ihm_keyword to a category. 
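   For example, a caller interested in _struct.title could register the
   category and keyword as below (an illustrative sketch only; the handler
   signature mirrors the way data_callback is invoked elsewhere in this file):

     static struct ihm_keyword *title_key;

     static void struct_handler(struct ihm_reader *reader, int linenum,
                                void *data, struct ihm_error **err)
     {
       if (title_key->in_file && !title_key->omitted && !title_key->unknown) {
         printf("title: %s\n", title_key->data.str);
       }
     }

     struct ihm_category *c = ihm_category_new(reader, "_struct",
                                               struct_handler, NULL, NULL,
                                               NULL, NULL);
     title_key = ihm_keyword_str_new(c, "title");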
*/ struct ihm_keyword *ihm_keyword_str_new(struct ihm_category *category, const char *name) { struct ihm_keyword *key = ihm_keyword_new(category, name); key->type = IHM_STRING; return key; } static void set_keyword_to_default(struct ihm_keyword *key) { if (key->type == IHM_STRING) { key->data.str = NULL; } key->own_data = false; } /* Set the value of a given keyword from the given string */ static void set_value_from_string(struct ihm_reader *reader, struct ihm_category *category, struct ihm_keyword *key, char *str, bool own_data, struct ihm_error **err) { char *ch; /* If a key is duplicated, overwrite it with the new value */ if (key->in_file && key->type == IHM_STRING && key->own_data) { free(key->data.str); key->data.str = NULL; } switch(key->type) { case IHM_STRING: key->own_data = own_data; if (own_data) { key->data.str = strdup(str); } else { key->data.str = str; } key->omitted = key->unknown = false; break; case IHM_INT: key->data.ival = strtol(str, &ch, 10); if (*ch) { ihm_error_set(err, IHM_ERROR_VALUE, "Cannot parse '%s' as integer in file, line %d", str, reader->linenum); return; } key->omitted = key->unknown = false; break; case IHM_FLOAT: key->data.fval = strtod(str, &ch); if (*ch) { ihm_error_set(err, IHM_ERROR_VALUE, "Cannot parse '%s' as float in file, line %d", str, reader->linenum); return; } key->omitted = key->unknown = false; break; case IHM_BOOL: key->omitted = key->unknown = false; if (strcasecmp(str, "YES") == 0) { key->data.bval = true; } else if (strcasecmp(str, "NO") == 0) { key->data.bval = false; } else { key->omitted = true; } break; } key->in_file = true; } /* Set the given keyword to the 'omitted' special value */ static void set_omitted_value(struct ihm_keyword *key) { /* If a key is duplicated, overwrite it with the new value */ if (key->in_file && key->own_data && key->type == IHM_STRING) { free(key->data.str); } key->omitted = true; key->unknown = false; set_keyword_to_default(key); key->in_file = true; } /* Set the given keyword to the 'unknown' special value */ static void set_unknown_value(struct ihm_keyword *key) { /* If a key is duplicated, overwrite it with the new value */ if (key->in_file && key->own_data && key->type == IHM_STRING) { free(key->data.str); } key->omitted = false; key->unknown = true; set_keyword_to_default(key); key->in_file = true; } /* Make a new ihm_file */ struct ihm_file *ihm_file_new(ihm_file_read_callback read_callback, void *data, ihm_free_callback free_func) { struct ihm_file *file = (struct ihm_file *)ihm_malloc(sizeof(struct ihm_file)); file->buffer = ihm_string_new(); file->line_start = file->next_line_start = 0; file->read_callback = read_callback; file->data = data; file->free_func = free_func; return file; } /* Free memory used by ihm_file */ static void ihm_file_free(struct ihm_file *file) { ihm_string_free(file->buffer); if (file->free_func) { (*file->free_func) (file->data); } free(file); } /* Read data from a file descriptor */ static ssize_t fd_read_callback(char *buffer, size_t buffer_len, void *data, struct ihm_error **err) { int fd = POINTER_TO_INT(data); ssize_t readlen; while(1) { #if defined(_WIN32) || defined(_WIN64) readlen = _read(fd, buffer, buffer_len); #else readlen = read(fd, buffer, buffer_len); #endif if (readlen != -1 || errno != EAGAIN) break; /* If EAGAIN encountered, wait for more data to become available */ usleep(100); } if (readlen == -1) { ihm_error_set(err, IHM_ERROR_IO, "%s", strerror(errno)); } return readlen; } /* Read data from file to expand the in-memory buffer. 
Returns the number of bytes read (0 on EOF), or -1 (and sets err) on error */ static ssize_t expand_buffer(struct ihm_file *fh, struct ihm_error **err) { static const size_t READ_SIZE = 4194304; /* Read 4MiB of data at a time */ size_t current_size; ssize_t readlen; /* Move any existing data to the start of the buffer (otherwise the buffer will grow to the full size of the file) */ if (fh->line_start) { ihm_string_erase(fh->buffer, 0, fh->line_start); fh->next_line_start -= fh->line_start; fh->line_start = 0; } current_size = fh->buffer->len; ihm_string_set_size(fh->buffer, current_size + READ_SIZE); readlen = (*fh->read_callback)(fh->buffer->str + current_size, READ_SIZE, fh->data, err); ihm_string_set_size(fh->buffer, current_size + (readlen == -1 ? 0 : readlen)); return readlen; } /* Read the next line from the file. Lines are terminated by \n, \r, \r\n, or \0. On success, true is returned. fh->line_start points to the start of the null-terminated line. *eof is set true iff the end of the line is the end of the file. On error, false is returned and err is set. */ static bool ihm_file_read_line(struct ihm_file *fh, int *eof, struct ihm_error **err) { size_t line_end; *eof = false; fh->line_start = fh->next_line_start; if (fh->line_start > fh->buffer->len) { /* EOF occurred earlier - return it (plus an empty string) again */ *eof = true; fh->line_start = 0; fh->buffer->str[0] = '\0'; return true; } /* Line is only definitely terminated if there are characters after it (embedded NULL, or \r followed by a possible \n) */ while((line_end = fh->line_start + strcspn(fh->buffer->str + fh->line_start, "\r\n")) == fh->buffer->len) { ssize_t num_added = expand_buffer(fh, err); if (num_added < 0) { return false; /* error occurred */ } else if (num_added == 0) { *eof = true; /* end of file */ break; } } fh->next_line_start = line_end + 1; /* Handle \r\n terminator */ if (fh->buffer->str[line_end] == '\r' && fh->buffer->str[line_end + 1] == '\n') { fh->next_line_start++; } fh->buffer->str[line_end] = '\0'; return true; } /* Make a new ihm_file that will read data from the given file descriptor */ struct ihm_file *ihm_file_new_from_fd(int fd) { return ihm_file_new(fd_read_callback, INT_TO_POINTER(fd), NULL); } /* Make a new struct ihm_reader */ struct ihm_reader *ihm_reader_new(struct ihm_file *fh, bool binary) { struct ihm_reader *reader = (struct ihm_reader *)ihm_malloc(sizeof(struct ihm_reader)); reader->fh = fh; reader->binary = binary; reader->linenum = 0; reader->tmp_str = ihm_string_new(); reader->tokens = ihm_array_new(sizeof(struct ihm_token)); reader->token_index = 0; reader->category_map = ihm_mapping_new(ihm_category_free); reader->unknown_category_callback = NULL; reader->unknown_category_data = NULL; reader->unknown_category_free_func = NULL; reader->unknown_keyword_callback = NULL; reader->unknown_keyword_data = NULL; reader->unknown_keyword_free_func = NULL; reader->num_blocks_left = -1; reader->cmp_read_err = NULL; return reader; } /* Free memory used by a struct ihm_reader */ void ihm_reader_free(struct ihm_reader *reader) { ihm_string_free(reader->tmp_str); ihm_array_free(reader->tokens); ihm_mapping_free(reader->category_map); ihm_file_free(reader->fh); if (reader->unknown_category_free_func) { (*reader->unknown_category_free_func) (reader->unknown_category_data); } if (reader->unknown_keyword_free_func) { (*reader->unknown_keyword_free_func) (reader->unknown_keyword_data); } if (reader->cmp_read_err) { ihm_error_free(reader->cmp_read_err); } free(reader); } /* Set a callback for 
unknown categories. The given callback is called whenever a category is encountered in the file that is not handled (by ihm_category_new). */ void ihm_reader_unknown_category_callback_set(struct ihm_reader *reader, ihm_unknown_category_callback callback, void *data, ihm_free_callback free_func) { if (reader->unknown_category_free_func) { (*reader->unknown_category_free_func) (reader->unknown_category_data); } reader->unknown_category_callback = callback; reader->unknown_category_data = data; reader->unknown_category_free_func = free_func; } /* Set a callback for unknown keywords. The given callback is called whenever a keyword is encountered in the file that is not handled (within a category that is handled by ihm_category_new). */ void ihm_reader_unknown_keyword_callback_set(struct ihm_reader *reader, ihm_unknown_keyword_callback callback, void *data, ihm_free_callback free_func) { if (reader->unknown_keyword_free_func) { (*reader->unknown_keyword_free_func) (reader->unknown_keyword_data); } reader->unknown_keyword_callback = callback; reader->unknown_keyword_data = data; reader->unknown_keyword_free_func = free_func; } /* Remove all categories from the reader. */ void ihm_reader_remove_all_categories(struct ihm_reader *reader) { ihm_mapping_remove_all(reader->category_map); if (reader->unknown_category_free_func) { (*reader->unknown_category_free_func) (reader->unknown_category_data); } reader->unknown_category_callback = NULL; reader->unknown_category_data = NULL; reader->unknown_category_free_func = NULL; if (reader->unknown_keyword_free_func) { (*reader->unknown_keyword_free_func) (reader->unknown_keyword_data); } reader->unknown_keyword_callback = NULL; reader->unknown_keyword_data = NULL; reader->unknown_keyword_free_func = NULL; } /* Given the start of a quoted string, find the end and add a token for it */ static size_t handle_quoted_token(struct ihm_reader *reader, char *line, size_t len, size_t start_pos, const char *quote_type, struct ihm_error **err) { char *pt = line + start_pos; char *end = pt; /* Get the next quote that is followed by whitespace (or line end). In mmCIF a quote within a string is not considered an end quote as long as it is not followed by whitespace. */ do { end = strchr(end + 1, pt[0]); } while (end && *end && end[1] && !strchr(" \t", end[1])); if (end && *end) { struct ihm_token t; int tok_end = end - pt + start_pos; /* A quoted string is always a literal string, even if it is "?" or ".", not an unknown/omitted value */ t.type = MMCIF_TOKEN_VALUE; t.str = line + start_pos + 1; line[tok_end] = '\0'; ihm_array_append(reader->tokens, &t); return tok_end + 1; /* step past the closing quote */ } else { ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "%s-quoted string not terminated in file, line %d", quote_type, reader->linenum); return len; } } /* Get the next token from the line. 
*/ static size_t get_next_token(struct ihm_reader *reader, char *line, size_t len, size_t start_pos, struct ihm_error **err) { /* Skip initial whitespace */ char *pt = line + start_pos; start_pos += strspn(pt, " \t"); pt = line + start_pos; if (*pt == '\0') { return len; } else if (*pt == '"') { return handle_quoted_token(reader, line, len, start_pos, "Double", err); } else if (*pt == '\'') { return handle_quoted_token(reader, line, len, start_pos, "Single", err); } else if (*pt == '#') { /* Comment - discard the rest of the line */ return len; } else { struct ihm_token t; int tok_end = start_pos + strcspn(pt, " \t"); t.str = line + start_pos; line[tok_end] = '\0'; if (strcmp(t.str, "loop_") == 0) { t.type = MMCIF_TOKEN_LOOP; } else if (strncmp(t.str, "data_", 5) == 0) { t.type = MMCIF_TOKEN_DATA; } else if (strncmp(t.str, "save_", 5) == 0) { t.type = MMCIF_TOKEN_SAVE; } else if (t.str[0] == '_') { t.type = MMCIF_TOKEN_VARIABLE; } else if (t.str[0] == '.' && t.str[1] == '\0') { t.type = MMCIF_TOKEN_OMITTED; } else if (t.str[0] == '?' && t.str[1] == '\0') { t.type = MMCIF_TOKEN_UNKNOWN; } else { /* Note that we do no special processing for other reserved words (global_, stop_). But the probability of them occurring where we expect a value is pretty small. */ t.type = MMCIF_TOKEN_VALUE; } ihm_array_append(reader->tokens, &t); return tok_end + 1; } } /* Break up a line into tokens, populating reader->tokens. */ static void tokenize(struct ihm_reader *reader, char *line, struct ihm_error **err) { size_t start_pos, len = strlen(line); ihm_array_clear(reader->tokens); if (len > 0 && line[0] == '#') { /* Skip comment lines */ return; } for (start_pos = 0; start_pos < len && !*err; start_pos = get_next_token(reader, line, len, start_pos, err)) { } if (*err) { ihm_array_clear(reader->tokens); } } /* Return a pointer to the current line */ static char *line_pt(struct ihm_reader *reader) { return reader->fh->buffer->str + reader->fh->line_start; } /* Read a semicolon-delimited (multiline) token */ static void read_multiline_token(struct ihm_reader *reader, int ignore_multiline, struct ihm_error **err) { int eof = 0; int start_linenum = reader->linenum; while (!eof) { reader->linenum++; if (!ihm_file_read_line(reader->fh, &eof, err)) { return; } else if (line_pt(reader)[0] == ';') { struct ihm_token t; t.type = MMCIF_TOKEN_VALUE; t.str = reader->tmp_str->str; ihm_array_clear(reader->tokens); ihm_array_append(reader->tokens, &t); reader->token_index = 0; return; } else if (!ignore_multiline) { ihm_string_append(reader->tmp_str, "\n"); ihm_string_append(reader->tmp_str, line_pt(reader)); } } ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "End of file while reading multiline string " "which started on line %d", start_linenum); } /* Return the number of tokens still available in the current line. */ static unsigned get_num_line_tokens(struct ihm_reader *reader) { return reader->tokens->len - reader->token_index; } /* Push back the last token returned by get_token() so it can be read again. */ static void unget_token(struct ihm_reader *reader) { reader->token_index--; } /* Get the next token from an mmCIF file, or NULL on end of file. The memory used by the token is valid for N calls to this function, where N is the result of get_num_line_tokens(). If ignore_multiline is true, the string contents of any multiline value tokens (those that are semicolon-delimited) are not stored in memory. 
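   For example, a semicolon-delimited multiline value in the input looks like:

     _struct.pdbx_descriptor
     ;A description that
     spans more than one line
     ;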
*/ static struct ihm_token *get_token(struct ihm_reader *reader, int ignore_multiline, struct ihm_error **err) { int eof = 0; if (reader->tokens->len <= reader->token_index) { do { /* No tokens left - read the next non-blank line in */ reader->linenum++; if (!ihm_file_read_line(reader->fh, &eof, err)) { return NULL; } else if (line_pt(reader)[0] == ';') { if (!ignore_multiline) { /* Skip initial semicolon */ ihm_string_assign(reader->tmp_str, line_pt(reader) + 1); } read_multiline_token(reader, ignore_multiline, err); if (*err) { return NULL; } } else { tokenize(reader, line_pt(reader), err); if (*err) { return NULL; } else { reader->token_index = 0; } } } while (reader->tokens->len == 0 && !eof); } if (reader->tokens->len == 0) { return NULL; } else { return &ihm_array_index(reader->tokens, struct ihm_token, reader->token_index++); } } /* Break up a variable token into category and keyword */ static void parse_category_keyword(struct ihm_reader *reader, char *str, char **category, char **keyword, struct ihm_error **err) { char *dot; size_t wordlen; dot = strchr(str, '.'); if (!dot) { ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "No period found in mmCIF variable name (%s) at line %d", str, reader->linenum); return; } wordlen = strcspn(str, " \t"); str[wordlen] = '\0'; *dot = '\0'; *category = str; *keyword = dot + 1; } /* Read a line that sets a single value, e.g. _entry.id 1YTI */ static void read_value(struct ihm_reader *reader, struct ihm_token *key_token, struct ihm_error **err) { struct ihm_category *category; char *category_name, *keyword_name; parse_category_keyword(reader, key_token->str, &category_name, &keyword_name, err); if (*err) return; category = (struct ihm_category *)ihm_mapping_lookup(reader->category_map, category_name); if (category) { struct ihm_keyword *key; key = (struct ihm_keyword *)ihm_mapping_lookup(category->keyword_map, keyword_name); if (key) { struct ihm_token *val_token = get_token(reader, false, err); if (val_token && val_token->type == MMCIF_TOKEN_VALUE) { set_value_from_string(reader, category, key, val_token->str, true, err); } else if (val_token && val_token->type == MMCIF_TOKEN_OMITTED) { set_omitted_value(key); } else if (val_token && val_token->type == MMCIF_TOKEN_UNKNOWN) { set_unknown_value(key); } else if (!*err) { ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "No valid value found for %s.%s in file, line %d", category->name, key->name, reader->linenum); } } else if (reader->unknown_keyword_callback) { (*reader->unknown_keyword_callback)(reader, category_name, keyword_name, reader->linenum, reader->unknown_keyword_data, err); } } else if (reader->unknown_category_callback) { (*reader->unknown_category_callback)(reader, category_name, reader->linenum, reader->unknown_category_data, err); } } /* Handle a single token listing category and keyword from a loop_ construct. The relevant ihm_keyword is returned, or NULL if we are not interested in this keyword. 
*/ static struct ihm_keyword *handle_loop_index(struct ihm_reader *reader, struct ihm_category **catpt, struct ihm_token *token, bool first_loop, struct ihm_error **err) { struct ihm_category *category; char *category_name, *keyword_name; parse_category_keyword(reader, token->str, &category_name, &keyword_name, err); if (*err) return NULL; category = (struct ihm_category *)ihm_mapping_lookup(reader->category_map, category_name); if (first_loop) { *catpt = category; if (!category && reader->unknown_category_callback) { (*reader->unknown_category_callback)(reader, category_name, reader->linenum, reader->unknown_category_data, err); if (*err) { return NULL; } } } else if (*catpt != category) { ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "mmCIF files cannot contain multiple categories " "within a single loop at line %d", reader->linenum); return NULL; } if (category) { struct ihm_keyword *key; key = (struct ihm_keyword *)ihm_mapping_lookup(category->keyword_map, keyword_name); if (key) { return key; } else if (reader->unknown_keyword_callback) { (*reader->unknown_keyword_callback)(reader, category_name, keyword_name, reader->linenum, reader->unknown_keyword_data, err); if (*err) { return NULL; } } } return NULL; } static void check_keywords_in_file(void *k, void *value, void *user_data) { struct ihm_keyword *key = (struct ihm_keyword *)value; bool *in_file = (bool *)user_data; *in_file |= key->in_file; } static void clear_keywords(void *k, void *value, void *user_data) { struct ihm_keyword *key = (struct ihm_keyword *)value; if (key->own_data && key->type == IHM_STRING) { free(key->data.str); } key->in_file = false; set_keyword_to_default(key); } /* Call the category's data callback function. If force is false, only call it if data has actually been read in. */ static void call_category(struct ihm_reader *reader, struct ihm_category *category, bool force, struct ihm_error **err) { if (category->data_callback) { if (!force) { /* Check to see if at least one keyword was given a value */ ihm_mapping_foreach(category->keyword_map, check_keywords_in_file, &force); } if (force) { (*category->data_callback) (reader, reader->linenum, category->data, err); } } /* Clear out keyword values, ready for the next set of data */ ihm_mapping_foreach(category->keyword_map, clear_keywords, NULL); } /* Read the list of keywords from a loop_ construct. */ static struct ihm_array *read_loop_keywords(struct ihm_reader *reader, struct ihm_category **category, struct ihm_error **err) { bool first_loop = true; struct ihm_token *token; /* An array of ihm_keyword*, in the order the values should be given. Any NULL pointers correspond to keywords we're not interested in. 
*/ struct ihm_array *keywords = ihm_array_new(sizeof(struct ihm_keyword*)); *category = NULL; while (!*err && (token = get_token(reader, false, err))) { if (token->type == MMCIF_TOKEN_VARIABLE) { struct ihm_keyword *k = handle_loop_index(reader, category, token, first_loop, err); ihm_array_append(keywords, &k); first_loop = false; } else if (token->type == MMCIF_TOKEN_VALUE || token->type == MMCIF_TOKEN_UNKNOWN || token->type == MMCIF_TOKEN_OMITTED) { /* OK, end of keywords; proceed on to values */ unget_token(reader); break; } else { ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "Was expecting a keyword or value for loop at line %d", reader->linenum); } } if (*err) { ihm_array_free(keywords); return NULL; } else { return keywords; } } /* Read data for a loop_ construct */ static void read_loop_data(struct ihm_reader *reader, struct ihm_category *category, unsigned len, struct ihm_keyword **keywords, struct ihm_error **err) { while (!*err) { /* Does the current line contain an entire row in the loop? */ int oneline = get_num_line_tokens(reader) >= len; unsigned i; for (i = 0; !*err && i < len; ++i) { struct ihm_token *token = get_token(reader, false, err); if (*err) { break; } else if (token && token->type == MMCIF_TOKEN_VALUE) { if (keywords[i]) { set_value_from_string(reader, category, keywords[i], token->str, !oneline, err); } } else if (token && token->type == MMCIF_TOKEN_OMITTED) { if (keywords[i]) { set_omitted_value(keywords[i]); } } else if (token && token->type == MMCIF_TOKEN_UNKNOWN) { if (keywords[i]) { set_unknown_value(keywords[i]); } } else if (i == 0) { /* OK, end of the loop */ if (token) { unget_token(reader); } return; } else { ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "Wrong number of data values in loop (should be an " "exact multiple of the number of keys) at line %d", reader->linenum); } } if (!*err) { call_category(reader, category, true, err); } } } /* Read a loop_ construct from the file. 
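   For example:

     loop_
     _atom_site.id
     _atom_site.type_symbol
     1 N
     2 C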
*/ static void read_loop(struct ihm_reader *reader, struct ihm_error **err) { struct ihm_array *keywords; struct ihm_category *category; keywords = read_loop_keywords(reader, &category, err); if (*err) { return; } if (category) { read_loop_data(reader, category, keywords->len, (struct ihm_keyword **)keywords->data, err); } ihm_array_free(keywords); } struct category_foreach_data { struct ihm_error **err; struct ihm_reader *reader; }; static void call_category_foreach(void *key, void *value, void *user_data) { struct category_foreach_data *d = (struct category_foreach_data *)user_data; struct ihm_category *category = (struct ihm_category *)value; if (!*(d->err)) { call_category(d->reader, category, false, d->err); } } /* Process any data stored in all categories */ static void call_all_categories(struct ihm_reader *reader, struct ihm_error **err) { struct category_foreach_data d; d.err = err; d.reader = reader; ihm_mapping_foreach(reader->category_map, call_category_foreach, &d); } static void finalize_category_foreach(void *key, void *value, void *user_data) { struct category_foreach_data *d = (struct category_foreach_data *)user_data; struct ihm_category *category = (struct ihm_category *)value; if (!*(d->err) && category->finalize_callback) { (*category->finalize_callback)(d->reader, d->reader->linenum, category->data, d->err); } } /* Call each category's finalize callback */ static void finalize_all_categories(struct ihm_reader *reader, struct ihm_error **err) { struct category_foreach_data d; d.err = err; d.reader = reader; ihm_mapping_foreach(reader->category_map, finalize_category_foreach, &d); } static void end_frame_category_foreach(void *key, void *value, void *user_data) { struct category_foreach_data *d = (struct category_foreach_data *)user_data; struct ihm_category *category = (struct ihm_category *)value; if (!*(d->err) && category->end_frame_callback) { (*category->end_frame_callback)(d->reader, d->reader->linenum, category->data, d->err); } } /* Call each category's end_frame callback */ static void end_frame_all_categories(struct ihm_reader *reader, struct ihm_error **err) { struct category_foreach_data d; d.err = err; d.reader = reader; ihm_mapping_foreach(reader->category_map, end_frame_category_foreach, &d); } static void sort_category_foreach(void *key, void *value, void *user_data) { struct ihm_category *category = (struct ihm_category *)value; ihm_mapping_sort(category->keyword_map); } /* Make sure that all mappings are sorted before we try to use them */ static void sort_mappings(struct ihm_reader *reader) { ihm_mapping_sort(reader->category_map); ihm_mapping_foreach(reader->category_map, sort_category_foreach, NULL); } /* Read an entire mmCIF file. 
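   Note that only the first data_ block is processed per call; for example,
   given

     data_model
     _entry.id 1YTI
     loop_
     ...

   everything up to the next data_ keyword (if any) is read, and more_data
   reports whether another block follows.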
*/ static bool read_mmcif_file(struct ihm_reader *reader, bool *more_data, struct ihm_error **err) { int ndata = 0, in_save = 0; struct ihm_token *token; sort_mappings(reader); while (!*err && (token = get_token(reader, true, err))) { if (token->type == MMCIF_TOKEN_VARIABLE) { read_value(reader, token, err); } else if (token->type == MMCIF_TOKEN_DATA) { ndata++; /* Only read the first data block */ if (ndata > 1) { /* Allow reading the next data block */ unget_token(reader); break; } } else if (token->type == MMCIF_TOKEN_LOOP) { read_loop(reader, err); } else if (token->type == MMCIF_TOKEN_SAVE) { in_save = !in_save; if (!in_save) { call_all_categories(reader, err); end_frame_all_categories(reader, err); } } } if (!*err) { call_all_categories(reader, err); finalize_all_categories(reader, err); } if (*err) { *more_data = false; return false; } else { *more_data = (ndata > 1); return true; } } /* Read exactly sz bytes from the given file. Return a pointer to the location in the file read buffer of those bytes. This pointer is only valid until the next file read. */ static bool ihm_file_read_bytes(struct ihm_file *fh, char **buf, size_t sz, struct ihm_error **err) { /* Read at least 4MiB of data at a time */ static const ssize_t READ_SIZE = 4194304; if (fh->line_start + sz > fh->buffer->len) { size_t current_size, to_read; ssize_t readlen, needed; /* Move any existing data to the start of the buffer, so it doesn't grow to the full size of the file */ if (fh->line_start) { ihm_string_erase(fh->buffer, 0, fh->line_start); fh->line_start = 0; } /* Fill buffer with new data, at least sz long (but could be more) */ current_size = fh->buffer->len; needed = sz - current_size; to_read = READ_SIZE > needed ? READ_SIZE : needed; /* Expand buffer as needed */ ihm_string_set_size(fh->buffer, current_size + to_read); readlen = (*fh->read_callback)( fh->buffer->str + current_size, to_read, fh->data, err); if (*err) return false; if (readlen < needed) { ihm_error_set(err, IHM_ERROR_IO, "Less data read than requested"); return false; } /* Set buffer size to match data actually read */ ihm_string_set_size(fh->buffer, current_size + readlen); } *buf = fh->buffer->str + fh->line_start; fh->line_start += sz; return true; } /* Read callback for the cmp library */ static bool bcif_cmp_read(cmp_ctx_t *ctx, void *data, size_t limit) { char *buf; struct ihm_reader *reader = (struct ihm_reader *)ctx->buf; if (!ihm_file_read_bytes(reader->fh, &buf, limit, &reader->cmp_read_err)) { return false; } else { memcpy(data, buf, limit); return true; } } /* Skip callback for the cmp library */ static bool bcif_cmp_skip(cmp_ctx_t *ctx, size_t count) { char *buf; struct ihm_reader *reader = (struct ihm_reader *)ctx->buf; if (!ihm_file_read_bytes(reader->fh, &buf, count, &reader->cmp_read_err)) { return false; } else { return true; } } /* Read the next msgpack object from the BinaryCIF file; it must be a map. Return true on success and return the number of elements in the map; return false on error (and set err) */ static bool read_bcif_map(struct ihm_reader *reader, uint32_t *map_size, struct ihm_error **err) { if (!cmp_read_map(&reader->cmp, map_size)) { if (!ihm_error_move(err, &reader->cmp_read_err)) { ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "Was expecting a map; %s", cmp_strerror(&reader->cmp)); } return false; } else { return true; } } /* Like read_bcif_map, but if a NIL object is encountered instead, act as if a zero-size map was read. 
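(This is used below by read_bcif_mask, since a column with no masked values may store its
   mask as msgpack nil rather than as a map.)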
*/ static bool read_bcif_map_or_nil(struct ihm_reader *reader, uint32_t *map_size, struct ihm_error **err) { cmp_object_t obj; if (!cmp_read_object(&reader->cmp, &obj)) { if (!ihm_error_move(err, &reader->cmp_read_err)) { ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "%s", cmp_strerror(&reader->cmp)); } return false; } switch(obj.type) { case CMP_TYPE_NIL: *map_size = 0; return true; case CMP_TYPE_FIXMAP: case CMP_TYPE_MAP16: case CMP_TYPE_MAP32: *map_size = obj.as.map_size; return true; default: ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "Was expecting a map or nil"); return false; } } /* Read the next number (any kind of int or float) object from the BinaryCIF file, and return it as a double. */ static bool read_bcif_any_double(struct ihm_reader *reader, double *value, struct ihm_error **err) { cmp_object_t obj; if (!cmp_read_object(&reader->cmp, &obj)) { if (!ihm_error_move(err, &reader->cmp_read_err)) { ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "%s", cmp_strerror(&reader->cmp)); } return false; } switch(obj.type) { case CMP_TYPE_POSITIVE_FIXNUM: case CMP_TYPE_UINT8: *value = obj.as.u8; return true; case CMP_TYPE_UINT16: *value = obj.as.u16; return true; case CMP_TYPE_UINT32: *value = obj.as.u32; return true; case CMP_TYPE_NEGATIVE_FIXNUM: case CMP_TYPE_SINT8: *value = obj.as.s8; return true; case CMP_TYPE_SINT16: *value = obj.as.s16; return true; case CMP_TYPE_SINT32: *value = obj.as.s32; return true; case CMP_TYPE_FLOAT: *value = obj.as.flt; return true; case CMP_TYPE_DOUBLE: *value = obj.as.dbl; return true; default: ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "Was expecting a number"); return false; } } /* Read the next msgpack object from the BinaryCIF file; it must be an array. Return true on success and return the number of elements in the array; return false on error (and set err) */ static bool read_bcif_array(struct ihm_reader *reader, uint32_t *array_size, struct ihm_error **err) { if (!cmp_read_array(&reader->cmp, array_size)) { if (!ihm_error_move(err, &reader->cmp_read_err)) { ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "Was expecting an array; %s", cmp_strerror(&reader->cmp)); } return false; } else { return true; } } /* Skip the next msgpack object from the BinaryCIF file; it can be any kind of simple object (not an array or map). */ static bool skip_bcif_object(struct ihm_reader *reader, struct ihm_error **err) { if (!cmp_skip_object(&reader->cmp, NULL)) { if (!ihm_error_move(err, &reader->cmp_read_err)) { ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "Could not skip object; %s", cmp_strerror(&reader->cmp)); } return false; } else { return true; } } /* Skip the next msgpack object from the BinaryCIF file; it can be any kind of object, including an array or map. */ static bool skip_bcif_object_no_limit(struct ihm_reader *reader, struct ihm_error **err) { if (!cmp_skip_object_no_limit(&reader->cmp)) { if (!ihm_error_move(err, &reader->cmp_read_err)) { ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "Could not skip object; %s", cmp_strerror(&reader->cmp)); } return false; } else { return true; } } /* Read the next integer object from the BinaryCIF file. */ static bool read_bcif_int(struct ihm_reader *reader, int32_t *value, struct ihm_error **err) { if (!cmp_read_int(&reader->cmp, value)) { if (!ihm_error_move(err, &reader->cmp_read_err)) { ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "Was expecting an integer; %s", cmp_strerror(&reader->cmp)); } return false; } else { return true; } } /* Read the next string from the BinaryCIF file and return a pointer to it. 
This pointer points into ihm_reader and is valid until the next read. */ static bool read_bcif_string(struct ihm_reader *reader, char **str, struct ihm_error **err) { char *buf; uint32_t strsz; if (!cmp_read_str_size(&reader->cmp, &strsz)) { if (!ihm_error_move(err, &reader->cmp_read_err)) { ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "Was expecting a string; %s", cmp_strerror(&reader->cmp)); } return false; } if (!ihm_file_read_bytes(reader->fh, &buf, strsz, err)) return false; /* Copy into reader's temporary string buffer and return a pointer to it */ ihm_string_assign_n(reader->tmp_str, buf, strsz); *str = reader->tmp_str->str; return true; } /* Read the next string from the BinaryCIF file and store a copy of it at the given pointer. The caller is responsible for freeing it later. */ static bool read_bcif_string_dup(struct ihm_reader *reader, char **str, struct ihm_error **err) { char *buf; uint32_t strsz; if (!cmp_read_str_size(&reader->cmp, &strsz)) { if (!ihm_error_move(err, &reader->cmp_read_err)) { ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "Was expecting a string; %s", cmp_strerror(&reader->cmp)); } return false; } if (!ihm_file_read_bytes(reader->fh, &buf, strsz, err)) return false; /* strdup into new buffer; we can't use strndup as Windows doesn't have it */ free(*str); *str = (char *)ihm_malloc(strsz + 1); memcpy(*str, buf, strsz); (*str)[strsz] = '\0'; return true; } /* Read the next string from the BinaryCIF file. Set match if it compares equal to str. This is slightly more efficient than returning the null-terminated string and then comparing it as it eliminates a copy. */ static bool read_bcif_exact_string(struct ihm_reader *reader, const char *str, bool *match, struct ihm_error **err) { char *buf; uint32_t actual_len, want_len = strlen(str); if (!cmp_read_str_size(&reader->cmp, &actual_len)) { if (!ihm_error_move(err, &reader->cmp_read_err)) { ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "Was expecting a string; %s", cmp_strerror(&reader->cmp)); } return false; } if (!ihm_file_read_bytes(reader->fh, &buf, actual_len, err)) return false; *match = (actual_len == want_len && strncmp(str, buf, want_len) == 0); return true; } /* Read the next binary object from the BinaryCIF file and store a copy of it at the given pointer. The caller is responsible for freeing it later. 
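(read_bcif_binary_dup below first reads the msgpack bin header to get the byte count, then
   copies the payload out of the file buffer into freshly allocated memory, because the buffer
   contents are only valid until the next read.)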
*/ static bool read_bcif_binary_dup(struct ihm_reader *reader, char **bin, size_t *bin_size, struct ihm_error **err) { char *buf; uint32_t binsz; if (!cmp_read_bin_size(&reader->cmp, &binsz)) { if (!ihm_error_move(err, &reader->cmp_read_err)) { ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "Was expecting binary; %s", cmp_strerror(&reader->cmp)); } return false; } if (!ihm_file_read_bytes(reader->fh, &buf, binsz, err)) return false; /* memcpy into new buffer */ free(*bin); *bin = (char *)ihm_malloc(binsz); *bin_size = binsz; memcpy(*bin, buf, binsz); return true; } /* Read the header from a BinaryCIF file to get the number of data blocks */ static bool read_bcif_header(struct ihm_reader *reader, struct ihm_error **err) { uint32_t map_size, i; if (!read_bcif_map(reader, &map_size, err)) return false; for (i = 0; i < map_size; ++i) { bool match; if (!read_bcif_exact_string(reader, "dataBlocks", &match, err)) return false; if (match) { uint32_t array_size; if (!read_bcif_array(reader, &array_size, err)) return false; reader->num_blocks_left = array_size; return true; } else { if (!skip_bcif_object(reader, err)) return false; } } reader->num_blocks_left = 0; return true; } /* The type of data stored in bcif_data */ typedef enum { BCIF_DATA_NULL, /* No data present (e.g. empty mask) */ BCIF_DATA_RAW, /* Raw data, before decoding */ BCIF_DATA_INT8, /* Array of signed bytes */ BCIF_DATA_UINT8, /* Array of unsigned bytes */ BCIF_DATA_INT16, /* Array of signed 16-bit integers */ BCIF_DATA_UINT16, /* Array of unsigned 16-bit integers */ BCIF_DATA_INT32, /* Array of signed 32-bit integers */ BCIF_DATA_UINT32, /* Array of unsigned 32-bit integers */ BCIF_DATA_FLOAT, /* Array of single-precision floating point values */ BCIF_DATA_DOUBLE, /* Array of double-precision floating point values */ BCIF_DATA_STRING /* Array of char* null-terminated strings */ } bcif_data_type; /* All possible C types stored in bcif_data */ union bcif_data_c { char *raw; int8_t *int8; uint8_t *uint8; int16_t *int16; uint16_t *uint16; int32_t *int32; uint32_t *uint32; float *float32; double *float64; char **string; }; /* Data stored in BinaryCIF for a column, mask, or StringArray offsets. This data can be of multiple types, e.g. raw, int array, etc. */ struct bcif_data { /* The type of the data */ bcif_data_type type; /* The data itself */ union bcif_data_c data; /* The size of the data (e.g. 
array dimension) */ size_t size; }; /* Initialize a new bcif_data */ static void bcif_data_init(struct bcif_data *d) { d->type = BCIF_DATA_NULL; d->size = 0; } /* Free memory used by a bcif_data */ static void bcif_data_free(struct bcif_data *d) { switch(d->type) { case BCIF_DATA_NULL: break; case BCIF_DATA_RAW: free(d->data.raw); break; case BCIF_DATA_INT8: free(d->data.int8); break; case BCIF_DATA_UINT8: free(d->data.uint8); break; case BCIF_DATA_INT16: free(d->data.int16); break; case BCIF_DATA_UINT16: free(d->data.uint16); break; case BCIF_DATA_INT32: free(d->data.int32); break; case BCIF_DATA_UINT32: free(d->data.uint32); break; case BCIF_DATA_FLOAT: free(d->data.float32); break; case BCIF_DATA_DOUBLE: free(d->data.float64); break; case BCIF_DATA_STRING: free(d->data.string); break; } } /* Overwrite bcif_data with new raw data */ static void bcif_data_assign_raw(struct bcif_data *d, char *data, size_t size) { bcif_data_free(d); d->type = BCIF_DATA_RAW; d->data.raw = data; d->size = size; } /* All valid and supported raw encoder types */ typedef enum { BCIF_ENC_NONE, BCIF_ENC_STRING_ARRAY, BCIF_ENC_BYTE_ARRAY, BCIF_ENC_INTEGER_PACKING, BCIF_ENC_DELTA, BCIF_ENC_RUN_LENGTH, BCIF_ENC_FIXED_POINT, BCIF_ENC_INTERVAL_QUANT } bcif_encoding_kind; /* An encoding used to compress raw data in BinaryCIF */ struct bcif_encoding { /* The encoder type */ bcif_encoding_kind kind; /* Origin (for delta encoding) */ int32_t origin; /* Factor (for fixed point encoding) */ int32_t factor; /* Min value (for interval quantization encoding) */ double minval; /* Max value (for interval quantization encoding) */ double maxval; /* Number of steps (for interval quantization encoding) */ int32_t numsteps; /* ByteArray type */ int32_t type; /* Encoding of StringArray data */ struct bcif_encoding *first_data_encoding; /* Encoding of StringArray offset */ struct bcif_encoding *first_offset_encoding; /* String data for StringArray encoding */ char *string_data; /* Data for offsets for StringArray encoding */ struct bcif_data offsets; /* Next encoding, or NULL */ struct bcif_encoding *next; }; /* A single column in a BinaryCIF category */ struct bcif_column { /* Keyword name */ char *name; /* Data and size */ struct bcif_data data; /* Mask data and size (or NULL) */ struct bcif_data mask_data; /* Singly-linked list of data encodings */ struct bcif_encoding *first_encoding; /* Singly-linked list of mask encodings */ struct bcif_encoding *first_mask_encoding; /* The corresponding ihm_keyword, if any */ struct ihm_keyword *keyword; /* Temporary buffer for keyword value as a string */ char *str; /* Next column, or NULL */ struct bcif_column *next; }; /* A single category in a BinaryCIF file */ struct bcif_category { /* Category name */ char *name; /* Singly-linked list of column (keyword) information */ struct bcif_column *first_column; }; /* Create and return a new bcif_encoding */ static struct bcif_encoding *bcif_encoding_new() { struct bcif_encoding *enc = (struct bcif_encoding *)ihm_malloc( sizeof(struct bcif_encoding)); enc->kind = BCIF_ENC_NONE; enc->origin = 0; enc->factor = 1; enc->minval = 0.; enc->maxval = 0.; enc->numsteps = 1; enc->type = -1; enc->first_data_encoding = NULL; enc->first_offset_encoding = NULL; enc->string_data = NULL; bcif_data_init(&enc->offsets); enc->next = NULL; return enc; } /* Free memory used by a bcif_encoding */ static void bcif_encoding_free(struct bcif_encoding *enc) { while(enc->first_data_encoding) { struct bcif_encoding *inenc = enc->first_data_encoding; enc->first_data_encoding = 
inenc->next; bcif_encoding_free(inenc); } while(enc->first_offset_encoding) { struct bcif_encoding *inenc = enc->first_offset_encoding; enc->first_offset_encoding = inenc->next; bcif_encoding_free(inenc); } free(enc->string_data); bcif_data_free(&enc->offsets); free(enc); } /* Create and return a new bcif_column */ static struct bcif_column *bcif_column_new() { struct bcif_column *c = (struct bcif_column *)ihm_malloc( sizeof(struct bcif_column)); c->name = NULL; bcif_data_init(&c->data); bcif_data_init(&c->mask_data); c->first_encoding = NULL; c->first_mask_encoding = NULL; c->keyword = NULL; c->str = NULL; c->next = NULL; return c; } /* Free memory used by a bcif_column */ static void bcif_column_free(struct bcif_column *col) { free(col->name); bcif_data_free(&col->data); bcif_data_free(&col->mask_data); while(col->first_encoding) { struct bcif_encoding *enc = col->first_encoding; col->first_encoding = enc->next; bcif_encoding_free(enc); } while(col->first_mask_encoding) { struct bcif_encoding *enc = col->first_mask_encoding; col->first_mask_encoding = enc->next; bcif_encoding_free(enc); } free(col->str); free(col); } /* Initialize a new bcif_category */ static void bcif_category_init(struct bcif_category *cat) { cat->name = NULL; cat->first_column = NULL; } /* Free memory used by a bcif_category */ static void bcif_category_free(struct bcif_category *cat) { free(cat->name); while(cat->first_column) { struct bcif_column *col = cat->first_column; cat->first_column = col->next; bcif_column_free(col); } } static bool read_bcif_encodings(struct ihm_reader *reader, struct bcif_encoding **first, bool allow_string_array, struct ihm_error **err); /* Read a single encoding from a BinaryCIF file */ static bool read_bcif_encoding(struct ihm_reader *reader, struct bcif_encoding *enc, bool allow_string_array, struct ihm_error **err) { uint32_t map_size, i; if (!read_bcif_map(reader, &map_size, err)) return false; for (i = 0; i < map_size; ++i) { char *str; if (!read_bcif_string(reader, &str, err)) return false; if (strcmp(str, "kind") == 0) { if (!read_bcif_string(reader, &str, err)) return false; if (strcmp(str, "StringArray") == 0) { if (!allow_string_array) { ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "StringArray decoding cannot be used for data " "or offset encoding"); return false; } enc->kind = BCIF_ENC_STRING_ARRAY; } else if (strcmp(str, "ByteArray") == 0) { enc->kind = BCIF_ENC_BYTE_ARRAY; } else if (strcmp(str, "IntegerPacking") == 0) { enc->kind = BCIF_ENC_INTEGER_PACKING; } else if (strcmp(str, "Delta") == 0) { enc->kind = BCIF_ENC_DELTA; } else if (strcmp(str, "RunLength") == 0) { enc->kind = BCIF_ENC_RUN_LENGTH; } else if (strcmp(str, "FixedPoint") == 0) { enc->kind = BCIF_ENC_FIXED_POINT; } else if (strcmp(str, "IntervalQuantization") == 0) { enc->kind = BCIF_ENC_INTERVAL_QUANT; } } else if (strcmp(str, "dataEncoding") == 0) { /* dataEncoding and offsetEncoding should not include StringArray encoding */ if (!read_bcif_encodings(reader, &enc->first_data_encoding, false, err)) return false; } else if (strcmp(str, "offsetEncoding") == 0) { if (!read_bcif_encodings(reader, &enc->first_offset_encoding, false, err)) return false; } else if (strcmp(str, "stringData") == 0) { if (!read_bcif_string_dup(reader, &enc->string_data, err)) return false; } else if (strcmp(str, "offsets") == 0) { char *data = NULL; size_t data_size; if (!read_bcif_binary_dup(reader, &data, &data_size, err)) return false; bcif_data_assign_raw(&enc->offsets, data, data_size); } else if (strcmp(str, "origin") == 0) { if 
(!read_bcif_int(reader, &enc->origin, err)) return false; } else if (strcmp(str, "factor") == 0) { if (!read_bcif_int(reader, &enc->factor, err)) return false; } else if (strcmp(str, "type") == 0) { if (!read_bcif_int(reader, &enc->type, err)) return false; } else if (strcmp(str, "min") == 0) { if (!read_bcif_any_double(reader, &enc->minval, err)) return false; } else if (strcmp(str, "max") == 0) { if (!read_bcif_any_double(reader, &enc->maxval, err)) return false; } else if (strcmp(str, "numSteps") == 0) { if (!read_bcif_int(reader, &enc->numsteps, err)) return false; } else { if (!skip_bcif_object_no_limit(reader, err)) return false; } } return true; } /* Read all encoding information from a BinaryCIF file */ static bool read_bcif_encodings(struct ihm_reader *reader, struct bcif_encoding **first, bool allow_string_array, struct ihm_error **err) { uint32_t array_size, i; if (!read_bcif_array(reader, &array_size, err)) return false; for (i = 0; i < array_size; ++i) { struct bcif_encoding *enc = bcif_encoding_new(); if (!read_bcif_encoding(reader, enc, allow_string_array, err)) { bcif_encoding_free(enc); return false; } else { enc->next = *first; *first = enc; } } return true; } /* Read raw data from a BinaryCIF file */ static bool read_bcif_data(struct ihm_reader *reader, struct bcif_column *col, struct ihm_error **err) { uint32_t map_size, i; if (!read_bcif_map(reader, &map_size, err)) return false; for (i = 0; i < map_size; ++i) { char *str; if (!read_bcif_string(reader, &str, err)) return false; if (strcmp(str, "data") == 0) { char *data = NULL; size_t data_size; if (!read_bcif_binary_dup(reader, &data, &data_size, err)) return false; bcif_data_assign_raw(&col->data, data, data_size); } else if (strcmp(str, "encoding") == 0) { if (!read_bcif_encodings(reader, &col->first_encoding, true, err)) return false; } else { if (!skip_bcif_object_no_limit(reader, err)) return false; } } return true; } /* Read a column's mask from a BinaryCIF file */ static bool read_bcif_mask(struct ihm_reader *reader, struct bcif_column *col, struct ihm_error **err) { uint32_t map_size, i; if (!read_bcif_map_or_nil(reader, &map_size, err)) return false; for (i = 0; i < map_size; ++i) { char *str; if (!read_bcif_string(reader, &str, err)) return false; if (strcmp(str, "encoding") == 0) { if (!read_bcif_encodings(reader, &col->first_mask_encoding, true, err)) return false; } else if (strcmp(str, "data") == 0) { char *mask_data = NULL; size_t mask_data_size; if (!read_bcif_binary_dup(reader, &mask_data, &mask_data_size, err)) return false; bcif_data_assign_raw(&col->mask_data, mask_data, mask_data_size); } else { if (!skip_bcif_object(reader, err)) return false; } } return true; } /* Read a single column from a BinaryCIF file */ static bool read_bcif_column(struct ihm_reader *reader, struct bcif_column *col, struct ihm_category *ihm_cat, struct ihm_error **err) { uint32_t map_size, i; /* If we already read the category name then we can potentially skip reading data/mask if we don't have a handler for the keyword */ bool skip = false; if (!read_bcif_map(reader, &map_size, err)) return false; for (i = 0; i < map_size; ++i) { char *str; if (!read_bcif_string(reader, &str, err)) return false; if (strcmp(str, "name") == 0) { if (!read_bcif_string_dup(reader, &col->name, err)) return false; if (ihm_cat) { struct ihm_keyword *key; key = (struct ihm_keyword *)ihm_mapping_lookup( ihm_cat->keyword_map, col->name); if (!key) { skip = true; } } } else if (!skip && strcmp(str, "data") == 0) { if (!read_bcif_data(reader, col, 
err)) return false; } else if (!skip && strcmp(str, "mask") == 0) { if (!read_bcif_mask(reader, col, err)) return false; } else { if (!skip_bcif_object_no_limit(reader, err)) return false; } } return true; } /* Read all columns for a category from a BinaryCIF file */ static bool read_bcif_columns(struct ihm_reader *reader, struct bcif_category *cat, struct ihm_category *ihm_cat, struct ihm_error **err) { uint32_t array_size, i; if (!read_bcif_array(reader, &array_size, err)) return false; for (i = 0; i < array_size; ++i) { struct bcif_column *col = bcif_column_new(); if (!read_bcif_column(reader, col, ihm_cat, err)) { bcif_column_free(col); return false; } else { col->next = cat->first_column; cat->first_column = col; } } return true; } /* Read a single category from a BinaryCIF file */ static bool read_bcif_category(struct ihm_reader *reader, struct bcif_category *cat, struct ihm_category **ihm_cat, struct ihm_error **err) { uint32_t map_size, i; bool skip = false; *ihm_cat = NULL; if (!read_bcif_map(reader, &map_size, err)) return false; for (i = 0; i < map_size; ++i) { char *str; if (!read_bcif_string(reader, &str, err)) return false; if (strcmp(str, "name") == 0) { if (!read_bcif_string_dup(reader, &cat->name, err)) return false; *ihm_cat = (struct ihm_category *)ihm_mapping_lookup( reader->category_map, cat->name); if (!*ihm_cat) { skip = true; /* no need to read columns if we don't have a callback */ } } else if (!skip && strcmp(str, "columns") == 0) { if (!read_bcif_columns(reader, cat, *ihm_cat, err)) return false; } else { if (!skip_bcif_object_no_limit(reader, err)) return false; } } return true; } /* Valid ByteArray data types */ #define BYTE_ARRAY_INT8 1 #define BYTE_ARRAY_INT16 2 #define BYTE_ARRAY_INT32 3 #define BYTE_ARRAY_UINT8 4 #define BYTE_ARRAY_UINT16 5 #define BYTE_ARRAY_UINT32 6 #define BYTE_ARRAY_FLOAT 32 #define BYTE_ARRAY_DOUBLE 33 /* Make sure the input data size for ByteArray decoding is correct, and set the output size. 
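As a worked illustration, eight raw little-endian bytes decoded with type Int32 (type code 3)

     raw bytes:     01 00 00 00 ff ff ff ff
     int32 output:  1, -1

   give a two-element array; on big-endian hosts each element is byte-swapped in place first,
   since ByteArray payloads are always little-endian.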
*/ static bool handle_byte_array_size(struct bcif_data *d, size_t type_size, struct ihm_error **err) { static const uint32_t ul = 1; if (d->size % type_size != 0) { ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "ByteArray raw data size is not a multiple of the type size"); return false; } d->size /= type_size; /* If we're on a bigendian platform, byteswap the array (ByteArray is always little endian) */ if ((int)(*((unsigned char *)&ul)) == 0 && type_size > 1) { size_t i, j, start; for (i = 0, start = 0; i < d->size; ++i, start += type_size) { for (j = 0; j < type_size / 2; ++j) { char tmp = d->data.raw[start + j]; d->data.raw[start + j] = d->data.raw[start + type_size - j - 1]; d->data.raw[start + type_size - j - 1] = tmp; } } } return true; } /* Decode data using BinaryCIF ByteArray encoding */ static bool decode_bcif_byte_array(struct bcif_data *d, struct bcif_encoding *enc, struct ihm_error **err) { if (d->type != BCIF_DATA_RAW) { ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "ByteArray not given raw data as input"); return false; } switch(enc->type) { case BYTE_ARRAY_INT8: d->type = BCIF_DATA_INT8; d->data.int8 = (int8_t *)d->data.raw; break; case BYTE_ARRAY_UINT8: d->type = BCIF_DATA_UINT8; d->data.uint8 = (uint8_t *)d->data.raw; break; case BYTE_ARRAY_INT16: if (!handle_byte_array_size(d, sizeof(int16_t), err)) return false; d->type = BCIF_DATA_INT16; d->data.int16 = (int16_t *)d->data.raw; break; case BYTE_ARRAY_UINT16: if (!handle_byte_array_size(d, sizeof(uint16_t), err)) return false; d->type = BCIF_DATA_UINT16; d->data.uint16 = (uint16_t *)d->data.raw; break; case BYTE_ARRAY_INT32: if (!handle_byte_array_size(d, sizeof(int32_t), err)) return false; d->type = BCIF_DATA_INT32; d->data.int32 = (int32_t *)d->data.raw; break; case BYTE_ARRAY_UINT32: if (!handle_byte_array_size(d, sizeof(uint32_t), err)) return false; d->type = BCIF_DATA_UINT32; d->data.uint32 = (uint32_t *)d->data.raw; break; case BYTE_ARRAY_FLOAT: if (!handle_byte_array_size(d, sizeof(float), err)) return false; d->type = BCIF_DATA_FLOAT; d->data.float32 = (float *)d->data.raw; break; case BYTE_ARRAY_DOUBLE: if (!handle_byte_array_size(d, sizeof(double), err)) return false; d->type = BCIF_DATA_DOUBLE; d->data.float64 = (double *)d->data.raw; break; default: ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "ByteArray unhandled data type %d", enc->type); return false; } return true; } #define DECODE_BCIF_INT_PACK(limit_check, datapt, datatyp) \ { \ int32_t *outdata, value; \ size_t i, j; \ size_t outsz = 0; \ /* Get the size of the decoded array. Limit values don't count. */ \ for (i = 0; i < d->size; ++i) { \ datatyp t = datapt[i]; \ if (!(limit_check)) { outsz++; } \ } \ outdata = (int32_t *)ihm_malloc(outsz * sizeof(int32_t)); \ j = 0; \ value = 0; \ for (i = 0; i < d->size; ++i) { \ datatyp t = datapt[i]; \ if (limit_check) { \ value += t; \ } else { \ outdata[j++] = value + t; \ value = 0; \ } \ } \ bcif_data_free(d); \ /* todo: can the output be unsigned 32-bit ? */ \ d->type = BCIF_DATA_INT32; \ d->size = outsz; \ d->data.int32 = outdata; \ } /* Decode data using BinaryCIF IntegerPacking encoding */ static bool decode_bcif_integer_packing(struct bcif_data *d, struct bcif_encoding *enc, struct ihm_error **err) { /* Encoded data must be 8- or 16-bit integers (signed or unsigned). 
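For example, with uint8 input the upper limit value is 0xFF, so the packed sequence
   10, 255, 3 decodes to 10, 258: limit values accumulate into a running total until a
   non-limit value terminates the entry.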
The behavior is similar in each case, so use a macro */ switch(d->type) { case BCIF_DATA_UINT8: DECODE_BCIF_INT_PACK(t == 0xFF, d->data.uint8, uint8_t); break; case BCIF_DATA_INT8: DECODE_BCIF_INT_PACK(t == 0x7F || t == -0x80, d->data.int8, int8_t); break; case BCIF_DATA_UINT16: DECODE_BCIF_INT_PACK(t == 0xFFFF, d->data.uint16, uint16_t); break; case BCIF_DATA_INT16: DECODE_BCIF_INT_PACK(t == 0x7FFF || t == -0x8000, d->data.int16, int16_t); break; default: ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "IntegerPacking bad input data type %d", d->type); return false; } return true; } #define DECODE_BCIF_DELTA(datapt, outpt, datatyp) \ { \ int32_t value; \ size_t i; \ value = enc->origin; \ for (i = 0; i < d->size; ++i) { \ value += datapt[i]; \ outpt[i] = value; \ } \ } #define DECODE_BCIF_DELTA_PROMOTE(datapt, datatyp) \ { \ int32_t *outdata = (int32_t *)ihm_malloc(d->size * sizeof(int32_t)); \ DECODE_BCIF_DELTA(datapt, outdata, datatyp) \ bcif_data_free(d); \ d->type = BCIF_DATA_INT32; \ d->data.int32 = outdata; \ } /* Decode data using BinaryCIF Delta encoding */ static bool decode_bcif_delta(struct bcif_data *d, struct bcif_encoding *enc, struct ihm_error **err) { switch (d->type) { case BCIF_DATA_INT8: DECODE_BCIF_DELTA_PROMOTE(d->data.int8, int8_t); break; case BCIF_DATA_UINT8: DECODE_BCIF_DELTA_PROMOTE(d->data.uint8, uint8_t); break; case BCIF_DATA_INT16: DECODE_BCIF_DELTA_PROMOTE(d->data.int16, int16_t); break; case BCIF_DATA_UINT16: DECODE_BCIF_DELTA_PROMOTE(d->data.uint16, uint16_t); break; case BCIF_DATA_INT32: DECODE_BCIF_DELTA(d->data.int32, d->data.int32, int32_t); break; default: ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "Delta not given integers as input"); return false; } return true; } #define DECODE_BCIF_RUN_LENGTH(datapt, datatyp) \ { \ size_t i, k; \ int32_t outsz, j, *outdata; \ outsz = 0; \ for (i = 1; i < d->size; i += 2) { \ int32_t ts = datapt[i]; \ /* Try to catch invalid (or malicious) counts. 
Counts cannot be negative and the largest count seen in a very large PDB structure (3j3q) is about 2.4m, so we are unlikely to see counts of 40m in real systems */ \ if (ts < 0 || ts > 40000000) { \ ihm_error_set(err, IHM_ERROR_FILE_FORMAT, \ "Bad run length repeat count %d", ts); \ return false; \ } \ outsz += ts; \ } \ assert(outsz > 0); \ outdata = (int32_t *)ihm_malloc(outsz * sizeof(int32_t)); \ for (i = 0, k = 0; i < d->size; i += 2) { \ int32_t value = datapt[i]; \ int32_t n_repeats = datapt[i + 1]; \ for (j = 0; j < n_repeats; ++j) { \ outdata[k++] = value; \ } \ } \ bcif_data_free(d); \ d->type = BCIF_DATA_INT32; \ d->size = outsz; \ d->data.int32 = outdata; \ } /* Decode data using BinaryCIF RunLength encoding */ static bool decode_bcif_run_length(struct bcif_data *d, struct bcif_encoding *enc, struct ihm_error **err) { if (d->size % 2 != 0) { ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "Run length data size (%d) is not even", d->size); return false; } switch (d->type) { case BCIF_DATA_INT8: DECODE_BCIF_RUN_LENGTH(d->data.int8, int8_t); break; case BCIF_DATA_UINT8: DECODE_BCIF_RUN_LENGTH(d->data.uint8, uint8_t); break; case BCIF_DATA_INT16: DECODE_BCIF_RUN_LENGTH(d->data.int16, int16_t); break; case BCIF_DATA_UINT16: DECODE_BCIF_RUN_LENGTH(d->data.uint16, uint16_t); break; case BCIF_DATA_INT32: DECODE_BCIF_RUN_LENGTH(d->data.int32, int32_t); break; default: ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "RunLength not given integers as input"); return false; } return true; } #define DECODE_BCIF_FIXED_POINT(datapt) \ { \ size_t i; \ /* We ignore srcType and always output double (not float) */ \ double *outdata = (double *)ihm_malloc(d->size * sizeof(double)); \ for (i = 0; i < d->size; ++i) { \ outdata[i] = (double)datapt[i] / enc->factor; \ } \ bcif_data_free(d); \ d->type = BCIF_DATA_DOUBLE; \ d->data.float64 = outdata; \ } /* Decode data using BinaryCIF FixedPoint encoding */ static bool decode_bcif_fixed_point(struct bcif_data *d, struct bcif_encoding *enc, struct ihm_error **err) { switch (d->type) { case BCIF_DATA_INT8: DECODE_BCIF_FIXED_POINT(d->data.int8); break; case BCIF_DATA_UINT8: DECODE_BCIF_FIXED_POINT(d->data.uint8); break; case BCIF_DATA_INT16: DECODE_BCIF_FIXED_POINT(d->data.int16); break; case BCIF_DATA_UINT16: DECODE_BCIF_FIXED_POINT(d->data.uint16); break; case BCIF_DATA_INT32: DECODE_BCIF_FIXED_POINT(d->data.int32); break; case BCIF_DATA_UINT32: DECODE_BCIF_FIXED_POINT(d->data.uint32); break; default: ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "FixedPoint not given integers as input"); return false; } return true; } #define DECODE_BCIF_INTERVAL_QUANT(datapt) \ { \ size_t i; \ /* We ignore srcType and always output double (not float) */ \ double *outdata = (double *)ihm_malloc(d->size * sizeof(double)); \ double delta = (enc->maxval - enc->minval) / (enc->numsteps - 1); \ for (i = 0; i < d->size; ++i) { \ outdata[i] = enc->minval + delta * datapt[i]; \ } \ bcif_data_free(d); \ d->type = BCIF_DATA_DOUBLE; \ d->data.float64 = outdata; \ } /* Decode data using BinaryCIF IntervalQuantization encoding */ static bool decode_bcif_interval_quant(struct bcif_data *d, struct bcif_encoding *enc, struct ihm_error **err) { if (enc->numsteps < 2) { ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "IntervalQuantization num_steps (%d) must be at least 2", enc->numsteps); return false; } switch (d->type) { case BCIF_DATA_INT8: DECODE_BCIF_INTERVAL_QUANT(d->data.int8); break; case BCIF_DATA_UINT8: DECODE_BCIF_INTERVAL_QUANT(d->data.uint8); break; case BCIF_DATA_INT16: 
DECODE_BCIF_INTERVAL_QUANT(d->data.int16); break; case BCIF_DATA_UINT16: DECODE_BCIF_INTERVAL_QUANT(d->data.uint16); break; case BCIF_DATA_INT32: DECODE_BCIF_INTERVAL_QUANT(d->data.int32); break; case BCIF_DATA_UINT32: DECODE_BCIF_INTERVAL_QUANT(d->data.uint32); break; default: ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "IntervalQuantization not given integers as input"); return false; } return true; } /* Return true iff the data type is int32, or another integer that can be promoted to that type */ static bool require_bcif_data_is_int32(struct bcif_data *d) { switch(d->type) { case BCIF_DATA_INT8: case BCIF_DATA_UINT8: case BCIF_DATA_INT16: case BCIF_DATA_UINT16: case BCIF_DATA_INT32: return true; default: return false; } } /* Get the ith element of the data array. It must be of a type that can be promoted to int32_t (see require_bcif_data_is_int32) */ static int32_t get_int_data(struct bcif_data *d, int i) { switch(d->type) { case BCIF_DATA_INT8: return d->data.int8[i]; case BCIF_DATA_UINT8: return d->data.uint8[i]; case BCIF_DATA_INT16: return d->data.int16[i]; case BCIF_DATA_UINT16: return d->data.uint16[i]; case BCIF_DATA_INT32: return d->data.int32[i]; default: return 0; } } /* Decode data using BinaryCIF StringArray encoding */ static bool decode_bcif_string_array(struct bcif_data *d, struct bcif_encoding *enc, struct ihm_error **err) { char *newstring, **strarr; int32_t stringsz; size_t i; int *starts, start; if (!require_bcif_data_is_int32(d)) { ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "StringArray not given integers as input"); return false; } if (!require_bcif_data_is_int32(&enc->offsets)) { ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "StringArray not given integers as offsets"); return false; } /* Make sure offsets are in range */ stringsz = strlen(enc->string_data); for (i = 0; i < enc->offsets.size; ++i) { if (get_int_data(&enc->offsets, i) < 0 || get_int_data(&enc->offsets, i) > stringsz) { ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "StringArray offset %d out of range 0-%d", get_int_data(&enc->offsets, i), 0, stringsz); return false; } } /* Add nulls to string_data so we can point directly into it */ stringsz = 0; for (i = 0; i < enc->offsets.size - 1; ++i) { stringsz += 1 + get_int_data(&enc->offsets, i + 1) - get_int_data(&enc->offsets, i); } newstring = (char *)ihm_malloc(stringsz); starts = (int *)ihm_malloc((enc->offsets.size - 1) * sizeof(int)); start = 0; for (i = 0; i < enc->offsets.size - 1; ++i) { stringsz = get_int_data(&enc->offsets, i + 1) - get_int_data(&enc->offsets, i); memcpy(newstring + start, enc->string_data + get_int_data(&enc->offsets, i), stringsz); newstring[start + stringsz] = '\0'; starts[i] = start; start += stringsz + 1; } free(enc->string_data); enc->string_data = newstring; strarr = (char **)ihm_malloc(d->size * sizeof(char *)); for (i = 0; i < d->size; ++i) { int32_t strnum = get_int_data(d, i); /* If strnum out of range, return a null string (this usually corresponds to masked data) */ if (strnum < 0 || (size_t)strnum >= enc->offsets.size) { strarr[i] = ""; } else { strarr[i] = enc->string_data + starts[strnum]; } } free(starts); bcif_data_free(d); d->type = BCIF_DATA_STRING; d->data.string = strarr; return true; } /* Decode raw BinaryCIF data by using all encoders specified */ static bool decode_bcif_data(struct bcif_data *d, struct bcif_encoding *enc, struct ihm_error **err) { while (enc) { switch(enc->kind) { case BCIF_ENC_BYTE_ARRAY: if (!decode_bcif_byte_array(d, enc, err)) return false; break; case BCIF_ENC_INTEGER_PACKING: if 
(!decode_bcif_integer_packing(d, enc, err)) return false; break; case BCIF_ENC_DELTA: if (!decode_bcif_delta(d, enc, err)) return false; break; case BCIF_ENC_RUN_LENGTH: if (!decode_bcif_run_length(d, enc, err)) return false; break; case BCIF_ENC_FIXED_POINT: if (!decode_bcif_fixed_point(d, enc, err)) return false; break; case BCIF_ENC_INTERVAL_QUANT: if (!decode_bcif_interval_quant(d, enc, err)) return false; break; case BCIF_ENC_STRING_ARRAY: if (!decode_bcif_data(&enc->offsets, enc->first_offset_encoding, err)) return false; if (!decode_bcif_data(d, enc->first_data_encoding, err)) return false; if (!decode_bcif_string_array(d, enc, err)) return false; break; default: ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "Unhandled encoding type %d", enc->kind); return false; } enc = enc->next; } return true; } /* Map BinaryCIF columns to ihm_keywords */ static bool check_bcif_columns(struct ihm_reader *reader, struct bcif_category *cat, struct ihm_category *ihm_cat, struct ihm_error **err) { struct bcif_column *col; /* Match columns to ihm_keywords; call back for any unknown */ for (col = cat->first_column; col; col = col->next) { col->keyword = (struct ihm_keyword *)ihm_mapping_lookup( ihm_cat->keyword_map, col->name); if (!col->keyword && reader->unknown_keyword_callback) { (*reader->unknown_keyword_callback)(reader, cat->name, col->name, 0, reader->unknown_keyword_data, err); if (*err) return false; } } return true; } /* Decode and check the column's data */ static bool process_column_data(struct bcif_column *col, struct ihm_error **err) { if (!decode_bcif_data(&col->data, col->first_encoding, err)) return false; switch(col->data.type) { case BCIF_DATA_INT32: case BCIF_DATA_INT8: case BCIF_DATA_UINT8: case BCIF_DATA_INT16: case BCIF_DATA_UINT16: case BCIF_DATA_FLOAT: case BCIF_DATA_DOUBLE: case BCIF_DATA_STRING: return true; default: ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "Unsupported column data type %d", col->data.type); return false; } } /* Decode and check the column's mask, if any */ static bool process_column_mask(struct bcif_column *col, struct ihm_error **err) { if (col->mask_data.type == BCIF_DATA_NULL) { return true; } if (!decode_bcif_data(&col->mask_data, col->first_mask_encoding, err)) return false; /* Masks are supposed to be uint8 but some of our decoders return int32 for simplicity. 
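(BinaryCIF mask values follow the convention 0 = value present, 1 = value omitted, i.e. '.',
   and 2 = value unknown, i.e. '?'; process_bcif_row below relies on this.) A mask decoded via
   RunLength or Delta, for example, comes back as int32 rather than uint8.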
If this happened, map back to uint8 */ if (col->mask_data.type == BCIF_DATA_INT32) { uint8_t *newdata; size_t i; newdata = (uint8_t *)ihm_malloc(col->mask_data.size * sizeof(uint8_t)); for (i = 0; i < col->mask_data.size; ++i) { newdata[i] = (uint8_t)col->mask_data.data.int32[i]; } free(col->mask_data.data.int32); col->mask_data.data.uint8 = newdata; col->mask_data.type = BCIF_DATA_UINT8; } if (col->mask_data.type != BCIF_DATA_UINT8) { ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "Unsupported column mask data type %d", col->mask_data.type); return false; } return true; } static void set_value_from_bcif_string(struct ihm_keyword *key, char *str, struct ihm_error **err) { char *ch; switch(key->type) { case IHM_STRING: /* In BinaryCIF the string is always owned by the file buffer, not the keyword */ key->own_data = false; key->data.str = str; key->omitted = false; break; case IHM_INT: key->data.ival = strtol(str, &ch, 10); if (*ch) { ihm_error_set(err, IHM_ERROR_VALUE, "Cannot parse '%s' as integer in file", str); } else { key->omitted = false; } break; case IHM_FLOAT: key->data.fval = strtod(str, &ch); if (*ch) { ihm_error_set(err, IHM_ERROR_VALUE, "Cannot parse '%s' as float in file", str); } else { key->omitted = false; } break; case IHM_BOOL: key->omitted = false; if (strcasecmp(str, "YES") == 0) { key->data.bval = true; } else if (strcasecmp(str, "NO") == 0) { key->data.bval = false; } else { key->omitted = true; } break; } if (!*err) { key->in_file = true; key->unknown = false; } } static void set_value_from_bcif_double(struct ihm_keyword *key, double fval, char *buffer) { key->omitted = key->unknown = false; key->in_file = true; switch(key->type) { case IHM_STRING: /* We (not the keyword) own buffer */ key->own_data = false; sprintf(buffer, "%g", fval); key->data.str = buffer; break; case IHM_INT: /* Truncate float to int. This matches Python's behavior of int(some_float) */ key->data.ival = (int)fval; break; case IHM_FLOAT: key->data.fval = fval; break; case IHM_BOOL: key->omitted = true; break; } } static void set_value_from_bcif_int(struct ihm_keyword *key, int32_t ival, char *buffer) { key->omitted = key->unknown = false; key->in_file = true; switch(key->type) { case IHM_STRING: /* We (not the keyword) own buffer */ key->own_data = false; sprintf(buffer, "%d", ival); key->data.str = buffer; break; case IHM_INT: key->data.ival = ival; break; case IHM_FLOAT: key->data.fval = ival; break; case IHM_BOOL: key->omitted = true; break; } } /* Set the value of a given keyword from the given BinaryCIF data */ static void set_value_from_data(struct ihm_reader *reader, struct ihm_category *category, struct ihm_keyword *key, struct bcif_data *data, size_t irow, char *buffer, struct ihm_error **err) { /* If a key is duplicated, overwrite it with the new value */ if (key->in_file && key->type == IHM_STRING && key->own_data) { free(key->data.str); key->data.str = NULL; } /* BinaryCIF data is typed (not always a string like mmCIF), so we may need to convert to the desired output type. 
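For example, a FixedPoint-encoded coordinate column arrives here as BCIF_DATA_DOUBLE and feeds
   an IHM_FLOAT keyword directly, whereas an integer column bound to an IHM_STRING keyword is
   formatted into the per-column scratch buffer with sprintf (see set_value_from_bcif_int above).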
*/ switch(data->type) { case BCIF_DATA_STRING: set_value_from_bcif_string(key, data->data.string[irow], err); break; case BCIF_DATA_FLOAT: /* promote to double */ set_value_from_bcif_double(key, data->data.float32[irow], buffer); break; case BCIF_DATA_DOUBLE: set_value_from_bcif_double(key, data->data.float64[irow], buffer); break; case BCIF_DATA_INT8: /* promote to int32 */ set_value_from_bcif_int(key, data->data.int8[irow], buffer); break; case BCIF_DATA_UINT8: /* promote to int32 */ set_value_from_bcif_int(key, data->data.uint8[irow], buffer); break; case BCIF_DATA_INT16: /* promote to int32 */ set_value_from_bcif_int(key, data->data.int16[irow], buffer); break; case BCIF_DATA_UINT16: /* promote to int32 */ set_value_from_bcif_int(key, data->data.uint16[irow], buffer); break; case BCIF_DATA_INT32: set_value_from_bcif_int(key, data->data.int32[irow], buffer); break; default: ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "Unhandled data type %d", data->type); break; } } /* Send the data for one category row to the callback */ static bool process_bcif_row(struct ihm_reader *reader, struct bcif_category *cat, struct ihm_category *ihm_cat, size_t irow, struct ihm_error **err) { struct bcif_column *col; for (col = cat->first_column; col; col = col->next) { if (!col->keyword) continue; if (col->mask_data.type == BCIF_DATA_UINT8 && col->mask_data.data.uint8[irow] == 1) { set_omitted_value(col->keyword); } else if (col->mask_data.type == BCIF_DATA_UINT8 && col->mask_data.data.uint8[irow] == 2) { set_unknown_value(col->keyword); } else { set_value_from_data(reader, ihm_cat, col->keyword, &col->data, irow, col->str, err); if (*err) return false; } } call_category(reader, ihm_cat, true, err); if (*err) return false; return true; } /* Check a read-in category, and send out the data via callbacks */ static bool process_bcif_category(struct ihm_reader *reader, struct bcif_category *cat, struct ihm_category *ihm_cat, struct ihm_error **err) { struct bcif_column *col; size_t i, n_rows = 0; if (!ihm_cat) { if (reader->unknown_category_callback) { (*reader->unknown_category_callback)( reader, cat->name, 0, reader->unknown_category_data, err); if (*err) return false; } return true; } if (!check_bcif_columns(reader, cat, ihm_cat, err)) return false; for (col = cat->first_column; col; col = col->next) { if (!col->keyword) continue; if (!process_column_data(col, err) || !process_column_mask(col, err)) return false; /* Make buffer for value as a string; should be long enough to store any int or double */ col->str = (char *)ihm_malloc(80); if (n_rows == 0) { n_rows = col->data.size; } else if (col->data.size != n_rows) { ihm_error_set(err, IHM_ERROR_FILE_FORMAT, "Column size mismatch %d != %d in category %s", col->data.size, n_rows, cat->name); return false; } } for (i = 0; i < n_rows; ++i) { if (!process_bcif_row(reader, cat, ihm_cat, i, err)) return false; } if (ihm_cat->finalize_callback) { (*ihm_cat->finalize_callback)(reader, reader->linenum, ihm_cat->data, err); if (*err) return false; } return true; } /* Read all categories from a BinaryCIF file */ static bool read_bcif_categories(struct ihm_reader *reader, struct ihm_error **err) { uint32_t ncat, icat; if (!read_bcif_array(reader, &ncat, err)) return false; for (icat = 0; icat < ncat; ++icat) { struct bcif_category cat; struct ihm_category *ihm_cat; bcif_category_init(&cat); if (!read_bcif_category(reader, &cat, &ihm_cat, err) || !process_bcif_category(reader, &cat, ihm_cat, err)) { bcif_category_free(&cat); return false; } else { bcif_category_free(&cat); 
} } return true; } /* Read the next data block from a BinaryCIF file */ static bool read_bcif_block(struct ihm_reader *reader, struct ihm_error **err) { uint32_t map_size, i; if (!read_bcif_map(reader, &map_size, err)) return false; for (i = 0; i < map_size; ++i) { bool match; if (!read_bcif_exact_string(reader, "categories", &match, err)) return false; if (match) { if (!read_bcif_categories(reader, err)) return false; } else { if (!skip_bcif_object(reader, err)) return false; } } reader->num_blocks_left--; return true; } /* Read an entire BinaryCIF file. */ static bool read_bcif_file(struct ihm_reader *reader, bool *more_data, struct ihm_error **err) { *more_data = false; sort_mappings(reader); if (reader->num_blocks_left == -1) { cmp_init(&reader->cmp, reader, bcif_cmp_read, bcif_cmp_skip, NULL); if (!read_bcif_header(reader, err)) return false; } if (reader->num_blocks_left > 0) { if (!read_bcif_block(reader, err)) return false; } *more_data = (reader->num_blocks_left > 0); return true; } /* Read an entire mmCIF or BinaryCIF file. */ bool ihm_read_file(struct ihm_reader *reader, bool *more_data, struct ihm_error **err) { if (reader->binary) { return read_bcif_file(reader, more_data, err); } else { return read_mmcif_file(reader, more_data, err); } } python-ihm-2.7/src/ihm_format.h000066400000000000000000000176151503573337200165650ustar00rootroot00000000000000/** \file ihm_format.h Routines for handling mmCIF or BinaryCIF format files. * * The file is read sequentially. All values for desired keywords in * desired categories are collected (other parts of the file are ignored). * * For mmCIF, at the end of the file and each save frame a callback function * for each category is called to process the data. In the case of mmCIF * loops, this callback will be called multiple times, once for each entry * in the loop. * * For BinaryCIF, the category callback will be called as each category * is encountered in the file, once per row. */ #ifndef IHM_FORMAT_H #define IHM_FORMAT_H #include /* For size_t */ #if defined(_MSC_VER) #include typedef SSIZE_T ssize_t; #if _MSC_VER > 1800 #include /* For bool */ #else typedef int bool; #define true 1 #define false 0 #endif #else #include /* For bool */ #include /* For ssize_t */ #endif #ifdef __cplusplus extern "C" { #endif /* IHM error types */ typedef enum { IHM_ERROR_VALUE, /* Bad value */ IHM_ERROR_IO, /* Input/output error */ IHM_ERROR_FILE_FORMAT, /* File format error */ } IHMErrorCode; /* Error reported by IHM functions. The caller is responsible for freeing the memory used by this struct by calling ihm_error_free(). */ struct ihm_error { /* The type of error */ IHMErrorCode code; /* Human-readable error message */ char *msg; }; /* Free the memory used by an ihm_error */ void ihm_error_free(struct ihm_error *err); /* Set the error indicator */ void ihm_error_set(struct ihm_error **err, IHMErrorCode code, const char *format, ...); #ifndef SWIG typedef enum { IHM_STRING = 1, IHM_INT, IHM_FLOAT, IHM_BOOL } ihm_keyword_type; /* A keyword in an mmCIF or BinaryCIF file. Holds a description of its format and any value read from the file. 
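Typical usage (a sketch; the keyword names here are illustrative, not taken from any real
 * category) is to create typed keywords on a category with the constructors declared below, e.g.
 *
 *     struct ihm_keyword *name = ihm_keyword_str_new(cat, "name");
 *     struct ihm_keyword *count = ihm_keyword_int_new(cat, "count");
 *
 * and then, from the category callback, check in_file / omitted / unknown before using the
 * data union.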
*/ struct ihm_keyword { char *name; /* Type of value (string, int, float) */ ihm_keyword_type type; /* Last value read from the file */ union { char *str; int ival; double fval; bool bval; } data; /* If true, we own the memory for data */ bool own_data; /* true iff this keyword is in the file (not necessarily with a value) */ bool in_file; /* true iff the keyword is in the file but the value is omitted ('.') */ bool omitted; /* true iff the keyword is in the file but the value is unknown ('?') */ bool unknown; }; #endif /* Opaque types */ struct ihm_reader; struct ihm_category; /* Callback for mmCIF/BinaryCIF category data. Should set err on failure */ typedef void (*ihm_category_callback)(struct ihm_reader *reader, int linenum, void *data, struct ihm_error **err); /* Callback for unknown mmCIF/BinaryCIF categories. Should set err on failure */ typedef void (*ihm_unknown_category_callback)(struct ihm_reader *reader, const char *category, int linenum, void *data, struct ihm_error **err); /* Callback for unknown mmCIF/BinaryCIF keywords. Should set err on failure */ typedef void (*ihm_unknown_keyword_callback)(struct ihm_reader *reader, const char *category, const char *keyword, int linenum, void *data, struct ihm_error **err); /* Callback to free arbitrary data */ typedef void (*ihm_free_callback)(void *data); /* Make a new struct ihm_category and add it to the reader. */ struct ihm_category *ihm_category_new(struct ihm_reader *reader, const char *name, ihm_category_callback data_callback, ihm_category_callback end_frame_callback, ihm_category_callback finalize_callback, void *data, ihm_free_callback free_func); /* Set a callback for unknown categories. The given callback is called whenever a category is encountered in the file that is not handled (by ihm_category_new). */ void ihm_reader_unknown_category_callback_set(struct ihm_reader *reader, ihm_unknown_category_callback callback, void *data, ihm_free_callback free_func); /* Set a callback for unknown keywords. The given callback is called whenever a keyword is encountered in the file that is not handled (within a category that is handled by ihm_category_new). */ void ihm_reader_unknown_keyword_callback_set(struct ihm_reader *reader, ihm_unknown_keyword_callback callback, void *data, ihm_free_callback free_func); /* Remove all categories from the reader. This also removes any unknown category or keyword callbacks. */ void ihm_reader_remove_all_categories(struct ihm_reader *reader); /* Add a new integer ihm_keyword to a category. */ struct ihm_keyword *ihm_keyword_int_new(struct ihm_category *category, const char *name); /* Add a new floating-point ihm_keyword to a category. */ struct ihm_keyword *ihm_keyword_float_new(struct ihm_category *category, const char *name); /* Add a new boolean ihm_keyword to a category. */ struct ihm_keyword *ihm_keyword_bool_new(struct ihm_category *category, const char *name); /* Add a new string ihm_keyword to a category. */ struct ihm_keyword *ihm_keyword_str_new(struct ihm_category *category, const char *name); struct ihm_file; struct ihm_string; /* Read data into the ihm_file buffer. Return the number of bytes read (0 on EOF), or -1 (and sets err) on failure. 
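A minimal callback reading from a stdio FILE (a sketch, not part of the library; the name
   my_read is hypothetical and stdio.h is assumed) could be

     static ssize_t my_read(char *buffer, size_t buffer_len, void *data, struct ihm_error **err)
     {
       size_t n = fread(buffer, 1, buffer_len, (FILE *)data);
       if (n == 0 && ferror((FILE *)data)) {
         ihm_error_set(err, IHM_ERROR_IO, "read failed");
         return -1;
       }
       return (ssize_t)n;
     }

   passed to ihm_file_new(my_read, fp, NULL) below.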
*/ typedef ssize_t (*ihm_file_read_callback)(char *buffer, size_t buffer_len, void *data, struct ihm_error **err); /* Track a file (or filelike object) that the data is read from */ struct ihm_file { /* Raw data read from the file */ struct ihm_string *buffer; /* Offset into buffer of the start of the current line */ size_t line_start; /* Offset into buffer of the start of the next line, or line_start if the line hasn't been read yet */ size_t next_line_start; /* Callback function to read more data into buffer */ ihm_file_read_callback read_callback; /* Data to pass to callback function */ void *data; /* Function to free callback_data (or NULL) */ ihm_free_callback free_func; }; /* Make a new ihm_file, used to handle reading data from a file. `read_callback` is used to read a chunk of data from the file; `data` is arbitrary data that is passed to the read callback; `free_func` is used to do any necessary cleanup of `data` when the ihm_file structure is freed. */ struct ihm_file *ihm_file_new(ihm_file_read_callback read_callback, void *data, ihm_free_callback free_func); /* Make a new ihm_file that will read data from the given file descriptor */ struct ihm_file *ihm_file_new_from_fd(int fd); /* Make a new struct ihm_reader. To read an mmCIF file, set binary=false; to read BinaryCIF, set binary=true. */ struct ihm_reader *ihm_reader_new(struct ihm_file *fh, bool binary); /* Free memory used by a struct ihm_reader. Note that this does not close the underlying file descriptor or object that is wrapped by ihm_file. */ void ihm_reader_free(struct ihm_reader *reader); /* Read a data block from an mmCIF or BinaryCIF file. *more_data is set true iff more data blocks are available after this one. Return false and set err on error. */ bool ihm_read_file(struct ihm_reader *reader, bool *more_data, struct ihm_error **err); #ifdef __cplusplus } #endif #endif /* IHM_FORMAT_H */ python-ihm-2.7/src/ihm_format.i000066400000000000000000000470631503573337200165660ustar00rootroot00000000000000%module _format %{ #include #include "ihm_format.h" %} /* Get simple return values */ %apply bool *OUTPUT { bool * }; %ignore ihm_keyword; %ignore ihm_error_set; /* Convert ihm_error to a Python exception */ %init { file_format_error = PyErr_NewException("_format.FileFormatError", NULL, NULL); Py_INCREF(file_format_error); PyModule_AddObject(m, "FileFormatError", file_format_error); } %{ static PyObject *file_format_error; static void handle_error(struct ihm_error *err) { PyObject *py_err_type = PyExc_IOError; switch(err->code) { case IHM_ERROR_FILE_FORMAT: py_err_type = file_format_error; break; case IHM_ERROR_VALUE: py_err_type = PyExc_ValueError; break; case IHM_ERROR_IO: py_err_type = PyExc_IOError; break; } /* Don't overwrite a Python exception already raised (e.g. by a callback) */ if (!PyErr_Occurred()) { PyErr_SetString(py_err_type, err->msg); } ihm_error_free(err); } %} %typemap(in, numinputs=0) struct ihm_error **err (struct ihm_error *temp) { temp = NULL; $1 = &temp; } %typemap(argout) struct ihm_error **err { if (*$1) { handle_error(*$1); Py_DECREF(resultobj); SWIG_fail; } } %{ /* Read data from a Python filelike object, in text mode */ static ssize_t pyfile_text_read_callback(char *buffer, size_t buffer_len, void *data, struct ihm_error **err) { Py_ssize_t read_len; char *read_str; static char fmt[] = "(n)"; PyObject *bytes = NULL; PyObject *read_method = data; /* Note that we can read up to `buffer_len` *bytes*, but Python's read() can return Unicode *characters*. 
One Unicode character can require up to 4 bytes to be represented with UTF-8, so limit the read accordingly. (mmCIF files are supposed to be ASCII but we should be liberal in what we accept.) */ PyObject *result = PyObject_CallFunction(read_method, fmt, buffer_len / 4); if (!result) { ihm_error_set(err, IHM_ERROR_VALUE, "read failed"); return -1; } if (PyUnicode_Check(result)) { /* This returns const char * on Python 3.7 or later */ if (!(read_str = (char *)PyUnicode_AsUTF8AndSize(result, &read_len))) { ihm_error_set(err, IHM_ERROR_VALUE, "string creation failed"); Py_DECREF(result); return -1; } } else if (PyBytes_Check(result)) { char *bytes_buffer; Py_ssize_t bytes_length; bytes = result; /* Convert to Unicode. Since we don't know the encoding, choose something permissive (latin-1). mmCIF files are supposed to be ASCII anyway. */ if (PyBytes_AsStringAndSize(bytes, &bytes_buffer, &bytes_length) < 0) { Py_DECREF(bytes); ihm_error_set(err, IHM_ERROR_VALUE, "PyBytes_AsStringAndSize failed"); return -1; } result = PyUnicode_DecodeLatin1(bytes_buffer, bytes_length, NULL); Py_DECREF(bytes); if (!result) { ihm_error_set(err, IHM_ERROR_VALUE, "latin1 string creation failed"); return -1; } /* This returns const char * on Python 3.7 or later */ if (!(read_str = (char *)PyUnicode_AsUTF8AndSize(result, &read_len))) { ihm_error_set(err, IHM_ERROR_VALUE, "string creation failed"); Py_DECREF(result); return -1; } } else { ihm_error_set(err, IHM_ERROR_VALUE, "read method should return a string"); Py_DECREF(result); return -1; } if (read_len > buffer_len) { ihm_error_set(err, IHM_ERROR_VALUE, "Python read method returned too many bytes"); Py_DECREF(result); return -1; } memcpy(buffer, read_str, read_len); Py_DECREF(result); return read_len; } /* Read data from a Python filelike object, in binary mode */ static ssize_t pyfile_binary_read_callback(char *buffer, size_t buffer_len, void *data, struct ihm_error **err) { Py_ssize_t read_len; char *read_str; static char fmt[] = "(n)"; PyObject *read_method = data; PyObject *result = PyObject_CallFunction(read_method, fmt, buffer_len); if (!result) { ihm_error_set(err, IHM_ERROR_VALUE, "read failed"); return -1; } if (PyBytes_Check(result)) { if (PyBytes_AsStringAndSize(result, &read_str, &read_len) < 0) { ihm_error_set(err, IHM_ERROR_VALUE, "PyBytes_AsStringAndSize failed"); return -1; } } else { ihm_error_set(err, IHM_ERROR_VALUE, "read method should return bytes"); Py_DECREF(result); return -1; } if (read_len > buffer_len) { ihm_error_set(err, IHM_ERROR_VALUE, "Python read method returned too many bytes"); Py_DECREF(result); return -1; } memcpy(buffer, read_str, read_len); Py_DECREF(result); return read_len; } /* Read data from a Python filelike object directly into the buffer */ static ssize_t pyfile_binary_readinto_callback( char *buffer, size_t buffer_len, void *data, struct ihm_error **err) { PyObject *readinto_method = data; PyObject *memview, *result; Py_ssize_t read_len; memview = PyMemoryView_FromMemory(buffer, buffer_len, PyBUF_WRITE); result = PyObject_CallFunctionObjArgs(readinto_method, memview, NULL); Py_DECREF(memview); if (!result) { ihm_error_set(err, IHM_ERROR_VALUE, "Python readinto failed"); return -1; } if (!PyLong_Check(result)) { ihm_error_set(err, IHM_ERROR_VALUE, "Python readinto did not return int"); Py_DECREF(result); return -1; } if ((read_len = PyLong_AsSsize_t(result)) == -1 && PyErr_Occurred()) { ihm_error_set(err, IHM_ERROR_VALUE, "Python readinto bad return"); Py_DECREF(result); return -1; } Py_DECREF(result); if (read_len > 
buffer_len) { ihm_error_set(err, IHM_ERROR_VALUE, "Python readinto method returned too many bytes"); return -1; } else { return read_len; } } static void pyfile_free(void *data) { PyObject *read_method = data; Py_DECREF(read_method); } static PyObject *get_optional_attr_str(PyObject *obj, const char *attr) { PyObject *method = PyObject_GetAttrString(obj, attr); if (!method) { PyErr_Clear(); } return method; } %} %inline %{ /* Wrap a Python file object as an ihm_file */ struct ihm_file *ihm_file_new_from_python(PyObject *pyfile, bool binary, struct ihm_error **err) { PyObject *read_method; ihm_file_read_callback read_callback; read_callback = binary ? pyfile_binary_read_callback : pyfile_text_read_callback; /* In binary mode, we can avoid a copy if the object supports readinto() */ if (binary && (read_method = get_optional_attr_str(pyfile, "readinto"))) { read_callback = pyfile_binary_readinto_callback; } else { /* Look for a read() method and use that to read data */ if (!(read_method = PyObject_GetAttrString(pyfile, "read"))) { ihm_error_set(err, IHM_ERROR_VALUE, "no read method"); return NULL; } } return ihm_file_new(read_callback, read_method, pyfile_free); } %} %{ struct category_handler_data { /* The Python callable object that is given the data */ PyObject *callable; /* Python value used for keywords not in the file (usually None) */ PyObject *not_in_file; /* Python value used for keywords marked as omitted, '.' (usually None) */ PyObject *omitted; /* Python value used for keywords marked as unknown, '?' (usually "?") */ PyObject *unknown; /* The number of keywords in the category that we extract from the file */ int num_keywords; /* Array of the keywords */ struct ihm_keyword **keywords; }; static void category_handler_data_free(void *data) { struct category_handler_data *hd = data; Py_DECREF(hd->callable); Py_XDECREF(hd->not_in_file); Py_XDECREF(hd->omitted); Py_XDECREF(hd->unknown); /* Don't need to free each hd->keywords[i] as the ihm_reader owns these pointers */ free(hd->keywords); free(hd); } /* Called for each category (or loop construct data line) with data */ static void handle_category_data(struct ihm_reader *reader, int linenum, void *data, struct ihm_error **err) { int i; struct category_handler_data *hd = data; struct ihm_keyword **keys; PyObject *ret, *tuple; /* make a tuple of the data */ tuple = PyTuple_New(hd->num_keywords); if (!tuple) { ihm_error_set(err, IHM_ERROR_VALUE, "tuple creation failed"); return; } for (i = 0, keys = hd->keywords; i < hd->num_keywords; ++i, ++keys) { PyObject *val; if (!(*keys)->in_file) { val = hd->not_in_file; Py_INCREF(val); } else if ((*keys)->omitted) { val = hd->omitted; Py_INCREF(val); } else if ((*keys)->unknown) { val = hd->unknown; Py_INCREF(val); } else { switch((*keys)->type) { case IHM_STRING: val = PyUnicode_FromString((*keys)->data.str); if (!val) { ihm_error_set(err, IHM_ERROR_VALUE, "string creation failed"); Py_DECREF(tuple); return; } break; case IHM_INT: val = PyLong_FromLong((*keys)->data.ival); break; case IHM_FLOAT: val = PyFloat_FromDouble((*keys)->data.fval); break; case IHM_BOOL: val = (*keys)->data.bval ? 
Py_True : Py_False; Py_INCREF(val); break; } } /* Steals ref to val */ PyTuple_SET_ITEM(tuple, i, val); } /* pass the data to Python */ ret = PyObject_CallObject(hd->callable, tuple); Py_DECREF(tuple); if (ret) { Py_DECREF(ret); /* discard return value */ } else { /* Pass Python exception back to the original caller */ ihm_error_set(err, IHM_ERROR_VALUE, "Python error"); } } /* Called at the end of each save frame for each category */ static void end_frame_category(struct ihm_reader *reader, int linenum, void *data, struct ihm_error **err) { PyObject *ret; struct category_handler_data *hd = data; ret = PyObject_CallMethod(hd->callable, "end_save_frame", NULL); if (ret) { Py_DECREF(ret); /* discard return value */ } else { /* Pass Python exception back to the original caller */ ihm_error_set(err, IHM_ERROR_VALUE, "Python error"); } } static struct category_handler_data *do_add_handler( struct ihm_reader *reader, char *name, PyObject *keywords, PyObject *int_keywords, PyObject *float_keywords, PyObject *bool_keywords, PyObject *callable, ihm_category_callback data_callback, ihm_category_callback end_frame_callback, ihm_category_callback finalize_callback, struct ihm_error **err) { Py_ssize_t seqlen, i; struct ihm_category *category; struct category_handler_data *hd; if (!PySequence_Check(keywords)) { ihm_error_set(err, IHM_ERROR_VALUE, "'keywords' should be a sequence"); return NULL; } if (!PyAnySet_Check(int_keywords)) { ihm_error_set(err, IHM_ERROR_VALUE, "'int_keywords' should be a set"); return NULL; } if (!PyAnySet_Check(float_keywords)) { ihm_error_set(err, IHM_ERROR_VALUE, "'float_keywords' should be a set"); return NULL; } if (!PyAnySet_Check(bool_keywords)) { ihm_error_set(err, IHM_ERROR_VALUE, "'bool_keywords' should be a set"); return NULL; } if (!PyCallable_Check(callable)) { ihm_error_set(err, IHM_ERROR_VALUE, "'callable' should be a callable object"); return NULL; } seqlen = PySequence_Length(keywords); hd = malloc(sizeof(struct category_handler_data)); Py_INCREF(callable); hd->callable = callable; hd->not_in_file = NULL; hd->omitted = NULL; hd->unknown = NULL; hd->num_keywords = seqlen; hd->keywords = malloc(sizeof(struct ihm_keyword *) * seqlen); category = ihm_category_new(reader, name, data_callback, end_frame_callback, finalize_callback, hd, category_handler_data_free); if (!(hd->not_in_file = PyObject_GetAttrString(callable, "not_in_file")) || !(hd->omitted = PyObject_GetAttrString(callable, "omitted")) || !(hd->unknown = PyObject_GetAttrString(callable, "unknown"))) { ihm_error_set(err, IHM_ERROR_VALUE, "missing attribute"); return NULL; } for (i = 0; i < seqlen; ++i) { const char *key_name; PyObject *o = PySequence_GetItem(keywords, i); if (PyUnicode_Check(o)) { key_name = PyUnicode_AsUTF8(o); if (PySet_Contains(int_keywords, o) == 1) { hd->keywords[i] = ihm_keyword_int_new(category, key_name); } else if (PySet_Contains(float_keywords, o) == 1) { hd->keywords[i] = ihm_keyword_float_new(category, key_name); } else if (PySet_Contains(bool_keywords, o) == 1) { hd->keywords[i] = ihm_keyword_bool_new(category, key_name); } else { hd->keywords[i] = ihm_keyword_str_new(category, key_name); } Py_DECREF(o); } else { Py_XDECREF(o); ihm_error_set(err, IHM_ERROR_VALUE, "keywords[%ld] should be a string", i); return NULL; } } return hd; } /* Pass unknown category info to a Python callable */ static void unknown_category_python(struct ihm_reader *reader, const char *category, int linenum, void *data, struct ihm_error **err) { static char fmt[] = "(si)"; PyObject *callable = data; 
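  /* Invoke the Python unknown-category handler as callable(category, linenum);
     a NULL result means the callable raised an exception, which is reported
     below via ihm_error_set() */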
PyObject *result = PyObject_CallFunction(callable, fmt, category, linenum); if (!result) { ihm_error_set(err, IHM_ERROR_VALUE, "Python error"); } else { Py_DECREF(result); } } /* Pass unknown keyword info to a Python callable */ static void unknown_keyword_python(struct ihm_reader *reader, const char *category, const char *keyword, int linenum, void *data, struct ihm_error **err) { static char fmt[] = "(ssi)"; PyObject *callable = data; PyObject *result = PyObject_CallFunction(callable, fmt, category, keyword, linenum); if (!result) { ihm_error_set(err, IHM_ERROR_VALUE, "Python error"); } else { Py_DECREF(result); } } /* Treat data as a Python object, and decrease its refcount */ static void free_python_callable(void *data) { PyObject *obj = data; Py_DECREF(obj); } %} %inline %{ /* Add a handler for unknown categories */ void add_unknown_category_handler(struct ihm_reader *reader, PyObject *callable, struct ihm_error **err) { if (!PyCallable_Check(callable)) { ihm_error_set(err, IHM_ERROR_VALUE, "'callable' should be a callable object"); return; } Py_INCREF(callable); ihm_reader_unknown_category_callback_set(reader, unknown_category_python, callable, free_python_callable); } /* Add a handler for unknown keywords */ void add_unknown_keyword_handler(struct ihm_reader *reader, PyObject *callable, struct ihm_error **err) { if (!PyCallable_Check(callable)) { ihm_error_set(err, IHM_ERROR_VALUE, "'callable' should be a callable object"); return; } Py_INCREF(callable); ihm_reader_unknown_keyword_callback_set(reader, unknown_keyword_python, callable, free_python_callable); } /* Add a generic category handler which collects all specified keywords for the given category and passes them to a Python callable */ void add_category_handler(struct ihm_reader *reader, char *name, PyObject *keywords, PyObject *int_keywords, PyObject *float_keywords, PyObject *bool_keywords, PyObject *callable, struct ihm_error **err) { do_add_handler(reader, name, keywords, int_keywords, float_keywords, bool_keywords, callable, handle_category_data, end_frame_category, NULL, err); } %} %{ /* Called for each _pdbx_poly_seq_scheme line */ static void handle_poly_seq_scheme_data(struct ihm_reader *reader, int linenum, void *data, struct ihm_error **err) { int i, seq_id, pdb_seq_num, auth_seq_num; char *seq_id_endptr, *pdb_seq_num_endptr, *auth_seq_num_endptr; struct category_handler_data *hd = data; struct ihm_keyword **keys; /* If both asym_id (1st keyword) and pdb_strand_id (6th keyword) are present, but different, call the Python handler */ if (hd->keywords[0]->in_file && hd->keywords[5]->in_file && !hd->keywords[0]->omitted && !hd->keywords[5]->omitted && !hd->keywords[0]->unknown && !hd->keywords[5]->unknown && strcmp(hd->keywords[0]->data.str, hd->keywords[5]->data.str) != 0) { handle_category_data(reader, linenum, data, err); return; } for (i = 0, keys = hd->keywords; i < 4; ++i, ++keys) { /* Call Python handler if any of asym_id, seq_id, pdb_seq_num, or auth_seq_num are missing */ if (!(*keys)->in_file || (*keys)->omitted || (*keys)->unknown) { handle_category_data(reader, linenum, data, err); return; } } /* If seq_id (2nd keyword), pdb_seq_num (3rd keyword), and auth_seq_num (4th keyword) are identical integers, and pdb_ins_code (5th keyword) is blank or missing, nothing needs to be done */ seq_id = strtol(hd->keywords[1]->data.str, &seq_id_endptr, 10); pdb_seq_num = strtol(hd->keywords[2]->data.str, &pdb_seq_num_endptr, 10); auth_seq_num = strtol(hd->keywords[3]->data.str, &auth_seq_num_endptr, 10); if 
(!*seq_id_endptr && !*pdb_seq_num_endptr && !*auth_seq_num_endptr
      && seq_id == pdb_seq_num && seq_id == auth_seq_num
      && (!hd->keywords[4]->in_file || hd->keywords[4]->omitted
          || hd->keywords[4]->unknown)) {
    return;
  } else {
    /* Otherwise, call the normal handler */
    handle_category_data(reader, linenum, data, err);
  }
}
%}

%inline %{
/* Add a handler specifically for the _pdbx_poly_seq_scheme table.
   This speeds up processing by skipping the callback to Python in the common
   case where seq_id==pdb_seq_num==auth_seq_num, asym_id==pdb_strand_id,
   and pdb_ins_code is blank */
void add_poly_seq_scheme_handler(struct ihm_reader *reader, char *name,
                                 PyObject *keywords, PyObject *int_keywords,
                                 PyObject *float_keywords,
                                 PyObject *bool_keywords, PyObject *callable,
                                 struct ihm_error **err)
{
  struct category_handler_data *hd;
  hd = do_add_handler(reader, name, keywords, int_keywords, float_keywords,
                      bool_keywords, callable, handle_poly_seq_scheme_data,
                      NULL, NULL, err);
  if (hd) {
    /* Make sure the Python handler and the C handler agree on the order of
       the keywords */
    assert(hd->num_keywords >= 6);
    assert(strcmp(hd->keywords[1]->name, "seq_id") == 0);
    assert(strcmp(hd->keywords[2]->name, "pdb_seq_num") == 0);
    assert(strcmp(hd->keywords[3]->name, "auth_seq_num") == 0);
    assert(strcmp(hd->keywords[4]->name, "pdb_ins_code") == 0);
    assert(strcmp(hd->keywords[5]->name, "pdb_strand_id") == 0);
  }
}

/* Test function so we can make sure finalize callbacks work */
void _test_finalize_callback(struct ihm_reader *reader, char *name,
                             PyObject *keywords, PyObject *int_keywords,
                             PyObject *float_keywords,
                             PyObject *bool_keywords, PyObject *callable,
                             struct ihm_error **err)
{
  do_add_handler(reader, name, keywords, int_keywords, float_keywords,
                 bool_keywords, callable, handle_category_data, NULL,
                 handle_category_data, err);
}
%}

%include "ihm_format.h"
python-ihm-2.7/test/000077500000000000000000000000001503573337200144455ustar00rootroot00000000000000
python-ihm-2.7/test/input/000077500000000000000000000000001503573337200156045ustar00rootroot00000000000000
python-ihm-2.7/test/input/15133C.pdb000066400000000000000000000001171503573337200170510ustar00rootroot00000000000000
ATOM      2  CA  TYR A   7      -8.986  11.688  -5.817  1.00 91.82           C
python-ihm-2.7/test/input/6ep0.bcif.gz000066400000000000000000003253051503573337200176310ustar00rootroot00000000000000
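To round off the reader sources above, here is a minimal usage sketch (not part of the upstream files) showing how the C API declared in ihm_format.h and exercised by the SWIG glue fits together: a custom ihm_file_read_callback feeds an in-memory mmCIF string to an ihm_reader, a category handler built with ihm_category_new() and ihm_keyword_str_new() extracts _struct.title, and ihm_read_file() is called once per data block. The callback names, the _struct/title keyword choice and the tiny mmCIF string are illustrative only; passing NULL for the unused callbacks, handler data and destructors, and read(2)-style return conventions for the read callback, are assumptions rather than documented guarantees.

#include <stdio.h>
#include <string.h>
#include <stdbool.h>
#include <sys/types.h>
#include "ihm_format.h"

/* Example mmCIF input held in memory, plus a cursor into it */
static const char *cif_text = "data_example\n_struct.title 'A small test'\n";
static size_t cif_pos = 0;

/* ihm_file_read_callback: copy up to buffer_len bytes; returning 0 is assumed
   to signal end-of-file, -1 (with *err set) an error */
static ssize_t string_read_callback(char *buffer, size_t buffer_len,
                                    void *data, struct ihm_error **err)
{
  size_t left = strlen(cif_text) - cif_pos;
  size_t n = left < buffer_len ? left : buffer_len;
  memcpy(buffer, cif_text + cif_pos, n);
  cif_pos += n;
  return (ssize_t)n;
}

/* Keyword handle that the reader fills in for each data row */
static struct ihm_keyword *title_kw;

/* Category data callback, same signature as handle_category_data() above */
static void struct_handler(struct ihm_reader *reader, int linenum,
                           void *data, struct ihm_error **err)
{
  if (title_kw->in_file && !title_kw->omitted && !title_kw->unknown) {
    printf("_struct.title = %s\n", title_kw->data.str);
  }
}

int main(void)
{
  struct ihm_error *err = NULL;
  bool more_data;
  struct ihm_file *fh = ihm_file_new(string_read_callback, NULL, NULL);
  struct ihm_reader *reader = ihm_reader_new(fh, false);  /* false: mmCIF text */
  struct ihm_category *category = ihm_category_new(reader, "_struct",
                                                   struct_handler, NULL, NULL,
                                                   NULL, NULL);
  title_kw = ihm_keyword_str_new(category, "title");

  /* Read one data block per call until no more blocks remain */
  do {
    if (!ihm_read_file(reader, &more_data, &err)) {
      fprintf(stderr, "read failed: %s\n", err->msg);
      ihm_error_free(err);
      break;
    }
  } while (more_data);

  ihm_reader_free(reader);
  return 0;
}

The SWIG layer above takes the same route: ihm_file_new_from_python() plugs pyfile_text_read_callback, pyfile_binary_read_callback or the zero-copy pyfile_binary_readinto_callback into ihm_file_new(), and do_add_handler() builds the per-category keyword table that handle_category_data() later turns into a Python tuple.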
íNx|ûìò\ÆjBoòþ‰š@èä¹(„Æá-&k¿—hçÍkr®ŒÒy6ZÛHा(ÈÆ8GG‰;ݚĖs¨[ÏÝI—ÆéäwãäÅ£5±'uP—õÓ§ê²»ð³c5ù1÷5†¢¨peÞùf;JåkøbñÆn߯OÏ›8Q—91o‚úµ×:Õ3!Àý~»[Ô£p«IÍÁ‡&À…Æjaêønå£N pr¦¤Átd˜¿më—ðs5ñ:çr‹×ˆß'¿©¼\ÜDÜ-çüëp¥”áÖâ½ò+Æ©¬»©ê@ n´Àz]'˜,œWþÓŽÌèÚ =ž‡Nê˹Ðò Nì#Þ ?Ã~]†Z…wÖÙh«:àÃ?Lñü¦ª#ÖNù覙LâÿÓk¾øóæàÂ5ȺÂNe+r8 qåäXá¯9gz—œ â¹çäÍÔ¿Õ©¬“†õHò„WM2f¢µõLîg¬&%Ÿ'Ïê‚“vÁOÆj¢XÖóŽãà ]ÖûçëŽwc•盧s¢xDù8ë„Ô àH*¬gÑ?8!p‚ý33ÁDíïðœչ`g±÷G ù” ÖýDñ&ò(ÖSÆÛàã5Ñ1ÏyœuÚæ_:Ç'.êdÒå>Û|ÔIÁÉÆÀ#F©?G<}‘õ%ðD»©»5±3ŠºnMZsrVÎçEò—Yâëxg·&w¯ºÇ‰ YÏ­£NÎÑy:ºð£ðcèFƨ¿F¹Ë¼§QðµKð¦—9'.+v+oŽlþc´pZãmîg´òýണࣕg¤¾­ç›¸Õ­I7Nü‰ñ"øÄÒ.s^^&.]ÿº€ƒò‹è½FY]ÖçâîEÎÓQÎÓKà™—Éó.Ác²G+ïŽ.¢Ç9´\N~áw§—xëyf?^†ß½H|ꂃ]æ½¾X5ãDr‰|ç"N:ìŽéêQÖÙ(uCû|¢ã¹ r‰ŽÏQ:ýF‰#—kbvöó(uü%Îñ‹õ{œÜ—zÏŸ»Îs‰xÖ¾'u:™\u‰x e±8þyÄ"ùãJí«ð£ð­ÝÊû\N´ËÏMU–“xƒSŽ‚ë·ñм-¼É8z%õ-ãðQ£Äóњ̘u;V“wã¤g½2 ?’¸K~|™së ÿ±šÐn~™sì2×1J}ÔNw²»8Eê=y×né!‚LvþV?À:ÉeÜ»èœú³Í¯Üg\Vç¥óRòÁž÷TuðëédÙÄÛnásž'q‚ŸcÿuÉ Ç8ø<ƾUÿÑC:Võ‹õ]øÚQöûxM\1ïÉ{o'Ü9©MÝ‚x¥xmê2ñz'ø-ÁÃÌ—â\¼Ó d:Š8Á%:À‰ÂÙœ¼ä„vqÐÜÏøàQÖMáŸÚûH|o•²^¦áoÅ­'ˆ›S¥3Ì9ß:­Ù™§£DÞË"yìJéŽÔé<£s‹8uðƒ)øÒ9Îïeò£UœØ¦à5Zœ^ÞÄu¥ó`tQ­#b“xÛvêÔÜ{ …ùbu¸[ˆ÷™*ǘä vìÍSgÍóþVªŽI¾3OÙÿ뾚|nªÅKSw¨sì~ü}¢pžð©½ÒÔDÐ~@ž/ä¬ã-îc¥&Û‡ÜÇ©O'*ow2jêù^9åy[oÑAÐÖqæ Ù-N-Þ þ#ëkÓ!t½÷(º.ˆG¹ß.ï«[†n—ú®0ï§GáAGy_£èmƨKÆÀG9gGÑÍtáK»•79!;¤ê&ÌS{Ô£œC£è—Æ çÍ}Ò©<ʾëÒ‡à{%Ÿl'D‡‡è‚×t‰Sc¥—S>ü2}—Ñã²OG wóüÿÎç_æ<-sðˆqx˜‡R7’|ÖxÚ«ýŸ<ʼ²Õw‰‡ÉOZ7Ïœ@ê¤Cõ&ѵM‚ŽŽ™87^ñ,ýã<×ôÖ“ðÛ“œçc5Y;ϱåÑû£»§³m¼ò†<ó‰ÉšÐ«ã–|wøú¾©8Ô˜¼oŠz{Š<¶G^:C<ÕYg©œR²Î'Ëi-:Íet3÷SǶºX¯×Nìð ìßpïyÎéø›Öá)yî2¸¶xô2üÌ<øÒ×9‡|Žý:Ïz_dß.ƒÃ/U‡~ø«éâýrÞ¶NÁmÛþ ',æ½OWǽ8˜vÉ/g‰Ë+LbØ&Nluúr‘á-tÔ-|qš¸§ÞºWzóe׃“¶@½Ò çëléRçõÊyS祬ŸÙr€Ìþœnñ~€oæt„H~2Íy: ;Ã{Ÿ¡k¶&©&®NU_‰úó<'+˜,Gƒè 'ë>òþ¦ÊéSü2õá ñL}ÔL9 ¥3°ígqÒ¡ýÉÃ&ËIÛu÷µXzùð951Qý}t1u.Òù>†.@þ¯îÖ‡ïÕ$WyóäOÓå´—j}ÇŽ^Å'÷mâãtéŒãÀÓåzºÔ5côÛŒG‹µ-õâx=¯kyÉqöÝxéäÔaEGÑêgÒ®îf<·í7¯V¼Q:®ô't«^H|g¿vK‡™uÙ-Þ,õ‚|ÂXáh©_Æ ×K¯óV¾M¼.u}½þX­÷qê·±ÒÁ¦d¼âzâÛ,øëbñ+ÙË•'„‡ƒolûÿäwÔáÃÇŒ.üi‚ø0Qy²º(ùËäƒ/–ŽHü2ñðÅêwu¿˜ÈåÜž(AñÙ¬£néâÄqÕ£å}ÌQ߯¢+]†GY¡.ëR?t9OZý«:WóãàŒcè§ÅU/ñž.°O/ó\æ¹\â|»È¹y©ðÍø\Dq‘>„.û¿‹£çex¢‰âKƒSRtá»Fñ)è–ÞÆ~Uû=ÿ¼¯Ô_—ÑùŒ±~ä F‰/£…ë'NuÉ‹F«LÞ3 î`¿ð(ñè2qo´pòÔ­N8ûê2z–K¥Sÿè: np¹žKòÁËàYêzÅ¢ã衟ž@0….ãñäEêÊ œã/‚GÁ³^äy¾@ýy‘¼ýÎóKÄ© ð/ò¼.’ß]†/î‚'t‰“ê ä¹/—n(}ØêéF ‡/8ZçNòÂná©áç&ÈsÇ«ž~x¬Ö}œI»¥N~5Î:îŸoŒ|ƒ8yð’V|t‚¾–nå!9w»è›º¥ŒÎ¨[}ÑOõª/Á¼!-8wK'ŸõÕê;í?P·`Ÿ‘øŸy¾:¦ô¬°–Ëá8i㼿qòˆqîo‚úÎÉãàjãàŸ=ú'ªÏ*ýå3<°Wz£ø­L ×œä÷·xRöõ(ŸÛêîÕ'¨ËK¼-ý“ç§:V×}ìžçæÁÍ&ÉŸ¤Öö˜—:&}7½š@—ï[d_¯1 `³úmäY’§¶uXúÆ«þÿÞú?ÈóÈC^ï+]GÛ/ï}fµx«“IÄKü\óôSÌV¾—º^'å÷=‰Þzºž“ïӺ܉8‰3­Bp‡¶_ÌÉSöIǘ!¿™®ëUWš¾ºéÒ5&ÞêÃ1Mþ9‹O—“K¦É_Ä+§ÁæjørpƒÖy9uÅ\õ£‡ïwñ:uæTéW“OV?[Îiûß&yo‹Õ‡ntεÉ'ÀÃ{Ä£ø®ý¦“¥·qb‹¾7¹ß‰òÃQܪõW°5}ãð ã•Gß1^yz¾¯Ó꺭óS'þ$h “Çâ€:J9Z¼júÛºè ºè¥»ÄÕ¶ß ziûzÆ*Yg…×…—¿ ÑEg4 9 s…¿ô|ºÈõ¼XúÖø ’—¼ïp¡Ó?žß{‘õ}ºùBý{Öé%Ö¥yôhñé­ëÙê:£3¸P:‰œƒê6.¢oº„îk”~úËÔ‹m~ýKû¼/FÉÛÕÏ–^EÝPöõ,yÛëm…s¯uþ‡ñ|JR—s¤‹Þµ[øsøÃyô¾Ë¥?Q?<übé¨ã_ÑæË®k냜æsPý0éÏ¿Düþ,zöKð—y?cäÝãôsŒÂÏ–N'8C·®/8B˃„'¹Ty¹:ÖèFÑ/^"¸DžÔ¥Žê–î':¡ËœÇ—9Gºàíst]÷¾Hýs™zþréY²nÇèϼ@Þ©ò뜧KGÜüRéwìÓ .3ŠÖzÚ~…ö9D:Úö ö×Q›GgÿŽW`ÜŒR—‰côÃwÑùŽ¡««>¢¬‡1žëùÉXéc‚?të÷x®çŽr®ŽáøÚöyFÇ>ÆónûÞå5³Ï¦àc¦ˆsu}êiÍWœ¤ N;õåý‘Ýê—Mž2Á¾˜(¼É¾ ñæœ#ãè{Å?÷h}€ôMqâ©ý#Ù“…‡ÈûËç: Óü#uü û^ίæøûðÁt ‹Ä¯ižs;¡1ŽèS•¿;9:8é4|ËLù±Ê·:VßàPSè¨[çõèÆVÐÅLpä“Uç[×Ú§¦ß¦ù·~1ÉÛÉ—öIäû&Ë×!¸Èøø&¸çxñ&þGm?˜~#úcèרN)yï ñj¾üô Ðï@]GpÕ1|MÚ~‹L”%Ÿï¢ï­z5Îæ½òEDz,}c䣣uΨ»7Êónõ/á¡'Ê÷Ržð‘3å¯)?€ÖÇ!¸‘º’öüî± Ø­>_q¨œÖ1£¬§Ë¥[²ÿ%û£[8¦úŬ«ÕŠïÁÿœX0ZzgûÅÏÍ# ”übýìd†-âóïg_°/JþÞö™dB€ø±ø¨yÃhñù9ßÔãŽÂ'«3-‹:OûOçöà÷Æ«/0¸à8úš‰âÝôYT¯<~ª|·sÞ­ÂÃm€[osÎN”BßÓøL·¸®j‚?-p®/°¦Èg§Áåf 'H9S“ƒ×8é`¹ôvòFú¢éÓì9b\ Ï´FŸãzù—¤NŸ(^]ßìÛ^ù#È;ç<‡×xÞP\lЏ«ÏóüÃtMÜ ¼B^¼Š^KÝÀ qŽ<|ž¼hüpž _Kð³N†[¦¯ÆÉ\+¬ÿ5øœÂÁ’LU>`>¼ ÅmÔ9$?ŸÁ?nªt9¯gÁìÇžª‰÷âTêõÏRÿ\qs©|¹sî/³?Ú‰¨NŽLž½Ìs]D_ºNÜ_£ÎXƒï˜'¿šC5¯8‡q–üw=ö\éI²–8æÊ?/×3Ϲ5KÝ<®£®q±ú—÷¤¸\¾o‰û‹ä'‹ècÚ‰}Á;—+ïJ¸Îd t; ðóÔ}óħy&G9w¥&Á¤nÔ÷®|ѦlâÈ ùÚ}/ÌëÄ·Ò_×-^9Ÿcðeô£Ä½Ëàt£œ?—ÁqÆè#çÜ¿\ýæö›&þ—RpŽ.ë¬ô¹>÷à{ã¥qþCοÑêßÍùÙe}ŸF 
5_Sÿ”Ïkq]ý&ć£ºHÛţ˾¸ˆÌ(¢§ÊßD¿q}ì§Ó‡Ú>yû…ìÃʹ-_køhùȘ׆Çîâc7Zy„xMê'ûÁGk}Ê“Š;¦Oz´ð\qàœcÝòiÒÍ|_½[ò±ypœ.|ÑrùõWÔ Ç·`²æ'Ø/-ï U| ÆXOãå“þý3NþÙ+Þ%<Ó¸þ$ý¡ÓäÙ+5±=çâJñ}ꔲ/­÷§Ê×[ÝnÖûT‰Ó<ÿÙšt\t–óc™óy…óg¥â³<œ/“?Ͼ ¿gøÏùòÕÏ÷÷*ÞG÷aÿïxÓRM8ËûXã÷¶“Ó®›TôÕßõ/Þø'œTEÒsM~Ó³M*¾çš lÏ7Q®² þd“'÷d“î|ãÏçä{¾ÉNy–ï{¦IÄþt“ é³M2¥Ï6©X?ÛäølFõÙ&³ÇžiRi=ÓäÍ=ׄQyº‰£ú3MNúgšœ(Ï6Y O7É,Ÿ®Ï Röl¤éñ&™èù&ˆÅH„ót“L®iÂ~²‰‚æ™&ˆõsM2©ç› O5Ak²ƒÏ599Ï7QÖ=ßä„ût“JÿÓM˜œÏ6AdžoRá=ß„üT“ÂO5™IùL¥ø'›0mÏ4AžlÒyúé&'þgš Пn‚ì~ª‰òôsM2ƒx¾/68|4Q=ß|Žëùd“ðt“ޤg› 쟬û²ô™&ßSM”-7Q’žiÒ¹º 3ûŸ{®IGâÙ&™ï™&™ñÙ&'ÙHD}° 2Ä}œh²Sš&Ý¡&Håþ&øá&Êõ‘&ê™& Õ™&™Ëé&êñ&ˆðãM”Hç›d#uýÉ0Î5Q¨Ÿmržn²ã†ê=ç¤~Œ¿¬I†üD†|°IF¬I:Ôd‡îoâTy¸I¦òH“Ž»› X6qT?ÔD‘¿?ؤ“aë}OÆéž&Œèûš ºïm‚äÿÌ¿AqûQ2šãM"è©&þ‰&ˆÝÑ&†›d²§›DèÓM"Õ‰&•Òñ&'ÀÁ&þ&•ô‰&ƒ'X—'šœÀMFòH]2š B÷5‰Üw59Aïl‚¾—uù/š0j¿‰òæ;Yÿ5aÀà÷>ÔDuoìÑ&HÖ#M"ðÝM2Ä;›tòÜÑ ¾­IFvG“ `/Þ×D¡³§~_öõaâßaâÚQÖÑž&ÌïCMÉ{št¤ÞIy”}òp'’;›(¥>Ò„áüP“Œï½M2÷zß\÷·ñõ—Q"|Œáã<§Ôçz“ÎÜÛšTÊcÞÞ„Y½­žG˜‡Ûš øo’á}¸ R÷¾&wÔ:ríNÖËM%n‚”}° ÓðÞ&ü{› 7h’é¼»I†ýÎ&Ž&ïlRq¾«É:ýÖõ¯5Qhüb““ø\÷/4A¾~¿I†ñküû¯4AR~šçûËM”ëÿŒûü¾ÿˆ¿Úñ|W““÷·›d.¿ÕÄAï7›Tê¿×dÝýV“Jõ]\×»št&ýjí¿ bï­ç—¸ð!âÜÇ8÷>ÆùqG“Êýî&ŽÂw5Afîo’ÑÜ_ë: ¼½<—÷OÞOxg%Ô»›0bï©ïó÷&S|7¿÷—¸Ÿ¿Ýñù¤‚ÿ9r~äógA½‰¢Ýçý“M˜ Çsø_­xåÇ/£„ÿ}˜‚Ò¡}'•äG˜„Ò¾O߇_ƒ~°ž[2Ûš ï®ß÷4éþ@“Œý—Yo¿Îz»ƒ|àþ&÷õ ëì¡&ÈèOñ~€ëýݽ0ÓwÐÑò ˜ò{É(eîýt’? ³ö^” âsÞ £÷“Tïåï߆cã‡`޾…Ö‡aÜ>€ÂûgÉH¿Ÿ ò¼ûQJìYz„÷q„ø»›Tô¿_û+ûí›<¶qnÜž¼øøî&Xï&ü÷Ä™Ÿeýü#”¿Zë%ÿçÈL?D%ða˜Ásß¿BÜú1*¬_vÖýù{òç÷6ÞGòÐ×> ãõÑ& ©R¹ßÂüAãP™<C°$÷ HâÒ§ê=OP}ëüߺO8ßßYûÍøm0.³nš J·‘‡üví§¼ß_'ž¾§öë² RòOê<ÏçþÎÇ_áœû}˜Ý÷ò¤ôv:dß ’pW½‡c·wŒsæ É£>Ö¤“ø¶&N h‚ ¼»a÷ÞG<¼½Iå~O%Ô}M*܇štÝÏ9»§I\ØO¼—üòj×WlÒ¡tW“Jö.¾ÿÎ&•Ö½M˜ß7Qö|¸wGÅótì|„sóCMöÓÕï§ùwTüö¼ˆ²áwxîÿ´ãùÂz7ߨý—óì®&ëæ>òÑGÉ×÷¯îû‚ûÈ>ºƒ¸¶· ‚y°ò‰ÔAÇ›(7ét8Ø„<ØD)±¿ “¸¿ â¿· °· S´Ÿ¼á@%õaΟ£¬ÇGxn÷5©èï®ó'JÑ;š0 Gštzo‚  6aH‡›0±G9¯4A®ö5QÞîm¢Hl‚ÜžnÂ0Ž4qr;×D0Lž4Ònš0–g›8îœnÂTTý÷6ÔI8Í9;ÒDÉv® ò>ÜDIv®‰âv˜¼u„úa„ºál¤øLææ4õÒP&û1êóԯ爛#䯓§ž®ß„êññ8ïa¤ â2ºnÂÜž#¿&_=פÃt„ør¢I§äá&LÊ>âÏ^ê¤Cœ{Xç÷WÞ ]gQ¶ìk‚¸$?;ÒDÉu¨ Ò¾¿‰äH¤ápdrOÕWa0²ÿO4Qlœj‚Hí#ß;Ðp6qìkš `ÃMXFš(9‡©ƒNóýCìãaâÇ ñ|° Ò2ÜDY{’Ï=ÇóxŒúøÉ&ʈ§›(ño¢ø|‚8ùd“øþtæ÷‰& Äyêógš 8Ï5é0x¾ 3ù©&ˆí§ˆ¿OòÞo¢Ìzœuü$ëò‰&ÊÕÇk!~²‰ø\¥ÏYpê»aò¼‘&HÞuüYò×AâÉqd˜}:ØDI<Ä{!ß©z.Nȳ÷4a2àþ÷4A4l¢d|¸I'õ}M&g<Є1ÜÛDQ÷P“seo“Ι#M”Hp^쩺4LÃ^p—{8·áÜ8ØD‰s†cO­·à‡š0ìGª¾ ^pëáâÚýU·ùyarînÂøá½h‚ð%Ϊçj½‡‘& Æ9öÑãM"Oço„#>iÂ(kâìtüéH§˜CMì½M:ß7Q¼<ܤcóxΣ䫇›0ˇ›0e'š8 žj‚èh¢Ø<þ0ÔD4D<;Ù„Én’ç VâìíM”wVÜÍ::¨p°¬‹›8ÉÝÓ„¹½‰Âù¾Š›aœN‚ ‚‹ƒ7…šŠûE‰x¬ “s°‰õ$¸×Iòµ¦ö±8ZÖå`&±©ÏIÝ4Hþt¬òát¨ïã9?BþïßÓDÁ±¼`Oæe?ÏwOåÓ#•¯gïkÂÐí¯çøpåSžWá b}Þ±¼táµæYÙW‡¸ÿG ' Stoá?a˜öT¼×öÜŠ’êî&J´Ûˆ-<È?'²??Ѱ~©Ë>Zß'¾"þ—û»sç㜟—þuïm…ÇD!ýqòKqÂw׿ç=~¸ óúëôÂŬ#Å«‚#üvá„Ä·&ûðת>°Ïûõôq«ÜÿÏS—ÿD…ÉÁû>Býö âÇíUÇ‹Wæý|€üë½uÝQL|œúè6êã5îú<£°úXå/>Go:Zï*<Üç&ŽèûˆÓÿ{ÁÝïªõþâ!ðÞ=ÔGw±?>Þ„I½?BØ‘—?Àu=\q#õê&¡÷7QrïiÂôï§?PùYòãU×GiðHeåÁ&œ“/ÜWuEâÐ!êÒ#M`ÎG᪸•sý0üÍ1êÅSM'+¿Š¢dFz_ÏÖûCÿÜ÷QÌQWŸn¢œnÒÙz¦ ¾÷×ýD“N §ÙÇÏP·<úxŽüz°øÃÔÇ*Òd¨‰âf°‰£ä©&ŠÐ£Mœ¡†*>¿Ò¡¶§g„ÛËï}„:ûÞâ;²ï÷4aö÷ŸúPåËâUÙiþ ÄM®ÿÅ„ÿû¼áG ‡ÍùýáŠ#Ù§÷À>Ì{z¸ÖQxÓ;‹W3Oq…_»¼üàg¡ÿ(ëó¶&Jú;k½»ÎŵŒ;æ ⇹îŸ÷üñÂÅ}O~ð-õóÆE¯W^EüÉ8!^füL^ýñzÞÞŸq!ûø£µïåQr^~¸òÄÄÃ÷ƒo|´â¯ñÝ|Bþ‡<µ‰Ròî&JºûêýzNø}Qr¾æ.ò˜û¨$îí!îÞ]ïSÏsA#G4Q\>ÌùÕþ|xß»ê=Ëãåy>¾òHñrqH:Hœ| ð¬¬Ó‡›8pÜQøD:±V=’zïD“N®ýu=ò¯â“Q\®ü#ËëDábé =\û:ì‡  îqþá(¸Ô1öïÉÂËÂϬúØü#N›5àµM”Ì'Áš&Êëapž×ñº®ð"‡›,Óã…ÛåÜ"ž.Þùï/>"yÌwu¬¿àQüýõч輺“:êQ:QöQÝF‡û»Šgƒ7“W’/@¡÷a~ïÊ/ßÞAGÑ^ÙÓI~ŒÎ­A”WGP26tà ¡ÔB©}¨¾/8ÞQœPŽ£øÛ‡Â{uÕÝ(›ÓÉÅó1ù$JµðnÒá{¢»Q*Óa4‚"n˜¸!ê±ót =õ8ŠÏ'ëû³~áÓQ”ŸD âåí\ÇÇèȸG¬ðÞ>\ë—<¯ôâYy¯oEð6ÿ¾øf¿ZGƒë D½·ÞKpÖ{PÂî­}sï<çqŽ£ Dù:HçûiöÕÃu¿èp|»³â@ÖÝ)ŽÐrÿÔû,ü@ž=ûôûèˆûyê[Ke’}ò¾ÒY¤ž¿¿pÆìÛß©õä>âÜG)~/×ýÁºÄñ_(ƒø„ú&ãygMWÝW‡8Qx|¿õQx™‡ ‡³~¶î7RÏ¡ŽI|O|J¾Áü,ûòý¼@t[¿Ëzüu>j|,üNÝSò—ß(†8’ü›u•¸¡º0ñ3u(ħª«­O­_£§þ8ñöƒ¥ÇP?'Ο¼ú—™à÷{ÖÕà.¿Q_}NÁ —N¤“‡}ŒŽ†»è´¾ŽæywÚ8íù—ëøÍ:÷ä)ÂçütG=q 
ëð·ŒG<×_/ŽºðC÷yéÄÀÓ踸Þãnâçm88ÜGžyçóÃ\ß}t®?ÈÐ=tʨønÂ}|¢ô>è´èL~géTгxÝ\×opæ¼{/Ÿ÷óù$ù&õ·ùóûJä:ê§Ï³wº¾ÈS÷T¾m¿+ߣΣÿ˜Ã•ï¦sæ4çÒYÖÁÃ8ŽÞ~O}•ß÷£nÙK‡ô^:xá¼~¸ø;Ïeó”¬ëûÈÇïg=ìÁAúCtl½¯ž¿õCòêqj¿ƒºÛéx¹»ãú0ž:âmyN¿þö …Óå~>@¿Æ]à*÷7Ü‹3ËÇk=ƒcW}ÝQ›Ï‘P7þR½ou‡<öÓ/U=M\+£zVã úcö×qP¸‰=àßß_õ‡¼¤<28=õét í¥ãý`åÍèÒ¨[ÞÎðVx‡_ü9óœÒE«ï ÿy ~i°òŠ8i¨wéï9ݤÎ>¯5\ý©ëFšt&jâ8v¼Îyõé:¾8Q¼{œÏN4q–9^MV>š:®á\$>ÜWü¢¼ŸyZÖõÃÅËÚg‘óýÒ‹›gÉ RGRý qëýüûûêœ§ï¢ø;uüâöêʽtÇ…çʆ>º—uqñ‰âüâ”ê´äm‚í¯¼M¼]wÅtÆ…3òûÀ÷n§xãœz7qúc¥/Goް‡I, Ç‹¾í΃Ç+ï5R矯·5ù¼ÛJ×€N£t/êÃÕøÞÔOØÏ Ž7:®7ÆWô?U—ŠgX'/Ú[:ððG9ç¢_¹óâŽê_@Lþö1:NïªOÞtWåèƒÁîÓfÿ{ú~¥£¾Å~ónû)<à‘J—¢ÞÆ|0ïéîÊŸÐß‘ßì­sÅ<-}nmž€Î³òRxS|îÃùðáÂcƒÇÝSëÍþ$øõÒI‰3¡Æ¹ãîÊ_©ïë½›‡yîGWü ÎÇ{8o÷â°ö0Ñûpâ=ˆãÍ:Çá0}˜ýr<ô“†p‚.¼(pê&ôÜGÿɽ8 ÝM=×âÔÔëõùÑÝ?X8¦ü‡uae÷V]]Ö‡ ¯'^t¬SÑ‘€ë¿“uûãiÇú>øî_g½þvÕÅôSU}­îðúŸCŸSý=âmœUGÈ_¡—¬þ>ûÓÔÑå}ýTÕœÏÕÇç9n^L[ú:uPêñ®×Ûg“×|¤Î%uxö‰ZS‚÷ý$×ù³žW¥[Ìú|GÕÖßÞ¯:Ñ\ÿW5êè«ÏµÒ:É~CïÓüÅ>ÀÔmï,þ>z´÷î$ncïçx>û¬ksßâûà|û•âO¬ë¸ŽÂ[ä|/èjëþoàÿxß…¹žìk¢ßµx×ä¨÷'Þ&ž•óõNú7?Tzû]Í|¿ê]ÌËì'J½~OéÔ!ø|]¾'û!Y—õœÜwù¼_«ºü‡ââàp[çZ½ë¿a]ýõÝï€oå\Oõ»©¹¾ï1u룅C‚'–ŽÄ>/òžïoÖ}©—R‡aŸtâèÁêçö÷'|¿†¶ïÀ>iyyð™ýlõ w¸Ÿ÷ú0}6wp_ûš8th2õp“‰ZÇš8ÐìA±¾ä¨[4ýtõ+7i8çè/;\x<çëວGéç9PýmÖ'ðLôœh²,OQœhât|=Ì}ëÃUo‡¢~¤êåàíGÐÀ_`?õç‘Ò÷ªƒO¥doÝ—ýaöÙ¯‘<ò‘&N0{ˆ V?‘:uÈö‡ÄùéÑöy£¢N ÿfé­çÓÿs¬ø×CÂøžÒõÚï¸ðH‡éGªï]Dé ÕÙÆõ qþ>û©s÷ÕïSר.Q}šzfy} ’_ž,]tü<šz¯Ñßœ`ß-½bªN²¾á­†xOCì£!ô6GÑÉ.}úŠ& ¡ÿx^Õ…äóõ>õA°žºú¬WíKQ¯Eý/qOé&ÕwŠ‹{¾Ú×]ÆïPo¿Ýs¤ãŸ=rnüj#þ©.O>„~Vt—喝¯>G½°:9y‘ô¿§ø õ¸ùû;K¯æ¹è9à9âç_{[¿ÚdœR¯þRÝ£ë ]@íCשømúÊ—.Ï}¦¿‚º9û[ô•°Þ?«o¶Oü‘çvéåàª^¢ÖKê¡¶¿T<Ã~&òôò/Q7oROå}Û• P‡À³›í/]¨¸¶úÖœwûjŸúýžž3äéÕH~^÷M½uü`ÅõÓÔuô¯>¾ù`oê ݾûãìÇ >úøÇ½¥O4Ÿ”‡£~®õ$.$žE]R}ò‰c­¯‡øQê¤KÞ^<£ý½êÔÃÜ^yýá•'©‹W‡îºòºíg†ï¯¼Ùýiž­.•¾úúªŽ5ññwËg@ÿû·Å?ÜŸêiÕ½ª‹W‡+g_Œÿn^g¾}ø…·©0Ç)=¯×a_“§}¤ô¼öûøžŒKú˜ÇŠsé`܃¥îiùQ}XôYÑ·Áç›÷ÿ{ÕÏ¿Qõ’ñ ~ªâ˜ñÒuËíµ>ÂÇÝIq'ýz­Ž}v‰£÷“Ý_}›òÇàeµnñ…¨óC݇÷­øuÉ]¥+§®®¸¥.Y? Ï©àI²Þö¢ ÞWúOy5uŸ®“Ò£ãûc¾|ü¡Ê/}ž®WÏÏÏ-ëëUë Ÿ§q ïóîZðkäÿQ§>Ødbò#å3„¾²ôÞö¤ŸûÞò‘±ïÉxg߇u°ç¦~!öÝ¡S­:þ®õÉI½W>9â³æ¾O÷wÎõªýçþ×7®_¬ÏÅß}®úí¸íŸ4þÿô31ÞÈ›éaÝC¿_õPßÿŒCêïþ«ðýUÄìorÿxwýY~YÞWý¾ñƾ!}DÔëgaãþ´oÃ}e¿ýWê|ðÅ‹¨Wñý›‡¹žÅgÐQ”>BœÆøoþ‘çs{ñ0êdÐÕN߸Zyš>SÆã¶ßg¼Š¾õ®Ê›ÜÇö[x^—¾SáußW>æWæžïög%¾½âƒ<“}Öú ™æ¸«Îyï÷z0óTpRúBöUß´uRôÂñÓ;Œþýh§ýcì“#Õwd?¨ñ+8ø½ôÞYÏÏû÷p=êÿâùjü†G*ýO®óõ‰îu5âAâRž â/É7o/E = xéûáÞWý¼â9÷>.¼åkÔ%øsâ'øxÉ[Õïo+~_“ø]ýÆàfèÞY¸—8MÖ±õÏ­âo¥³Æ¯ß–Q¬o‘<Œ¸´ºu¯úýàn+Ìç n—s½x"q©kù³wÕýØ·­_•}Ñøª•˜úŸ“ý—âŠè+ìã.¾’ó}êûK?ãûö½ècÈþƒß¹‹Ï¹³ú'¨ËJ‡ /̤¿=Å«¨§rý'™hõñÒµÐ÷VzöCázâã¬ÿê³ðAÇZ¾kÄtHï.}Px‹wÆùQzòÄâm‰Gðs)¼_~~µå9à+ÔAù{å«ô5Èú}[íûÔ•«“Q«®ÇsA<Ùó†çD¼ýÕÂìãçæùŽîçãW&^îuò}o«þûúõã| ÿá®Òõú}Ä¡zßöëpnÁß]ü¹ûÅϵ¿À~ηâÕ—QßÀ÷>Œ£ü£8(ïÞÇ„†½ÅÙ£ž‹ú—‰á-Ÿ™×¸—I3ûKÌÑþ$Ll8Yýø/ñû‡‰§yß§™œq’¾”Õ”õ2ÈÄ­ãpØu~ñ“øsÔõE7¸ŸÉ ó¼÷0à!ø¬aœ˜GðeaâúYxܳÄÝóLnyŒõø8qò4øÃ¬Ç!¿‡˜T5Äľažëtϰ/;ø1©`°úžÂÓcâã&À©÷Dß0†é³Âaû};#Õ·œè NÐ#øl —œ®OuâÇsŒsc §ª?ŸâÂÉÒëû1áíÏã(Žç-ßÞ‰ 2#ðºgK¯oÿNòËÜàN’ÿ¢sä¼êàs[ýéÜKÖCÕBüý/{8îçþ¨~ûCðmày=ZñÞýšëhõô«Ó&ü“–ö–/ |AÅùþ²y¬å§=‡é[å:o«sJü,ùxõý©;4> §®~%t3¥ôœ‘‡?í¨7Íóþ`ùWªK±oD~W~Ö¼Äóؾ@ðˆú~õDž_â·ê¸©ßÔk~A?EåyöYÊßY߈C¨÷¼¶¿ù׊—Rkžl¿µ}/â^ÖUâ.æ÷ògæcâ æ·þ|êžOT~kýh¿‰zŸyRá2œ#…É_êg)onÞ nßúØóRÏI+ý¾E•ŸøgxËÊÝ?öÑ·^ùŽ:"ÞKåðÌ•gæú»Ö¯zz×gòówÔ¹n½ž:¸íç×Õû±É<–õPyªúÏ]|&J—.Ì„¿ƒ¥_FoÇýµº'õ%êéøê×ïcÔ=¥kÊÄ€ØÇ·UüñùÑŸÄï¹½tNêS²žï)Ý…ûK=SåcáJ§“ -û:ðuõýú3eÂÕÒoÁ 1!äpéyðmcýìÁïiköWß#u7ý®¬z"×½¿ƒ/%zöW)¾LJ8Á¤‘cÕ—‘¼v_õâ3Ry ~hLV;ÂäªýL¦Î¤ì'É;O³‡«O2¾~ƒÄ¡¡NøÝa&lœªŸË„ž†<èùÞ)žÇ‰Ò§¹‚—Dx„‰VûË÷Ø>×|+“Œ‡˜X9Dßûp'¶œCèNšê§ß~š‰$§‰ÃLÜîÄ·á ëé,Ïwâóð8ùîĵAâÞIt^Ç«o¾cüùãL¸:RýÁÙ—Gêkð£CÕ‚xsªò0üÿèï¬|7:ÂÓÄ«!Öÿ™ú9ü7;ñ8Ñ ®~¼úã™ ÀD¨Aô\g˜„v´ö÷ÄÍÜ9À„•}<Çýµ¿à?+~eRæ!&`ÒôG’7?Ä$̇*NûÕþ óeú*Í{lïÇý„¯,?7LÿÈHý}ð¿âýÙZwÆuŽêþì'þ;¢·¢ÏþX廸t”¯ü?“–ŽWÞ ?Oß|›×šŸZçÛ¦®V=‘׃£ž·ù¬}ú^W&y­ºÎëÈû½¿|ÏÁ+Ÿ¯ÓÜ>¯è€Kg›‰`§kÿ§fbàºá†:x¼g°CBÅáÄ·cè‘O‚ë ‘_íôÛ†~²DÇ:Ä:¢üq¹Ý‡×ŸÃÖåêXá—j?õËÝwŒ0Ék˜}9Lþr޼á|'¾&¡S~¼úÆé{圪}^ïTè‚ÀÃN£ï>Ã$Ë‘óN˜ð{޼ê<¾õç:ñÑ|¼â™çFÂí ý{ƒõ~~m'¼ßu– 
R瘬už¸r¾üÂ_)Ý5z†ªÿÌKè+©uõ…“оæO8©ˆ“Æk°‰CÁPà ۰ã›ïï[ñgÎ1*ŽÃå(Êá ÌÑH9Ú$’ž§x%Áã8àž§CíI]G`’Nã\vEÃùêPr‚L"ü0ŒÂŽÊgét9bè<Š·ÇË):àÃ9œsÏãÜxEÄÙš¨“Ω³MfžmÒ!wšŠeG¤Aÿ*´sMC\9W`‰TgqÚj¹³ãaNnœ•¡ˆ9Ù™U'¹NãýðŽÁ, ÕóÔÑV'ñ¬è#8óà½ì+''•$v¹Š¡QÉGq1FT”q–áVh´ÎX2åNP.倢ŠJÖr,–1Ò±R'd¿unA+…¤Lv”ä{k²N‡G‹1¢“G¸ƒ(9Z‡b•}T:U©É(Ã,”âB…©Š •0~Ué)¯âTJ•:øËÔÊ [Á«,ÒqZ‡y:_•«¾rþ{2úGJ)¤ÒÉ*tFÔaQç6ýµTŽ©ö~tn׉TŤ Eß§ *',ÉøÙ©"ÊO'õ¾rüPŹì»#åjGšN* tŨœ"U6e‚ÔƒMBZ†“ŽŒrT¶3NG9 € 8{k’|)‚í¤3«œXU,è ¡ÒCÅ•ûßõî$­8¯"Þ7¾{ÿ07µþ¼_•C0†¥¨PÁ§#ÌD9ŒÃ ©C‚Nþ½ 2‘ÅëATȨlÅiº˜r•\tVªœs ]ó.•&ÆAÏ÷»¿Ïó\gÏ+*_Sqésq’‘ùVòÊû+/Ôá§ÂRŸË:Š‚€V¼ñßs]‡ê¼µ³ÄIaÔN ¼o˜Ôr@Î:Þ_q/õÄÊœmâ¬t¦ 2|®p¿äƒwt0²îŽóîÑr$t‚lûª8빕úêXÕ±N„öÜ·óкF¥Þ¿®0  ŒwáQ œb9q`¸&ƒ9YÏ÷d]cžfž‰£KMø2ÏÓ1ÊŽA÷ëÞºD¼DE±ø™¸Ê ë1œªîR)mžì~·^r"´ùŠR8€°ÿ÷•sºÊ •—NdRª‚EGY;õujQ‘©‚Og‘:‚9©Ue¬ó*&¬çèxäs~&öÃ(So+j"• °2õ2_*PÈ—bTç1HL4)§_~u¨ÑÑÄÏWѨӄNÄ2qNœÕ±Bùg92É£œe”T83™²PüœrfEaéŸeÆtò•S±!sÏþ)ÅŽü¼åø“zñ@)MPÕ}ëLéW'¨êä§ÊR4*=è<Ñ ¾QÁjý/jg‰u°õ®ñ®[%¬³UÈú¾Tx3a¦£#°ÏW‡¢\×Ãå`-³¥âYçQ•ÑÔ¥Øv²–ÊŒqjªûÖy¼DgŸR´á„Zu¿¿Ï ¾*ÕÀU¾à¹ˆ¸¯t\‡/e¯Ó*ïuÂr›ÏUG ñ5UF«ŒÓÉ …9^èì*þ òËÏ5éÜä¾·ãGÎr&4ž¨¨÷}ë¬âçûy8}×ú°cK¾u1“ J¡f§×µ?ÿ®&ªÛ¹¨r¼‡¸þK§q¬ªÏ?’7pò‘Râ@v0y=N\±£ÕN'TÊ+Ø!kç¦x‰|‰¸ø˜“dÄCÌÇÌ7ÅCuJðœÅq©&êšGzÞ‰K‹Ó;QɼKÜR Î&¤xÞÚil'‘NN^Ò#û|MþÀ™ g€!ê’¡š¸š|m¤‰²õt9J{þ:J:;q"*üœs‡Ž½‡ËqXÈ.;¶ÌëÅiìHcz9,‹‹9F~NTžH'ì ¿²ãÍÎX•”v¦é˜l^a‡°üËyÔŽÞk'N¿ÛNƒRŒê`ç>±#Åmuú}:è`o§«ç‚8¬ÎèvÞ¹¿ìÌôþí$?ÖÏÉ$8éLTûÿz§êàªÎ>p[òã/èõLLLÒ™´œUQ†.¬Ã®“ßuº7þû}ž ÆA?×NÿÞøeÇ‘Ô×:C}MÅ#¡Œc*Ž™ðT“uxÕñêº Q5‰NG Ï'409¦:NÌ»ÀkK«“³ [‘pÀ/ç8;ÔTpë¥e³Å•ŸÚ¡v3Uáö(Ë+¿Tn’.v”yÝàÉ5AÓüËüB'IóZ™J¹®Ã¤ &ÚtbÓ!Ï| ¾ &‡Ú9ïùëºô¼Ã±«:¯t¢t¢Ÿø½÷í¾ó¼•ЙÊ}bÜ@ñ_Ö7:#È“É+GñH9¬àÔ\¾:-؉ïdvëp;›åÄäç­ßÔ!ÈX×ËxÎØAn‡«¼š8,u²ÃTþÛ ¢Ùg”“©Ýì´U7`\‘w°s[G2y4;Cq+ÆsÕ룓¨&l‰£:áJg'n²/–NDg"ñ{ñ' ˆ_ŠgÇo;ðíüµcXüHþ[JžV~ÃóÑ<Å÷ ϬîÀŸ³ÎÕ‘C^Ñ wvÊ~‡>½ýýÕqn|7Îy¾Úaì:sý9ÉO|Êõæy'ßgGvâåÝÕ­×ï$Gä'ì§Iýžê|ð¼µcßNuù8"üª#ž‡v ØÁn¾ ³†ûÝN¿ªÏq]Úïdë3Ÿy§ÏÝN¯ÓóÙ|Ã8«‚ç:xDååò’×NôúÑšð O¬“÷éWõF>;´+âþu"±sÇ8fÞtýÄDfä™}:x?à(ç“@ÆmïG‡Aó:Kë\µcÔüÅó[Ç(Ÿ§NÃ:9è0#obÞ$ÿîy`œ1oÔ9Ñûµ“Ç÷î¤ó5ë›k'J~U9&š—Ø1ë9b\Ô±#º¦;jÒ7Ž¥å¬NÉï×Bã„y¬¸®øX&ÆÜSz%ã‡ù·÷ï¹æzó½ú~\:F¸_Œ/ž7®;ó-ò›Z:Wšg{.9ùUÇouSêšPŠWÜ—ÇéÓ›c‡Ñ;¯ ‰ê{<ŸÔ‡ˆç\¼»poõ1Lœ(' U<ul±ÓÌx«Óù„qRžA½—úurþ¾âqûçÄ¡fcg9~æ üéÑšè¨.ÊÉ“âÂÖ—N²a"GéuhÔ Ïu¥¾É U_ëÀb¾¤C‹ø¿º-óõntô4ýFÚO*G1Ïcëát°ì+ýü7“ÒŠo¤¨ôUtfV}®saâñ¡âm­‡£ËªÉ¾âØêuÒQ·†cBéùÌÃÌ»äU˜Œ¼§î3³S7ÞWü·“›Õ±ÑéTŽD:ô…Ç=XÔmÊ?ëŒæ:Rÿ©ã¦ûE§4Ÿ«òo9Ö–c£|NöÅÁ¦O?º¿û}/ò·tBà¬v´tkæiêÞÄ ÔèüϤ‹š˜E‡K鹌[î[ó[ó(ªÌ?ÄatÈÒϸ©>Õx#ÿ`œ5Þè”äzVÏf¾¬c¢ŽrâêvÕaä:T½è0(šº:ùñn޹­;tµô«ç‘<€xˆ:ýýâóL(¨ïÓÉîZ¿ä¤Oã½8…õ¾Ï[GLïu(óÏN$Çá êjë6?ßç.¾â¿[‡Z7]ïìéz·,'d¿Ê‹ŠKÀ›VÜ/3î{ýÖ³®KûtÔ#™Wè¸á#'ù™gÙç¡.TÇ|'°Êƒøùžï®?<<ýwÏ[ïCÜÄzÉçã:wù{tÖwñÜ7?ï4Ð)Öó\'BÏÏ׫ç¼üŸuÞëû­ÈwƒcÜQ×a~ãç]Ÿ¿ˆ·ºÿ\/î+uÖž³þÙó_¼ÆuïÄ'ëi?ÜzÏ:êéÀá9i|ãž:ßÌ퇱/BœÐý®¤qD=˜ç¼ç¾ï[JœÊ‰tî/¿š¸Žý>ù_ññ>ÄäÁÅ›ä‘Ý·âFN6.ú~Ì ¬ Ð;ÔïÏ¿ÿ'7×ñ*WܧâHׯíï,>K'A'_È'y¾é çdù0×ä%¸.x¥ŸªŸç~kâ„|“|–ºãŒ8•|‰º¯_G òãÒ{GjBŒü˜ròy:¢àtYãNæ$Ÿ®É×8a–cŠß'/ä¤qœú«ÓŸ|®Cßg)¾J½‡ÆåµœTíÏé”ãDK':©§ñßÁñËFÝΞ8–ÝUú) uŽQCeé´À™jbxú î,§9ÕÛ “+‡)'r:¡žþ¼šT>RÎ8NC‡€ÃÔtä·XôÔ}ë ã.ð¤rÖQ?D¾_yGñ†Ä“š\¦s•ŽDò}:åȟꄨ#’::ó<'û÷®_÷™Ž‹òºæ…êõÔùù:!©7d¢KÅõÍûä/äK¬¯ÄQí{Rh]ãyx-nø{UÇÍs<uN–ÏÏ5^Š«»Ÿu ½ÞáX\š:±žóµqï[ä?ÅÁÕí\;9øWê~¬÷åqäÌ›ý9ùãµq^gf¯Ç>3y_ã—ú'õ›òéäáå´©ã#¸BMn§·.Ð)ìº ¥oHýÎÅ´>©üωMæ1òÕÖñ>×ç¥ç½Gx&žÏÛª_N>ó­èÜG^—÷é$÷‡/O¢ÎÅ} #›ÏÁ¯>Ÿ‹y: Ïw×—õ çPÃò ~Ÿ_]¯òIê!|ÏLÔ¬þJù÷—“¥ä³Íû܇âVæÁæëææGÖ?9ß>Tõ—y«ý¥®ëó%ó?êáÂѼnq óÏëÿÞÉRN¶¶þq?Ê¿é\ïºsûÕumþîçúž¼^߯uëÄûðþÌ Íý\ãšyïׯòÖ©þ½ñôz½“õŠŸï¾•‡•—UÇå{³^•¿–'6W¯ÄúÉ:Á:ÊzÅ?‹ÛZߊëŠãÊK¨·n°^–ò9ú~üwtI}½êN|>_ñó`ß·uŠõ¹ñ\Ü@~HÞÄ:ʺÊúÞsL\WŸïóô=‰ÏŠGªwuýx.?®÷—p=úŒ+îëuÏuëjqã˜ç¸6¹Ï¼^'£×\Æ5që,ñeyGëuq|äÕ·:Á¾ÖÄ¥ûNk½Ø¯a†“…p&,¬õ®¸xiêõ{j}«Ï¼ÞC}“:]qWy/q#ñyvëW'MÉŸÙ?æçë÷à¿Ûgª.ÕI^N“—Sßš8~_ñ½â·ô?Níõ‰È[á0[<®¸­}ßêzãH¸·xNûq_öÿ'ß‹çÑÒ]Ùgÿ@é²ìKÂq©ða×z"yeqlß—|—z[y.yMñj?OþÒ‰Š®G߯u4uAù´à˜U:cuWö?áW|·}…é§ÝW}…ò…öC¥ßíѺnqv'5©Ÿð~½Oñù;uÜ5Ñ*—ú¬‹všýüòÐö£¿µOȬ¾ûGåÕqøÜSÏ·Æä'Ü÷N^Ï$½{JO/«ÿŒþ#ú€8 
GÝù8´ºóš4Ë9£„y8½8¿}ææyê èwªç§žA¿?Ï|Å|ʼÄë5>ë#¡žÝ:ÔóÍsÛ¾ë óa'¹;a^ÞÍuàd!דëRg´¾:údÉŸá4[þ,~Ž~%ÆûL]§®ÏCùQq<¯ÏsõzÜP½ºçŒÏÉÉlÆ5yLõ.¾wýRäÿ«ê{ÕÙè§¥NÖûv"çhpÔ»+~Ê·«§——çP'à÷˃[‡˜ú^ÍûÍ­¬“ä?õÑp2 <¦÷qýäA÷¹: 'oÙ·lMßë¾Ò•¨7±YÞRüÙÉWâÔNÆ‘ÏU—å>µŸÇüRëMÜ[¿ùeq|ÏõæCú’$-y¤â©y„ç¶çŒ|¯ù„ºè8zÞ_ú;õ}êµ]_æyê;äÓ®ŸDižl>r=¿mý ¯ã„!~tòøÖ³ÖŸÖ¹ò2®{ÿì:ÿ—sÝùïæÕÖê+J¿@ž¬SèúzÒ<Ù|Úõ¬îм½tðtêoÍó]÷æß^z ¯×ûµîöú¯ç»¬{|Ž~¾õ‡:ZõÄÆ]q,û€<Äˬ;ެ?äįuÒou¯~µN¿žg¾þç<×Ôoúž­¯¼ú«¿&y娇S8Ôµý@ÿºîϾ%q)ùl' ÙD}Uü„ÿn¿ýD×öõ|[ñ¨ö;Š7É¿êØoÜó¹¸~ÔßÈo¹/¬S½/qSû?íÏo£ÿ²&Ó\;)çå”kß·8·ýBôÑÓþžêv„?o_²ýDN¶òûíEŸ\œìÇ•gÏuâ¸úl'ò¨«°OIþÁ 9ê/ìCµÖþ.tmôÈ5Áƒ:£…ÑËàŒý`ñø•Õ„û³ñ•ì?Ô„/yû–˜`Ÿ°¼ùWñ;Nì°ÿœ°ðd'€ù÷œ#u]yT_{tÞõ<ÀQpB¾·&IáƒVϼªž³ýpä'å— ~jÅ'Úïz¶ÿÞxà„/y2ùaú~j‚…<œþNs_91Íýe<²^þ×ç¦>Kÿpù/x¾ò0Æ?÷ ùAÅI÷³¼®> ®ãž~öÇëס ëA_ã“Æäsôiò<²ŸT^…¾ÑªŸÔs‰ŸZ'Êû{>Ú_+Oâ¿ë#píä—¯©øìùf¾c_¨>‹â•þ»zqIóó,y uîú/‰_ßW#/i½¡ƼSÜÚK§/îaŸ€“>Õí;¡ÓûïÐ×F¿=ûÕqê£]\AÕ9â=â â%ê¿ÔÓª¯–—Ro¯Ï–u©8°ëÊ|JH=¥“oÅáÍ—å‡Å%ÔG;¡ÕÏQ?©.Êü]¾Ä~wufòbæ×î#uË®sqnësë[ùWóMùPùd'šß©70Ò¿{ù˜š7¨ßÑ_„~ÁЇöm›ÿêâÄR'5ÂcV¿2|”ütõå›·š;ÉØóÐIªêjüw'zžzÞ_Ô9ŒOXM¢p>Â¥ƒQCÿTåƒNNrR£“è·å¹?B¿ë:RÏÁe:àŽü2;ø`¢¯ÙK>³ŸÉtú°ótè»éàóQ“ƒþtßPì0CŽ1Qðhç·úrçð˚蟲ý5¡Š¾$&§© ýõxMjÄ×¾ò=ý‹ˆ5ÙÆIzà+𱚸äD%ï+*ÚIQy/íõ8¹Å ’NÔ¡Ï‹÷ùp‡~ šð’8~¦LüÙS(á©:àî5‰¦|ŸÌ£ÍkÔ=9AÓÏC_þQꔈ51?ôq¸ÿPu†ú-ó4×5ø~é™ÔCùœÐëd]ó'ñ™·‚7¡³úXMfEP~Åæ%êá¬SÌÇÜŸæ-æyÆ }ÔU™'q®–…ù·_ͳÍ[õqà/Ÿ'' R7—“zB}*ìw5^©GÔŸB]³u¡“rõ»@¯T“ñ®÷ýÐ \¤òvãŸùúÚÒÙ/~SzÊkûs¿ª&ãï¡ßF馜lŒ´þì$Àëû—ÅcÄ#œ hÿ²¸—¸†u…qZ?¨k'ìþ&ÏûwJw©?çfÕ9â×ú±üPÝ/¾)å+b~ëyÆýÕïµò¼2ÏV—u­NêÇ+¿•÷<ÿò2OõœR%žâçúy~Žy¶ø”¸™:u>æ¿þ½8¤¸š}èǪïÞ>}õ/^—ïß å~Žù­ù‡¼Œ: õÒò¤ö“ɯ« 7RÏ"§ÎDµŸ#`šy»Ÿ+¿¯ŽA?/ë×£:2uË®Kuuâ`úߨ‡C'Iüþ…ëôyß[¸Þµ:Â7Ù']uŸ8ϱôÖò:ò`â™ê¬ÔQÊÇùs¾ŸŸºß‹?¯aùµê‹A>Ó\7©èòIEqHþ43€?2öéÎwö)êOSé>‚ùl§oXø}ÏP‘~’¿ÿ4N~Ÿî¤S÷Sœ\σ,|†YP@eþ¹N_@éü9f ~šø)N¢O3ì³TØŸê¤óüÓ 4ŸéÜз>þ 3ÿ>ÅìêOw¾¸?,ê3d"ŸEYý<³¦žëÄáùÙÎ×ö‡R=Éý4ÎgPp–™ˆŸ#³{¡Ó?¸¿ï3ÌrüHÏóDÊO‘!|DáÅÎ_é[¸¼H†w‘™~Ÿc¦òç@®^`âçê÷&û,?ÿÙN:Ù?݉ràE,/tâDóB'ŽÐ/0‹ëR'Èe2ËÌzåþ^è„¡û'øgQ¢†ÙÏŸcÆôtâXðY2ãÏuú‚ë¿ó¡O1{ñ¹Î@?ez–ÙdÏãlù<ùùNÿ6ÿésDìç;©ìžù}–Y Ï2«ófj?ÇlÅgÈ$ž±zªGœç;épþÈäóÌÊ{ŽÙ•Ÿ!ãú ˆRûÞ2ó³ìäçQ”?‡Óè3|î3¼ÇçQÒ?G¤xŽYÔÏ1â2î'É„Ÿ¨Ùj9ŸI=ßɪsÿ¡OÅ©~²fXFÑþÈåcÜ×d–ñ^Ï‘y?FEw%þùN:GxßO3ãó“dÒOÔ>IEõ,™×³Ìn|Ž™‚O‚À>ÁýÇ‘õlݟחuý³§r9ÛÁžûÁá¤îdòÙiîAÖ署¥ê¬¹dÂg@Ï€ ža†åi¾ï|‡Éd8±žíü@ÿÿÎÖ,Ìfgjv{Þÿ¹N/ç:çv ±u¦f¼'ÜÙ1kþI”È#é_ÀpÍtÍú>ÅÌÕáš!Ø¿í¯$C>Áº=NÜ9¬»ãÌ=R³Ìéx1ÜÛIgQÚßþ£';Ú§¢êô.· QyžÆv¤fbö/ã_6 ·§j&n:æÔsO<f½Sa®“LªYŸ E5#3qlUºÎúuv2¿Å001¢˜ug@âQ³–éì­™÷L’ê¼û_îh?ŽSÑ€9igÜ:ÃÞÏñªž0lÖlv&à·‡óeo'ÈÒƒ…¤ƒ Ö¬K++ÇdŽmåeÇÈc9ÕšÁâ_H<Žå4Œ’£:…`ÔëûeVPrnÜ[/Š«еBþQrT'’Œ‚O׊Êê8B!Ã> ŸÏÅN&€­`nVpÂÞUN¹×2ˆ®á:ø¡ô-§?” œ;)Gl•ÊÚÊÄJV¦H§@gŠÊ4¢p®Ÿ³â邹†Ù¸­*pŸ/J‘êüpvùõL*J¡ŽN2¢vé4&CâóðþDÊì(AîàP_í¤s=Ê ŠÈdxŸ:1˜Ȥ\ËD¨:ÄtwÉÜ „+‡E‘9;¹PúÔLn; DÖtº¡ó9ŠX9‹<ª–±q¢”©Ž>”†5Ó×8…£MužápÐ Û:‹£Ô¬™î"dLæ`õòØ}ä[{:LªÙÄ õ̾~硙­üH]Jßr—™IïÀTÜIA4®á¼ÄûÚÇŒàê\ÂÙªÓþý“Üç çᱚ mλS|Þ0yìPÍÐ§N÷Ç¡Š:ŸÙ9žÿrÓéˆ_7Dp²ÎS&9pŸ§èجó”ÉuÌÚ=¾=Èû=܉£e‹†é0ÜÉd…ÓõïÎPf2A‡GföžfÆìÖõÙÊ—Âxœá>F˜yË:yº“ ˆOsþ>EÞóõÅ“Äó§prý$3 Ÿèĉí™NߨéWŸèdRíÓ |Os®}’xó4³…Ÿ`6ýãLx†¸ñ qäYÎÙg:Q|?G¼|äþ<õæYêÔÎÝÇ:™„ñx'Jíóä+OÂpŸ§¾{ &âqòó³L®Áæú†;©ËÏð¼k–´û¦? 
wúH‡‰I¡î_n*¿ S?ØÉ„·!aâa›'ïfœ¨ÏÏ$™AâÑçÇΣ#…äûïaôNw¢ð>]³¬Ãܵ3êë;™èÛ0c~õy¢“I"gj¶y°çØçY稫ÙÏYmäÏÇñüÉN&?ÍçêÓÃì›c&ñÔù•ýs„º½}^L’«¯Æ &gñçÁÚo®ßÄï# œJÁ$†£R1v0ý û«Ž@¹] /ØÀ…î­úަªü>'¾à|ÈçßÍú˜u÷H9{›GÓ™C=teÖÑš°¢‚IFJE O'y¢|)ɼZŀ̃ù¤ß§RF¨ÌŠÈ¶Š['„ê,`¾n¾­’IÆË|[†…®ç¶bªuX ÓÎI=:h”’ÙÎNœP£c™mÖ"êvdËÄ\«ú–R8©4•)òßU"¨Ðò¾|8°•L†_E•LÎò öÅ Øq.sd‡¸Ì’1¾ߣu‚º*ˆd.œô©³¸÷i½®SƒÌžÊ1ÿE{Õ‘* ­cT úUå'ÎAÜßKùf=i=Sñ¿U¢åý}¤ê;îOq&lT=ÃÄÔªËܧ8bvpŽ/ÆÛúA¥©û•ŽÂ3ú¯ýÙ=Uÿ0™­¦Ö_N€·žTIKG+yKt ¡C¨p‘gwʘýPý>ë5ñ• *@­­_U ¨0õ9Òi >yg‡ŽbæÅiÂÞQŸ¯ÂEÞ÷ÌDï‡|ŸNâr—õ¿çˆ¿Çó%U9÷¸Ä·tò>¼^ñ4:rÁ9†ÚSëÚýB®?*7pä¹ïí„ßžôH)ƒ²Îö•ÒßSIÃD÷ιÔYTÝîþuRÎÉ÷‚๗|´ƒÃrg' ~ù#‡+ßO Nt~éXç§út<âdå[Ö~NŸöû#U¯ç:OVÞà„3&üu˜H[õKâÂpÕ/éh: y¢“É‚Ç:}Xù5G;ýy¹¿v˜8rˆze„|©åž§;_õgø¬§ª~µnÍû{–<ð9ò3Õ—š!_8]õ”ÑçÈCÏ?ÂDUâÔ¹ºÏÔ#ƒ=¯Ý °'Á™×ý†8Úa"c)p>î0)’N†c…÷iµ9\ø¼J3[;ÕÇþÁAø‘ìƒýðC{‰‹‡KyüµÅ{²>‡ªîË¿×$;&NÕ:¤£\âPá{®gqkóI&î/”y¥y£Ê>q=ñm•zæµ*¿˜¨ZŠ*:Ñq@»·: èxä\}°Î;-<ß'­ÿ‹tPÔä[ë;­Œ¯^§N•L@×mq’à ÷¡o{ÜúAÎÇGK‘oG¢y·¸•ç˜ç޽uŸžNŠU-¿jG ñÞzÈúÌÎ;àÆ]óðˆê4µ3Á8É~­¼ÆºÇºÈºÉºÏ¯æ7Þ“œíµÃ§°ê„“·cƒõâáýyÏQyyuòòæ+òÃ8%Tþ¢\>Z~\ÜN>_ÜÇõç„m¿Š÷¨pVi­bÔÉPâEâYâ]*Â=tžU‰¬bT€q@¾ßŽG•áâz*Ú=ݯâæ3æ‹:ïz]v´«4Uù­ÓœLN:ÿ³ÃM§)'tf´óÚ?‹#êXhG¿ñLZìL×ù×çn³ÝvØ!ªÃ¾“#í$Ô!UGMãN: €¯–C·aâ{>òÃr¨ÓÁM…²_¯u|ЉóûT—C§ç¶ ü¾º?;ÜUãè_¨×;túœí@0ß±#Êš…÷êLëÄZãº>죊×îo;Mœ@äïס‚ÎáÂëp«Nü|î#œ‹û:LZ+^P,xס΅8ìÆƒ/ï'¸Ç¨óÎÅqtû':8rNÞWy—y†õõµN¼¿_N*¾/;’ì¸Ñ©ÇüÌz5ÏyOá)â;Öÿòrv´ÑÑä$Ú8/'Þª~!ïù:îSÕA)Ü÷0xîQÖÏN_¶{þpuFª?Rç+_›çu‚óîpuPª_ .;XzÛL\;Ýé×»‡K·£>$º±³Å_¯çRç¥þ ç¯ÎY…צn>Juˆ÷s tN©w†8›Ò$^ïœÞ);¿õ0ñà ï{OñBÉŽ¢;o:÷ì |o,}rôì§á‘[}’:ná™4\©«*ÌóÇýæ{×p?ËËàŒY¸„x™Ž.Äá/pð‡sâ€ÎÔvZªoÕ‘_ÜTçùÏ1Ïg'…‹“©›V®îF]ƒz„Ä“càrÇ;c;éò¿=ÑéË“ž>Ò°>tÑLR;Ô + ®?‚³Ú±\·þÝÑ‘šdÜà(NSÇq`;ÉDÆc zòýNa'âHÃõá0H?Ô€Û7Y÷ÇšÞÎëøîÃ5a0<Äyéi5G8O‡›¿ÝGšèOÎâÜw®É~<ÏstN¡ãêl=ácMŸN:ôxüû Ž˜g›è’Î5ÑE4ÑÉ?ŽsÚ“8S=ͤ©gpTxŠNý'›ô3=ÕôaÉå§šðVOÓyù|úÓL}¼àip£'q}çÖó8«>…£ÚS8Ê>…3ÁL´z²ùÍþñþX³¯ÜvŽ<ã,Ž‚gš}ý°tg…³8fœ¦ãð,×u¶ycÿAà`9L=0ÔüÚN›Õ]g™XrŒó º8&|5?ÜOtš&ñë¥'™H8Øôш¿{GÆ#Môn'q<Úôé ¯tœÔ9X'ex²š8ç¿ëX¢ó¦“JáEÁ32ù`Ó?.~²aBÝPÓ7ªX;UŽ*Éã-çI¸Ñe”E§tëî¬Å#Ô¥xîÉ[{.ê(èä*ëguîÎþØSë]5ëíX½ß—Öt˜t2/zº:OЫ•ó¡õ‹õЏ“ö:˜ÙO!#¾…:£½ðûŠ'õ<µÞ³s׺Ñs]‡QTq¦À™óX“Ë:I=ÙàèÙÔ$ ë­wu¾O°Ýç,΄£(ýtpêæ~÷—þV§Žœƒ'È_‹ Þtºú·ä»ó{[œ“IÔU_àx\ºA?ŸÉŒÕÿ¢îJ^E'(ññ~ùDù4qkñf'Ðét(¾¤óŽN@ý²ãÊžšà«¹ :ŒYÇŠ?Š—éHg½ä~0ßÓáÌ|Ì:Øç¤þòøNÚögtúÿÚáªC­kÅùí¨_r"‡Ž»:ÍêੳŽâjN ðóÅ#ᬛ¯Çµ|~×:ÿñ÷Âéä#Œ§NÎÕñV›8‡u¯Ž òŽÖê ÄÄmÅóå߬ûužÐ1ÚsL‡/ã“÷ï$"u‹ÆñÄÿÖéß?ë”áÑr¦2ÐÉ]‡aÖtšóeo9 ëŒ~¥IŸöaœ¨–“1¸u9ÃÿWžáþpb±ÄòˆNXr⢓;œPjžƒ~¦Ú=ŸS§­‰ÙèEêu¢·Nñô¿Î’¾†cMôëûˆSÇÖMØŸÑÙ^ç{ÒÅ™uØÒéÔÉ¢ž¯É§ö–3½y}°ålïúœ=Z×~‡½ußéS9ÙЯÃ:lb˜>?y‡µâöHóWûæXóäN›ÓÃGÉËŽ48,Õsô};Q¥& ÷®‰´®'Ïê¼–|ý0ñïH½_x :Œ#耚~ú´ëdCëc˜<ô“X6·ï døoÔp&¤Õ>Pךý6Œóø™&:£sU/õÛÞp– ±Nã¹?VuRúúÎ6©cÏóüÎoo7ϱïÏÕÏÓ¯ ?}<ì üè™&º¨3ðËÃäEÃÄ»A&ÑŸ†w?iþq¿‘ô Î4é›jÒ'6È$ŒAêž }4 º|òèãUW£ã"_ }º‰_Á0¼úPC?O“¾ÃAΑSœ'ª¾ÊóËÏ¡¼&´éàd^í$Hy¼nù&³<Ÿå—¼o¯ËüR§#'a8¹ ]Zñ—òsÖ]ò꘮å?Tºë u<ÖU:lÂßÕÄ@œpÚ,ç4ûQt€’‡1Ït"šÏÕ‰èjÝ9©Ã}å¾q2„ïωaÆ'6™‡¢k­‰/ò„Ö‡:ñê¯Ã½ŽÀò”Öa:Œé0J½YÏÉ zÖ'ò£®;ׇ“ÏœTa|ò>ý~Ÿ›}õâF~ŽÏO'^'8ëHæDPñûk¿~MM¶ô>åÍuPÖñLVç8û“èó)^RÇ9ÁЙùýõûÕ…9˜~¢š, ß/_Ž“Xá0¾Þ~0Ÿ›u´µŸI‹z(}At$¼Þé_^MçVq-ù1õ•úcÈKŠ É“]¯³R¤ÞÒ?«/R窟‰zhëêïêäûJo®¦>+á ¨þ%ùûÁÉàÕŽà¸z¼ôä:ŒêDœÿQ|ª>AyÒvʶ_8Tüßoì£o?o»¿xº8Æî/?;õI^q¨üjô‡IÜ8R×+o˜v…c5¡˜‰DåŸâsÌ:¼»tæêžtf—oµÿȾ?nÅoÀЧE7X}TþÙ>-õê{åuTý¯ûÂν嗠ÞWݺ>)ö+Ês©'³OLÿy4uZèiKW¡^J¼V«}ƒêÍÔ©'V'"?¨~ §èrÒW¿ ¯®\¼H²x##«¿L=¤ÎÈ:ð‹«¨3®èP¨ÞÔ8­C¥zRã»ç–ñEçGñHßk<™¹ð"íÿtBŽÐ¥SºÞ Ô¾P¯ÏÉ[þ^¿:)ÇI¦N²§Rç¬NE=‘y‘:+³=tÎ։ݼ̼<®&À9aȼYµyª×ãçë$îä5q‹ä»{jòü3×y{MØsb¢“í¬Ÿ¸(>„¿UMŽt¢ÎñÖò;ôãT Þ OßÐSs,lù±ðøíÏ3áQ§vñyöyCœ+þK]7ñ¤xúYŠ0/×wr¬Ž˜æ/æÝ:šãçT“±œD è¹f_°ç»ç”ç©ýÃ8ŸÖù¨#ªëÜ|߉Uê³~ªðD¿ù(¾HNœ+Ç[õuNª_2ïљԾã®}¢ñ-iÏõÓö1é󥮨óÕ>ûí²?Ãxn©º-u£â°äçÕßãùkˆzìÄû_¯|Zý˜y•º/ãyñ‚øM©óüU®>ɾ?ðÁò=R·’÷½·úÆì#×C?|¿€£Õ_œåTçÓ;pÓÀ‰r×G)þËÏH^ü ú¯œ@cž¡ŽC¿9pÑêË´ÏY?9ûÒ·s´Ó¿Ý/?…Þy°|PôóKŸõñÒsxýöfrÝ0NãÃÕϘú¹õé>L^pº¾?8Lëœy~´áx'þG«SÇuß“:§ðêƒè•‡áéN—o†¾à>ä1ÇÈ3/•÷r¦<«õsÑI_“Ä•Aú߇ÈSÏñ|Ã_åñò ¯þdù%®=Æç=†~þl'°éytfçɯΒ‡Pž§ºõ 
Þ÷tù„&_y¿žgð­jýDvPÚò+gʯS—¾=ÔÝ#øyœ§/ø4ùÁi|¶Îà{r†þÖ!úm‡Ê7Ëû žyŸÚ³åû”üù t[Oq<>‰îé™ò©?æ¹Fg6L?ò úýS|þ©š`~Ž~£úíï4ŸG·Aü9ÛIŸË¹/è“'F·ø~šw“?ï\ù¶ü} v„üv¤úy³Žž¤ßç©òùM^>\>8êõs‚BÞó©Òýmí¼Î?s‚¸ÜúÙ¨´¿ºÿØ^sœÏ?R â3z°³Ð—“‚_ÜW~mæãàšøã>\z6'9Y'è·¨þLªy±}‚NÆpë˨Oƒ}ëåÛB= ~ž¥ôê üåDuæÉNð|‚+ùzõŸöyØ¿k=J^XýoæôË•þÜúA¾Q<“ZjBŠºcûðt?O¸öëÕ&û;œ ®`„xˆºTûËôwDg`=RëâONÚtÒ‘¸Ÿø¨ù”yªâN.G¯\8¯Îáæ?æyø4é÷Ú[y¨x6yuÄo®(-N¨N]@á‘â=è¨k2¬øªx«P°ä¤ yQð‚ÒmÙw(¾è&|3ëyÈ8¹W~P}Žú%ëë-ñý~ì u½‰Cª¥¯tÜö¹ÙDŽÚ`ß©¼¨ýHôÙÿ>\úÿß™Âu}¿â¹öSÙg®nZû¯ìããù—Ÿ’ýsêXÕÇêGl˜u¾þžö7YÇ«ã5?R‡nþ¦.×|Wé¾ÃÄÞò%Ð?+~‚ûñ9Düió,'š?Í;£8\þk;²Ó›N‡:ýtÿK×D™ðŸG˧Ãó!çVS¾}úëùïúò¥.B/>Ò‰.ô yÙyp¯áòa‹îçTù”¥ž©ó<ïåz•óå§Ï$º×òKÌûÐÿîlùòÝWÀÇoát·©?I]ðIò٧ȇža²Ð³¬«§ðÿy¶s¦/SxÿŠgy®Ï‘'<‡^üS䟟â98oàYêŠçн?Ó‰Nî“¿Ó¨Ïsþ|ªÓ—üܧ;áå>æóL’ü$¸é“å§ž<ÿ<ïçIêÇ™Àó+ŸÀ×óéò™O^ðtçíýBéi|WŸ.¿÷ø¨>M?ècä/Ÿ$_z†úïiôÿO²ŸÅäyÞÓóø•<Ïþ|=Òô|ËG>úú½îãŸØ}ÛÍ·¿þŽ›î¼é®ÝwÜ3pïîûîßýÀ-<4ððëxôu{öÞ´o`ÿî ¾ùÈî£Çn>>pb÷ÉO 4ƒC¯¾åô-#·œÙ}öÆs·œ¿é±ÝßòÄÀ“7=5ðôîOÜô†ÿ]½‘/û?úyõ.ÞÛîâï{ëþønãwýj¯=ømïxÛÕ?õwÛC;ßñæÿõ·ùÀ¿Íx{ÿR¾îêE|îo}㛿ãë¿ñ;¾öÛ¾éÿù·}ý7þóøoþúoü¦¯ýÖ¯ÿ§_ûßöæoýÚoüÚoÿçßúµßö-ßþ ßú-ßðæoø‡ßö-ßúÍÿäÍßþíßú-¼õ~àíoy‡·ñÞÜÆÝ_÷–yÇ÷ÜóÖ·ýðþðÝP·òŸ¼Ç/ÿϹÃÿøû»7Wöö‘oØõÚ?MP¸a`×î× ¼v÷ 7Þøº/}ÍÀëwïüÍ ¯¸éæ›vý©ÿÞýú×îÞù®7¼îµ»oºù†›o¹e`÷ëo¼ñ–× Ürõû^wõ'nºñÆ/ºé‹o~íîo¼áê¿Þ|Ëë¾ä ¯¹ú½7 |ñÍ»vï|ïÍ·ìÚýÅ7¿áu7ÞøÚ«ßÓÍ»_ÿú«ŸúºÏ¿ñÆ×\ý¾›nÞù÷/½ià¦/ëÿä®Ý¯»ú[w¾~Év~×kwÙÀ—Þôú«Wñš«W~Kÿ߯F‚«×uóÕ Ùuõ_w>ï ¯û’7|éM;Ÿ¶«ÿ¹7ÞxÓÕOý¢›v®ôO}ñÎÏÝpõ»nxíŸÚ¹Ó{øâ[®þËkú¿ó4v®wç÷ï~ý^÷eWËεî\Õkwß2«Úù7ô¯ý5ý+ü¢›v®nç§wžÈÎw|ñÍ_6°s_;×½ó}7åÎÕÜ|Ë]½Î›oyÍÕë¾¥ÿ¤¿ä _ö¿Ÿ·ðeÿG} ÿ‘àõ=oÿéýÿOðzÍÿ÷[ûu¯ ]w|íß¿.½çÿcúÏ 0¯ýO˜×Þ°kWÿ‰ ÜpÃϼ~× ïyýìñ¾ã­ÿ÷ÿÇ ú';8†ëÇn¹ç½ï]¯>8þÙ7½ékßôÍoúûoúú7}ÝW¿éëÞø¦¯ûæ7õ7¿ùßüu_ýMWÿôæoúÎ7}ýW¿éëßøMo¾úŸ7}ç7}ÃWÓ7¼ñ›¿þMoþê7]ý¦oøæ¯»ú‡«ßùÕßöuß´óW_ó¦ï|㛾ókÞô oü_ý¥~åâ¥~å »^³k×k^óÚ×^ýÏÕÿz ÿÅ_ôÿòµ9Un¹çê¸éÊ®]¯{í՟ؼúõõ»víZïÿùuWÿ×jþõµ¸k×7_ýóæ®—^ý¿^õÝ»v ìZºö³úÿ»ýŽ7\ýœ9~zùU¿±ýzÝwÜø§v;úJ®¿Æ_¹î_ùùß{Ó—^ýÛ—w½~×Tÿû¿dפŸ·s/õ7¯¾“þß\wù7~¯×Õ_±kcçó{^}%;ÿ¾s ù}¯¾’W}æWìšÛõ¥Wÿ×R{?¯þÎWÝÃÂ5w~Ýu_sŸ>÷ú¹'Կߺ¶W=}ïӧ̬§Û¾ïëÞOðÕ×¼pÍó¿æ÷_ýûíWýÞÿêê¯}á½^óŒó[_¹ne¾ê‰ý±ïÍuÀoÈï}Õ}ÕõÕµ_¹æç¯ÿý®ÂkVÙ¼ÍW¯dó>ÁW®{ãýkû㞟õêgÌô}½Ò®¾?nµÞôWÿû¦«{éÚ;ì¯Èö»®\·w®Ûc_ðï×ïMïÝOÕo}õóýccÅîç|Æuk"ë®ÿìÚwz}¬pmn_ó>ü ¯º÷/|Ãü[½öªùüWEœþ¿ûü¿`õý1÷û‘å ¯äU+¡½ÓWí¶/\ï×_á5wðù߯^[׬ãv5½:~;Ûµ|í·ëäÕ+Ûýý_Δÿr¦üoþLÙu5ûÙI¯~ýŠ«ÿ?wõÿ¿ôêÿ¿tõÿÿË)ó_N™ÿrÊüoå”IQzßÛÞúïúeÑÕºðÿ CÜÒɨpython-ihm-2.7/test/input/6ep0.cif.gz000066400000000000000000005613661503573337200175010ustar00rootroot00000000000000‹´1™eÿ6ep0.cifä½]s#9’ö®_Á¸ ‡îìnnPŸ¾‡QbKÚ–(­ÔÓ»‡Í ¨’ÄŠÔ’TÏôÚþïFâ3É>Í“{ H$€Ì@& “Íd\ ¯“ƒïŒ›Åfõ£?{èõzÖ“‰“·‡Ùf<].—«—þÃlº/&/MOý{y™Îǯ÷@Vü{³ZÏ– Ïû¼âa¨ùr:Ù(°çÍæõÿË_$î¾ÀÝ_®žþ@"{²š5ë¿LÖÓÙì/~墹óåòu|0~ýºŸ¬›1ëÛ?E¿ÂÓåCC³¥û=™N›µìAèäê¼wp}20$3ÿDæ8ÿŠæ5é¥IŸ±4ù‹H„ß÷þþw(v2N™„«ë¬÷©‡þ‘úꡬ]‘qÕ|ŸAóÆÏ³õf)8¸\=Ì“y¯ º,ض7?^›-ð/“ÿ^®lò6àÙbw`› š$Z‘öo7«·éæmÕô^s懽´—ôX’Öö1)z,“Z˜THÂ0 Ã?2!áY† –}LÒi©FA¨ùÍf2›¯-¹ñ¿tK›€ o+ÓfX/Ðún¯«å÷ÙC³Âm]5¯Ëõ 8±¥°®“ü;<_Ì63ÑU3oÄÀØÖ€‡f=]Í^õH7¢¾­ŒŒ{txl O«åÛë–¡ `Z¼èÞuÔ(hùÿ Ñ!y=<¸zÓå|ÞÈ©íÐs‡‡ÉGÐú±Y5 1-É2BÒƒð.eýöò2YýÐ5©ç{µ'ÃÛSô²½ð—qø þ*߬fß›‡Þt2Ÿ¾Íåj" Ôaø›æq¶h^'{H,·I–X§š§íó¬ÛQ¾,ü®"f ˜?@Ђ’0m&dÄÑU……KÙµN,TP«WŠïVJ„©èxÏŠ^°ÔZ&ô"ÿ„%pK¿ŠîR‚ž›çåÊ/U†KQ@Ì£SRªêelúܼ^¾¼Ž'›åK«…õ¶R÷ËÅC«TšK9Ý$H4 –’ýš©I\ðÊŒˆ±Ì¥X¼”bˆª åâiÞ¸ºx°Ôzù¸ù}²j"üJ³p)WÍþ:ÑlÓ¼l¨dÇA*aw þ/:0ÇFû›ÙfÞöºþÅê¡æÄ.8ÔÀåÝm™.ß`+Ð… ò,ÿ½|[ JŽ'÷÷‚$‡û ì–ÙÃøøöäpŸÆr~{; ¡‰ ü–ïËùÛK˜Ð0lÁò:yjƳÕz%0L »`™O:ȉb2« êˆ Ía{âØËõÛýeópH°°mX~4“ÕáM,‡¡Yœº‘‰æp‡=à!Â’G°„'Çþëf±Jå¢3ž¬¼:–â§±ÈBc)˺ù§B"°T?ƒe>¹oæ®KiýóXl—XòóXl—Xº?Þâc?ñˆñŸÆ‚xIJŸÁâóˆå?ñ¨øy,ˆG{Êî÷Éü­5ù°˜ì¥£ÇVÛæ“]Ô =5ÌÖ›q°)b…MvÁš–t?,NÔ¶'j ß‹'jK¶/¡4·±ä{b æ€{b 怗;cyi‚ZÕ¯vÄÂ:å¥ÞKX^²d?,ayÉÒ±tÈKÆöÅ’—Œï‰%(/Y¶'– ¼dùÎX:äÅžHZýd-t¨·u_EêD;ôïfx±CÑñúñ§‹¾¬ÚE?Å JƒÃxö›Ž•"\vÕL¿?Øëƒ:ô‡È=8å-?¦‰ø/†áöTY ceáà@h’?UÓyºÞƒÎBŽž—kA¥ïÍ|ù*÷÷öШ« ˜1ÀD3ÙÈA!ôôû9nöÝ.M]¼¬d~ûøÖ+»jæ¢Q}Q)21Iâ 
ôhøðësÓ[CYAçM3[ôÄÍËëæ‡>Ecî°«^,‹ùè—¿Ga½sø·Åúµ™ÎgÍ>‘PêœIê=/M"wG$oö *”«ñÕÍñùIïàð[ócµ\|èÝö¥ÁLGE`óœó9+*uüܬU>ôþ*àXÔ”ä¶^“{AžIèîSLnÆnc`÷Ëåoj§=u`“`¯o÷óÙú™Z$:À ýGL€tvaºTÔÞˆÍólñ›ØDGÀÞ"SÃ/_0ÞôН@Ù„Ð…,°øLøºšÁ>¦Ç+Ä&3šlzÇË——78Ì‚Ý~½Œµ‚ž¸ôvƒ#Æ’ ,gŒ‡ÀßæóX¬ëõ[³LÄË[‚áæbËð†˜AçŽd|°4JÈAàØ¥· 2(‹Š?G½ã ûß>®š§79›ôš£ËëÊdÑ[ÿ>ÛLŸaæz~{™,z“ÅCï^Œºfv¶ÏçÇfr[÷ {áü$,=`ñŒI-e1¥JàpȹŸ&¼úË:K³¢ü( ªuÂøÇ´³¼:åyRåI^y[˜Aþ0~œOž"¶>ïô¾oKû¾Ÿ«ç\?Ù͸~N{Ò5CÏ›|£Ó¯…¿šÏ¾O>ôN5xl"¶ð7K!­Ë½KU‚o¼MŸÁ¢Y}è‹"Ùøkhÿô¹1MÊ·ÀÓeDhc¨¿U]yR”˜5@¹t ±ëÓqýk½šŽ•.âÒ” «°ÍÅCŒñïÍìéyã•X¼½Ü -Ç/Ëy3}›7k¢™z /oÚZD“W“'i´Éf¶>H{¯Ëù½8ÁX<ü<›ö'/³ù3 w³‚ý4-{LЬúßë-–‹Íú‡@qt2]Ýž†½Ë«ÑÕõÙÕíõÙÑ×áag%8ŠHQA(xÅñÑÅñù/—½ó+ï(;KúIYIÝ£È(Î.®nÎO†-<ïg9ïù­È{¿‹©ÊL0b%¿½©°ê'i.ÿâÜ¡øwKâ14¥¯ÿŽ­z©r?À³*½6Bˆšÿ¸øÏC¯ôb½yð—|ôo± Á¿,Ë—`'[ðê°Jìù– ±@4› è*nvð_—gòßåÅpôåâôü|ô·‹áÅèæÛpxóíöËptôåæâîäöôäüäæ|øíôëç/§·wçg£»‹ÏÓó»áç£Ó/ßnnÿ6:¿ýr::ø|óùèúÛåÝÅðüèbxv~òåòúo7£‹“áù·£/w߆—£ó£³ëÏ7ÃÓÑéÍÑ×›ó¿_œ\œ_|ù2¼øróíÛÉßGç£/Ó»‹Û£ËáÍÁíõ·/'Û—»‹óÛÑÑÅד/磓›áùçË/§ç'ç·wwwÃÓ»¯Ã»Ñ·“áÅÁíF±ÕXüÿ‰BE^<„¤úèà $4›Éê©Ú¬<N.º°ÍÞ çëå/”¥9¤+UtFZ‰ô£ŠÎ7ÇGÒŸMŽ/zþÜAÿ]µ×I%ÒC?O¬Ô£ øÏhÃfu˜b/‡_{ åš#q~«~pü#Ã?rü£À?Jü£Â¨kñãbø‹ú!ÓÞé…ù!šp$”fõC4áâN#H9.“A™;ýC4áüb¨øG‰±UPÆü¨6†[ëŒËaÐÛ{G7§ú‡hÁ·£ ý#Ç ü£Äe*\F´àvx£É› žò!à¸Zpq¤d¸LŽêáê-øzgê©Ûµþ[%ˆ¢YŠÀ„öî(šqœ“¡J3Ì… Ó +Q·³ ×#ZðõL· -¸>Óòu.g¨LŽå ÏPòõ4Çr—X`+DѼFeŠ¡.RÔœ‚¡þ˜E†ê)p L¡"lbcQ£Î•XÊÑ­dˆÖ%GÝ.3$×eŽ:Wb”%.Sázp ª!¨0 *Ü‚Šãœ õ§-¸¾¹Ò? Ô…ªDS@U!RUx4Öx4Ö)êBÍP=5–ƒ:ÃerÄíÓ .çjLƒºFmK“Äu!MR4m$ÌÑ@ìôÐì’dxJË]=iR ù-)Ñô”TŽ:iR»Î¥bRt3Ÿ˜-©R1)º2bRDSg†ºæ¨mbRt•ŠIÕS9R¥bRtÝ“¢ep*&E×m1)ºJG±˜Ñäãœ#À-“¢R1):01)ºV‹Iñ롨˜­9G´æ˜ bRD œSâ…¥Â?jÔ91)¢%'E`–1):–ˆIýÀr &E×1):R‰IÕS£V‹IÑå¸9æBŽi &E'.bRt¨Å¤h§€4Ç\“¢“Ó@LŠ®§bRtĤèÄ¥À4“¢khiP`.ˆIÕSáE¼FĤèú#&E‡@LŠh­ç¨m%–D1)ºž–¸bRtÂ'&E×з Â’(&E¤S0TFLŠŽTbRt«°$ŠIÑ,1):ÂWX«Ÿ· NQ¥5¦AiPc9“"úQà˜ bRD•ÖŒ‰9Ñ’ŠÉ9ÑjO ƒ¡9‘%h,0<'2<'21'j– 0œ‰<5‹m›aoClJ©‡ø/+5™oÆëåÛjÚxÇmòßzòò:obŠM^|7~8˜-ç˧Ùt2ï È7pÜ>Œ`ºož$6Påwoz#öWñR@çV9ùCì{`‡€ Lèì3VJüñ¶ööS¤¸h›P'pß¹¡M=?ì®RÝÖNbÚú½J­åÞzŠø~8\À~pºœNk'×Ùúp®Åô~&¶ìÀÉ®b N·ü>YÍ&^Ë·7}ÚÌçcdVÚ­Ôd3îOÜåêi²ø¹R¢•ÍÞýÚ¿…Pêm>YáÛ‚¡RÏËõ¶ßh—uÙ¢¡YOŸ›Õlú<“WYf½ÁKÿãdÈÿóp ädEõ8Þƒýg\´Ù¿Ô®RàµÏ4Eë Î:;–òfíü Í‹[Kí5jØí3jm©½F­«ëmn cw±j×þ¤lì:ÖI]»ŒZò¢3Ë•§gìXjéÝk1jþx])?ðñúÇzÓ¼´õ®P ç“õËì!4 íP*ÞÀ(5B—,cæTsÕH™íÜOm¹s r£ˆÔqŽ1²Ú¥$þX,?^Ö$GÛôBiÖÎ{˜Ã‹Ú”Ó›ÍlñtØû»›œR‡útxÌ{geoÔ»búºªû œžÀ)6¶™ýŽ“o@˜&½³4ëòÞUÙ»vö¹ØýDZxszn" ,¾¼7ÊD{©h¥Ø±õYR „b+Ax{}tst@ ³ÞYÕ±Þ×&iÎúpÖq;0ÂC‰ñëùqïèøœÜ§Ñ% 3‘r–þÞôGh5ÂI† ”`ˆ!Œá¼÷1= ˜+àð*Üe±ûztâ ÌSDÄ4+úpºq· 5Æ0 šÐPˆ œøÀŽó°…®'[xwlÖ;˱` ©Ë{Â-9_ÏOb].T—¹•Ã\t¹`áÕY„)£â&2už ↠À°e·ðüöJl†#‚ -ä¨Ï)Oûpòqè0¶­ïb49a|x,3KCàr÷à˜#ŒPäœ ‘Žtä0•-ìÝJᆲî3‘r‡(a„"gtwšamê.yN[à4+‚ðæê"NC#ØFÓT äIïÎbÂEN ÍŒÍ Â$—SöœÀ„Šœa„fúª ´îÃqïHFÞÜ]3ÿÑ(„Pð,eŠÍ‡ÊµM,‚ËpAxwsu],SÌŒ]¥Š)p€F(r:h˜û\N…fEËrlÌ cØÀ¼4}ípß‹d·,Ë>€roåZ#l0wñpoÏo" ¯ÛAìõƒ-hºÛ"±tƒÈ¶X«~Ðl±6_“  ôQÏÙ SyÎ¥¾÷b‘˜p ÌX§™õÇ ‚»5—~cQ0cц8‹ƒ[w.½Ä¢`Æ ^Xw£ ˜±C\ÆÁŒåâ*fl꛿! 0cm‡Øø ƒi;<Ä&Ào˜¶ÐClü†€Á´­bBmÓV}ˆM€ß0˜¶4Clü†€Á´åbB`ÚGbB=ÕÞ›¿!`0mɄ؄@OÇÄ&¸`| 6!„Mû@lB  Æbà· L{6@lü†€Á´ÏÄ&„º ­à›Ó~›j›ö €Ø„PÛ´gÄ&ÀoxrÐÖvˆMH¯ñǀ؄@Œ§Ä&¸`|8 6~Ë€À´wÄ&„Ú¦ý> 6!@7ã± A2¾"›¿!`0íE± ð[¦©± .ψMŒz㓱 J· Ä&F½ñcØ„6íá± ò߈MUªeb‚düe 6! 䯓bBtÓ>6›¿!à¥M{ß@lü†€Á´_Ä&ÄÒxì@lB mÆ—bbi¼| 6! HÆÿbÒk<ƒ 6!À,ã3± ðÓÞD›õÆÏbm3H› ˆñM‚Ø„³Œ×Ä&äÍø3AlB`,O'ˆMÐÍø@AlB¨§Zö!6!Ô6í7± ¡žj*ˆMLªÆ× bbi¼° 6! äÆ? 
bãÔxnAlB`1>]›rãí± =ÄøAlB@,‡Ä&xj|Ç 6!Ô6íQ± !ºiªClB€ Æ b]0>j›ã½± .¿6ˆMaÓo›7ã ± ðÓ^r›$ã?± ðÓžu›€ÆçbSñƃ؄À85~z›$ãÁ± ºß>ˆMµM{ýAlB¨mÚb3¹ñ„Ø„P´!Ä&fKã]± ±`ü!6!ÀS㑱 A2¾Š›Ú¤h/FˆMŒãß± ¡‘ö|„Ø„àÆSùÿ@lB€nÆ[bBûSíG ± ¡Íö°„Ø„ÀÈ2¾—›Kã• ± ¡Ý®öׄ؄À4žœ›j›öñ„Ø„à†]yBlBH´_(Ä&„xª=F!6!D^íK ± Aº)/SˆM @ã ± !!מ©›˜¢Ï*Ä&„„\{³BlBˆ¼ÚÏbBGÚb‚Ç&¹Æ–ÛÄVè.6» ¹ bB\О¶›Ø\ˆMUª½s!6!$!Úobà7Æô³!4‡h__ˆMÍ!Ú b‚Ør݅܆` ­°!ˆMùClBð(¬Ò`• A0åƒ ± !±ÔÊ›Ä&„äMûñClB¨ ÚÃb‚gøÊïbB<Õ· 6!4Nõ}ˆMI¯¾I± Á.¨;›ZvõíˆM±^ßK€Ø„ôˆMÑMße€Ø„ íCÝr€Ø„PÛôýˆM @}3bBb©ïL@lB° ê6Ä&„´A}ÏbBS¾± Aòª»›Òßô­ ˆMuAßç€Ø„yõMˆMbÓcAÄ&„Æ‚¾± ¡qªï@lB¬Ð`… A0=#‰Ø„ëõ-ˆM’·Ö•Ö6*57W 6!0͈MªômˆMVªöÊ›2Éé2›²gé½2Ä&$Äܪ؄`Ô}ˆMð&®ÁCüÀuâX—!uâX—!uâ˜oˆ7¿! 0ßo~C@`]†xÖeˆÇ`†x ÖaˆÇ`†x ÖaˆÇ`†x ÖaˆÇ`†x ÖaˆÇ`†x,½†x ÖaˆÇ`†x ÖaˆÇ`†x ÖaˆÇ`†x ÖaˆÇ`†x ÖaˆÇ“C‡!ƒuâ1X‡!ƒuâ1X‡!ƒuâ1X‡!ƒuâ1X‡!ƒuâñÜÛaˆÇ`†x ÖaˆÇ`†x ÖaˆÇ`†x ÖaˆÇ`†x ÖaˆÇ`†x¼´uâ1X‡!ƒuâ1X‡!ƒuâ1X‡!ƒuâ1X‡!ƒuâ1X‡!k†x ÖaˆÇ`†x ÖaˆÇ`†x ÖaˆÇ`†x ÖaˆÇ`†x ÖaˆÇŠY‡!ƒuâ1X‡!ƒuâ1X‡!ƒuâ1X‡!ƒuâ1X‡!ƒuâ±ÞÛaˆÇ`†x ÖaˆÇ`†x ÖaˆÇ`†x ÖaˆÇ`†x ÖaˆÇ`†x¼­è0Äc°C<ë0Äc°C<ë0Äc°C<ë0Äc°C<ë0Äc°C<ٵŠñ,nˆ'`qC<‹â XÜOÀâ†x7ݸ!ž€Å ñ,nˆ'›â¸!ž€Å ñ,nˆ'`qC<‹â XÜOÀâ†x7ݸ!ž€Å ñäÌ!nˆ'`qC<‹â XÜOÀâ†x7ݸ!ž€Å ñ,nˆ'`qC<9Ò‰â XÜOÀâ†x7ݸ!ž€Å ñ,nˆ'`qC<‹â XÜONÌâ†x7ݸ!ž€Å ñ,nˆ'`qC<‹â XÜOÀâ†x7Ä“ɸ!ž€Å ñ,nˆ'`qC<‹â XÜOÀâ†x7ݸ!ž€Å ñä¼7nˆ'`qC<‹â XÜOÀâ†x7ݸ!ž€Å ñ,nˆ'`qC<9Nâ XÜOÀâ†x7ݸ!ž€Å ñ,nˆ'`qC<‹â XÜO¬qC<‹â XÜOÀâ†x7ݸ!ž€Å ñ,nˆ'`qC<‹â‰1(nˆ'`qC<‹â XÜOÀâ†x7ݸ!ž€Å ñ,nˆ'`qC<±µÅ ñ,nˆ'`qC<‹â XÜOÀâ†x7ݸ!^ƒy+Щ|«ÀËm}ªÀ˧—û½ÌÀ§<ˆ×­¡t¶#ð‚P;ü¯`ÜGŽ{L~ÁLˆ‰ò`€Ÿ2²œô¸ü€¿Ìg2~BùCœÏ¥;ÉÿŒó3éÇ@òOq~.HþÎ/¤çÉ?(á3A2¿”Îð‚Ìÿ+í_Šúbuð…ö/EøeþÆÏUÿ5~™)š _Äã_¢6ƒðÂΫàç½p>˜û~°3óÁÎ'èÇcåÁÀXi$,{@¿,’&= _É[^R)wP>ñ’ZÚ—ƒùiÒËD£w8Œ©Êñ!œÏD>“GCá|A;ÑòBöD¨bô‡ºáèIô!œ_ˆüBÚ¨Ãù¥È/¥M1œ_‰üJž½„ók‘_Ësñ°üú‰Àcí‡G¬™² ‡óýäIR,_ÐN„*Æ_xRÎŽbý‡Ç¢E¨£õ ú‰Àbò <Ã1Q´¼ „<–/è'Bë<· 'B±ñ)suŽÎô!Õ‹À¢õ úÉSÉôƒð±yL>à‘aX4_ÐN„"Æ?xX„:V?<ù+‹/xÌ7S^á|A?8½‰âô˸ô9 ç ú‰ÇÚæŠPÅä+ôGñ ú‰PDóý2uVÎô¡Š•ÏýD(bôÉÁaC`†óýDÈcôËýD(£õ úÁI‹˜ãÃù‚~"ðØø„Ïàl%‰Ì_¹ Ÿul~ÊaìVÒhÎôÅø+“Áù‰£á|ðdQ†‰p¾ ŸE¬… •°Èú"Î ÎHDÃù‚~"T±ùK ¼L„<º> ú‰PÇø''¡ŠÍ?E Ÿ –V³ðú.èW*ëP8\|”Q=œ/ègI„>%¬½\š•Âù‚~pºÁ¼üÕŸJDŸÎ7úSZ„óµþÄz‘òFÊ"ùZªcåµþT‘|­?±~­?ñ2’¯õ§"Ö­?u,_ëOŒEò•þTðXy­?Åú§õ§,Ú>­?¥U$_éO` ç+ý‰Gû§ô§2F_­?å±þiý‰EåKéOEË×úS’Dò•þTÆè§õ§<‰å+ý©ˆÉ‡ÖŸ²*–¯ô§*Z^éOu´¼ÒŸÀœ_J*Y¤¼ÖŸòØøÑúS£¯ÖŸÊh¾ÒŸòÿþã¿ÖŸÀù œ¯ô§2&ZâÑö)ý ì—áùGëO±òZ*cü1ú«#ùJÊcü×ú˜¯ÃùJÊbüÕú˜qÃùJt8_éOUŒ¾ZJcò£õ§2Æ_£?Åè«õ§"Ö?­?UÑ|­?¥‘þký ü%ÂùFЬ_FŠñOëOY”>JªüùÇš­—›ß'«Æ{ Ù%Ï'ë5\~‡—kú+ÔqýQtû;"åìûÒŸz«æq¶hЛ%Ÿ‚ÿür«>°¬~²~šô9<€ó©wOh÷Öð<‰ú^tƒ ùòúæêXÿì›G§-†Uóð&ß8Œ`øÇÉ-n¢ÆÀÃëód-jÇŸž ý>H|þC¦<‚gú‚Yóf<™ „íïÙÖI?Iãfýx½ÝAÝ7›º>€l£óñ=M^^vkŸ„ìhŸàû*úªS1¼¶è:>óЙoå¿-f›õø×Ô¼Y>›Ðw]>uC¼¤O·ÉëNb¯Äq9I Ç7èGƒyÜýטï×ËÕëf<]6Bå€W”Æ/oîÁhµR¯ÖŒ¿Ž_&ì4[l2oÆ´@šõ?ªâ€:X«™ªà¦«ëÍdâPj¶=Û"yŸ—aQ:üÇÇ›£»ÞÉùçÏ7GÇ_åS´Xðẻ±n¨ÐÊæË·UÇË3Zè]k0¨>ÌW‹]¡/'›Ísó;i ëY´@¨ïqô¯Íj ‚´^:ñÎyŸñÇoÚø?±¤ïJš­¯3è×Uó:YMvlÉzö¯F€¡Ãí‰B/ö€^MvÄ­„k<[oVÍ®ÐB§OÝwB¿®f/“Õ]e奙¬ws€ö‚-ÐO›}pÏ÷‚Þ4/¯ûA£Æïí¿ ô¼Z. 
úÇXpè©¿­æ{@ƒ6;ÙÄ¡_–ë‰X@6[9O¡)7C3"¬`¿÷'¯rxÚçõ>EÀ6óësc¤;f»ö™˜?iG°ù³ãᷣ뫹ür+ƒ½³£Ñéùè´wrsu}Ø…d,¶ u½žm¡¯ƒ§ÏÞÜß†ÎØºi°FÔ fûóFMÕi0¸úÀÅÀf/Í.”S²ÙJóäå²÷u%tMAüªŸ|€Óg‘4šÏ?ôrñ×Ñåõ‡^Òg—½ãÉ|:{{é?Ï—«ÙCÉéeïlx=¼í­ÅD*òÖ1Pª²Ÿè±êéýÇ÷¿|ÿÏÞõð´—%ÉaG³^ÏÄT¿xjÜHQë~òr/U·fñ}¶Z.Ì Æ§6„GüTî‘0ˆ,ÞT¦!ü‘…\býöúº\‘)˃k”Ïö4AD6YýÓv_¼¡ò)ö[1ˆ§Í6ó.Îx~‚LòN€_ò9Å퇡‡Õ¿ëó /B…Ôï0™Ó@øÔ0cž ¿ÞœßŠê.޾þrÛ+.8&B Äì"ôJXõ;tÍôÛ ¨ÝÓJì–ÄšºB¸S÷œç*´%w) OYÝsØiù1)?¦%á­ÐïfR5%k.ÖãÈk‘´JWª“YÃ\©ÇÙ|Ó¬ÆÍÃS³G]³Åóòe ÏÙ¶tŽR/ËÅrú¼ZŠ®…1Xêu9Ÿˆyy1^ådÿRRÝß¹ÔjyàX©N©–úc5ù;úûå|R¿O¾7úädë°DýRê˜%ÿ ^‰Ï'ob2†ê/£¥P…óÙz³ i©])/Wd•*¸°YN=²Þ ÝêbØûûÑ·áÅptúõì0Šg²˜Ìü«ÙCÆäû¨S±cnV Ü´8úÇGÁ­ðpE}¥ Rº˜Ð(é×eÁ²r[Ì€´Ÿ&­ÅÖlÚô§o«•ÿ"{ ÷ºk% @wL2i úuù{³Ú>&44ìr{»CË( }{7:>»¹úzs5j•ÛLVOÍf×Zº†öáðöæso0<º”ÏžŸ°ú°…àûr¾™DæÖ@u“ßšËÇGu¼ ºk|¶å)\h—*Ö?0ol„Ò9¾o&/úmèðöbûj:"é%Vlæ‹u0ž­—ã¿ ñ€Íz‹éuÁú ðJ=y“ϳ§Ö'ZGÀçËß[­®X_¾8oà»t;¯)BO˜mÆÏ±cª0ìl±#ìo{àým¼ó=ðÎw«~Ç“ù¼·+ìò>FbÆ“Œ9xج¾7b¯6ƒõa¹ˆãnÃŽ?“În…Eí†=ßïùx׳§—Éøó°çmXs:üº\¯g÷ó¦Emµ'ÕÐ7Boš-/¿Ø—FLà‚R1^‡`c¼F°ŸW³æ¡òå÷fš±Ûô´Ùa„nžÅè^ÎÆî­ú¬œ˜ƒ+Fªž*ï¼îuÉÔÛù6îP`ºžn•¨ÎfP¾oÁÜâ"‚3&°üÇi½ p×òŸ–ý*Y£ÃÈ#+^¸µŒ*f+ Yp"Àá'éó:‰P°½DG±GÖ†4YtÂÀ‘•D,",w4¼”¸UDl»û“l™ÿGúŠÿºÎ?æý2…[H¡¢¬³(8U&I¸$ÿ¹’l[ñ沟®”o+™¦ýŒÕ*ªN$¢»Ó–4¡2-[EûtBÌ®íºf‹=ê’>UZ«“î\ãÏËñf9þuÁ2aîšö¥•WîóúŸÛˆâ×u¾i·ë)ÛÊ„ªóË(y{ÙÛ¸\t”‰öˆ–Y?Ï7js&&¼.—æx™°v땱‹­¦xpM£e~oÄZ 71ÆÛ Ñ¡2ê+[»Ðà÷`¬L׌•é|ñ2ñÊ,§Ó·W)k[v*á2Ý;Tf½œ——E–`Ùr"XF®Nãûõr¾o™ß¶”ÁóHܳU xÚì_fÔÔ¢ãgýeÓ–2¾§úö2‘Mè¶2¡Š>y[ (HÌ}ÝõÐ2Ÿ·lߣåÄ„¿c]Ò‚?}Û€û„´½ýÿó¾e`«æ•ë*v›@5­2b£¿Y-_gÓñæ¹Y½LæJÔ·ÑnºZ ©û>™ÏÂ:ø×³›«_NÏ®~ùêÖŽæb‡>‰«Øªéí/¯.†Ç¿\Ýôn†×GÇÃËáè롇i½™¬ä\Ûn±»÷3úåï­bͪYÂô<[öV¹Ý¨ƒÊu¤Çb}bÝX·ÉöŒss4:¹ºŒV†ê¯_›éx:Y7Áʤ™Wl5†·¿Œov1RF6v{3·}ø},Ý ¥c×l§2@ƒ}ˬŸW³Åop™c{±©17ovÒæP(m-óú`Tt1ƒ¾.ûp¶[¬w,ÔêeäE™®‚2p†›ˆí¢²O™ß…e.'+8{ØŒ[ÇqÑ2b\=¯ÇmBïá㤿Ï~^!¹V–ã­j?)#‡‚TýWo³éoëçÉâ·ñÉõ¹Rá3¾µÜfQUÀ©þÆ .×.blYé•ûzq JóìáMÈúÑÉõøq>yÚ&ãÛ\©Ý¡¿T­\Ó»’¢2—û—¹ P›ÐñH¹Ža¨®ÏW—ŠK7pez÷2 ÇîRFùë*—›ñãzjø¼W™˜ÒÜU&FbnJÁz³ý„µëdU¡˜þ˜Î;gÓPÆ«m³|‘3ͦqª;ç<ÛZfñ&ªZÇd*«N¶˜ÏžÄ”áv¿ ¬—Úžti]·€žähŸ¯Êl=†%'°¸LǬ>~uéѥͮÚçsÀ9¶óšï¢Pߘ‰çiõ$¢o®·3Œ5³£¿}ã'«Ýq/Ú=}F6~|S§»½ƒø~½B`¶iþ Úã×å³EN6Áü¸«œÈ×YUxŨ¯¾ËŠdéû9œ«\Ÿ „F$vK³Õ›¤þp’¿„Ó ÙŽ1F=س ÜŸÇòOžÈ?9ü9t~vžº?ÏÜŸçòOP¶þê}qàRþ™ÃŸ#÷'âEý-‡òá^}´6ðï—Ñu V¾‰‚-n“»t<}ý|A ©Ã¹ûñd:õ]˜T¡0øl½|ô¾ð‰B:nmë£:koþ9^ÂìÒÀ¶éÈÁ]^ G_.NÏÏG»^Œn¾ ‡7ßn¿ ¿}ùÛÅÝÉíéÉù‰Hß|>ºþvypw1*gÊ>ÿÖ…!.‹Î=C×ÙŒ˜üqo? 
Þ e¥¬+~o ºÎNØ%-ŠÅ{/f?Ò­ežæ<’_©ÿ$-ŸôîùP¬ËøDÆâ©‹è­Pùe xcþ¨Ç¢XCìf"?ç­¶k*P,|_,\Š%ÛK¦Å’ï‹%Wb)öÅR¨@±”ûb)U0XäØò=;‚]…G¦ËÅã|6mB a§få«ËéÅH>ÕÅ" Ô ¼¦z$ß>#XàANÞ ÀíÀä y ‡g£XJp‰õz¯°·zoÕǰȧۙG]x“œ`Qo®ó8xÈœ#,ðJ%<<êµåù>¸uôaðáøÃɇá‡ÏN?œ}8ÿð×_>\|¸ü0êh©Ô‹›ï3xX§éúšé¥Û=Þ§´è»¸àD>ÚºX.šÝÄ?Öê÷^Ò(·ÛÈtàÁU‡P?Þôp¢^Ó‰ažN¥ã<Ï£EÌ“ c‹^ùãÃÿŠQ¾wÞ íT]R67•·•D—¥“ýJò=K~—_,õ.“ïS'#ýܳ$û9 ±Ÿî'ûé~òŸî'ÿi~Òìéýäýl›ÀÚ /…=ª£}|" ³Z ö˜î¹™Ïþh€Ó©ùä^~Nëåµ+_O¥Ñ|8 W0ò¤°ut"aàX«« .?Ü—oÀt4Z(gÉ8dvœöà$ؼÜ6;Þ¹8n»ò­:¤uæ*¨¿0yp6¼øÇøº§"±;:JÍžTjÁ·ÃØ·×ð·Kw©òà{xàaBk¼ƒ}¶T!Ôn9“®<.Ý¥æÊkˆâဇÃM €ªUYØ£rÏáÒ]*´¶´Ox²ÞõÍtÊ=‚:¿ªd]ºK…öp¿=9àÉ{_ï ÞªrxjÙ6—îRe{˜‡§<…¡g¦uV¯Æ~Bé.Uâ)<<%à)5=Áõá“Ü|Á©Djé é.Uâ©=<à©z_o®%hùê¤%ÍÀÐ¥»T OæÓ§<59ÓP’’©ë—Hw©Aú¤bß2Hõ©FZäHö2ù·Iw©Oâã8ÌÀWÌò(­ ‡G¤»T‰'÷ñ®kc’_CuVRÚ¶AºK Ês*p 2³³³ãk€Æ×ÀޝA|| ?ä’¿=¾ðþ×¥»TÀS´Ú#(8(ä8 ñ5@ãk`Ç× >¾„` J9Žz|Ä ä+»ŸPºK ã)žÊÐÁŽ‹;.ñq!Òµ¦ƒ4.v\ âãB$§Rþh\ иØq1ˆ ±‹::fdG¯8ìÆÅÀŽ‹A|\0!ÏÇ\ïéõ¸¸8’2“kyVé.UâI}<¢-Ç™—ãBq8­K‡§b(ÕÐùßé²"•²YР´‡öijñ:€Uóجå3ø)¬ -|}¾È¿Xu#ýéðy3ùwäÀÉÝóp@ʬéçl«´C¡¡ÅžBt¯hb¸l¾é„Šª= Ü3&«‡Î†›-V “m¥ ÛJ¶2l7ʰ(Ã:)#ûÕË@Åbª›éÏ–ضØÖb<3dà»Ñ”wòƇŠ4¸Ug¸ã-°N>ò.>†´_û„ÁZßr åÊ °sÊ«­ìÕr.Š ÂNæS¡'«?¤ð‘œÀáTûê$Õùªƒ‹ÓÞñQO>ì 1…=’i,0–¾ Ìúàèé*Y«Êë^Á¡J¶C• v[•YZã*y´JÚ˳ª,"U–,ÇUf^•ò˜Z¬VWCZåI Jkªd‘*áårTe¨’%iïª×#U#U¬©’Gªäe‚«,bUÙûU)µ'[e­’öóUf1ÂJ¥ÚVY‘*¥ï$ ¾b×[y °Uã%Ï akRe±©òR> /Ó3UšŠŽd^–ÆIR“A"öïVe©²¢„êÜ»UYÇxÉh/ÙnUŽ:ª¨*ó˜øäŒ‘*9®r­rÐ’e74\^l\¦¤Êl·*ã½ä¦—b©’Ñ*s\åçŸàefxY$‘*‹”LB‹ß©ÊAk*tUv–‹ABÆ¥˜qwªrÔÑKCØØTpJØêݪ̣¼äd%ƒ Uyú¼Ì /óØìSfd½dɶ*óq&yé/Þ®J—›}ȸdén½uôR6K yY@b«ŒTIfŸ³Ÿèe±¥—BbSZ%ß¿J¶W•¢—œ –m«ra‹­„-s202û ¢êÖ—Vcì`Ëd%aE¬JönUòŠJ,™}þUDBUEd[•¬¤U’Ùç˼øëeTÃK32û°úݪLcëeEÇ%OÞ­Ê,¦ˆdfDžíƒ'Ý”Tüài±åà ›ñ$¸¼‚Úoyd ¼ðyÎ.ÀfÏ» ¬ÝFïì¶æ;ÛÝ~'¬:ÄØ­wÞÇvØÛKA:A½ã‚NXtzÒÇöá0ÛƒÃl³}8Ìöà0ÛƒÃl³=8Ìvç0ÛƒÃlGó}8Ì÷à0߇Ã|ó=8Ì÷à0߃Ã|óÝ9Ì÷à0ßÃ!Ÿ8?[~ú 5Ú¤;º£«uK¬gZîŒK§K•­´Cw.FÿV0uÒ—ØûT)3·Ùø„6¦,u•Vínâ;TYå}ùQ”ƒì›*EͶtÐjuÁúÒ¶y¿O/w¨²JúÒÕá èíD•w¨2ÍûÒòzP¾£øŒP«Ýæâ“Þ^¤²NQeõŽâÓ]e-¶j²—õûv—^0JÀÛ5Ù‰ïQeÉu•)âåžyU{‰»CåÓËÞ§J¶½ÊÔTÉw'ìÿ¬ÊfYeö.½Ü—E¥Çeš¿K/w©2ísUe±UÞ£J3.ÓR8‹ ýUyâUé²ÕÞÜ °Qͱº—Õ»T›MÒW™ª*ëšøU¦IÙ—Ÿ`ÉŸÛËÊU™ÂêU¦n/Q•r†*ÙNTy‡*…Ī%šñ?—°µëe 3T™ý¹„uUÖ©ž}Xþçõ2áZ|ŠñU–🬲|?^&<ù¡<ÓËDKlõ~¼ì®²®ÄÖïÇË-½Ì¹š}xò~¼ÜÒËTó’§;ñþª,k=H¸™}œÕ›þªzUrWåmE!tJÅKÎß§ÊÔ‘«bƒŠ*ÕÛ žíÔÄw¨²¬ôzÉówéeŒ—Ò WK,¯Õž„ïÒË]ªÌ=HʨòUÖFuæT÷éìåg¯Êlû[$X|t•õûU¹E|̉H–ìÔÄw¨2Ͳ¾¼Â˜¥ï×Ë/$JÍØûõ²»J$r‰ÎøNTy‡*óœûd*as<¤š—ùŸJXTeÍL/‹?•°¨ÊÊhY¹#Þƒ°¬Ô„­È„§T¬AXÝúbª´–è!ÛŠ"7ÇYý.UJí¯Hc´4¶”ÁÀ„cÃd§&¾C•U®×Ë<}—^FÅ£ ¯Ô½dïÒË]ªÌKµDç|'ª¼C•vñʳ?•°©ku^ëÅkG ‡*ëJOyñ§U [Z©ûäåNŒx^2½^æÕŸÊË oi =Hê?•—¨J¡(‰ ÔŸÉKTemÔŠôOå%ª²â¦J¶ïߣJ¦Ië~Ñú¹i6ä‚‘JÑ_Ø iúCºêsk?×}IôH^àƒêàF¬ý“»?3óg¸=êFB_ýÝj›Îx‚ûOãtK> ç/×òé‹PÍÍbݨž¤²¡“ÅföœÏ›¹êW$ƒÇ2²pF„²õQ ¨ÜXzÇm÷0Xë†JÌ¿ƒ¡¶\Ç 7áÃ`͋ߋo5/~=Þïoø>Rj+í‚W—ül¯0~ƒ>e*T"¬î—eÏÞ–.+wÓÒ]ª,Âz×gpí¹JÝEöJ.í.Ý¥êñ ¾1júdÿÎÐߥÊ"L_NËÂ^mOËÒ^5†t—ª—º/êú2@}ؾ L_¸îËõe€ú2°}˜¾dº/Ô—êËÀöe`ú’é¾ P_¨/Û•ñ·S¬|†—mðØ)p+Û ÔíÇ{¾tàzÕÄáƒÞ2!ð‡™x‡;P|÷¾v8EÁwì)Û“QlOF±=ÅöcÛQl?F±ýÅöc ͯLè_x^Å¿®ÈTŠY5bD¦NüëŠÌŠWíÙÐÔ< 5PÍRóÀÔœ¡š¤æªy@jnÍ]†J³§@‚Zsõ¹ü×DÖ4šß^i~ky¥ÙߥV¿Çج½|§Ñ‹eGhš¹òÞáýlñö„"½Ç媧±I0PÕÓCÀÀ0ã .¿OÇ À¤®;æm \}qc+®0dm ÙŽmȆ¼!ßC®0m …ú~ÄV …ÂP €¡ÜJ‡ ‰¡T*‡l*‹-ÜXnÖÃîÜXnž¤ÃÅnܼ$7ƒƒP~ Ð2QþÌÁ£ ÓÔi~p£üÐuµw Rõ}Ãèà²ã-ˆÎ¸ýþwü|ïr¾£këÃ6 }aR~RELxp÷í>]9Âj`ý¡Ÿ} HüM€¹ÆßRràÌ«OG©Ã÷7ΰûÒ’û›~›YæÚ,þ&Àe sŽ0{¬fõéiyG×ýM€k ¬¿m$ÍÝîo ß[RÀú³îòI3÷7N=ÒÉ÷"ÝߘyÀò3îo Ì 0Ì"'úþV§<8³ì¾:ë]j`øû¨—¥>æ¼Øg |ÍH,QEoÙ€O)`˜mÉ,wpe€CmöØ-?>ö˜¢¿0ö˜"?#dGx Ý"(0“ßÊ{öóà©¥ó;ȸ4£œus¿9nÆ5chF1{L<Ù`¥üRa6ñŒ»m>üM+¼ 5jè`‘xÓW‚1oé O1f¿ƒžˆrÖœ{ÔàÀÁ\R}÷ßýM3 Œ>ïïþ¦À¹Ã £C»ÜÚ¿)pa€¤Ëý™¿Ä˜óÀb6÷aÁaaö¨‘%0p°…l‡6g‰vFwÌYŠ1oisÆ ðm΀ƒ¥)[W«,ó; ,7ÀfÑ)ÐTxÀ…üuæÏJ¼ÃĘ+ÐPñfÏýMk¬¿²fUˆA[…ÈŒ?;çþ¦À©Ö_Icè;j-‚9`÷Q:÷7æ^›­ 1h«yæcÎf¿ƒ¹Å¬¾QoUˆA[…È  j郶 ‘—X¿ÿ`UˆA[…È+tV6mÙÈ-Aø‚´‚A[+(,aqa€/°¯ªY†fQ˜u3o2/xpæ1¥Önæï$ø¸Ÿã¯Hß´õ¢0À»P£4À»P£êNýfÔÀ™'u¥à ØêiËH)Sld¸@òìÍH%s˜·J]É-æít.3¼ƒˆz§Ýß'óÙÃDl“¦óåº1O£+æ85TO>ª‡›º`ñ‘¡;ïÖ_Ñß XïFwV›Ï­°øPt+0Þ—îØ?M ¶1Ø>Ä`{ƒíC 
¶1¶Ã'ñÔ3pàÖ`üÓÔ禮¬É?5ߨaê¸Ñ¯ Ù°ârý6û•¾U¤‘Ÿ§àqQׯn‡„Üdu‹· n‡¼#í0=²¶É?¨o….‚·€^Ÿgñ¼õLqÞ‹Ï@+êLó´/”ª¬êW™b @–ö«º—•}ø°®G{ýæîf¾öoÝwýý"m;c¹š=Íã?âY?âYÿ d}Õ7„rÔƒ¡õ¡þ6ÍáÑz‚9Ѷ]DÛvmÛE´mѶ]DÛvmÛm´žÛh=·úY‰H‹æD°ñ(6Ŧévø7Gw½“óÏŸoŽŽ¿ž_å—ŠìƒIŸq¡°ª_ËÏ!§ý¤ ØÇD 1Làñ¡øˆŸI&ö³ªq*6"*„•ös&Tº¤_TB)æ}^'uï@1&vâiŸbç qbã ¸’Lþæ¼*U]‰BÊ ‰´ä².±§HdÄn°©‚†— ‚}c¸o¼deOÄœj0íS꺂×ÔsÑNø)šõ¤°=’}A½u*¿õ—AßêL¨¸¢§¹ˆd!E»I šÉ¸VϳLþJY"Ó©i]YS.ˆIž `•“Óøiµ|{Ý:Ei°Ž,”Ðð\ÙH—ˆ \Ð\ܹeÀÙe àwkcÀŸ( ¸næÍt£ÎÝaΊ ÞÔûüïðÿîý?ÿkïÿ=Œ .4Ð…€É·ò1R•ÜsÉ#ùº¨L¾RÉW*9³Ð [è|EÐ…I>ÓÏ.¨äÒ&3œ\Ùä#œ\ÛäAŠ’…¨˜d†“S›Ìq23ɪ&^Ïó’ký¬¢NÎtòUzº“æ&™‘äÂ$s’\êä»ÊÿÍ%W&ùX&k ¦µMÎlò <ªjd K-4·Ð·ðì©æš[h†qgšaèÜB§ÚôrT©ºhÓËãÊ„L6½•Úôò8ÇÐÜôò¸ ɦ—£ 27½¥77½93ÉdöɹN¦³Ožh:ûä&ù”$&ù„$—¦CÒÀÊ@ÿJ k}–"èÂöRõÞ$›^zó é% ÓKJÁÂô’΃…é%ðŠÂ&ã ¯0½<;%ЕM&Ц—g'ºLl2†.M/φdZ·½¡ÉµÆ=$Lƒåe»‡xì¤ðD¼„¦Lcº—Ó˜î¥Ç4¦{é1é^zLc…I¦J¥î%cSV™d ­{y6Ä# £6É 'ë^ú «ìå/>/9WÉ/y¦“)/y®’=^òBCS^òR'S^òJ'S^òZ㦼Ì“L`–êdÊËL÷Òãe¦{éñ2Ó½ôx™é^z¼Ì “L¸“é^z¼Ì*“L¡u/φ:×½ô˜–Ë^ÞùLË™JÆLƒd®“)ÓòL%{LËsL)˜*Ù£`^êdJÁ¼ÒÉG´;µI&/L{YÀ ~bïìõNj!ÙÍ‚ëdÚË"SÉ^/‹\CSÑ, |Š•øT(?ª%')RùS¡ü(h5ÇZèZ'+‰5Ée¢‘ØÙG%ë^zô.u/=z—º—½KÝKObKÝKObËÂ$6”º—g'd_\V&™"ѽ<è*1ɺҽôX\A/¯Î|îT\%+ÙAg:™v^(?ð¥''Bù‘ÉžœåG%S9ÊLö[RkèA"”•|šb$BùÑÉŒ$3|B¡¹®’r^(?2Ùã|­{éu¾Ö½ô8_ë^ž h²îåÙ)VÓº¶Éh7Ê’Ä$ãü2M2ÃÉÌ&$º—Bª’Ì&$¹M&HL/‰øÀ“³p¹’ržîÉÞÁè>2™pžîÉ”ó t = ÐL'ŸÒd®“ ‹è>*™‘ä\WI8Ï@÷dÊy–ê^RγT÷’rž¥º—tÌ3–˜dN’u/ÏN ¦{é1q›Œ™Æ2›Œ™Ær“Lć6™ )m2AbzI9º¸bxœÝ’=΃î#“)çA÷dó ûHhÊyÐ}d2å<è>2ù„&:yH“KÝî_i»+Ý*\÷ÒˆL÷ÒˆL÷ÒˆL÷ÒˆŒ›d"™î%U[X–›d ­{I×–•&™Bë^Òµe¦—C›^þš’dÓË_ ’ÜôòWŠÄô’Šè>`"ðÄtHöÄt™LÅtHöÄt MÅt™LÅtH¾Uâsk’S MÅtY%•Ð} Ù““B÷Ò““B÷Ò““B÷Ò““¢4É„°…î¥''Em’ t©{I•V¦&™ )™I¦HL/)/A÷‹­/A÷d— ûÈdÊKÐ} Ùã%è>šòt™|JϨ|’’äÔ$3’ÌtòBs“L¡u/Ýi¢JÖ½ô¢Ò½ô¢Ò½ô¢Ò½ô¢Ò½ô¢NL2áN­{IµGV3“L‘è^zQg&™Bë^žýJX›^RÝ^_÷tHöt™LÍU û@2p‚UÐ Ítò)Mæ:ù„&g7a\¤•É”ip;V%ÑdÝKÊžT&™“dÝK:\yš˜dê^R¦ñ”™d ­{IÙÀA÷OrP6pÐ} ™²ƒî#“)@÷d  ûHhÊÐ}$ô)ÝG&SzƒîɽA÷‘ÉŒ&sLÙÀt/=6°Ü$R1ÝKªnq¦{éQt¸uïQtHö(ºL¦Ý’= ‚î#¡oD2×Ч)Î44ÙØpÐ}$4%,è>ìt™L Ëu/=ÂrÝKºƒá™îåÙiJ’S“ŒuMž1›Œ­o·ÉØú–é^zlÝçëÍA÷d  ûÈdÊÐ} Ùcè>š 2è>2™,0t™LºJf$™ëvSb¨Ì4ôBç&™“dÓË_)´éå¯Z÷òøŒBë^zrRè^zrRè^zrRè^zrRè^z°ÈL2€…î%]¥x¡{I—#^”&™"ѽ¤*(x\êd]š^žèÒô’ è>pÛÈ6Ð} Ù6Ð}d26Ð} Ù6Ð}$46Ð}d26Ð}d26Ð}T2a1è>2™h3t•L¡u/©6Ã+ÝË«3ÒîJ÷ÒŸJ÷ÒŸJ÷ÒŸJ÷ÒŸJ÷ÒŸª6É„—µî¥'>uj’ ’Z÷Ò“ªš›d ­{yvFX›^R9ݾÕâÉ è>ìÉ è>2™Ê è>ìùä€î#¡‰œÀ•L½àæ­Nf$™kÜ„ià#,“)Ó2Ð}d2£Éº—”ià÷«’4Y÷R¬ ØÝ ©m2v7H›Ìqrj’ñJ’¥Ì&$Ü&$¦—„ip©3è&¿.r“øS$‹µ³Ôknòk†íÌ->b&î#æ`>bZüÖðâ+62 #ÙÌËf4›SO³c¯tæehvN³Ï¼¦Ô_M…‡åÛ=õ-ÓÙŠ¡¶tEý×´K™Í®ýlFÉ’xÙœf§Ä îÌ«;eÄÙLû–Ù–{¾hÚÇÌ•Îh6÷²s’­}Î\vA\×´w“Ë.‰ ›örrÙqeÓžk.»&.mÚƒÍIKB³•¯”ËN[Ù g3â§ýà\6§ÙÊÎeg4ûÌ+÷:íñæ² âf§½ê\vI³•wË®hö™Wº&ÎzÚÍË ¢„8íi=—Òl¯åœÑì3¯4'®ÚiÌegÄP{ü¹lê!¨}Ë\6õÔŽ2ûûÒéìÌË®ˆC¡ö”£äûÖ©ì³Ê›[âw¨Ý -rëk§2´»¡Ëf4;£u[ß;UnTxug4;õJçÄ›Qûع҅ŸMF¨õÍSh™×òŠ8?j_GWwM²ý ;Oˆ¤Ïë»^,¬_x±°¾|áÅÂùô çÛ^, ’í/Ö×/¼X8Ÿ¿àba}ÿT­Zßv+QB²ýÅÂú† ë¨Ðê#'—ÍI¶>rÙ™—í!ωë£öttÙÔ3R«Á.»ô²=äq Ô{—]“ì3¯nëS¨Êi·J—zÙŒ.cjOCWšûÙdYŸC…V{ºìÜϦ¥ âÜé/ïž/¢?J¬Obx”XßÄð(±>ŠáQâ|ƒ£Äù,G‰õ] ëÃQ©râ'éëÓ%Ö·1‘>¿­od˜ßÎG2Èoë+æ·ó™ òÛùNùm}(Ãü¶¾”a~[ŸÊ˜ Íi¶§BË0¿­¯¥Éö‘ØÍó¬•]bwÏ–~žTØO²µ'Jjœí3Túd¢læg§ØÛò¸Uš‘ìŸM|5}†z>›-†ßÍC©g‹¡Ö—3ÌPëÓÙÕØÔ_æ¬gx™³¾žáeÎú|*´ÚåÐõÛø~šUÐGžaÿRí–‰²s/›n™ŒOhl³WbGÍ–0± g·„‰Õ$Û&ž`wÏ–0ñ”dûÂd|H#Âd}IÃÂD}J[Âd|K#ÂD}L[Âd}MÃÂd}NÃÂd|O#Âd|P#Âd|Q#Âd|RM¶œc׳Vv†][Ò’åØC´%-Y³[Ò’•$Û—–¬Â~¦-iÉjœ­]@ÑÁBB³=ªßÖˆ´X×°´_×Yò »”¶È’ç8»E–¼ Ù>Yò;¦¶È’W$ÛDy³[ƒ¨H°{k‹,ÆW6Bë3DÖw6<ˆŒmdY_Z­3¥d³çùÔj³ÝIz¾µÇh”[•}v’zuרÏÈÄçö8¼~ß[Uk»4ÃÙú•æØ¡·5BOnD¥o®u}Õž®(» Ù¾¬•%öym rYáì– —5Éö‘W öœm r•’l_+†³[‚\qìÛä*#Ù>ÕŒ¯¯d%ÖçWg{,±¾¿Zý¦UØ«÷Ø—5ë ,³µ5Z&^6=ò¤¾ÁÚº€²™—í!ç^¶‡<ÃÆÚkeç^¶‡¼ð²=ä%öSn r]a'Ü–(Ö5Înü' Éf~vŠ]y[¥ÉøÙgû¢(}C°/ŠÒe{ý–>ÉÎAØŸS­orxNu>ÊÁ9Õù*ëI“Š¢õY&s*ÊNqö™ß4ëÔϗٗäÓ’ëÛ|Ü"[çãàÙú:Ûly…§}QT¾Ïw±å]ù@ßÅ–wå }[Þ•Oô]lyW¾Ñw±å]ùHßÅ–wå+}[Þ•Ïô]lyW¾Ów±å]ùPßÅ–wëKEëSÔ‘ouPG¶>ÖaÙùZK´ÇCy†³ýõÛó½öÏ=ìѯ>ògûë·õÉ6Ù>ò{rk'ed'Jh¶‡Üøj›l¹ñÙŽÈyƱûrKÎ3âÝÜ’ó,'Ù¾œgv‚nÉyV’l_γ g·ä<«±+uKÎó„dûýÎSìZÝ’óœáì–œçœf{47>á 
í­/çÖ7<,çÖG<,çÆWü6,çÖg\Ëš'LÖw<,©Æ‡<"©Æ—<"LÃþÓ-a*8În SA¼¯[ÂT/ì–0Éö…©(qvK˜Š ûr·„©¨I¶ßï2Á¾Ý-a*SœÝ¦’Ñlæ%ÇâÇ'tWc}ÖÛë÷ñ]Ç{"”]àlOd}ÙZíy…ê®pvkNµ¾í²VmŒq¥­{pOä|Ýe­Ø”ãù¼‡¶LÎ÷]ÏŠKŒ|D«û}·¹*H¶?¾+â5Þ’óŠx·¹ªI¶/Èu‚³[‚\§Ø½%È5#Ù~¿kŽ}Ò[‚\g8»%ÈuN³=A® ìÙÞ¢šñµÌŠÖç><+Zßûàòn}ðÃË»õÅ‹ƒôÉwþç¾8Hß|—íó[úè£lß#)!^ìÇ­ÒÉøÙ%Î>k5­Â¾ð¾8H_~”í÷;M°oü•'Ò·ßeûâ }üq¶Gó”cû³ò {Ú·X’æØ¡½Å’´ÀÙ-–¤%ÉöY’VØ-¾Å’”xÍ·XœÝb K±s}‹%Œ‘l¿ßŒcgû+ïÔBÞ!pÙþ©…¼Kà²ÏZ-/°ËþY y‰]÷ýs{Ç |îàîÏ샿yŠ=ç[üæ g·øÍ9ÉöùÍ3ìßâ7ÏI¶O5N¼÷[üæÄ‹¿Åo^‘ìV¿‰W¿?#«; 71‹¦ºËp³hª; ön€¯Z¨» (›ªꎃ½B0òT {×!¬Z¸;AÕÂÝ}ÐÙÜC^á ÇC¿i5ÎöU w'B«^i{7BÖªï%¸¦Ù;2Ûבí] …öøÌ똽3!³ýÝœ»;!kÕ¥QÓ œíoöÜ] ã%âeWøòDkŒå5¾0ÐcE‚³[c¬HI¶?Æ †¯´ÆXAn%´ÆX‘áìÖ+r|y¡5ÆŠ‚dûý.J|™¡5ÆŠ g·ÆXQÓl%e‚¯D´ÆX™ÒlßæGX}ww9ÂcÌÞé1s·#¬¾»;AõÝÝõ1{ç#¨¾»»AõÝÝ‘åô%”âÛg­l†oy´ä¼âøÂCKΫ g·ä¼ÊI¶/çU¯M´ä¼"·*Zr^U8»%çU/_´ä¼NH¶ßï:Å—1|‹‡¼c‚³=–Ôg·t‡:ÃW:ô½ ”{ÙT90wPÂw%¬yØ;)aÍÃÜM köŽJXZä]•7ŸJž¼=Ì6ãõÛëërµé?¾ÉÇUÇËÕÓd1û×D~Á»×;<9¿ì]Næ³Åãa¸àtù¶€‡>½ŸW“Å´ yyø`ö}³BE>…¡ÍE ïîÊl1ÛÌ&sý©ñ—F`—å†ox·Jm)"þšm~Àõ‘9<ˆ‚š-&µ6¼6«€ˆ†K Ã­XÖË·Õ´/&/ÙõÉ`k¡ÉtÚ¬á­õ•í^/ýò÷­¥Ìw×1é˜òâ <Ôº2¬€ ½À¿bxPèÇÕdºo_×ã—Éf5ûC¿3Ñ“ ärÀîP‚éòßN%ø~%˜kÕî%Ø~ý`{·ŠïÝ*¾7­8jU–]ýøÞL7Ë•j’«c{¶oîp÷ÀdIY}óIo±ÀçáÛ‹bÅ:kÆÁµ·½V9ùN¹|m`¬F‘K•wÉÜOúÑp—î¿ÜÊ1¯úøîÂZ«ˆý´½Ÿcçœ@žýܾ˃Ûûº¾Ë=ž¬6ê¥?íG í_$;7@REEë%|T¿ùçÛì{»)ËÕ‹˜f¦Ï“Õm }0ÀKQÊ{¿ÀϰÃ=ÃpôõêRNöæ(üë÷ÀFz$Rk9ã}d¼ŸÃ§ðó~.Ô¨´ì—àØž QçÜ~ßÚPUldÖÜ\ëTêM9ë§ 9¨Q9( y Ðôr…Ä!7×Ç#-OûðÖgšõAËJ«>ìMËyŸ%¹×òL+\WQä…Øl 䙨M¤u?‡Ëy™äWòÜ´|CÞ´è—Ð ‹iy]ôsî‘e@‘ùi9¼T“òš3ˆ2†h®ÞEÈO)òÒ —[ò‚UÐdpÔ©Á=Ü O ¹DâW9 !T†3ÁIpB-¯àÕ ƒ.©QäŒ ¯=9×/«Ê‹”ºåbºOy?ã²e+š¨*æË9ä`9÷‹–×b«ž2!-:P°侜KÍɹ00@^‰#~ÕI‰¤¥ôû-gTÎ}ä0à3 Ø~ůڑ%Ó uÈ}9—Z=’ó6ÍxJ ¼hyRev¥ý*§-÷än¡b9o#/ÅŽ6…'©8ŒÐè¨ ÙI*ŠÜ“s¸ÃªD1† ΂­p›UüÊà–£BÎó~É(òy¡i>L#È+󤟳¼“An‘‹¼<¡4ÒA÷grE08×}pÕëéñjZžÂSy9Dpû"õÎu*ET‰"¸|( ì0Ó¸á/þ_‰bjžÊn ¢š"¹˜±.EQ5°—ä•þ¹?ˆXBQ98‰–'ðú™X:,òšõyá!÷—¹”¢6ò ¦Ã´÷YP97ÈÅZÁ)rÉÍ-D>ò²ŸÀt+QÈ‹Â0´Ê€Ÿ´åÞ bœ¢6r˜MrðYËuUæv„ 9¯)roÁ$'É¢‹6rX{CËŠÁkl5·+‘X@˜Go±€ûÒRõbá#rÒ"È"ÖiÑò’Û‰K¬#¹'ŠÞb·­‰Rtw«3…\fE¡¾äRZòÜ´¼Êûµb¨UÅZJQé)ErAˆZ¾¼'‘‹:²Ê!/ м%ç•§µZÎ,bn©‹L¾€gç–:1ó¹Cî·¼ö”¢ò$ÏÄÜRÁ•Q–õÁ‘Ù"WêœCîË9O<¥ÈCÎÄtXYr1bÍk^#²T^Ë=9ç©§yÈ:PoÅ\+0Á$kçs¼EOÎ9£‹…\,¥\ær¡‰ì‡F.ÆWæ!÷ nFè0ˆˆ ëš–J.s,Ar^x4WHòLËù¯AäbZ…K‘@±êäà©l†?è_TÎõçáEÊ´U*§\Ñ !-`½r4W#”Çv0%‡vyU$9 ˜ª‡îjbä­EÞYäb±P -ÄRÉႵ ÛYÀWB; ‡<‡7Ä„´ ‡‰9²DÈKF‘·QÞY8²d¹œr3±¬ˆ–s7q‰i'ñZî ¢, ï, r.•"QÜ4/ <+¦roeixgáÃ%9؇æ)´<©¸[æROZüEÆÂ; ƒ\Í&¢ùö)46qªEé·Ü[,à[ tgq§gf™“.1ˆ)ŠÜéçðªDžõt1_γÌßYPäBùJYªTF¦+-IJ‘ûržåþ΂"¯ûðM£TñBÊyn[.ŠÜoyáï,rÐ+©eðª*<Â\ºÕ?ã¹/ç‹i— $òÜnÅŸ 4BhYªÌJ‹˜1Y¨*Ö¢yEiî!‡–s©Z€k¶ X,òÄCÞ¢yMiî#ã^Ì}€\ŒuDUê6\©Ü?XH(ÍÈ3‰ ¬ t£Êéç…‡¼uj‘Ò¹%@sØ€ÂIQ-ÉÂj7ˆ2æµÜ›[r3BOÓ r±PÂsĹŒ¤%I+‡<õ‘ŸÒáŸ[-—Å*æ>1ó¾\ ‡}»@W¹œÿ<£—ßòBÎ&"‚+F0ˆªÜ ÿÊo¹7qåyDÎ s€o Ô z‹ y‘£í¹B^ÄäË&’–RKK;X(êÈÁBiäiàxQl—EËá’„Aö JOá ?x°`‘ç?@7c™'}pm1È¿åþÁB™úº¢F^²$\"/9å,A+‘Ū§‹ùr^2_W¤È3!o™Ü€ÖyOOÀN…®(r_ÎKîëŠ9šŠ< )EŠh9³Å¢åJÏFÈý–g¾®H‘‹eN(p"JÄp-/3æZ^{dñå¼ÌýShŸæE!ÏYá¾¹èGY¤è-ñZîÉyYø§Ðy! 
ò¸‚I«´#4í3Ÿæ¾µ¥ôO¡)r°Rä@–NDA²K×ò$ó{ eåŸBûȳZÒ¼J¢ˆ¸D¥Pç¼–û§Ð0&¤œÛShм’–!‰\ýÕfV,™ÑÏœ{§ÐU9X¨í ›!Ö‡{< ehª¶>X¨ÒÈÁBmävÐ"âJW„ÛÙv­êØÁBÅ" µ³‡J²”bòöº“"±r0¹ßr9X¨òŸÔ´<ë4 OÑÞ?¯(rUYä`ÁÒ\ÒCŒžLN¹`sµ-×Ûó:v°P呃‹NþÅ@…/aäð¹^«BÜCî ¢ªˆ,Ôføçj_ž e[¬DN)ËóÈÒ2Y–‘ƒ+-fë | :X(ZÈ=9¯Â&K–XiÛ&ØÔ1‰ng[†29+²¨É²ªÃ&KƒðÈ£ÐJ¯þ¹Ñr+ÖÏjŠÜ—ó: ›,²‡Ör%JˆÅ®¡•¨°ð{-¯Ó°É’Y{(þ¤òÚî,JÁÝ„"÷å¼fa“¥#‹t*¨…ÐT -™=úƒʽ–{r^ó°É’YÃø&Kn.U »™Ø§¹'çu6Y:äRWª…˜9ø/XåöŒ"÷ ˜žC&KÇЂ+iáy^9c«˜‡kJso±¨‹°Éµ¼*4¬\LŽnÃÅaoç!÷ìþex±`©¥9,s¢1 ÁbaM–%×§^,ê*¼X0k„Ê)-RaWjZ^&ý2§È[ƒ¨/Ì[+XCµÞ’É@»†¦>ò–ÇB\,˜µÍ•y¢WRo±sK ~hyËî‡`ÅÂÑ>K6ù\*¢°C²ÈëÂk¹o÷‡cÀbáhž)Ÿ*“æ³:©Cs¯å-»¿XgB‹…£9ÜJK¹{‘Û¯Ú×[ro±€ó‚ÐbáÈ’årøƒ7˜‰í]–ú0#÷Ló`1 ì ³# &7 °¢ZkKQš³¨iL`´Cß<€=€˜b`Ÿ‘i)Œòï·ü[`_ØAäbÕ,¥î Ÿè‚AÄs'-Eé!oµ¼ î  òÒl)ŠT¶¼´ÖsA¥­:äm9¯ƒ;h\ú…ÈsÖ"‘ƒÈþмÝr_ÎÁØA;äI)ÕÛD¬i å–,©>µ@È}9‡“äÀÚ1´æò@±J¥A®gê–ZãBdñåæŠÀ!ϤR”§rÕZck^·¤ÅßAƒm‰ÈùÍ©Bέ J%ig~p:gýC µ†ŠU¬%çiFåÜCn r®Í• óÔÉyå!oûqåTÎ=ä¹ÔT`)½%·+QQi焼Õò‚ʹ‡v²RÎÁwNî,r$Š)EÞ’ó´¤rÞ"K‘ÉÅ„Iå¿¶‡ó¥±p¡–·ä¼¢rÞbhQªÃ­Bj¹¥õóyí“¥%ç5QŠÈ…êðÿñõ®9˜í¸‘àVry¡÷ã×ÀF&î`ÚetùOçþ2 RŠjÀ(å/#™:”D‘Á ¥ü+¹Våy¸æq!êø¯¥i}ƒhs–¶F< ¡ëº +þÖàóýc‚£f ÅLá\™~ž Ëÿhð¹Cÿñ&ø¸ö+æ}\¥#âJ›TÐÜ|‡ ËÿQÛ?ÌúOx, :ÑÆbã½2,¯¡œƒk†\mŠÄöG#§UmÁéƒt…§¥OÛKšT+ü”ÿصýCþl >ÜÇ*²ÓijCÓ ç6øµýCùl > D™"t¬—säÎ׿,¯Ÿí¯ÁG¤â(VL…«çÅí?×|ƒ_Û?´Ïö¿—eE¹…R"%÷³ý9Q,,×Û?ôOAä^–T)øoR"aÓ8K_ñùW¨ÓÊ‚Èmy›o"DãòÙ4Bè¦Á•+âŸf=Ïó²¼cûã1Dt?7ÑpOvÅüdÇ`?Ï7ø kmëΘàn¦þøåŠ1ÚÏó¼¼%7º™]™Q;àAY~¹"HEÖó;Ýšó DËs%oo¢²üäHýzÅ…Îi]s幉R°7QY~îE”ÍAír»â¼ýËsaSX›¨,Â’+Þ2»µŠ:_s幉R²7Q9QŽÏÿn¿ý3¾µ²ü¢åg{í5§:Q¥n*åìí_×-ÏM”н‰Žå¨ÉwrÅÀ=K;„N^ëM”ª½‰¶åy@Œ8Îurš²kЩ¯S±<7Qjö&*kß§QJùât^Ñkp½‰ºý&ªËñ×ãʤ‚XŃ‹kÐøéãM”ý&ªÛ½ã×-}×°s\_ðkeo¿‰ªd·"îØ#7]›HY~m"üØzÕÉn¥^6z:3+–>Åõù&ÊÑ~íeḬF=a|×¼s\œ3ÿÂr½‰r²ßDu1Š)„Ž#6Špw0®vPô²\½-Ù~­5ï¸d±5GÄ…,ôyÍÁ/¸ÞDxãXo¢µæ uGJyÿq‡s-Ì·pEý&ÊÕ~1x¥°…?h¸Û/ ðÏ»²\¿‰P³ÞD ^Ææé1Øñ&r‡õVÜRŸo¢Üí7ѲÜ/WLˆÖ"h±kpµý‹³ßDmy â`ІÈÏÝŽÏs™LKüôñ&*Þ~µé-è ¡ ¨óm·ƒ¢qYÌŒh{¾‰J°ßDmzK3IKf,øí-°¼)ðËòh¿‰Ú\sÿpFÔQ z×,pÕ/øµýK²ßDkY:îAT8iéÏy¦²\oÿ’í7ч¡bæê¹-¦·´ç›¨ûM´½Å1Ù?sðð¸Ê~µç›YMáŠPt#ð¾,G­uG!£ßl¨<–ŠÁÇOùÝ®Ø>®¨ÀÇÙwBi>Ò[÷¤¸S›ÔÙ~»bÿ¸âN‡îÐFP~Eœ9¢\[^¿ÕŽíï¼{~¥ooIm%Š7øåŠõ[m¹ÖÙVZ–F…®v.èíŠÇríŠuÞ¡›àh$jZÐ&ò¢ãol"þ‘°\ßD5šmVÜÌ÷É9ìÐÇ¡”ƒ§âòóö%~„ŸÚmVÔ|d´YMðÙÊŠw¢íw™8…¿zû‚_ÞR³Ùf5ÁA íNÄ<ìPwÖÜkðËòb¶YMðL QÎ?q¾x'-Ç&ân…~{K5Û¬öš#.§ qäw×¾,ÒJ Ë/oif›Õg …sã/‰”_ˆU¸bL üò–n¶YMp´Y!ýÜÇMÄUÅý&ËÂ;T€ë¸¥9³Íꀃ5¢¡˜jµ;¸óâZp·4o¶Y-pOäP´ßr(nz[^IKáç:niÁ|Aó£á'r‡™6Q‰´,­zñl¡ø?}¼ [4_Ð<¯…è~•ÛO6Gt€§ô¿6QKæ z‚'å¢\§´‹gÍ9#*À/˳ù‚žàÈmñÙB)]îžÝàsùç ºó½—%ù2; ©A,±ý¹f!,×›¨Uó½—ÅÏó¼UÎBŸ"Tš!´×›¨5ó}֜¹+.êlte½æ×&êæ z¯yÂqD›ŒÏÓ|‡Š5×/èîÌô¶¼S|zˆ§êÂNZöUÊ‘à*néÞ¬žÇ0Áþ/T3]I¾ý¹W1¾«ç=˜Õó P:TraÚ¢ˆ[Rù‚_›¨G³z>ÁÇí€È›ÈS0îž\\U_–'³z>Á©ŸlpÇ/GÿŒô¿6QÏfõ\¬¹£šgP´ ܸ,´åzõbVÏ7xf‰bÞØD®+p½‰z5«çœŠµ±[Û;4ÎW|WÏ{3«ç|<ªÜ|YpE'GqpEµæWõ¼«LÑÿ÷ §å ¾ŠÖÀ)îq•¿˜ÔˆŸò»ÛÊið@§b¡µÇ?`[ŽÐÂ}Áo•)ú‚GnEÜR ¼gq%~Y®2E_ðÀ¾ µF ¡ÃN‰ ´h_ðKÀÁ©LÑ|\l`y£¦…ó&Jy&-…åZÀÁ%;„NkYÕDº5“ B<W›™"üô¡Tà²BÏN¢ˆRr+%úè·¤:ÄøýA‹B38ôx /K§äšQöå·¿¿,¯vMàÃõ¨õTâDÕâ²Üg^ñ€ß´Ù!ôZª Ó²pbtsþÓ*–_´Û!ô,Y4,Kç21XËÂò¨×\+r,Nѯ8Ý¡,–ƒºß¡¹Íšÿ¥Á½B¯J Þ+%Š1¯øåçéB‡Å)úcÐD G#>÷UWN­«eÑ!txpŠx9É\)“+öÝ83üœ–ðS;œ"w´wø‘Ûé&jòí¯Á¯MôàM“¿© P±¢Ñí¿ "c1ÑZ€_–Ûœ"Ü'ÇÏóYàÜ~À¯Môà­e ÔnŒºx~ž—ZpÝ_X®7уS´–%Q»1埨À’H·6½æ×&²9E{Í©±Qnàv‹ÍËmåþ z=8EkY"53ÒÍœ91zúC‘Éüº¢ÞDNÑöóŠum10‡f?ÏÑÂYÝ×r½‰œ¢ îù Î^k v«°¼ù¯å*œ"ž(#ÚùyØáTPôUùó 8E‹—ËJÃàµC«£ö¹¸Ó­c‡2×?}H *N‘O$Øpf-÷,".æðkû+NÑœe[8âBó|§â‰ÏË‹"§è  KgðÆŠy}¡rœœ¢~«Z5“"Îà¸,~®Á©.DäÐ@;Ô¥p-üòóþñó/8HPÁ 7¡…ý²@ÍB{‹öó´r¹¶å{bû>qÛ¿È+ ®ÒPaqŠ~Û–“n(À‘1Â…TÅ²Ô \[>´/$Áñ–è^&${E‰ÈÜUÜ¡3ny¸1ãÔ*p3x%ÍFçËeËÊš–ñYà¦PÁ(p¯e!y¦·Ñ²¸sA»u= ܤif¸×$ððJ-‹ý¼Cë -žî°8EJ¼ÌùrjS`ê¬"±À·r/i8Ìvµ¤á¼°&1§þ(>o1½Á/o)Δ†[àÔ?JÁ&q«›Š4¸¶¼xSnm“ž-$ÿÊIC­;ô€_ÞR‚yq†ixK™Y lUôÿeQ&æ¼bòÏ›¨Dó&bðDí\é$®îº DÓü^ódÞD œüšéÊ$³Øwr>¯Ve~YžÍ›hƒ§™oAï¹çðe× ùMtÀï5/æM4³zñAN®z ‹õ',×;´Tó&Z–W¾æ5ŸŽšÏö¯ólàz‡–fÞDË[: ¤“üI']û,"..åˆeÑ7QéæM´,'&&1•øíŸ‚Œ¸‚W7Qu¦4Ü̱´'Ÿ ç…Ý †íÏ–‡—4\¨Þ”†› 7”ñc Š#íiQ²,³ÚrÀ/?¯Á”†Û–S”KMŠ$á* 
Ê•ÖÛÛX–u^ÄE èêD+Sƒl ÔÞ¾ÜÞÆëñ‘õ^Î’) “~Æ:]ÝJùöÛím¼Ÿ2¨LH-+>î9dß7(ß~¿½ 4‚~‹†ieµ|‡†UÁb“nãeSÀ¤4¾ýf:=«*‰Â~zNÇ¥îQ¦JÖ5®æ•WœUBâO ÝrNÇ¥ïQ&íðÂImŠZ<Øp-ä<(WšîQ&±™3˜oƒ‚΂*f_Tÿ}AR=ߣL¶RÈ‹Kç…wµâ‘®CK^Îé¸YºF™ z"®•æ`ðá$ ™¡œÓq œ ?Ê´^ŽhZ>4pÍW&(ú¯z}rNÇM–‹Ü"ÖæÐñ•*°|Tzëê ³ÎØúÊAnI½Ýä–Ú\AÊ 3uuyëþ݃ZHõ›Üb °%¢ÉmS›s^i !é-¹e ]ä¥D¹dü¦”Îoã$´ëhèeHp“[ ´ƒ×™B¾–V/§«ŠWì•"¤F¼É-múº ~ÔyÕ”…9ÌéA±Kt“[ÚaÕÀj CW=ä3ë+ñÅ.5òMnQЩ-»P*· šÄ^T²D(RÉQnr AÓô¶Ò[™|öxÖùYê¼ã©ä¨w_¬y€¦I6÷0€TÛ¤ÉDÐf9çøÒ»¿o ¸>’ý‚Ž__RzÎí&i EÎ9¾4ÚÝß'¨À´Òè‹P¹®{TX‚`”+íwß@s[ÎI´z"§Å_ÎÎýsŽ/q÷÷íñ1ÆX—ø=2g›‘àg 5q9çørw_ÖÄ!ÆŒYD -A†Z“Ž4ѬÁƒþ Ðx÷÷ jŽÞð÷nKBß4Êig9_{'œ ßß·•Âä¨Á,¦ES½m”ÒN÷\Î9¾Éù»z|"›Ù¥›õñ»GqZ.i]JNÐÊÝã3PèôÖb<ìü«ÍÁñ°~€"Ns¨w -¢$…ºÚŒz¿¨M; t \©Ü=>¥ò5¹çœäªmÜa<,+ýAÛÝã3Pp¸Mþµàm7»ïgЍª¨œ 9ô»ÇGÐõö)h< [šÍf$ SmqÊ)šÃ¸{|šJò͉ŠÑÄ™Yó7EœÒ›#ý:¨NƒÚ´2Óz÷ªqã]”Nr!U颵LýäxûmGQ6Çte D~Ó’‡„U˜ ;|QmùImPÆiÌwQÖ@©þÇ'UÄ›¬8•eûâ@¹Òre mHª7Ôgí$ O±Õ Ê8õ.ÊîÇǼ_`–GŒ€‰¢lPªèÿgëÛr(Ëqä¶’ȆÞ/c •¨2`Ožùqí!¾b)Š7¿ÙÕ7RçJâ3â®všGLÊTU~ ñ0 tw¥›SqÛ¢i ;Í3&e ú ¢%ÇYÀ.ÆÁ‹¦écåD<ÿôO€®˜”Õ•Â!_ ’ ™Ê]+­i«hóïôŒǤ¬}}п6J‘Š_«å¥HWœž¯¤lu<œ‘KÛ’²SKJ¡aš#‡>iq%×3vSÏDå1Üpý§¦ñ>õ¢A_` 4~Ç3ñ‚ªR Oº£u¬ÌÎ?ÍÂÒ|A¹ÒÇnAŽ}I©ÕAã@;îýõŒÝTÇ3ñõø^çŒå·:ÆSåZm·R¿ã™ ³5*)s`¥3?i¹RÞw*ÁYu<q¥ôÌÐÕ‰¶ŒTôfM­&5ãØv¶s¶›ö¾uSÖ£òzAqò—/–桼’ƒ=MháJ­ÌAÚBa¿ýüÄG|µ|±4+(4Xz¥ïÛ»éñøakøüû‰øjýbi:ÄLîãBö†ôÃ˾dá–» XiýbiVP8h#c¢©Í+HãXF$QsAi§õ‹¥Y¿S<Ùž‘¨ÊОÒ8»îJa§õ‹¥YWZA‚„Dé0µP¥,«®[î‚â®_,ÍCi5Ñ>:ÆùF¶^‰Ÿ$“—·ð‚â®_,Í  I5â=öªéãñ~ý_ýbi&èV~ 9ãV+ÉÈ@,W=ìÚé?ýê”:…a3JÇn6j=Ÿ)I#Æç'¾S¶Ö¯NYÅMßщ@yóÔ´OZ%Jä:1P5þ¯NYňÈàXxeºxÛ Û¶|¿ÅJÛW§¬‚Rš$‘ ‰™¦½’ƒÊÁ§%ñ‚ÒøÛW§,AW8é—+Mu&ú¾Ân¥0þöÕ)KÐ]H§=ý¸­µ5N2·œš©…ñ·¯NY=) 0´Œò¢Ž––ž§KÙpÎhèÅyÚ¾:eu¥ä=ÇnÆ„/õµ?ug*'ûê”Õ•NpË¡d6÷Â,cS‘ÉQe ·§}uÊêJ7èï8mŒÀA²Cäë­FaâA±Ò¯NY®´€7t£{—ŠÊ«ÀÓ“4%L=’_TítÇ@‚ ¤¹#“iǾö~øú¤ó‚b¥Žg"€RêLåcPñ"/U¼ºŠ¬”vêx&Âãg¸Ð\€Ê€‘ÌSAtò‚ÒNÉ3‘¿@;¦â9(ÓÛDV²´gŠ3Káð‚" 6Gù^)´;Š&}¶adj¸›8!3^P1©Ñb ¡+…_×9s–Z³a@Ç(,´pÆïx&X5ŸœäB›UŸ£jh6QJ|}áêüüÄ·7TÇ3@)?öКRéâ<½3”vêx&^Ѕј*¬RéK'7›u$:P®tÅömLËJ_%t4Ë36uΧU;ݱ½AAÓõ’ôä»,:¸î$ŸÜJa§Ðæøët£®3²“®cÛËN)›:º+Åy:sdo˜,¡k—Cæ£-)iAÙN åüij7ÔY"{ƒ‚²p¸È[†mKqÐ4ôg¢=ì uÖÈÞ  tMèBš™máñ­®¬â˃r¥-²7t'Ò‹Pñ‚Ä_ÚÜr8´[þ‚òëÏÙt¥ xï®-l¼è?LPA7ƒ[)¿þˆì “•3dö7z°÷îÍ„>°Òa½h”_Fö†©5>±Ó•8ÆÏöª^Ÿ (wÊÚÉ\‘½AA! ±`RÑ¿9¢®â”Τp›ºž vÉOd–ŠÒN™Ø.j§ÃDÓÏOüäAu=´³O½èn/ïOVtœñ‚ÒN]ÏDEzÔb¬Ú0vÒj9J}Ê•–8y`+U*aJUÂw9”¨þ¥ºž‰Š,ô(ƒä§K~à° ¸•ÂN×—jèÔÒ„h‘™$Sé<ĦŤÏO÷|}©†tlF2G1iÊÒ}þà5‚É.õ¥B‘kß¾¾ÿþ·éTqŠŠm îòô(³0)v“ä¤??Ái§ûöõP¤ $sÍ™Øô¨Õ <H”vºo_ß º¨ „Ñ¿€*Óò íi<+¥îÛ×W -“|çÂïÅSáK)(ítß¾¾°ÒŒ”LÒ²ˆ«.=OOåì®v ž qÏ( êy0™ê\t{Ö¾5n¥pÏ÷U }A†BZ G¥Ñrv M"ìçCÁ=ÿ晘Ê×Gº{t63j–2 ªùù‰ïCiß< J-NPuφÇÏj̤„ë½Â¤Ú7Ï„b6Õ¬9(ôÕ›Rê³Í±? \éÏ„‚VˆÆUf‘ •Y˜;,p#XOJûæ™ èÂþœMöÀJx쬲„“sÑÒ0tWú@¿x&4k;àB§Ðr/mÒ—Jî@YOJûæ™PÐ41Å)±Í¦ w¦2›È8éÿ}|9úÚ7ÏA³äÆ äzF[Àz_˜=O©G´è—ÆádE"Qì`á”jùkº]· ÍßÒQK_‡S¹å&9J'Ì•ªL$þDf)IgÂU;ýÒ8¼+…(U¦:Ѧ4‰jq7#®u XiþÒ8Ô•.H]À¡ªµŸlš€ÛÃø?{PÚiþÒ8´•ªV<æÙ¨!“™á]L{œ«Û­vš¿4§¹À¢M.Þ¦Ò«¯OWRrÒoé¨å/C‚2%¿@…±zçu‚»J(^A ò€þ Ð/ùH€ð|2N£'”*˜iß¹Ñ|KG§1"”ŽZ¡òé\BˆÕT9;K‰Ï?æìô€Ž˜–[És N”!g¥,‘’UuŽ7HCûç'>-×òŒi9EwÇDÎ$¥ŽÌç"Ù…óT† ”ÆŸWLË)hYåHY÷>™˜©ùR?_P®tÇ´AûæJ7njJ=ñ H•üÒëy|I1-§+…ÄãJTÒYTc$µŸR‘‰Ã»R?z&|Zn)¯$œ¨-Áô¤8EnLz!rå€Ð•{¿¡g§åõ÷UyJµ4½ˆ &¸¨œ§¥Æ´œ‚6Ô6Q^…we´Tš¤H1ö‚âä/-&f–Ò‚¤RŽpÑN©ÊHAtw|~â3­ô˜˜!hÇÈúì,rÁ-ÏšAë¨ùɸØ¥–3ºRz»vÚsz\IÐ×3'}A¹Ò3 ŠY­•Pà>ìwpb¦? 
j§+&fôñ!ô51p9kâéÄÒfg_Ÿ0·Ü•ÒNwLÌt¯/56ªdncÀÛÐ)êö‡4x&|b† óLpg$Ý3¡¯<øõ²Êí&UsLÌ,S¹“CžØ>/â¾LyTRפàLÔÞ¥)yöÝv&e©sÖÕë·±íó“Çé­5:½ Š”Ä$5®ԋ®êI«Âuw ´ÓÚ¢Ó«  "*Ô9+ôõ'^MÔœhËr¥=:½ŠZ4hp„ äéüSG›’_)í´Žèô*(ÕmÙŠ¹²Íµê´¾)·RØiÑé%èZÓmœq×—ñÙ™7M«¼ ´Ó^ÝÒ.¶碙†ÓÇßzï·íŸvº£Ó»LæÐPžt1F¬ Jã_½çʰ#=ÚËSèL˜õÑŽ» \éWïù¥:šÖusòìË}l 4þñÕ{® Û4áÞŸÚÒôñ·u̺•ÂøÇWï9A'ºY'8J†¶Ü5ÖøVQ_êy§ˆMÇWï¹­´¢AESyG1‰ Å®sIyP˜ÔWï¹®tsïÃ?mç3à]ªÇ›ž•ÂA·÷üÿü СôZðwHP¯¬œÇ¢ç'ø±Úéí=AT™8lÝÇT-Îa#"6xa ´Óy{ÏÃJAÌ?@"?²V 4Ž¢’ L^P¬tÞÞóŠÉƒÏ‚:ƒºãt~©Å‰.y¥ÎÛ{SgÔb©¨Ð‰è~< XéõúâJñ¡ÈU´µ©/?r×$'}Aáõ¹ž 2¶-eDX‚W+TúJ¬„²ñ¼ÚÆŸŸxÀæz&(®“™H8°µ-û.y´6(·©ë™ ‚æËñž§¤#wdH0©ù6×3@Ñ 9‡Xk5áy?p)$+Êmêz&"(Jf•C¬YYFºuÉëÄŒ[)¶éj‘ÐE®Á>m¼` ÑUã°< Ø¦+ $€ „F…¡m Š /Ø $€ =¿~gR(B«º­_­£Ø †[)BžõUZKSÖhî$_ßáü}ÔmOÞùÉSZ_õ(Eoì,àìe*¦vcÞæ@ÕN¿êQ Šv­É’¥©hóµ¶â‰´óÅJ÷W=Š TŒšHúŒ„ò[U>iM$Š8…ÒN÷W=JAÑ_<1}Ã1©d2ƒ]žðPÖ[Ú_õ(‚îAHb¦@õ ŒmÊÙ5驜!  ;Ý_õ¨Åça–8ÌRâÌ ÿM]9Âãá9PØéþªG´žN¾|8J³ôξ`êh¨6dôvº¿êQmUª[]ºÿU¡žiƒÁ9u¶Ž<&ÅzÔö ˜ÿO@IÉ<‰¿µ5Tûú 2 õ¨ÍÓø·gÀ|@>βʹj— ô"ÝœÛË6r×/·ÜåJ=æ Š£¯p:\…h°nYGâ…ñŸÀ;ÄQ›ˆ“I>PéWæ¡”ý¶’!CîýýÖøzúŠ£´ÃÙÍH{²cÁ“éŽ Šwz&^Bµ•®™^H>tNmÚ#'»¤ i¿5¾Ãïâ(])y`é •Mõ£HT{N­ ªïô+Ž2Єˆ£ö*™ÃdB%ÿ©ÔNö[ãëé+޲…!‰Ö{m¿Éšõ©ùw GVˆ£½[RÝ¢ÕªªÝO^N¼Ð" _qÔ}§™yŒˆ¨þÞx\I0µî·Æ×ÓŠµ“m¢T ª%S+Nþb Bøp ù©ô´cíDAÁ21xÑ&Iþ‡+RéZ¼>¥æk'í ¾œPÒ¯mD ø;б+Í9ÖN tB˜qTí$5ŠòmŠì”všK¬lck¬WÞ®¡s25ei˜Œ=9¿RØi®±v¢+E%fU?!ëŽ2ÑhïãÿÐk' ŠË¥ÒÎdÓÇoä?§÷>>L*÷X;QÐÍÖ&\èIJMÚÕ=*(*ÆÿÝ3±‹ç“ž$¯@º?µeä¸^¤µi¿£Lý»g‚ $âLnO7U&¶âËXø~G™úwÏ„­™Þ†D·jLåì"g·Ìòìw”©÷L(Úå H\S©ï~ŸvúÝ3¡OÅà19k&,•Ñ #&JGûeêß=[§ŽÀ…„Fþ7Ž^}§Í"ÀîAqž~÷LØJÑ'†9Ûc6ŸáÀ:”e< bRß=C_ƒÊÖ\©&e»ÎH|d ãw=œÙ,s Â;0u«4U)Ê È*yž¾£LÝõLPè t)?1r5´‹5aØï(Sw=tg¼SêHâêHL£/t \錣L J½è ‹¦T¥Ê‚“éy|µÓG™ÔÞÌ-äéMF\Õ¶ù®”vºã(“‚Rìg¦ªlý0¡fƒA¸•â<­)’lÕåÁÞçl$Õ²ÞQP*K]È÷+ŸÕkޤ Š8Qvl,s'‚‚ §Ô+ŸÕk‰¤»ùÊ‚›Ú;õ¢Ìéåø­xÒû•Ï:Nh Еn1þ8!ÊÆ`e¥4ÉVÑýÊgõÚ")À}|ĦJWtzÍN)ô--Íû•ÏêµGRÝì™°/n¯ºàŸ¾òY=žÀ¾~c¶gÂCa^Ÿ¥Nøi•÷â@AÏ„'PP ~܈S¨”³–Œ2è¶„ªt¿òY<¿~³R´­ÎJ:mj—\ü ï´ýÊguðLüúÍJ;š[?í«ŸvHê´*+…6Ç?¿±Ó‘?|¯‹µƒ²\ƒ÷+ŸÕ¡Íñ÷o_Þ)ëQ'¡ÿ“~2½ ”åIàœãÞ7©'&·rÙP¥tbiÉyÉJû»÷[ýÚû*Ÿ5‘çÈôOÉϯ Yœ¤iÀ@¹÷[ûÚûJV‰î«Qû¥Y4U{¥‚‚4ÉåJû×ÞW²ÊIfð¡ ‰u–—ý„ÊÊ½ßÆ×Þïš”M(ôb<ãa²ƒt|‘c|~¥Øûm~í}å•”mº‘A㔘SeZó–9.(ö>z&ž½o%NiÆê ¬÷%χUTr*{¿í¯½ßUqaà’|µ°Kc„Ç’Úû¡°÷Á3ñë7+=Ã,cpÖ”#µkî¬r³={<¿Êo@¡–ªUž\_ÕP¾‡¼Êƒb¥åkïëp`’î+¸’cÈE'1‚E|?©œè@±÷¡Íñ÷o@OÙõÈfrA²þy*ÑBe¨~&Øbï÷öµ÷‡§Ó^”'  ·œ’Väƻ÷{ÿÚûZ’*Þ‰œJç™-© $¬¼ Üû}|íýáµã6¢²…giBò‚" ì@¹Òùµ÷­Ûï`°ðžûÁ`f~éñîý¾¾ö¾ÑÌ;Å()÷b"ˈÀÝJ±÷ûþÚûúN1s†jG'Ûm7 äSOvÕƒbïôµ÷U;®#äI8¤é;µýÊg ïùÅÞùkïëJ1Á¦B ‰åJ Fášê *; =¿~³ÒNŸ,#ä=[9ȺÏw¥Øûà™øU~cüß\ ËHÂöË'M•»þ‚b¥íkïZ¬Gõ ¯o(ÅNL¿ {ô¯½?´ÌÑî”XçŒÙiœyjÒgÝÙ)öþøÒÚSÛ2öþ¾]~¢Â…XUZðöKUÚÇ—~”‚b4ti¼OBM!‘Vˆl.(÷þøÒRPh‘mt®÷ÎÃdÚqš“$º÷KUÚÇ—~”‚.±ú½ÈÌž^íEA-C3v¿T¥}~éG)(†ReÒëñ“ S*“- ^ûY)öþüÒÚ*ò·ÀÿÒFƒÎ¬›f„•»=+åÞŸ_úQ'UšµÇ~nvgeÁCÕg§÷þ Ð/ý(]éÂyÚ0k>’öGiÆwÿ 3„§Ø¦óK?Š äykƒý}ê¼8ù×»R´‹õÙcéhS‘mˆ²¡$Ð)›Q®¼«ìýµžÒQŸ#–Ž ´Šª=©f¨jgÎÒ6iã<]Oé¨ÏKG[¹:1µ·ÏAÁ¤æE©rÚÏJiüsÅÒ‘‚ ÝùÞŦGéJ˜¨ÿþr&T<­bŒcƒ§—zQZäÊee´6}~ò8®g"€v¹Úö× è&n•§H|‘2ûu&\ÏÄ *,@q£Ð]ÊGòl™à¾ \iùr&¬p(Æ¿@[Ô)ì¥&EY⹞Çç;u=_/ï´2ÞßôG5ð•ÿ9J$¿R(®g"€‚îgcä®WNqýSîCá¤õ 8PVÿr&ôëË­´¡àÝ•–¸ôáWz. 
gb/gÂL _&¿Ô~Úá¼gÈ:PÙûÐæøõ»•J%FcSíÔfA82YëAqJ¡gâ×oVŠÈiƒÅàãt°G’‡t•°R†P¬t9JT‹Cˆc±õÞ@¥Ì‘’]ûu&Ð3ñ÷o@Ï4ÇÑ‘ÈÎñ«/Žß  +Od»v gbç@ q(“`RkcG¡×8˜º­ì°uÞóO Ñw ”:¥½„¼ S(BÚà‹ó\Ñ”{×@ ¡ Òƒ–AuiZ·2aïÞy<Ê•¶@ a gÔâD|(ÇQ7béJ+3 üåÞß=PBèQo<óÁEoää²íª,øn¥Øû{JˆûøÁò܃&W²¥ä³åTÜJQæ@Ï„£„0ÐÓlŸÑ¼+¼Êë¡+,…’ùY)Ê{ÿôÏüú’˜Á wÛŒø”ø›ê ûT|ÏOÿtïàŸèaÃ9 G_b5RÝsJçt°…I”‚ª ŸOX„p`Cù’ãk?dêk­ô@sðOtœï0B;Œ®pßúÔý7AKðOïão˜‚3ÕÜ6ò_ÉŠ@¨_é­Á?µÇï²÷K&ék&éx«‘BåáAÿh þ©‚Α¤ç¦Ô“Æû © oú'@{ðO ´Šh*Õ§gA_t3±t ÕEÜ¿S9ùx&œjjˆƒVÁO>Òr¨tœÕwBíbY8ÝJaü9²4è)æfH|H6±©Òžt™(Œ?G–f=™C¹¢!MB±ýPÐ4*¾ÔÈ‘¥ù®·é1†NÈdi˜ÁµTì4G–æ»R4Í :·Ÿ8Ó‘OÚbŠ•#KóÅy :Q¡ëU~¾Û2"7ÇŠ•F–f3©u&žÿUel–GçO°÷ÅAs ÿàCE–fíòø…ãak7²ºÍןÀ'=vú7@w /8uñÿ¡üMv"LƒSJ[›: ÜC@ëC_0J ô ÚQ~Ý”uk*ò×)ð‰¥>ô£ä@_` hlÛhD¤{-*ò×y¾žØÔr¥%Ð(Ùp9u°¢Ý /ºrÓI´8PîýR}Á}§™$+CþT9GBV{Þ)÷~i¾@A\Éeë¹FNø*|({¿ô@_  3‹Û“¡“= I-äw®Ö…ä{¿Œ@_`  ÎG$.MAhpªóœ}Τ°÷Ë Zœ'÷À~~ô¡  ki¼¯_ÿÈiŒ“Ÿx-ÎQVÐâ4ÐÒ˜AC³à"Û­} •Ä ªvºƒ§‚Ž‚\¯èÅ‘åi&ÕíÊv XiMA‹Ó@q³orÉg%¥îúõQòD Ñ-ÎQsÐâ´ÇÇ}¹j'³.©žõŽR‰²ú¬vZKÐâ¼ ã >˜w$Gz}Ö³R´6ZƒçýPòø ÷CÇ™Ùès“ª-tuž4+N©B†…é(u³vv±y°"Œ|åGí¡«SA9°:œ¨Æ˜UŽp ÈUáö¼r„£ŽÐÕi }1Ù•Ð0‹Z‰Vy&?Ö³R5©º: ½w|`3‘IP{%"À^9P5©º:ïJ)Jµª½çJÿª:éY)Mj‡®N]碔 ´¡<çÊ,H©ç3èë@iüŽgâÝÇÎ?{ÉD“LU+Q AÐ.“šMñp<a¥›y)2˜h&§: „!L;”Æïx&h’âû†Ú'åë]h+ëY)ŒßñLІˆ,°J¨˜8sŽ %ŸÙƒÂøû 4ÅtMD'àÒ*¤²ßr¥û}§8¤Ñ3áhŠ ôÔÏÆwU.Ít"•ùZhÑò ;½qÔßÿó¿~üñãóâuïŸÇçtÀRj=^+6Ìz´ºÎOðcÚé¸qTÍ’™èò<¬BŠÚÊþd‘ï‚ÒNÇ£h“L/Iq6© 2õÍ’æ¦OJÌr¥7Ž  Bçj]`¡ñoLÆXêÉ]PÚé¸qT\é–£¯ š,¨–gމe-ÉŸ‚½[)ìtÜ8êÝçìȧ/§Ô‚+™A8t;»o¿…B›Cì4€6Ivõ…ôTEA&މ±·7§Ïï¾>ìtÜzT­ñuÑr²í)äIªs “çCÁNÇ­GÅ•N™<ÝéùR°ß² 9ýn¥Ì¡€gâ×oVÚä:é|`%óñs²ÉmXƒÿPÿ‰“¼SñüX¶£Î|æi–“Z˜„Í…Šu_m®óü˜;j¾Sñtn¹F×PºB¦xä²_Pî¨ùNÅû•ž‘ŠÏm*Žéši=  Órù„€”+}§âýJñøäíáÌn&fJú|>Ô厚ïT¼[)êûJWB ¾žî+½Ÿwú<>vÔ|§âè2G‡ï>a…¬LåÿþÙZ;j¾Sñ”ÛtÒíI¤€L*™·H^ù±æ Š©ø1ß©x·Ò3Ë“?7´9fåʬ¥‰»ïÀ­ô@çã¡üñcë‡*HÔ%ò¥¤¿Ê}hD‹S~‚«®ÇCq Y¶iEÇ\j—Ê}H.äsI]PµÓýx(ô0Ã~@¡Q!Ú7`¿U)R—~ÞŸÅJWz<¿RaVª³“þµ“ªTõÍÈÝùÙ¦”vºòã¡xÐ,gƒ]òÊ+ɧ|þ³[)ìt•ÇCq Sª<…üU8Z¿ÅA÷þ ;]õñP<¨ø§¬0«]l1)›•»ë³bŠ“µÇC¹ K#?ïT ùÂT®z{¦Ý®TÎÓÕŸ0ò'%Æ8jÂ?­'ؼM9FâÚÓ…|~‚ÓN×xÂHZñø F˜“<½ªÉv[º ´Ó5Ÿ0Òêóõ1É<¸£êTf üý4±9P®t=a¤Û§äi´‹rss3œ) ªvºŸ0ò‚ÎCËq?a@–§”ЍQQ®ÜJa§ÐæøëTür¹÷Ñ‹Å4õCñ büûeiþ€f~ý*÷H!ak&›¸òJæN7ècqç'ø1¿,Í”)y´  ®N˜F¯¤* Jãß/K³]ñÕÉÆnuÔ“iyÇgä‚r¥/Kóx™´EKïyeÌ`k.'޼ 4þý²4_о¥Y¨ ´m¬ðÎ:Aƒ[)ÿei~V*nPæ.ܦÜûäB̳;´ÁCa_{¡4Iãt\ÉJ9­xv:S{(v.èÌÚ&$ÉÙÄ»éïʇ*ByA¹ÒþPì8Ð^IЋ,rtæÙ_ÐS‹¿ ÿ&èx(vÜ㟤Ïã#¡? 
%t¨(˜T(§ÇÇ­ô€Î‡bç‚öuDFÿU¨»:ãýÊ´·i9YZJ;]ÅŽÿPt{¹äë{GáŠ.gúÈþ ÐýPìø% ÄZ‘CY|§…>•ŠTœÞÓnà)vè·Îšp{Æ«yÀQÑrþÐ<éSÄûJÙrö>>Tç^oÆÝQñNÏóÔדž¹Oú‚Š8EÇÈÝìHÁg¥Ö­…ïô¸=õõ¤g®Á“VP¹@ò™¸‡"|”€#X,gäÞr¥-xÒ*ç²=Õô\òÍ­åHf\PîÁ“¶ÇïH7ÐJsÆ,Œ%è9´ÝJaüyOú>>r(Ž_¼÷•°ŽBJE¦ýêëIÏ<ƒ'}AeælŒŸ¨çV¾>^Ù‡ÃÓâÎ+xÒzÆ'2ÐäCÍ玪Ȩ¡Õ¬¯'=]ÏÄéIÿ€6¾S¨§´gÑ—šE[šiüg3·ü1íÔõLÐ!£¡2t{ò2™ÝûËÒN]ÏDÝrôuP”/J‘™+©ç©d.(WZž~þ :“0 wðԯʓéX8¦Ë¹ª/(íôóQ=uÉ´«/5qòƒ Ü2òý“PòõOKáç'ø±¾ÓöP—\Ðξ¾å@ê•ûPù<ÈÕwÚê·Ò>•¤&½¼VŸñð|ˆê(W:êzxÖÏчx_”šÉe.žÉê;u‰-Ò)ÛÁ !¤âbüª  ÊóN¹÷Ñ3aÔ%TNþ²Ã©&•«’S3:ÙÏãK}R›£üæë‹ÛÓ éÏ5¾O¯PyPÙ¦5=Ô%(2½’†üx܉4Ú˦â±m§ÅRs´ÓñC©Ÿ@­Ç™hJUžøõOÀ;‚Öít¨bïS1¸0í*ëÚxœX{;­5Ú©®ôž5ª’ˆ‡’öEFÊ•¶h§ :ÄC!³ÒT•ªa$éÓEwAi§µG;µ•6I ¶Í"×|¼>¦ç²œº#Ø)z&¼¨dÐF–âÁ"ïižSÇÃ4àõŠvZg´Síx§*ÇKE6¤Ó•ž¨ÅƒÂNW´Ó¡I9ú&”tv¢áTÚ’íu8PÚéŽïüqGCå”"U·& §'ÿ•ÀóðΖbÀ«  å«å4ÇV“â¡]Nþb¾ïl9¼S·)œ Ty¦êï…~וWÊ•–ð¨LtôbÍ4‹øV¥9PÚi«1àµw*“\ãÑYµbµ»“{ÿô¥º•ÂN[‹/A×’n¹)$d’yÇæùŒ6;PøR­Ç€WJÍh@Š[“²Â¯…Eµdü×gÀÛF xu¥§¥+ÿk¡°¿»j«ñÛ;Þ¤àKµé|)]jR#;.•M|TmžvhŸŸàÇj§ËùRèiÙ;é°‰·Ä61&4‹~jTít;_ʃŽÔåb_#½Â깊ªÌù€b¥=9_êYé÷¼QØè^•`I…>Ÿ«ÒN{vYÉtHlZ‘é:§Â4:Ï7‡§´Ó^\VÒƒ Yå'à÷]¶©/c²«xPØi¯.+ù¬TBÙÈF&„ªEêTÃ(^ôL0+ù€Šºö >AS¬ê ô4\¹w*YÉC¨%ÎðøY‰’tæŒM׉mbÚtJa×N%+yj«!àÝjü þ)4)GSÚ"ݦ4©SµÚ!àí3¼TÎSt 4^XÞTiðøÙÒøûН‚®†Zôd¿”6`¹ûI&Ê•îð*h“ë¤mʽ6 Ø+Wz¸wxGН­tʶ± yÐt~j?Jó+…ñÞ­÷>¤InÓNúí“Öâ¡zu 0~ðLø€×@Ojð4ÍAîLG™t„™Wõ (iôLø€w[þT¢è5–ƒ}(Ëq©ÖçCá-”ŽJÒó_A–bJSöûfE¯JGsôP:2Ð8 Ò\ò2te޼JF¸K(Í1BéHAÇÉä›:¶fyT‚|Sšìsï—P::²ÏoéÈ@‹8h" !u´m$•H!±6ý9K(ͱBéÈ¡ùõåØ*ž¢Ü]ª!›Ÿ•ÒNw()è:sšùt tjðÎDÕ,åç (¡t4Ñ3ñg=ýŒŸ¬ü«RIGß©F'¼^ò‘%*¡t4¡ÍáJG Zgð”ÕŽª÷~Ö ™bêŒ%”Ž&z&\éÈVz8„Ê¿Ê1ÈŒ,ùORµb<¬›0m ¥££{üÒEóüMLªadkaÒ(ºßzÿùÉsHÏi•¼ø©Ê% *ñØ´SfÒyüéÙÃ!m Sб ­\CõÍTÚ¹¨¿º§åJG8¤ 5¾Š‘Øñ±ræNRØû~¥4þ9Ã!}9it‹öHH˽sòѧw+…ñ£gâ¯ß€naĨÈLŒ® º]Y:ñ”¥ñïpH+èHBYVA°4XâLSóRµšÜ«Å! ž wH;P–Ž ÐBmRë€Yô|¸ñ=¨ØéÊ!â+Z‘¨ÒƒVIT«Š–íáß3v~òD|«„ˆÏ@KGsKÎ:Õ'2û²à©"륮">=ãÿ;eå¬`º8×ùãq& žåJ[ˆø.èb¿æøØx¹ 4›JÃ¥®">mS>T}O/”&©FÔì,p+…®">•&fil—RôÙÞç?)z ;]3D|*þÜaÔG®/sÚXWJá/9 KˆøÖ Ÿ½Sd*(Z„ž…êin94NΤh§ûøü€VµÓÆn9“RWGí5°ø<Ïù ~L;Ýéøt Ex&è™;˜< môˆS-J;Ýùøt 8úÚÆ6MjRÆ„ÕLˆÞr¥åøt  /h+s€+Uj=mm>Æo ´Ó]Ï Úàó·±À‰ÀÑ%m¼TvÑÓDàV ;Ýíøt  o½Òø×º£|§Å¨ (ì=þîëK'B_R;¡„*×”TU} gÚ¿~³Rt!u2 lÓ¥ƒª"zz›I¡Ä¹ç#ðyAÇ–Ù õ0 ƒ¤Jû'‰%ÝJQâÏ„ |:ÐÃ>ž•M|f|ý¥s|U#Àáß)>çÞÀ§ü$ߨS½ö´zîŽ$ìôlÓå´9ÎÔÜ´i-‰î6@[¤ ™dïýÏ.>?Á±M—Óæ ¸£ælûä^·¨„ü='~A±M—Óæ ]ò –AGœ‚2ûsì8P®´>—t$–¡¡¦ ‰W´º”âè¿ ÚžK·Ò…ö[*ב}ñqW™?ý˜ƒ[éíÏÀ¥2vÓyJUúgcÖirq ’è^N›#®™^ôŸŽÆT¼r"±s&—]P˜ÔŒa¤–ãDè_-aé×WQJÒÇšÏO|¹ÒŠadÝž¶ÆÁôq¶Øt0×—¨šÔŽad3…Ñ(ŽnGŸJ‘O¿RšTN1Œìχªà,0W‘;y”ƒO#Ñ¥IåÃH{§’“®¨Gõ¥,xL!•šMéÒ­&•K #µr–ÄC©cáÊ€•5:Q…ëõ<þ_­1ŒìÚ2"M­$mœZ†ËêôütÆ9Ð?Úb© §±ö€J®ocL>{.süîJ¹ ÍáÃH}|‘˜ý¸=ØQSk{ $؆#cbTŒ?hüZã+2$Pñøò‘w6’5¿t„(ÎOãÏ3¿‚f¼¨”%"©JíéÖý¤œØ¥ñç_‹\¥¸£¡Ür;½+•xDãßÑø´2×× Ò@.$%-Y;¼Š¥ñ—ßÞéBÈCM.¤åšÎï­KÍçñaü%GãWÐÓóv^ì¨MÎ.Råœ(={P)Ñø‡ùü µhjþˆ+¹t„9SúIXFF0þR£ñÛã‹3Q‰+/•[•a댿´hü¶RIuv(?ÌMÃSã²wÉ3ûã/=úüV8\œå¡n$FA«ùRÌû·g¾>ÿ*#úüв1kŒah½wÔIËÍ×ç_eFŸß ‡2ÂÜ¡ê30}#$ƒ²b9iÊ•®èó[ápbä®$­ùHG“:hcƒËUãßÑçŸ! 
¶F (¥Õ¬–þè~¥0þš¢Ï?-Œ<ÛTeÝgÆŽÒâAã¹z¸¤(Œ¿æèó+è’“(èQ;ÎÞiSW²úǧñ×}~­Fž#$LItƒÇr®Ê…¤3Τ~´FŸ"cù—xžginJ´ø¾Óڢϯ+=YÐrˆ…âQI@ •€84zn¥âóŸ"cðù´ÉŽÚôçLêªvSygjB*Û´ŽèóßÂ!›°àž“?Zå]‹ÖMÏÅ·‚Ï_gôù­Â‹žÞÉМÒ$iÿð]é(v^PnÓº¢Ï¯ÅØŒŽnÕ6ÞÝ8¹qG‘gòs“9P®tGŸY¿”€.Euwóß|Nçïå6m)úüë-°×Jó ªqœ ¬[)¶iËÑçWÐ*ï”{¨[®#M•})' p ðù[‰>ÿzÆÚ(Õ¶M jãpzV Ÿ¿ÕGê麭t„+zM§È–¶SeúIažóü˜&ÕÚ#õä@+ÜsÊhtœRÆÜ’•º4{PšTëÔ“fíÐå™*D«4í¡ø”+Ô“rJÕ ¢ZäAïÙO[îU“šÔ“_é‚ÏÏ“£yw½=éô*¹•Ò¤Ö#õtAGÇp zz‰lZAÏô±[)Nþ¶Ã8CÕ"—ˆážè ”yËå2_ÃIJüÄ3¬žÂ8ƒJ×Î'::¾NGB§9?ûü‚òë÷ÆªÕø`üò‘…j )½*=+å×ï%Œ3hëô¤ Ðªºmwäþ‚òë÷Æìñ+¾þDþ´Q‘ÍV IÓ.åWНß[g¸ªÃ¤(£5Û+LÃvFî(¾~ïaœÁ@ûÆ#4]IÛQéRA9PÜû}„q†û¡èNP$E_û%ªMpÏÛ¹ (îý>Ã8ƒ&©HÔµáõÉJsNö¡Še)âÞï+D|õVyp@ž2WŽ0÷[–;?y"¾¾CÄg =hàB‰l·Hôaß Jã)D|:%`æpü·™æANÆ.ê@±Ò‘CÄç@³«šwPëuíDHãÞY”Æ?Jˆø ´¡ûxƒZ ­ØuB}óÏûs+…ñ">A‚º!È:HþSë'‡rš¯(Œ´ñô°U¥S5—*àaبUytãc(Œôñè_9þ)Xµ©mT,{ÎDÍéÐsïß!âSP¤΄ºŠï|ES`Jh=|0Tîý1C5²ÞA6ÌH M%MðÃÊI~â«‘k¬P¼ Òغx»BaE·”{¿Y¶§†jä;T# t¡Ìøq@P'U Ošœý|骑GÇñ­F^Љ½/Ÿ},”ŽÚé‹LåëýÜœ5T#×Ì¡© BXg}}×¾=º‘i³6ݞLJñϪ‘¶Ò îŽ,®ÉØä@T­ø¬&õ¯¡¹ Í᪑¶Ò%{¿!ÝÑhà,ÕÉÆ6aĨ¡y„ÖÞjä}|Ù¦u2)[ö+¢Â)ÎÔP\N›ã?þ×P-È4Ì.S݇FUšt–÷lÓS;‘ÓN6G­ì?Eæà!]ñõOwC”¥:mŽŠö†6ÉϺŠþ còƒÉIÊ•ºÌD•†¡†©øÏ?"Ås%+oÛ¦¨Ú©ËL„ÇoÈžçér}'¡dR¹ZEw+…®]Éf'ÿp›ÎìÑk÷ÜùÉãJ®]IÅÅ— u›ãy›cÆ{ÏòC­]I›’Ø”ÂÁÄÐŒaÑüÅ™<(WZ£+© YÈÔûÒv1Èd¼ÓÌ•žwä@ù¡V‹®¤ ²¡¯o²Ê“(DCû¬øó´Oû•òCõèJêJA´0ÄC!:O[âŠÛÀ®< 8P¨Íñ º›°ªíBz-¹ò™»4ÊÒªŠÛ”ÚT¨$13Z@¦vÌ~ð6Ú»R¸’è™øçT˜ ( Ͻ}“¦ }ÿ0Æ6g§p%¿z&ªU$*Êq ^_í&<ëì4ɇê!úÕ3qA‘?E¹¶7‚»túl@ø‚Òø¿z&t&LŽ ã‡ÏR ?˜çæµR<(W{& 4ƒ\ ’c1Ž¢ñú<´š”ÆÿÕ3Qm8°¡l òßÎ ZÕ5•S¾ú•Âø¿z&ì6¹£¨—9R¥;O)­Lœ,«…ñõLØã)pOIÏ2ÈÌš~< íûýP0þ¯ž ÝM¬~‘ Ð"sÂ~þÂNÙáM ÆÿÕ3Ð 2C45#Q¨Š\xäî8“ˆn¥0þ¯ž [i’ ™ÙäÆ[ ô¯Ywe_„qÈ­ùÓ¯ž ÍrEOò'É¢­šçò€gb»ž ´æUëlÃÁyÊ‹¯c¥Ùd ?¯åüÄ·Šn×3@ÁÚÔrG)¤N'"¯tƒ´ Šmº]ÏD] …ØÏêtÛ~pØWô9PÆÛ*º]ÏDÝœåA È¡¨! Éƒþ› -´ŠhA1}(,`fÕj·‡[éí¡U´¾s|µçæ|~Ñ‘PÚŸEp t„VÑzg#7б<¥À(\ÔDЧw§ZEïJ“øü?‘Þ¡/ŽG± ©= °Óõ~|@çã }Âæ¡õ}¹Nèíu|°Ó§'?ÁÕN÷#øáA‹?’'jü§½ )O˜ÔQvq ´ÓœÁ :SÆp Dþ6/N؉×@gâ8¿+Íùüð è˜žÛ 6ÇRªaÅghËÒNsy?Üãwioƒl±é‰~P0^à»RØi®à‡]˜9«äç§VWxÚø°õ܃ÂN©Í‘óøÈ åŽZ´S Òr± (;PØ)x&LðÃfœR»cÞ´@)°´Á¥”6éTì”Ú¿YiE=ªfuæúšÿP¹ÖçC¬r£gâ×oVZÑ}¼Á{ŽW ºWiœžò‚b¥ëü¸ «Â¤pô­õKІ€ž4ýj?‚t$Wá]ÒÎB¦*ñ@O…ÓÛ©~ì’ÖûèúáUDzngEùfT"üçrœ½¿~ðÇÜû%?¬÷´¸Ç—âsFL¯=”{¿”‡õÞn™âÔ ™Þ(ôUiR,= \i}Xï/èLR5 j7Š–‰{ožRÏJ¹÷K{XïÝJ¡Ë3 ã:à¥Û×?I«TÒó+ÅÞ/ýa½w KÞ©tŒJC»pgÆûG= L{@±÷Á3a¬÷´ŠƒÖ¡ìÑ¡u áYIÏm·÷ïãcï—ù°Þ»w:e˜eâeΕòä/<­Vk/¨ì¨²Bá°j=ª½ª>ª#qœ¿T 7 ‡»ìP84Ð:І#® KÓÄ‘g&%²5wM¡ph D6SHgšÎ(ôˆö=(VZs(èZc„ا8'·§ ~‰{ ‡»–P84P™y=•ì}èFœ†C (´&]CáðÐî¼…CŒ }}”{ézíA/ÏJa§µŠfiÂjx™B¶%Íï;=“MòO±³k; Ú&¹ JÑ‹\twºnדA» úõG Øi·…¦Vdz+Z›ÎHÏ6ý`Û”åJg Ø1ÐŒª9j|’K×DV:~ØüéÕ¯¿ÂÉߌ°NºZ*êEII–£Ü¡iäÏ{h9œüu‡“ß@‡”"*•X’åP–Ô‘¶8}VuAùN[ '3bET# CžÍ¤¬¦ãÆ­J:P¬´åpò(:º êp£S6#ÑNU•ñPC]P¾ÓVÂÉo"W­ƒCWÙ¹;ß_»‘>æàVŠÕj8ùm¥½bâC¬,œæÖ£üJyò·N~£ºæøPM…“}®{|œüä™(ß ýüÐyB„šŠis›ÌƒÊÉßF ØiåiÀ,œàNe9Ç,U®X‰ç'žbg·(v ´C™5£IÎÔè™Tº”'Üw j§+Pì((YF ³=•³Ã²ƒ$óªN(WºÅŽ­tKNº0×7hüæ ñâ“Sª¼;»§@±ÓÊsô•ñ´¢™ šT¥ó{6ƒ[)ì´ç@±s?ÔÂJyh™cßÇf‰ÏƒJÒî%PìÜÇÇ‚{dPëà¸ÀX!L«ËÉ_^ŠÝk Ø¹+•âïRß¿M8´N ãï-ž§ÕÎS1©Þ9ßLîE…>Õù=?yÎÓÞãyj ˜²†Áǰº©ÙÊŸm/(í´xžjA¦JŸtÙ¬¢,r1‰Ð«Úéx@¹ÒÏÓú4¶nSnÏÎ줜TítÅó´>d@•Ýr‹ãa4%øÒ@­”vºãyj$€¥#]lõEŽœ‹ky²>çéHñÔÂȉ–‘C…M€Ç?y­TŒ”PvÞi{‹{ÔP<0P0 —Ù™@,?¨•Žì¤Õ¤›¥ŽŠ v±BÉÜN 2†<¦Èš?¯Ür¥= -xe-P”¦åªjw'CsAi§c„â{§8¥$(“mt̺‚žЭv:f(4+ ^CŸØýÙûCÿÔ;(ít…â‚.Èud×ÈåãKùŠ|6´9~ýtJ›ø'|îÈžWï—íï;MJΤ¤x°Á3ñOÍКËGõE„„ØÕ94zV ÃèVŠÊÌ¡xЬp(ÅØ9årÚ­¢ÈŤì°ÞÞÏKr+•âÁiÓ‹ö¡†§øRs@2wð<:~ªÊT¶éŒ ˜í’U‚a¨±¾T¡‡bó'¦Ì=#¦.é˜Éè=“­÷ÜQÅfz>ï¡Ì=#f»ÄŠS* |Ò(pÞûEµ¹dïÌ=#æ}|!þ·•³ÂCºòä?º”ÛtFL[i’Ì„0QHþ ªhÓàÉÍ·À€¹gdÀt òŽây{Ø€Q.Á ·gFÌûør›æŠù(^|™ŸmNª¾ bR+2`ÚJ'œ^Ì.‹ 9¹TpFðw;ÊñL ã« ‹øDågƒe„ÓYCré‘8* ²mÇ3@»x}R¸õ¨Ì<I>â ƒlÛñL¼ Ò"tʒ홪E;Ít)Á‡åJ[hk4PtË”ªÕr\Ò¶±Ï!РÛv<áñ'ä˜ú¤ñ'+ny¥ÒŸÇ‡:ž‰°Ò%îyVUó\X:Ò²ñ!´iam£gâÏoÐ]çÖ6ñ =×ÔbZ [dÛÐæpmšÑ0„DZÚªoꦇN»…A¶Ó¯ö¶5*èª`‚׿_šåÇ£o~ê- ²–¸wB¦éÐò§Ò¹Né>ty:G˜«¶5ºsõ‚ÒøwœŠ7P0b À9¤©ÛØ&¶&µý¬Æ¿ãTüýP’ŠÙh=‹;Š9ÝûgbÔƒâÞq*Þb. 
ÓB´‰=´ˆ0_P˜ÔŒ©ÎKW²®…2vTÒîcæSåZi+¤:÷Š©Îå(!N³Ú•XܲŸ«7Á '/¨šÔŽ©N]i‘·¸Ñt>IU“rËÐn%ݱžTgI)ÅT§=>¢2ŠQJ<&5'ß)Ì\ÐôÞQÿý÷I+·­Ô%íýRª»«¬¢d:M.ç'øñôÞQTÎúë‘8kžt,|L§º òNKJ÷ŽzA{/g€ªhÚRe%¯J”+½wTmÒ¼q9©8… ¢ÿdÐÕwz隷•ÊA -αHý˜Ê²Ž®ŸlHq+ý âõÉ6  ]²&{&¸FYe2öûìWz¶iIé²4‡EùŒ1Žnj¶|씯zWz¶iI®gâÿü}Ü¡žŒbGL ¬¢#™?’ÿÜÆóüXMêfÏèÀÝ\*Êά£!Eé »¥I¹ž‰´ƒYicH1tü%­š+u=²“:ô£µG=Ä8ºYW©dÄÈè»ÄÈÝö+…I¹ž‰°R” Wƒš•C¾­wØúY©¸=BtE¯È"¯¤¾K¸$A½4ú9Ð?ÅN]ÏD|§ ‡e #ÿS½òõÏ¡íAÅN]ÏDüú¹#pÊfã˜)oßéÆv ¿°ÒÛ3@a§JÆÒ\yXvÉŸ,«ÅJoÏD|§z™¸¢Õèõ®*|ÇçŸv ÿàCʼnîx¤‹š\Åx¤‡MǼ²ÉO\›xI%Nh&Ÿ&†*U¨Âhçêø€_PnÓ' tH…7%:£?GžNevʕƉC­¬òÌGˆ.%åìþ ”۴ĉÃ~) 7è1¿ÏÑåD"]éÉи•b›–8q¨ ³‹"›Œ ºòKåÇ=?õ5ŠmZâÄ¡‚®&Ä5Ò$̺)i¤§tpgÇþ Ð8q¨ ¤f—1FÉKuëå•ãT“?»Çþhœ8ì:tÕuy$ëAûs›<ïa¿pv ã/+¸’]ËIÊ»!{®‰y}e:41ç'Ε,©ìàJ*hG醎Dg·œŒ0É÷ÒQ¦Ï½ 4þš‚+i+m“¢ÆàìÒ”Ô© g÷ç;:P¬´æàJèæëÄÚ ‰’vuØ@[÷Oã¯%ôôv-È [`WÒjªÒE»œrÊ×Û_Qª’j =½ JEš ‡ºOžNºMU:÷ôLôW”ªêâ·§×VŠ˽I«¹T5?G`;'ÿ+JUN¶âíé5ІJbkSR°b.>¿R}§#ôôÚãÏ MâÞ>’ÒõñPN&±¿¢T%Õzzm¥Yüò¥lÜ— }demšvU÷W”Jˆ‚ßžÞ~™3ÔˆI_ðòžçQŒT½¿¢T%¡gÂõôÚJ+ÎÓ\ðN9´ª`¥nã›ê¯(UùøB¡_ªµž4ôín9eÍÆ†GÂúã¡´§_ª¤–C¿Ô/bƒ¥r©šú©iAý¥¶ú¥ú¥Ö“m:™“VÆ6‹RHóy-”+­¡_Ê@‹œü©³±U©–gÚøôD_PÚik¡_ÊBy~£jáFLŸy› ·RØ)z&þú™™^\|‰ŒÂJW”6/æ³RØ©ë™@·gï–?•mÚFV5zåBà©u˜ÃÏO\GwI®g"€6I &%Y¡³›pW%m9ë ª_…Žî»R&ùFæÚ-‰àia¹šåJwèè¾ 2Å™K|þ„J¯Î¤3Š~Aùõ]ÏDú×´xG%U O–HÃPñ+Å×ï9tt($È3•©K‡lÞÿXéóøøú½„ŽnÝêK1àÅ ¯ \Й8ò¸§T¯¡£û‚ʨ}§¬f%SÖ¤,*ù\ÕTN)ðLüú^éçÓlhs€ŽùÓ¦cŒ\éáïu ˆNÐ3ñë{¥ í%OäOS\)½>Á÷ Xéݺ1xÁàY“²‹Mêø(4ôLüý;Pd« Z˜ðÒ‘ßgÄÃaëáíôo€®P8ìC‰#6†|¹\ØoºC£êþy‚þJ’•äx&è”¶F©bI¨6ŸY³DåøõW’¬$Ç3ñ‚v,%ÔMûXåõO©$ÔO ñJ’•äx&¾@ŕĸÑÇ™Ø6e PF|c9Pî}Ç3@“Ì› E€2 © dÄ8šUý•$+‰<ùûB?* ¡÷ÊD7§ã.àŠdz&\áÐVšË¸a‰x(Z‘À»UNùò€ŠñC›ãÏ(Ç9ù‡ë=—´LjÞ;*¾Sg˜åéÝ49Ê“’?ƒyn¥¸£æ½£¾@G%¸$O7é¼~¯è Š;j^ý¨øN'R\L!US`§ñoo§ÿ‰;jŽ@²ÒoíDJñèì£=’$¦%'®äzHVJš3¬h28›dgÔ\ŸJ<ž©œ Ê5W Y1Ð%ã¶ÂæIæ–Ÿ$Ap>>dÀ”+ÝdÅ@‡L ÿ˜4ô‡››¡y>L6”;j¥@²b èBJºèªËcàÆ|ÝüJ±£V$+Ý„&Üž‚{ß1<úȇr¬ÂbG­HVîׯ¸ø N1(IÖ´¼AÕñ>>vÔªd¥_ºB$¤@¸šª1 cÂМ­‡d¥œv§aì}Q>—R~bSêôä1ŸŸ<ÑÉê1:1ЊîcIvõmt0O I4d/(ít쇸¦ ”ŽTÕÖò§<úNG¼åJgŒN¶uvmðõ•T¨Ò•_©ÚéŠÑ‰­T& 2ý§'7øœí¬£“mÃ,l¿5å@H·þf%wˆNvŠÑ‰1 6Ö-°‹a–G[›’vwœÞ4 ;EÏ„NöC­WH ŒÝd·1÷<½ b§è™øõ›•NLÆV„‘œ‘(lÃI7:yÞ)N~ðLüúÍJG&¹Ø‚§Œ-YÝgǃb¥-F':t51 v•9+‰À˜“VFŒsË:PD'»Çèdk÷q‚»d’®ÁTu*~T—’ß!:ÙQãpX•Gfx3ÏÓn;*ùë$Fæó“'º£Æ¡f y2‰j+5 ­¾O팼\Pîý5 ´ã@ë}Ÿ[58é¡ð"<ßÑr¥Qãð‚¢YpÈÀIŸ¬ñå–žJ¸ Øû§‘âÍŸhÂã“ö½l¦ä•^‹¢k·Ò?5ÝJf$0É*(#UM$­Û(ìýœ¢Æ¡6á™È˜¶ê$Z8¿,hÕÊÞÏ)j”Ç/“Ã,Xi±ØT›öó¡d·¾?¬Æ'ÄßÓV¨?X>ÕÜó_ßÏ©‡úþ•~© C‘QüÉÁ}çôæó’/(ì4§êûŠfÁ‚‘;ɨüäì²=ío”+¡¾?n5ÝÇ›2oªmT¼Ó›qÄU;]¡¾o UbÓÏ·1„SÌ„œ{žÓªvºC}ß@›Ëä¶É']ŸfÕ;;ý)všS¨ïßwŠÉƒ„ ZR ®<,z†âųRØiΡ¾ï>bmkµ¼?\g¼ÃƒŠæêûºÐÓ‹žÁ¡´ÄÉÞéýpôVZC}Øl$Þé!HÕHO£ivyœ¦Š•¶Pß¿+MeRV›®´"Á ­¿+ýª‡îãas|p{Ҭʈ¨žðÐ…òt—œGè>6Ð…9¾J‰GÞQ…î 7=Ÿ3ü‚r›æº”Sœµö¬jRá¹%:”+]¡ûØ@t#áõM5¡¥ñ>AÏ@Àå6Í;t(“q: ]¡rsºjd>ÃÈn¥Ø¦%…îc]­ƒ²LRHkñ<Õ%aÛÊ<„Å6-9tT¾ôM¥½L•wáUÍ–ºå“°C“‹êcã8,¹Ä‰CÍÂ1S¡™7~Zc>ÔáçïÄaÉ%NèÚ$®YxkzÓ÷ö9Ýx'K.qâPAg‚¾éή©ïr³Å‰YÉ•Žwâ°ñ8´•ÌE׊ö[µSÕ9ENZdÆ;qXr‰‡ [ê m[ºT|zNT­Æ;qXNtøNÚãçò 9OgáÉßµ  ìô8,¹Æ‰C-èCédÃa1¶ª•yô¥çâŽr=Èf,#Â4Ùw!M­ŸR4õüçó_‘È®g"€IÉWÈÜ •u_iú¢l>¢…”vêz&hi’Z%:5Îß Š¤ìçäo”+í¡"qAY:ÂØ Õí²6µ¨ºí á.(íÔõLDPI"÷|j§Ìèúøƒßò+…²g"ÿT„iZB©Í,ÓÞ)2h‡°ßþ“Z¡"a ƒä¨šoú¥]Å} U˜ç “Ú¡"1l6²ÊJA5û®à:‰Ç|JGÆïz&³›Ž“áÀRÖT«E^ŸWô¦ñŸÇUóìz&(DS3ôN† Ðmm¿Åfȇ„ªyv=/è8gû Î* 뺱2kÉHLêsƒP5Ï®g"€IÌÈì¬ÌHT•ƒ»nÓí@i§®g">>jÑ ZñšØ^e{E¶|¤ÈG¨šçÖC^ê‚ ±bàèÎ&îÕ.šÔzçi!/u¿>>ÔiUcò`M½MG•çCѤfÈKЉÀž -iŽò^Ñç@¡jžÑ3ñë7+Bþ+!ƒå;e2ÃùÍggP5?—û›—r+EöüüIU—Õ=ß xß•ÂCé)ä¥Ì¤jÇX8òüK…ùôñO’v„ªù 2Þ¼”"1SÁÒºMkõ+¥ñ“]V‹–{dc.z$º;³=‰™|æ/(ô˜ìºí óQ mnÓX™ýmzòTn¥0þ1b²Ëêû¢l¶¡ìÙ+ÞeÙV#Ó ãGÏ„OvYÏ„”"6Zºº\èÂi£úIùŠ‹<å7Ïw ‰ÇÞoíÙ%ºó)QxPØéŽÉ.[©$ùöšM•àŸÖÇ™8ÝÛ; =¿¾WÚ&2e‰xG™ÛC_ê¸=¨¬=ÿüöC¡JÕÁ§ž§“Šçè›!Ù5K§ëI'ÊgM¾Ó©ç)ì5Þ¢ó'N!½p¯8ŰÑPÉôJáMì”:‘µ=Ûô^PnÓÙ‚8Ÿ¥xm¸øt ¨k¹3ùtt;P®´qŠq•»ŽJ+í^–(ŸTýå6#ˆSh oÃ;íê¡èׯ÷Ðv+Å6q†wlk[蓞ê­þ$ï«=ù’ó?x‘gœáUÐvÎ)1q×~’ï'‹|ð²Õgxm¥JÜ(öÍ\t·ÇÇMpbVŠ•®8Ã{A1•Û|’²ü`úøç¿ üPŽgC.S‡Ñ*³6ú¥´Ypè;íܦŸí:_í¸’ÏÄ ÚFC'ZðVOyžsu¾Úq%;ž‰ 
ÚÛÙÓË4l̶[}¾Úq%;ž‰Úd¥nÏÐñÚ¡-øìî<×Ë|µãJv<q¥RÑß…+Õ“ö°÷»_)ŒßñL¼ óä!Ó Ò*04)ruØ>»g¾Úq%;ž‰tö‰÷w§îî®êõm楆…ƒ†ž‰_ß ;I]¶öbib†5F'g:n¾Úq%C›ãŸß€hpgIù©æv¾í›©Îæí'ÿÏļ…CˆÅæ¬Dµ>Ó«²™g4ëüÄñL”üÅ3qA3 ûeZsš–ñO²¡˜P(ÿ‹gÂ@7@·tuõKgÑR·^IÊ•Fž‰™±¢Ì›v´b3/µ5-wuÎ.(ÿ‹gÂVÚÁ‰UßQ¹=;ã(¡>Ÿ[)Œÿ‹gÂ@«(ÒLêœéDÌ ñW¶ŒŽ÷ñ‘Aûâ™0Ð& Ò«@?JçM{Ò•fó<¨x(Žg¼S³X_Ø3UC³5³ø¦Áù1ôóGW²ã™ˆ úË"ÔÛº9~²&ÿ“âGTMj8]˜Š'­fãŒÙZªÏ5PYiq<T\“Õ8ÏÇ_+ý #ÐÀÝ%¥sÊ]·åäldåuz'ÝJ \9 ‡—ÎÞ)ú¤Ç’‰ûVµlÌsµq»Â^Z4òžO¸„Lx¡*û£7¿z1Ûò?ÇWRä=w §”ïîÈÊÜR8x2öÛ´äÈ{n ‚<ˆ£É*²Ž„­N~þÿ+Í‘÷Ü@¡c¿PßìåÍJYRùóõ‚r›æÈ{n “mhÁk“®cݶñôr+Å6Í‘÷|V§Í!ÃÖàBb¾TÕBõþ?­én¥Ø¦ÙóžŸÚÙ¬ I û+!8Ó™™ßwúùßÏOðc~ýìyÏ_P©ÃÍÝ —Ýéî«m¨áÕ¯ïyÏÐŽwŠù¨¾ÔÝZâäõùœ”+õ¼çh•/DŠÖ>T/>VzûçÿuAõë/—•|A¥^¼À¢Þ'ó¥Œó­oœ¯Wʯ¿]VÒƒ.vÌ`îduÆ¢EßiÖ;êy||}ôLüAOÏ}†„Î@jšUòÊ]ù&N…Ââ.Ùe%ýJ73‡…þ)vPÖÃYES2¸{§’C9IRg+-^áqB泡’ÏJæS¥tv*9”ÓPðŽ3ÌnªâÕV´ùI ¬Ÿ$™=?ñã ¥´0Î` Ešzæ–>”®“±;cUy× Jã/=Œ3(t³Ø‹ú SËZï÷its \éã  íñµ¹£èIo #³ \P™aœAAUœ2‚…8ŠuSÖNN1Ö­Æž‰¿¾Ag’Ä鮘6Ö»is„©êtÇÇ&(‡q†ûøR‰™(sôÉgé»\ã7 ho8ýï8ƒBywnŒ‡Mº=›®¤ŽŠœt‘3)ÉJžƒümo˜6)B´mjÇ ]ÊÂêäáŸaà²ÔÚ ´£«Dµw©Î’2 1©0pYj í Ó†åñW‘ôTÛÜžšA+Ci6ºåJ[ho0Ð$É“•Äémœ iRµðãó‡ËR{ho¸ïKhklKïy¦mïŸS* \ôL¸ö†ûN38Ÿec5ÚˆÁˆO”0pYê í T¼¾…µÍ¦Ùóù$fN÷ß —ÅñLÄwºIX—1ÇÇž %­âý$¯f¸<‡Mp%5' ž•$Dm¼÷SW;eܘ[ÎOW²¥èJZ¢DÒI¢“¦+lš+Añ ´Ó–£+id•Ò,±3ö¾F|M»Ùßw<”у{®  +M°¬©Ã,úÕõƒµÏÙéL î¹ã™@5}å÷äo˜8TšÂÂÆ¶¢¾ÿç@Ya’«8ž‰ %ºæë¶™1+[}(ý{q j§+4·Ü•“®èDÈ<ù+ߩΜŸu…I®âx&(T(zÏÛd„W§†<ÍxÐV˜ä*Žg"€v9ždzÒ–{Íôj~jM¿Rة㙠äÖùaÛù%hʃöy;+Lrxõmn¹*!ŽBNÚ†–ÖPúoŸž´ã™  §šÐum:Å©{¿©÷÷¹V˜ä*Žg"®T Qs`62õñ8h}l+"¬0ÉUfÍ-Z Äcàöè ›²‹µ­ÌÂ/(V:BsË}|GAâÊŠjJ½o ÍW˜ä*3Ö£–Q@"8ÃyÚß©~uÍž_t…zT™±e H N}]÷œ)$ (Nè³B=ªÌX2ÐLA®T ”õëk þçŸ^±µb=JA8J'–>ñ>~hÞT³èÝr›®XZÅuÌœmŠ‹OoӮ׈ }dÅzÔŠõ(Ð\ßNÜ}¿“þøÄ+Ö£V¬GhÚ(ìcæl2(L·r2V¬G­XRÐz‘]0r×òxš°ô\=yÿëQ+Ö£l¥LÚhï›:ª…ÃÊë$½ïÆ¿b=Ê>è`&…Ûd¶|åljLJ·±b=jÅz”™Ôw:ÁP×4»³5!S.ÙÊŠõ¨yÏ—2`tc*¾5=œ•§¸£¯†ÛtEÞs­ Ü•3’TiÃí¨Ïk¹ Ü¦;òž/#ÅPägW潯;I‡¯Ït X鎼çëruÊ¡®wSׄŒ†‘ßê‚r›îÈ{n_JVqûó·¦I5¾s%w+Å6Ý‘÷Ü@AÇP¯i,ªÝÇuÞœŸÅ6Ý‘÷Ü@»¸=òvmpe•îOÝêdŠÛtGÞó»RÉÏ‚ÇWš"[©†‘Ÿ-éAÅNwä=7PãÂ5Ë[4ФBøú¸Mwä=7Ð&ݬs,?‹UMIoÓù¼Sܦ;òž_ãÏ|§Z,±Ýž«:?”½C«èjú¡Àûºñøšáe åæ§>Þûù‰o­)…VQ]¸¢¡mܬY°(¿Dµ‘¦ ŠmZS­¢ërÊâCgB÷|_êLè¤Ìð \i ­¢Z8Šb¥[®DÛûÕ¯ôß­¡Uô>>Í3ø¥t¾­õ¦ç>Žƒ[éu<ÿ÷ßT+  ôbuefÕì¹½ÓÏ1””Zƒ´¦ ý-z½¾ÿý뿨 m¨ûìJñ“¢û£dkçC}~‚«I]¯/€Â5£ìÍTCØ-Ϣש¸ 4©r½¾Ú zÐ?ÝMµ8ºõK æç€v Xi¹^_Ò%(£†òõ“iq * ÐÃÜzAiRåz}´¡Ev%rʲ«K‚æáIÑ»•¤Êõú(µ @u{jc¥IEIÐâ”ú¼¸ÿúñ_~ƒŽÂ€W{ÏëóõÁ +| ô¿pJ¡gâ×ïÞ)y @þ«É-å™@P¼ ¿Ò_¼ç˦9ÀX}šÞxä)!ˆU(Ž/µÞ îZ"ïù² 輠ɣkv§+‰ºR˜|¢é ªvyÏm¥m¬{ñ¤+UE“+H”+¼çŠ3O¨0dÖ\ã§·½ñt\PÚi¼ç÷ñÑ%¨¯¹ž+A î¨±¹•ÂNkä=·•f%”Üž¤ZyP—”¨Aè@a§5òž_P¸’‰®¤&´aXÅÔ’ÿúˆNj¼çŠŽî±ðÞ˜–ëMû¤²q"zP9Okä=7ЂÙez(mÕ§£{)öÉI¯w‚»ÖÈ{~MŠ®d©ú7xÐ\))õO¬ïA±ÒÈ{® ¼ZÚ%ÏyÓÔ™:¢Üë™ö ÿàCEÞsEnMØéä½ßuäj·ÂÙíìôo€®À,¸lê¨Ã=OèëKÚ3Á£OGD/µ_fÁZw`4Ð$‘îÌPdSJ]­öXRöó.(÷~KYÐ@7ĘtzÙ‡šúH7ŒÜ/³à)'½Ì‚ݘ:¦]Ȫ&òù§/(÷~+YpÙ̘@0%f„ušîÐ<êñðÜJ±÷[ Ì‚¶R¶àMNÔýD%ší‘ÒÑ~™kkYð~}äOÇÄmªE-=L¢ìÅÞo=0 Þ¯ÏØSœ *PÓxXŸ6*;Êis`{'knA™c£¾ßTo=Ôº'b>?ñLÕisP”Žæ”J“´¬öªÞä@ÕNW`0PœÎs3;—ç£ÈÖ5Sñ¹ÿ(WºÓÀ}|TͽŸ¥eŽõî¨åWJ;uÚ/èHTOY¤ØÑl$?”öJ>ï”vê´9^Еøùå”ÚZ„µŠ„f>×±…R›#‚ž п&iñ)ʇҬ^zEþ(|©^Ó€n·­K”tªöG5Íô2×w¢k÷Nqô˜ìñ‡Ó$ðõíɧkfQG­< bü½‡^ÉßÉÎðêuR»ú§Ý\ÎO|¯dí#ôJ(oSèX¿TUZi ?ì‚Òøû ½’ÚQ„F}Œ¾­ED鉕\Õr¥+ôJ^Ð…ˆ—S»ãu]Åþ–_©ÿ½’ûŠüᔂºíÖ¢­@/s‹[)Œ=®Wrç·Sv2œÒ ß[ɤ(bÓ‘C¯ä¾ÃÈŸ2šl·ïD' µ+ɃŠIz%tL´àÁ™Xy¿R¹k`¥G’Üâä5äO·•ãæP¶*N¡ÀN÷3%w¸ÏOžüéh!j ÙHZ®“e½æ0©l"j”v:zÈŸîò´ßÎ&IŸ¡ú»Út­³‡GÎr¥#äO(sÒˆN´1X£híõI”v:fÈŸÞw n _ªk•GÙ™µÆ÷qÜÜJi§ÑëÛV‘€´%ˆÀ.¥nÕ9þljb;èñÕ½>­D}I óÚÆè¯èS[ßA¯Îèõèf= ‘©ÚêKÛV:ÚAïÜ ¯×ç@å<CWª"¢Å.mÆšþñù¡fôú  ÒcªúÁþŠnÖ1“üJñ¡fôú t } ;íÝšZÚëõ}"‘ôøêŒ^ßýú<…èçoz ¬þ:h/(öþŒ^ß])æ£:‚3ý0úZ½=½;èñÕùåõiñ`bð‚\Êz«-MÖ„}¾~ ^ßüòúÚ£l=ш8¬—G%žöÕྠj§_^ŸÕN¤¶97Vº´µ)Í'á}:(Wúåõ5ciFÈøW5—N $#Zº ´Óõåõ5§x!m8Ý Ð¨êó< ´Óõåõ¯¤ç/¤ÊæTú7þÙyž8ÊÂN×—×§CWU8»r‡ïÖÆ 'Õøå^ßúòútæŒÒyƒÚ©*£¡,¸Ú渟w ¯o}y} ZÐ~Û+Ôˆs5qtßåqòÁTŒõ õ´µ…ü̘à¶v1•àùz¢­ó/õT—z}ÿA;¹ÊÀ×§²ÃESž*Oø¹t.(éŽúã Ò`ÐÇ×:¿2 þ4êGèFþ´/æù³'©´”ç q |ü¨µû3y0©lm3æ«ÚŒ¹ªˆ]P;êëNôKe€ê0`Ñ 
Ú(×Aë¯~TÝÚ…ôõN'ÛP6ɪæãÉK=ÞoPíBŠ+Õ!ÖOÚøyiüèÉMjŠmj=|¢ÈÕ@ªYI%XÜLu–õ;ÐF¥«¯•6$¦~¨áßiNŃ3éŒwÔîýZi¡ÏÍØ6_Y¢5éIÿtðâûZ)&cç€ñ¯±õ0t<¥ÓHêßV̯•ânþlGÈ»jÅL ÁHôõ;PeÀü^)Š\&5uî„\] vzf<(޾­ìbßïG~|§ô¤ÙÚ´!ªr ¼_ '×ŠÉØï•2…ÄÙH%XÈ:µãÇdìÉVôû‚ö–ü§Z‚¯ÊŠCþ[ÐÈÒ¼u-+}=émYH„欚>ÿcŒ-E–f­ÈJ€öj“Ûý¡Õ–ó4Œ1)ôšëJ+{%1À¾¬›CsÓL“ cŒÇ# ¡¹‚¢ýhrÖ\ã{ÍÐ>:¤Š;Œ1¶YšïJ«»NzÓóthsË0õðÆ¡EÍmâ Ĭ,#zHkáp›4ÙcŒ-E–fMÐv£pWfb¥xƦ'8 cŒ-E–æûøŨ½M¦þVx÷*§Ô ^Öû=fCnSÈgšŽ3h}ÿ´:ŸŸxÖû–s`½¿ hi.•¤ë‘ÏМߑ$» ´Ó\ëý¾b”Huâäפ¬Ò7ÊKe¾¬÷-×Àzo ›3=i‹+\ÉfÑ´Ùé|Yï[nõÞ@¹£:Ɉêîh†ª_)ì4÷Àz¿­À-9”…¬‡ñôfËùµòÌ—õþ´ê¾¬÷ÿ¿®·ÙÙ-·™Å湊 d܆¨>Àé {æ çLâû¿¼b%ŠkgÔ6ÐoµÖ³(-Š,VÐf“\ඦG¦P‡­LØ=>CjÕ{íeÀ6œÞe#¡2ášÍŠõ üŽ3Y´e}ÓŠmZñ¢òRngïÓäVÿÄ\VÇ™ˆ üFÁž Z Þ†®i¨ô”qê8/hËŠ2½a>R;tizVÊ8uœ‰ZuÈ|¶B¡!yåŠ`û–ß•2Ng"<>‹² ‰~d`Æáø˜/ï —Õq&ÂJ;£0#1¬s&¦Õ­ð´+ q ‰¿¿ £B¥k:’ÑñÝïï‹ú - \èšê ”\³mÐÒLÓÉ-Ûøk…Ë ÎÄ#è¾”ª u-ßÖN’'U<þ¶x[aàr?ãKZÖ8ÄhÌhþ®gìF‡¢ú'ž0Tó „¡ w¿›O·A6#ã5ê4ƒµà_0t@Y=§=ÁˆÃ,qºÝ.(VZR Ýǹ±Ó­±mÓqU3-½¬8P‘@º H% f$Lúée PÉ0t@'Ü<¥ÎXXµ»)@å}|ÿÏS½„¡u:¼lq¢àÛR}kÒܦ›%{AAªäLüéE©ÒÀ¬Ô”°Š™T„TzBŠÁ_Z ßîç)F™h@?êé=U”•NýO¿­¥úíõrÝ4;l4WV•îp ŒÓ2ýö€N|œÐ—éF¾¶fle'*”+~{@;ÏSÔ¤{–#aŒ„ïJ-NW ßБ8îeªÇ †½Y¿RÄiM~k G´ BÆé±z?¨i:íA§Uýö¾(f}«?]sæRxíÎyPÄiÍA³kDý@»±÷úd&ø ¤=à¥â5»j-A³ë€âƒ?z勺ó¥¦×Cž›eHÕ4»è³ØuJžgÖHK{tƒr¥-hv¨šQîdß}»êäž“õê(Cªö ÙuVÚ!WÈšô± ·V§àñGyVÊA³ë€ŒÝL-˜ž´ùG2”MA÷  ©4» ´g­I¯¹8 ›ŸT’iŸ ”!µ‚f×ùM»rš–¬Ç•éTÑAÀÜ©¨ÿMQ™h)hvÐÜ SŒ‚ÝJL[.e¬t_$\œ¢ˆÐ$LqîŒÁo44˜ ÕcŸ¥‡ô¶8Ô?ñSœµå0ÅyA1˃ɃaÖ$Õt&ð5Ý3ó”ÁßJ˜â4PS¿Mš¡ŒCÂ28¶Ž«åJk˜â¼ ¯ v‘°‘¤ëŸ-ð¬”ÁßZ˜â4Ђb¦šr¸ Z«×Ý^9ƒïã#ø[Sœç7-dÉs›ònfó6ð¢F{@üm„)ΊÊá@ãp˜D©MÆœR»ÑëAüЙøýPìϱÐì§»cã7§§T~¦8+8ÿýhŸt™¿©›åÑà—g¥Hz{ SœçE 4Jégå¸QæÑ”g³Ç¯T§8+¼9ÜçÅ(SþƒŸè¾Œ~£‡ôþR? ºM{Ú»dô?&¨¥ßýb…4?¸n;jè!]íŽÚKÐî8 PhµYK%k¹j⺣ªå6í5hw¨ú1ìmоÌ<"Uó8^èJÇÊ•¶ Ýq@+²ˆÅÂé=ËÑ–ÑàŸ”Û´÷ Ýq¿dfãùš¶#·¡/js?üJ±MûÚg¥JG eåWõ¾è·6öþ¦Ø¦}íŽ Š†LEWÎnÑÍivè ýyû¬¡ô´;ÎããRÃH-ùµÇ;j)i_úB e¤ÀîØý'ÎHÔ+_0Œh7>É©¢Û´>ìŽ:$°;hkàL`T„À¬)…Áß=(ãtäÀî0Ð^Q–C}fÒ|òèL ®¨úÊ”+-Ýq@³V¹&ƒ`™Ø¿É¾#CéøFÕ‡ÝQG ìŽóøÃHX|ûó‘ØI¨J®þ®q:Z`wÜ•‚Õ™™§ÍÇ)Em_Óú°;êèÝqVº&kPë볟÷uØñÚAÊ(âtŒÀî8+ÅÑ71_3-èAGU#VýD×ö‚"Ng¨Ioy^úñA¦¸@ª›n»ñtÎ:ÒžöÖ¤Ç 5é ŠrXH3±$H­z$èä½eœÎjÒ´ü¦MÓ¨¢ÆBæ5²> Xé”P“¾ Jž'ÿÊoç Bûv9koMzæP“> ðÌ›YSÉi‘f—´‰¤7¿+EœÎjÒºŠZ>¤Q ©{¸EQvv<(âtÖP“> Sëq?‘¨$ÎÛs+ÕW{öïq:[¨ITÀ2ßÔ^uyÍcX…רMå‡PÓYHûÖõ?&I¯ÕXÐû’‰¬\=I¦Tú'ž…Tgd!PA^^áp¹L¶ÀèbøœT|÷ûÃBª3²h.íòùûªåaCoQý…<(WYHZ™˜âÃ×ÍÛ°œÜ/ ÉDXHuEÒYiÂ礱(ý€áñ5“ÞïWŠ8]‘…tWŠosI¬¡ä£Ço,yê‚xPÄéŠ,$W[(N®3teެ\é|WŠô|EÒyüšåÚŒ¹ê3u„t 킽ÿMq7]‘…d ¥³eÈ š…Žyq£$¿3ËTƒEîù¦Бm8¶\7Âи³»w¥ãmp¯È=? `BLÝv›â8’e(oJeð¯È=7ÐV+•¯q‹n&©×N§—âM”+Üó š“}œtêÈÚqÙ£*®‘ãmp¯È=? ¥ÑC†Crã°MNÓyVªÁßRäžPxæM”R†ÉŠ•˜ôöõ€jƒ»¥È=¿¡ÚŠù¨ÅºT2­Nº·•Üš4rÏhW´ ³ôa mS¾D),­÷7E†²?Šïüþ¾=²2ôœB)Š©â÷ôs2Ÿùý–j˜ß¿ G~ÅuMïÄf‰Š«ù|æ÷[ja~ÿ€’‰0*¦ŽŽV‡œé#.؃r¥=ÌïP¯µc¨e¹òèI"ÏVª¤ýAG˜ß? 
¿)œG2MY“(Oüœäg¥ŒÓæ÷#($!º¹0W²T¤=8Pæ3¿ß¨3‘¿ ƒÉ~&s¤] _êœïoŠ8õœ eÏo.=޾ÅÞIµ¥ãK¥ŠËÙ> õOü8C󜉔º Õód`&²2Ð6Æçd=ã Ís&P¸,4P2Áo“ƒ!³ UÉõŒ34Ï™xAq§èÈw z»i aíƒÑ2¤*ëéŸxÒ@“¨Ò|@É윊?ÅXûDãñww÷‚Ú‹Š*Ítdòù¡ÏÏ$®‘L%EKH”+*Ítfö÷aI&fIrÙ9Èå@íEE•æš1ˆÁÖ»<Ì.ë©á•_)^TŽ*Í'ÿÀT|?Ž,¦Ø†ÊD–÷7ý@£JóýM1$ ^7¦£Í-k­ï‚þ ШÒ|SÍO'愺 Ú|œS–ô7@£J³6ðÄVf ‰ÍÛ®åŽ]ðqú_€Ö Þ°§ä4i­¯›ìj·i€jÖ÷ó'^½¡åÔ.hå,0­Í±äÌžéŽz@ü¹õ†:*.4M57ûSìBœÖõ€r¥#¨7ÐÎSJ+½­dûœ ÿöwQÂ2øó ê ´b†7S ‹ ÚaËO|¢szVÊà_A½á>>/gm¡Jñ`èš¿à‡Î„So8 ýÇàÞ/‘Íœ®¨µæA‘K3áÔè"ûxB…º›nëÊr{ZìÕ¯)8¿¿+m9˜s|]òÍçi¥ÿ?ã<•G½¡Agâwþ(·iÆyZð¡kͬ¡”ñ‚b¥5¨7Ð1ᚆMp–…‰ZþÅÉ&ŠmZZPo8  —«ÐãÛr´ôY)ÔZé1é•l3g Õ¹ =ÌvÚpÖDàeEÿäIzˈIïí¸êÑï„™‰Yî6.™Pš¸[)ö>8>é5Ђ FC«ÙdŸ•žöñô Hz¡3á“^e¿¸ ´:Oz^ަ}ïJ‘ôÖ‹]r:xû°Ð1}~kvS[ñ?â‹]­ÖXì: ˜ ÁX¸Ît¢ˆ°å¶=ÕqARµÅb—òäÇ¥™k˜Euz7Ñr¥=»(å` v*¦‚Çâמ5¿ R#» ·èŽœ¢ÙDl1þi]GbÇ­”!5c±‹ -CTÑãmlެÃüxw•õ‚âsoŽ¿¿ cÀó`*ö¬Ö8ìãñ8Þ7AŠ\ª¥Xì: Ú0ýyëyÄÔÍúIÅîoŠ“¿I,v´£”=;š±iއƒf"©¯Tƒ¿å@ÀܵjÊ:>]6ojÜ ö¥ÔýüçO<³µ˜ ÅÕêìé¡áTJ—mfþeð·˜tñjê}µYžôv&öx£åJ[ `hK bàKݺiÉ[ù8]–Çeð·˜g¥¨ç´±ˆùñLfI~¥þ6ó€výŒÓXÕÜŠ]æ)³ó Šàÿy©/ó>¾À8q<>¼f”\F:§ ú¿ü+0ÏJ+û¦øF™œ¶ .ØvÝù¶[).=E&‚4;¤Q6ÏÝÛ˜g5Í®:ôO<¡u‰Le )ëp|µ©M«ŸšwÜÎ4/(ã´çÈD0PŽtJ?› ¡5xJí Ê•–ÈD8 ÐçG/º¥õ)Tóèè~¥ŒÓ^#Á@¦¬Ñz¨Ëâ’×I«¨áÕ&Bë-2呚æ.Ì\÷ÚíT*gnß:½qÚ{d"´5}CsÀÛxÚ<Ô±x*îèkaO&Â}Qãª47³ÉL¦àÒò@ÁDhNg‚•^aC¦RRºçg款šÛýfÿ‰o4§3A3»[Pi¶N„ÉlYH5Ê8u:/hK¨qB겓/`µÇôÏE¿Qýi4§3AyãCµÇFBíÈ;VäI(ãÔéL„ÇÝ+¶%Y9*¸&]2ýJ§Ô™?€b.:I¶ûŸÍîø‰Ãm×ç@‘Ÿ‚3᛺ Û“§§gÒ_ñß=ßîA5¤¨3ñ]iRƒ|AéUêΞÀ"øÎ«’bý(¼(4¹î‹²¤÷‚ˆþÉSéu:tTLÌâ/f Ð¬J© Úx+½Ngâm)­KhoÕŠ\füaŠÂ{:å‚r¥+Vz ” ·è£1c¥NãL®ä@§Ngâóøb#=dOùæC JÏJ§S‚&Ân4!—ZLÏáÆh¿©Qñ$»—ý'^¡ÍË”}A ôŠ(´P‹}ð¬‚f"€Ûû÷€òEÍä ÎJÁ1ÔÖ®~£Æq\¸eÖ݃r¥5È\Ð ¦uÏM§Ï^µœqÆ j/ê2eÃãOPï“^x« ÕYð(ÍåWÊu™²a¥fÁu¹ÚE"¯öœV»yâ@ñáƒ7‡îýŠ­ô“L,ÐÛßd¤vvfíÞ>ö>8úá‹+Eë~´µšÕ“}«lLüùMYìšë»"hc†Òñ´Œ¤<]8ùç3¿=Ð{A+†Yà#q~SIÏä!öþ|ØÛ’8Ë#ËVŠŽì]‡Sº~•¯“þ‰Ÿåi+ÇYž J‰¤’æsbŠq|†åŽZ%Îòè {XåT|øŠšKÃvC» \i³< §ä™ÆÂË[9ƒ—ŃrG­gy ýâãe¹ö­2‚ûxVеzœå1ЩIÔ„ÞÂ9¤Ky?€{ZÛbG­gy :½¥ö&=§×”}¾:P\yVt»ÙiÜÿ˜ÉÉù«‰¨Ö™m„‹î?ñn7mE·› J­N%#Õ+©û¶åvŸê‚"¤zŠn7ʾ©Ù»5Qû7úÞQô@£ÛͰw¥Js1^´+ô?n7çñ¥*ª Zçž7§«ln·;¤îJÿ Ðèvs@s&YJVáM×%ÜêSô_n7´b¥lÈ$K&̈NŒßïß>´ž¢ÛÍ…ÈÊÄ Lv’w¥»þìA³‚F·›Jj\üZÎoE´ö<¦ý•F·› Õû9žŸa+Sh~Vú+n7÷ñQìª:_Ó­a™ú]>ƒ—ô¿xQ+Neëœu –ü¹E[WÒ&eÇNÏþÄOuIqêÈ@Që› nÆéfGd¿iO”ÛT$N´É÷Ò%í¸ˆ¤Ç†x—X(WšãÔ‘­taÈ\X™hæk¾Ž½›1.(·©”8ud+EµP·zÆÁó#²67Ÿö®Ûœ‰}A»ÀÙºÒ1˜÷¨e+e‰~'…Û:AUüPþ!h¡L3I5º‡öÊèßíqꈠKÏôŸ[™Vz×Tk2KRÞ£¶Ë† ©ßqêHAbùë Ÿ[=j±ÿrjÓn¥~èLø©#{|ÐŽ3ùmSó9MVFžÕ¯T§Žº¬8uDйµ ŠéVœµ> oœbêhj††L¶Î™¶ŽæÀÚ!µ0´¬ß¿­öŸø†LÏ2T[»‹VO§ÂÛ_Çà®çi~2=çØ!h©gA§·•ñš¦ ï¨ûìt \i‰ ™Š5ú´É²:ê>µ.(·i®±!c ZÂFéA«ç¯“€Én¦[)¶in±!Cо/ß?J5ñ™®|¶°ÙBÛœ‰¿¿ sÿÜ?;jéò oçÌ6C+ÏoŠm ΄oÈtmÙ*[+$«ó´:ÍóhùÇGCfÝBCÆBªUÈÁè Ns\7ªÜ÷EÂ"øW° ß—P—I¯Òºõ{Ÿ¹h“×Þ5”\ ò^R° ?  
É­2àÇ7í­—×/z”Á_$XPnSèI7e’#­sUÅ(WšƒùÜs-N6›‡¶ Åñ:Þe¹Êà/%6rõ$,ûF™»­ô÷kZ4ë«Oó —›ÚÀîëiá¤A3;PûM[l(Äûû¦}”ŽëµÍv \i̓Š·/W³Ö»íý%G÷ü‚Úo:bó€ …âU³¾bV$2ÍéâZ;»•â@)36úóÅ[Î,ݬr|9]îAq €3á›öø6y&´¶±i#X)iù·æÁηBóÀ¿$š§í”.õ¹>Îdö/ ̓­Í÷~³Î2éJ¿èyt$ŸÓj7cþäÙû5ö~óÊדvÙ³‘!c,$n‚¤ŸèöîýZ>{¿yµÆ5•Ð5eIXR®l‘åJëgï[—Gx-°q„÷E¹M©¦eœÖöÉù»uÎ&ézôtwúÑPý÷Ÿ<9ퟜߚئjâ ªlÏ¡â™p͵ßt|rþÓ™èÅ-”»-Ñ}˜ˆFts \éüäü6t¥9d‚1[k$]mràUеßt}r~ëGiaMöM'õOò¯#LÏQI¿Rìý–>9ÿéò@ >¼hÌ«¿ëhv9Pìý&ŸœßÞþ®9`F[«þ†d ÚfÒ»is~p&~ÿ´©7§*M¨\H¹÷ )äüàLü÷ ÚµŽóŒiFµËæF"˜®[)rþV?9¿¬N1sÖÒõ#JÐ~гn¥ÈùáÍñäüö›¦¤ƒ‡ìòH:²þµµø=¨}íÓÊ×åH®²÷”:ÒÏIÿÄ÷£zûô£ h ›¼BYªwž†»1.(·iûô£ ’ Ö"TÏÓ·µáк JÀlyÞ¹®´ Þ¹9^PnÓþéGÇ×# JòÇ5ÄÒs«¦ï+\~ûQ½ßoÔÿþ·²:ó´BÚ3éKaÚñËë¡h4ì?ÁóEõû  %Á=LOç ã ¥òœ2K² ÊÕï7*€BÛzòQ•êŸèe^ñ ÖhÈ€zßÎÛ/\éRøDþùDóEu$hà¡ôDº!O.UÚûøøDñùD[çL°RmõÕ‰oÔž»ÆœÉ–&Õ÷ÔÝ îÊm:K¤6•3¦¿JPѤ—ÚÔ¿x«ô+Å65R›Ê¹Ó•b<Ú’^}\÷›–õþ¦8ùÁ™ðÔ¦Ú4øÙŒ…}¤ªjâÓLP½œ9P ©Ù#µÉ~Ó®ªYͶêøøP½9P\ÍçÕ”ýßÿ*bëò ‚–;G™p;ÙR%.N‹*büü þØâôjÊFЄ Z=ñÂ;ìäGÆ’×jqz5e_PÒ-óá…XÅå´±^P¬t]MÙ ½ðocUµ %Á¨åõë(ãt]MÙðø0R öqƒÏ§ ²†,ó4¿RÄ麚²3¡³iÔÓUñirïš§õŸp&0ö‚‚ƒ6z|ƒb@Ù>|sÇ`ŠZõއ…LJҀ,0»J>V$n”I/kTƒ:¿ÿ°Ò®A3!tÚLS.™Z#s©òÆ)jÒàLüþÃJáð5ÎS+v%3¢̬׳RÔ¤Á™ø]¾  ݬJž¡»Hà€ nZÞü§ÇÇoŠ5X9³>Tæo¼)è±Rèõýa¥­x °7³tLFtÛÖƒî•p&þý§ßtTëuíßÔÜÁ­ÔÙ¡']Þ8Õ¯éH%ËŠMr‘ˆ‘ñöyß_oÚ½¾Ÿ?ñ’e#å(Yf •#̦»œaC©>§ÔH%J–´Aíh¡ŒjÊ–A;zçÚïž(WZ£d™|øz+&G^Û)ªüëóøÿ!h‹’eöø¨IOÔãès­"Œ‚ö‹RÝ~¥ÿh’eçEÑ0Yk³?ë~«’ !ÕU è‚þ  #J– DTÐÚ°ÄŽ>-tU» zJ4£dY9CŒÁEd ÇÀíñQ¥ìz9s ˆÓ%ËŠuy4øWê8P0`¹éRç Ñá#}Qå‘,ž3ñ‚b›®4°÷©Ò”ðmÒ+úîwë¨ýeœzÎÄÊä‘5”N¥–ýá:Q Môù€r¥9J–(Ž ûš }O½Š¤ˆ ™(dÙðœ‰ø›f”8à ݄ ß ^Õ–Ókä])âÔs&оçÞw­o‚Ž_c‚¶¿\ºÒô¼(Æ©´(Yv@‡–AwÐâoÚŠ*dÙ 7Gþ€ŽÝ(–䯛¿3“Þ®Äúø8ùË#Y¶/Çñ<­¾.µp±×oÕ´±uò‹Ö±ú'Ïy*ósž¨¦ ?fëÌù™Ÿê ’ºËƒZœ®ÏyZßÞIS·$×ÅéD&ýJ@n‰øxžšXåTw?Ñ…â¢ZAœâ\]* P^ È‘åsžÚ â†Éb…´JA€èWŠ8ÍùsžV;O¨2¹£"{ã6åoÛUÿ´¼#—Ïyj^°;àCß 3"ÏÛ¯é}QˆÓ\?çiµ ½òðù;ê ®‘›B¨G_-/¨Æiþ¨‹•HXr0ïpßr® òÏŸø[ôÈu1EbR¢RˆZ§8iºR{ŠeœæºX±v\Æ)…OkÐÂO´ð¹U'=(WúQ3P(´/ó¨ ÿÞäÞ”ÕÌÏJ-N?êbšôK?Ñåé`PhÒwÒtªÁù•"NËG]ìþ¦0ø„75ŽxEB‹³i¡û‚ê-z”ºØyûH& %Ô—å©ZaI»²ñ€jH•º˜=>´®ÞP‡RJéºåÔþ(‚ßq&,øZ£†Äš;ˆ£JçÿEi}Qxüþž§Ž3A1ÇG3JóâüšâE Óóþž§Ž3@—öØâ£¡w’YíQ!PèL\P®´ÎSÕ„|QhZ¸zÓùÅ ZUªô€2Ng"€Bã'ç/WnB¹¿Xæß 9½+eœÎÏyJЮߑ…€ÀÖ!££´_ÔxVÊó´¬Ïyj H{RBzNíŽÅ·Ÿ$”ú{žBgâ9O»ª](sF^u”± ç¾ûý=Og‚ CŤõ 0„©£Aã„̼´ z.5”ñ(a Ç™ »!ÖÈV•,,â«*Gßx”°†ãLЉf,Øâª'MúÉ‘(Õ*(WZ£ANmpSfŠ.ö™ÕžýÜ û"è@§µE%,[é@¡UäA:CåÉ¿Gõ7ÏoÊ8­=*aÙJ—¨)Ø3SG’ùéf8oЦ’eqZGTÂ"èÔÑô\§VÆ-zOÑoŸÅ®ñ(a èLüþ‚.åk¢&GE´'¦;‚ÕÒûø¿º¢–‚n ‚eû¶&Q‡gœô¼tl×øö¡„5Zú¿©5¢=(ˆE9-;úP“ ±ý'Oð7ùÿ4gÁÕF´ÌPã šdÜMçü-‚š‹ÈèÄEߨaµèÁmú¬”ÁßÊ'ø Gß 7U]¾"Q± „‰ù«Ÿà·ßiÏÀãC`µ=£Ž(mú])‚¿µOðO«ó£qˆJ/æ„PÜâÀ!n|ò€"ø[ÿÿ4 PXûùM±ÒÌÊ”÷E!øÁ™øýÛ¶ãg›‚ß¶hBe÷ü"L%k@üðæøïtêèÏ?Ê–eLl[?ŸÃñ®03Cð¯H/ËKêÎ[w# ðsR¸M3n'롉ž"MÜ@WeY‚Ê$³$øðeÄ«(Õé‚2ø»Dš8AÛPIˆU©\Ö®T 'C5NR롉ž#MÜ@!Y¶Z‡È˜QDAs(üDWÒ롉^"MÜ@‘ELŽòŠÖãû/÷}ì¨õÐÄG¯‘&n sbïÃç3™Zc¢PÝda¦< þÞ"Mœ 憲ûââªæð7ÆéêïJüàLxš8Aû~í”}ß÷Þœ¼'‡ð×êû›jI~³×Mü¼}ˆ«¡t6Í‘i¹=þT¡ZªiãL0—ªìœ*¶aìÔœÛÇ ‡tÓ[ôÏŸ<é¹ãLPÜøæ¢Õa5ññŸ“®J”Áï8/h-p¬ÁBîý†"‚쯌ÅJg"€¢³º¨#™¦k¦ŸTCÅ*(ƒßq&âãc‰h¡ÐøB‰\¿h&¢¿©ÞNîJüŽ3W vG¢ª6Ù5:ñö¥Ìg¥ þQcz~@Áî€èLvJQL/Pk¼ HÏG‹éùyQH¢^fFÅÏ /iR´Áí@5NÎOþÊnä\ ŠB^¬xÄR± o_Sy3§3ñ‚vy2;»‰CG$~QeCß¾v$¨Å錊N4¸qXMøÐ«$è7z dhË]P®tÅ … cKÔüœRi¶+…¨£<è™èÉŸKzVÊ8u:a¥[vn¯$æ§¹°ƒ>Îxûòf(Sb†BЉþþÏw ñ/jà·ÍêÄê@§3Ç … ?ŸæíÖM€Ø©¿5 ™DðµRòf(àLüŽ ÂùÌÝŠPPdV§*9ÀDÈ)=!Å eÖ˜¡hSvGÜP.…_QÖøÁ?æ³Rf(óÃ=¯Ù4ýœ·)ËǼï[ð/íðÖ×ånÌ÷ü€jcx-dÒ‹Æ^üàí^:@—eðÏ÷Ü@÷»Øw ×p#÷0ç/Úèã«×Q}]î¶ŠD ¶èn¨ï÷…Ç—Þ=»£2AëªÕY_—»1?Üs‚ZÚ“á§ÀÖQ9D!œ§‡ôër7Ö‡{n+ ¿)òìÙü¦Š< þõážT=H÷V|_8Pâ´#øsÆŽz]îöLQ ¶tfꛑ l~SÈ´I†z ô7¤üëÃ='èÚãÿ{ŠwÓ9)_nûTûSn¥þõážÛãgÑ*üR“k *¾*ËéWªÄ¶±>Üóóø M.¸f£‰œö@ó)poÐõ>>Zñë£)[ÙäÊø1Á¿hIã6â‚V¤?â5eÇúhÊôg Ê’gYŽ‘Î’¼]x;^Ty4eÇúhÊÚJwM¬?|}Z©äzWúO€~4eÏJqœÐYÊ` îùT¬Ÿ“=«îAµ$?ÓGSÖ@ák.r½èO¤ÉæVcafå÷EiI~~½9ª™§i9r 8=ÎÎjÝ¢ñö•ºÿćÔüzs(:†«Ã@ Þ4ªº ´{P„ÔüzsÔcH§/ªBU¾ÚD>^\§ò€r¥oUÑ<Œ¿ÝÊÂ[W-u\$žÇ·úxsPLq¢¿1ÀH‡tfHmÆÞ•2¤>ÞŠôvÁ~$ÉÛTÛÆ”!õñæ°µÀ™(øD“mÜšu#qßÏ©ú†”÷æ@×¼6¶sÒž†üœ‘`Êë7½7Ç 
ºÇáÅM­I½ßs¥…DYÊD¨¯wÜôÞèH¸E/Š7iB-Ö:*H*dè0K}½ã¦÷æxWºMþvž“è׈°qHƬ uºª¯wÜôÞïJiV:¡Y)ñ™êbÎ_ÞÇGHI‹D ‚Ωe¹Ì›$åe×g~Ñ—9ÿûøÿhD íàL`Þfì€êíÍ5™xW "ÆvÇDŒJï¸=»%{¨CK)¢a ™ºðk*ëEœÎH¿­ìò4hw æ5ŠÆ©ÇÈ®M®Ÿ?ñôÛ)+Òo/(æø x( <ùyó“-?á@§9Eú-AÇf1íZ¼M&œ¬%[7ß*ijùpA±Ò,‘~{@«ªŒÀÛaÂŒY2Ý™Á_Q™èývæé·öø¸•%ûœh~*‰¹‰'»RÄi.‘~{@;æøàŒžlZ¼›Ú}¬Î Š8Í5ÒoÏ‹êúø0ߢø}šÝ޾JÐ÷E!Ns‹ôÛ³RA‚6Ñæ Gìbß”]ž¤ìcªqš{¤ß(?|3ÁòeŽÁò±T2úó›‚~;óˆôÛóø~£DÈA›ï‹ú•ÎH¿=+-x|hÛJYßÞ§–Zè8Т +Òo ´à<ÍØQ"—ªõnV%Çû›*ýv‚3ñß?¬´iÕ­A3ŠÞH%¯ìTW ÎÄ¿ÿ°ÒнŸ@‘Eÿ=REqž¦‰Rgè·;i ’uðä×ßT úó՜ؖíéïÂüçO¼$Ä,gkU½Ð¢.¥Li6ArËÄ!=IˆY>Îֺɂ[¶*P+–βqeeb*Wò‚r¥gk‚N|¢ÁOÞ° s2驌™ÊSª|œ­m¥ªµ¯´Zìg^³¾Š^t†Æ])N©òq¶®è› >|}3waFÇøÒqf(ˆ‚¬Ä ŠSª|œ­ 4é‹jkBJ눲ºU&ðÝOãyQüš–³õݾ ›'¦40G²M©ëÏÿÇ4Ç )œRõãl]ÙŽ+¶žèË€j/•C»GqGͧÎ?ëÇÙú€ª­­Îî¥L©Ó~S¾}5ü8 ŒÓúq¶6Ð];Ú9?Da*Vî8^õ¹ \éÇÙÚ@wJ pÞÙqªiªT›5Ãv¸ÜPÆiý8[ŸÇŸ G ÝøšnÓ:Y•Ô÷])â´~œ­ :“RFÂe/ŠWó†#0g¥ß^PÄiý8[Ð1Pé…&’ ¶‹5VÐñ¾(Äiý8[hÕŠPÄ¡kõ\:y(ÝjÒc¼ ˆÓîyµÎš«@tá7%™µ&h8PÖCkœõ£{n  ˆ­ŽÏIc.ŽŸyžÂ‘í€2NÛG÷œ jÄ'6UÍ¢Øpàéð¦g¥ŒÓöÑ=?+í(v¡„„Q ),ÉïDKA•‚w@§í£{~~Ób¾û¨ÍŠ ’)+‚ªäzh³}tÏ t,‰1ÀDXäJ’:"ÊGu ¸ð¶îùY)FC;lÞýLY>¶Ù³•ß… oûèžÛ‹ÚcÁ¼¦~M—/Ì3éMNò ~¯3Cº±ÉæåŸqà2-6ƘqZ‰vz÷Ÿ<ß}¯3ñ‚V*a­v=*Dgw¨­+ղܵ8ñ»o ¥NL­Œ¬½=)cÌÌSq5¿ \éŠßýÊf,EÅñ‰ãõñ°žZé= ŒS¯3@¡†ÓZðøÍª’…™´öøîJ§]âw_A÷]GÙre™öuJOéÆî¨|üú¬”ç)8þ»OPIÚˆªM%æ¤kQº±ÈñYíyQüî÷¿û¶Ò¢÷ý²Ù<»–€o†ÆíI³<Ïï~¯±oڬоª½iUl0øLhÚÓÞ¾éì-öM›5cÑŠàžÃÊYŒ~#|aC+½íí›î fè›ÚJ’ á$—–QÁáu´1ýV9P®tľ©V~N`s 5IdÇÛ•ÎÖíí›ny–Ð7=¿©@·†t(N¢KNOJ<þó›Zœ®Ø7µß´ƒÕ)ð6Cl' §:šÞ¢ÛÛ7àLüýP•÷3;»†q!Éœ‘ʤÊÛ7Ý7¶Ð7µÇms”ÊÕ/Ú¸<înZÃãÿhŽ}S]𡍧°î}¼ýÖßUžÇGßtK3fWË6uDF7íZ”•lCWÌþ@Äh¯èG¶ºøá›×˜M2§98•ªÎš·WtŽ#›Bùs ?À›â°A¶‚à@çø8²èÒÿ¢„dT{xã³{×ö €Îñqd3С…™„Ké€x…0-·A6 ·¸•"øÇÇ‘ s¢±I®%úÊ{”1«æüíÝGQ`v)èÏAÚ´Ð=ª¦TÍ´‰ŠÈ‚íÝŒÀÀì2ÐÍj‘}íß¿BОt±ï>i¸*®Ö^Ð9?ŽlºôR:ö‘óóUáÝtœû>+¾8¤_ÐmbêüÍ:g‰„!X²ÔÙû;ʤ³ö\ÎYbß@óÂüþ¤Ó#RIƧ´r»ç”Á?k¬óŸ•â¢}>Ð7]ÌùŒUó Ê•¶Xç? ºD* tH!ª¹Ý/[¢áÝ<(ƒöXç7ЊÌQ8£z.ÕFC­à½ž•"øçˆu~‚8C.zS o'ÖbòÛUi ½—sÎXç7Њñ0XŒ¶ˆÑn³]'óóö™¡Ìëü²œPº«&×eÊ‚9ãA5NWŠ.Ì­ÚÜI¾î¶=Ð>M¶Èø}zúùïÂ<—DfÅÄ!EÿÛ€æáæVBŧ”V%(ãtåèÂl ¨îÍ¡'I—ŽyÓnâÖZßåJKta>¯Éþ¬¹ƒfsÚ|Äãfƒ”qºjta6PX‘N~÷Imš`3åhÌÜ•"NW‹.ÌŠ’Ä[ UÈjô#V‰"mK/(âtõèÂ|VÚé—‰©x^¬£4PsÆ"N׈.ÌíX<6“WuŒÖÕ¦ÜOÉÓƒ"Ngta> øM¡]Õ*…¿–éJR§—'?t&~ÿi¥ óŠ´”䛨ü>úý*äA÷J8ÿý¾¨Ž‘;ÎFöF²à )zp÷Ei.µàÍñï?€f0uï÷ÁÞfJ­äOɳR¸0ïQ•P•lgâ0;å–ƒä!&±c_mOUr¥«’Š)ëYôú§¹>š]‚ÇoOUr¥«’Š9¾ ®dçÛÖ©xsª;Êq&þ×o?Ç1Æ1„±iÁ’s'?ÆþÌ8uœ‰ }bŠUv̧~ƒû¡â_PÆ©ãL¼ ZHYÅõü4-$âA¹Òû  =#}ÎŽ;r|¯z¿¯~”qê8ª‡sa4ש4˜™$V&’ÒîJ§Ð™ø×t,½>ÿÜ÷3Üި̌Ó+®:ýJ§òÑ”m6ƈ"&î¾MòŠ’ßåÕ”]òÑ”5Ю%‰ì§Á@I®ý¦_P{ûMY…ô@õ¼ÃøéçPæ’)±‹»é«)»ª )k ‹œ{$hL{ŽpM6øÝo¯¦ìÊMÙó›Â‡j]0Â\ûxEU•%ß^MÙݳ &ÔÍFî°R„k+ ´—« µ1müü‰7¡^9GêvDUÍž£jŒ®¿(/ª¿iò |Q¹Dj[iM0¡†mfÔ”5P:´¤-e}A¹ÒM¨h—yQø+³­Ød×n„P{Q-šPŸÇt ¥>8½qð„Âm~¥|Q=šPŸU@•¡X£Ú­½}¬\(¶éÏ©L¨ ”mHÑœ‘[!éø‚¿]” ê•g4¡¶ÇÇXø¨ð;¡°‚˜Hu¦0¾Qó1¡^ù3qØl8°cÐT‰§uU†”ÒðKôä'WùL6¹¡¯åz½pÏ7oÿ(âuA§å3qh  ]ƒQ`îK‰2SÁ;.bÛ먽‡«|& ´£»5(¬H™Ê£¯ÐŸg×é.(ã´|& jUc!¤ðùW•Çœ"ëÉÿN®ò™8$èh ^øÐK£Ð‚(¦Ü¶Ûÿí8\å3q¨ òY:uT³N55f´cqj¶ÙqZ>‡šTµ©d¥¢Ù¬y¶ÛI¦G‡:^´wâp×b]  :ueLÜ7jwØŽ¢9•'¶wâpÂ0K7QUðÃA‘mT¼”~BŠ»Àöó'~˜e•J³‚"•„¾"$MCþ/Öç.(ƒ¿~Tš tÂß”–“ŸdûœTshùù‚;P¬´~Tš tψÁý6iMbÊ‚ôJS9  þúQi6P+ÚoZñö÷÷Ž`fC,~¥þúQi&(í—'CƒwS-výÅûãÑç¿ þúQi&èÚâ¿JlƒÛ_ÔyûôªïJüõ£ÒÜÑ9ËûÆùü(Ê §ãÄ/ö·ÖÌ)]Hᮕf‚ª°â¦áè@W­m=º=Õ\î†Q¼E×J3AËî)jãPËÇ©£)_ïú÷1ù»+Õa–U?*Í*:j_7+òçÇ öÄ·ÞZû'÷ ºMëŠWž.vž.è¿Áè«ß4ç/OÌúiÿÉsåi)^y 4Ó/RúF´©µÍ©; Ü¦Mâ•§‹ŸŒ˜‹îfí$æsjz[‹ä‚r¥9^y :J/Çè«¿¶îRò^yZ‰W{ü4AäG‚fNÁf d¿é¾£º•b›¶¯<ÝôOõvÒ¡¯XW±·oÆÞ ØíÅ»RlÓÖ¢·qçt (°9mf=n2­™9UÑ?ñÞÆ«õèml B ½è71ól–yæÛ¨½ý½ µ¾ÑáÀ>Ûõ7];†Ÿ”+ÑÛØ@gÃ! 
'ÖIÏØBy­=öío.¨½ý½ 4#“ó57-YÚgŠY> ¿R¼ýž¢·±v¨aÖ¼5§Ï«/Š¿í®Ì8P¼ý.ÑÛø¬´»#¿]õ†É£i~£òãm¼¹a”©So€Ñß\D2O©>Ì4=ëŸøQ¦ÕKe"h‡z<=¸Ûäã[²;RûS}AR½ÆQ¦þvy@äïÝ­ið5è|±ë”+mq”É@YBBA¿O¾˜B'Öi>’zž¾Óq«÷8Êd m‘!,Fo·$WS¾¸]Ov+eH8Êd (IŒEýS‹Sž££›Ýkõ ÊìZàLøQ&…sÄL,wpe ×õiæ¾éÕoT_‘ÜÒ«ïïÞ¡×g®ö´Ê5?Þ}Îî?ñä–5R$·(L9.)½ÓÛмâ»9±oAÓÊÉ-Ú­ … Úf ß¾Y’nƳåJs$·ÐÚ\µg¥þ8³X†‚Lº>ä–5J$·t ˆ‰Kæm8ëñ5ý‹­3·R„Ô¨‘ÜBе 40 4”ÖëÅ9²¹‡?Sj´HnQÐM·VrKÞe%_—7N™õõçíó”‚7Çï/h™:ÉU‹²Š­¬ðíïYÓ&ŠTÞÿý‚j^¶/Ý~|õiñ^œ[1¾¼>¤@nYcÆþ~ç dýi°–ü×$Ý<9÷Ÿøþþ+ö÷ûis¸‰àÞLc&›)“‹o÷wŽoÍûû¶ÒV R­+™ùè®WØ9Jï4Š•N‰ý}E‘hÀh$K% ^¨3©³‘ýã[3ÇþþùM m‰hÃéñ5m(Òª’‹[)‚–Øß'¨ÊïÞ N@;@2ÏÑFO.íòôwŽoÍûû ™¦Ê>~aNÕùϤ™ô;Ç·À™ðýý þéÊlöXîÔI:_þª‡ôì±Ç×­Îô¼£†RŽY?Íl$½G½ kŽØã3Pt úÅc˜ •Ùºò›•6_¡¿ kÎØãë6r—òýi§¹/šs ÛÈI íý]sÅß5) ª¿_I]KLû‚2NWŠ=>Å@Ø`ÿÐ èÅr~‹Û=GÓ_е$öø:Q6˜paV™þ¿(Ô‹^9r[qºrìñhÓm.ØòY:.ݬri£QžßqJoŽü]¨s¬ OÊ4_ÚjIïRìñ­‹²}X.…œìãLçÀ&·ÍÁKµþÉS”]-e(ÆâXñ ‚¬ôëq¼ÀÞoQvõX”5Ph]OÈôB‡KYÖ:b‰~> \éˆEYÅtÀPï;ZEÅœXåµµÁ}@-Ng,ÊèÐXP1ñŠbLhWÇßÔâtÅ¢,AÌÓ&Êæ»+•c«Ñ=èŽÓ’RŠEÙΡ«¥®¡%i )³Éµ‹U¯Åó ú7@%e ªv]»Ì£_•"ò>g j}ÃeËþÆ„¢,AÓêL{ †­ÓÃC±²‡ÖÂhVÐÇnú4VǤ¥²å {'ͺ‘f¥§oú±›’œÎDÅÑ7à¢Õ©*–Í=L …X”Ï@5øKr:/h‡QÆ€—Jg‡÷¸ÛZ#Qeà.(WÚãØ­”÷}øgtˆn`àâ/6ö•€™=è:âØÍy|eK㕇]žùë\}ì?âVúO€Î8vc  ý(ÈßµA5ã ±íQæûøÿBH­8vsV îL&›\fK”ͪ$½ Rðæøû»Ò24íé(÷×RÓcH'é’]èߺRù¨4÷eI/F¸À’_=ËÄ)Þ¾³üü‰»ñ•$•fe*I/•B©Rë› 8ºÃ.(ãT>*ÍݦŽà!jÓ5Ië·±Š;P®ô£Òl …ŸÜV´4Oä–¡ä–Ê8•J³=>rÈ1ñÓKÐê:•4 ©ù¬q*•f‚Î?¾Ò'åc?Œ“¿¶÷7Å!-•f‚®¬uþ´5K¢ÇÖ„µæ–yÊ8PÒòQiîhr¥¤£¡JcÝÿh´q·b—õüžßTo|eÿe¼ñq¥þž˜iYìç×S’ïgöÌÅé4˜²#yu1΃ÏÄg:ÎÖÔ›ÚÛôçOS¶¤üaÊtЋßæ‰>72çrσÊàϦ¬¢eÆTR…L®v–Žàõ”+ý0e í8:³¬ {‚j–[/ÏeðçSö€ U*a(@»—C„˜…Š­¸•"øó‡)KÐÙ1p)~ê ãDŠWÁ‰õ‚"øó‡)k ‚u•æöí6ÆHúõ 8Oó‡)KPšþ$û(—é¥õ(úŸzxü¿õäϦ¬ Ð5 ÛG¢ð0–|6‘õ€"íɦ¬âÌ[‚LÐÚq»1%Ì7N‘ö”SvpŽO24» 8P!ú;h÷’©44•2rA±Mˇ);¬‰tO”;Pºæ“oŸ’ºi¾/J™²%•SvˆÕ¤ÒsÒL%J‰c7JpßânÑ%•SÖ@a:>ÑYõz’ˆ vʽ_>LÙqôOál=³SjmLÙó¢H¿UþÍhˆÓÁßÒüM!­wA±÷ˇ){^’‰ÚÙ>MOºe7ÇwA±÷ˇ)KP¦{sMÒ¦ Ø6Ì*©ödKÙ&¸åY)â”:òMtMÃ(Ó±!¶üT8/õ‚â"AoŽü‡E)a´úÊlnpý*,½8P„Ôˆ‰óøèœaˆg0W½Sçl 5ñ Šàw:ìškÇA"3ST•"@£&ÂÏŸ8zCINgâ N ›Ía7¼õ(ae5? 
ŒS§3@¡¢¾0º½:Æl¨Ø’¨2&˜Œ½ X©Ó™xA'f`(¥¿àu›L[®TW{@§Ng"¬´ ÕUÀÀ‡G,mÿÿ~SÆ)¼9þõ‡•6 q†Ô ýM‹©‹Qf£< ÿÁyÚjTolr 4Ånmdt—G±­â»_½zCI­Eõ]jxüJÍ.:³˜jSÃR½zCI­Gõ‚Î å–l´¤)r'b÷¨¨<+µ·?¢zƒ6Íw’aãñ%!((s= öögTo0PÌÄ,AFÙ&uÐ(_i]©ª…tWÊ·¿¢zÃ`/©² ìºñ®§¡\ܹÒÒíEÉŠ·ßSTo ¨ê¦«ö1ÔÄqô·OÛLu¾r¥KTo0Ð]Ç¢ÀÒNþ*O%ŽÙRÿT…\Hákê8dɶŽj¦è1 P4 ¥%)$"öŸ¸Éƒ’g"€ò u uÐånšTò ˜”qê8q¥&_Pá W Va:½”‚Â6m~ò $Ç™ˆ+…i¦q¥ük{œ®VyVÊ8uœ‰ŠòÖÈ}yJ­£Ø–(Už•"NûÇ5t°!# ö®=Ùþ"ÿÂéô‹Nï?yÒóþq Öä¢ò'Ì)hEÚ©4ÀÃZ¦NŽþ¦çýãj+­x´P*SI~NvV(ýMÏÇÇ5” š< ®_zUcÊD+&ô¥pìoz>>®¡çñ…î¶‚DCà)5p;éoz>>®¡÷EÑ?Jð‰FïÄ´:·iVÊe|\C/hÂ-š²ï¬I›ª(AÃoŠe|\Cïããí/ŽG£Í1¦ŸàÌïþ¦çãòúþ÷¿ñ5eïD‰µûEaö %¤ÑM !;¼Ÿ?Á3NÇåõÐNÉ2¨m°Ë3ùö;„k2æ÷(ãt\^ß :щôºV£š8–:tQ2ÊÇ”+½¼¾¸RÔú*º‘H×Ò`ñ ã…igÅZœ^^_Xi…t T+ÖhÌdºˆà·Í ¹ÔY)â:šž¿ Kt¥Ú­Þ_¬õžRB`jæWŠô|^×Ððøûk,w>—iIè8ùs / éùWgbPoé·YpÓ]5”5n•'mw =ú¦ç¡”ôÕ™0Щ΂.ÏÏÛOGQH?+€*­w@R_ ‚N=š ý X9«¶,@'’‰éy(%}u& jÂßÔ®‚ùUÁUídÊúêLhQ­Îœ(0ئ—}_¯RN°_)Cê£3¡ AÆMWÅR³&™¬PA}[<(޾¯Î„Nåó×-øºZÔ¥–iu¡ìQr@™K}u&*E›±:&µÝÞ˜öЇOpá-‡ÃŒ¾ò„ª(dQÐÒ4­Î M•8×?yZGëãqh èðfjX€ƒ-É´ºxžN5ú¡u´>‡:Õó@‹LÚçNÐíáÞG&›òPFh­ÇáE9’BCYG’(Å’g®íY)ãt}< :EÇ ܧµ=£ÂЬïC¥Y:~¥ˆÓõñ8$èK°eÈöá}ëê¨%§çE1N×ÇãpX—gªç€5‰@¹%SB?´´w¥h­Ç¡­T4¤Z†¸(Z^b×ÇÔ¹R|NBëh}<OœjÙ¼L°àn{Tï©Ó¯D§¨I¯ËrÓfy2–(6‚óÔ4º4´8þÄ—å$¥X–3P–|Õ4•´)µTq®L?YŸxP¿$‰e9mjÉRÀ@œ$I Añßµ{P®4DzÜY)~S!±ÉÄ,N­I•Z‡ýAK,Ëèþ7­t±žž%£RTU;w¥ÿhe¹ó¢N)±TÏM¥™âªT¿û΄/ËhÇÞÕÂï2uêJAíãò®T¿û’z,Ëtf„òÓu®æë6 5•LèßX鈅™i5éƒOäü‚Oôb ‰IE.*^ñó¯øÂŒ¤ 3*ʘ)´wµº=oÀ¬Æ_ÔâtÅÂŒvý6WzqâŠ*bqÊ´úü+• 3:t|³B_jRî(ÑGB¯“”q* 3ç7Uú þÍsR¬Ë{.ëùM§’ca† kè¯W È2ë39í¹(S\_дĠAg׬¯:±ÐEM“’e<ùsQzÃ}|„”ÔX˜1ЪD)ÓÞÐis0ûÓ™\RšLìt;$½3ÛѧüðŽñÔ»>|ærWxžêÛÏOÒ+ÒcÒ{Aõ»?àÁ\JJ/×*WM©ºeœÊˆI¯v]i­ ßÂQæH?wÔ¥²š©8P®tƤ×@+†"“’ZÇìüð™žt¥HµxP‹Ó“Þ*x|ú›BW²Šý¦~q+Eœæ“^‚Î ¥ÖEãD˜¨4ÊjûšÎ÷7EœÂ›Ã'½]S”>`ÒN¯tº‡M(´ë7Î?>â4ç˜ôÚJ ÔÿPà§Å£X7r@¿§ÀœÂjœæ‡­'Kò:ðý:·U¬ÉÕ‹§§Å[É5[hÆJ§ÔË?= ´'ÊjùxA§¹Åakšî5rí¥™Fg²ÏÉ|@¹Ò‡­ ´ãÊÙë±úé•hDu¼8 ŒÓ<â°õy|u`o…ã|z®ˆõøèŸUÖ­”q:ã°õyQ˜ g Jse?ª²Ë3×ó¢,NW¶ž¦¨†<-cê£öB+²T' ˆë]©&½»Å†­ tizÞ1ËÚÌ~(öÍ}˪ÃÖE 3ñß/è,Z—¨˜gK3z–:UµÙ­ô¿ÍqØš md„Æ3´ä;¯<ÅÜsJÃi° hó‹©ŠRU\Í,ýJ±Mˇ{>mMÅÔUgâfdÑÞ•½>%xPlÓúážO´Žê.Zä­¢¿BÖ_! ÅÕ 3E;¼Û´~¸ç-;kÌ»(«7Ÿ¢_jØs†Þùü¦JÂÚŒÝÀ=?+ýÉ2óÖºÖúLÖNJÎÌùLTJËëÕà¯îù´.ªŠô78Ð%ít$ö%_ÿüÉs7­îù4›·¦—³‰YV4c…ýR#dä¬fé”Á_?Üs[©j\ýCÈTÖ$4ö¦1NxP®ôÃ=? 
EKHœŽk©mãYPýpÏ ´¨Ág¦Ä\¢ì» ~³ÔYêûøþúážh×¥¥eã AãyÊL%ónïÝ´~¸çg¥(ËU€&­%ˆñù-tÀ”u RíÃ=Ÿ6Ȧ…îR”¾¹àüpBÊ’Þ®Ã, Z»ÜóÿûßíæNë ÝsäüüM™§²÷ó'øcÆi»Üó´õLÍ’zg:EŒˆARG*i ŒÓv¹ça¥E­HUtw ᔆEª~½ñ€r¥—{@«¶"Ô½CË5¤„• þ ¢³=”qÚ.÷<<þž~T–z'lr™Å#dŠïJ§írÏhY:œ’ÿTz Ž@•¯÷ 8¤Ûe!ÐÍ•Ô~Tƒ¢0î¦ÕLþHnÁ!}@RóôøâJÑ<€ªvcÛx™áÇ)x—Á¿ýöŠÎÄx}ÃVjíd}'?t&~ÿa¥û\:Û´A¶/MêógòQæxC '¿Y_Uݰí¥Ã,M)®V9;rÚcý(‚NŒTè×,i¸GŠSj>ý(Ÿ~Ôdß)t/¸HL´âÍÙš†É£Lq:>ý(‚Êþ1e hòÿ¨œŽa÷Õžùô£d|úQÍY/æ@Ðn‡ c~Ñ•åãùÄ)éá0ÿ_¥Zã¨èïSS¶a›.~NðY‘†ÛÉâ3ø§WÀ|@›¸æ^[½œmi:Òv¾^Pÿô ˜ïJõ"!ÈË»°y0LFÛhâò€r¥^ó- ~ÑÚæh“ÂL„2eÍéªzPÿüÜ£E«v͘†*²ØhˆTúðêÑ·Bo~îQš,®t+5Ž3,¦’‰uÔ¥mŽz|ós2Ð=P¤Y•é"ÂbW¢ƒ ^Õ(Wú¹GhS½¾LMYŒ³§ež±üÍúVèñÍÏ=ꀂΠ9´6Êz@9Í¡³=n¥8Pæçu^”­A(ÍãÃK#Úüþ¦¸GÍÏ=ê¾(¬ÁOIÝÉmjŽW=¬÷¨õ¹G”†ÉÄnj8¥a/Š«dQÀ@Þ>´ZÊdʳ¬_ÓÇ>«d™Qà€ ‚_·R]ýއýÅ}J™)Ê·¿¢(Àyüòµ^ôDÆR7…v3IÏãó@g‹hïíÚÚÖ)(u ƒr ”±ô¾¿û¬’¡3ñû+¥Wn¦,ÆÂ«ý¦×¯o=öY%Cgâ÷V:P”‚ß”NWÕÄ€([¸'~<(VZ¢(€Å)®‘rmP`é€RWrWT/¼ÞÿþèP% 1¼¶¨†SMµIÊUl[}V٠¸÷Ñæ¨èEÿ$ *±yãn›8, }Ôý'ÏÞÏý³÷ Úõ¾Ÿ™5غ‰pêÈ.iêu|A¹÷óøìýf5é—ÊUïi\4¥å8P®t~ö~³Þ‰‚B§¾‘Û“Ž&Aë³Rîý¼>{Ÿ?q‹†dËèï[†rÒäüíÝû%}ö>W ¢€!¬: Ž1#™ÔfåL\Pì}p&ž½o¿©àñµ€Ø<¸Kö˜õµwï—üÙûšR ¨ôsaÎd!¿o{:¿ÿ´R-ÉkV®æbƒl2™Ÿ†ß{:¿ÿ°ÒFÍ\$‚ÿ<¾‰”Š•¶ÏÞgÛ˜qÚ©ÒlnáÖæàOëü{Þÿþ‚Ñ•f؇ X§Ë°ñ°F[wW»qн_ÆgïwóåÑmZR»ŠÝ§uÄ{TBõüçOž½_ægïƒà ¯½¥Ñ=ùÚ–ª|©ʽ_ÖgïwóÔ'Ä[f×°½Ï¬¯ ™èïÞ¯é³÷¹RÜ Ö /ä^‡RÉÌS‡åÞ¯òÙûݤôÔªøÝ‡v‡U&ÌÙz< Üû5ö¾Â/’î‹“ óâì’PìýZ>{ß~SŒo6-GÒ Nahäw¥Øûµ~ö>W ¹û95Nëjãq 5¶¼ŽÝ8PÝQàLüþÃJ1 ½àôØ:ÜéˆÔ_§‘è@±÷¡3ñû+EÓd%añ?aã6ª‹Z_ÅJÇgïwoKô³… {ŽSÊâó¨á¨]öÅÞ¯ó³÷4 èZ¸ðv0ºSkÞ0ÿ§Øûõãµã¶° ¬ÐÆÅ#2éz=c×#UZrûøG(ì³æNjǯ™+S˜8œ®Ö#UZrûøG´©E®’ðí§#Tãë´æç@¹Ò”náš}‘€T›ó±ÍLXÖëÆz¤J•èbçñ1¾ …ö -ÀëqˆÙÖ¤/έ{¿}ü£ tê-:aä¼ÑÛ¸{WLË¥õ¼(îýöñZÖâÔ.@ÁD/•¦…ðÑŽsàz¥Jw6èb* ÓVhÖ ¥ÖU‘Ñ îõJ•æöñ²54AKMØÁeLÊkœ¨&ë•*Ý̺Ð:ZæסٕPèÆo:΋2›7Óù´Žr[±utAQCAÒ‹‚ošGZ/ÍÃH¸  þžbëÈ@÷Ž<ó¦=a,ÜÝ¢±£”(|A±Ò.±u´N‡W/8;ªp¸ôÛ4=  ~Ç™°SêxǡևO4¼¨Ónž{w[ˆ¬üüÉ“L8ÎÄ Z'ÊrnŒ0KÇçÄ–Ô~ÓúI&¸R ±fˆ75di¿hnf¹ \iû$Ëפ3ÄïÛZ8ù›¢¦ÒÒj¿iÿ$öøhà;ÒP•¼‡´ 0#ïoÊÅq&"(ÊrsÒqÚ-.¼&û><(”>?É„Ùâíónša“©Üsoû’ônºÞd¢¯O2±¬†RLïÚfn¡Ù×3v>+E2oŽßò‡8ÕLZh÷RPí¹ŠÂ&Y¾Ie§8¿ÿ´Rñ-¤’, ªO$ Xiþ$R`v”¨Ö(§†BÔ²-s/(’ p&þý§àOئìFBh!¥ãk† ÅÇ)’‰Qƒ$Ä>ÌXë÷©€˜ìî¤*aï?ñ’y´ q@‡–òÌ( jQvãïøGhýT<(÷þèAÂ@&Ë„³5{|: ø³ã¡:P®tIˆ :0mÚÑ9Ó¢Á_¼ð[íï‚rï$!Îã/­¡ä©'+œ;iò|NvIȯ{¬ q@{o—i©§‚–¯â[)ÚàL8I-¸F llk£&+t3“–”mŽ)!?Ý*} —ú5…i¸ä㾌m«â•ûOžütæŸÐ¥dÁZaôU2ˆÂ-]/N»›^P†Ô,!?5PÕºÞ&šEô‚•ùWêI«ð·åJkÈO(º@¢z}?›Óì]‘®ô¬”!5[ÈOïã+£©×ëC¬fšf ín¥©ÙC~j  ‡f?”ê–-Iáïqô¥(>'s„üÔ@'fBœçí´4qŽBgó¥(òSèL¸üô†”Ò›Ѳm\R4§Táo÷›âä‡Î„ËOºóòŸ³#ﮞféèËÈq /LÏ÷%̓jð¯¨Ò¼3NjÉC¾€Œî Ò@6…vä« çi~s©Uš/¨ÒôÀŒ&¾ÿýóÕ«ùÊà_Q¥Ù@ûÐç¦#iŸ ã Ö;ar±«u”+*ÍT/tŸîÔ˜Ù Bž§H-kò  þUšÏã þV;œX1Íqì]ñÍkέÁ¿¢J³ÕÁ`³5\üÒ²+Oëä§Á¿¢Jóyü¤–¹½ÓŒý}%a=Áÿ¾(äR+ª4ß•j¿5\xŒTªÄì‡yóy@§Q¥ù‚êí¤ƒÓÔu¶©¾¿)vÔŠ*Í7NU ƒh·‘]V{-ÈvšÝ+-)ª4Ÿß´èçd æÕZE£ºmj.MTs©’¢JóYéÂ!QÞÑ&î–öT®´üüÇ\œþ 9Èì»(wÔþDwdŸ ·“ ¡=§ì¹ý'^¾ ¤ä è|Nèž é’fF ɦ;ăbï—Tƒ|Ž¢ì3gB°K=%¤ÎMz@¹Òä (Šf΄ú©Ô“žCL}—/èÚƒ|ÁU÷BÃðà’yj(­8úÊ#_PÒò¸ýc-ˆ6.|÷yÒI8ÄšŸ…½¿›©¯|­t%mÅ¥ode°:ç£ØŽû¹Näçñuï—´‚|Á]©ÎF®¥©µ?«# Îð¶ñ„”îýmZ÷zqîK›9kS7ù€Ê½{Q‰§Ö^ñþçÅYŠHðâ¼ {_gy:2ù”åàt¥#Ì”q*9xqÐÙuïCzÀ×\MXí¹ÚH”+-Á‹ó€<(IŒÊ™³qVоûÕ{q–"5xqP°®~SŒÚó5¬y@qìýê½8K‘¼8(ÆmÉV¡='´8$ øDWïÅùڃ筹ÆmKGßÔ¸=¤ào-Ž!5«s3¨([¤¿)*½¦4Њ]$(ªÚu›>v„? 
sp|P52891|NUP84_YEAST Nucleoporin NUP84 OS=Saccharomyces cerevisiae (strain ATCC 204508 / S288c) OX=559292 GN=NUP84 PE=1 SV=1 MELSPTYQTERFTKFSDTLKEFKIEQNNEQNPIDPFNIIREFRSAAGQLALDLANSGDES NVISSKDWELEARFWHLVELLLVFRNADLDLDEMELHPYNSRGLFEKKLMQDNKQLYQIW IVMVWLKENTYVMERPKNVPTSKWLNSITSGGLKSCDLDFPLRENTNVLDVKDKEEDHIF FKYIYELILAGAIDEALEEAKLSDNISICMILCGIQEYLNPVIDTQIANEFNTQQGIKKH SLWRRTVYSLSQQAGLDPYERAIYSYLSGAIPNQEVLQYSDWESDLHIHLNQILQTEIEN YLLENNQVGTDELILPLPSHALTVQEVLNRVASRHPSESEHPIRVLMASVILDSLPSVIH SSVEMLLDVVKGTEASNDIIDKPYLLRIVTHLAICLDIINPGSVEEVDKSKLITTYISLL KLQGLYENIPIYATFLNESDCLEACSFILSSLEDPQVRKKQIETINFLRLPASNILRRTT QRVFDETEQEYSPSNEISISFDVNNIDMHLIYGVEWLIEGKLYVDAVHSIIALSRRFLLN GRVKALEQFMERNNIGEICKNYELEKIADNISKDENEDQFLEEITQYEHLIKGIREYEEW QKSVSLLSSESNIPTLIEKLQGFSKDTFELIKTFLVDLTSSNFADSADYEILYEIRALYT PFLLMELHKKLVEAAKLLKIPKFISEALAFTSLVANENDKIYLLFQSSGKLKEYLDLVAR TATLSN python-ihm-2.7/test/input/Rpb8.mrc-header000066600000000000000000000020001503573337200203400ustar00rootroot00000000000000
1.1 are considered reliable' 1 3 zDOPE 'Normalized DOPE' zscore global . 2 4 'TSVMod RMSD' 'TSVMod predicted RMSD (MTALL)' distance global . . 5 'TSVMod NO35' 'TSVMod predicted native overlap (MTALL)' 'normalized score' global . . # # loop_ _ma_qa_metric_global.ordinal_id _ma_qa_metric_global.model_id _ma_qa_metric_global.metric_id _ma_qa_metric_global.metric_value 1 1 1 1 2 1 2 1.3764 3 1 3 -0.94 4 1 4 2.102 5 1 5 0.917 # python-ihm-2.7/test/input/modeller_incomplete.cif000066400000000000000000000015551503573337200223170ustar00rootroot00000000000000data_model # _exptl.method 'incomplete data' # _modeller.version 10.4 # loop_ _modeller_template.id _modeller_template.name _modeller_template.template_begin _modeller_template.template_end _modeller_template.target_begin _modeller_template.target_end _modeller_template.pct_seq_id 1 3jroC 33:C 424:C 33:A 424:A 100.0 2 3f3fG 482:G 551:G 429:A 488:A 10.0 3 1abcA 1:A 10:A 1:B 20:B 10.0 # loop_ _atom_site.group_PDB _atom_site.type_symbol _atom_site.label_atom_id _atom_site.label_alt_id _atom_site.label_comp_id _atom_site.label_asym_id _atom_site.auth_asym_id _atom_site.label_seq_id _atom_site.auth_seq_id _atom_site.pdbx_PDB_ins_code _atom_site.Cartn_x _atom_site.Cartn_y _atom_site.Cartn_z _atom_site.occupancy _atom_site.B_iso_or_equiv _atom_site.label_entity_id _atom_site.id _atom_site.pdbx_PDB_model_num ATOM C CA . ALA A A 1 1 ?
25.847 14.655 5.416 1.000 21.304 1 2 1 python-ihm-2.7/test/input/modeller_model.ali000066400000000000000000000007671503573337200212700ustar00rootroot00000000000000>P1;5fd1 structureX:5fd1:1 :A:106 :A:ferredoxin:Azotobacter vinelandii: 1.90: 0.19 W* >P1;1fdx sequence:1fdx:1 : :54 : :ferredoxin:Peptococcus aerogenes: 2.00:-1.00 Y* ~ ~ ~ ~ python-ihm-2.7/test/input/modeller_model.cif000066400000000000000000000021101503573337200212440ustar00rootroot00000000000000data_model # _exptl.method 'model, MODELLER Version 10.4 2023/10/23 11:26:12' # _modeller.version 10.4 _modeller.objective_function 266.4716 _modeller.best_template_pct_seq_id 37.037 _modeller.sequence 1fdx _modeller.alignment modeller_model.ali _modeller.script modeller_model.py # loop_ _modeller_template.id _modeller_template.name _modeller_template.template_begin _modeller_template.template_end _modeller_template.target_begin _modeller_template.target_end _modeller_template.pct_seq_id 1 3jroC 33:C 424:C 33:A 424:A 100.0 2 3f3fG 482:G 551:G 429:A 488:A 10.0 3 1abcA 1:A 10:A 1:B 20:B 10.0 # loop_ _atom_site.group_PDB _atom_site.type_symbol _atom_site.label_atom_id _atom_site.label_alt_id _atom_site.label_comp_id _atom_site.label_asym_id _atom_site.auth_asym_id _atom_site.label_seq_id _atom_site.auth_seq_id _atom_site.pdbx_PDB_ins_code _atom_site.Cartn_x _atom_site.Cartn_y _atom_site.Cartn_z _atom_site.occupancy _atom_site.B_iso_or_equiv _atom_site.label_entity_id _atom_site.id _atom_site.pdbx_PDB_model_num ATOM C CA . ALA A A 1 1 ? 25.847 14.655 5.416 1.000 21.304 1 2 1 python-ihm-2.7/test/input/modeller_model.pdb000066400000000000000000000006521503573337200212610ustar00rootroot00000000000000EXPDTA THEORETICAL MODEL, MODELLER 9.18 2017/02/10 22:21:34 REMARK 6 SCRIPT: modeller_model.py REMARK 6 ALIGNMENT: modeller_model.ali REMARK 6 TEMPLATE: 3jroC 33:C - 424:C MODELS 33:A - 424:A AT 100.0% REMARK 6 TEMPLATE: 3f3fG 482:G - 551:G MODELS 429:A - 488:A AT 10.0% REMARK 6 TEMPLATE: 1abcA 1:A - 10:A MODELS 1:B - 20:B AT 10.0% ATOM 2 CA TYR A 7 -8.986 11.688 -5.817 1.00 91.82 C python-ihm-2.7/test/input/modeller_model.py000066400000000000000000000000171503573337200211370ustar00rootroot00000000000000# Empty script python-ihm-2.7/test/input/modeller_model_local.pdb000066400000000000000000000004531503573337200224320ustar00rootroot00000000000000EXPDTA THEORETICAL MODEL, MODELLER 9.18 2017/02/10 22:21:34 REMARK 6 ALIGNMENT: modeller_model.ali REMARK 6 TEMPLATE PATH 15133C ./15133C.pdb REMARK 6 TEMPLATE: 15133C 33:C - 424:C MODELS 33:A - 424:A AT 100.0% ATOM 2 CA TYR A 7 -8.986 11.688 -5.817 1.00 91.82 C python-ihm-2.7/test/input/modeller_model_no_aln.pdb000066400000000000000000000005341503573337200226060ustar00rootroot00000000000000EXPDTA THEORETICAL MODEL, MODELLER 9.18 2017/02/10 22:21:34 REMARK 6 TEMPLATE: 3jroC 33:C - 424:C MODELS 33:A - 424:A AT 100.0% REMARK 6 TEMPLATE: 3f3fG 482:G - 551:G MODELS 429:A - 488:A AT 10.0% REMARK 6 TEMPLATE: 1abcA 1:A - 10:A MODELS 1:B - 20:B AT 10.0% ATOM 2 CA TYR A 7 -8.986 11.688 -5.817 1.00 91.82 C python-ihm-2.7/test/input/modeller_modelcif.cif000066400000000000000000000037221503573337200217400ustar00rootroot00000000000000data_model # _entry.id model _exptl.entry_id model _exptl.method "THEORETICAL MODEL" # _modeller.version should-be-ignored _modeller.produced_by 'MODELLER Version 10.5 2024/01/23 11:31:44' _modeller.objective_function 266.4716 _modeller.best_template_pct_seq_id 37.037 _modeller.sequence 1fdx _modeller.alignment modeller_model.ali _modeller.script modeller_model.py # loop_ _citation.id 
_citation.title _citation.journal_abbrev _citation.journal_volume _citation.page_first _citation.page_last _citation.year _citation.pdbx_database_id_PubMed _citation.pdbx_database_id_DOI primary 'Comparative protein modelling by satisfaction of spatial restraints.' 'J Mol Biol' 234 779 815 1993 8254673 10.1006/jmbi.1993.1626 # loop_ _citation_author.citation_id _citation_author.name _citation_author.ordinal primary 'Sali, A.' 1 primary 'Blundell, T.L.' 2 # loop_ _software.pdbx_ordinal _software.name _software.classification _software.description _software.version _software.type _software.location _software.citation_id 1 MODELLER 'comparative modeling' 'Comparative modeling by satisfaction of spatial restraints, build 2023/10/23 11:26:12' 10.4 program https://salilab.org/modeller/ primary # loop_ _modeller_template.id _modeller_template.name _modeller_template.template_begin _modeller_template.template_end _modeller_template.target_begin _modeller_template.target_end _modeller_template.pct_seq_id 1 3jroC 33:C 424:C 33:A 424:A 100.0 2 3f3fG 482:G 551:G 429:A 488:A 10.0 3 1abcA 1:A 10:A 1:B 20:B 10.0 # loop_ _atom_site.group_PDB _atom_site.type_symbol _atom_site.label_atom_id _atom_site.label_alt_id _atom_site.label_comp_id _atom_site.label_asym_id _atom_site.auth_asym_id _atom_site.label_seq_id _atom_site.auth_seq_id _atom_site.pdbx_PDB_ins_code _atom_site.Cartn_x _atom_site.Cartn_y _atom_site.Cartn_z _atom_site.occupancy _atom_site.B_iso_or_equiv _atom_site.label_entity_id _atom_site.id _atom_site.pdbx_PDB_model_num ATOM C CA . ALA A A 1 1 ? 25.847 14.655 5.416 1.000 21.304 1 2 1 python-ihm-2.7/test/input/no_title.cif000066400000000000000000000001201503573337200200750ustar00rootroot00000000000000data_PDBDEV_00000025 _entry.id PDBDEV_00000025 _struct.entry_id PDBDEV_00000025 python-ihm-2.7/test/input/non_canon_atom.cif000066400000000000000000000026521503573337200212640ustar00rootroot00000000000000data_model # _exptl.method 'model, MODELLER Version 9.24 2020/08/21 11:54:31' # _modeller.version 9.24 # loop_ _entity.id _entity.type 1 polymer 2 non-polymer 3 non-polymer # loop_ _struct_asym.id _struct_asym.entity_id _struct_asym.details A 1 ? B 2 ? C 2 ? D 3 ? # loop_ _entity_poly_seq.entity_id _entity_poly_seq.num _entity_poly_seq.mon_id 1 1 VAL 1 2 GLY 1 3 GLN # loop_ _pdbx_entity_nonpoly.entity_id _pdbx_entity_nonpoly.name _pdbx_entity_nonpoly.comp_id 2 ? ZN 3 ? invalid-comp-name # loop_ _atom_site.group_PDB _atom_site.type_symbol _atom_site.label_atom_id _atom_site.label_alt_id _atom_site.label_comp_id _atom_site.label_asym_id _atom_site.auth_asym_id _atom_site.label_seq_id _atom_site.auth_seq_id _atom_site.pdbx_PDB_ins_code _atom_site.Cartn_x _atom_site.Cartn_y _atom_site.Cartn_z _atom_site.occupancy _atom_site.B_iso_or_equiv _atom_site.label_entity_id _atom_site.id _atom_site.pdbx_PDB_model_num ATOM C bad1 . VAL A A 1 2 ? 114.370 27.980 -26.088 1.000 143.490 1 1 1 ATOM C bad2 . VAL A A 1 2 ? 114.370 27.980 -26.088 1.000 143.490 1 2 1 ATOM C CA . GLY A A 2 3 ? 111.506 26.368 -28.075 1.000 137.530 1 3 1 ATOM C bad3 . GLN A A 3 4 ? 113.468 23.113 -28.639 1.000 128.420 1 4 1 HETATM ZN ZN . ZN B A . 5 ? 113.808 21.534 -32.168 1.000 117.620 2 5 1 HETATM ZN bad4 . ZN C A . 6 ? 113.808 21.534 -32.168 1.000 117.620 2 6 1 HETATM ZN ZN . invalid-comp-name D A . 7 ? 
113.808 21.534 -32.168 3.000 117.620 3 7 1 python-ihm-2.7/test/input/not_modeled.cif000066400000000000000000000100431503573337200205560ustar00rootroot00000000000000data_model # _exptl.method 'model, MODELLER Version 9.24 2020/08/21 11:54:31' # _modeller.version 9.24 # loop_ _struct_asym.id _struct_asym.entity_id _struct_asym.details A 1 ? B 2 ? # loop_ _entity_poly_seq.entity_id _entity_poly_seq.num _entity_poly_seq.mon_id 1 1 VAL 1 2 GLY 1 3 GLN 1 4 GLN 1 5 TYR 1 6 SER 1 7 SER 2 1 ASP 2 2 GLU # loop_ _atom_site.group_PDB _atom_site.type_symbol _atom_site.label_atom_id _atom_site.label_alt_id _atom_site.label_comp_id _atom_site.label_asym_id _atom_site.auth_asym_id _atom_site.label_seq_id _atom_site.auth_seq_id _atom_site.pdbx_PDB_ins_code _atom_site.Cartn_x _atom_site.Cartn_y _atom_site.Cartn_z _atom_site.occupancy _atom_site.B_iso_or_equiv _atom_site.label_entity_id _atom_site.id _atom_site.pdbx_PDB_model_num ATOM N N . VAL A A 1 2 ? 115.846 27.965 -26.370 1.000 141.830 1 1 1 ATOM C CA . VAL A A 1 2 ? 114.370 27.980 -26.088 1.000 143.490 1 2 1 ATOM C C . VAL A A 1 2 ? 113.517 27.504 -27.287 1.000 143.910 1 3 1 ATOM O O . VAL A A 1 2 ? 113.885 27.746 -28.441 1.000 146.600 1 4 1 ATOM C CB . VAL A A 1 2 ? 113.901 29.406 -25.683 1.000 143.750 1 5 1 ATOM C CG1 . VAL A A 1 2 ? 115.030 30.438 -25.931 1.000 144.590 1 6 1 ATOM C CG2 . VAL A A 1 2 ? 112.669 29.783 -26.486 1.000 144.500 1 7 1 ATOM N N . GLY A A 2 3 ? 112.371 26.869 -27.012 1.000 142.200 1 8 1 ATOM C CA . GLY A A 2 3 ? 111.506 26.368 -28.075 1.000 137.530 1 9 1 ATOM C C . GLY A A 2 3 ? 111.719 24.869 -28.275 1.000 135.820 1 10 1 ATOM O O . GLY A A 2 3 ? 110.768 24.093 -28.268 1.000 134.380 1 11 1 ATOM N N . GLN A A 3 4 ? 112.989 24.479 -28.392 1.000 134.310 1 12 1 ATOM C CA . GLN A A 3 4 ? 113.468 23.113 -28.639 1.000 128.420 1 13 1 ATOM C C . GLN A A 3 4 ? 113.556 22.956 -30.163 1.000 121.240 1 14 1 ATOM O O . GLN A A 3 4 ? 113.552 23.977 -30.840 1.000 127.090 1 15 1 ATOM C CB . GLN A A 3 4 ? 112.614 22.038 -27.919 1.000 132.340 1 16 1 ATOM C CG . GLN A A 3 4 ? 113.028 21.943 -26.407 1.000 135.370 1 17 1 ATOM C CD . GLN A A 3 4 ? 112.604 20.667 -25.677 1.000 138.260 1 18 1 ATOM O OE1 . GLN A A 3 4 ? 112.836 19.543 -26.150 1.000 141.450 1 19 1 ATOM N NE2 . GLN A A 3 4 ? 112.006 20.839 -24.497 1.000 139.310 1 20 1 ATOM N N . GLN A A 4 5 ? 113.648 21.739 -30.710 1.000 124.970 1 21 1 ATOM C CA . GLN A A 4 5 ? 113.808 21.534 -32.168 1.000 117.620 1 22 1 ATOM C C . GLN A A 4 5 ? 114.778 22.519 -32.833 1.000 112.980 1 23 1 ATOM O O . GLN A A 4 5 ? 114.677 23.727 -32.677 1.000 116.850 1 24 1 ATOM C CB . GLN A A 4 5 ? 112.456 21.545 -32.905 1.000 121.870 1 25 1 ATOM C CG . GLN A A 4 5 ? 111.763 20.153 -32.917 1.000 123.750 1 26 1 ATOM C CD . GLN A A 4 5 ? 110.863 19.874 -34.145 1.000 123.650 1 27 1 ATOM O OE1 . GLN A A 4 5 ? 110.040 20.712 -34.537 1.000 122.500 1 28 1 ATOM N NE2 . GLN A A 4 5 ? 111.008 18.674 -34.737 1.000 122.090 1 29 1 ATOM N N . SER A A 7 8 ? 117.999 25.245 -39.224 1.000 89.750 1 48 1 ATOM C CA . SER A A 7 8 ? 119.165 25.590 -40.036 1.000 87.320 1 49 1 ATOM C C . SER A A 7 8 ? 119.224 27.089 -40.277 1.000 84.820 1 50 1 ATOM O O . SER A A 7 8 ? 120.074 27.594 -41.008 1.000 84.020 1 51 1 ATOM C CB . SER A A 7 8 ? 119.112 24.859 -41.383 1.000 88.180 1 52 1 ATOM O OG . SER A A 7 8 ? 117.956 25.221 -42.117 1.000 88.850 1 53 1 ATOM N N . ASP B B 1 3 ? 71.339 57.678 52.031 1.000 152.010 2 54 1 ATOM C CA . ASP B B 1 3 ? 70.427 58.819 51.717 1.000 152.390 2 55 1 ATOM C C . ASP B B 1 3 ? 
70.144 58.821 50.222 1.000 151.960 2 56 1 ATOM O O . ASP B B 1 3 ? 70.984 59.245 49.435 1.000 151.590 2 57 1 ATOM C CB . ASP B B 1 3 ? 71.083 60.142 52.119 1.000 153.250 2 58 1 ATOM C CG . ASP B B 1 3 ? 71.660 60.105 53.526 1.000 154.120 2 59 1 ATOM O OD1 . ASP B B 1 3 ? 72.652 59.371 53.741 1.000 154.200 2 60 1 ATOM O OD2 . ASP B B 1 3 ? 71.119 60.804 54.415 1.000 154.250 2 61 1 # loop_ _ihm_residues_not_modeled.id _ihm_residues_not_modeled.model_id _ihm_residues_not_modeled.entity_description _ihm_residues_not_modeled.entity_id _ihm_residues_not_modeled.asym_id _ihm_residues_not_modeled.seq_id_begin _ihm_residues_not_modeled.seq_id_end _ihm_residues_not_modeled.comp_id_begin _ihm_residues_not_modeled.comp_id_end _ihm_residues_not_modeled.reason 1 1 . 1 A 5 5 TYR TYR . python-ihm-2.7/test/input/official.bcif000066400000000000000000000216601503573337200202120ustar00rootroot00000000000000ƒ§encoder¶mol*/ciftools cif2bcif§version¥0.3.0ªdataBlocks‘‚¦header¤2HBJªcategories™ƒ¤name¦_entry§columns‘ƒ¤name¢id¤data‚¨encoding‘…¤kind«StringArray¬dataEncoding‘‚¤kind©ByteArray¤typeªstringData¤2HBJ®offsetEncoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤type§offsetsĤdataĤmaskÀ¨rowCountƒ¤name«_database_2§columns”ƒ¤name«database_id¤data‚¨encoding‘…¤kind«StringArray¬dataEncoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤typeªstringData¬PDBRCSBWWPDB®offsetEncoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤type§offsetsĤdataĤmaskÀƒ¤name­database_code¤data‚¨encoding‘…¤kind«StringArray¬dataEncoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤typeªstringDataº2HBJRCSB038162D_1000038162®offsetEncoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤type§offsetsÄ ¤dataĤmaskÀƒ¤name·pdbx_database_accession¤data‚¨encoding‘…¤kind«StringArray¬dataEncoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤typeªstringData¬pdb_00002hbj®offsetEncoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤type§offsetsÄ ¤dataĤmask‚¨encoding‘‚¤kind©ByteArray¤type¤dataă¤name¨pdbx_DOI¤data‚¨encoding‘…¤kind«StringArray¬dataEncoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤typeªstringData³10.2210/pdb2hbj/pdb®offsetEncoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤type§offsetsĤdataĤmask‚¨encoding‘‚¤kind©ByteArray¤type¤dataĨrowCountƒ¤name§_struct§columns’ƒ¤name¨entry_id¤data‚¨encoding‘…¤kind«StringArray¬dataEncoding‘‚¤kind©ByteArray¤typeªstringData¤2HBJ®offsetEncoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤type§offsetsĤdataĤmaskÀƒ¤name¥title¤data‚¨encoding‘…¤kind«StringArray¬dataEncoding‘‚¤kind©ByteArray¤typeªstringDataÙyStructure of the yeast nuclear exosome component, Rrp6p, reveals an interplay between the active site and the HRDC 
domain®offsetEncoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤type§offsetsÄy¤dataĤmaskÀ¨rowCountƒ¤name¼_pdbx_audit_revision_history§columns•ƒ¤name§ordinal¤data‚¨encoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤type¤dataĤmaskÀƒ¤name±data_content_type¤data‚¨encoding‘…¤kind«StringArray¬dataEncoding“ƒ¤kind©RunLength§srcType§srcSize„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤typeªstringData¯Structure model®offsetEncoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤type§offsetsĤdataĤmaskÀƒ¤name®major_revision¤data‚¨encoding“ƒ¤kind©RunLength§srcType§srcSize„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤type¤dataĤmaskÀƒ¤name®minor_revision¤data‚¨encoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤type¤dataĤmaskÀƒ¤name­revision_date¤data‚¨encoding‘…¤kind«StringArray¬dataEncoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤typeªstringDataÙ22006-07-252008-05-012011-07-132017-10-182021-11-10®offsetEncoding”ƒ¤kind¥Delta¦origin§srcTypeƒ¤kind©RunLength§srcType§srcSize„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤type§offsetsÄ ¤dataĤmaskÀ¨rowCountƒ¤name§_entity§columns’ƒ¤name¢id¤data‚¨encoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤type¤dataĤmaskÀƒ¤nameªsrc_method¤data‚¨encoding‘…¤kind«StringArray¬dataEncoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤typeªstringData©mannatsyn®offsetEncoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤type§offsetsĤdataĤmaskÀ¨rowCountƒ¤name¯_entity_src_gen§columnsšƒ¤name©entity_id¤data‚¨encoding‘‚¤kind©ByteArray¤type¤dataĤmaskÀƒ¤name«pdbx_src_id¤data‚¨encoding‘‚¤kind©ByteArray¤type¤dataÄ*¤maskÀƒ¤name½pdbx_gene_src_scientific_name¤data‚¨encoding‘…¤kind«StringArray¬dataEncoding‘‚¤kind©ByteArray¤typeªstringData¬MUS MUSCULUS®offsetEncoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤type§offsetsÄ ¤dataĤmaskÀƒ¤name¾pdbx_gene_src_ncbi_taxonomy_id¤data‚¨encoding‘‚¤kind©ByteArray¤type¤dataÄj'¤maskÀƒ¤name´gene_src_common_name¤data‚¨encoding‘…¤kind«StringArray¬dataEncoding‘‚¤kind©ByteArray¤typeªstringData«HOUSE MOUSE®offsetEncoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤type§offsetsÄ ¤dataĤmaskÀƒ¤name¯gene_src_strain¤data‚¨encoding‘…¤kind«StringArray¬dataEncoding‘‚¤kind©ByteArray¤typeªstringData­TEST STRAIN 1®offsetEncoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤type§offsetsÄ ¤dataĤmaskÀƒ¤name½pdbx_host_org_scientific_name¤data‚¨encoding‘…¤kind«StringArray¬dataEncoding‘‚¤kind©ByteArray¤typeªstringData°ESCHERICHIA COLI®offsetEncoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤type§offsetsĤdataĤmaskÀƒ¤name¾pdbx_host_org_ncbi_taxonomy_id¤data‚¨encoding‘‚¤kind©ByteArray¤type¤dataÄ2¤maskÀƒ¤name´host_org_common_name¤data‚¨encoding‘…¤kind«StringArray¬dataEncoding‘‚¤kind©ByteArray¤typeªstringData­TEST COMMON 1®offsetEncoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤type§offsetsÄ 
¤dataĤmaskÀƒ¤name´pdbx_host_org_strain¤data‚¨encoding‘…¤kind«StringArray¬dataEncoding‘‚¤kind©ByteArray¤typeªstringData­TEST STRAIN 2®offsetEncoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤type§offsetsÄ ¤dataĤmaskÀ¨rowCountƒ¤name¯_entity_src_nat§columns–ƒ¤name©entity_id¤data‚¨encoding‘‚¤kind©ByteArray¤type¤dataĤmaskÀƒ¤name«pdbx_src_id¤data‚¨encoding‘‚¤kind©ByteArray¤type¤dataÄ*¤maskÀƒ¤name¸pdbx_organism_scientific¤data‚¨encoding‘…¤kind«StringArray¬dataEncoding‘‚¤kind©ByteArray¤typeªstringData°ESCHERICHIA COLI®offsetEncoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤type§offsetsĤdataĤmaskÀƒ¤nameµpdbx_ncbi_taxonomy_id¤data‚¨encoding‘‚¤kind©ByteArray¤type¤dataÄ2¤maskÀƒ¤name«common_name¤data‚¨encoding‘…¤kind«StringArray¬dataEncoding‘‚¤kind©ByteArray¤typeªstringData­TEST COMMON 2®offsetEncoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤type§offsetsÄ ¤dataĤmaskÀƒ¤name¦strain¤data‚¨encoding‘…¤kind«StringArray¬dataEncoding‘‚¤kind©ByteArray¤typeªstringData­TEST STRAIN 3®offsetEncoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤type§offsetsÄ ¤dataĤmaskÀ¨rowCountƒ¤name´_pdbx_entity_src_syn§columns•ƒ¤name©entity_id¤data‚¨encoding‘‚¤kind©ByteArray¤type¤dataĤmaskÀƒ¤name«pdbx_src_id¤data‚¨encoding‘‚¤kind©ByteArray¤type¤dataÄ*¤maskÀƒ¤name³organism_scientific¤data‚¨encoding‘…¤kind«StringArray¬dataEncoding‘‚¤kind©ByteArray¤typeªstringData±HELIANTHUS ANNUUS®offsetEncoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤type§offsetsĤdataĤmaskÀƒ¤name´organism_common_name¤data‚¨encoding‘…¤kind«StringArray¬dataEncoding‘‚¤kind©ByteArray¤typeªstringData°COMMON SUNFLOWER®offsetEncoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤type§offsetsĤdataĤmaskÀƒ¤name°ncbi_taxonomy_id¤data‚¨encoding‘‚¤kind©ByteArray¤type¤dataĈ¤maskÀ¨rowCountƒ¤name¬_struct_asym§columns“ƒ¤name¢id¤data‚¨encoding‘…¤kind«StringArray¬dataEncoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤typeªstringData¤ABCD®offsetEncoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤type§offsetsĤdataĤmaskÀƒ¤name©entity_id¤data‚¨encoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤type¤dataĤmaskÀƒ¤name§details¤data‚¨encoding‘…¤kind«StringArray¬dataEncoding“ƒ¤kind©RunLength§srcType§srcSize„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤typeªstringData ®offsetEncoding“ƒ¤kind¥Delta¦origin§srcType„¤kind®IntegerPacking©byteCountªisUnsignedçsrcSize‚¤kind©ByteArray¤type§offsetsĤdataĤmask‚¨encoding‘‚¤kind©ByteArray¤type¤dataĨrowCountpython-ihm-2.7/test/input/official.cif000066400000000000000000000037521503573337200200520ustar00rootroot00000000000000data_2HBJ # _entry.id 2HBJ # loop_ _database_2.database_id _database_2.database_code _database_2.pdbx_database_accession _database_2.pdbx_DOI PDB 2HBJ pdb_00002hbj 10.2210/pdb2hbj/pdb RCSB RCSB038162 ? ? WWPDB D_1000038162 ? ? 
# _struct.entry_id 2HBJ _struct.title 'Structure of the yeast nuclear exosome component, Rrp6p, reveals an interplay between the active site and the HRDC domain' # # loop_ _pdbx_audit_revision_history.ordinal _pdbx_audit_revision_history.data_content_type _pdbx_audit_revision_history.major_revision _pdbx_audit_revision_history.minor_revision _pdbx_audit_revision_history.revision_date 1 'Structure model' 1 0 2006-07-25 2 'Structure model' 1 1 2008-05-01 3 'Structure model' 1 2 2011-07-13 4 'Structure model' 1 3 2017-10-18 5 'Structure model' 1 4 2021-11-10 # loop_ _entity.id _entity.src_method 1 man 2 nat 3 syn # _entity_src_gen.entity_id 1 _entity_src_gen.pdbx_src_id 42 _entity_src_gen.pdbx_gene_src_scientific_name 'MUS MUSCULUS' _entity_src_gen.pdbx_gene_src_ncbi_taxonomy_id 10090 _entity_src_gen.gene_src_common_name 'HOUSE MOUSE' _entity_src_gen.gene_src_strain 'TEST STRAIN 1' _entity_src_gen.pdbx_host_org_scientific_name 'ESCHERICHIA COLI' _entity_src_gen.pdbx_host_org_ncbi_taxonomy_id 562 _entity_src_gen.host_org_common_name 'TEST COMMON 1' _entity_src_gen.pdbx_host_org_strain 'TEST STRAIN 2' # _entity_src_nat.entity_id 2 _entity_src_nat.pdbx_src_id 42 _entity_src_nat.pdbx_organism_scientific 'ESCHERICHIA COLI' _entity_src_nat.pdbx_ncbi_taxonomy_id 562 _entity_src_nat.common_name 'TEST COMMON 2' _entity_src_nat.strain 'TEST STRAIN 3' # _pdbx_entity_src_syn.entity_id 3 _pdbx_entity_src_syn.pdbx_src_id 42 _pdbx_entity_src_syn.organism_scientific 'HELIANTHUS ANNUUS' _pdbx_entity_src_syn.organism_common_name 'COMMON SUNFLOWER' _pdbx_entity_src_syn.ncbi_taxonomy_id 4232 # loop_ _struct_asym.id _struct_asym.entity_id _struct_asym.details A 1 ? B 2 ? C 2 ? D 3 ? # python-ihm-2.7/test/input/official.pdb000066400000000000000000000031371503573337200200530ustar00rootroot00000000000000HEADER HYDROLASE, GENE REGULATION 14-JUN-06 2HBJ TITLE STRUCTURE OF THE YEAST NUCLEAR EXOSOME COMPONENT, RRP6P, TITLE 2 REVEALS AN INTERPLAY BETWEEN THE ACTIVE SITE AND THE HRDC TITLE 3 DOMAIN COMPND BADKEY: ignored; MOL_ID: 1; COMPND 2 MOLECULE: ADIPOCYTE LIPID BINDING PROTEIN; COMPND 3 CHAIN: A; COMPND 4 SYNONYM: ALBP; COMPND 5 ENGINEERED: YES; COMPND 6 MUTATION: YES; COMPND 7 MOL_ID: 2; COMPND 8 MOLECULE: ADIPOCYTE LIPID BINDING PROTEIN; COMPND 9 CHAIN: B, C; COMPND 10 SYNONYM: ALBP; garbage; COMPND 11 MOL_ID: 3; COMPND 12 CHAIN: D; COMPND 13 MOL_ID: 4; COMPND 14 MOLECULE: EXTRA SOURCE MOL_ID: 1; SOURCE 2 ORGANISM_SCIENTIFIC: MUS MUSCULUS; SOURCE 3 ORGANISM_COMMON: HOUSE MOUSE; SOURCE 4 ORGANISM_TAXID: 10090; SOURCE 5 STRAIN: TEST STRAIN 1; SOURCE 6 CELL: ADIPOCYTE; SOURCE 7 EXPRESSION_SYSTEM: ESCHERICHIA COLI; SOURCE 11 EXPRESSION_SYSTEM_COMMON: TEST COMMON 1; SOURCE 8 EXPRESSION_SYSTEM_TAXID: 562; SOURCE 9 EXPRESSION_SYSTEM_STRAIN: TEST STRAIN 2; SOURCE 10 MOL_ID: 2; SOURCE 11 ORGANISM_SCIENTIFIC: ESCHERICHIA COLI; SOURCE 11 ORGANISM_COMMON: TEST COMMON 2; SOURCE 12 STRAIN: TEST STRAIN 3; SOURCE 13 ORGANISM_TAXID: 562; SOURCE 14 MOL_ID: 3; SOURCE 15 ORGANISM_SCIENTIFIC: HELIANTHUS ANNUUS; SOURCE 15 ORGANISM_COMMON: COMMON SUNFLOWER; SOURCE 16 ORGANISM_TAXID: 4232; SOURCE 17 STRAIN: TEST STRAIN 4; SOURCE 18 SYNTHETIC: YES REMARK 1 COMMENT IGNORED BY MMCIF CODE HELIX 10 10 ASP A 607 GLU A 624 1 18 ATOM 2 CA GLY A 127 57.062 21.781 -5.354 1.00 88.40 C python-ihm-2.7/test/input/orphan.cif000066400000000000000000000016461503573337200175650ustar00rootroot00000000000000loop_ _ihm_geometric_object_center.id _ihm_geometric_object_center.xcoord _ihm_geometric_object_center.ycoord _ihm_geometric_object_center.zcoord 1 
1.000 2.000 3.000 # loop_ _ihm_relaxation_time.id _ihm_relaxation_time.value _ihm_relaxation_time.unit _ihm_relaxation_time.amplitude _ihm_relaxation_time.dataset_group_id _ihm_relaxation_time.external_file_id _ihm_relaxation_time.details 1 3.000 seconds 0.5 . . details1 # loop_ _ihm_external_reference_info.reference_id _ihm_external_reference_info.reference_provider _ihm_external_reference_info.reference_type _ihm_external_reference_info.reference _ihm_external_reference_info.refers_to _ihm_external_reference_info.associated_url _ihm_external_reference_info.details 1 Zenodo DOI 10.5281/zenodo.46266 Archive nup84-v1.0.zip . # loop_ _chem_comp.id _chem_comp.type _chem_comp.name _chem_comp.formula _chem_comp.formula_weight THR 'L-peptide linking' THREONINE 'C4 H9 N O3' 119.120 python-ihm-2.7/test/input/phyre2_model.pdb000066400000000000000000000004111503573337200206600ustar00rootroot00000000000000REMARK 99 Chain ID : 1 REMARK 99 Residues : 361 REMARK 99 Atoms : 3263 REMARK 99 File : c4bzkA_.1.pdb.sc REMARK 6 TEMPLATE: 4bzkA 13:A - 334:A MODELS 8:A - 538:A AT 11% ATOM 2 CA MET A 8 221.364 50.140 158.235 1.00 0.00 C python-ihm-2.7/test/input/pubmed_api.json000066400000000000000000000202601503573337200206040ustar00rootroot00000000000000{ "header": { "type": "esummary", "version": "0.3" }, "result": { "uids": [ "29539637" ], "29539637": { "uid": "29539637", "pubdate": "2018 Mar 22", "epubdate": "2018 Mar 14", "source": "Nature", "authors": [ { "name": "Kim SJ", "authtype": "Author", "clusterid": "" }, { "name": "Fernandez-Martinez J", "authtype": "Author", "clusterid": "" }, { "name": "Nudelman I", "authtype": "Author", "clusterid": "" }, { "name": "Shi Y", "authtype": "Author", "clusterid": "" }, { "name": "Zhang W", "authtype": "Author", "clusterid": "" }, { "name": "Raveh B", "authtype": "Author", "clusterid": "" }, { "name": "Herricks T", "authtype": "Author", "clusterid": "" }, { "name": "Slaughter BD", "authtype": "Author", "clusterid": "" }, { "name": "Hogan JA", "authtype": "Author", "clusterid": "" }, { "name": "Upla P", "authtype": "Author", "clusterid": "" }, { "name": "Chemmama IE", "authtype": "Author", "clusterid": "" }, { "name": "Pellarin R", "authtype": "Author", "clusterid": "" }, { "name": "Echeverria I", "authtype": "Author", "clusterid": "" }, { "name": "Shivaraju M", "authtype": "Author", "clusterid": "" }, { "name": "Chaudhury AS", "authtype": "Author", "clusterid": "" }, { "name": "Wang J", "authtype": "Author", "clusterid": "" }, { "name": "Williams R", "authtype": "Author", "clusterid": "" }, { "name": "Unruh JR", "authtype": "Author", "clusterid": "" }, { "name": "Greenberg CH", "authtype": "Author", "clusterid": "" }, { "name": "Jacobs EY", "authtype": "Author", "clusterid": "" }, { "name": "Yu Z", "authtype": "Author", "clusterid": "" }, { "name": "de la Cruz MJ", "authtype": "Author", "clusterid": "" }, { "name": "Mironska R", "authtype": "Author", "clusterid": "" }, { "name": "Stokes DL", "authtype": "Author", "clusterid": "" }, { "name": "Aitchison JD", "authtype": "Author", "clusterid": "" }, { "name": "Jarrold MF", "authtype": "Author", "clusterid": "" }, { "name": "Gerton JL", "authtype": "Author", "clusterid": "" }, { "name": "Ludtke SJ", "authtype": "Author", "clusterid": "" }, { "name": "Akey CW", "authtype": "Author", "clusterid": "" }, { "name": "Chait BT", "authtype": "Author", "clusterid": "" }, { "name": "Sali A", "authtype": "Author", "clusterid": "" }, { "name": "Rout MP", "authtype": "Author", "clusterid": "" } ], "lastauthor": "Rout MP", "title": 
"Integrative structure and functional anatomy of a nuclear pore complex (test of python-ihm lib).", "sorttitle": "integrative structure and functional anatomy of a nuclear pore complex", "volume": "555", "issue": "7697", "pages": "475-482", "lang": [ "eng" ], "nlmuniqueid": "0410462", "issn": "0028-0836", "essn": "1476-4687", "pubtype": [ "Journal Article" ], "recordstatus": "PubMed - in process", "pubstatus": "256", "articleids": [ { "idtype": "pubmed", "idtypen": 1, "value": "29539637" }, { "idtype": "pii", "idtypen": 4, "value": "nature26003" }, { "idtype": "doi", "idtypen": 3, "value": "10.1038/nature26003" }, { "idtype": "rid", "idtypen": 8, "value": "29539637" }, { "idtype": "eid", "idtypen": 8, "value": "29539637" } ], "history": [ { "pubstatus": "received", "date": "2017/06/21 00:00" }, { "pubstatus": "accepted", "date": "2018/02/06 00:00" }, { "pubstatus": "pubmed", "date": "2018/03/15 06:00" }, { "pubstatus": "medline", "date": "2018/03/15 06:00" }, { "pubstatus": "entrez", "date": "2018/03/15 06:00" } ], "references": [ ], "attributes": [ "Has Abstract" ], "pmcrefcount": "", "fulljournalname": "Nature", "elocationid": "doi: 10.1038/nature26003", "doctype": "citation", "srccontriblist": [ ], "booktitle": "", "medium": "", "edition": "", "publisherlocation": "", "publishername": "", "srcdate": "", "reportnumber": "", "availablefromurl": "", "locationlabel": "", "doccontriblist": [ ], "docdate": "", "bookname": "", "chapter": "", "sortpubdate": "2018/03/22 00:00", "sortfirstauthor": "Kim SJ", "vernaculartitle": "" } } } python-ihm-2.7/test/input/pubmed_api_no_doi.json000066400000000000000000000200201503573337200221250ustar00rootroot00000000000000{ "header": { "type": "esummary", "version": "0.3" }, "result": { "uids": [ "29539637" ], "29539637": { "uid": "29539637", "pubdate": "2018 Mar 22", "epubdate": "2018 Mar 14", "source": "Nature", "authors": [ { "name": "Kim SJ", "authtype": "Author", "clusterid": "" }, { "name": "Fernandez-Martinez J", "authtype": "Author", "clusterid": "" }, { "name": "Nudelman I", "authtype": "Author", "clusterid": "" }, { "name": "Shi Y", "authtype": "Author", "clusterid": "" }, { "name": "Zhang W", "authtype": "Author", "clusterid": "" }, { "name": "Raveh B", "authtype": "Author", "clusterid": "" }, { "name": "Herricks T", "authtype": "Author", "clusterid": "" }, { "name": "Slaughter BD", "authtype": "Author", "clusterid": "" }, { "name": "Hogan JA", "authtype": "Author", "clusterid": "" }, { "name": "Upla P", "authtype": "Author", "clusterid": "" }, { "name": "Chemmama IE", "authtype": "Author", "clusterid": "" }, { "name": "Pellarin R", "authtype": "Author", "clusterid": "" }, { "name": "Echeverria I", "authtype": "Author", "clusterid": "" }, { "name": "Shivaraju M", "authtype": "Author", "clusterid": "" }, { "name": "Chaudhury AS", "authtype": "Author", "clusterid": "" }, { "name": "Wang J", "authtype": "Author", "clusterid": "" }, { "name": "Williams R", "authtype": "Author", "clusterid": "" }, { "name": "Unruh JR", "authtype": "Author", "clusterid": "" }, { "name": "Greenberg CH", "authtype": "Author", "clusterid": "" }, { "name": "Jacobs EY", "authtype": "Author", "clusterid": "" }, { "name": "Yu Z", "authtype": "Author", "clusterid": "" }, { "name": "de la Cruz MJ", "authtype": "Author", "clusterid": "" }, { "name": "Mironska R", "authtype": "Author", "clusterid": "" }, { "name": "Stokes DL", "authtype": "Author", "clusterid": "" }, { "name": "Aitchison JD", "authtype": "Author", "clusterid": "" }, { "name": "Jarrold MF", "authtype": "Author", 
"clusterid": "" }, { "name": "Gerton JL", "authtype": "Author", "clusterid": "" }, { "name": "Ludtke SJ", "authtype": "Author", "clusterid": "" }, { "name": "Akey CW", "authtype": "Author", "clusterid": "" }, { "name": "Chait BT", "authtype": "Author", "clusterid": "" }, { "name": "Sali A", "authtype": "Author", "clusterid": "" }, { "name": "Rout MP", "authtype": "Author", "clusterid": "" } ], "lastauthor": "Rout MP", "title": "Integrative structure and functional anatomy of a nuclear pore complex (test of python-ihm lib).", "sorttitle": "integrative structure and functional anatomy of a nuclear pore complex", "volume": "555", "issue": "7697", "pages": "475-82", "lang": [ "eng" ], "nlmuniqueid": "0410462", "issn": "0028-0836", "essn": "1476-4687", "pubtype": [ "Journal Article" ], "recordstatus": "PubMed - in process", "pubstatus": "256", "articleids": [ { "idtype": "pubmed", "idtypen": 1, "value": "29539637" }, { "idtype": "pii", "idtypen": 4, "value": "nature26003" }, { "idtype": "rid", "idtypen": 8, "value": "29539637" }, { "idtype": "eid", "idtypen": 8, "value": "29539637" } ], "history": [ { "pubstatus": "received", "date": "2017/06/21 00:00" }, { "pubstatus": "accepted", "date": "2018/02/06 00:00" }, { "pubstatus": "pubmed", "date": "2018/03/15 06:00" }, { "pubstatus": "medline", "date": "2018/03/15 06:00" }, { "pubstatus": "entrez", "date": "2018/03/15 06:00" } ], "references": [ ], "attributes": [ "Has Abstract" ], "pmcrefcount": "", "fulljournalname": "Nature", "elocationid": "doi: 10.1038/nature26003", "doctype": "citation", "srccontriblist": [ ], "booktitle": "", "medium": "", "edition": "", "publisherlocation": "", "publishername": "", "srcdate": "", "reportnumber": "", "availablefromurl": "", "locationlabel": "", "doccontriblist": [ ], "docdate": "", "bookname": "", "chapter": "", "sortpubdate": "2018/03/22 00:00", "sortfirstauthor": "Kim SJ", "vernaculartitle": "" } } } python-ihm-2.7/test/input/pubmed_api_no_pages.json000066400000000000000000000202461503573337200224630ustar00rootroot00000000000000{ "header": { "type": "esummary", "version": "0.3" }, "result": { "uids": [ "29539637" ], "29539637": { "uid": "29539637", "pubdate": "2018 Mar 22", "epubdate": "2018 Mar 14", "source": "Nature", "authors": [ { "name": "Kim SJ", "authtype": "Author", "clusterid": "" }, { "name": "Fernandez-Martinez J", "authtype": "Author", "clusterid": "" }, { "name": "Nudelman I", "authtype": "Author", "clusterid": "" }, { "name": "Shi Y", "authtype": "Author", "clusterid": "" }, { "name": "Zhang W", "authtype": "Author", "clusterid": "" }, { "name": "Raveh B", "authtype": "Author", "clusterid": "" }, { "name": "Herricks T", "authtype": "Author", "clusterid": "" }, { "name": "Slaughter BD", "authtype": "Author", "clusterid": "" }, { "name": "Hogan JA", "authtype": "Author", "clusterid": "" }, { "name": "Upla P", "authtype": "Author", "clusterid": "" }, { "name": "Chemmama IE", "authtype": "Author", "clusterid": "" }, { "name": "Pellarin R", "authtype": "Author", "clusterid": "" }, { "name": "Echeverria I", "authtype": "Author", "clusterid": "" }, { "name": "Shivaraju M", "authtype": "Author", "clusterid": "" }, { "name": "Chaudhury AS", "authtype": "Author", "clusterid": "" }, { "name": "Wang J", "authtype": "Author", "clusterid": "" }, { "name": "Williams R", "authtype": "Author", "clusterid": "" }, { "name": "Unruh JR", "authtype": "Author", "clusterid": "" }, { "name": "Greenberg CH", "authtype": "Author", "clusterid": "" }, { "name": "Jacobs EY", "authtype": "Author", "clusterid": "" }, { 
"name": "Yu Z", "authtype": "Author", "clusterid": "" }, { "name": "de la Cruz MJ", "authtype": "Author", "clusterid": "" }, { "name": "Mironska R", "authtype": "Author", "clusterid": "" }, { "name": "Stokes DL", "authtype": "Author", "clusterid": "" }, { "name": "Aitchison JD", "authtype": "Author", "clusterid": "" }, { "name": "Jarrold MF", "authtype": "Author", "clusterid": "" }, { "name": "Gerton JL", "authtype": "Author", "clusterid": "" }, { "name": "Ludtke SJ", "authtype": "Author", "clusterid": "" }, { "name": "Akey CW", "authtype": "Author", "clusterid": "" }, { "name": "Chait BT", "authtype": "Author", "clusterid": "" }, { "name": "Sali A", "authtype": "Author", "clusterid": "" }, { "name": "Rout MP", "authtype": "Author", "clusterid": "" } ], "lastauthor": "Rout MP", "title": "Integrative structure and functional anatomy of a nuclear pore complex (test of python-ihm lib).", "sorttitle": "integrative structure and functional anatomy of a nuclear pore complex", "volume": "", "issue": "7697", "pages": "", "lang": [ "eng" ], "nlmuniqueid": "0410462", "issn": "0028-0836", "essn": "1476-4687", "pubtype": [ "Journal Article" ], "recordstatus": "PubMed - in process", "pubstatus": "256", "articleids": [ { "idtype": "pubmed", "idtypen": 1, "value": "29539637" }, { "idtype": "pii", "idtypen": 4, "value": "nature26003" }, { "idtype": "doi", "idtypen": 3, "value": "10.1038/nature26003" }, { "idtype": "rid", "idtypen": 8, "value": "29539637" }, { "idtype": "eid", "idtypen": 8, "value": "29539637" } ], "history": [ { "pubstatus": "received", "date": "2017/06/21 00:00" }, { "pubstatus": "accepted", "date": "2018/02/06 00:00" }, { "pubstatus": "pubmed", "date": "2018/03/15 06:00" }, { "pubstatus": "medline", "date": "2018/03/15 06:00" }, { "pubstatus": "entrez", "date": "2018/03/15 06:00" } ], "references": [ ], "attributes": [ "Has Abstract" ], "pmcrefcount": "", "fulljournalname": "Nature", "elocationid": "doi: 10.1038/nature26003", "doctype": "citation", "srccontriblist": [ ], "booktitle": "", "medium": "", "edition": "", "publisherlocation": "", "publishername": "", "srcdate": "", "reportnumber": "", "availablefromurl": "", "locationlabel": "", "doccontriblist": [ ], "docdate": "", "bookname": "", "chapter": "", "sortpubdate": "2018/03/22 00:00", "sortfirstauthor": "Kim SJ", "vernaculartitle": "" } } } python-ihm-2.7/test/input/pubmed_api_one_page.json000066400000000000000000000202541503573337200224440ustar00rootroot00000000000000{ "header": { "type": "esummary", "version": "0.3" }, "result": { "uids": [ "29539637" ], "29539637": { "uid": "29539637", "pubdate": "2018 Mar 22", "epubdate": "2018 Mar 14", "source": "Nature", "authors": [ { "name": "Kim SJ", "authtype": "Author", "clusterid": "" }, { "name": "Fernandez-Martinez J", "authtype": "Author", "clusterid": "" }, { "name": "Nudelman I", "authtype": "Author", "clusterid": "" }, { "name": "Shi Y", "authtype": "Author", "clusterid": "" }, { "name": "Zhang W", "authtype": "Author", "clusterid": "" }, { "name": "Raveh B", "authtype": "Author", "clusterid": "" }, { "name": "Herricks T", "authtype": "Author", "clusterid": "" }, { "name": "Slaughter BD", "authtype": "Author", "clusterid": "" }, { "name": "Hogan JA", "authtype": "Author", "clusterid": "" }, { "name": "Upla P", "authtype": "Author", "clusterid": "" }, { "name": "Chemmama IE", "authtype": "Author", "clusterid": "" }, { "name": "Pellarin R", "authtype": "Author", "clusterid": "" }, { "name": "Echeverria I", "authtype": "Author", "clusterid": "" }, { "name": "Shivaraju M", 
"authtype": "Author", "clusterid": "" }, { "name": "Chaudhury AS", "authtype": "Author", "clusterid": "" }, { "name": "Wang J", "authtype": "Author", "clusterid": "" }, { "name": "Williams R", "authtype": "Author", "clusterid": "" }, { "name": "Unruh JR", "authtype": "Author", "clusterid": "" }, { "name": "Greenberg CH", "authtype": "Author", "clusterid": "" }, { "name": "Jacobs EY", "authtype": "Author", "clusterid": "" }, { "name": "Yu Z", "authtype": "Author", "clusterid": "" }, { "name": "de la Cruz MJ", "authtype": "Author", "clusterid": "" }, { "name": "Mironska R", "authtype": "Author", "clusterid": "" }, { "name": "Stokes DL", "authtype": "Author", "clusterid": "" }, { "name": "Aitchison JD", "authtype": "Author", "clusterid": "" }, { "name": "Jarrold MF", "authtype": "Author", "clusterid": "" }, { "name": "Gerton JL", "authtype": "Author", "clusterid": "" }, { "name": "Ludtke SJ", "authtype": "Author", "clusterid": "" }, { "name": "Akey CW", "authtype": "Author", "clusterid": "" }, { "name": "Chait BT", "authtype": "Author", "clusterid": "" }, { "name": "Sali A", "authtype": "Author", "clusterid": "" }, { "name": "Rout MP", "authtype": "Author", "clusterid": "" } ], "lastauthor": "Rout MP", "title": "Integrative structure and functional anatomy of a nuclear pore complex (test of python-ihm lib).", "sorttitle": "integrative structure and functional anatomy of a nuclear pore complex", "volume": "555", "issue": "7697", "pages": "475", "lang": [ "eng" ], "nlmuniqueid": "0410462", "issn": "0028-0836", "essn": "1476-4687", "pubtype": [ "Journal Article" ], "recordstatus": "PubMed - in process", "pubstatus": "256", "articleids": [ { "idtype": "pubmed", "idtypen": 1, "value": "29539637" }, { "idtype": "pii", "idtypen": 4, "value": "nature26003" }, { "idtype": "doi", "idtypen": 3, "value": "10.1038/nature26003" }, { "idtype": "rid", "idtypen": 8, "value": "29539637" }, { "idtype": "eid", "idtypen": 8, "value": "29539637" } ], "history": [ { "pubstatus": "received", "date": "2017/06/21 00:00" }, { "pubstatus": "accepted", "date": "2018/02/06 00:00" }, { "pubstatus": "pubmed", "date": "2018/03/15 06:00" }, { "pubstatus": "medline", "date": "2018/03/15 06:00" }, { "pubstatus": "entrez", "date": "2018/03/15 06:00" } ], "references": [ ], "attributes": [ "Has Abstract" ], "pmcrefcount": "", "fulljournalname": "Nature", "elocationid": "doi: 10.1038/nature26003", "doctype": "citation", "srccontriblist": [ ], "booktitle": "", "medium": "", "edition": "", "publisherlocation": "", "publishername": "", "srcdate": "", "reportnumber": "", "availablefromurl": "", "locationlabel": "", "doccontriblist": [ ], "docdate": "", "bookname": "", "chapter": "", "sortpubdate": "2018/03/22 00:00", "sortfirstauthor": "Kim SJ", "vernaculartitle": "" } } } python-ihm-2.7/test/input/struct_only.cif000066400000000000000000000002701503573337200206530ustar00rootroot00000000000000data_PDBDEV_00000025 _entry.id PDBDEV_00000025 _struct.entry_id PDBDEV_00000025 _struct.title 'Architecture of Pol II(G) and molecular mechanism of transcription regulation by Gdown1' python-ihm-2.7/test/input/swiss_model.pdb000066400000000000000000000143201503573337200206230ustar00rootroot00000000000000TITLE SWISS-MODEL SERVER (https://swissmodel.expasy.org) TITLE 2 RS2_MYCPN P75560 30S ribosomal protein S2 EXPDTA THEORETICAL MODEL (SWISS-MODEL SERVER) AUTHOR SWISS-MODEL SERVER (SEE REFERENCE IN JRNL Records) REVDAT 1 02-JUL-19 1MOD 1 18:01 JRNL AUTH A.WATERHOUSE,M.BERTONI,S.BIENERT,G.STUDER,G.TAURIELLO, JRNL AUTH 2 R.GUMIENNY,F.T.HEER,T.A.P.DE 
BEER,C.REMPFER,L.BORDOLI, JRNL AUTH 3 R.LEPORE,T.SCHWEDE JRNL TITL SWISS-MODEL: HOMOLOGY MODELLING OF PROTEIN STRUCTURES AND JRNL TITL 2 COMPLEXES JRNL REF NUCLEIC.ACIDS.RES.. V. 46 W296 2018 JRNL PMID 29788355 JRNL DOI 10.1093/nar/gky427 REMARK 1 REMARK 1 REFERENCE 1 REMARK 1 AUTH S.BIENERT,A.WATERHOUSE,T.A.P.DE BEER,G.TAURIELLO,G.STUDER, REMARK 1 AUTH 2 L.BORDOLI,T.SCHWEDE REMARK 1 TITL THE SWISS-MODEL REPOSITORY - NEW FEATURES AND FUNCTIONALITY REMARK 1 REF NUCLEIC.ACIDS.RES.. V. 22 2017 REMARK 1 REFN ISSN 0305-1048 REMARK 1 PMID 27899672 REMARK 1 DOI 10.1093/nar/gkw1132 REMARK 1 REMARK 1 REFERENCE 2 REMARK 1 AUTH N.GUEX,M.C.PEITSCH,T.SCHWEDE REMARK 1 TITL AUTOMATED COMPARATIVE PROTEIN STRUCTURE MODELING WITH REMARK 1 TITL 2 SWISS-MODEL AND SWISS-PDBVIEWER: A HISTORICAL PERSPECTIVE REMARK 1 REF ELECTROPHORESIS V. 30 2009 REMARK 1 REFN ISSN 0173-0835 REMARK 1 PMID 19517507 REMARK 1 DOI 10.1002/elps.200900140 REMARK 1 REMARK 1 REFERENCE 3 REMARK 1 AUTH P.BENKERT,M.BIASINI,T.SCHWEDE REMARK 1 TITL TOWARD THE ESTIMATION OF THE ABSOLUTE QUALITY OF INDIVIDUAL REMARK 1 TITL 2 PROTEIN STRUCTURE MODELS REMARK 1 REF BIOINFORMATICS V. 27 2011 REMARK 1 REFN ISSN 1367-4803 REMARK 1 PMID 21134891 REMARK 1 DOI 10.1093/bioinformatics/btq662 REMARK 1 REMARK 1 REFERENCE 4 REMARK 1 AUTH M.BERTONI,F.KIEFER,M.BIASINI,L.BORDOLI,T.SCHWEDE REMARK 1 TITL MODELING PROTEIN QUATERNARY STRUCTURE OF HOMO- AND REMARK 1 TITL 2 HETERO-OLIGOMERS BEYOND BINARY INTERACTIONS BY HOMOLOGY REMARK 1 REF SCI.REP. V. 7 2017 REMARK 1 REFN ISSN REMARK 1 PMID 28874689 REMARK 1 DOI 10.1038/s41598-017-09654-8 REMARK 1 REMARK 1 DISCLAIMER REMARK 1 The SWISS-MODEL SERVER produces theoretical models for proteins. REMARK 1 The results of any theoretical modelling procedure is REMARK 1 NON-EXPERIMENTAL and MUST be considered with care. These models may REMARK 1 contain significant errors. This is especially true for automated REMARK 1 modeling since there is no human intervention during model REMARK 1 building. Please read the header section and the logfile carefully REMARK 1 to know what templates and alignments were used during the model REMARK 1 building process. All information by the SWISS-MODEL SERVER is REMARK 1 provided "AS-IS", without any warranty, expressed or implied. REMARK 2 REMARK 2 COPYRIGHT NOTICE REMARK 2 This SWISS-MODEL protein model is copyright. It is produced by the REMARK 2 SWISS-MODEL server, developed by the Computational Structural REMARK 2 Biology Group at the SIB Swiss Institute of Bioinformatics at the REMARK 2 Biozentrum, University of Basel (https://swissmodel.expasy.org). This REMARK 2 model is licensed under the CC BY-SA 4.0 Creative Commons REMARK 2 Attribution-ShareAlike 4.0 International License REMARK 2 (https://creativecommons.org/licenses/by-sa/4.0/legalcode), i.e. you REMARK 2 can copy and redistribute the model in any medium or format, REMARK 2 transform and build upon the model for any purpose, even REMARK 2 commercially, under the following terms: REMARK 2 Attribution - You must give appropriate credit, provide a link to REMARK 2 the license, and indicate if changes were made. You may do so in any REMARK 2 reasonable manner, but not in any way that suggests the licensor REMARK 2 endorses you or your use. When you publish, patent or distribute REMARK 2 results that were fully or partially based on the model, please cite REMARK 2 the corresponding papers mentioned under JRNL. 
REMARK 2 ShareAlike - If you remix, transform, or build upon the material, REMARK 2 you must distribute your contributions under the same license as the REMARK 2 original. REMARK 2 No additional restrictions - you may not apply legal terms or REMARK 2 technological measures that legally restrict others from doing REMARK 2 anything the license permits. REMARK 2 Find a human-readable summary of (and not a substitute for) the REMARK 2 CC BY-SA 4.0 license at this link: REMARK 2 https://creativecommons.org/licenses/by-sa/4.0/ REMARK 3 REMARK 3 MODEL INFORMATION REMARK 3 ENGIN PROMOD3 REMARK 3 VERSN 1.3.0 REMARK 3 OSTAT monomer REMARK 3 OSRSN MONOMER (USER) REMARK 3 QSPRD 0.000 REMARK 3 QMN4 -4.55 REMARK 3 MODT FALSE REMARK 3 REMARK 3 TEMPLATE 1 REMARK 3 PDBID 3j9w REMARK 3 CHAIN AB REMARK 3 MMCIF B REMARK 3 PDBV 2019-06-21 REMARK 3 SMTLE 3j9w.1.B REMARK 3 SMTLV 2019-06-27 REMARK 3 MTHD ELECTRON MICROSCOPY 0.00 A REMARK 3 FOUND HHblits REMARK 3 SIM 0.39 REMARK 3 SID 40.35 REMARK 3 OSTAT monomer REMARK 3 ALN B TRG MSELITTPVETTAKAELVSLAKLGEMRTHVGMVKRYWNPKMGFFIEPERKHNNDHFVL REMARK 3 ALN B TRG ELQRQSLQTAYNYVKEVAQNNGQILFVGTKNDYVKKLVNNIAKRVDVAFITQRWLGGT REMARK 3 ALN B TRG LTNFKTLSISINKLNKLVEKQA-ENAADLTKKENLMLSREIERLEKFFGGVKSLKRLP REMARK 3 ALN B TRG NLLIVDDPVYEKNAVAEANILRIPVVALCNTNTNPELVDFIIPANNHQPQSTCLLMNL REMARK 3 ALN B TRG LADAVAEAKAMPTMFAYKPDEEIQIEIPQKQEAPRQVVNRANSKQITSQRLNITRNPE REMARK 3 ALN B TRG VLTRE REMARK 3 ALN B TPL --------------MSVISMKQLLEAGVHFGHQTRRWNPKMKRYIFTER-NGIYIIDL REMARK 3 ALN B TPL QKTVKKVEEAYNFTKNLAAEGGKILFVGTKKQ-AQDSVKEEAQRSGMYYVNQRWLGGT REMARK 3 ALN B TPL LTNFETIQKRIKRLKDIEKMQENGTFDVLPKKEVVQLKKELERLEKFLGGIKDMKDLP REMARK 3 ALN B TPL DALFIIDPRKERIAVAEARKLNIPIIGIVDTNCDPDEIDVVIPANDDAIRAVKLLTSK REMARK 3 ALN B TPL MADAILEAKQGEE--------------------------------------------- REMARK 3 ALN B TPL ----- REMARK 3 ALN B OFF 0 ATOM 2 CA TYR A 7 -8.986 11.688 -5.817 1.00 91.82 C python-ihm-2.7/test/input/swiss_model_multimer.pdb000066400000000000000000000372741503573337200225560ustar00rootroot00000000000000TITLE SWISS-MODEL SERVER (https://swissmodel.expasy.org) TITLE 2 RNAP EXPDTA THEORETICAL MODEL (SWISS-MODEL SERVER) AUTHOR SWISS-MODEL SERVER (SEE REFERENCE IN JRNL Records) REVDAT 1 26-NOV-19 1MOD 1 17:25 JRNL AUTH A.WATERHOUSE,M.BERTONI,S.BIENERT,G.STUDER,G.TAURIELLO, JRNL AUTH 2 R.GUMIENNY,F.T.HEER,T.A.P.DE BEER,C.REMPFER,L.BORDOLI, JRNL AUTH 3 R.LEPORE,T.SCHWEDE JRNL TITL SWISS-MODEL: HOMOLOGY MODELLING OF PROTEIN STRUCTURES AND JRNL TITL 2 COMPLEXES JRNL REF NUCLEIC.ACIDS.RES.. V. 46 W296 2018 JRNL PMID 29788355 JRNL DOI 10.1093/nar/gky427 REMARK 1 REMARK 1 REFERENCE 1 REMARK 1 AUTH S.BIENERT,A.WATERHOUSE,T.A.P.DE BEER,G.TAURIELLO,G.STUDER, REMARK 1 AUTH 2 L.BORDOLI,T.SCHWEDE REMARK 1 TITL THE SWISS-MODEL REPOSITORY - NEW FEATURES AND FUNCTIONALITY REMARK 1 REF NUCLEIC.ACIDS.RES.. V. 22 2017 REMARK 1 REFN ISSN 0305-1048 REMARK 1 PMID 27899672 REMARK 1 DOI 10.1093/nar/gkw1132 REMARK 1 REMARK 1 REFERENCE 2 REMARK 1 AUTH N.GUEX,M.C.PEITSCH,T.SCHWEDE REMARK 1 TITL AUTOMATED COMPARATIVE PROTEIN STRUCTURE MODELING WITH REMARK 1 TITL 2 SWISS-MODEL AND SWISS-PDBVIEWER: A HISTORICAL PERSPECTIVE REMARK 1 REF ELECTROPHORESIS V. 30 2009 REMARK 1 REFN ISSN 0173-0835 REMARK 1 PMID 19517507 REMARK 1 DOI 10.1002/elps.200900140 REMARK 1 REMARK 1 REFERENCE 3 REMARK 1 AUTH P.BENKERT,M.BIASINI,T.SCHWEDE REMARK 1 TITL TOWARD THE ESTIMATION OF THE ABSOLUTE QUALITY OF INDIVIDUAL REMARK 1 TITL 2 PROTEIN STRUCTURE MODELS REMARK 1 REF BIOINFORMATICS V. 
27 2011 REMARK 1 REFN ISSN 1367-4803 REMARK 1 PMID 21134891 REMARK 1 DOI 10.1093/bioinformatics/btq662 REMARK 1 REMARK 1 REFERENCE 4 REMARK 1 AUTH M.BERTONI,F.KIEFER,M.BIASINI,L.BORDOLI,T.SCHWEDE REMARK 1 TITL MODELING PROTEIN QUATERNARY STRUCTURE OF HOMO- AND REMARK 1 TITL 2 HETERO-OLIGOMERS BEYOND BINARY INTERACTIONS BY HOMOLOGY REMARK 1 REF SCI.REP. V. 7 2017 REMARK 1 REFN ISSN REMARK 1 PMID 28874689 REMARK 1 DOI 10.1038/s41598-017-09654-8 REMARK 1 REMARK 1 DISCLAIMER REMARK 1 The SWISS-MODEL SERVER produces theoretical models for proteins. REMARK 1 The results of any theoretical modelling procedure is REMARK 1 NON-EXPERIMENTAL and MUST be considered with care. These models may REMARK 1 contain significant errors. This is especially true for automated REMARK 1 modeling since there is no human intervention during model REMARK 1 building. Please read the header section and the logfile carefully REMARK 1 to know what templates and alignments were used during the model REMARK 1 building process. All information by the SWISS-MODEL SERVER is REMARK 1 provided "AS-IS", without any warranty, expressed or implied. REMARK 2 REMARK 2 COPYRIGHT NOTICE REMARK 2 This SWISS-MODEL protein model is copyright. It is produced by the REMARK 2 SWISS-MODEL server, developed by the Computational Structural REMARK 2 Biology Group at the SIB Swiss Institute of Bioinformatics at the REMARK 2 Biozentrum, University of Basel (https://swissmodel.expasy.org). This REMARK 2 model is licensed under the CC BY-SA 4.0 Creative Commons REMARK 2 Attribution-ShareAlike 4.0 International License REMARK 2 (https://creativecommons.org/licenses/by-sa/4.0/legalcode), i.e. you REMARK 2 can copy and redistribute the model in any medium or format, REMARK 2 transform and build upon the model for any purpose, even REMARK 2 commercially, under the following terms: REMARK 2 Attribution - You must give appropriate credit, provide a link to REMARK 2 the license, and indicate if changes were made. You may do so in any REMARK 2 reasonable manner, but not in any way that suggests the licensor REMARK 2 endorses you or your use. When you publish, patent or distribute REMARK 2 results that were fully or partially based on the model, please cite REMARK 2 the corresponding papers mentioned under JRNL. REMARK 2 ShareAlike - If you remix, transform, or build upon the material, REMARK 2 you must distribute your contributions under the same license as the REMARK 2 original. REMARK 2 No additional restrictions - you may not apply legal terms or REMARK 2 technological measures that legally restrict others from doing REMARK 2 anything the license permits. 
REMARK 2 Find a human-readable summary of (and not a substitute for) the REMARK 2 CC BY-SA 4.0 license at this link: REMARK 2 https://creativecommons.org/licenses/by-sa/4.0/ REMARK 3 REMARK 3 MODEL INFORMATION REMARK 3 ENGIN PROMOD3 REMARK 3 VERSN 2.0.0 REMARK 3 OSTAT hetero-1-1-2-mer REMARK 3 OSRSN TEMPLATE REMARK 3 QSPRD NA REMARK 3 QMN4 -4.52 REMARK 3 MODT FALSE REMARK 3 REMARK 3 TEMPLATE 1 REMARK 3 PDBID 6flq REMARK 3 CHAIN B REMARK 3 MMCIF B REMARK 3 CHAIN A REMARK 3 MMCIF A REMARK 3 CHAIN C REMARK 3 MMCIF C REMARK 3 CHAIN D REMARK 3 MMCIF D REMARK 3 PDBV 2019-11-01 REMARK 3 SMTLE 6flq.1 REMARK 3 SMTLV 2019-11-21 REMARK 3 MTHD ELECTRON MICROSCOPY 4.10 A REMARK 3 FOUND BLAST / HHblits REMARK 3 SIM 0.40 REMARK 3 SID 40.95 REMARK 3 OSTAT hetero-1-1-2-mer REMARK 3 LIGND MG REMARK 3 LIGND 2 ZN REMARK 3 LIGND 3 ZN REMARK 3 ALN B TRG MEKFLKYEIKVNNEQARANPNYGIFEVGPLESGFVITIGNAMRRVLLSCIPGASVFAL REMARK 3 ALN B TRG SISGAKQEFAAVEGMKEDVTEVVLNFKQLVVKISDLLFEDGEMVEPPLERWPLLTVTA REMARK 3 ALN B TRG EKAGPVYAKDLECPAGFEVVNKDLYLFSLQTDKKVTVNVYVK--QGRGFV---TFLEN REMARK 3 ALN B TRG REMINSLGIIATDSNFSPVLHCGYEVQELKTSKQKITDHLTFKIATNGAISAVDAFAM REMARK 3 ALN B TRG AAKILIEHLNPIVNVNESIKALNIIQEKAEERRVRSFAKQIEELDFTVRTFNCLKRSG REMARK 3 ALN B TRG IHTLQELLSKSLADIREIRNLGKKSEREIIKKVHELGLKLRS REMARK 3 ALN B TPL ----------------------------PLERGFGHTLGNALRRILLSSMPGCAVTEV REMARK 3 ALN B TPL EIDGVLHEYSTKEGVQEDILEILLNLKGLAVRVQG----KDEVI---------LTLNK REMARK 3 ALN B TPL SGIGPVTAADITHDGDVEIV-KPQHVICHLTDENASISMRIKVQRGRGYVPASTRIHS REMARK 3 ALN B TPL EEDERPIGRLLVDACYSPVERIAYNVEAARVEQRTDLDKLVIEMETNGTIDPEEAIRR REMARK 3 ALN B TPL AATILAEQLEAFVDLRD-VRQPEVKEEKPEFDPI--LLRPVDDLELTVRSANCLKAEA REMARK 3 ALN B TPL IHYIGDLVQRTEVELLKTPNLGKKSLTEIKDVLASRGLSL-- REMARK 3 ALN B OFF 29 REMARK 3 ALN A TRG MEKFLKYEIKVNNEQARANPNYGIFEVGPLESGFVITIGNAMRRVLLSCIPGASVFAL REMARK 3 ALN A TRG SISGAKQEFAAVEGMKEDVTEVVLNFKQLVVKISDLLFEDGEMVEPPLERWPLLTVTA REMARK 3 ALN A TRG EKAGPVYAKDLECPAGFEVVNKDLYLFSLQTDKKVTVNVYVK--QGRGFV---TFLEN REMARK 3 ALN A TRG REMINSLGIIATDSNFSPVLHCGYEVQELKTSKQKITDHLTFKIATNGAISAVDAFAM REMARK 3 ALN A TRG AAKILIEHLNPIVNVNESIKALNIIQEKAEERRVRSFAKQIEELDFTVRTFNCLKRSG REMARK 3 ALN A TRG IHTLQELLSKSLADIREIRNLGKKSEREIIKKVHELGLKLRS REMARK 3 ALN A TPL ----------------------------PLERGFGHTLGNALRRILLSSMPGCAVTEV REMARK 3 ALN A TPL EIDGVLHEYSTKEGVQEDILEILLNLKGLAVRVQG----KDEVI---------LTLNK REMARK 3 ALN A TPL SGIGPVTAADITHDGDVEIV-KPQHVICHLTDENASISMRIKVQRGRGYVPASTRIHS REMARK 3 ALN A TPL EEDERPIGRLLVDACYSPVERIAYNVEAARVEQRTDLDKLVIEMETNGTIDPEEAIRR REMARK 3 ALN A TPL AATILAEQLEAFVDLRD-VRQPEVKEEKPEFDPI--LLRPVDDLELTVRSANCLKAEA REMARK 3 ALN A TPL IHYIGDLVQRTEVELLKTPNLGKKSLTEIKDVLASRGLSL-- REMARK 3 ALN A OFF 29 REMARK 3 ALN C TRG MSQKPSFFQKKYSPTATRRYYGKIATDFVQPNLADIQIRSYQTFLDH------DLENL REMARK 3 ALN C TRG IAAYFPIKSPNDRYTINFKGLRRTAPERNEAQSRSESKTYEIGIYADLELIDSATGTI REMARK 3 ALN C TRG KKPRKSKKNSATSSVDGVFLTNLPLITRDGVFIVNGIEKFVIAQITRSPGIYMLTKSQ REMARK 3 ALN C TRG LKLSSSRKRVQEGYVCEVLPANGSVMLIYISNKKKIEDAFVQILLRDAVREGAKIFPI REMARK 3 ALN C TRG TTLLKAFGMSGKEILKVFKNNEFIT-------RSLEAEVYN---AKDFL-NNVDPEIK REMARK 3 ALN C TRG NLLREFRDG-KTDLRRKGIA-SDQKIRSLVSDYVLL---EKEHKA--LSEAKPND-PK REMARK 3 ALN C TRG VGQLEA--------------DMDE-LMDKI------ITERAAKHIVHELSISLRGLEN REMARK 3 ALN C TRG TDECPENSYHALLCSRFFRQRRYNLSAAGRYKVSRKLRITERIYQKTLACDLHLKNGE REMARK 3 ALN C TRG LLLKKGTLLVKEEIDKIKQAAQNNQIDFVQKIKLTTDGSAVNLSPESLLYESLDVYVN REMARK 3 ALN C TRG NDNFDVSVPVVGIHNDNDLNKAITLSDFIASISYVINIPSAIGKYDDIDHLGNKRVKL REMARK 3 ALN C TRG INELISSRLESGITRMERFLKEKLTIADGVNRGQQINEEGQVIEQAEKKELTIKSLIN REMARK 3 ALN C TRG 
SKPIQIVIRDFFNTHQLTQFLDHQNPLSELSNKRRISAMGPGGISREDPNLDIRDVHY REMARK 3 ALN C TRG SQYGRICPIETPEGMNIGLIMSLASFAKIDENGFLMAPYRKIKNGVITDEVEYLTALR REMARK 3 ALN C TRG EDEHIIAEISSLVNIDENNKILDKEIIGRYRSMQGLYDPSKIDYIDVAPHQVVSIGSS REMARK 3 ALN C TRG LIPFLENDDSARALMGTNMQRQAYPLIKPYAPVVGTGQEYKIARDSGLTMLAPCSGTV REMARK 3 ALN C TRG KYVDNSKITIESDSG-------EQHTLDLIKFERSNQNTCYNHVPLVEKGQRVTKDEV REMARK 3 ALN C TRG IADGPAVNKSELSLGQNVLVAFTTWNGYNYEDAIVISERLVKDDVLTSLTINEYVAQC REMARK 3 ALN C TRG LSTKNGDEQITRDIPNVSDANKRYLDENGIIMVGAEVKEGDVLVGKVSPKGQVEVSPE REMARK 3 ALN C TRG EKLFKAIFPESVQNVRDSSLKLPHGGDGIVSCVKRFSI-------------------- REMARK 3 ALN C TRG ---------------------------------------------------------- REMARK 3 ALN C TRG ----------------------ANGNELNDGVIEMIKVYVVQKRKIQIGDKLAGRHGN REMARK 3 ALN C TRG KGVISKVVPVADMPHLEDGTPVDILLNPLGVPSRMNIGQIFEMHLGYAAHNLAKRMLI REMARK 3 ALN C TRG SACFDDKKAQALSTEINQPQYKLDRLITGLKAQITNRGLKDEQAALAQLNNGDIALVL REMARK 3 ALN C TRG KEIGMSFDDLHFKVATPIFQGVNFQDLQDIMDEAGLKPAETHGKFKLIDGRTGLPFEK REMARK 3 ALN C TRG PISLGIMYIMKLNHMVDDKIHARAVGPYSKITQQPLGGKSQNGGQRFGEMEVWALEAY REMARK 3 ALN C TRG GAAYNLQELLTIKSDDVQGRNKAYAAIVKGAAFPEPGIPESFKLLTKELQGLALSVSF REMARK 3 ALN C TRG IYDDNTQQDSNNVSILQADGEQDDLFNDFEFDTEGY REMARK 3 ALN C TPL -------------KKRIRKDFGKRPQVLDVPYLLSIQLDSFQKFIEQDPEGQYGLEAA REMARK 3 ALN C TPL FRSVFPIQSYSGNSELQYVSYRLGEPVFDVQECQIRGVTYSAPLRVKLRLVIYEREAP REMARK 3 ALN C TPL EGT--VKDIKE----QEVYMGEIPLMTDNGTFVINGTERVIVSQLHRSPGVFFDSDKG REMARK 3 ALN C TPL ----KTHSSGKVLYNARIIPYRGSW-LDFEFDPKDN----LFV--R---IDRRRKLPA REMARK 3 ALN C TPL TIILRALNYTTEQILDLFFEKVIFEIRDNKLQMELVPERLRGETASFDIEANG-KVYV REMARK 3 ALN C TPL EKGRRITARHIRQLEKDDVKLIEVPVEYIAGKVVAKDYIDESTGELICAANMELSLDL REMARK 3 ALN C TPL LAKLSQSGHKRIETLFTNDLDHGPYISETLRVDPTNDRLSALVEIYRM-------MRP REMARK 3 ALN C TPL GEPPTREAAESLFENLFFSEDRYDLSAVGRMKFNRSLLRE------------------ REMARK 3 ALN C TPL -------------------------E---------I---------------------- REMARK 3 ALN C TPL -----------------EGSGILSKDDIIDVMKKLIDIRNGKGEVDDIDHLGNRRIRS REMARK 3 ALN C TPL VGEMAENQFRVGLVRVERAVKERLSLGDL-------------------DTLMPQDMIN REMARK 3 ALN C TPL AKPISAAVKEFFGSSQLSQFMDQNNPLSEITHKRRISALGPGGLTRERAGFEVRDVHP REMARK 3 ALN C TPL THYGRVCPIETPEGPNIGLINSLSVYAQTNEYGFLETPYRKVTDGVVTDEIHYLSAIE REMARK 3 ALN C TPL EGNYVIAQANSNLDEE--GHFVEDLVTCRSKGESSLFSRDQVDYMDVSTQQVVSVGAS REMARK 3 ALN C TPL LIPFLEHDDANRALMGANMQRQAVPTLRADKPLVGTGMERAVAVDSGVTAVAKRGGVV REMARK 3 ALN C TPL QYVDASRIVIKVNEDEMYPGEAGIDIYNLTKYTRSNQNTCINQMPCVSLGEPVERGDV REMARK 3 ALN C TPL LADGPSTDLGELALGQNMRVAFMPWNGYNFEDSILVSERVVQEDRFTTIHIQELACVS REMARK 3 ALN C TPL RDTKLGPEEITADIPNVGEAALSKLDESGIVYIGAEVTGGDILVGKVTPKGETQLTPE REMARK 3 ALN C TPL EKLLRAIFGEKASDVKDSSLRVPNGVSGTVIDVQVFTRDGVEKDKRALEIEEMQLKQA REMARK 3 ALN C TPL KKDLSEELQILEAGLFSRIRAVLVAGGVEAEKLDKLPRDRWLELGLTDEEKQNQLEQL REMARK 3 ALN C TPL AEQYDELKHEFEKKLEAKRRKITQGDDLAPGVLKIVKVYLAVKRRIQPGDKMAGRHGN REMARK 3 ALN C TPL KGVISKINPIEDMPYDENGTPVDIVLNPLGVPSRMNIGQILETHLGMAAKGIGDKINA REMARK 3 ALN C TPL MLKQQQEVAKLREFIQRAYDLG--------------------ADVRQKVDLSTFSDEE REMARK 3 ALN C TPL VMRLAENLRKGMPIATPVFDGAKEAEIKELLKLGDLPT---SGQIRLYDGRTGEQFER REMARK 3 ALN C TPL PVTVGYMYMLKLNHLVDDKMHARSTGSYSLVTQQPLGGKAQFGGQRFGEMEVWALEAY REMARK 3 ALN C TPL GAAYTLQEMLTVKSDDVNGRTKMYKNIVDGNHQMEPGMPESFNVLLKEIRSLGINIEL REMARK 3 ALN C TPL E----------------------------------- REMARK 3 ALN C OFF 7 REMARK 3 ALN D TRG MTKRNKKNNKLYKNIKAIKLSIASNDTILNWSEGEVTKAETINYKSLKPEPGGLFDEA REMARK 3 ALN D TRG IFGPVKDYECACGKFKKIKYRGVRCDRCGVWVTESIVRRERMGHIALVSPVAHIWMSK REMARK 3 ALN D TRG ELPSPSKISLVLNISYKEVEQVLYFVNYIVLDTGKIKDPKIMPFKFKEVLDLAGKGSL REMARK 3 
ALN D TRG TTRQKMRRVIGYIFRNLIKNRSSEDYRKGKIFYESLKNSSLPFSLNDAFNYIKKYT-G REMARK 3 ALN D TRG FRVGIGAEAILELLNKIDLNYEFSKLNDALRKAKKDSVEDAKVKKILRQLETISWFRN REMARK 3 ALN D TRG SKLHPKNMILHTVPVIPPDIRPIIQLDGAKFTTSDINNFYRRVIIRNDRLRRILEDGT REMARK 3 ALN D TRG VPAIVVNNEKRLLQESVDALFDNSSRHKPALSKDKRSLKSLTDRLKGKQGLFRHNLLG REMARK 3 ALN D TRG KRVDYSGRSVIVVGPELKMYEVGIPALMILKLFKPFIIHGLINKFDSNGNEIRPIASS REMARK 3 ALN D TRG IRQAEDMIKNQDDLIWGIVYDVIKDRPVLLNRAPTLHRLGIQAFEPRIVDGKAIRLHP REMARK 3 ALN D TRG LVTTAFNADFDGDQMAVHVPLSENAVNEARAILLASKHILGLKDGRPIVTPTQDMVLG REMARK 3 ALN D TRG NYYLTTERKGQTGEGIIFGTVHEARAAYEAGKVHLHAIVGISTKAFPNK--HFEAQG- REMARK 3 ALN D TRG -TLITTVGKIIFNDVLGDNIPYINEGEFDEHACPQKFIVPPSGDVRAAIAAHQVLPAF REMARK 3 ALN D TRG GKKVISKLIDLLYTVVEFKDLPRILENIKALGFKYSTHSSTTVSVFDIPKYSNKQQYF REMARK 3 ALN D TRG DEADQQVLKYKQFYNKGLLTDDERYKRVVKLWNGVKEKVSSEIQDLIKR--------- REMARK 3 ALN D TRG --EEYRDNSIVVMADSGARGNISNFTQLFGMRGLMSKSFNYERNNQSKIIKDTIEVPI REMARK 3 ALN D TRG KHSFLEGLTINEYFNSSYGARKGMTDTAMKTAKSGYMTRKLVDATHELIINHDDCGTR REMARK 3 ALN D TRG KGIVVEAIVETKTRSLVESLFDRIVNRYTIGPILDPETKAEIVPANSLITQELAKQIC REMARK 3 ALN D TRG ATSIKQVLVRSVIYCERENGVCQYCFGVDLSTGKLVELGTAVGVIAAQSIGEPGTQLT REMARK 3 ALN D TRG MRTFHTGGVSTE---------------------------------------------- REMARK 3 ALN D TRG ---------------------------------------------------------- REMARK 3 ALN D TRG ---------------------------------------------------------- REMARK 3 ALN D TRG --------------------------NNLAQGFERLKQIFEVVAPKDYERCVISEVKG REMARK 3 ALN D TRG VVKSI-TTTQNAQEVLIES--SVDERTYSIPFSAQLRVKVGDAVELGSKITEGSIDIR REMARK 3 ALN D TRG QLLRVAGIQRVRQYMIVEIQKVYRIQGIEIADKYVEIIIRQLTSLLQVTDAGSSNLFV REMARK 3 ALN D TRG GQLVHSHHLNELNKSLLLSGKMPVIAINQVFGIDEAASKSNSFLSAASFQDTKKILTD REMARK 3 ALN D TRG AAVKTQVDYLLGLKENVIIGGKIPAGTGFLTDEELAYLGAKTVQEEY REMARK 3 ALN D TPL ---------TKTEEFDAIKIALASPDMIRSWSFGEVKKPETINYRTFKPERDGLFCAR REMARK 3 ALN D TPL IFGPVKDYECLCGKYKRLKHRGVICEKCGVEVTQTKVRRERMGHIELASPTAHIWFLK REMARK 3 ALN D TPL --SLPSRIGLLLDMPLRDIERVLYFESYVVIEGGM------TNLERQQILT------- REMARK 3 ALN D TPL ------EEQYLD--------------------------------------ALEEFGDE REMARK 3 ALN D TPL FDAKMGAEAIQALLKSMDLEQECEQLREELNE----TNSETKRKKLTKRIKLLEAFVQ REMARK 3 ALN D TPL SGNKPEWMILTVLPVLPPDLRPLVPLDGGRFATSDLNDLYRRVINRNNRLKRLLDLAA REMARK 3 ALN D TPL -PDIIVRNEKRMLQEAVDALLDNGRRGRAITGSNKRPLKSLADMIKGKQGRFRQNLLG REMARK 3 ALN D TPL KRVDYSGRSVITVGPYLRLHQCGLPKKMALELFKPFIYGKLELR---------GLATT REMARK 3 ALN D TPL IKAAKKMVEREEAVVWDILDEVIREHPVLLNRAPTLHRLGIQAFEPVLIEGKAIQLHP REMARK 3 ALN D TPL LVCAAYNADFDGDQMAVHVPLTLEAQLEARALMMSTNNILSPANGEPIIVPSQDVVLG REMARK 3 ALN D TPL LYYMTRDCVNAKGEGMVLTGPKEAERLYRSGLASLHARVKVRITEYEKDANGELVAKT REMARK 3 ALN D TPL SLKDTTVGRAILWMIVPKGLPYSI-----------------------------VNQAL REMARK 3 ALN D TPL GKKAISKMLNTCYRILGLKPTVIFADQIMYTGFAYAARSGASVGIDDMVIPEKKHEII REMARK 3 ALN D TPL SEAEAEVAEIQEQFQSGLVTAGERYNKVIDIWAAANDRVSKAMMDNLQTETVINRDGQ REMARK 3 ALN D TPL EEKQVSFNSIYMMADSGARGSAAQIRQLAGMRGLMAKPDG-----------SIIETPI REMARK 3 ALN D TPL TANFREGLNVLQYFISTHGARKGLADTALKTANSGYLTRRLVDVAQDLVVTEDDCGTH REMARK 3 ALN D TPL EGIMMTPVIEGG--DVKEPLRDRVLGRVTAEDVLKPGTADILVPRNTLLHEQWCDLLE REMARK 3 ALN D TPL ENSVDAVKVRSVVSCDTDFGVCAHCYGRDLARGHIINKGEAIGVIAAQSIGEPGTQLT REMARK 3 ALN D TPL MRTFHIGGAASRAAAESSIQVKNKGSIKLSNVKSVVNSSGKLVITSRNTELKLIDEFG REMARK 3 ALN D TPL RTKESYKVPYGAVLAKGDGEQVAGGETVANWDPHTMPVITEVSGFVRFTDMIDGQTIT REMARK 3 ALN D TPL RQTDELTGLSSLVVLDSAERTAGGKDLRPALKIVDAQGNDVLIPGTDMPAQYFLPGKA REMARK 3 ALN D TPL IVQLEDGVQISSGDTLARIPQESGGTKDITGGLPRVADLFEARRPKE--PAILAEISG REMARK 3 ALN D TPL 
IVS-FGKETKGKRRLVITPVDGSDPYEEMIPKWRQLNVFEGERVERGDVISDGPEAPH REMARK 3 ALN D TPL DILRLRGVHAVTRYIVNEVQDVYRLQGVKINDKHIEVIVRQMLRKATIVNAGSSDFLE REMARK 3 ALN D TPL GEQVEYSRVKIANRELEANGKVGATYSRDLLGITKASLATESFISAASFQETTRVLTE REMARK 3 ALN D TPL AAVAGKRDELRGLKENVIVGRLIPAGTGYAYHQDRMR---------- REMARK 3 ALN D OFF 11 python-ihm-2.7/test/input/uniprot_bad_header.fasta000066400000000000000000000013561503573337200224470ustar00rootroot00000000000000>sp|P52891 MELSPTYQTERFTKFSDTLKEFKIEQNNEQNPIDPFNIIREFRSAAGQLALDLANSGDES NVISSKDWELEARFWHLVELLLVFRNADLDLDEMELHPYNSRGLFEKKLMQDNKQLYQIW IVMVWLKENTYVMERPKNVPTSKWLNSITSGGLKSCDLDFPLRENTNVLDVKDKEEDHIF FKYIYELILAGAIDEALEEAKLSDNISICMILCGIQEYLNPVIDTQIANEFNTQQGIKKH SLWRRTVYSLSQQAGLDPYERAIYSYLSGAIPNQEVLQYSDWESDLHIHLNQILQTEIEN YLLENNQVGTDELILPLPSHALTVQEVLNRVASRHPSESEHPIRVLMASVILDSLPSVIH SSVEMLLDVVKGTEASNDIIDKPYLLRIVTHLAICLDIINPGSVEEVDKSKLITTYISLL KLQGLYENIPIYATFLNESDCLEACSFILSSLEDPQVRKKQIETINFLRLPASNILRRTT QRVFDETEQEYSPSNEISISFDVNNIDMHLIYGVEWLIEGKLYVDAVHSIIALSRRFLLN GRVKALEQFMERNNIGEICKNYELEKIADNISKDENEDQFLEEITQYEHLIKGIREYEEW QKSVSLLSSESNIPTLIEKLQGFSKDTFELIKTFLVDLTSSNFADSADYEILYEIRALYT PFLLMELHKKLVEAAKLLKIPKFISEALAFTSLVANENDKIYLLFQSSGKLKEYLDLVAR TATLSN python-ihm-2.7/test/input/uniprot_no_details.fasta000066400000000000000000000013721503573337200225300ustar00rootroot00000000000000>sp|P52891|NUP84_YEAST MELSPTYQTERFTKFSDTLKEFKIEQNNEQNPIDPFNIIREFRSAAGQLALDLANSGDES NVISSKDWELEARFWHLVELLLVFRNADLDLDEMELHPYNSRGLFEKKLMQDNKQLYQIW IVMVWLKENTYVMERPKNVPTSKWLNSITSGGLKSCDLDFPLRENTNVLDVKDKEEDHIF FKYIYELILAGAIDEALEEAKLSDNISICMILCGIQEYLNPVIDTQIANEFNTQQGIKKH SLWRRTVYSLSQQAGLDPYERAIYSYLSGAIPNQEVLQYSDWESDLHIHLNQILQTEIEN YLLENNQVGTDELILPLPSHALTVQEVLNRVASRHPSESEHPIRVLMASVILDSLPSVIH SSVEMLLDVVKGTEASNDIIDKPYLLRIVTHLAICLDIINPGSVEEVDKSKLITTYISLL KLQGLYENIPIYATFLNESDCLEACSFILSSLEDPQVRKKQIETINFLRLPASNILRRTT QRVFDETEQEYSPSNEISISFDVNNIDMHLIYGVEWLIEGKLYVDAVHSIIALSRRFLLN GRVKALEQFMERNNIGEICKNYELEKIADNISKDENEDQFLEEITQYEHLIKGIREYEEW QKSVSLLSSESNIPTLIEKLQGFSKDTFELIKTFLVDLTSSNFADSADYEILYEIRALYT PFLLMELHKKLVEAAKLLKIPKFISEALAFTSLVANENDKIYLLFQSSGKLKEYLDLVAR TATLSN python-ihm-2.7/test/input/unknown_model.cif000066400000000000000000000000301503573337200211370ustar00rootroot00000000000000data_foo _entry.id foo python-ihm-2.7/test/input/unknown_model.pdb000066400000000000000000000000511503573337200211460ustar00rootroot00000000000000REMARK 1 COMMENT IGNORED BY MMCIF CODE python-ihm-2.7/test/mock/000077500000000000000000000000001503573337200153765ustar00rootroot00000000000000python-ihm-2.7/test/mock/non_canon_atom/000077500000000000000000000000001503573337200203665ustar00rootroot00000000000000python-ihm-2.7/test/mock/non_canon_atom/urllib/000077500000000000000000000000001503573337200216575ustar00rootroot00000000000000python-ihm-2.7/test/mock/non_canon_atom/urllib/__init__.py000066400000000000000000000000071503573337200237650ustar00rootroot00000000000000# noop python-ihm-2.7/test/mock/non_canon_atom/urllib/error.py000066400000000000000000000001121503573337200233540ustar00rootroot00000000000000class URLError(Exception): pass class HTTPError(URLError): pass python-ihm-2.7/test/mock/non_canon_atom/urllib/request.py000066400000000000000000000003551503573337200237240ustar00rootroot00000000000000from io import BytesIO import urllib.error zinc_atoms = b'_chem_comp_atom.comp_id ZN\n_chem_comp_atom.atom_id ZN\n' def urlopen(url): if 'invalid' in url: raise urllib.error.HTTPError("404") return BytesIO(zinc_atoms) 
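The stub request.py above supplies just enough of a urllib replacement for the non-canonical-atom tests: urlopen() returns a minimal zinc chem_comp_atom table for any URL and raises HTTPError for URLs containing 'invalid', so the tests never touch the network. Below is a minimal illustrative sketch of the same canned-response pattern written with unittest.mock rather than a shadowing package; it assumes nothing about how the real test harness places this mock directory on sys.path.

from io import BytesIO
from unittest import mock
import urllib.request
import urllib.error

ZINC_ATOMS = b'_chem_comp_atom.comp_id ZN\n_chem_comp_atom.atom_id ZN\n'

def fake_urlopen(url):
    # Mirror the stub above: canned CCD data, or a 404 for "invalid" URLs.
    if 'invalid' in url:
        raise urllib.error.HTTPError(url, 404, 'Not Found', None, None)
    return BytesIO(ZINC_ATOMS)

# Any code calling urllib.request.urlopen() inside this block receives the
# canned zinc definition instead of performing a real download.
with mock.patch('urllib.request.urlopen', side_effect=fake_urlopen):
    data = urllib.request.urlopen('https://example.org/ZN.cif').read()
    assert b'_chem_comp_atom.comp_id ZN' in data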
python-ihm-2.7/test/test_analysis.py000066400000000000000000000046611503573337200177100ustar00rootroot00000000000000import utils import os import unittest TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) utils.set_search_paths(TOPDIR) import ihm.analysis class Tests(unittest.TestCase): def test_filter_step(self): """Test analysis FilterStep class""" s = ihm.analysis.FilterStep(feature='RMSD', num_models_begin=42, num_models_end=5) self.assertEqual(s.type, 'filter') self.assertEqual(s.feature, 'RMSD') self.assertEqual(s.num_models_begin, 42) self.assertEqual(s.num_models_end, 5) # test with invalid feature self.assertRaises(ValueError, ihm.analysis.FilterStep, feature='invalid', num_models_begin=42, num_models_end=5) def test_cluster_step(self): """Test analysis ClusterStep class""" s = ihm.analysis.ClusterStep(feature='RMSD', num_models_begin=42, num_models_end=5) self.assertEqual(s.type, 'cluster') self.assertEqual(s.feature, 'RMSD') self.assertEqual(s.num_models_begin, 42) self.assertEqual(s.num_models_end, 5) self.assertEqual(s._get_report(), "cluster (42->5 models)") def test_rescore_step(self): """Test analysis RescoreStep class""" s = ihm.analysis.RescoreStep(feature='RMSD', num_models_begin=42, num_models_end=5) self.assertEqual(s.type, 'rescore') self.assertEqual(s.feature, 'RMSD') self.assertEqual(s.num_models_begin, 42) self.assertEqual(s.num_models_end, 5) def test_step(self): """Test analysis Step class""" s = ihm.analysis.Step(feature='RMSD', num_models_begin=42, num_models_end=5) self.assertEqual(s.type, 'other') self.assertEqual(s.feature, 'RMSD') self.assertEqual(s.num_models_begin, 42) self.assertEqual(s.num_models_end, 5) def test_empty_step(self): """Test analysis EmptyStep class""" s = ihm.analysis.EmptyStep() self.assertEqual(s.type, 'none') self.assertEqual(s.feature, 'none') self.assertIsNone(s.num_models_begin) self.assertIsNone(s.num_models_end) def test_analysis(self): """Test Analysis class""" a = ihm.analysis.Analysis() self.assertEqual(a.steps, []) if __name__ == '__main__': unittest.main() python-ihm-2.7/test/test_citations.py000066400000000000000000000007131503573337200200540ustar00rootroot00000000000000import utils import os import unittest TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) utils.set_search_paths(TOPDIR) import ihm.citations class Tests(unittest.TestCase): def test_citations(self): """Test citations module""" pmi = ihm.citations.pmi self.assertEqual(pmi.pmid, '31396911') self.assertEqual(pmi.doi, '10.1007/978-1-4939-9608-7_15') if __name__ == '__main__': unittest.main() python-ihm-2.7/test/test_cross_linkers.py000066400000000000000000000010031503573337200207300ustar00rootroot00000000000000import utils import os import unittest TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) utils.set_search_paths(TOPDIR) import ihm.cross_linkers class Tests(unittest.TestCase): def test_cross_linkers(self): """Test cross_linkers module""" d = ihm.cross_linkers.dss self.assertEqual(d.auth_name, 'DSS') self.assertEqual(d.smiles, 'C1CC(=O)N(C1=O)OC(=O)CCCCCCC(=O)ON2C(=O)CCC2=O') if __name__ == '__main__': unittest.main() python-ihm-2.7/test/test_dataset.py000066400000000000000000000243431503573337200175110ustar00rootroot00000000000000import utils import os import unittest TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) utils.set_search_paths(TOPDIR) import ihm.dataset import ihm.location import ihm.geometry def _make_test_file(fname): with open(fname, 'w') as fh: 
        fh.write('contents')


class Tests(unittest.TestCase):
    def test_dataset(self):
        """Test Dataset base class"""
        loc = ihm.location.PDBLocation('1abc', version='foo', details='bar')
        d = ihm.dataset.Dataset(loc)
        self.assertIsNone(d.details)
        self.assertEqual(len(d.parents), 0)
        l2 = ihm.location.PDBLocation('1xyz', version='foo', details='bar')
        d2 = ihm.dataset.Dataset(l2, details='foo')
        self.assertEqual(d2.details, 'foo')
        d.parents.append(d2)
        self.assertEqual(len(d.parents), 1)
        self.assertNotEqual(d, d2)
        l3 = ihm.location.PDBLocation('1cde', version='foo', details='bar')
        d3 = ihm.dataset.Dataset(l3, details='bar')
        t = ihm.geometry.Transformation(
            rot_matrix=[[-0.64, 0.09, 0.77], [0.76, -0.12, 0.64],
                        [0.15, 0.99, 0.01]],
            tr_vector=[1., 2., 3.])
        td = ihm.dataset.TransformedDataset(d3, transform=t)
        d.parents.append(td)
        self.assertEqual(len(d.parents), 2)

    def test_dataset_allow_duplicates(self):
        """Test Dataset base class with allow_duplicates=True"""
        loc = ihm.location.PDBLocation('1abc', version='foo', details='bar')
        d = ihm.dataset.Dataset(loc)
        d._allow_duplicates = True
        self.assertEqual(d._eq_vals(), id(d))
        d2 = ihm.dataset.Dataset(loc)
        d2._allow_duplicates = True
        self.assertNotEqual(d, d2)

    def test_add_primary_no_parents(self):
        """Test add_primary() method, no parents"""
        l1 = ihm.location.PDBLocation('1abc', version='foo', details='bar')
        d1 = ihm.dataset.Dataset(l1)
        l2 = ihm.location.PDBLocation('1xyz', version='foo', details='bar')
        d2 = ihm.dataset.Dataset(l2)
        d1.add_primary(d2)
        self.assertEqual(d1.parents, [d2])

    def test_add_primary_one_parent(self):
        """Test add_primary() method, one parent"""
        l1 = ihm.location.PDBLocation('1abc', version='foo', details='bar')
        d1 = ihm.dataset.Dataset(l1)
        l2 = ihm.location.PDBLocation('1xyz', version='foo', details='bar')
        d2 = ihm.dataset.Dataset(l2)
        l3 = ihm.location.PDBLocation('2def', version='foo', details='bar')
        d3 = ihm.dataset.Dataset(l3)
        d1.parents.append(d2)
        d1.add_primary(d3)
        self.assertEqual(d1.parents, [d2])
        self.assertEqual(d2.parents, [d3])

    def test_add_primary_two_parents(self):
        """Test add_primary() method, two parents"""
        l1 = ihm.location.PDBLocation('1abc', version='foo', details='bar')
        d1 = ihm.dataset.Dataset(l1)
        l2 = ihm.location.PDBLocation('1xyz', version='foo', details='bar')
        d2 = ihm.dataset.Dataset(l2)
        l3 = ihm.location.PDBLocation('2def', version='foo', details='bar')
        d3 = ihm.dataset.Dataset(l3)
        _ = ihm.location.PDBLocation('2ghi', version='foo', details='bar')
        d4 = ihm.dataset.Dataset(l3)
        d1.parents.extend((d2, d3))
        self.assertRaises(ValueError, d1.add_primary, d4)

    def test_cxms_dataset(self):
        """Test CXMSDataset"""
        loc = ihm.location.FileLocation(repo='mydoi', path='a')
        d = ihm.dataset.CXMSDataset(loc)
        self.assertEqual(d.data_type, 'Crosslinking-MS data')

    def test_hdx_dataset(self):
        """Test HDXDataset"""
        loc = ihm.location.FileLocation(repo='mydoi', path='a')
        d = ihm.dataset.HDXDataset(loc)
        self.assertEqual(d.data_type, 'H/D exchange data')

    def test_mass_spec_dataset(self):
        """Test MassSpecDataset"""
        loc = ihm.location.FileLocation(repo='mydoi', path='a')
        d = ihm.dataset.MassSpecDataset(loc)
        self.assertEqual(d.data_type, 'Mass Spectrometry data')

    def test_em_density_dataset(self):
        """Test EMDensityDataset"""
        loc = ihm.location.FileLocation(repo='mydoi', path='a')
        d = ihm.dataset.EMDensityDataset(loc)
        self.assertEqual(d.data_type, '3DEM volume')

    def test_pdb_dataset(self):
        """Test PDBDataset"""
        loc = ihm.location.FileLocation(repo='mydoi', path='a')
        d = ihm.dataset.PDBDataset(loc)
        self.assertEqual(d.data_type, 'Experimental model')
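
    # A minimal usage sketch (illustrative only, not one of the original
    # tests; the file name below is hypothetical) showing how the dataset
    # classes exercised above can be combined to record provenance:
    #
    #     loc = ihm.location.FileLocation(repo='mydoi', path='xlinks.csv')
    #     xl_data = ihm.dataset.CXMSDataset(loc)
    #     parent = ihm.dataset.PDBDataset(
    #         ihm.location.PDBLocation('1abc', version='foo', details='bar'))
    #     xl_data.parents.append(parent)     # or xl_data.add_primary(parent)
    #     assert xl_data.data_type == 'Crosslinking-MS data'
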
    def test_comp_model_dataset(self):
        """Test ComparativeModelDataset"""
        loc = ihm.location.FileLocation(repo='mydoi', path='a')
        d = ihm.dataset.ComparativeModelDataset(loc)
        self.assertEqual(d.data_type, 'Comparative model')

    def test_int_model_dataset(self):
        """Test IntegrativeModelDataset"""
        loc = ihm.location.FileLocation(repo='mydoi', path='a')
        d = ihm.dataset.IntegrativeModelDataset(loc)
        self.assertEqual(d.data_type, 'Integrative model')

    def test_de_novo_model_dataset(self):
        """Test DeNovoModelDataset"""
        loc = ihm.location.FileLocation(repo='mydoi', path='a')
        d = ihm.dataset.DeNovoModelDataset(loc)
        self.assertEqual(d.data_type, 'De Novo model')

    def test_nmr_dataset(self):
        """Test NMRDataset"""
        loc = ihm.location.FileLocation(repo='mydoi', path='a')
        d = ihm.dataset.NMRDataset(loc)
        self.assertEqual(d.data_type, 'NMR data')

    def test_mutagenesis_dataset(self):
        """Test MutagenesisDataset"""
        loc = ihm.location.FileLocation(repo='mydoi', path='a')
        d = ihm.dataset.MutagenesisDataset(loc)
        self.assertEqual(d.data_type, 'Mutagenesis data')

    def test_em2d_class_dataset(self):
        """Test EM2DClassDataset"""
        loc = ihm.location.FileLocation(repo='mydoi', path='a')
        d = ihm.dataset.EM2DClassDataset(loc)
        self.assertEqual(d.data_type, '2DEM class average')

    def test_em_micrographs_dataset(self):
        """Test EMMicrographsDataset"""
        loc = ihm.location.FileLocation(repo='mydoi', path='a')
        d = ihm.dataset.EMMicrographsDataset(loc)
        self.assertEqual(d.data_type, 'EM raw micrographs')

    def test_sas_dataset(self):
        """Test SASDataset"""
        loc = ihm.location.FileLocation(repo='mydoi', path='a')
        d = ihm.dataset.SASDataset(loc)
        self.assertEqual(d.data_type, 'SAS data')

    def test_fret_dataset(self):
        """Test FRETDataset"""
        loc = ihm.location.FileLocation(repo='mydoi', path='a')
        d = ihm.dataset.FRETDataset(loc)
        self.assertEqual(d.data_type, 'Single molecule FRET data')

    def test_ensemble_fret_dataset(self):
        """Test EnsembleFRETDataset"""
        loc = ihm.location.FileLocation(repo='mydoi', path='a')
        d = ihm.dataset.EnsembleFRETDataset(loc)
        self.assertEqual(d.data_type, 'Ensemble FRET data')

    def test_y2h_dataset(self):
        """Test YeastTwoHybridDataset"""
        loc = ihm.location.FileLocation(repo='mydoi', path='a')
        d = ihm.dataset.YeastTwoHybridDataset(loc)
        self.assertEqual(d.data_type, 'Yeast two-hybrid screening data')

    def test_genetic_dataset(self):
        """Test GeneticInteractionsDataset"""
        loc = ihm.location.FileLocation(repo='mydoi', path='a')
        d = ihm.dataset.GeneticInteractionsDataset(loc)
        self.assertEqual(
            d.data_type, 'Quantitative measurements of genetic interactions')

    def test_epr_dataset(self):
        """Test EPRDataset"""
        loc = ihm.location.FileLocation(repo='mydoi', path='a')
        d = ihm.dataset.EPRDataset(loc)
        self.assertEqual(d.data_type, 'EPR data')

    def test_xray_diffraction_dataset(self):
        """Test XRayDiffractionDataset"""
        loc = ihm.location.FileLocation(repo='mydoi', path='a')
        d = ihm.dataset.XRayDiffractionDataset(loc)
        self.assertEqual(d.data_type, 'X-ray diffraction data')

    def test_hydroxyl_radical_footprinting_dataset(self):
        """Test HydroxylRadicalFootprintingDataset"""
        loc = ihm.location.FileLocation(repo='mydoi', path='a')
        d = ihm.dataset.HydroxylRadicalFootprintingDataset(loc)
        self.assertEqual(d.data_type, 'Hydroxyl radical footprinting data')

    def test_dna_footprinting_dataset(self):
        """Test DNAFootprintingDataset"""
        loc = ihm.location.FileLocation(repo='mydoi', path='a')
        d = ihm.dataset.DNAFootprintingDataset(loc)
        self.assertEqual(d.data_type, 'DNA footprinting data')

    def test_predicted_contacts_dataset(self):
        """Test
PredictedContactsDataset""" loc = ihm.location.FileLocation(repo='mydoi', path='a') d = ihm.dataset.PredictedContactsDataset(loc) self.assertEqual(d.data_type, 'Predicted contacts') def test_duplicate_datasets_details(self): """Datasets with differing details should be considered duplicates""" with utils.temporary_directory() as tmpdir: fname = os.path.join(tmpdir, 'test.pdb') _make_test_file(fname) l1 = ihm.location.InputFileLocation(fname, details='test details') _ = ihm.dataset.PDBDataset(l1) l2 = ihm.location.InputFileLocation(fname, details='other details') d2 = ihm.dataset.PDBDataset(l2) self.assertEqual(l1, l2) d3 = ihm.dataset.PDBDataset(l2, details='other dataset details') self.assertEqual(d2, d3) def test_duplicate_locations(self): """Datasets with same location should be considered duplicates""" with utils.temporary_directory() as tmpdir: fname1 = os.path.join(tmpdir, 'test1.pdb') fname2 = os.path.join(tmpdir, 'test2.pdb') _make_test_file(fname1) _make_test_file(fname2) loc1 = ihm.location.InputFileLocation(fname1) loc2 = ihm.location.InputFileLocation(fname2) # Identical datasets in the same location aren't duplicated pdb1 = ihm.dataset.PDBDataset(loc1) pdb2 = ihm.dataset.PDBDataset(loc1) self.assertEqual(pdb1, pdb2) # Datasets in different locations are OK pdb1 = ihm.dataset.PDBDataset(loc1) pdb2 = ihm.dataset.PDBDataset(loc2) self.assertNotEqual(pdb1, pdb2) if __name__ == '__main__': unittest.main() python-ihm-2.7/test/test_dictionary.py000066400000000000000000000422531503573337200202310ustar00rootroot00000000000000import utils import os import unittest import sys from io import StringIO, BytesIO from test_format_bcif import MockMsgPack, MockFh, _add_msgpack TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) utils.set_search_paths(TOPDIR) import ihm.dictionary try: from ihm import _format except ImportError: _format = None def add_keyword(name, mandatory, category): k = ihm.dictionary.Keyword() k.name, k.mandatory = name, mandatory category.keywords[k.name] = k return k def make_test_dictionary(): d = ihm.dictionary.Dictionary() c = ihm.dictionary.Category() c.name = 'test_mandatory_category' c.mandatory = True add_keyword("foo", False, c) k = add_keyword("bar", True, c) k.item_type = ihm.dictionary.ItemType('int', 'numb', '[+-]?[0-9]+') d.categories[c.name] = c c = ihm.dictionary.Category() c.name = 'test_optional_category' c.mandatory = False k = add_keyword("foo", False, c) # For testing we only accept upper case values k.item_type = ihm.dictionary.ItemType('text', 'char', r'[ \n\t_()A-Z]+') k = add_keyword("bar", True, c) k.enumeration = set(('enum1', 'enum2')) add_keyword("baz", False, c) d.categories[c.name] = c d.linked_items = {'_test_optional_category.baz': '_test_mandatory_category.foo', '_test_optional_category.foo': '_entity.id'} return d def make_other_test_dictionary(): d = ihm.dictionary.Dictionary() c = ihm.dictionary.Category() c.name = 'ext_category' c.mandatory = False add_keyword("foo", False, c) d.categories[c.name] = c d.linked_items = {'_ext_category.foo': '_test_mandatory_category.foo'} return d class Tests(unittest.TestCase): def test_keyword_enum_case_insen(self): """Test KeywordEnumeration (case insensitive)""" x = ihm.dictionary._KeywordEnumeration() x.case_sensitive = False self.assertNotIn('foo', x) x.add('foo') self.assertNotIn('bar', x) self.assertIn('foo', x) self.assertIn('FOO', x) x.add('bar') self.assertIn('BAR', x) def test_keyword_enum_case_sen(self): """Test KeywordEnumeration (case sensitive)""" x = 
ihm.dictionary._KeywordEnumeration() self.assertNotIn('foo', x) x.add('foo') self.assertNotIn('bar', x) self.assertIn('foo', x) self.assertNotIn('FOO', x) x.add('bar') self.assertNotIn('BAR', x) def test_read(self): """Test read() function""" # Note that _item.category_id is intentionally missing from # save_unknown_code cif = r""" loop_ _item_type_list.code _item_type_list.primitive_code _item_type_list.construct code char '[][_,.;:"&<>()/\{}'`~!@#$%A-Za-z0-9*|+-]*' ucode uchar '[][_,.;:"&<>()/\{}'`~!@#$%A-Za-z0-9*|+-]*' save_foo _category.id test_category1 _category.mandatory_code yes save_ save_bar loop_ _item.name _item.category_id _item.mandatory_code '_test_category1.bar' test_category1 no '_test_category3.bar' test_category3 yes '_test_category4.foo' test_category4 yes _item_type.code code save_ save_unknown_code _item.name '_test_category1.unknown_code' _item.mandatory_code no _item_type.code atcode save_ save_missing_code _item.name '_test_category1.missing_code' _item.category_id test_category1 _item.mandatory_code no save_ save_insensitive_code _item.name '_test_category1.insensitive_code' _item.category_id test_category1 _item.mandatory_code no _item_type.code ucode save_ save_baz _item.name '_test_category2.baz' _item.category_id test_category2 _item.mandatory_code no _item_type.code ucode _item_linked.child_name '_test_category2.baz' _item_linked.parent_name '_test_category1.bar' loop_ _item_enumeration.value "enum 1" "enum 2" save_ save_foo _category.id test_category4 _category.description 'my desc4' _category.mandatory_code yes save_ save_bar _item.name '_test_category4.bar' _item.mandatory_code no _item_type.code ucode save_ """ d = ihm.dictionary.read(StringIO(cif)) self.assertEqual(sorted(d.categories.keys()), ['test_category1', 'test_category2', 'test_category3', 'test_category4']) c1 = d.categories['test_category1'] self.assertTrue(c1.mandatory) self.assertEqual( sorted(c1.keywords.keys()), ['bar', 'insensitive_code', 'missing_code', 'unknown_code']) self.assertFalse(c1.keywords['bar'].mandatory) self.assertIsNone(c1.keywords['bar'].enumeration) self.assertEqual(c1.keywords['bar'].item_type.name, "code") self.assertTrue(c1.keywords['bar'].item_type.case_sensitive) self.assertIsNone(c1.keywords['missing_code'].item_type) self.assertIsNone(c1.keywords['unknown_code'].item_type) self.assertFalse( c1.keywords['insensitive_code'].item_type.case_sensitive) c2 = d.categories['test_category2'] self.assertIsNone(c2.mandatory) self.assertEqual(sorted(c2.keywords.keys()), ["baz"]) self.assertFalse(c2.keywords['baz'].mandatory) self.assertEqual(c2.keywords['baz'].enumeration, set(('enum 1', 'enum 2'))) self.assertFalse(c2.keywords['baz'].enumeration.case_sensitive) self.assertFalse(c2.keywords['baz'].item_type.case_sensitive) c3 = d.categories['test_category3'] self.assertIsNone(c3.mandatory) self.assertEqual(sorted(c3.keywords.keys()), ["bar"]) self.assertTrue(c3.keywords['bar'].mandatory) # Test category that is defined after some keywords c4 = d.categories['test_category4'] self.assertEqual(c4.description, 'my desc4') self.assertTrue(c4.mandatory) self.assertEqual(sorted(c4.keywords.keys()), ["bar", "foo"]) self.assertEqual(d.linked_items, {'_test_category2.baz': '_test_category1.bar'}) # Make sure that files can be read in binary mode d = ihm.dictionary.read(BytesIO(cif.encode('latin-1'))) self.assertEqual(sorted(d.categories.keys()), ['test_category1', 'test_category2', 'test_category3', 'test_category4']) def test_add(self): """Test adding two Dictionaries""" d1 = 
make_test_dictionary() d2 = make_other_test_dictionary() d = d1 + d2 self._check_test_dictionary(d1) self._check_other_test_dictionary(d2) self._check_summed_dictionary(d) def test_add_update(self): """Test add Dictionaries that both contain same Category""" d1 = make_test_dictionary() d2 = ihm.dictionary.Dictionary() c = ihm.dictionary.Category() c.name = 'test_mandatory_category' c.mandatory = True add_keyword("baz", False, c) d2.categories[c.name] = c d = d1 + d2 self.assertEqual(sorted(d.categories.keys()), ['test_mandatory_category', 'test_optional_category']) ks = sorted(d.categories['test_mandatory_category'].keywords.keys()) # Category should now contain keywords from both dictionaries self.assertEqual(ks, ['bar', 'baz', 'foo']) def test_category_update(self): """Test Category._update()""" cman = ihm.dictionary.Category() cman.name = 'test_mandatory_category' cman.description = 'my description' cman.mandatory = True add_keyword("foo", False, cman) coth = ihm.dictionary.Category() coth.name = 'test_mandatory_category' coth.description = 'desc2' coth.mandatory = False add_keyword("bar", False, coth) cman._update(coth) self.assertIs(cman.mandatory, True) self.assertEqual(cman.description, 'my description') self.assertEqual(sorted(cman.keywords.keys()), ['bar', 'foo']) cnone = ihm.dictionary.Category() cnone.name = 'test_mandatory_category' cnone._update(coth) self.assertIs(cnone.mandatory, False) self.assertEqual(cnone.description, 'desc2') self.assertEqual(sorted(cnone.keywords.keys()), ['bar']) def test_add_inplace(self): """Test adding two Dictionaries in place""" d1 = make_test_dictionary() d2 = make_other_test_dictionary() d1 += d2 self._check_other_test_dictionary(d2) self._check_summed_dictionary(d1) def _check_test_dictionary(self, d): self.assertEqual(sorted(d.categories.keys()), ['test_mandatory_category', 'test_optional_category']) self.assertEqual(d.linked_items, {'_test_optional_category.baz': '_test_mandatory_category.foo', '_test_optional_category.foo': '_entity.id'}) def _check_other_test_dictionary(self, d): self.assertEqual(sorted(d.categories.keys()), ['ext_category']) self.assertEqual(d.linked_items, {'_ext_category.foo': '_test_mandatory_category.foo'}) def _check_summed_dictionary(self, d): self.assertEqual(sorted(d.categories.keys()), ['ext_category', 'test_mandatory_category', 'test_optional_category']) self.assertEqual(d.linked_items, {'_test_optional_category.baz': '_test_mandatory_category.foo', '_test_optional_category.foo': '_entity.id', '_ext_category.foo': '_test_mandatory_category.foo'}) def test_validate_ok(self): """Test successful validation""" d = make_test_dictionary() d.validate(StringIO("_test_mandatory_category.bar 1")) def test_validate_ok_binary_cif(self): """Test successful validation of BinaryCIF input""" sys.modules['msgpack'] = MockMsgPack d = make_test_dictionary() fh = MockFh() writer = ihm.format_bcif.BinaryCifWriter(fh) writer.start_block('ihm') with writer.category('_test_mandatory_category') as loc: loc.write(bar=1) with writer.category('_test_optional_category') as loc: loc.write(bar='enum1') writer.flush() if _format: # Convert Python object into msgpack format for the C parser bio = BytesIO() _add_msgpack(fh.data, bio) bio.seek(0) fh.data = bio d.validate(fh.data, format='BCIF') def test_validate_multi_data_ok(self): """Test successful validation of multiple data blocks""" d = make_test_dictionary() d.validate(StringIO(""" data_block1 _test_mandatory_category.bar 1 data_block2 _test_mandatory_category.bar 2 """)) def 
test_validate_missing_mandatory_category(self): """Test validation failure with missing mandatory category""" d = make_test_dictionary() self.assertRaises(ihm.dictionary.ValidatorError, d.validate, StringIO("_struct.entry_id id1")) def test_validate_missing_mandatory_keyword(self): """Test validation failure with missing mandatory keyword""" d = make_test_dictionary() # mandatory 'bar' is marked unknown self.assertRaises(ihm.dictionary.ValidatorError, d.validate, StringIO("_test_mandatory_category.bar ?")) # mandatory 'bar' is missing entirely self.assertRaises(ihm.dictionary.ValidatorError, d.validate, StringIO("_test_mandatory_category.foo xy")) def test_validate_enumeration(self): """Test validation of enumerated values""" prefix = """_test_mandatory_category.bar 1 _test_optional_category.bar """ d = make_test_dictionary() # Value in the enumeration is OK d.validate(StringIO(prefix + 'enum1')) # Omitted value is OK d.validate(StringIO(prefix + '.')) # Value not in the enumeration is not OK self.assertRaises(ihm.dictionary.ValidatorError, d.validate, StringIO(prefix + 'bad')) def test_validate_item_type_int(self): """Test validation of int item type""" prefix = "_test_mandatory_category.bar " d = make_test_dictionary() # Int value is OK d.validate(StringIO(prefix + '+45')) d.validate(StringIO(prefix + '-4')) d.validate(StringIO(prefix + '5')) # Omitted value is OK d.validate(StringIO(prefix + '.')) # Non-int value is not OK self.assertRaises(ihm.dictionary.ValidatorError, d.validate, StringIO(prefix + '45A')) self.assertRaises(ihm.dictionary.ValidatorError, d.validate, StringIO(prefix + 'foo')) self.assertRaises(ihm.dictionary.ValidatorError, d.validate, StringIO(prefix + '++44')) self.assertRaises(ihm.dictionary.ValidatorError, d.validate, StringIO(prefix + '44+')) def test_validate_item_type_multiline(self): """Test validation of multiline item type""" # This regex '[ \n\t_()A-Z]+' includes \n and \t special characters, # which should match newline and tab, not literal \n and \t prefix = """_test_mandatory_category.bar 1 _test_optional_category.bar enum1 _test_optional_category.foo """ d = make_test_dictionary() # OK strings d.validate(StringIO(prefix + '"FOO BAR"')) d.validate(StringIO(prefix + '"FOO_BAR"')) d.validate(StringIO(prefix + '"FOO\tBAR"')) d.validate(StringIO(prefix + '\n;FOO\nBAR\n;')) # Bad strings self.assertRaises(ihm.dictionary.ValidatorError, d.validate, StringIO(prefix + '"foo BAR"')) self.assertRaises(ihm.dictionary.ValidatorError, d.validate, StringIO(prefix + '"FOO\\BAR"')) self.assertRaises(ihm.dictionary.ValidatorError, d.validate, StringIO(prefix + 'n')) self.assertRaises(ihm.dictionary.ValidatorError, d.validate, StringIO(prefix + 't')) def test_item_type_bad_regex(self): """Make sure that ItemType handles invalid regex""" # "+" is not a valid Python regex; it should be skipped and will # match any value it = ihm.dictionary.ItemType("test", "text", "+") self.assertTrue(it.regex.match("something")) self.assertTrue(it.regex.match(None)) def test_validate_linked_items(self): """Test validation of linked items""" prefix = "_test_mandatory_category.bar 1\n" d = make_test_dictionary() c = ihm.dictionary.Category() c.name = 'chem_comp_atom' add_keyword("foo", False, c) d.categories[c.name] = c d.linked_items['_test_optional_category.bar'] \ = '_chem_comp_atom.atom_id' # OK: same key in child and parent d.validate(StringIO(prefix + "_test_optional_category.bar .\n" "_test_optional_category.baz 42\n" "_test_mandatory_category.foo 42")) # OK: missing parent key but in 
category not in the dictionary d.validate(StringIO(prefix + "_test_optional_category.bar .\n" "_test_optional_category.foo AB")) # OK: missing parent key but chem_comp_* is explicitly excluded # from validation d.validate(StringIO(prefix + "_test_optional_category.bar enum1")) # Not OK: parent is missing or does not include the child key self.assertRaises(ihm.dictionary.ValidatorError, d.validate, StringIO(prefix + "_test_optional_category.bar .\n" "_test_optional_category.baz 42\n" "_test_mandatory_category.foo 24")) self.assertRaises(ihm.dictionary.ValidatorError, d.validate, StringIO(prefix + "_test_optional_category.bar .\n" "_test_optional_category.baz 42")) def test_unknown_category(self): """Test validator failure for unknown categories""" d = make_test_dictionary() self.assertRaises( ihm.dictionary.ValidatorError, d.validate, StringIO("_test_mandatory_category.bar 1\n_foo.bar baz")) def test_unknown_keyword(self): """Test validator failure for unknown keywords""" d = make_test_dictionary() self.assertRaises( ihm.dictionary.ValidatorError, d.validate, StringIO("_test_mandatory_category.bar 1\n" "_test_mandatory_category.unk 42")) if __name__ == '__main__': unittest.main() python-ihm-2.7/test/test_dumper.py000066400000000000000000006673301503573337200173710ustar00rootroot00000000000000import utils import datetime import os import unittest import warnings import sys from io import StringIO TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) utils.set_search_paths(TOPDIR) import ihm.dumper import ihm.format import ihm.location import ihm.representation import ihm.startmodel import ihm.dataset import ihm.protocol import ihm.analysis import ihm.model import ihm.reference import ihm.restraint import ihm.geometry import ihm.source import ihm.flr import ihm.multi_state_scheme from test_format_bcif import MockFh, MockMsgPack def _get_dumper_output(dumper, system, check=True): dumper._check = check fh = StringIO() writer = ihm.format.CifWriter(fh) dumper.dump(system, writer) return fh.getvalue() def _get_dumper_bcif_output(dumper, system): fh = MockFh() writer = ihm.format_bcif.BinaryCifWriter(fh) sys.modules['msgpack'] = MockMsgPack dumper.dump(system, writer) writer.flush() return fh.data class Tests(unittest.TestCase): def test_write(self): """Test write() function""" sys1 = ihm.System(id='system1') sys2 = ihm.System(id='system 2+3') fh = StringIO() ihm.dumper.write(fh, [sys1, sys2]) lines = fh.getvalue().split('\n') self.assertEqual(lines[:2], ["data_system1", "_entry.id system1"]) self.assertEqual(lines[16:18], ["data_system23", "_entry.id 'system 2+3'"]) def test_write_checks(self): """Test write() function with checks enabled or disabled""" s = ihm.System(id='system1') s.entities.append(ihm.Entity('AHC')) # Duplicate entity s.entities.append(ihm.Entity('AHC')) fh = StringIO() # Duplicate entity should cause a check failure by default self.assertRaises(ValueError, ihm.dumper.write, fh, [s]) # But OK if checks are disabled fh = StringIO() ihm.dumper.write(fh, [s], check=False) self.assertEqual(s.entities[0]._id, 1) self.assertEqual(s.entities[1]._id, 2) def test_write_custom_dumper(self): """Test write() function with custom dumper""" class MyDumper(ihm.dumper.Dumper): def dump(self, system, writer): with writer.category("_custom_category") as loc: loc.write(myfield="foo", field2="bar") sys1 = ihm.System(id='system1') fh = StringIO() ihm.dumper.write(fh, [sys1], dumpers=[MyDumper]) lines = fh.getvalue().split('\n') self.assertEqual(sorted(lines[-3:-1]), 
['_custom_category.field2 bar', '_custom_category.myfield foo']) def test_dumper(self): """Test Dumper base class""" dumper = ihm.dumper.Dumper() dumper.finalize(None) dumper.dump(None, None) def test_prettyprint_seq(self): """Test _prettyprint_seq() function""" seq = ['x' * 30, 'y' * 20, 'z' * 10] # No line breaks self.assertEqual(list(ihm.dumper._prettyprint_seq(seq, 100)), ['x' * 30 + 'y' * 20 + 'z' * 10]) # Break inserted between sequence items self.assertEqual(list(ihm.dumper._prettyprint_seq(seq, 55)), ['x' * 30 + 'y' * 20, 'z' * 10]) # Items longer than width will exceed line length self.assertEqual(list(ihm.dumper._prettyprint_seq(seq, 25)), ['x' * 30, 'y' * 20, 'z' * 10]) # Empty sequence self.assertEqual(list(ihm.dumper._prettyprint_seq([], 25)), []) def test_entry_dumper(self): """Test EntryDumper""" system = ihm.System(id='test_model') dumper = ihm.dumper._EntryDumper() out = _get_dumper_output(dumper, system) self.assertEqual(out, "data_test_model\n_entry.id test_model\n") def test_entry_dumper_data_chars(self): """Test allowed characters in data_ block with EntryDumper""" system = ihm.System(id='foo99-bar94_ABC $#^% x') dumper = ihm.dumper._EntryDumper() out = _get_dumper_output(dumper, system).split('\n')[0] # Whitespace and special characters (except - _) should be removed self.assertEqual(out, "data_foo99-bar94_ABCx") def test_audit_conform_dumper(self): """Test AuditConformDumper""" system = ihm.System() dumper = ihm.dumper._AuditConformDumper() out = _get_dumper_output(dumper, system) lines = sorted(out.split('\n')) self.assertEqual(lines[1].split()[0], "_audit_conform.dict_location") self.assertEqual(lines[2].rstrip('\r\n'), "_audit_conform.dict_name mmcif_ihm.dic") self.assertEqual(lines[3].split()[0], "_audit_conform.dict_version") def test_struct_dumper(self): """Test StructDumper""" system = ihm.System(title='test model', model_details="test details") dumper = ihm.dumper._StructDumper() out = _get_dumper_output(dumper, system) self.assertEqual(out, """_struct.entry_id model _struct.pdbx_model_details 'test details' _struct.pdbx_structure_determination_methodology integrative _struct.title 'test model' """) def test_comment_dumper(self): """Test CommentDumper""" system = ihm.System() system.comments.extend(("Comment 1", "Comment 2")) dumper = ihm.dumper._CommentDumper() out = _get_dumper_output(dumper, system) self.assertEqual(out, """# Comment 1 # Comment 2 """) # Comments should be ignored in BinaryCIF output out = _get_dumper_bcif_output(dumper, system) self.assertEqual(out['dataBlocks'], []) def test_software(self): """Test SoftwareDumper""" system = ihm.System() c1 = ihm.Citation( pmid='25161197', title='foo', journal="Mol Cell Proteomics", volume=13, page_range=(2927, 2943), year=2014, authors=['auth1', 'auth2', 'auth3'], doi='doi1') system.software.append(ihm.Software( name='test', classification='test code', description='Some test program', version=1, location='http://test.org')) system.software.append(ihm.Software( name='foo', classification='test code', description='Other test program', location='http://test2.org', citation=c1)) # Duplicate should be removed system.software.append(ihm.Software( name='foo', classification='x', description='y', location='z')) dumper = ihm.dumper._CitationDumper() dumper.finalize(system) dumper = ihm.dumper._SoftwareDumper() dumper.finalize(system) self.assertEqual(len(dumper._software_by_id), 2) # Repeated calls to finalize() should yield identical results dumper.finalize(system) self.assertEqual(len(dumper._software_by_id), 
2) out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _software.pdbx_ordinal _software.name _software.classification _software.description _software.version _software.type _software.location _software.citation_id 1 test 'test code' 'Some test program' 1 program http://test.org . 2 foo 'test code' 'Other test program' . program http://test2.org 1 # """) def test_citation(self): """Test CitationDumper""" system = ihm.System() c1 = ihm.Citation( pmid='25161197', title="Structural characterization by cross-linking reveals the\n" "detailed architecture of a coatomer-related heptameric\n" "module from the nuclear pore complex.", journal="Mol Cell Proteomics", volume=13, page_range=(2927, 2943), year=2014, authors=['Shi Y', 'Fernandez-Martinez J', 'Tjioe E', 'Pellarin R', 'Kim SJ', 'Williams R', 'Schneidman-Duhovny D', 'Sali A', 'Rout MP', 'Chait BT'], doi='10.1074/mcp.M114.041673') system.citations.extend((c1, c1)) # duplicates should be removed dumper = ihm.dumper._CitationDumper() dumper.finalize(system) # Assign IDs out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _citation.id _citation.title _citation.journal_abbrev _citation.journal_volume _citation.page_first _citation.page_last _citation.year _citation.pdbx_database_id_PubMed _citation.pdbx_database_id_DOI 1 ;Structural characterization by cross-linking reveals the detailed architecture of a coatomer-related heptameric module from the nuclear pore complex. ; 'Mol Cell Proteomics' 13 2927 2943 2014 25161197 10.1074/mcp.M114.041673 # # loop_ _citation_author.citation_id _citation_author.name _citation_author.ordinal 1 'Shi Y' 1 1 'Fernandez-Martinez J' 2 1 'Tjioe E' 3 1 'Pellarin R' 4 1 'Kim SJ' 5 1 'Williams R' 6 1 'Schneidman-Duhovny D' 7 1 'Sali A' 8 1 'Rout MP' 9 1 'Chait BT' 10 # """) # Handle no last page c1.page_range = 'e1637' dumper = ihm.dumper._CitationDumper() dumper.finalize(system) # Assign IDs out = _get_dumper_output(dumper, system) self.assertIn("'Mol Cell Proteomics' 13 e1637 . 2014 ", out) def test_citation_primary(self): """Test CitationDumper with a primary citation""" system = ihm.System() c1 = ihm.Citation(pmid='x', title='y', journal='z', year=2014, authors=[], volume=1, page_range=1, doi='d') c2 = ihm.Citation(pmid='x2', title='y2', journal='z2', year=2015, authors=[], volume=1, page_range=1, doi='e', is_primary=True) system.citations.extend((c1, c2)) dumper = ihm.dumper._CitationDumper() dumper.finalize(system) # Assign IDs out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _citation.id _citation.title _citation.journal_abbrev _citation.journal_volume _citation.page_first _citation.page_last _citation.year _citation.pdbx_database_id_PubMed _citation.pdbx_database_id_DOI primary y2 z2 1 1 . 2015 x2 e 2 y z 1 1 . 
2014 x d # """) def test_citation_multiple_primary(self): """Test CitationDumper with multiple primary citations""" system = ihm.System() c1 = ihm.Citation(pmid='x', title='y', journal='z', year=2014, authors=[], volume=1, page_range=1, doi='d', is_primary=True) c2 = ihm.Citation(pmid='x2', title='y2', journal='z2', year=2015, authors=[], volume=1, page_range=1, doi='e', is_primary=True) system.citations.extend((c1, c2)) dumper = ihm.dumper._CitationDumper() self.assertRaises(ValueError, dumper.finalize, system) def test_audit_author_empty(self): """Test AuditAuthorDumper with empty list""" system = ihm.System() c1 = ihm.Citation( pmid='25161197', title='foo', journal="Mol Cell Proteomics", volume=13, page_range=(2927, 2943), year=2014, authors=['auth1', 'auth2', 'auth3'], doi='doi1') c2 = ihm.Citation( pmid='45161197', title='bar', journal="Mol Cell Proteomics", volume=13, page_range=(2927, 2943), year=2014, authors=['auth2', 'auth4'], doi='doi2') system.citations.extend((c1, c2)) # Citations indirectly referenced by software should *not* be used c3 = ihm.Citation( pmid='455', title='baz', journal="Mol Cell Proteomics", volume=13, page_range=(2927, 2943), year=2014, authors=['auth5', 'auth6', 'auth7'], doi='doi3') software = ihm.Software(name='test', classification='test code', description='Some test program', version=1, location='http://test.org', citation=c3) system.software.append(software) dumper = ihm.dumper._AuditAuthorDumper() out = _get_dumper_output(dumper, system) # auth2 is repeated in the input; we should see it only once in the # output self.assertEqual(out, """# loop_ _audit_author.name _audit_author.pdbx_ordinal auth1 1 auth2 2 auth3 3 auth4 4 # """) def test_omitted_unknown(self): """Test that Dumpers handle omitted/unknown values correctly""" system = ihm.System() system.authors.extend((None, ihm.unknown, '.', '?')) dumper = ihm.dumper._AuditAuthorDumper() out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _audit_author.name _audit_author.pdbx_ordinal . 1 ? 2 '.' 3 '?' 4 # """) def test_audit_author(self): """Test AuditAuthorDumper""" system = ihm.System() system.authors.extend(('auth1', 'auth2', 'auth3')) dumper = ihm.dumper._AuditAuthorDumper() out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _audit_author.name _audit_author.pdbx_ordinal auth1 1 auth2 2 auth3 3 # """) def test_audit_revision_dumper(self): """Test AuditRevisionDumper""" system = ihm.System() rev = ihm.Revision(data_content_type='Structure model', minor=2, major=0, date=datetime.date(year=1979, month=5, day=3)) rev.groups.extend(('group1', 'group2')) rev.categories.extend(('cat1', 'cat2')) rev.items.append('item1') d = ihm.RevisionDetails(provider="repository", type="Initial release", description="Test desc") rev.details.append(d) system.revisions.append(rev) rev = ihm.Revision(data_content_type=None, major=None, minor=None, date=None) system.revisions.append(rev) rev = ihm.Revision(data_content_type=None, major=None, minor=None, date=ihm.unknown) system.revisions.append(rev) dumper = ihm.dumper._AuditRevisionDumper() dumper.finalize(system) out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _pdbx_audit_revision_history.ordinal _pdbx_audit_revision_history.data_content_type _pdbx_audit_revision_history.major_revision _pdbx_audit_revision_history.minor_revision _pdbx_audit_revision_history.revision_date 1 'Structure model' 0 2 1979-05-03 2 . . . . 3 . . . ? 
# # loop_ _pdbx_audit_revision_details.ordinal _pdbx_audit_revision_details.revision_ordinal _pdbx_audit_revision_details.data_content_type _pdbx_audit_revision_details.provider _pdbx_audit_revision_details.type _pdbx_audit_revision_details.description 1 1 'Structure model' repository 'Initial release' 'Test desc' # # loop_ _pdbx_audit_revision_group.ordinal _pdbx_audit_revision_group.revision_ordinal _pdbx_audit_revision_group.data_content_type _pdbx_audit_revision_group.group 1 1 'Structure model' group1 2 1 'Structure model' group2 # # loop_ _pdbx_audit_revision_category.ordinal _pdbx_audit_revision_category.revision_ordinal _pdbx_audit_revision_category.data_content_type _pdbx_audit_revision_category.category 1 1 'Structure model' cat1 2 1 'Structure model' cat2 # # loop_ _pdbx_audit_revision_item.ordinal _pdbx_audit_revision_item.revision_ordinal _pdbx_audit_revision_item.data_content_type _pdbx_audit_revision_item.item 1 1 'Structure model' item1 # """) def test_data_usage_dumper(self): """Test DataUsageDumper""" system = ihm.System() system.data_usage.append( ihm.License("some license", url="someurl", name="somename")) system.data_usage.append(ihm.Disclaimer("some disclaimer")) system.data_usage.append(ihm.DataUsage("misc usage")) dumper = ihm.dumper._DataUsageDumper() dumper.finalize(system) out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _pdbx_data_usage.id _pdbx_data_usage.type _pdbx_data_usage.details _pdbx_data_usage.url _pdbx_data_usage.name 1 license 'some license' someurl somename 2 disclaimer 'some disclaimer' . . 3 other 'misc usage' . . # """) def test_grant(self): """Test GrantDumper""" system = ihm.System() g1 = ihm.Grant(funding_organization="NIH", country="United States", grant_number="foo") g2 = ihm.Grant(funding_organization="NSF", country="United States", grant_number="bar") system.grants.extend((g1, g2)) dumper = ihm.dumper._GrantDumper() out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _pdbx_audit_support.funding_organization _pdbx_audit_support.country _pdbx_audit_support.grant_number _pdbx_audit_support.ordinal NIH 'United States' foo 1 NSF 'United States' bar 2 # """) def test_entity_dumper(self): """Test EntityDumper""" system = ihm.System() e1 = ihm.Entity('AHC', description='foo', source=ihm.source.Manipulated()) e2 = ihm.Entity('AHCD', description='baz', source=ihm.source.Natural()) e3 = ihm.Entity('AHD', description='bar', source=ihm.source.Synthetic()) water = ihm.Entity([ihm.WaterChemComp()]) system.entities.extend((e1, e2, e3, water)) system.asym_units.append(ihm.AsymUnit(e1, 'foo')) system.asym_units.append(ihm.AsymUnit(e2, 'bar')) system.asym_units.append(ihm.AsymUnit(e3, 'baz1')) system.asym_units.append(ihm.AsymUnit(e3, 'baz2')) system.asym_units.append(ihm.WaterAsymUnit(water, number=10)) dumper = ihm.dumper._EntityDumper() dumper.finalize(system) # Assign IDs out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _entity.id _entity.type _entity.src_method _entity.pdbx_description _entity.formula_weight _entity.pdbx_number_of_molecules _entity.details 1 polymer man foo 366.413 1 . 2 polymer nat baz 499.516 1 . 3 polymer syn bar 378.362 2 . 4 water nat . 18.015 10 . 
# """) def test_entity_duplicates(self): """Test EntityDumper with duplicate non-branched entities""" system = ihm.System() system.entities.append(ihm.Entity('AHC')) system.entities.append(ihm.Entity('AHC')) dumper = ihm.dumper._EntityDumper() self.assertRaises(ValueError, dumper.finalize, system) # Also test with checks skipped dumper._check = False dumper.finalize(system) self.assertEqual(system.entities[0]._id, 1) self.assertEqual(system.entities[1]._id, 2) def test_entity_duplicate_branched(self): """Test EntityDumper with duplicate branched entities""" system = ihm.System() sacc1 = ihm.SaccharideChemComp('NAG') sacc2 = ihm.SaccharideChemComp('FUC') system.entities.append(ihm.Entity([sacc1, sacc2])) system.entities.append(ihm.Entity([sacc1, sacc2])) dumper = ihm.dumper._EntityDumper() dumper.finalize(system) # Assign IDs out = _get_dumper_output(dumper, system) # Duplicate "sequences" are OK for branched entities self.assertEqual(out, """# loop_ _entity.id _entity.type _entity.src_method _entity.pdbx_description _entity.formula_weight _entity.pdbx_number_of_molecules _entity.details 1 branched man . . 0 . 2 branched man . . 0 . # """) def test_entity_empty(self): """Test EntityDumper with empty entity""" system = ihm.System() system.entities.append(ihm.Entity('')) dumper = ihm.dumper._EntityDumper() with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") dumper.finalize(system) # Assign IDs _ = _get_dumper_output(dumper, system) self.assertEqual(len(w), 1) self.assertIn('At least one empty Entity', str(w[0].message)) def test_entity_duplicate_empty(self): """Test EntityDumper with duplicate empty entities""" system = ihm.System() system.entities.append(ihm.Entity('')) system.entities.append(ihm.Entity('')) dumper = ihm.dumper._EntityDumper() with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") dumper.finalize(system) # Assign IDs _ = _get_dumper_output(dumper, system) self.assertIn('At least one empty Entity', str(w[0].message)) def test_entity_src_nat_dumper(self): """Test EntitySrcNatDumper""" system = ihm.System() system.entities.append(ihm.Entity('AHC', description='foo', source=ihm.source.Manipulated())) s = ihm.source.Natural(ncbi_taxonomy_id='1234', scientific_name='Test latin name', common_name='Test common name', strain='test strain') system.entities.append(ihm.Entity('AHCD', description='baz', source=s)) ihm.dumper._EntityDumper().finalize(system) dumper = ihm.dumper._EntitySrcNatDumper() dumper.finalize(system) # Assign IDs out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _entity_src_nat.entity_id _entity_src_nat.pdbx_src_id _entity_src_nat.pdbx_ncbi_taxonomy_id _entity_src_nat.pdbx_organism_scientific _entity_src_nat.common_name _entity_src_nat.strain 2 1 1234 'Test latin name' 'Test common name' 'test strain' # """) def test_entity_src_syn_dumper(self): """Test EntitySrcSynDumper""" system = ihm.System() system.entities.append(ihm.Entity('AHC', description='foo', source=ihm.source.Manipulated())) s = ihm.source.Synthetic(ncbi_taxonomy_id='1234', scientific_name='Test latin name', common_name='Test common name', strain='test strain') system.entities.append(ihm.Entity('AHCD', description='baz', source=s)) ihm.dumper._EntityDumper().finalize(system) dumper = ihm.dumper._EntitySrcSynDumper() dumper.finalize(system) # Assign IDs out = _get_dumper_output(dumper, system) # _pdbx_entity_src_syn.strain is not used in current PDB entries self.assertEqual(out, """# loop_ _pdbx_entity_src_syn.entity_id 
_pdbx_entity_src_syn.pdbx_src_id _pdbx_entity_src_syn.ncbi_taxonomy_id _pdbx_entity_src_syn.organism_scientific _pdbx_entity_src_syn.organism_common_name 2 1 1234 'Test latin name' 'Test common name' # """) def test_entity_src_gen_dumper(self): """Test EntitySrcGenDumper""" system = ihm.System() system.entities.append(ihm.Entity('AHC', description='foo', source=ihm.source.Natural())) system.entities.append( ihm.Entity('AHCG', description='bar', source=ihm.source.Manipulated(gene=None, host=None))) gene = ihm.source.Details(ncbi_taxonomy_id='1234', scientific_name='Test latin name', common_name='Test common name', strain='test strain') host = ihm.source.Details(ncbi_taxonomy_id='5678', scientific_name='Other latin name', common_name='Other common name', strain='other strain') s = ihm.source.Manipulated(gene=gene, host=host) system.entities.append(ihm.Entity('AHCD', description='baz', source=s)) ihm.dumper._EntityDumper().finalize(system) dumper = ihm.dumper._EntitySrcGenDumper() dumper.finalize(system) # Assign IDs out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _entity_src_gen.entity_id _entity_src_gen.pdbx_src_id _entity_src_gen.pdbx_gene_src_ncbi_taxonomy_id _entity_src_gen.pdbx_gene_src_scientific_name _entity_src_gen.gene_src_common_name _entity_src_gen.gene_src_strain _entity_src_gen.pdbx_host_org_ncbi_taxonomy_id _entity_src_gen.pdbx_host_org_scientific_name _entity_src_gen.host_org_common_name _entity_src_gen.pdbx_host_org_strain 3 2 1234 'Test latin name' 'Test common name' 'test strain' 5678 'Other latin name' 'Other common name' 'other strain' # """) def test_struct_ref(self): """Test StructRefDumper""" system = ihm.System() lpep = ihm.LPeptideAlphabet() sd = ihm.reference.SeqDif(seq_id=2, db_monomer=lpep['W'], monomer=lpep['S'], details='Test mutation') # Test non-mandatory db_monomer and monomer sd2 = ihm.reference.SeqDif(seq_id=3, db_monomer=None, monomer=None, details='Test mutation') r1 = ihm.reference.UniProtSequence( db_code='NUP84_YEAST', accession='P52891', sequence='MELWPTYQT', details='test sequence') r1.alignments.append(ihm.reference.Alignment(db_begin=3, seq_dif=[sd, sd2])) r2 = ihm.reference.UniProtSequence( db_code='testcode', accession='testacc', sequence='MELSPTYQT', details='test2') r2.alignments.append(ihm.reference.Alignment( db_begin=4, db_end=5, entity_begin=2, entity_end=3)) r2.alignments.append(ihm.reference.Alignment( db_begin=9, db_end=9, entity_begin=4, entity_end=4)) r3 = ihm.reference.UniProtSequence( db_code='testcode2', accession='testacc2', sequence=None) r3.alignments.append(ihm.reference.Alignment( db_begin=4, db_end=5, entity_begin=2, entity_end=3)) r4 = ihm.reference.UniProtSequence( db_code='testcode3', accession='testacc3', sequence=ihm.unknown) r4.alignments.append(ihm.reference.Alignment( db_begin=4, db_end=5, entity_begin=2, entity_end=3)) # Duplicate reference; should be omitted even though details differ r5 = ihm.reference.UniProtSequence( db_code='NUP84_YEAST', accession='P52891', sequence='MELWPTYQT', details='other test sequence') r5.alignments.append(ihm.reference.Alignment(db_begin=3, seq_dif=[sd, sd2])) system.entities.append(ihm.Entity('LSPT', references=[r1, r2, r3, r4, r5])) # Duplicate reference, but should be included as it pertains to a # different Entity r3a = ihm.reference.UniProtSequence( db_code='testcode2', accession='testacc2', sequence=None) r3a.alignments.append(ihm.reference.Alignment( db_begin=4, db_end=5, entity_begin=2, entity_end=3)) system.entities.append(ihm.Entity('LSPTW', 
references=[r3a])) # Reference containing non-standard residues r6 = ihm.reference.UniProtSequence( db_code='testcode4', accession='testacc4', sequence='AA(FOO)ALS(BAR)TW') system.entities.append(ihm.Entity('LSATW', references=[r6])) r6.alignments.append(ihm.reference.Alignment(db_begin=5, db_end=9)) dumper = ihm.dumper._EntityDumper() dumper.finalize(system) # Assign entity IDs dumper = ihm.dumper._StructRefDumper() dumper.finalize(system) # Assign IDs out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _struct_ref.id _struct_ref.entity_id _struct_ref.db_name _struct_ref.db_code _struct_ref.pdbx_db_accession _struct_ref.pdbx_align_begin _struct_ref.pdbx_seq_one_letter_code _struct_ref.details 1 1 UNP NUP84_YEAST P52891 3 LWPTYQT 'test sequence' 2 1 UNP testcode testacc 4 SPTYQT test2 3 1 UNP testcode2 testacc2 4 . . 4 1 UNP testcode3 testacc3 4 ? . 5 2 UNP testcode2 testacc2 4 . . 6 3 UNP testcode4 testacc4 5 LS(BAR)TW . # # loop_ _struct_ref_seq.align_id _struct_ref_seq.ref_id _struct_ref_seq.seq_align_beg _struct_ref_seq.seq_align_end _struct_ref_seq.db_align_beg _struct_ref_seq.db_align_end 1 1 1 4 3 6 2 2 2 3 4 5 3 2 4 4 9 9 4 3 2 3 4 5 5 4 2 3 4 5 7 5 2 3 4 5 8 6 1 5 5 9 # # loop_ _struct_ref_seq_dif.pdbx_ordinal _struct_ref_seq_dif.align_id _struct_ref_seq_dif.seq_num _struct_ref_seq_dif.db_mon_id _struct_ref_seq_dif.mon_id _struct_ref_seq_dif.details 1 1 2 TRP SER 'Test mutation' 2 1 3 ? ? 'Test mutation' # """) def test_struct_ref_bad_align(self): """Test StructRefDumper with bad entity align""" system = ihm.System() r = ihm.reference.UniProtSequence( db_code='NUP84_YEAST', accession='P52891', sequence='MELSPTYQT', details='test sequence') r.alignments.append(ihm.reference.Alignment(entity_begin=90)) system.entities.append(ihm.Entity('LSPT', references=[r])) dumper = ihm.dumper._EntityDumper() dumper.finalize(system) # Assign entity IDs dumper = ihm.dumper._StructRefDumper() dumper.finalize(system) # Assign IDs with self.assertRaises(IndexError) as cm: _get_dumper_output(dumper, system) self.assertIn('is (90-4), out of range 1-4', str(cm.exception)) # Should work with checks disabled _ = _get_dumper_output(dumper, system, check=False) def test_struct_ref_bad_db_align(self): """Test StructRefDumper with bad db align""" system = ihm.System() r = ihm.reference.UniProtSequence( db_code='NUP84_YEAST', accession='P52891', sequence='M(FOO)LSPTYQT', details='test sequence') r.alignments.append(ihm.reference.Alignment(db_begin=90)) system.entities.append(ihm.Entity('LSPT', references=[r])) dumper = ihm.dumper._EntityDumper() dumper.finalize(system) # Assign entity IDs dumper = ihm.dumper._StructRefDumper() dumper.finalize(system) # Assign IDs with self.assertRaises(IndexError) as cm: _get_dumper_output(dumper, system) self.assertIn('is (90-9), out of range 1-9', str(cm.exception)) # Should work with checks disabled _ = _get_dumper_output(dumper, system, check=False) def test_struct_ref_seq_mismatch(self): """Test StructRefDumper with sequence mismatch""" system = ihm.System() r = ihm.reference.UniProtSequence( db_code='NUP84_YEAST', accession='P52891', sequence='MELSPTYQT', details='test sequence') system.entities.append(ihm.Entity('LSPT', references=[r])) dumper = ihm.dumper._EntityDumper() dumper.finalize(system) # Assign entity IDs dumper = ihm.dumper._StructRefDumper() dumper.finalize(system) # Assign IDs with self.assertRaises(ValueError) as cm: _get_dumper_output(dumper, system) self.assertIn('does not match entity canonical sequence', str(cm.exception)) # 
Should work with checks disabled _ = _get_dumper_output(dumper, system, check=False) def test_struct_ref_seq_dif_outrange(self): """Test StructRefDumper with SeqDif out of range""" system = ihm.System() lpep = ihm.LPeptideAlphabet() sd = ihm.reference.SeqDif(seq_id=40, db_monomer=lpep['W'], monomer=lpep['S'], details='Test mutation') r = ihm.reference.UniProtSequence( db_code='NUP84_YEAST', accession='P52891', sequence='MELSPTYQT', details='test sequence') r.alignments.append(ihm.reference.Alignment(seq_dif=[sd])) system.entities.append(ihm.Entity('LSPT', references=[r])) dumper = ihm.dumper._EntityDumper() dumper.finalize(system) # Assign entity IDs dumper = ihm.dumper._StructRefDumper() dumper.finalize(system) # Assign IDs with self.assertRaises(IndexError) as cm: _get_dumper_output(dumper, system) self.assertIn('is 40, out of range 1-4', str(cm.exception)) # Should work with checks disabled _ = _get_dumper_output(dumper, system, check=False) def test_struct_ref_seq_dif_mismatch(self): """Test StructRefDumper with SeqDif code mismatch""" system = ihm.System() lpep = ihm.LPeptideAlphabet() sd = ihm.reference.SeqDif(seq_id=2, db_monomer=lpep['W'], monomer=lpep['Y'], details='Test mutation') r = ihm.reference.UniProtSequence( db_code='NUP84_YEAST', accession='P52891', sequence='MELWPTYQT', details='test sequence') r.alignments.append(ihm.reference.Alignment(seq_dif=[sd])) system.entities.append(ihm.Entity('LSPT', references=[r])) dumper = ihm.dumper._EntityDumper() dumper.finalize(system) # Assign entity IDs dumper = ihm.dumper._StructRefDumper() dumper.finalize(system) # Assign IDs with self.assertRaises(ValueError) as cm: _get_dumper_output(dumper, system) self.assertIn('one-letter code (Y) does not match', str(cm.exception)) self.assertIn('(S at position 2)', str(cm.exception)) # Should work with checks disabled _ = _get_dumper_output(dumper, system, check=False) def test_struct_ref_seq_dif_ins_del(self): """Test StructRefDumper with SeqDif insertions and deletions""" system = ihm.System() lpep = ihm.LPeptideAlphabet() sd1 = ihm.reference.SeqDif(seq_id=2, db_monomer=lpep['G'], monomer=None, details='deletion') sd2 = ihm.reference.SeqDif(seq_id=3, db_monomer=lpep['C'], monomer=None, details='insertion') r = ihm.reference.UniProtSequence( db_code='NUP84_YEAST', accession='P52891', sequence='MEWPTYQT', details='test sequence') r.alignments.append(ihm.reference.Alignment(seq_dif=[sd1, sd2])) system.entities.append(ihm.Entity('MEWPTYQT', references=[r])) dumper = ihm.dumper._EntityDumper() dumper.finalize(system) # Assign entity IDs dumper = ihm.dumper._StructRefDumper() dumper.finalize(system) # Assign IDs # Insertions and deletions are not currently checked, so # this should pass _ = _get_dumper_output(dumper, system) def test_chem_comp_dumper(self): """Test ChemCompDumper""" system = ihm.System() system.entities.append(ihm.Entity('ACGTTA')) system.entities.append(ihm.Entity('ACGA', alphabet=ihm.RNAAlphabet)) system.entities.append(ihm.Entity(('DA', 'DC'), alphabet=ihm.DNAAlphabet)) dumper = ihm.dumper._ChemCompDumper() out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _chem_comp.id _chem_comp.type _chem_comp.name _chem_comp.formula _chem_comp.formula_weight A 'RNA linking' "ADENOSINE-5'-MONOPHOSPHATE" 'C10 H14 N5 O7 P' 347.224 ALA 'L-peptide linking' ALANINE 'C3 H7 N O2' 89.094 C 'RNA linking' "CYTIDINE-5'-MONOPHOSPHATE" 'C9 H14 N3 O8 P' 323.198 CYS 'L-peptide linking' CYSTEINE 'C3 H7 N O2 S' 121.154 DA 'DNA linking' "2'-DEOXYADENOSINE-5'-MONOPHOSPHATE" 'C10 H14 N5 
O6 P' 331.225 DC 'DNA linking' "2'-DEOXYCYTIDINE-5'-MONOPHOSPHATE" 'C9 H14 N3 O7 P' 307.199 G 'RNA linking' "GUANOSINE-5'-MONOPHOSPHATE" 'C10 H14 N5 O8 P' 363.223 GLY 'peptide linking' GLYCINE 'C2 H5 N O2' 75.067 THR 'L-peptide linking' THREONINE 'C4 H9 N O3' 119.120 # """) def test_chem_comp_ccd_descriptors(self): """Test ChemCompDumper with ccd or descriptors""" system = ihm.System() comp1 = ihm.NonPolymerChemComp("C1", name='C1', ccd='MA') comp2 = ihm.NonPolymerChemComp("C2", name='C2', descriptors=['foo', 'bar']) system.entities.append(ihm.Entity([comp1, comp2])) dumper = ihm.dumper._ChemCompDumper() # Cannot output ChemComp with ccd or descriptors to IHM files self.assertRaises(ValueError, _get_dumper_output, dumper, system) def test_chem_descriptor_dumper(self): """Test ChemDescriptorDumper""" system = ihm.System() d1 = ihm.ChemDescriptor('EDC', smiles='CCN=C=NCCCN(C)C', inchi_key='LMDZBCPBFSXMTL-UHFFFAOYSA-N') system.orphan_chem_descriptors.append(d1) dumper = ihm.dumper._ChemDescriptorDumper() dumper.finalize(system) # Assign descriptor IDs out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_chemical_component_descriptor.id _ihm_chemical_component_descriptor.auth_name _ihm_chemical_component_descriptor.chemical_name _ihm_chemical_component_descriptor.common_name _ihm_chemical_component_descriptor.smiles _ihm_chemical_component_descriptor.smiles_canonical _ihm_chemical_component_descriptor.inchi _ihm_chemical_component_descriptor.inchi_key 1 EDC . . CCN=C=NCCCN(C)C . . LMDZBCPBFSXMTL-UHFFFAOYSA-N # """) def test_entity_poly_dumper(self): """Test EntityPolyDumper""" system = ihm.System() e1 = ihm.Entity('ACGT') # sequence containing glycine e2 = ihm.Entity(('A', 'C', 'C', 'UNK', 'MSE')) # no glycine # All D-peptides (with glycine) e3 = ihm.Entity(('DAL', 'DCY', 'G'), alphabet=ihm.DPeptideAlphabet) # All D-peptides (without glycine) e4 = ihm.Entity(('DAL', 'DCY'), alphabet=ihm.DPeptideAlphabet) # Mix of L- and D-peptides dpep_al = ihm.DPeptideAlphabet() e5 = ihm.Entity(('A', dpep_al['DCY'], 'G')) # Non-polymeric entity e6 = ihm.Entity([ihm.NonPolymerChemComp('HEM')]) # Sequence containing a non-standard residue e7 = ihm.Entity((ihm.NonPolymerChemComp('ACE'), 'C', 'C')) system.entities.extend((e1, e2, e3, e4, e5, e6, e7)) # One protein entity is modeled (with an asym unit) the other not; # this should be reflected in pdbx_strand_id system.asym_units.append(ihm.AsymUnit(e1, 'foo', strand_id='a')) system.asym_units.append(ihm.AsymUnit(e1, 'bar', strand_id='b')) rna = ihm.Entity('AC', alphabet=ihm.RNAAlphabet) dna = ihm.Entity(('DA', 'DC'), alphabet=ihm.DNAAlphabet) hybrid = ihm.Entity(rna.sequence + dna.sequence) system.entities.extend((rna, dna, hybrid)) ed = ihm.dumper._EntityDumper() ed.finalize(system) # Assign entity IDs sd = ihm.dumper._StructAsymDumper() sd.finalize(system) # Assign asym IDs dumper = ihm.dumper._EntityPolyDumper() out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _entity_poly.entity_id _entity_poly.type _entity_poly.nstd_linkage _entity_poly.nstd_monomer _entity_poly.pdbx_strand_id _entity_poly.pdbx_seq_one_letter_code _entity_poly.pdbx_seq_one_letter_code_can 1 polypeptide(L) no no a,b ACGT ACGT 2 polypeptide(L) no no . ACC(UNK)(MSE) ACCXM 3 polypeptide(D) no no . (DAL)(DCY)G ACG 4 polypeptide(D) no no . (DAL)(DCY) AC 5 polypeptide(L) no no . A(DCY)G ACG 7 polypeptide(L) no yes . (ACE)CC XCC 8 polyribonucleotide no no . AC AC 9 polydeoxyribonucleotide no no . 
(DA)(DC) AC 10 'polydeoxyribonucleotide/polyribonucleotide hybrid' no no . AC(DA)(DC) ACAC # """) def test_entity_nonpoly_dumper(self): """Test EntityNonPolyDumper""" system = ihm.System() # Polymeric entity e1 = ihm.Entity('ACGT') # Non-polymeric entity e2 = ihm.Entity([ihm.NonPolymerChemComp('HEM')], description='heme') e3 = ihm.Entity([ihm.WaterChemComp()]) # Branched entity e4 = ihm.Entity([ihm.SaccharideChemComp('NAG'), ihm.SaccharideChemComp('FUC')]) system.entities.extend((e1, e2, e3, e4)) ed = ihm.dumper._EntityDumper() ed.finalize(system) # Assign entity IDs dumper = ihm.dumper._EntityNonPolyDumper() out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _pdbx_entity_nonpoly.entity_id _pdbx_entity_nonpoly.name _pdbx_entity_nonpoly.comp_id 2 heme HEM 3 . HOH # """) def test_entity_poly_seq_dumper(self): """Test EntityPolySeqDumper""" system = ihm.System() system.entities.append(ihm.Entity('ACGT')) system.entities.append(ihm.Entity('ACC')) system.entities.append(ihm.Entity('AC', alphabet=ihm.RNAAlphabet)) system.entities.append(ihm.Entity(('DA', 'DC'), alphabet=ihm.DNAAlphabet)) # Non-polymeric entity system.entities.append(ihm.Entity([ihm.NonPolymerChemComp('HEM')])) ed = ihm.dumper._EntityDumper() ed.finalize(system) # Assign IDs dumper = ihm.dumper._EntityPolySeqDumper() out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _entity_poly_seq.entity_id _entity_poly_seq.num _entity_poly_seq.mon_id _entity_poly_seq.hetero 1 1 ALA . 1 2 CYS . 1 3 GLY . 1 4 THR . 2 1 ALA . 2 2 CYS . 2 3 CYS . 3 1 A . 3 2 C . 4 1 DA . 4 2 DC . # """) def test_poly_seq_scheme_dumper(self): """Test PolySeqSchemeDumper""" system = ihm.System() e1 = ihm.Entity('ACGT') e2 = ihm.Entity('ACC') e3 = ihm.Entity('AC', alphabet=ihm.RNAAlphabet) e4 = ihm.Entity(('DA', 'DC'), alphabet=ihm.DNAAlphabet) # Non-polymeric entity e5 = ihm.Entity([ihm.NonPolymerChemComp('HEM')]) system.entities.extend((e1, e2, e3, e4, e5)) system.asym_units.append(ihm.AsymUnit(e1, 'foo')) system.asym_units.append(ihm.AsymUnit(e2, 'bar', auth_seq_id_map=5)) system.asym_units.append(ihm.AsymUnit(e3, 'baz')) system.asym_units.append(ihm.AsymUnit(e4, 'test', strand_id='X', auth_seq_id_map={1: (1, 'A'), 2: (1, 'B')})) system.asym_units.append(ihm.AsymUnit(e5, 'heme')) ihm.dumper._EntityDumper().finalize(system) ihm.dumper._StructAsymDumper().finalize(system) dumper = ihm.dumper._PolySeqSchemeDumper() out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _pdbx_poly_seq_scheme.asym_id _pdbx_poly_seq_scheme.entity_id _pdbx_poly_seq_scheme.seq_id _pdbx_poly_seq_scheme.mon_id _pdbx_poly_seq_scheme.pdb_seq_num _pdbx_poly_seq_scheme.auth_seq_num _pdbx_poly_seq_scheme.pdb_mon_id _pdbx_poly_seq_scheme.auth_mon_id _pdbx_poly_seq_scheme.pdb_strand_id _pdbx_poly_seq_scheme.pdb_ins_code A 1 1 ALA 1 1 ALA ALA A . A 1 2 CYS 2 2 CYS CYS A . A 1 3 GLY 3 3 GLY GLY A . A 1 4 THR 4 4 THR THR A . B 2 1 ALA 6 6 ALA ALA B . B 2 2 CYS 7 7 CYS CYS B . B 2 3 CYS 8 8 CYS CYS B . C 3 1 A 1 1 A A C . C 3 2 C 2 2 C C C . 
D 4 1 DA 1 1 DA DA X A D 4 2 DC 1 1 DC DC X B # """) def test_poly_seq_scheme_unknown_auth_seq(self): """Test PolySeqSchemeDumper with explicit unknown auth_seq_num""" system = ihm.System() e1 = ihm.Entity('ACGT') system.entities.append(e1) a1 = ihm.AsymUnit(e1, 'foo', orig_auth_seq_id_map={1: 3, 2: 4, 3: ihm.unknown, 4: 6}) system.asym_units.append(a1) ihm.dumper._EntityDumper().finalize(system) ihm.dumper._StructAsymDumper().finalize(system) dumper = ihm.dumper._PolySeqSchemeDumper() out = _get_dumper_output(dumper, system) # If auth_seq_num is ?, so should pdb_mon_id and auth_mon_id; # see, e.g. PDB ID 8qb4 self.assertEqual(out, """# loop_ _pdbx_poly_seq_scheme.asym_id _pdbx_poly_seq_scheme.entity_id _pdbx_poly_seq_scheme.seq_id _pdbx_poly_seq_scheme.mon_id _pdbx_poly_seq_scheme.pdb_seq_num _pdbx_poly_seq_scheme.auth_seq_num _pdbx_poly_seq_scheme.pdb_mon_id _pdbx_poly_seq_scheme.auth_mon_id _pdbx_poly_seq_scheme.pdb_strand_id _pdbx_poly_seq_scheme.pdb_ins_code A 1 1 ALA 1 3 ALA ALA A . A 1 2 CYS 2 4 CYS CYS A . A 1 3 GLY 3 ? ? ? A . A 1 4 THR 4 6 THR THR A . # """) def test_poly_seq_scheme_dumper_not_modeled(self): """Test PolySeqSchemeDumper with not-modeled residues""" system, m1, asym = self._make_test_model() rr = ihm.model.NotModeledResidueRange(asym, 1, 2) m1.not_modeled_residue_ranges.append(rr) m2 = ihm.model.Model(assembly=m1.assembly, protocol=m1.protocol, representation=m1.representation, name='2nd test model') rr = ihm.model.NotModeledResidueRange(asym, 2, 4) m2.not_modeled_residue_ranges.append(rr) m3 = ihm.model.Model(assembly=m1.assembly, protocol=m1.protocol, representation=m1.representation, name='3rd test model') rr = ihm.model.NotModeledResidueRange(asym, 2, 3) m3.not_modeled_residue_ranges.append(rr) mg = system.state_groups[0][0][0] mg.extend((m1, m2, m3)) ihm.dumper._EntityDumper().finalize(system) ihm.dumper._StructAsymDumper().finalize(system) dumper = ihm.dumper._PolySeqSchemeDumper() out = _get_dumper_output(dumper, system) # Only residue 2 is not-modeled in all three Models self.assertEqual(out, """# loop_ _pdbx_poly_seq_scheme.asym_id _pdbx_poly_seq_scheme.entity_id _pdbx_poly_seq_scheme.seq_id _pdbx_poly_seq_scheme.mon_id _pdbx_poly_seq_scheme.pdb_seq_num _pdbx_poly_seq_scheme.auth_seq_num _pdbx_poly_seq_scheme.pdb_mon_id _pdbx_poly_seq_scheme.auth_mon_id _pdbx_poly_seq_scheme.pdb_strand_id _pdbx_poly_seq_scheme.pdb_ins_code A 1 1 ALA 1 1 ALA ALA A . A 1 2 CYS 2 ? ? ? A . A 1 3 GLY 3 3 GLY GLY A . A 1 4 THR 4 4 THR THR A . # """) def test_poly_seq_scheme_dumper_no_not_modeled(self): """Test PolySeqSchemeDumper with no not-modeled residue list""" # Older model with no not_modeled_residue_ranges member (e.g. # older versions of python-modelcif) system, m1, asym = self._make_test_model() del m1.not_modeled_residue_ranges mg = system.state_groups[0][0][0] mg.append(m1) ihm.dumper._EntityDumper().finalize(system) ihm.dumper._StructAsymDumper().finalize(system) dumper = ihm.dumper._PolySeqSchemeDumper() out = _get_dumper_output(dumper, system) # all residues are modeled self.assertEqual(out, """# loop_ _pdbx_poly_seq_scheme.asym_id _pdbx_poly_seq_scheme.entity_id _pdbx_poly_seq_scheme.seq_id _pdbx_poly_seq_scheme.mon_id _pdbx_poly_seq_scheme.pdb_seq_num _pdbx_poly_seq_scheme.auth_seq_num _pdbx_poly_seq_scheme.pdb_mon_id _pdbx_poly_seq_scheme.auth_mon_id _pdbx_poly_seq_scheme.pdb_strand_id _pdbx_poly_seq_scheme.pdb_ins_code A 1 1 ALA 1 1 ALA ALA A . A 1 2 CYS 2 2 CYS CYS A . A 1 3 GLY 3 3 GLY GLY A . A 1 4 THR 4 4 THR THR A . 
# """) def test_nonpoly_scheme_dumper(self): """Test NonPolySchemeDumper""" system = ihm.System() e1 = ihm.Entity('ACGT') e2 = ihm.Entity([ihm.NonPolymerChemComp('HEM')]) e3 = ihm.Entity([ihm.NonPolymerChemComp('ZN')]) system.entities.extend((e1, e2, e3)) system.asym_units.append(ihm.AsymUnit(e1, 'foo')) system.asym_units.append(ihm.AsymUnit(e2, 'baz', strand_id='Q')) system.asym_units.append(ihm.AsymUnit(e3, 'bar', auth_seq_id_map=5)) ihm.dumper._EntityDumper().finalize(system) ihm.dumper._StructAsymDumper().finalize(system) dumper = ihm.dumper._NonPolySchemeDumper() out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _pdbx_nonpoly_scheme.asym_id _pdbx_nonpoly_scheme.entity_id _pdbx_nonpoly_scheme.mon_id _pdbx_nonpoly_scheme.ndb_seq_num _pdbx_nonpoly_scheme.pdb_seq_num _pdbx_nonpoly_scheme.auth_seq_num _pdbx_nonpoly_scheme.auth_mon_id _pdbx_nonpoly_scheme.pdb_strand_id _pdbx_nonpoly_scheme.pdb_ins_code B 2 HEM 1 1 1 HEM Q . C 3 ZN 1 6 6 ZN C . # """) def test_collection_dumper(self): """Test CollectionDumper""" system = ihm.System() c = ihm.Collection('foo', name='bar', details='more text') system.collections.append(c) dumper = ihm.dumper._CollectionDumper() out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_entry_collection.id _ihm_entry_collection.name _ihm_entry_collection.details foo bar 'more text' # """) def test_struct_asym_dumper(self): """Test StructAsymDumper""" system = ihm.System() e1 = ihm.Entity('ACGT') e2 = ihm.Entity('ACC') e1._id = 1 e2._id = 2 system.entities.extend((e1, e2)) system.asym_units.append(ihm.AsymUnit(e1, 'foo', id='Z')) system.asym_units.append(ihm.AsymUnit(e1, 'bar')) system.asym_units.append(ihm.AsymUnit(e2, 'baz', id='A')) system.asym_units.append(ihm.AsymUnit(e2, 'tmp')) dumper = ihm.dumper._StructAsymDumper() dumper.finalize(system) # assign IDs out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _struct_asym.id _struct_asym.entity_id _struct_asym.details Z 1 foo B 1 bar A 2 baz C 2 tmp # """) def test_struct_asym_dumper_duplicate_ids(self): """Test StructAsymDumper detection of duplicate user IDs""" system = ihm.System() e1 = ihm.Entity('ACGT') system.entities.append(e1) a1 = ihm.AsymUnit(e1, 'foo', id='Z') a2 = ihm.AsymUnit(e1, 'baz', id='Z') system.asym_units.extend((a1, a2)) dumper = ihm.dumper._StructAsymDumper() self.assertRaises(ValueError, dumper.finalize, system) def test_assembly_all_modeled(self): """Test AssemblyDumper, all components modeled""" system = ihm.System() e1 = ihm.Entity('ACG', description='foo') e2 = ihm.Entity('AW', description='baz') a1 = ihm.AsymUnit(e1) a2 = ihm.AsymUnit(e1) a3 = ihm.AsymUnit(e2) system.entities.extend((e1, e2)) system.asym_units.extend((a1, a2, a3)) system.orphan_assemblies.append( ihm.Assembly((a1, a2(2, 3)), name='foo')) # Out of order assembly (should be ordered on output) system.orphan_assemblies.append(ihm.Assembly((a3, a2), name='bar', description='desc1')) # Duplicate (equal) assembly (should be ignored, but description # merged in) a = ihm.Assembly((a2, a3), description='desc2') system.orphan_assemblies.append(a) # Another duplicate with duplicate description (should be ignored) a = ihm.Assembly((a2, a3), description='desc2') system.orphan_assemblies.append(a) # Duplicate (identical) assembly (should be ignored, including # description) system.orphan_assemblies.append(a) # Assign entity, asym and range IDs ihm.dumper._EntityDumper().finalize(system) ihm.dumper._StructAsymDumper().finalize(system) system._make_complete_assembly() 
# Assign and check segment IDs dumper = ihm.dumper._EntityPolySegmentDumper() dumper.finalize(system) out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_entity_poly_segment.id _ihm_entity_poly_segment.entity_id _ihm_entity_poly_segment.seq_id_begin _ihm_entity_poly_segment.seq_id_end _ihm_entity_poly_segment.comp_id_begin _ihm_entity_poly_segment.comp_id_end 1 1 1 3 ALA GLY 2 2 1 2 ALA TRP 3 1 2 3 CYS GLY # """) d = ihm.dumper._AssemblyDumper() d.finalize(system) self.assertEqual(system.complete_assembly._id, 1) self.assertEqual([asmb._id for asmb in system.orphan_assemblies], [2, 3, 3, 3, 3]) out = _get_dumper_output(d, system) self.assertEqual(out, """# loop_ _ihm_struct_assembly.id _ihm_struct_assembly.name _ihm_struct_assembly.description 1 'Complete assembly' 'All known components' 2 foo . 3 bar 'desc1 & desc2' # # loop_ _ihm_struct_assembly_details.id _ihm_struct_assembly_details.assembly_id _ihm_struct_assembly_details.parent_assembly_id _ihm_struct_assembly_details.entity_description _ihm_struct_assembly_details.entity_id _ihm_struct_assembly_details.asym_id _ihm_struct_assembly_details.entity_poly_segment_id 1 1 1 foo 1 A 1 2 1 1 foo 1 B 1 3 1 1 baz 2 C 2 4 2 2 foo 1 A 1 5 2 2 foo 1 B 3 6 3 3 foo 1 B 1 7 3 3 baz 2 C 2 # """) def test_assembly_subset_modeled(self): """Test AssemblyDumper, subset of components modeled""" system = ihm.System() e1 = ihm.Entity('ACG', description='foo') e2 = ihm.Entity('EW', description='bar') a1 = ihm.AsymUnit(e1) system.entities.extend((e1, e2)) system.asym_units.append(a1) # Note that no asym unit uses entity e2, so it won't be included # in the assembly # Assign entity and asym IDs ihm.dumper._EntityDumper().finalize(system) ihm.dumper._StructAsymDumper().finalize(system) system._make_complete_assembly() # Assign and check segment IDs dumper = ihm.dumper._EntityPolySegmentDumper() dumper.finalize(system) out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_entity_poly_segment.id _ihm_entity_poly_segment.entity_id _ihm_entity_poly_segment.seq_id_begin _ihm_entity_poly_segment.seq_id_end _ihm_entity_poly_segment.comp_id_begin _ihm_entity_poly_segment.comp_id_end 1 1 1 3 ALA GLY # """) d = ihm.dumper._AssemblyDumper() d.finalize(system) out = _get_dumper_output(d, system) self.assertEqual(out, """# loop_ _ihm_struct_assembly.id _ihm_struct_assembly.name _ihm_struct_assembly.description 1 'Complete assembly' 'All known components' # # loop_ _ihm_struct_assembly_details.id _ihm_struct_assembly_details.assembly_id _ihm_struct_assembly_details.parent_assembly_id _ihm_struct_assembly_details.entity_description _ihm_struct_assembly_details.entity_id _ihm_struct_assembly_details.asym_id _ihm_struct_assembly_details.entity_poly_segment_id 1 1 1 foo 1 A 1 # """) def test_external_reference_dumper(self): """Test ExternalReferenceDumper""" system = ihm.System() repo1 = ihm.location.Repository(doi="foo", details='test repo') repo2 = ihm.location.Repository(doi="10.5281/zenodo.46266", url='nup84-v1.0.zip', top_directory=os.path.join('foo', 'bar')) repo3 = ihm.location.Repository(doi="10.5281/zenodo.58025", url='foo.spd') loc = ihm.location.InputFileLocation(repo=repo1, path='bar', file_format='TXT') system.locations.append(loc) # Duplicates should be ignored loc = ihm.location.InputFileLocation(repo=repo1, path='bar') system.locations.append(loc) # Different file, same repository loc = ihm.location.InputFileLocation(repo=repo1, path='baz') system.locations.append(loc) # Different repository loc = 
ihm.location.OutputFileLocation(repo=repo2, path='baz') system.locations.append(loc) # Repository containing a single file (not an archive) loc = ihm.location.InputFileLocation(repo=repo3, path='foo.spd', details='EM micrographs') system.locations.append(loc) # Path can also be None for Repository containing a single file loc = ihm.location.InputFileLocation(repo=repo3, path=None, details='EM micrographs') system.locations.append(loc) with utils.temporary_directory('') as tmpdir: # Force tmpdir to be a relative path (in Python 3.12 or later # it will be an absolute path) rel_tmpdir = os.path.relpath(tmpdir) bar = os.path.join(rel_tmpdir, 'test_mmcif_extref.tmp') with open(bar, 'w') as f: f.write("abcd") # Local file system.locations.append(ihm.location.WorkflowFileLocation(bar)) # DatabaseLocations should be ignored system.locations.append(ihm.location.PDBLocation( '1abc', '1.0', 'test details')) d = ihm.dumper._ExternalReferenceDumper() d.finalize(system) self.assertEqual(len(d._ref_by_id), 6) self.assertEqual(len(d._repo_by_id), 4) # Repeated calls to finalize() should yield identical results d.finalize(system) self.assertEqual(len(d._ref_by_id), 6) self.assertEqual(len(d._repo_by_id), 4) out = _get_dumper_output(d, system) self.assertEqual(out, """# loop_ _ihm_external_reference_info.reference_id _ihm_external_reference_info.reference_provider _ihm_external_reference_info.reference_type _ihm_external_reference_info.reference _ihm_external_reference_info.refers_to _ihm_external_reference_info.associated_url _ihm_external_reference_info.details 1 . DOI foo Other . 'test repo' 2 Zenodo DOI 10.5281/zenodo.46266 Archive nup84-v1.0.zip . 3 Zenodo DOI 10.5281/zenodo.58025 File foo.spd . 4 . 'Supplementary Files' . Other . . # # loop_ _ihm_external_files.id _ihm_external_files.reference_id _ihm_external_files.file_path _ihm_external_files.content_type _ihm_external_files.file_format _ihm_external_files.file_size_bytes _ihm_external_files.details 1 1 bar 'Input data or restraints' TXT . . 2 1 baz 'Input data or restraints' . . . 3 2 foo/bar/baz 'Modeling or post-processing output' . . . 4 3 foo.spd 'Input data or restraints' . . 'EM micrographs' 5 3 . 'Input data or restraints' . . 'EM micrographs' 6 4 %s 'Modeling workflow or script' . 4 . 
# """ % bar.replace(os.sep, '/')) def test_dataset_dumper_duplicates_details(self): """DatasetDumper ignores duplicate datasets with differing details""" system = ihm.System() dump = ihm.dumper._DatasetDumper() loc = ihm.location.PDBLocation('1abc', '1.0', 'test details') ds1 = ihm.dataset.PDBDataset(loc) system.orphan_datasets.append(ds1) # A duplicate dataset should be ignored even if details differ loc = ihm.location.PDBLocation('1abc', '1.0', 'other details') ds2 = ihm.dataset.PDBDataset(loc) system.orphan_datasets.append(ds2) ds3 = ihm.dataset.PDBDataset(loc, details='other dataset details') system.orphan_datasets.append(ds3) dump.finalize(system) # Assign IDs self.assertEqual(ds1._id, 1) self.assertEqual(ds2._id, 1) self.assertEqual(ds3._id, 1) self.assertEqual(len(dump._dataset_by_id), 1) def test_dataset_dumper_duplicates_samedata_sameloc(self): """DatasetDumper doesn't duplicate same datasets in same location""" system = ihm.System() loc1 = ihm.location.DatabaseLocation("abc", "1.0", "") # Identical datasets in the same location aren't duplicated cx1 = ihm.dataset.CXMSDataset(loc1) cx2 = ihm.dataset.CXMSDataset(loc1) dump = ihm.dumper._DatasetDumper() system.orphan_datasets.extend((cx1, cx2)) dump.finalize(system) # Assign IDs self.assertEqual(cx1._id, 1) self.assertEqual(cx2._id, 1) self.assertEqual(len(dump._dataset_by_id), 1) def test_dataset_dumper_duplicates_samedata_diffloc(self): """DatasetDumper is OK with same datasets in different locations""" system = ihm.System() loc1 = ihm.location.DatabaseLocation("abc", "1.0", "") loc2 = ihm.location.DatabaseLocation("xyz", "1.0", "") cx1 = ihm.dataset.CXMSDataset(loc1) cx2 = ihm.dataset.CXMSDataset(loc2) dump = ihm.dumper._DatasetDumper() system.orphan_datasets.extend((cx1, cx2)) dump.finalize(system) # Assign IDs self.assertEqual(cx1._id, 1) self.assertEqual(cx2._id, 2) self.assertEqual(len(dump._dataset_by_id), 2) def test_dataset_dumper_duplicates_diffdata_sameloc(self): """DatasetDumper is OK with different datasets in same location""" system = ihm.System() # Different datasets in same location are OK (but odd) loc2 = ihm.location.DatabaseLocation("xyz", "1.0", "") cx2 = ihm.dataset.CXMSDataset(loc2) em3d = ihm.dataset.EMDensityDataset(loc2) dump = ihm.dumper._DatasetDumper() system.orphan_datasets.extend((cx2, em3d)) dump.finalize(system) # Assign IDs self.assertEqual(cx2._id, 1) self.assertEqual(em3d._id, 2) self.assertEqual(len(dump._dataset_by_id), 2) def test_dataset_dumper_allow_duplicates(self): """DatasetDumper is OK with duplicates if allow_duplicates=True""" system = ihm.System() emloc1 = ihm.location.EMDBLocation("abc") emloc2 = ihm.location.EMDBLocation("abc") emloc1._allow_duplicates = True em3d_1 = ihm.dataset.EMDensityDataset(emloc1) em3d_2 = ihm.dataset.EMDensityDataset(emloc2) dump = ihm.dumper._DatasetDumper() system.orphan_datasets.extend((em3d_1, em3d_2)) dump.finalize(system) # Assign IDs self.assertEqual(em3d_1._id, 1) self.assertEqual(em3d_2._id, 2) self.assertEqual(len(dump._dataset_by_id), 2) def test_dataset_dumper_group_finalize(self): """Test DatasetDumper finalize of dataset groups""" system = ihm.System() loc = ihm.location.InputFileLocation(repo='foo', path='baz') ds1 = ihm.dataset.CXMSDataset(loc) group1 = ihm.dataset.DatasetGroup([ds1]) # Duplicate group group2 = ihm.dataset.DatasetGroup([ds1]) system.orphan_datasets.append(ds1) system.orphan_dataset_groups.extend((group1, group2)) d = ihm.dumper._DatasetDumper() d.finalize(system) # Assign IDs self.assertEqual(len(d._dataset_by_id), 1) 
self.assertEqual(len(d._dataset_group_by_id), 1) # Repeated calls to finalize should yield identical results d.finalize(system) self.assertEqual(len(d._dataset_by_id), 1) self.assertEqual(len(d._dataset_group_by_id), 1) def test_dataset_dumper_dump(self): """Test DatasetDumper.dump()""" system = ihm.System() loc = ihm.location.InputFileLocation(repo='foo', path='bar') loc._id = 97 ds1 = ihm.dataset.CXMSDataset(loc) system.orphan_datasets.append(ds1) # group1 contains just the first dataset (but duplicated) group1 = ihm.dataset.DatasetGroup([ds1, ds1], name="first") system.orphan_dataset_groups.append(group1) loc = ihm.location.InputFileLocation(repo='foo2', path='bar2') loc._id = 98 ds2 = ihm.dataset.CXMSDataset(loc) # group2 contains all datasets so far (ds1 & ds2) group2 = ihm.dataset.DatasetGroup([ds1, ds2], name="all so far") system.orphan_dataset_groups.append(group2) loc = ihm.location.PDBLocation('1abc', '1.0', 'test details') ds3 = ihm.dataset.PDBDataset(loc, details='test dataset details') system.orphan_datasets.append(ds3) ds3.parents.append(ds2) # Ignore duplicates ds3.parents.append(ds2) # Derived dataset with (shared) transformation loc = ihm.location.PDBLocation('1cde', version='foo', details='bar') dst = ihm.dataset.Dataset(loc, details='bar') t = ihm.geometry.Transformation( rot_matrix=[[-0.64, 0.09, 0.77], [0.76, -0.12, 0.64], [0.15, 0.99, 0.01]], tr_vector=[1., 2., 3.]) td = ihm.dataset.TransformedDataset(dst, transform=t) ds3.parents.append(td) loc = ihm.location.PDBLocation('1cdf', version='foo', details='bar') dst = ihm.dataset.Dataset(loc, details='baz') # Same transformation as before td = ihm.dataset.TransformedDataset(dst, transform=t) ds3.parents.append(td) # Dataset with no location ds4 = ihm.dataset.PDBDataset(None) system.orphan_datasets.append(ds4) # Dataset with multiple locations ds5 = ihm.dataset.PDBDataset(None) loc = ihm.location.PDBLocation('2xyz', '1.0', 'other details') ds5._add_location(loc) loc = ihm.location.InputFileLocation(repo='foo', path='bar') loc._id = 102 ds5._add_location(loc) system.orphan_datasets.append(ds5) # Transformation not referenced by any object trans2 = ihm.geometry.Transformation([[1, 0, 0], [0, 1, 0], [0, 0, 1]], [4., 5., 6.]) system._orphan_dataset_transforms.append(trans2) d = ihm.dumper._DatasetDumper() d.finalize(system) # Assign IDs out = _get_dumper_output(d, system) self.assertEqual(out, """# loop_ _ihm_dataset_list.id _ihm_dataset_list.data_type _ihm_dataset_list.database_hosted _ihm_dataset_list.details 1 'Crosslinking-MS data' NO . 2 'Crosslinking-MS data' NO . 3 Other YES bar 4 Other YES baz 5 'Experimental model' YES 'test dataset details' 6 'Experimental model' NO . 7 'Experimental model' YES . # # loop_ _ihm_dataset_group.id _ihm_dataset_group.name _ihm_dataset_group.application _ihm_dataset_group.details 1 first . . 2 'all so far' . . 
# # loop_ _ihm_dataset_group_link.group_id _ihm_dataset_group_link.dataset_list_id 1 1 2 1 2 2 # # loop_ _ihm_dataset_external_reference.id _ihm_dataset_external_reference.dataset_list_id _ihm_dataset_external_reference.file_id 1 1 97 2 2 98 3 7 102 # # loop_ _ihm_dataset_related_db_reference.id _ihm_dataset_related_db_reference.dataset_list_id _ihm_dataset_related_db_reference.db_name _ihm_dataset_related_db_reference.accession_code _ihm_dataset_related_db_reference.version _ihm_dataset_related_db_reference.details 1 3 PDB 1cde foo bar 2 4 PDB 1cdf foo bar 3 5 PDB 1abc 1.0 'test details' 4 7 PDB 2xyz 1.0 'other details' # # loop_ _ihm_related_datasets.dataset_list_id_derived _ihm_related_datasets.dataset_list_id_primary _ihm_related_datasets.transformation_id 5 2 . 5 3 1 5 4 1 # # loop_ _ihm_data_transformation.id _ihm_data_transformation.rot_matrix[1][1] _ihm_data_transformation.rot_matrix[2][1] _ihm_data_transformation.rot_matrix[3][1] _ihm_data_transformation.rot_matrix[1][2] _ihm_data_transformation.rot_matrix[2][2] _ihm_data_transformation.rot_matrix[3][2] _ihm_data_transformation.rot_matrix[1][3] _ihm_data_transformation.rot_matrix[2][3] _ihm_data_transformation.rot_matrix[3][3] _ihm_data_transformation.tr_vector[1] _ihm_data_transformation.tr_vector[2] _ihm_data_transformation.tr_vector[3] 1 -0.640000 0.760000 0.150000 0.090000 -0.120000 0.990000 0.770000 0.640000 0.010000 1.000 2.000 3.000 2 1.000000 0.000000 0.000000 0.000000 1.000000 0.000000 0.000000 0.000000 1.000000 4.000 5.000 6.000 # """) def test_dataset_dumper_dump_invalid_transform(self): """Test DatasetDumper.dump() with invalid transformed dataset""" system = ihm.System() loc = ihm.location.PDBLocation('1cdf', version='foo', details='bar') loc._id = 1 dst = ihm.dataset.Dataset(loc, details='baz') t = ihm.geometry.Transformation( rot_matrix=None, tr_vector=[1., 2., 3.]) td = ihm.dataset.TransformedDataset(dst, transform=t) loc = ihm.location.InputFileLocation(repo='foo', path='bar') loc._id = 2 ds2 = ihm.dataset.CXMSDataset(loc) ds2.parents.append(td) system.orphan_datasets.append(ds2) d = ihm.dumper._DatasetDumper() d.finalize(system) # Assign IDs self.assertRaises(ValueError, _get_dumper_output, d, system) # OK if checks are disabled _ = _get_dumper_output(d, system, check=False) def test_model_representation_dump(self): """Test ModelRepresentationDumper""" system = ihm.System() e1 = ihm.Entity('AAAAAAAA', description='bar') system.entities.append(e1) asym = ihm.AsymUnit(e1, 'foo') system.asym_units.append(asym) s1 = ihm.representation.AtomicSegment( asym(1, 2), starting_model=None, rigid=True) s2 = ihm.representation.ResidueSegment( asym(3, 4), starting_model=None, rigid=False, primitive='sphere') s3 = ihm.representation.MultiResidueSegment( asym(1, 2), starting_model=None, rigid=False, primitive='gaussian') s4 = ihm.representation.FeatureSegment( asym(3, 4), starting_model=None, rigid=True, primitive='other', count=3, description='test segment') r1 = ihm.representation.Representation((s1, s2), name='foo', details='foo details') r2 = ihm.representation.Representation((s3, s4), name='bar') system.orphan_representations.extend((r1, r2)) e1._id = 42 asym._id = 'X' # Assign segment IDs ihm.dumper._EntityPolySegmentDumper().finalize(system) dumper = ihm.dumper._ModelRepresentationDumper() dumper.finalize(system) # assign IDs out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_model_representation.id _ihm_model_representation.name _ihm_model_representation.details 1 foo 'foo details' 2 bar . 
# # loop_ _ihm_model_representation_details.id _ihm_model_representation_details.representation_id _ihm_model_representation_details.entity_id _ihm_model_representation_details.entity_description _ihm_model_representation_details.entity_asym_id _ihm_model_representation_details.entity_poly_segment_id _ihm_model_representation_details.model_object_primitive _ihm_model_representation_details.starting_model_id _ihm_model_representation_details.model_mode _ihm_model_representation_details.model_granularity _ihm_model_representation_details.model_object_count _ihm_model_representation_details.description 1 1 42 bar X 1 atomistic . rigid by-atom . . 2 1 42 bar X 2 sphere . flexible by-residue . . 3 2 42 bar X 1 gaussian . flexible multi-residue . . 4 2 42 bar X 2 other . rigid by-feature 3 'test segment' # """) def test_starting_model_dumper(self): """Test StartingModelDumper""" class TestStartingModel(ihm.startmodel.StartingModel): def get_atoms(self): asym = self.asym_unit return [ihm.model.Atom(asym_unit=asym, seq_id=1, atom_id='CA', type_symbol='C', x=-8.0, y=-5.0, z=91.0, biso=42.)] def get_seq_dif(self): return [ihm.startmodel.MSESeqDif(db_seq_id=5, seq_id=7), ihm.startmodel.SeqDif(db_seq_id=6, seq_id=8, db_comp_id='LEU', details='LEU -> GLY')] system = ihm.System() e1 = ihm.Entity('A' * 6 + 'MG' + 'A' * 12, description='foo') system.entities.append(e1) asym = ihm.AsymUnit(e1, 'bar') system.asym_units.append(asym) loc = ihm.location.PDBLocation('1abc', '1.0', 'test details') dstemplate = ihm.dataset.PDBDataset(loc) loc = ihm.location.PDBLocation('2xyz', '1.0', 'test details') dstarget = ihm.dataset.PDBDataset(loc) ali = ihm.location.InputFileLocation(repo='foo', path='test.ali') script = ihm.location.WorkflowFileLocation(repo='foo', path='test.py') software = ihm.Software(name='test', classification='test code', description='Some test program', version=1, location='http://test.org') s1 = ihm.startmodel.Template( dataset=dstemplate, asym_id='C', seq_id_range=(-9, 0), # 1,10 in IHM numbering template_seq_id_range=(101, 110), sequence_identity=30.) 
s2 = ihm.startmodel.Template( dataset=dstemplate, asym_id='D', seq_id_range=(-5, 2), # 5,12 in IHM numbering template_seq_id_range=(201, 210), sequence_identity=ihm.startmodel.SequenceIdentity(40., None), alignment_file=ali) s3 = ihm.startmodel.Template( dataset=dstemplate, asym_id='D', seq_id_range=(-5, 2), # 5,12 in IHM numbering template_seq_id_range=(201, 210), sequence_identity=ihm.startmodel.SequenceIdentity(ihm.unknown, ihm.unknown), alignment_file=ali) s4 = ihm.startmodel.Template( dataset=dstemplate, asym_id='D', seq_id_range=(None, None), template_seq_id_range=(None, None), sequence_identity=ihm.startmodel.SequenceIdentity(None, None), alignment_file=ali) sm = TestStartingModel(asym(1, 12), dstarget, 'A', [s1, s2, s3, s4], offset=10, script_file=script, software=software) system.orphan_starting_models.append(sm) sm = TestStartingModel(asym(1, 15), dstarget, 'A', [], description="test desc") system.orphan_starting_models.append(sm) e1._id = 42 asym._id = 99 dstemplate._id = 101 dstarget._id = 102 ali._id = 5 script._id = 8 software._id = 99 # Assign and check segment IDs dumper = ihm.dumper._EntityPolySegmentDumper() dumper.finalize(system) out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_entity_poly_segment.id _ihm_entity_poly_segment.entity_id _ihm_entity_poly_segment.seq_id_begin _ihm_entity_poly_segment.seq_id_end _ihm_entity_poly_segment.comp_id_begin _ihm_entity_poly_segment.comp_id_end 1 42 1 12 ALA ALA 2 42 1 15 ALA ALA # """) dumper = ihm.dumper._StartingModelDumper() dumper.finalize(system) # assign IDs out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_starting_model_details.starting_model_id _ihm_starting_model_details.entity_id _ihm_starting_model_details.entity_description _ihm_starting_model_details.asym_id _ihm_starting_model_details.entity_poly_segment_id _ihm_starting_model_details.starting_model_source _ihm_starting_model_details.starting_model_auth_asym_id _ihm_starting_model_details.starting_model_sequence_offset _ihm_starting_model_details.dataset_list_id _ihm_starting_model_details.description 1 42 foo 99 1 'experimental model' A 10 102 . 2 42 foo 99 2 'experimental model' A 0 102 'test desc' # # loop_ _ihm_starting_computational_models.starting_model_id _ihm_starting_computational_models.software_id _ihm_starting_computational_models.script_file_id 1 99 8 # # loop_ _ihm_starting_comparative_models.id _ihm_starting_comparative_models.starting_model_id _ihm_starting_comparative_models.starting_model_auth_asym_id _ihm_starting_comparative_models.starting_model_seq_id_begin _ihm_starting_comparative_models.starting_model_seq_id_end _ihm_starting_comparative_models.template_auth_asym_id _ihm_starting_comparative_models.template_seq_id_begin _ihm_starting_comparative_models.template_seq_id_end _ihm_starting_comparative_models.template_sequence_identity _ihm_starting_comparative_models.template_sequence_identity_denominator _ihm_starting_comparative_models.template_dataset_list_id _ihm_starting_comparative_models.alignment_file_id 1 1 A 1 10 C 101 110 30.000 1 101 . 2 1 A 5 12 D 201 210 40.000 . 101 5 3 1 A 5 12 D 201 210 ? ? 101 5 4 1 A . . D . . . . 
101 5 # # loop_ _ihm_starting_model_coord.starting_model_id _ihm_starting_model_coord.group_PDB _ihm_starting_model_coord.id _ihm_starting_model_coord.type_symbol _ihm_starting_model_coord.atom_id _ihm_starting_model_coord.comp_id _ihm_starting_model_coord.entity_id _ihm_starting_model_coord.asym_id _ihm_starting_model_coord.seq_id _ihm_starting_model_coord.Cartn_x _ihm_starting_model_coord.Cartn_y _ihm_starting_model_coord.Cartn_z _ihm_starting_model_coord.B_iso_or_equiv _ihm_starting_model_coord.ordinal_id 1 ATOM 1 C CA ALA 42 99 1 -8.000 -5.000 91.000 42.000 1 2 ATOM 1 C CA ALA 42 99 1 -8.000 -5.000 91.000 42.000 2 # # loop_ _ihm_starting_model_seq_dif.id _ihm_starting_model_seq_dif.entity_id _ihm_starting_model_seq_dif.asym_id _ihm_starting_model_seq_dif.seq_id _ihm_starting_model_seq_dif.comp_id _ihm_starting_model_seq_dif.starting_model_id _ihm_starting_model_seq_dif.db_asym_id _ihm_starting_model_seq_dif.db_seq_id _ihm_starting_model_seq_dif.db_comp_id _ihm_starting_model_seq_dif.details 1 42 99 7 MET 1 A 5 MSE 'Conversion of modified residue MSE to MET' 2 42 99 8 GLY 1 A 6 LEU 'LEU -> GLY' 3 42 99 7 MET 2 A 5 MSE 'Conversion of modified residue MSE to MET' 4 42 99 8 GLY 2 A 6 LEU 'LEU -> GLY' # """) def test_starting_model_dumper_atom_range(self): """Test StartingModelDumper with invalid atom seq_id""" class TestStartingModel(ihm.startmodel.StartingModel): def get_atoms(self): asym = self.asym_unit return [ihm.model.Atom(asym_unit=asym, seq_id=99, atom_id='CA', type_symbol='C', x=-8.0, y=-5.0, z=91.0, biso=42.)] system = ihm.System() e1 = ihm.Entity('ACG', description='foo') system.entities.append(e1) asym = ihm.AsymUnit(e1, 'bar') system.asym_units.append(asym) loc = ihm.location.PDBLocation('2xyz', '1.0', 'test details') dstarget = ihm.dataset.PDBDataset(loc) sm = TestStartingModel(asym(1, 3), dstarget, 'A', []) system.orphan_starting_models.append(sm) e1._id = 42 asym._id = 99 dstarget._id = 8 sm._id = 5 # Assign and check segment IDs dumper = ihm.dumper._EntityPolySegmentDumper() dumper.finalize(system) dumper = ihm.dumper._StartingModelDumper() with self.assertRaises(IndexError) as cm: _get_dumper_output(dumper, system) self.assertIn('Starting model 5 atom seq_id (99) out of range (1-3)', str(cm.exception)) # Should work with checks disabled _ = _get_dumper_output(dumper, system, check=False) def test_modeling_protocol(self): """Test ProtocolDumper""" class MockObject: pass system = ihm.System() p1 = ihm.protocol.Protocol('equilibration') assembly = ihm.Assembly(description='foo') assembly._id = 42 dsg = MockObject() dsg._id = 99 dsg2 = MockObject() dsg2._id = 101 software = MockObject() software._id = 80 script = MockObject() script._id = 90 p1.steps.append(ihm.protocol.Step( assembly=assembly, dataset_group=dsg, method='Monte Carlo', num_models_begin=0, num_models_end=500, multi_scale=True, ensemble=True, name='s1')) p1.steps.append(ihm.protocol.Step( assembly=assembly, dataset_group=dsg, method='Replica exchange', num_models_begin=500, num_models_end=2000, multi_scale=True, ensemble=False)) system.orphan_protocols.append(p1) p2 = ihm.protocol.Protocol('sampling', details='extra details') p2.steps.append(ihm.protocol.Step( assembly=assembly, dataset_group=dsg2, method='Replica exchange', num_models_begin=2000, num_models_end=1000, multi_scale=True, software=software, script_file=script, description='test step')) system.orphan_protocols.append(p2) dumper = ihm.dumper._ProtocolDumper() dumper.finalize(system) # assign IDs out = _get_dumper_output(dumper, system) 
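# Illustrative sketch of the protocol construction exercised above (hedged;
# not part of the test; 'assembly', 'dsg' and 'system' stand for the objects
# built in the test, with MockObject used only so unrelated tables need not
# be dumped). A Protocol is a named list of Steps, each tied to an assembly
# and a dataset group.
#
#   import ihm.protocol
#   protocol = ihm.protocol.Protocol('sampling')
#   protocol.steps.append(ihm.protocol.Step(
#       assembly=assembly, dataset_group=dsg, method='Monte Carlo',
#       num_models_begin=0, num_models_end=500,
#       multi_scale=True, ensemble=True))
#   system.orphan_protocols.append(protocol)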
self.assertEqual(out, """# loop_ _ihm_modeling_protocol.id _ihm_modeling_protocol.protocol_name _ihm_modeling_protocol.num_steps _ihm_modeling_protocol.details 1 equilibration 2 . 2 sampling 1 'extra details' # # loop_ _ihm_modeling_protocol_details.id _ihm_modeling_protocol_details.protocol_id _ihm_modeling_protocol_details.step_id _ihm_modeling_protocol_details.struct_assembly_id _ihm_modeling_protocol_details.dataset_group_id _ihm_modeling_protocol_details.step_name _ihm_modeling_protocol_details.step_method _ihm_modeling_protocol_details.num_models_begin _ihm_modeling_protocol_details.num_models_end _ihm_modeling_protocol_details.multi_scale_flag _ihm_modeling_protocol_details.multi_state_flag _ihm_modeling_protocol_details.ordered_flag _ihm_modeling_protocol_details.ensemble_flag _ihm_modeling_protocol_details.software_id _ihm_modeling_protocol_details.script_file_id _ihm_modeling_protocol_details.description 1 1 1 42 99 s1 'Monte Carlo' 0 500 YES NO NO YES . . . 2 1 2 42 99 . 'Replica exchange' 500 2000 YES NO NO NO . . . 3 2 1 42 101 . 'Replica exchange' 2000 1000 YES NO NO NO 80 90 'test step' # """) def test_post_process(self): """Test PostProcessDumper""" class MockObject: pass system = ihm.System() p1 = ihm.protocol.Protocol('refinement') system.orphan_protocols.append(p1) a1 = ihm.analysis.Analysis() a1.steps.append(ihm.analysis.EmptyStep()) a2 = ihm.analysis.Analysis() a2.steps.append(ihm.analysis.FilterStep( feature='energy/score', num_models_begin=1000, num_models_end=200)) a2.steps.append(ihm.analysis.ClusterStep( feature='RMSD', num_models_begin=200, num_models_end=42)) asmb1 = MockObject() asmb1._id = 101 dg1 = MockObject() dg1._id = 301 software = MockObject() software._id = 401 script = MockObject() script._id = 501 a2.steps.append(ihm.analysis.ValidationStep( feature='energy/score', num_models_begin=42, num_models_end=42, assembly=asmb1, dataset_group=dg1, software=software, script_file=script, details='test step')) p1.analyses.extend((a1, a2)) dumper = ihm.dumper._ProtocolDumper() dumper.finalize(system) # assign protocol IDs dumper = ihm.dumper._PostProcessDumper() dumper.finalize(system) # assign analysis IDs out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_modeling_post_process.id _ihm_modeling_post_process.protocol_id _ihm_modeling_post_process.analysis_id _ihm_modeling_post_process.step_id _ihm_modeling_post_process.type _ihm_modeling_post_process.feature _ihm_modeling_post_process.num_models_begin _ihm_modeling_post_process.num_models_end _ihm_modeling_post_process.struct_assembly_id _ihm_modeling_post_process.dataset_group_id _ihm_modeling_post_process.software_id _ihm_modeling_post_process.script_file_id _ihm_modeling_post_process.details 1 1 1 1 none none . . . . . . . 2 1 2 1 filter energy/score 1000 200 . . . . . 3 1 2 2 cluster RMSD 200 42 . . . . . 
4 1 2 3 validation energy/score 42 42 101 301 401 501 'test step' # """) def test_model_dumper(self): """Test ModelDumper""" class MockObject: pass system = ihm.System() state = ihm.model.State() system.state_groups.append(ihm.model.StateGroup([state])) protocol = MockObject() protocol._id = 42 assembly = ihm.Assembly() assembly._id = 99 representation = ihm.representation.Representation() representation._id = 32 model = ihm.model.Model(assembly=assembly, protocol=protocol, representation=representation, name='test model') model2 = ihm.model.Model(assembly=assembly, protocol=protocol, representation=representation, name='test model2') model3 = ihm.model.Model(assembly=assembly, protocol=protocol, representation=representation, name='test model3') # Existing IDs should be overwritten model3._id = 999 # Group contains multiple copies of model - should be pruned on output group = ihm.model.ModelGroup([model, model, model2], name='Group1') state.append(group) group2 = ihm.model.ModelGroup([model3], name='Group 2', details='group 2 details') state.append(group2) dumper = ihm.dumper._ModelDumper() dumper.finalize(system) # assign model/group IDs out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_model_list.model_id _ihm_model_list.model_name _ihm_model_list.assembly_id _ihm_model_list.protocol_id _ihm_model_list.representation_id 1 'test model' 99 42 32 2 'test model2' 99 42 32 3 'test model3' 99 42 32 # # loop_ _ihm_model_group.id _ihm_model_group.name _ihm_model_group.details 1 Group1 . 2 'Group 2' 'group 2 details' # # loop_ _ihm_model_group_link.group_id _ihm_model_group_link.model_id 1 1 1 2 2 3 # """) def test_model_representative_dumper(self): """Test ModelRepresentativeDumper""" class MockObject: pass system = ihm.System() m1 = ihm.model.Model(assembly=None, protocol=None, representation=None) m1._id = 5 m2 = ihm.model.Model(assembly=None, protocol=None, representation=None) m2._id = 8 group = ihm.model.ModelGroup([m1, m2]) group._id = 42 self.assertRaises(ValueError, ihm.model.ModelRepresentative, m1, "bad criteria") group.representatives.extend([ ihm.model.ModelRepresentative(m1, "medoid"), ihm.model.ModelRepresentative(m2, "lowest energy")]) state = ihm.model.State() state.append(group) system.state_groups.append(ihm.model.StateGroup([state])) dumper = ihm.dumper._ModelRepresentativeDumper() out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_model_representative.id _ihm_model_representative.model_group_id _ihm_model_representative.model_id _ihm_model_representative.selection_criteria 1 42 5 medoid 2 42 8 'lowest energy' # """) def _make_test_model(self, water=False, seq='ACGT'): class MockObject: pass system = ihm.System() state = ihm.model.State() system.state_groups.append(ihm.model.StateGroup([state])) if water: e1 = ihm.Entity([ihm.WaterChemComp()]) else: e1 = ihm.Entity(seq, description="Nup84") e1._id = 9 system.entities.append(e1) if water: asym = ihm.WaterAsymUnit(e1, number=3, details='foo') else: asym = ihm.AsymUnit(e1, 'foo') asym._id = 'X' system.asym_units.append(asym) protocol = MockObject() protocol._id = 42 assembly = ihm.Assembly([asym]) assembly._id = 99 s = ihm.representation.ResidueSegment(asym, True, 'sphere') representation = ihm.representation.Representation([s]) representation._id = 32 model = ihm.model.Model(assembly=assembly, protocol=protocol, representation=representation, name='test model') group = ihm.model.ModelGroup([model]) state.append(group) return system, model, asym def 
test_range_checker_asmb_asym(self): """Test RangeChecker class checking assembly asym ID match""" system, model, asym = self._make_test_model() asym2 = ihm.AsymUnit(asym.entity, 'bar') asym2._id = 'Y' system.asym_units.append(asym2) # Handle multiple ranges for a given asym model.assembly.append(asym(1, 2)) # RangeChecker should ignore entities in the assembly model.assembly.append(asym.entity) # Everything is represented for a in asym, asym2: s = ihm.representation.AtomicSegment(a, rigid=True) model.representation.append(s) s = ihm.representation.FeatureSegment(a, rigid=False, primitive='sphere', count=2) model.representation.append(s) rngcheck = ihm.dumper._RangeChecker(model) rngcheck_nocheck = ihm.dumper._RangeChecker(model, check=False) # Atom is OK (good asym) atom = ihm.model.Atom(asym_unit=asym, seq_id=1, atom_id='C', type_symbol='C', x=1.0, y=2.0, z=3.0) rngcheck(atom) # Sphere is OK (good asym) sphere = ihm.model.Sphere(asym_unit=asym, seq_id_range=(1, 2), x=1.0, y=2.0, z=3.0, radius=4.0) rngcheck(sphere) # Atom is not OK (bad asym) atom = ihm.model.Atom(asym_unit=asym2, seq_id=1, atom_id='C', type_symbol='C', x=1.0, y=2.0, z=3.0) self.assertRaises(ValueError, rngcheck, atom) rngcheck_nocheck(atom) # Sphere is not OK (bad asym) sphere = ihm.model.Sphere(asym_unit=asym2, seq_id_range=(1, 2), x=1.0, y=2.0, z=3.0, radius=4.0) self.assertRaises(ValueError, rngcheck, sphere) rngcheck_nocheck(sphere) def test_range_checker_asmb_seq_id(self): """Test RangeChecker class checking assembly seq_id range""" system, model, asym = self._make_test_model() # Only part of asym is in the assembly asmb = ihm.Assembly([asym(1, 2)]) model.assembly = asmb # Everything is represented s = ihm.representation.AtomicSegment(asym, rigid=True) model.representation.append(s) s = ihm.representation.FeatureSegment(asym, rigid=False, primitive='sphere', count=2) model.representation.append(s) rngcheck = ihm.dumper._RangeChecker(model) self.assertIsNone(rngcheck._last_asmb_range_matched) self.assertIsNone(rngcheck._last_asmb_asym_matched) # Atom is OK (good range) atom = ihm.model.Atom(asym_unit=asym, seq_id=1, atom_id='C', type_symbol='C', x=1.0, y=2.0, z=3.0) rngcheck(atom) # Cache should now be set self.assertEqual(rngcheck._last_asmb_range_matched, (1, 2)) self.assertEqual(rngcheck._last_asmb_asym_matched, 'X') # 2nd check with same seq_id should use the cache atom = ihm.model.Atom(asym_unit=asym, seq_id=1, atom_id='CA', type_symbol='C', x=1.0, y=2.0, z=3.0) rngcheck(atom) # Sphere is OK (good range) sphere = ihm.model.Sphere(asym_unit=asym, seq_id_range=(1, 2), x=1.0, y=2.0, z=3.0, radius=4.0) rngcheck(sphere) # Atom is not OK (bad range) atom = ihm.model.Atom(asym_unit=asym, seq_id=10, atom_id='C', type_symbol='C', x=1.0, y=2.0, z=3.0) self.assertRaises(ValueError, rngcheck, atom) # Sphere is not OK (bad range) sphere = ihm.model.Sphere(asym_unit=asym, seq_id_range=(1, 10), x=1.0, y=2.0, z=3.0, radius=4.0) self.assertRaises(ValueError, rngcheck, sphere) def test_range_checker_duplicate_atoms(self): """Test RangeChecker class checking duplicate atoms""" system, model, asym = self._make_test_model() asmb = ihm.Assembly([asym]) model.assembly = asmb # Everything is represented s = ihm.representation.AtomicSegment(asym, rigid=True) model.representation.append(s) rngcheck = ihm.dumper._RangeChecker(model) atom = ihm.model.Atom(asym_unit=asym, seq_id=1, atom_id='CA', type_symbol='C', x=1.0, y=2.0, z=3.0) rngcheck(atom) # Error to write another atom with same atom_id to same seq_id atom = 
ihm.model.Atom(asym_unit=asym, seq_id=1, atom_id='CA', type_symbol='C', x=1.0, y=2.0, z=3.0) self.assertRaises(ValueError, rngcheck, atom) # It's fine though if alt_id is different atom = ihm.model.Atom(asym_unit=asym, seq_id=1, atom_id='CA', type_symbol='C', x=1.0, y=2.0, z=3.0, alt_id='A') rngcheck(atom) atom = ihm.model.Atom(asym_unit=asym, seq_id=1, atom_id='CA', type_symbol='C', x=1.0, y=2.0, z=3.0, alt_id='B') rngcheck(atom) atom = ihm.model.Atom(asym_unit=asym, seq_id=1, atom_id='CA', type_symbol='C', x=1.0, y=2.0, z=3.0, alt_id='A') self.assertRaises(ValueError, rngcheck, atom) def test_range_checker_duplicate_atoms_water(self): """Test RangeChecker class checking duplicate water atoms""" system, model, asym = self._make_test_model(water=True) asmb = ihm.Assembly([asym]) model.assembly = asmb # Everything is represented s = ihm.representation.AtomicSegment(asym, rigid=True) model.representation.append(s) rngcheck = ihm.dumper._RangeChecker(model) atom = ihm.model.Atom(asym_unit=asym, seq_id=1, atom_id='O', type_symbol='O', x=1.0, y=2.0, z=3.0, het=True) rngcheck(atom) atom = ihm.model.Atom(asym_unit=asym, seq_id=2, atom_id='O', type_symbol='O', x=1.0, y=2.0, z=3.0, het=True) rngcheck(atom) atom = ihm.model.Atom(asym_unit=asym, seq_id=2, atom_id='O', type_symbol='O', x=1.0, y=2.0, z=3.0, het=True) self.assertRaises(ValueError, rngcheck, atom) def test_range_checker_repr_asym(self): """Test RangeChecker class checking representation asym ID match""" system, model, asym = self._make_test_model() asym2 = ihm.AsymUnit(asym.entity, 'bar') asym2._id = 'Y' system.asym_units.append(asym2) model.assembly.append(asym2) # Add multiple representation segments for asym s = ihm.representation.AtomicSegment(asym(1, 2), rigid=True) model.representation.append(s) s = ihm.representation.FeatureSegment(asym, rigid=False, primitive='sphere', count=2) model.representation.append(s) rngcheck = ihm.dumper._RangeChecker(model) # Atom is OK (good asym) atom = ihm.model.Atom(asym_unit=asym, seq_id=1, atom_id='C', type_symbol='C', x=1.0, y=2.0, z=3.0) rngcheck(atom) # Sphere is OK (good asym) sphere = ihm.model.Sphere(asym_unit=asym, seq_id_range=(1, 2), x=1.0, y=2.0, z=3.0, radius=4.0) rngcheck(sphere) # Atom is not OK (bad asym) atom = ihm.model.Atom(asym_unit=asym2, seq_id=1, atom_id='C', type_symbol='C', x=1.0, y=2.0, z=3.0) self.assertRaises(ValueError, rngcheck, atom) # Sphere is not OK (bad asym) sphere = ihm.model.Sphere(asym_unit=asym2, seq_id_range=(1, 2), x=1.0, y=2.0, z=3.0, radius=4.0) self.assertRaises(ValueError, rngcheck, sphere) def test_range_checker_repr_seq_id(self): """Test RangeChecker class checking representation seq_id range""" system, model, asym = self._make_test_model() asym2 = ihm.AsymUnit(asym.entity, 'bar') asym2._id = 'Y' system.asym_units.append(asym2) model.assembly.append(asym2) heme = ihm.NonPolymerChemComp("HEM", name='heme', formula='C34 H32 Fe N4 O4') entity_heme = ihm.Entity([heme], description='Heme') entity_heme._id = 99 system.entities.append(entity_heme) asym_nonpol = ihm.AsymUnit(entity_heme, 'baz') asym_nonpol._id = 'Z' system.asym_units.append(asym_nonpol) model.assembly.append(asym_nonpol) # Add multiple representation segments for asym2 s = ihm.representation.AtomicSegment(asym2(1, 2), rigid=True) model.representation.append(s) s = ihm.representation.FeatureSegment(asym2(1, 2), rigid=False, primitive='sphere', count=2) model.representation.append(s) s = ihm.representation.AtomicSegment(asym_nonpol, rigid=True) model.representation.append(s) rngcheck = 
ihm.dumper._RangeChecker(model) self.assertIsNone(rngcheck._last_repr_segment_matched) # Atom is OK (good range) atom = ihm.model.Atom(asym_unit=asym2, seq_id=1, atom_id='C', type_symbol='C', x=1.0, y=2.0, z=3.0) rngcheck(atom) # Cache should now be set self.assertEqual( rngcheck._last_repr_segment_matched.asym_unit.seq_id_range, (1, 2)) # 2nd check with same seq_id should use the cache atom = ihm.model.Atom(asym_unit=asym2, seq_id=1, atom_id='CA', type_symbol='C', x=1.0, y=2.0, z=3.0) rngcheck(atom) # Sphere is OK (good range) sphere = ihm.model.Sphere(asym_unit=asym2, seq_id_range=(1, 2), x=1.0, y=2.0, z=3.0, radius=4.0) rngcheck(sphere) # Atom is not OK (bad range) atom = ihm.model.Atom(asym_unit=asym2, seq_id=4, atom_id='C', type_symbol='C', x=1.0, y=2.0, z=3.0) self.assertRaises(ValueError, rngcheck, atom) # Sphere is not OK (bad range) sphere = ihm.model.Sphere(asym_unit=asym2, seq_id_range=(1, 4), x=1.0, y=2.0, z=3.0, radius=4.0) self.assertRaises(ValueError, rngcheck, sphere) # Atom in a nonpolymer must have no seq_id (or seq_id==1) atom = ihm.model.Atom(asym_unit=asym_nonpol, seq_id=None, atom_id='C', type_symbol='C', x=1.0, y=2.0, z=3.0) rngcheck(atom) atom = ihm.model.Atom(asym_unit=asym_nonpol, seq_id=1, atom_id='C', type_symbol='C', x=1.0, y=2.0, z=3.0) rngcheck(atom) atom = ihm.model.Atom(asym_unit=asym2, seq_id=None, atom_id='C', type_symbol='C', x=1.0, y=2.0, z=3.0) self.assertRaises(ValueError, rngcheck, atom) atom = ihm.model.Atom(asym_unit=asym_nonpol, seq_id=1, atom_id='C', type_symbol='C', x=1.0, y=2.0, z=3.0) self.assertRaises(ValueError, rngcheck, atom) def test_range_checker_repr_type_atomic(self): """Test RangeChecker class type checking against AtomicSegments""" system, model, asym = self._make_test_model() # Replace test model's residue representation with atomic s = ihm.representation.AtomicSegment(asym, rigid=False) model.representation = ihm.representation.Representation([s]) rngcheck = ihm.dumper._RangeChecker(model) # Atom is OK atom = ihm.model.Atom(asym_unit=asym, seq_id=1, atom_id='C', type_symbol='C', x=1.0, y=2.0, z=3.0) rngcheck(atom) # Sphere is not OK sphere = ihm.model.Sphere(asym_unit=asym, seq_id_range=(1, 1), x=1.0, y=2.0, z=3.0, radius=4.0) self.assertRaises(ValueError, rngcheck, sphere) def test_range_checker_repr_type_residue(self): """Test RangeChecker class type checking against ResidueSegments""" system, model, asym = self._make_test_model() # Test model already has ResidueSegment representation rngcheck = ihm.dumper._RangeChecker(model) # Atom is not OK atom = ihm.model.Atom(asym_unit=asym, seq_id=1, atom_id='C', type_symbol='C', x=1.0, y=2.0, z=3.0) self.assertRaises(ValueError, rngcheck, atom) # Multi-residue Sphere is not OK sphere = ihm.model.Sphere(asym_unit=asym, seq_id_range=(1, 2), x=1.0, y=2.0, z=3.0, radius=4.0) self.assertRaises(ValueError, rngcheck, sphere) # Single-residue Sphere is OK sphere = ihm.model.Sphere(asym_unit=asym, seq_id_range=(1, 1), x=1.0, y=2.0, z=3.0, radius=4.0) rngcheck(sphere) def test_range_checker_repr_type_multi_residue(self): """Test RangeChecker class type checking against MultiResidueSegments""" system, model, asym = self._make_test_model() # Replace test model's residue representation with multi-residue s = ihm.representation.MultiResidueSegment(asym, rigid=False, primitive='sphere') model.representation = ihm.representation.Representation([s]) rngcheck = ihm.dumper._RangeChecker(model) # Atom is not OK atom = ihm.model.Atom(asym_unit=asym, seq_id=1, atom_id='C', type_symbol='C', x=1.0, y=2.0, z=3.0) 
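# Hedged summary of what the _RangeChecker tests in this block verify (not
# part of the test): each Atom or Sphere must fall within the model's
# assembly (matching asym ID and seq_id range) and within a representation
# segment of compatible granularity -- atoms need an AtomicSegment,
# single-residue spheres a ResidueSegment, spheres spanning the whole
# segment a MultiResidueSegment, and spheres within the range a
# FeatureSegment; anything else raises ValueError unless checks are
# disabled.
#
#   # e.g.: rngcheck = ihm.dumper._RangeChecker(model); rngcheck(atom)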
self.assertRaises(ValueError, rngcheck, atom) # Sphere is OK if it matches the asym range exactly sphere = ihm.model.Sphere(asym_unit=asym, seq_id_range=(1, 4), x=1.0, y=2.0, z=3.0, radius=4.0) rngcheck(sphere) sphere = ihm.model.Sphere(asym_unit=asym, seq_id_range=(1, 2), x=1.0, y=2.0, z=3.0, radius=4.0) self.assertRaises(ValueError, rngcheck, sphere) def test_range_checker_repr_type_feature(self): """Test RangeChecker class type checking against FeatureSegments""" system, model, asym = self._make_test_model() # Replace test model's residue representation with feature s = ihm.representation.FeatureSegment(asym, rigid=False, primitive='sphere', count=2) model.representation = ihm.representation.Representation([s]) rngcheck = ihm.dumper._RangeChecker(model) # Atom is not OK atom = ihm.model.Atom(asym_unit=asym, seq_id=1, atom_id='C', type_symbol='C', x=1.0, y=2.0, z=3.0) self.assertRaises(ValueError, rngcheck, atom) # Sphere is OK if it falls entirely within the segment range sphere = ihm.model.Sphere(asym_unit=asym, seq_id_range=(1, 2), x=1.0, y=2.0, z=3.0, radius=4.0) rngcheck(sphere) sphere = ihm.model.Sphere(asym_unit=asym, seq_id_range=(1, 20), x=1.0, y=2.0, z=3.0, radius=4.0) self.assertRaises(ValueError, rngcheck, sphere) def test_model_dumper_spheres(self): """Test ModelDumper with spheres""" system, model, asym = self._make_test_model() # Replace test model's residue representation with feature s = ihm.representation.FeatureSegment(asym, rigid=False, primitive='sphere', count=2) r = ihm.representation.Representation([s]) r._id = 32 model.representation = r model._spheres = [ihm.model.Sphere(asym_unit=asym, seq_id_range=(1, 3), x=1.0, y=2.0, z=3.0, radius=4.0), ihm.model.Sphere(asym_unit=asym, seq_id_range=(4, 4), x=4.0, y=5.0, z=6.0, radius=1.0, rmsf=8.0)] dumper = ihm.dumper._ModelDumper() dumper.finalize(system) # assign model/group IDs out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_model_list.model_id _ihm_model_list.model_name _ihm_model_list.assembly_id _ihm_model_list.protocol_id _ihm_model_list.representation_id 1 'test model' 99 42 32 # # loop_ _ihm_model_group.id _ihm_model_group.name _ihm_model_group.details 1 . . # # loop_ _ihm_model_group_link.group_id _ihm_model_group_link.model_id 1 1 # # loop_ _ihm_sphere_obj_site.id _ihm_sphere_obj_site.entity_id _ihm_sphere_obj_site.seq_id_begin _ihm_sphere_obj_site.seq_id_end _ihm_sphere_obj_site.asym_id _ihm_sphere_obj_site.Cartn_x _ihm_sphere_obj_site.Cartn_y _ihm_sphere_obj_site.Cartn_z _ihm_sphere_obj_site.object_radius _ihm_sphere_obj_site.rmsf _ihm_sphere_obj_site.model_id 1 9 1 3 X 1.000 2.000 3.000 4.000 . 
1 2 9 4 4 X 4.000 5.000 6.000 1.000 8.000 1 # """) def test_model_dumper_atoms(self): """Test ModelDumper with atoms""" system, model, asym = self._make_test_model() # Replace test model's residue representation with atomic s = ihm.representation.AtomicSegment(asym, rigid=False) r = ihm.representation.Representation([s]) r._id = 32 model.representation = r model._atoms = [ihm.model.Atom(asym_unit=asym, seq_id=1, atom_id='C', type_symbol='C', x=1.0, y=2.0, z=3.0), ihm.model.Atom(asym_unit=asym, seq_id=1, atom_id='CA', type_symbol='C', x=10.0, y=20.0, z=30.0, het=True), ihm.model.Atom(asym_unit=asym, seq_id=2, atom_id='N', type_symbol='N', x=4.0, y=5.0, z=6.0, biso=42.0, occupancy=0.2, alt_id='A')] dumper = ihm.dumper._ModelDumper() dumper.finalize(system) # assign model/group IDs # With auth_seq_id == seq_id out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_model_list.model_id _ihm_model_list.model_name _ihm_model_list.assembly_id _ihm_model_list.protocol_id _ihm_model_list.representation_id 1 'test model' 99 42 32 # # loop_ _ihm_model_group.id _ihm_model_group.name _ihm_model_group.details 1 . . # # loop_ _ihm_model_group_link.group_id _ihm_model_group_link.model_id 1 1 # # loop_ _atom_site.group_PDB _atom_site.id _atom_site.type_symbol _atom_site.label_atom_id _atom_site.label_alt_id _atom_site.label_comp_id _atom_site.label_seq_id _atom_site.auth_seq_id _atom_site.pdbx_PDB_ins_code _atom_site.label_asym_id _atom_site.Cartn_x _atom_site.Cartn_y _atom_site.Cartn_z _atom_site.occupancy _atom_site.label_entity_id _atom_site.auth_asym_id _atom_site.auth_comp_id _atom_site.B_iso_or_equiv _atom_site.pdbx_PDB_model_num _atom_site.ihm_model_id ATOM 1 C C . ALA 1 1 ? X 1.000 2.000 3.000 . 9 X ALA . 1 1 HETATM 2 C CA . ALA 1 1 ? X 10.000 20.000 30.000 . 9 X ALA . 1 1 ATOM 3 N N A CYS 2 2 ? X 4.000 5.000 6.000 0.200 9 X CYS 42.000 1 1 # # loop_ _atom_type.symbol C N # """) # Test dump_atoms with add_ihm=False fh = StringIO() writer = ihm.format.CifWriter(fh) dumper.dump_atoms(system, writer, add_ihm=False) self.assertNotIn('ihm_model_id', fh.getvalue()) # With auth_seq_id == seq_id-1 asym.auth_seq_id_map = -1 out = _get_dumper_output(dumper, system) self.assertEqual( out.split('\n')[44:47:2], ["ATOM 1 C C . ALA 1 0 ? X 1.000 2.000 3.000 . 9 X ALA . 1 1", "ATOM 3 N N A CYS 2 1 ? X 4.000 5.000 6.000 " "0.200 9 X CYS 42.000 1 1"]) # With auth_seq_id map asym.auth_seq_id_map = {1: 42, 2: 99} out = _get_dumper_output(dumper, system) self.assertEqual( out.split('\n')[44:47:2], ["ATOM 1 C C . ALA 1 42 ? X 1.000 2.000 3.000 . 9 X ALA . 1 1", "ATOM 3 N N A CYS 2 99 ? 
X 4.000 5.000 6.000 " "0.200 9 X CYS 42.000 1 1"]) # With duplicate atom IDs (atoms[0] is already 'C') model._atoms[1].atom_id = 'C' self.assertRaises(ValueError, _get_dumper_output, dumper, system) # Should work though if checks are disabled _ = _get_dumper_output(dumper, system, check=False) def test_model_dumper_assembly_asym_check(self): """Test ModelDumper Assembly asym check""" system, model, asym = self._make_test_model() dumper = ihm.dumper._ModelDumper() dumper.finalize(system) # assign model/group IDs # No atoms for assembly's asym with self.assertRaises(ValueError) as cm: _get_dumper_output(dumper, system) self.assertIn("reference asym IDs that don't have coordinates", str(cm.exception)) self.assertIn("ID 99, asym IDs X", str(cm.exception)) # Should work though if checks are disabled _ = _get_dumper_output(dumper, system, check=False) def test_model_dumper_water_atoms(self): """Test ModelDumper with water atoms""" system, model, asym = self._make_test_model(water=True) # Replace test model's residue representation with atomic s = ihm.representation.AtomicSegment(asym, rigid=False) r = ihm.representation.Representation([s]) r._id = 32 model.representation = r # No mapping for third water, so will get auth_seq_id=3 asym.auth_seq_id_map = {1: 42, 2: 99} model._atoms = [ihm.model.Atom(asym_unit=asym, seq_id=1, atom_id='O', type_symbol='O', het=True, x=1.0, y=2.0, z=3.0), ihm.model.Atom(asym_unit=asym, seq_id=2, atom_id='O', type_symbol='O', het=True, x=4.0, y=5.0, z=6.0), ihm.model.Atom(asym_unit=asym, seq_id=3, atom_id='O', type_symbol='O', het=True, x=7.0, y=8.0, z=9.0)] dumper = ihm.dumper._ModelDumper() dumper.finalize(system) # assign model/group IDs out = _get_dumper_output(dumper, system) self.assertEqual( out.split('\n')[44:47], ['HETATM 1 O O . HOH . 42 ? X 1.000 2.000 3.000 . 9 X HOH . 1 1', 'HETATM 2 O O . HOH . 99 ? X 4.000 5.000 6.000 . 9 X HOH . 1 1', 'HETATM 3 O O . HOH . 3 ? X 7.000 8.000 9.000 . 9 X HOH . 1 1']) def test_not_modeled_residue_range_dumper(self): """Test NotModeledResidueRangeDumper""" system, model, asym = self._make_test_model() rr1 = ihm.model.NotModeledResidueRange(asym, 1, 2) rr2 = ihm.model.NotModeledResidueRange( asym, 3, 4, reason="Highly variable models with poor precision") model.not_modeled_residue_ranges.extend((rr1, rr2)) dumper = ihm.dumper._ModelDumper() dumper.finalize(system) # assign model/group IDs dumper = ihm.dumper._NotModeledResidueRangeDumper() out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_residues_not_modeled.id _ihm_residues_not_modeled.model_id _ihm_residues_not_modeled.entity_description _ihm_residues_not_modeled.entity_id _ihm_residues_not_modeled.asym_id _ihm_residues_not_modeled.seq_id_begin _ihm_residues_not_modeled.seq_id_end _ihm_residues_not_modeled.comp_id_begin _ihm_residues_not_modeled.comp_id_end _ihm_residues_not_modeled.reason 1 1 Nup84 9 X 1 2 ALA CYS . 
2 1 Nup84 9 X 3 4 GLY THR 'Highly variable models with poor precision' # """) def test_not_modeled_residue_range_bad_range(self): """Test NotModeledResidueRangeDumper with bad residue ranges""" for badrng, exc in [((10, 14), IndexError), ((-4, 1), IndexError), ((3, 1), ValueError)]: system, model, asym = self._make_test_model() # Disable construction-time check so that we # can see dump time check asym.entity._range_check = False rr1 = ihm.model.NotModeledResidueRange(asym, *badrng) asym.entity._range_check = True model.not_modeled_residue_ranges.append(rr1) dumper = ihm.dumper._ModelDumper() dumper.finalize(system) # assign model/group IDs dumper = ihm.dumper._NotModeledResidueRangeDumper() self.assertRaises(exc, _get_dumper_output, dumper, system) # Should be OK if checks are disabled _ = _get_dumper_output(dumper, system, check=False) def test_ensemble_dumper(self): """Test EnsembleDumper""" class MockObject: pass pp = MockObject() pp._id = 99 system = ihm.System() m1 = ihm.model.Model(assembly='a1', protocol='p1', representation='r1') m2 = ihm.model.Model(assembly='a2', protocol='p2', representation='r2') group = ihm.model.ModelGroup([m1, m2]) group._id = 42 e1 = ihm.model.Ensemble(model_group=group, num_models=10, post_process=pp, name='cluster1', clustering_method='Hierarchical', clustering_feature='RMSD', precision=4.2, superimposed=True) loc = ihm.location.OutputFileLocation(repo='foo', path='bar') loc._id = 3 e2 = ihm.model.Ensemble(model_group=group, num_models=10, file=loc, details='test details') ss1 = ihm.model.IndependentSubsample(name='ss1', num_models=5) ss2 = ihm.model.IndependentSubsample(name='ss2', num_models=5, model_group=group, file=loc) ss3 = ihm.model.RandomSubsample(name='ss3', num_models=5) e2.subsamples.extend((ss1, ss2, ss3)) # Ensemble without a model group e3 = ihm.model.Ensemble(model_group=None, num_models=10, details='no-group details') system.ensembles.extend((e1, e2, e3)) dumper = ihm.dumper._EnsembleDumper() dumper.finalize(system) # assign IDs # Should raise an error since ss3 is not the same type as ss1/ss2 self.assertRaises(TypeError, _get_dumper_output, dumper, system) del e2.subsamples[2] out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_ensemble_info.ensemble_id _ihm_ensemble_info.ensemble_name _ihm_ensemble_info.post_process_id _ihm_ensemble_info.model_group_id _ihm_ensemble_info.ensemble_clustering_method _ihm_ensemble_info.ensemble_clustering_feature _ihm_ensemble_info.num_ensemble_models _ihm_ensemble_info.num_ensemble_models_deposited _ihm_ensemble_info.ensemble_precision_value _ihm_ensemble_info.ensemble_file_id _ihm_ensemble_info.details _ihm_ensemble_info.model_group_superimposed_flag _ihm_ensemble_info.sub_sample_flag _ihm_ensemble_info.sub_sampling_type 1 cluster1 99 42 Hierarchical RMSD 10 2 4.200 . . YES NO . 2 . . 42 . . 10 2 . 3 'test details' . YES independent 3 . . . . . 10 . . . 'no-group details' . NO . # # loop_ _ihm_ensemble_sub_sample.id _ihm_ensemble_sub_sample.name _ihm_ensemble_sub_sample.ensemble_id _ihm_ensemble_sub_sample.num_models _ihm_ensemble_sub_sample.num_models_deposited _ihm_ensemble_sub_sample.model_group_id _ihm_ensemble_sub_sample.file_id 1 ss1 2 5 0 . . 
2 ss2 2 5 2 42 3 # """) def test_density_dumper(self): """Test DensityDumper""" class MockObject: pass system = ihm.System() e1 = ihm.Entity('AHCD') e1._id = 9 asym = ihm.AsymUnit(e1) asym._id = 'X' group = MockObject() group._id = 42 ens = ihm.model.Ensemble(model_group=group, num_models=10) loc = ihm.location.OutputFileLocation(repo='foo', path='bar') loc._id = 3 ens.densities.append(ihm.model.LocalizationDensity(loc, asym(1, 2))) ens.densities.append(ihm.model.LocalizationDensity(loc, asym)) ens._id = 5 system.ensembles.append(ens) # Assign segment IDs ihm.dumper._EntityPolySegmentDumper().finalize(system) dumper = ihm.dumper._DensityDumper() dumper.finalize(system) # assign IDs out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_localization_density_files.id _ihm_localization_density_files.file_id _ihm_localization_density_files.ensemble_id _ihm_localization_density_files.entity_id _ihm_localization_density_files.asym_id _ihm_localization_density_files.entity_poly_segment_id 1 3 5 9 X 1 2 3 5 9 X 2 # """) def test_entity_poly_segment_dumper(self): """Test EntityPolySegmentDumper""" system = ihm.System() e1 = ihm.Entity('AHCD') e2 = ihm.Entity('ACG') e3 = ihm.Entity([ihm.NonPolymerChemComp('HEM')]) a1 = ihm.AsymUnit(e1) a1._id = 'X' system.entities.extend((e1, e2, e3)) system.asym_units.append(a1) res1 = e2.residue(1) res2 = e2.residue(2) system.orphan_features.append(ihm.restraint.ResidueFeature([e2])) system.orphan_features.append(ihm.restraint.ResidueFeature([res2])) system.orphan_features.append(ihm.restraint.NonPolyFeature([e3])) system._make_complete_assembly() ihm.dumper._EntityDumper().finalize(system) # assign entity IDs dumper = ihm.dumper._EntityPolySegmentDumper() dumper.finalize(system) # assign IDs # e1 isn't directly used in anything (a1 is used instead, in the # assembly) so should have no range ID self.assertFalse(hasattr(e1, '_range_id')) self.assertEqual(a1._range_id, 1) # e2 is use, in a ResidueFeature, so should have a range ID self.assertEqual(e2._range_id, 2) # non-polymers don't have ranges self.assertEqual(e3._range_id, None) # res2 should have been assigned a range, but not res1 self.assertFalse(hasattr(res1, '_range_id')) self.assertEqual(res2._range_id, 3) out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_entity_poly_segment.id _ihm_entity_poly_segment.entity_id _ihm_entity_poly_segment.seq_id_begin _ihm_entity_poly_segment.seq_id_end _ihm_entity_poly_segment.comp_id_begin _ihm_entity_poly_segment.comp_id_end 1 1 1 4 ALA ASP 2 2 1 3 ALA GLY 3 2 2 2 CYS CYS # """) def test_entity_poly_segment_dumper_bad_range(self): """Test EntityPolySegmentDumper with bad residue ranges""" for badrng, exc in [((10, 14), IndexError), ((-4, 1), IndexError), ((3, 1), ValueError)]: system = ihm.System() e1 = ihm.Entity('AHCD') system.entities.append(e1) # Disable construction-time check so that we # can see dump time check e1._range_check = False system.orphan_features.append( ihm.restraint.ResidueFeature([e1(*badrng)])) e1._range_check = True dumper = ihm.dumper._EntityDumper() dumper.finalize(system) # assign IDs dumper = ihm.dumper._EntityPolySegmentDumper() dumper.finalize(system) # assign IDs self.assertRaises(exc, _get_dumper_output, dumper, system) # Should be OK though if checks are disabled _ = _get_dumper_output(dumper, system, check=False) def test_single_state(self): """Test MultiStateDumper with a single state""" system = ihm.System() state = ihm.model.State() 
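# With only one state in a single group, no multi-state output is needed; the dumper is expected to produce an empty string below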
system.state_groups.append(ihm.model.StateGroup([state])) dumper = ihm.dumper._MultiStateDumper() dumper.finalize(system) # assign IDs out = _get_dumper_output(dumper, system) self.assertEqual(out, "") def test_multi_state(self): """Test MultiStateDumper with multiple states""" system = ihm.System() sg1 = ihm.model.StateGroup() sg2 = ihm.model.StateGroup() system.state_groups.extend((sg1, sg2)) state1 = ihm.model.State(type='complex formation', name='unbound', experiment_type="Fraction of bulk", details="Unbound molecule 1") state1.append(ihm.model.ModelGroup(name="group1")) state1.append(ihm.model.ModelGroup(name="group2")) state2 = ihm.model.State(type='complex formation', name='bound', experiment_type="Fraction of bulk", details="Unbound molecule 2") state2.append(ihm.model.ModelGroup(name="group3")) sg1.extend((state1, state2)) state3 = ihm.model.State(population_fraction=0.4) state3.append(ihm.model.ModelGroup(name="group4")) sg2.append(state3) dumper = ihm.dumper._ModelDumper() dumper.finalize(system) # assign model group IDs dumper = ihm.dumper._MultiStateDumper() dumper.finalize(system) # assign IDs out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_multi_state_modeling.state_id _ihm_multi_state_modeling.state_group_id _ihm_multi_state_modeling.population_fraction _ihm_multi_state_modeling.state_type _ihm_multi_state_modeling.state_name _ihm_multi_state_modeling.experiment_type _ihm_multi_state_modeling.details 1 1 . 'complex formation' unbound 'Fraction of bulk' 'Unbound molecule 1' 2 1 . 'complex formation' bound 'Fraction of bulk' 'Unbound molecule 2' 3 2 0.400 . . . . # # loop_ _ihm_multi_state_model_group_link.state_id _ihm_multi_state_model_group_link.model_group_id 1 1 1 2 2 3 3 4 # """) def test_orphan_model_groups(self): """Test detection of ModelGroups not in States""" system = ihm.System() m1 = ihm.model.Model(assembly='a1', protocol='p1', representation='r1') group = ihm.model.ModelGroup([m1]) group._id = 42 e1 = ihm.model.Ensemble(model_group=group, num_models=10, post_process=None, name='cluster1', clustering_method='Hierarchical', clustering_feature='RMSD', precision=4.2) system.ensembles.append(e1) dumper = ihm.dumper._ModelDumper() self.assertRaises(ValueError, dumper.finalize, system) def test_ordered(self): """Test OrderedDumper""" system = ihm.System() mg1 = ihm.model.ModelGroup(name="group1") mg1._id = 42 mg2 = ihm.model.ModelGroup(name="group2") mg2._id = 82 mg3 = ihm.model.ModelGroup(name="group3") mg3._id = 92 proc = ihm.model.OrderedProcess("time steps") edge = ihm.model.ProcessEdge(mg1, mg2) step = ihm.model.ProcessStep([edge], "Linear reaction") proc.steps.append(step) system.ordered_processes.append(proc) proc = ihm.model.OrderedProcess("time steps", "Proc 2") edge1 = ihm.model.ProcessEdge(mg1, mg2) edge2 = ihm.model.ProcessEdge(mg1, mg3) step = ihm.model.ProcessStep([edge1, edge2], "Branched reaction") proc.steps.append(step) system.ordered_processes.append(proc) dumper = ihm.dumper._OrderedDumper() dumper.finalize(system) out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_ordered_model.process_id _ihm_ordered_model.process_description _ihm_ordered_model.ordered_by _ihm_ordered_model.step_id _ihm_ordered_model.step_description _ihm_ordered_model.edge_id _ihm_ordered_model.edge_description _ihm_ordered_model.model_group_id_begin _ihm_ordered_model.model_group_id_end 1 . 'time steps' 1 'Linear reaction' 1 . 42 82 2 'Proc 2' 'time steps' 1 'Branched reaction' 1 . 
42 82 2 'Proc 2' 'time steps' 1 'Branched reaction' 2 . 42 92 # """) def test_em3d_restraint_dumper(self): """Test EM3DRestraintDumper""" class MockObject: pass system = ihm.System() dataset = MockObject() dataset._id = 97 dataset2 = MockObject() dataset2._id = 87 assembly = MockObject() assembly._id = 99 citation = MockObject() citation._id = 8 r = ihm.restraint.EM3DRestraint( dataset=dataset, assembly=assembly, segment=False, fitting_method='Gaussian mixture model', number_of_gaussians=40, details='GMM fitting') r2 = ihm.restraint.EM3DRestraint( dataset=dataset2, assembly=assembly, segment=True, fitting_method='Gaussian mixture model', fitting_method_citation=citation, number_of_gaussians=30, details='Other details') m = ihm.model.Model(assembly='foo', protocol='bar', representation='baz') m._id = 42 m2 = ihm.model.Model(assembly='foo', protocol='bar', representation='baz') m2._id = 44 system.restraints.extend((r, r2, MockObject())) r.fits[m] = ihm.restraint.EM3DRestraintFit(0.4) r.fits[m2] = ihm.restraint.EM3DRestraintFit() r2.fits[m2] = ihm.restraint.EM3DRestraintFit() dumper = ihm.dumper._EM3DDumper() dumper.finalize(system) # assign IDs out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_3dem_restraint.id _ihm_3dem_restraint.dataset_list_id _ihm_3dem_restraint.fitting_method _ihm_3dem_restraint.fitting_method_citation_id _ihm_3dem_restraint.struct_assembly_id _ihm_3dem_restraint.map_segment_flag _ihm_3dem_restraint.number_of_gaussians _ihm_3dem_restraint.model_id _ihm_3dem_restraint.cross_correlation_coefficient _ihm_3dem_restraint.details 1 97 'Gaussian mixture model' . 99 NO 40 42 0.400 'GMM fitting' 2 97 'Gaussian mixture model' . 99 NO 40 44 . 'GMM fitting' 3 87 'Gaussian mixture model' 8 99 YES 30 44 . 'Other details' # """) def test_sas_restraint_dumper(self): """Test SASRestraintDumper""" class MockObject: pass system = ihm.System() dataset = MockObject() dataset._id = 97 assembly = MockObject() assembly._id = 99 r = ihm.restraint.SASRestraint( dataset=dataset, assembly=assembly, segment=False, fitting_method='FoXS', fitting_atom_type='Heavy atoms', multi_state=False, radius_of_gyration=21.07, details='FoXS fitting') m = ihm.model.Model(assembly='foo', protocol='bar', representation='baz') m._id = 42 m2 = ihm.model.Model(assembly='foo', protocol='bar', representation='baz') m2._id = 44 system.restraints.extend((r, MockObject())) r.fits[m] = ihm.restraint.SASRestraintFit(4.69) r.fits[m2] = ihm.restraint.SASRestraintFit() dumper = ihm.dumper._SASDumper() dumper.finalize(system) # assign IDs out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_sas_restraint.id _ihm_sas_restraint.dataset_list_id _ihm_sas_restraint.model_id _ihm_sas_restraint.struct_assembly_id _ihm_sas_restraint.profile_segment_flag _ihm_sas_restraint.fitting_atom_type _ihm_sas_restraint.fitting_method _ihm_sas_restraint.fitting_state _ihm_sas_restraint.radius_of_gyration _ihm_sas_restraint.chi_value _ihm_sas_restraint.details 1 97 42 99 NO 'Heavy atoms' FoXS Single 21.070 4.690 'FoXS fitting' 2 97 44 99 NO 'Heavy atoms' FoXS Single 21.070 . 
'FoXS fitting' # """) def test_em2d_restraint_dumper(self): """Test EM2DRestraintDumper""" class MockObject: pass system = ihm.System() dataset = MockObject() dataset._id = 97 assembly = MockObject() assembly._id = 99 r = ihm.restraint.EM2DRestraint( dataset=dataset, assembly=assembly, segment=False, number_raw_micrographs=400, pixel_size_width=0.6, pixel_size_height=0.5, image_resolution=30.0, number_of_projections=100, details='Test fit') m = ihm.model.Model(assembly='foo', protocol='bar', representation='baz') m._id = 42 m2 = ihm.model.Model(assembly='foo', protocol='bar', representation='baz') m2._id = 44 system.restraints.extend((r, MockObject())) r.fits[m] = ihm.restraint.EM2DRestraintFit( cross_correlation_coefficient=0.4, rot_matrix=[[-0.64, 0.09, 0.77], [0.76, -0.12, 0.64], [0.15, 0.99, 0.01]], tr_vector=[1., 2., 3.]) r.fits[m2] = ihm.restraint.EM2DRestraintFit() dumper = ihm.dumper._EM2DDumper() dumper.finalize(system) # assign IDs out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_2dem_class_average_restraint.id _ihm_2dem_class_average_restraint.dataset_list_id _ihm_2dem_class_average_restraint.number_raw_micrographs _ihm_2dem_class_average_restraint.pixel_size_width _ihm_2dem_class_average_restraint.pixel_size_height _ihm_2dem_class_average_restraint.image_resolution _ihm_2dem_class_average_restraint.image_segment_flag _ihm_2dem_class_average_restraint.number_of_projections _ihm_2dem_class_average_restraint.struct_assembly_id _ihm_2dem_class_average_restraint.details 1 97 400 0.600 0.500 30.000 NO 100 99 'Test fit' # # loop_ _ihm_2dem_class_average_fitting.id _ihm_2dem_class_average_fitting.restraint_id _ihm_2dem_class_average_fitting.model_id _ihm_2dem_class_average_fitting.cross_correlation_coefficient _ihm_2dem_class_average_fitting.rot_matrix[1][1] _ihm_2dem_class_average_fitting.rot_matrix[2][1] _ihm_2dem_class_average_fitting.rot_matrix[3][1] _ihm_2dem_class_average_fitting.rot_matrix[1][2] _ihm_2dem_class_average_fitting.rot_matrix[2][2] _ihm_2dem_class_average_fitting.rot_matrix[3][2] _ihm_2dem_class_average_fitting.rot_matrix[1][3] _ihm_2dem_class_average_fitting.rot_matrix[2][3] _ihm_2dem_class_average_fitting.rot_matrix[3][3] _ihm_2dem_class_average_fitting.tr_vector[1] _ihm_2dem_class_average_fitting.tr_vector[2] _ihm_2dem_class_average_fitting.tr_vector[3] 1 1 42 0.400 -0.640000 0.760000 0.150000 0.090000 -0.120000 0.990000 0.770000 0.640000 0.010000 1.000 2.000 3.000 2 1 44 . . . . . . . . . . . . . 
# """) def test_cross_link_restraint_dumper(self): """Test CrossLinkRestraintDumper""" class MockObject: pass system = ihm.System() e1 = ihm.Entity('ATC', description='foo') e2 = ihm.Entity('DEF', description='bar') system.entities.extend((e1, e2)) asym1 = ihm.AsymUnit(e1) asym2 = ihm.AsymUnit(e2) system.asym_units.extend((asym1, asym2)) dataset = MockObject() dataset._id = 97 dss = ihm.ChemDescriptor('DSS') r = ihm.restraint.CrossLinkRestraint(dataset=dataset, linker=dss) # intra, unambiguous xxl1 = ihm.restraint.ExperimentalCrossLink( e1.residue(2), e1.residue(3)) # inter, ambiguous xxl2 = ihm.restraint.ExperimentalCrossLink( e1.residue(2), e2.residue(3)) xxl3 = ihm.restraint.ExperimentalCrossLink( e1.residue(2), e2.residue(2)) # duplicate crosslink, should be combined with the original (xxl2) xxl4 = ihm.restraint.ExperimentalCrossLink( e1.residue(2), e2.residue(3)) # should end up in own group, not with xxl4 (since xxl4==xxl2) xxl5 = ihm.restraint.ExperimentalCrossLink( e1.residue(1), e2.residue(1), details='test xl') r.experimental_cross_links.extend(([xxl1], [xxl2, xxl3], [xxl4, xxl5])) system.restraints.extend((r, MockObject())) d = ihm.restraint.UpperBoundDistanceRestraint(25.0) xl1 = ihm.restraint.ResidueCrossLink( xxl1, asym1, asym1, d, psi=0.5, sigma1=1.0, sigma2=2.0, restrain_all=True) d = ihm.restraint.LowerBoundDistanceRestraint(34.0) xl2 = ihm.restraint.AtomCrossLink( xxl3, asym1, asym2, 'C', 'N', d, restrain_all=False) # Duplicates should be ignored xl3 = ihm.restraint.AtomCrossLink( xxl3, asym1, asym2, 'C', 'N', d, restrain_all=False) # Restraints on pseudo sites ps = ihm.restraint.PseudoSite(x=10., y=20., z=30.) ps._id = 89 psxl = ihm.restraint.CrossLinkPseudoSite(site=ps) xl4 = ihm.restraint.ResidueCrossLink( xxl5, asym1, asym2, d, psi=0.5, sigma1=1.0, sigma2=2.0, restrain_all=True, pseudo2=[psxl]) m = MockObject() m._id = 99 psxl = ihm.restraint.CrossLinkPseudoSite(site=ps, model=m) m = MockObject() m._id = 990 psxl2 = ihm.restraint.CrossLinkPseudoSite(site=ps, model=m) xl5 = ihm.restraint.ResidueCrossLink( xxl2, asym1, asym2, d, psi=0.5, sigma1=1.0, sigma2=2.0, restrain_all=True, pseudo2=[psxl, psxl2]) r.cross_links.extend((xl1, xl2, xl3, xl4, xl5)) model = ihm.model.Model(assembly=None, protocol=None, representation=None) model._id = 201 xl1.fits[model] = ihm.restraint.CrossLinkFit(psi=0.1, sigma1=4.2, sigma2=2.1) # Fit of a ModelGroup model_group = ihm.model.ModelGroup([model]) model_group._id = 301 xl1.fits[model_group] = ihm.restraint.CrossLinkGroupFit( num_models=40, median_distance=4.0, details='test fit') # Fit of an Ensemble both with and without a ModelGroup ens1 = ihm.model.Ensemble(model_group=model_group, num_models=10) ens1._id = 401 xl1.fits[ens1] = ihm.restraint.CrossLinkGroupFit( num_models=30, median_distance=3.0) ens2 = ihm.model.Ensemble(model_group=None, num_models=20) ens2._id = 501 xl1.fits[ens2] = ihm.restraint.CrossLinkGroupFit( num_models=50, median_distance=9.0) ihm.dumper._EntityDumper().finalize(system) # assign entity IDs ihm.dumper._StructAsymDumper().finalize(system) # assign asym IDs ihm.dumper._ChemDescriptorDumper().finalize(system) # descriptor IDs dumper = ihm.dumper._CrossLinkDumper() dumper.finalize(system) # assign IDs out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_cross_link_list.id _ihm_cross_link_list.group_id _ihm_cross_link_list.entity_description_1 _ihm_cross_link_list.entity_id_1 _ihm_cross_link_list.seq_id_1 _ihm_cross_link_list.comp_id_1 _ihm_cross_link_list.entity_description_2 
_ihm_cross_link_list.entity_id_2 _ihm_cross_link_list.seq_id_2 _ihm_cross_link_list.comp_id_2 _ihm_cross_link_list.linker_chem_comp_descriptor_id _ihm_cross_link_list.linker_type _ihm_cross_link_list.dataset_list_id _ihm_cross_link_list.details 1 1 foo 1 2 THR foo 1 3 CYS 1 DSS 97 . 2 2 foo 1 2 THR bar 2 3 PHE 1 DSS 97 . 3 2 foo 1 2 THR bar 2 2 GLU 1 DSS 97 . 4 3 foo 1 1 ALA bar 2 1 ASP 1 DSS 97 'test xl' # # loop_ _ihm_cross_link_restraint.id _ihm_cross_link_restraint.group_id _ihm_cross_link_restraint.entity_id_1 _ihm_cross_link_restraint.asym_id_1 _ihm_cross_link_restraint.seq_id_1 _ihm_cross_link_restraint.comp_id_1 _ihm_cross_link_restraint.entity_id_2 _ihm_cross_link_restraint.asym_id_2 _ihm_cross_link_restraint.seq_id_2 _ihm_cross_link_restraint.comp_id_2 _ihm_cross_link_restraint.atom_id_1 _ihm_cross_link_restraint.atom_id_2 _ihm_cross_link_restraint.restraint_type _ihm_cross_link_restraint.conditional_crosslink_flag _ihm_cross_link_restraint.model_granularity _ihm_cross_link_restraint.distance_threshold _ihm_cross_link_restraint.psi _ihm_cross_link_restraint.sigma_1 _ihm_cross_link_restraint.sigma_2 _ihm_cross_link_restraint.pseudo_site_flag 1 1 1 A 2 THR 1 A 3 CYS . . 'upper bound' ALL by-residue 25.000 0.500 1.000 2.000 NO 2 3 1 A 2 THR 2 B 2 GLU C N 'lower bound' ANY by-atom 34.000 . . . NO 3 4 1 A 1 ALA 2 B 1 ASP . . 'lower bound' ALL by-residue 34.000 0.500 1.000 2.000 YES 4 2 1 A 2 THR 2 B 3 PHE . . 'lower bound' ALL by-residue 34.000 0.500 1.000 2.000 YES # # loop_ _ihm_cross_link_pseudo_site.id _ihm_cross_link_pseudo_site.restraint_id _ihm_cross_link_pseudo_site.cross_link_partner _ihm_cross_link_pseudo_site.pseudo_site_id _ihm_cross_link_pseudo_site.model_id 1 3 2 89 . 2 4 2 89 99 3 4 2 89 990 # # loop_ _ihm_cross_link_result.id _ihm_cross_link_result.restraint_id _ihm_cross_link_result.ensemble_id _ihm_cross_link_result.model_group_id _ihm_cross_link_result.num_models _ihm_cross_link_result.distance_threshold _ihm_cross_link_result.median_distance _ihm_cross_link_result.details 1 1 . 301 40 25.000 4.000 'test fit' 2 1 401 301 30 25.000 3.000 . 3 1 501 . 50 25.000 9.000 . 
# # loop_ _ihm_cross_link_result_parameters.id _ihm_cross_link_result_parameters.restraint_id _ihm_cross_link_result_parameters.model_id _ihm_cross_link_result_parameters.psi _ihm_cross_link_result_parameters.sigma_1 _ihm_cross_link_result_parameters.sigma_2 1 1 201 0.100 4.200 2.100 # """) def test_cross_link_restraint_dumper_range_check(self): """Test CrossLinkRestraintDumper with out-of-range residue""" class MockObject: pass system = ihm.System() e1 = ihm.Entity('ATC', description='foo') system.entities.append(e1) asym1 = ihm.AsymUnit(e1) system.asym_units.append(asym1) dataset = MockObject() dataset._id = 97 dss = ihm.ChemDescriptor('DSS') r = ihm.restraint.CrossLinkRestraint(dataset=dataset, linker=dss) # Disable construction-time check so that we # can see dump time check e1._range_check = False xxl1 = ihm.restraint.ExperimentalCrossLink( e1.residue(2), e1.residue(300)) e1._range_check = True r.experimental_cross_links.append([xxl1]) system.restraints.append(r) d = ihm.restraint.UpperBoundDistanceRestraint(25.0) xl1 = ihm.restraint.ResidueCrossLink( xxl1, asym1, asym1, d, psi=0.5, sigma1=1.0, sigma2=2.0, restrain_all=True) r.cross_links.append(xl1) ihm.dumper._EntityDumper().finalize(system) # assign entity IDs ihm.dumper._StructAsymDumper().finalize(system) # assign asym IDs ihm.dumper._ChemDescriptorDumper().finalize(system) # descriptor IDs dumper = ihm.dumper._CrossLinkDumper() dumper.finalize(system) # assign IDs self.assertRaises(IndexError, _get_dumper_output, dumper, system) # Should work though if checks are disabled _ = _get_dumper_output(dumper, system, check=False) def test_geometric_object_dumper(self): """Test GeometricObjectDumper""" system = ihm.System() center = ihm.geometry.Center(1., 2., 3.) trans = ihm.geometry.Transformation([[1, 0, 0], [0, 1, 0], [0, 0, 1]], [1., 2., 3.]) sphere = ihm.geometry.Sphere(center=center, transformation=trans, radius=2.2, name='my sphere', description='a test sphere') torus = ihm.geometry.Torus(center=center, transformation=trans, major_radius=5.6, minor_radius=1.2) half_torus = ihm.geometry.HalfTorus( center=center, transformation=trans, major_radius=5.6, minor_radius=1.2, thickness=0.1, inner=True) axis = ihm.geometry.XAxis() plane = ihm.geometry.XYPlane() system.orphan_geometric_objects.extend((sphere, torus, half_torus, axis, plane)) # Transformation not referenced by any object trans2 = ihm.geometry.Transformation([[1, 0, 0], [0, 1, 0], [0, 0, 1]], [4., 5., 6.]) system._orphan_geometric_transforms.append(trans2) # Center not referenced by any object center2 = ihm.geometry.Center(8., 9., 10.) 
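# center2 is deliberately unreferenced by any object; appending it to the system's orphan list should still assign it an ID and emit it in the center table below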
system._orphan_centers.append(center2) dumper = ihm.dumper._GeometricObjectDumper() dumper.finalize(system) # assign IDs self.assertEqual(len(dumper._objects_by_id), 5) self.assertEqual(len(dumper._centers_by_id), 2) self.assertEqual(len(dumper._transformations_by_id), 2) # Repeated calls to finalize should yield identical results dumper.finalize(system) self.assertEqual(len(dumper._objects_by_id), 5) self.assertEqual(len(dumper._centers_by_id), 2) self.assertEqual(len(dumper._transformations_by_id), 2) out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_geometric_object_center.id _ihm_geometric_object_center.xcoord _ihm_geometric_object_center.ycoord _ihm_geometric_object_center.zcoord 1 1.000 2.000 3.000 2 8.000 9.000 10.000 # # loop_ _ihm_geometric_object_transformation.id _ihm_geometric_object_transformation.rot_matrix[1][1] _ihm_geometric_object_transformation.rot_matrix[2][1] _ihm_geometric_object_transformation.rot_matrix[3][1] _ihm_geometric_object_transformation.rot_matrix[1][2] _ihm_geometric_object_transformation.rot_matrix[2][2] _ihm_geometric_object_transformation.rot_matrix[3][2] _ihm_geometric_object_transformation.rot_matrix[1][3] _ihm_geometric_object_transformation.rot_matrix[2][3] _ihm_geometric_object_transformation.rot_matrix[3][3] _ihm_geometric_object_transformation.tr_vector[1] _ihm_geometric_object_transformation.tr_vector[2] _ihm_geometric_object_transformation.tr_vector[3] 1 1.000000 0.000000 0.000000 0.000000 1.000000 0.000000 0.000000 0.000000 1.000000 1.000 2.000 3.000 2 1.000000 0.000000 0.000000 0.000000 1.000000 0.000000 0.000000 0.000000 1.000000 4.000 5.000 6.000 # # loop_ _ihm_geometric_object_list.object_id _ihm_geometric_object_list.object_type _ihm_geometric_object_list.object_name _ihm_geometric_object_list.object_description 1 sphere 'my sphere' 'a test sphere' 2 torus . . 3 half-torus . . 4 axis . . 5 plane . . # # loop_ _ihm_geometric_object_sphere.object_id _ihm_geometric_object_sphere.center_id _ihm_geometric_object_sphere.transformation_id _ihm_geometric_object_sphere.radius_r 1 1 1 2.200 # # loop_ _ihm_geometric_object_torus.object_id _ihm_geometric_object_torus.center_id _ihm_geometric_object_torus.transformation_id _ihm_geometric_object_torus.major_radius_R _ihm_geometric_object_torus.minor_radius_r 2 1 1 5.600 1.200 3 1 1 5.600 1.200 # # loop_ _ihm_geometric_object_half_torus.object_id _ihm_geometric_object_half_torus.thickness_th _ihm_geometric_object_half_torus.section 3 0.100 'inner half' # # loop_ _ihm_geometric_object_axis.object_id _ihm_geometric_object_axis.axis_type _ihm_geometric_object_axis.transformation_id 4 x-axis . # # loop_ _ihm_geometric_object_plane.object_id _ihm_geometric_object_plane.plane_type _ihm_geometric_object_plane.transformation_id 5 xy-plane . # """) def test_geometric_object_dumper_invalid_rotation(self): """Test GeometricObjectDumper with invalid rotation""" system = ihm.System() center = ihm.geometry.Center(1., 2., 3.) 
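# Passing None as the rotation matrix is invalid; dumping should raise ValueError unless checks are disabled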
trans = ihm.geometry.Transformation(None, [1., 2., 3.]) sphere = ihm.geometry.Sphere(center=center, transformation=trans, radius=2.2, name='my sphere', description='a test sphere') system.orphan_geometric_objects.append(sphere) dumper = ihm.dumper._GeometricObjectDumper() dumper.finalize(system) self.assertRaises(ValueError, _get_dumper_output, dumper, system) # OK if checks are disabled _ = _get_dumper_output(dumper, system, check=False) def test_geometric_object_dumper_invalid_translation(self): """Test GeometricObjectDumper with invalid translation""" system = ihm.System() center = ihm.geometry.Center(1., 2., 3.) trans = ihm.geometry.Transformation([[1, 0, 0], [0, 1, 0], [0, 0, 1]], ihm.unknown) sphere = ihm.geometry.Sphere(center=center, transformation=trans, radius=2.2, name='my sphere', description='a test sphere') system.orphan_geometric_objects.append(sphere) dumper = ihm.dumper._GeometricObjectDumper() dumper.finalize(system) self.assertRaises(ValueError, _get_dumper_output, dumper, system) # OK if checks are disabled _ = _get_dumper_output(dumper, system, check=False) def test_feature_dumper(self): """Test FeatureDumper""" system = ihm.System() e1 = ihm.Entity('ACGT') e2 = ihm.Entity([ihm.NonPolymerChemComp('HEM')]) system.entities.extend((e1, e2)) a1 = ihm.AsymUnit(e1, 'foo') a2 = ihm.AsymUnit(e1, 'baz') a3 = ihm.AsymUnit(e2, 'heme') system.asym_units.extend((a1, a2, a3)) f = ihm.restraint.ResidueFeature([a1, a2(2, 3), e1, e1(2, 3)], details='test feature') system.orphan_features.append(f) # Duplicate feature, should be pruned from output f = ihm.restraint.ResidueFeature([a1, a2(2, 3), e1, e1(2, 3)], details='other details') system.orphan_features.append(f) # Cannot make a ResidueFeature that includes a non-polymer 'residue' self.assertRaises(ValueError, ihm.restraint.ResidueFeature, [a1, a3]) # Polymeric atom feature f = ihm.restraint.AtomFeature([a1.residue(1).atom('CA'), a2.residue(2).atom('N'), e1.residue(1).atom('CB')]) system.orphan_features.append(f) # Nonpolymeric atom feature f = ihm.restraint.AtomFeature([a3.residue(1).atom('FE'), e2.residue(1).atom('FE')]) system.orphan_features.append(f) # Cannot make one feature that selects both polymer and nonpolymer self.assertRaises(ValueError, ihm.restraint.AtomFeature, [a1.residue(1).atom('CA'), a2.residue(2).atom('N'), a3.residue(1).atom('FE')]) # Nonpolymeric feature f = ihm.restraint.NonPolyFeature([a3, e2]) system.orphan_features.append(f) # Cannot make a NonPolyFeature that includes a polymer 'residue' self.assertRaises(ValueError, ihm.restraint.NonPolyFeature, [a1, a3]) # Pseudo site feature ps = ihm.restraint.PseudoSite(x=10., y=20., z=30.) ps._id = 89 f = ihm.restraint.PseudoSiteFeature(site=ps) system.orphan_features.append(f) # Duplicate Pseudo site feature ps = ihm.restraint.PseudoSite(x=10., y=20., z=30.) f = ihm.restraint.PseudoSiteFeature(site=ps) system.orphan_features.append(f) ihm.dumper._EntityDumper().finalize(system) # assign entity IDs ihm.dumper._StructAsymDumper().finalize(system) # assign asym IDs dumper = ihm.dumper._FeatureDumper() dumper.finalize(system) # assign IDs self.assertEqual(len(dumper._features_by_id), 5) # Repeated calls to finalize should yield identical results dumper.finalize(system) self.assertEqual(len(dumper._features_by_id), 5) out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_feature_list.feature_id _ihm_feature_list.feature_type _ihm_feature_list.entity_type _ihm_feature_list.details 1 'residue range' polymer 'test feature' 2 atom polymer . 
3 atom non-polymer . 4 ligand non-polymer . 5 'pseudo site' other . # # loop_ _ihm_poly_residue_feature.ordinal_id _ihm_poly_residue_feature.feature_id _ihm_poly_residue_feature.entity_id _ihm_poly_residue_feature.asym_id _ihm_poly_residue_feature.seq_id_begin _ihm_poly_residue_feature.comp_id_begin _ihm_poly_residue_feature.seq_id_end _ihm_poly_residue_feature.comp_id_end 1 1 1 A 1 ALA 4 THR 2 1 1 B 2 CYS 3 GLY 3 1 1 . 1 ALA 4 THR 4 1 1 . 2 CYS 3 GLY # # loop_ _ihm_poly_atom_feature.ordinal_id _ihm_poly_atom_feature.feature_id _ihm_poly_atom_feature.entity_id _ihm_poly_atom_feature.asym_id _ihm_poly_atom_feature.seq_id _ihm_poly_atom_feature.comp_id _ihm_poly_atom_feature.atom_id 1 2 1 A 1 ALA CA 2 2 1 B 2 CYS N 3 2 1 . 1 ALA CB # # loop_ _ihm_non_poly_feature.ordinal_id _ihm_non_poly_feature.feature_id _ihm_non_poly_feature.entity_id _ihm_non_poly_feature.asym_id _ihm_non_poly_feature.comp_id _ihm_non_poly_feature.atom_id 1 3 2 C HEM FE 2 3 2 . HEM FE 3 4 2 C HEM . 4 4 2 . HEM . # # loop_ _ihm_pseudo_site_feature.feature_id _ihm_pseudo_site_feature.pseudo_site_id 5 89 # """) def test_feature_dumper_no_residues(self): """Test FeatureDumper with an empty ResidueFeature""" system = ihm.System() f = ihm.restraint.ResidueFeature([]) system.orphan_features.append(f) dumper = ihm.dumper._FeatureDumper() dumper.finalize(system) # assign IDs self.assertEqual(len(dumper._features_by_id), 1) self.assertRaises(ValueError, _get_dumper_output, dumper, system) def test_feature_dumper_base_class(self): """Test FeatureDumper with a Feature base class""" system = ihm.System() f = ihm.restraint.Feature() system.orphan_features.append(f) dumper = ihm.dumper._FeatureDumper() dumper.finalize(system) # assign IDs self.assertEqual(len(dumper._features_by_id), 1) self.assertRaises(ValueError, _get_dumper_output, dumper, system) # Should be OK if checks are disabled out = _get_dumper_output(dumper, system, check=False) self.assertEqual(out, """# loop_ _ihm_feature_list.feature_id _ihm_feature_list.feature_type _ihm_feature_list.entity_type _ihm_feature_list.details 1 ? ? . # """) def test_pseudo_site_dumper(self): """Test PseudoSiteDumper""" system = ihm.System() ps1 = ihm.restraint.PseudoSite(x=10., y=20., z=30.) ps2 = ihm.restraint.PseudoSite(x=10., y=20., z=30., radius=40., description="test pseudo") # Duplicate pseudo site, should be pruned ps3 = ihm.restraint.PseudoSite(x=10., y=20., z=30., radius=40., description="other pseudo") system.orphan_pseudo_sites.extend((ps1, ps2, ps3)) dumper = ihm.dumper._PseudoSiteDumper() dumper.finalize(system) # assign IDs out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_pseudo_site.id _ihm_pseudo_site.Cartn_x _ihm_pseudo_site.Cartn_y _ihm_pseudo_site.Cartn_z _ihm_pseudo_site.radius _ihm_pseudo_site.description 1 10.000 20.000 30.000 . . 
2 10.000 20.000 30.000 40.000 'test pseudo' # """) def test_geometric_restraint_dumper(self): """Test GeometricRestraintDumper""" class MockObject: pass system = ihm.System() feat = MockObject() feat._id = 44 geom = MockObject() geom._id = 23 dataset = MockObject() dataset._id = 97 dist = ihm.restraint.UpperBoundDistanceRestraint(25.0) r = ihm.restraint.CenterGeometricRestraint( dataset=dataset, geometric_object=geom, feature=feat, distance=dist, harmonic_force_constant=2.0, restrain_all=False) system.restraints.append(r) dumper = ihm.dumper._GeometricRestraintDumper() dumper.finalize(system) # assign IDs out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_geometric_object_distance_restraint.id _ihm_geometric_object_distance_restraint.object_id _ihm_geometric_object_distance_restraint.feature_id _ihm_geometric_object_distance_restraint.object_characteristic _ihm_geometric_object_distance_restraint.restraint_type _ihm_geometric_object_distance_restraint.harmonic_force_constant _ihm_geometric_object_distance_restraint.distance_lower_limit _ihm_geometric_object_distance_restraint.distance_upper_limit _ihm_geometric_object_distance_restraint.group_conditionality _ihm_geometric_object_distance_restraint.dataset_list_id 1 23 44 center 'upper bound' 2.000 . 25.000 ANY 97 # """) def test_derived_distance_restraint_dumper(self): """Test DerivedDistanceRestraintDumper""" class MockObject: pass system = ihm.System() feat1 = MockObject() feat1._id = 44 feat2 = MockObject() feat2._id = 84 dataset = MockObject() dataset._id = 97 dist = ihm.restraint.LowerBoundDistanceRestraint(25.0) unkdist = ihm.restraint.DistanceRestraint() r1 = ihm.restraint.DerivedDistanceRestraint( dataset=dataset, feature1=feat1, feature2=feat2, distance=dist, probability=0.8) r2 = ihm.restraint.DerivedDistanceRestraint( dataset=dataset, feature1=feat1, feature2=feat2, distance=dist, probability=0.4) r3 = ihm.restraint.DerivedDistanceRestraint( dataset=dataset, feature1=feat1, feature2=feat2, distance=unkdist, probability=0.6, mic_value=0.4) rg = ihm.restraint.RestraintGroup((r2, r3)) system.restraints.extend((r1, r2)) # r2 is in restraints and groups system.restraint_groups.append(rg) dumper = ihm.dumper._DerivedDistanceRestraintDumper() dumper.finalize(system) # assign IDs out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_derived_distance_restraint.id _ihm_derived_distance_restraint.group_id _ihm_derived_distance_restraint.feature_id_1 _ihm_derived_distance_restraint.feature_id_2 _ihm_derived_distance_restraint.restraint_type _ihm_derived_distance_restraint.distance_lower_limit _ihm_derived_distance_restraint.distance_upper_limit _ihm_derived_distance_restraint.probability _ihm_derived_distance_restraint.mic_value _ihm_derived_distance_restraint.group_conditionality _ihm_derived_distance_restraint.dataset_list_id 1 . 44 84 'lower bound' 25.000 . 0.800 . . 97 2 1 44 84 'lower bound' 25.000 . 0.400 . . 97 3 1 44 84 . . . 0.600 0.400 . 
97 # """) def test_derived_distance_restraint_dumper_fail(self): """Test DerivedDistanceRestraintDumper multi-group failure""" class MockObject: pass system = ihm.System() feat1 = MockObject() feat2 = MockObject() dataset = MockObject() dist = ihm.restraint.LowerBoundDistanceRestraint(25.0) r1 = ihm.restraint.DerivedDistanceRestraint( dataset=dataset, feature1=feat1, feature2=feat2, distance=dist, probability=0.8) rg1 = ihm.restraint.RestraintGroup([r1]) rg2 = ihm.restraint.RestraintGroup([r1]) system.restraint_groups.extend((rg1, rg2)) dumper = ihm.dumper._DerivedDistanceRestraintDumper() # r1 cannot be in multiple groups (rg1 and rg2) self.assertRaises(ValueError, dumper.finalize, system) def test_hdx_restraint_dumper(self): """Test HDXRestraintDumper""" class MockObject: pass system = ihm.System() feat = MockObject() feat._id = 44 dataset = MockObject() dataset._id = 97 r1 = ihm.restraint.HDXRestraint( dataset=dataset, feature=feat, protection_factor=1.0, details="foo") r2 = ihm.restraint.HDXRestraint(dataset=None, feature=feat) system.restraints.extend((r1, r2)) dumper = ihm.dumper._HDXRestraintDumper() dumper.finalize(system) # assign IDs out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_hdx_restraint.id _ihm_hdx_restraint.feature_id _ihm_hdx_restraint.protection_factor _ihm_hdx_restraint.dataset_list_id _ihm_hdx_restraint.details 1 44 1.000 97 foo 2 44 . . . # """) def test_bad_restraint_groups(self): """Test RestraintGroups containing unsupported restraints""" class MockObject: pass s = ihm.System() dataset = MockObject() dataset.parents = [] assembly = ihm.Assembly() # Empty restraint groups are OK (even though they don't get IDs) rg = ihm.restraint.RestraintGroup([]) s.restraint_groups.append(rg) fh = StringIO() ihm.dumper.write(fh, [s]) r = ihm.restraint.SASRestraint( dataset=dataset, assembly=assembly, segment=False, fitting_method='FoXS', fitting_atom_type='Heavy atoms', multi_state=False, radius_of_gyration=21.07, details='FoXS fitting') rg = ihm.restraint.RestraintGroup([r]) s.restraint_groups.append(rg) fh = StringIO() # SASRestraint is an unsupported type in RestraintGroup self.assertRaises(TypeError, ihm.dumper.write, fh, [s]) def test_predicted_contact_restraint_dumper(self): """Test PredictedContactRestraintDumper""" class MockObject: pass system = ihm.System() e1 = ihm.Entity('AHC') a1 = ihm.AsymUnit(e1) e2 = ihm.Entity('GWT') a2 = ihm.AsymUnit(e2) system.entities.extend((e1, e2)) system.asym_units.extend((a1, a2)) dataset = MockObject() dataset._id = 97 software = MockObject() software._id = 34 dist = ihm.restraint.LowerBoundDistanceRestraint(25.0) dist2 = ihm.restraint.UpperBoundDistanceRestraint(14.0) r1 = ihm.restraint.PredictedContactRestraint( dataset=dataset, resatom1=a1.residue(1), resatom2=a2.residue(2), distance=dist, probability=0.8, by_residue=True, software=software) r2 = ihm.restraint.PredictedContactRestraint( dataset=dataset, resatom1=a1.residue(1).atom('CA'), resatom2=a2.residue(2).atom('CB'), by_residue=True, distance=dist, probability=0.4) r3 = ihm.restraint.PredictedContactRestraint( dataset=dataset, resatom1=a1.residue(1), resatom2=a2.residue(2), distance=dist2, probability=0.6, by_residue=False) rg = ihm.restraint.RestraintGroup((r2, r3)) system.restraints.extend((r1, r2)) # r2 is in restraints and groups system.restraint_groups.append(rg) ihm.dumper._EntityDumper().finalize(system) # assign entity IDs ihm.dumper._StructAsymDumper().finalize(system) # assign asym IDs dumper = 
ihm.dumper._PredictedContactRestraintDumper() dumper.finalize(system) # assign IDs out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_predicted_contact_restraint.id _ihm_predicted_contact_restraint.group_id _ihm_predicted_contact_restraint.entity_id_1 _ihm_predicted_contact_restraint.asym_id_1 _ihm_predicted_contact_restraint.comp_id_1 _ihm_predicted_contact_restraint.seq_id_1 _ihm_predicted_contact_restraint.rep_atom_1 _ihm_predicted_contact_restraint.entity_id_2 _ihm_predicted_contact_restraint.asym_id_2 _ihm_predicted_contact_restraint.comp_id_2 _ihm_predicted_contact_restraint.seq_id_2 _ihm_predicted_contact_restraint.rep_atom_2 _ihm_predicted_contact_restraint.restraint_type _ihm_predicted_contact_restraint.distance_lower_limit _ihm_predicted_contact_restraint.distance_upper_limit _ihm_predicted_contact_restraint.probability _ihm_predicted_contact_restraint.model_granularity _ihm_predicted_contact_restraint.dataset_list_id _ihm_predicted_contact_restraint.software_id 1 . 1 A ALA 1 . 2 B TRP 2 . 'lower bound' 25.000 . 0.800 by-residue 97 34 2 1 1 A ALA 1 CA 2 B TRP 2 CB 'lower bound' 25.000 . 0.400 by-residue 97 . 3 1 1 A ALA 1 . 2 B TRP 2 . 'upper bound' . 14.000 0.600 by-feature 97 . # """) def test_multi_state_scheme_dumper(self): """ Test MultiStateScheme dumper""" class MockObject: pass cur_connectivity_1 = \ ihm.multi_state_scheme.Connectivity( begin_state='s1') r1 = MockObject() r2 = MockObject() system = ihm.System() mss1 = ihm.multi_state_scheme.MultiStateScheme( name="mss1", details="details1", connectivities=[cur_connectivity_1], relaxation_times=[r1, r2]) mss2 = ihm.multi_state_scheme.MultiStateScheme( name="mss2") system.multi_state_schemes.append(mss1) system.multi_state_schemes.append(mss2) # Check whether a scheme that was added twice is written twice system.multi_state_schemes.append(mss1) dumper = ihm.dumper._MultiStateSchemeDumper() dumper.finalize(system) out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_multi_state_scheme.id _ihm_multi_state_scheme.name _ihm_multi_state_scheme.details 1 mss1 details1 2 mss2 . 
# """) def test_multi_state_scheme_connectivity_dumper(self): """ Test MultiStateSchemeConnectivity dumper""" class MockObject: pass cur_state_1 = MockObject() cur_state_1._id = 1 cur_state_2 = MockObject() cur_state_2._id = 2 cur_datasetgroup_1 = MockObject() cur_datasetgroup_1._id = 10 cur_kinetic_rate_1 = MockObject() cur_kinetic_rate_1._id = 1 cur_kinetic_rate_2 = MockObject() cur_kinetic_rate_2._id = 2 cur_relaxation_time_1 = MockObject() cur_relaxation_time_1._id = 4 cur_relaxation_time_2 = MockObject() cur_relaxation_time_2._id = 5 # Prepare the system system = ihm.System() # Create the connectivities mssc1 = ihm.multi_state_scheme.Connectivity( begin_state=cur_state_1) mssc2 = ihm.multi_state_scheme.Connectivity( begin_state=cur_state_1, end_state=cur_state_2) mssc3 = ihm.multi_state_scheme.Connectivity( begin_state=cur_state_1, end_state=cur_state_2, details="details3", dataset_group=cur_datasetgroup_1, kinetic_rate=cur_kinetic_rate_1) mssc4 = ihm.multi_state_scheme.Connectivity( begin_state=cur_state_1, end_state=cur_state_2, details="details4", kinetic_rate=cur_kinetic_rate_1, relaxation_time=cur_relaxation_time_1) mssc5 = ihm.multi_state_scheme.Connectivity( begin_state=cur_state_1, end_state=cur_state_2, details="details5", dataset_group=cur_datasetgroup_1, relaxation_time=cur_relaxation_time_2) # Check whether a duplicate entry with the same information is # written twice mssc6 = ihm.multi_state_scheme.Connectivity( begin_state=cur_state_1, end_state=cur_state_2, details="details5", dataset_group=cur_datasetgroup_1, relaxation_time=cur_relaxation_time_2) mssc7 = ihm.multi_state_scheme.Connectivity( begin_state=cur_state_1, end_state=cur_state_2, details="details7", dataset_group=cur_datasetgroup_1, relaxation_time=cur_relaxation_time_2) # Create the multi-state schemes mss1 = ihm.multi_state_scheme.MultiStateScheme( name="mss1") mss1.add_connectivity(mssc1) mss2 = ihm.multi_state_scheme.MultiStateScheme( name="mss2", connectivities=[mssc1, mssc2]) mss2.add_connectivity(mssc3) mss2.add_connectivity(mssc4) mss2.add_connectivity(mssc5) mss2.add_connectivity(mssc6) mss2.add_connectivity(mssc6) # Check whether a given _id is kept mssc7._id = '107' mss2.add_connectivity(mssc7) system.multi_state_schemes.append(mss1) system.multi_state_schemes.append(mss2) ihm.dumper._MultiStateSchemeDumper().finalize(system) dumper = ihm.dumper._MultiStateSchemeConnectivityDumper() dumper.finalize(system) out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_multi_state_scheme_connectivity.id _ihm_multi_state_scheme_connectivity.scheme_id _ihm_multi_state_scheme_connectivity.begin_state_id _ihm_multi_state_scheme_connectivity.end_state_id _ihm_multi_state_scheme_connectivity.dataset_group_id _ihm_multi_state_scheme_connectivity.details 1 1 1 . . . 2 2 1 . . . 3 2 1 2 . . 4 2 1 2 10 details3 5 2 1 2 . details4 6 2 1 2 10 details5 7 2 1 2 10 details5 107 2 1 2 10 details7 # """) def test_relaxation_time_dumper(self): """Test RelaxationTime dumpers. 
Tests both, _ihm_relaxation_time and _ihm_relaxation_time_multi_state_scheme""" class MockObject: pass cur_dataset_group_1 = MockObject() cur_dataset_group_1._id = 1 cur_external_file_1 = MockObject() cur_external_file_1._id = 2 cur_state_1 = MockObject() cur_state_1._id = 101 cur_state_2 = MockObject() cur_state_2._id = 102 cur_state_3 = MockObject() cur_state_3._id = 103 system = ihm.System() r1 = ihm.multi_state_scheme.RelaxationTime( value=3.0, unit='seconds', amplitude="0.5", details="details1") r2 = ihm.multi_state_scheme.RelaxationTime( value=4.0, unit='milliseconds', details="details2", dataset_group=cur_dataset_group_1, file=cur_external_file_1) r3 = ihm.multi_state_scheme.RelaxationTime( value=6.0, unit='seconds', details="details3", dataset_group=cur_dataset_group_1, file=cur_external_file_1) mss1 = ihm.multi_state_scheme.MultiStateScheme( name="mss1", relaxation_times=[r1] ) mss1.add_relaxation_time(r2) mss1.add_relaxation_time(r1) mssc1 = ihm.multi_state_scheme.Connectivity( begin_state=cur_state_1, end_state=cur_state_2, relaxation_time=r3 ) mssc2 = ihm.multi_state_scheme.Connectivity( begin_state=cur_state_1, end_state=cur_state_3, relaxation_time=r3 ) # a multi-state scheme connectivity without a relaxation time mssc3 = ihm.multi_state_scheme.Connectivity( begin_state=cur_state_2, end_state=cur_state_3, kinetic_rate='rate' ) # a multi-state scheme that has None as relaxation time mssc4 = ihm.multi_state_scheme.Connectivity( begin_state=cur_state_3, end_state=cur_state_2, relaxation_time=None, kinetic_rate='rate' ) mss2 = ihm.multi_state_scheme.MultiStateScheme( name="mss2", connectivities=[mssc1, mssc2, mssc3, mssc4] ) system.multi_state_schemes.append(mss1) system.multi_state_schemes.append(mss2) ihm.dumper._MultiStateSchemeConnectivityDumper().finalize(system) ihm.dumper._MultiStateSchemeDumper().finalize(system) f = ihm.flr.FLRData() r4 = ihm.multi_state_scheme.RelaxationTime(value=5.0, unit='seconds', amplitude="0.6", details="details4") cur_fret_analysis = MockObject() c = ihm.flr.RelaxationTimeFretAnalysisConnection( fret_analysis=cur_fret_analysis, relaxation_time=r4, details='.') f.relaxation_time_fret_analysis_connections.append(c) f.relaxation_time_fret_analysis_connections.append(c) system.flr_data.append(f) # Explicitly setting an _id r5 = ihm.multi_state_scheme.RelaxationTime(value=10.0, unit='seconds', amplitude="0.1", details="details5") r5._id = '105' mss1.add_relaxation_time(r5) mss2.add_relaxation_time(None) ihm.dumper._FLRRelaxationTimeFretAnalysisConnectionDumper().finalize( system) dumper = ihm.dumper._RelaxationTimeDumper() dumper.finalize(system) out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_relaxation_time.id _ihm_relaxation_time.value _ihm_relaxation_time.unit _ihm_relaxation_time.amplitude _ihm_relaxation_time.dataset_group_id _ihm_relaxation_time.external_file_id _ihm_relaxation_time.details 1 3.000 seconds 0.5 . . details1 2 4.000 milliseconds . 1 2 details2 105 10.000 seconds 0.1 . . details5 3 6.000 seconds . 1 2 details3 4 5.000 seconds 0.6 . . details4 # # loop_ _ihm_relaxation_time_multi_state_scheme.id _ihm_relaxation_time_multi_state_scheme.relaxation_time_id _ihm_relaxation_time_multi_state_scheme.scheme_id _ihm_relaxation_time_multi_state_scheme.scheme_connectivity_id _ihm_relaxation_time_multi_state_scheme.details 1 1 1 . . 2 2 1 . . 3 1 1 . . 4 105 1 . . 5 3 2 1 . 6 3 2 2 . 7 4 . . . 
# """) def test_kinetic_rate_dumper(self): """"Test KineticRate dumper""" class MockObject: pass cur_dataset_group_1 = MockObject() cur_dataset_group_1._id = 1 cur_external_file_1 = MockObject() cur_external_file_1._id = 2 cur_state_1 = MockObject() cur_state_1._id = 101 cur_state_2 = MockObject() cur_state_2._id = 102 cur_state_3 = MockObject() cur_state_3._id = 103 e_k2 = ihm.multi_state_scheme.PopulationEquilibriumConstant( value=4.0, unit='unit_placeholder') e_k3 = ihm.multi_state_scheme.PopulationEquilibriumConstant( value=5.0) e_k4 = ihm.multi_state_scheme.KineticRateEquilibriumConstant( value=6.0 ) e_k5 = ihm.multi_state_scheme.EquilibriumConstant(value=7.0, unit='unit7') # k1 => id 1 k1 = ihm.multi_state_scheme.KineticRate( transition_rate_constant=3.0, details="transition rate constant 1" ) # k2 => id 2 k2 = ihm.multi_state_scheme.KineticRate( equilibrium_constant=e_k2, details='equilibrium constant 2', dataset_group=cur_dataset_group_1, file=cur_external_file_1 ) # k3 => id 3 k3 = ihm.multi_state_scheme.KineticRate( transition_rate_constant=6.0, equilibrium_constant=e_k3, details='equilibrium constant 3', dataset_group=cur_dataset_group_1 ) # k4 => id 4 k4 = ihm.multi_state_scheme.KineticRate( equilibrium_constant=e_k4, details='equilibrium constant 4' ) # k5 => id 5 k5 = ihm.multi_state_scheme.KineticRate( equilibrium_constant=e_k5, details='equilibrium constant 5' ) # mssc1 => id 1 mssc1 = ihm.multi_state_scheme.Connectivity( begin_state=cur_state_1, end_state=cur_state_2, kinetic_rate=k1 ) # mssc2 => id 2 mssc2 = ihm.multi_state_scheme.Connectivity( begin_state=cur_state_1, end_state=cur_state_2, kinetic_rate=k2 ) # mssc3 => id 3 mssc3 = ihm.multi_state_scheme.Connectivity( begin_state=cur_state_1, end_state=cur_state_2, kinetic_rate=k3 ) # mssc4 => id 4 mssc4 = ihm.multi_state_scheme.Connectivity( begin_state=cur_state_1, end_state=cur_state_2, kinetic_rate=k4 ) # mssc5 => id 5 mssc5 = ihm.multi_state_scheme.Connectivity( begin_state=cur_state_2, end_state=cur_state_1, kinetic_rate=k5 ) mss1 = ihm.multi_state_scheme.MultiStateScheme( name="mss1", connectivities=[mssc1, mssc2, mssc3, mssc4] ) mss1.add_connectivity(mssc5) # A multi-state scheme connectivity without a kinetic rate mssc6 = ihm.multi_state_scheme.Connectivity( begin_state=cur_state_2, end_state=cur_state_3, relaxation_time='rt' ) mss1.add_connectivity(mssc6) system = ihm.System() system.multi_state_schemes.append(mss1) ihm.dumper._MultiStateSchemeConnectivityDumper().finalize(system) ihm.dumper._MultiStateSchemeDumper().finalize(system) dumper = ihm.dumper._KineticRateDumper() dumper.finalize(system) out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_kinetic_rate.id _ihm_kinetic_rate.transition_rate_constant _ihm_kinetic_rate.equilibrium_constant _ihm_kinetic_rate.equilibrium_constant_determination_method _ihm_kinetic_rate.equilibrium_constant_unit _ihm_kinetic_rate.details _ihm_kinetic_rate.scheme_connectivity_id _ihm_kinetic_rate.dataset_group_id _ihm_kinetic_rate.external_file_id 1 3.000 . . . 'transition rate constant 1' 1 . . 2 . 4.000 'equilibrium constant is determined from population' unit_placeholder 'equilibrium constant 2' 2 1 2 3 6.000 5.000 'equilibrium constant is determined from population' . 'equilibrium constant 3' 3 1 . 4 . 6.000 'equilibrium constant is determined from kinetic rates, kAB/kBA' . 'equilibrium constant 4' 4 . . 5 . 7.000 'equilibrium constant is determined from another method not listed' unit7 'equilibrium constant 5' 5 . . 
# """) f = ihm.flr.FLRData() k4 = ihm.multi_state_scheme.KineticRate( transition_rate_constant=4.0, details="transition rate constant 4") cur_fret_analysis = MockObject() c = ihm.flr.KineticRateFretAnalysisConnection( fret_analysis=cur_fret_analysis, kinetic_rate=k4, details='.') f.kinetic_rate_fret_analysis_connections.append(c) f.kinetic_rate_fret_analysis_connections.append(c) system.flr_data.append(f) ihm.dumper._FLRKineticRateFretAnalysisConnectionDumper().finalize( system) dumper = ihm.dumper._KineticRateDumper() dumper.finalize(system) out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _ihm_kinetic_rate.id _ihm_kinetic_rate.transition_rate_constant _ihm_kinetic_rate.equilibrium_constant _ihm_kinetic_rate.equilibrium_constant_determination_method _ihm_kinetic_rate.equilibrium_constant_unit _ihm_kinetic_rate.details _ihm_kinetic_rate.scheme_connectivity_id _ihm_kinetic_rate.dataset_group_id _ihm_kinetic_rate.external_file_id 1 3.000 . . . 'transition rate constant 1' 1 . . 2 . 4.000 'equilibrium constant is determined from population' unit_placeholder 'equilibrium constant 2' 2 1 2 3 6.000 5.000 'equilibrium constant is determined from population' . 'equilibrium constant 3' 3 1 . 4 . 6.000 'equilibrium constant is determined from kinetic rates, kAB/kBA' . 'equilibrium constant 4' 4 . . 5 . 7.000 'equilibrium constant is determined from another method not listed' unit7 'equilibrium constant 5' 5 . . 6 4.000 . . . 'transition rate constant 4' . . . # """) def test_flr_dumper(self): """Test FLR dumpers""" class MockObject: pass cur_state = MockObject() cur_state._id = 1 cur_model_1 = MockObject() cur_model_1._id = 1 cur_model_2 = MockObject() cur_model_2._id = 2 dataset_1 = MockObject() dataset_1._id = 1 dataset_group_1 = MockObject() dataset_group_1._id = 1 cur_ihm_modeling_protocol = MockObject() cur_ihm_modeling_protocol._id = 1 system = ihm.System() # Fill the system cur_flr_data = ihm.flr.FLRData() cur_entity_1 = ihm.Entity("AG", description='Entity_1') cur_entity_2 = ihm.Entity("CCCCCCCCCC", description='Entity_2') system.entities.extend([cur_entity_1, cur_entity_2]) asym1 = ihm.AsymUnit(cur_entity_1, id='C') system.asym_units.append(asym1) # FLR cur_entity_assembly = ihm.flr.EntityAssembly() cur_entity_assembly.add_entity(entity=cur_entity_1, num_copies=1) cur_entity_assembly.add_entity(entity=cur_entity_2, num_copies=2) cur_instrument = ihm.flr.Instrument(details='My_Instrument') cur_inst_setting_1 = ihm.flr.InstSetting(details='My_Inst_setting_1') cur_inst_setting_2 = ihm.flr.InstSetting(details='My_Inst_setting_2') cur_exp_condition_1 = ihm.flr.ExpCondition( details='My_Exp_condition_1') cur_exp_condition_2 = ihm.flr.ExpCondition( details='My_Exp_condition_2') cur_sample_condition_1 = ihm.flr.SampleCondition( details='My_Sample_condition_1') cur_sample_condition_2 = ihm.flr.SampleCondition( details='My_Sample_condition_2') cur_sample_1 = ihm.flr.Sample(entity_assembly=cur_entity_assembly, num_of_probes=2, condition=cur_sample_condition_1, description='Sample_1', details='Details sample 1', solvent_phase='liquid') cur_sample_2 = ihm.flr.Sample(entity_assembly=cur_entity_assembly, num_of_probes=2, condition=cur_sample_condition_2, description='Sample_2', details='Details sample 2', solvent_phase='liquid') # Reference sample cur_sample_3 = ihm.flr.Sample(entity_assembly=cur_entity_assembly, num_of_probes=1, condition=cur_sample_condition_1, description='Reference Sample', details='Details Reference Sample', solvent_phase='liquid') cur_experiment = 
ihm.flr.Experiment() cur_experiment.add_entry(instrument=cur_instrument, inst_setting=cur_inst_setting_1, exp_condition=cur_exp_condition_1, sample=cur_sample_1) cur_experiment.add_entry(instrument=cur_instrument, inst_setting=cur_inst_setting_2, exp_condition=cur_exp_condition_2, sample=cur_sample_2) cur_experiment.add_entry(instrument=cur_instrument, inst_setting=cur_inst_setting_1, exp_condition=cur_exp_condition_1, sample=cur_sample_3) # Probes cur_probe_1 = ihm.flr.Probe() cur_probe_2 = ihm.flr.Probe() cur_probe_list_1 = ihm.flr.ProbeList(chromophore_name='Donor1', reactive_probe_flag=False, probe_origin='extrinsic', probe_link_type='covalent') cur_probe_list_2 = ihm.flr.ProbeList( chromophore_name='Acceptor2', reactive_probe_flag=True, reactive_probe_name='Acceptor1 reactive', probe_origin='extrinsic', probe_link_type='covalent') # Chem descriptor ID 1 cur_chem_desc_probe_1_chromophore = ihm.ChemDescriptor( auth_name='Donor1_chromophore_chem_desc', chem_comp_id=None, common_name=None, smiles='C1') cur_chem_desc_probe_1_chromophore._id = 1 # Chem descriptor ID 2 cur_chem_desc_probe_2_chromophore = ihm.ChemDescriptor( auth_name='Donor2_chromophore_chem_desc', chem_comp_id=None, common_name=None, smiles='C2') cur_chem_desc_probe_2_chromophore._id = 2 # Chem descriptor ID 3 cur_chem_desc_probe_2_reactive = ihm.ChemDescriptor( auth_name='Donor1_reactive_chem_desc', chem_comp_id=None, common_name=None, smiles='R1') cur_chem_desc_probe_2_reactive._id = 3 cur_probe_descriptor_1 = ihm.flr.ProbeDescriptor( reactive_probe_chem_descriptor=None, chromophore_chem_descriptor=cur_chem_desc_probe_1_chromophore, chromophore_center_atom='CB') cur_probe_descriptor_2 = ihm.flr.ProbeDescriptor( reactive_probe_chem_descriptor=cur_chem_desc_probe_2_reactive, chromophore_chem_descriptor=cur_chem_desc_probe_2_chromophore, chromophore_center_atom='CB') cur_probe_1.probe_descriptor = cur_probe_descriptor_1 cur_probe_1.probe_list_entry = cur_probe_list_1 cur_probe_2.probe_descriptor = cur_probe_descriptor_2 cur_probe_2.probe_list_entry = cur_probe_list_2 # Modified residue # Chem descriptor ID 4 cur_chem_descriptor_modified_residue = ihm.ChemDescriptor( auth_name='Modified_residue', smiles='Modified') cur_chem_comp_mutated_residue = ihm.ChemComp( id='Cys', code='C', code_canonical='C') cur_chem_descriptor_modified_residue._id = 4 # cur_chem_comp_mutated_residue._id = 5 # Poly_probe_position cur_poly_probe_position_1 = ihm.flr.PolyProbePosition( resatom=cur_entity_1.residue(1), # no atom ID given mutation_flag=True, modification_flag=True, auth_name='Position_1', mutated_chem_comp_id=cur_chem_comp_mutated_residue, modified_chem_descriptor=cur_chem_descriptor_modified_residue) cur_poly_probe_position_2 = ihm.flr.PolyProbePosition( # using asym instead of only entity resatom=asym1.residue(2).atom('CB'), mutation_flag=False, modification_flag=False, auth_name='Position_2') cur_poly_probe_position_3 = ihm.flr.PolyProbePosition( resatom=cur_entity_2.residue(10).atom('CB'), mutation_flag=True, modification_flag=True, auth_name='Position_3', mutated_chem_comp_id=cur_chem_comp_mutated_residue, modified_chem_descriptor=cur_chem_descriptor_modified_residue) # Sample_probe_details cur_sample_probe_details_1 = ihm.flr.SampleProbeDetails( sample=cur_sample_1, probe=cur_probe_1, fluorophore_type='donor', poly_probe_position=cur_poly_probe_position_1, description='Donor in position1-position3') cur_sample_probe_details_2 = ihm.flr.SampleProbeDetails( sample=cur_sample_1, probe=cur_probe_2, fluorophore_type='acceptor', 
poly_probe_position=cur_poly_probe_position_3, description='Acceptor in position1-position3') cur_sample_probe_details_3 = ihm.flr.SampleProbeDetails( sample=cur_sample_2, probe=cur_probe_1, fluorophore_type='donor', poly_probe_position=cur_poly_probe_position_2, description='Donor in position2-position3') cur_sample_probe_details_4 = ihm.flr.SampleProbeDetails( sample=cur_sample_2, probe=cur_probe_2, fluorophore_type='acceptor', poly_probe_position=cur_poly_probe_position_3, description='Acceptor in position2-position3') cur_sample_probe_details_5 = ihm.flr.SampleProbeDetails( sample=cur_sample_3, probe=cur_probe_1, fluorophore_type='donor', poly_probe_position=cur_poly_probe_position_1, description='Donor-only on reference sample') # Poly_probe_conjugate # Chem Descriptor ID 5 cur_poly_probe_conjugate_chem_descriptor = ihm.ChemDescriptor( auth_name='Conjugate', smiles='Conj1') cur_poly_probe_conjugate_chem_descriptor._id = 5 cur_poly_probe_conjugate_1 = ihm.flr.PolyProbeConjugate( sample_probe=cur_sample_probe_details_1, chem_descriptor=cur_poly_probe_conjugate_chem_descriptor, ambiguous_stoichiometry=False) cur_poly_probe_conjugate_2 = ihm.flr.PolyProbeConjugate( sample_probe=cur_sample_probe_details_2, chem_descriptor=cur_poly_probe_conjugate_chem_descriptor, ambiguous_stoichiometry=False) cur_poly_probe_conjugate_3 = ihm.flr.PolyProbeConjugate( sample_probe=cur_sample_probe_details_3, chem_descriptor=cur_poly_probe_conjugate_chem_descriptor, ambiguous_stoichiometry=False) cur_poly_probe_conjugate_4 = ihm.flr.PolyProbeConjugate( sample_probe=cur_sample_probe_details_4, chem_descriptor=cur_poly_probe_conjugate_chem_descriptor, ambiguous_stoichiometry=False) cur_poly_probe_conjugate_5 = ihm.flr.PolyProbeConjugate( sample_probe=cur_sample_probe_details_5, chem_descriptor=cur_poly_probe_conjugate_chem_descriptor, ambiguous_stoichiometry=False) cur_flr_data.poly_probe_conjugates.extend( (cur_poly_probe_conjugate_1, cur_poly_probe_conjugate_2, cur_poly_probe_conjugate_3, cur_poly_probe_conjugate_4, cur_poly_probe_conjugate_5)) # Forster_radius cur_forster_radius = ihm.flr.FRETForsterRadius( donor_probe=cur_probe_1, acceptor_probe=cur_probe_2, forster_radius=52.0, reduced_forster_radius=53.2) # Fret_calibration_parameters cur_fret_calibration_parameters_1 = ihm.flr.FRETCalibrationParameters( phi_acceptor=0.35, alpha=2.4, gg_gr_ratio=0.4, a_b=0.8) cur_fret_calibration_parameters_2 = ihm.flr.FRETCalibrationParameters( phi_acceptor=0.35, alpha=2.4, gg_gr_ratio=0.38, a_b=0.8) # LifetimeFitModel cur_lifetime_fit_model = ihm.flr.LifetimeFitModel( name='Lifetime fit model 1', description='Description of model') # RefMeasurementLifetime cur_lifetime_1 = ihm.flr.RefMeasurementLifetime(species_fraction=0.6, lifetime=3.2) cur_lifetime_2 = ihm.flr.RefMeasurementLifetime(species_fraction=0.4, lifetime=1.4) # RefMeasurement cur_ref_measurement_1 = ihm.flr.RefMeasurement( ref_sample_probe=cur_sample_probe_details_5, details='Reference Measurement 1') cur_ref_measurement_1.add_lifetime(cur_lifetime_1) cur_ref_measurement_1.add_lifetime(cur_lifetime_2) # RefMeasurementGroup cur_lifetime_ref_measurement_group = ihm.flr.RefMeasurementGroup( details='Reference measurement group 1') cur_lifetime_ref_measurement_group.add_ref_measurement( cur_ref_measurement_1) # FretAnalysis cur_fret_analysis_1 = ihm.flr.FRETAnalysis( experiment=cur_experiment, sample_probe_1=cur_sample_probe_details_1, sample_probe_2=cur_sample_probe_details_2, forster_radius=cur_forster_radius, type='intensity-based', 
calibration_parameters=cur_fret_calibration_parameters_1, method_name='PDA', chi_square_reduced=1.5, dataset=dataset_1) cur_fret_analysis_2 = ihm.flr.FRETAnalysis( experiment=cur_experiment, sample_probe_1=cur_sample_probe_details_3, sample_probe_2=cur_sample_probe_details_4, forster_radius=cur_forster_radius, type='intensity-based', calibration_parameters=cur_fret_calibration_parameters_2, method_name='PDA', chi_square_reduced=1.8, dataset=dataset_1) # lifetime-based FRETAnalysis cur_fret_analysis_3 = ihm.flr.FRETAnalysis( experiment=cur_experiment, sample_probe_1=cur_sample_probe_details_1, sample_probe_2=cur_sample_probe_details_2, forster_radius=cur_forster_radius, type='lifetime-based', lifetime_fit_model=cur_lifetime_fit_model, ref_measurement_group=cur_lifetime_ref_measurement_group, method_name='Lifetime fit', chi_square_reduced=1.6, dataset=dataset_1) # Peak_assignment cur_peak_assignment = ihm.flr.PeakAssignment( method_name='Population', details='Peaks were assigned by population fractions.') # Fret_distance_restraints cur_fret_distance_restraint_1 = ihm.flr.FRETDistanceRestraint( sample_probe_1=cur_sample_probe_details_1, sample_probe_2=cur_sample_probe_details_2, analysis=cur_fret_analysis_1, distance=53.5, distance_error_plus=2.5, distance_error_minus=2.3, distance_type='_E', state=cur_state, population_fraction=0.80, peak_assignment=cur_peak_assignment) cur_fret_distance_restraint_2 = ihm.flr.FRETDistanceRestraint( sample_probe_1=cur_sample_probe_details_3, sample_probe_2=cur_sample_probe_details_4, analysis=cur_fret_analysis_2, distance=49.0, distance_error_plus=2.0, distance_error_minus=2.1, distance_type='_E', state=cur_state, population_fraction=0.80, peak_assignment=cur_peak_assignment) cur_fret_distance_restraint_3 = ihm.flr.FRETDistanceRestraint( sample_probe_1=cur_sample_probe_details_1, sample_probe_2=cur_sample_probe_details_2, analysis=cur_fret_analysis_3, distance=53.5, distance_error_plus=2.5, distance_error_minus=2.3, distance_type='_E', state=cur_state, population_fraction=0.80, peak_assignment=cur_peak_assignment) cur_fret_dist_restraint_group = ihm.flr.FRETDistanceRestraintGroup() cur_fret_dist_restraint_group.add_distance_restraint( cur_fret_distance_restraint_1) cur_fret_dist_restraint_group.add_distance_restraint( cur_fret_distance_restraint_2) cur_fret_dist_restraint_group.add_distance_restraint( cur_fret_distance_restraint_3) cur_flr_data.distance_restraint_groups.append( cur_fret_dist_restraint_group) # fret_model_quality cur_fret_model_quality_1 = ihm.flr.FRETModelQuality( model=cur_model_1, chi_square_reduced=1.3, dataset_group=dataset_group_1, method=None) cur_fret_model_quality_2 = ihm.flr.FRETModelQuality( model=cur_model_2, chi_square_reduced=1.9, dataset_group=dataset_group_1, method=None) cur_flr_data.fret_model_qualities.extend( (cur_fret_model_quality_1, cur_fret_model_quality_2)) # fret_model_distance cur_fret_model_distance_1_1 = ihm.flr.FRETModelDistance( restraint=cur_fret_distance_restraint_1, model=cur_model_1, distance=52.0) cur_fret_model_distance_1_2 = ihm.flr.FRETModelDistance( restraint=cur_fret_distance_restraint_2, model=cur_model_1, distance=50.0) cur_fret_model_distance_2_1 = ihm.flr.FRETModelDistance( restraint=cur_fret_distance_restraint_1, model=cur_model_2, distance=53.8) cur_fret_model_distance_2_2 = ihm.flr.FRETModelDistance( restraint=cur_fret_distance_restraint_2, model=cur_model_2, distance=49.4) cur_flr_data.fret_model_distances.extend( (cur_fret_model_distance_1_1, cur_fret_model_distance_1_2, 
cur_fret_model_distance_2_1, cur_fret_model_distance_2_2)) # FPS modeling cur_fps_global_parameters = ihm.flr.FPSGlobalParameters( forster_radius=52, conversion_function_polynom_order=3, repetition=1000, av_grid_rel=0.2, av_min_grid_a=0.4, av_allowed_sphere=0.5, av_search_nodes=3, av_e_samples_k=200, sim_viscosity_adjustment=1, sim_dt_adjustment=1, sim_max_iter_k=200, sim_max_force=400, sim_clash_tolerance_a=1, sim_reciprocal_kt=10, sim_clash_potential='^2', convergence_e=100, convergence_k=0.001, convergence_f=0.001, convergence_t=0.002) cur_fps_modeling_1 = ihm.flr.FPSModeling( protocol=cur_ihm_modeling_protocol, restraint_group=cur_fret_dist_restraint_group, global_parameter=cur_fps_global_parameters, probe_modeling_method="AV3") cur_fps_modeling_2 = ihm.flr.FPSModeling( protocol=cur_ihm_modeling_protocol, restraint_group=cur_fret_dist_restraint_group, global_parameter=cur_fps_global_parameters, probe_modeling_method="MPP") # Modeling by AV cur_fps_av_parameters_1 = ihm.flr.FPSAVParameter( num_linker_atoms=15, linker_length=20.0, linker_width=3.5, probe_radius_1=10.0, probe_radius_2=5.0, probe_radius_3=3.5) cur_fps_av_modeling_1 = ihm.flr.FPSAVModeling( fps_modeling=cur_fps_modeling_1, sample_probe=cur_sample_probe_details_1, parameter=cur_fps_av_parameters_1) cur_fps_av_modeling_3 = ihm.flr.FPSAVModeling( fps_modeling=cur_fps_modeling_1, sample_probe=cur_sample_probe_details_3, parameter=cur_fps_av_parameters_1) cur_flr_data.fps_modeling.append(cur_fps_av_modeling_1) cur_flr_data.fps_modeling.append(cur_fps_av_modeling_3) # Modeling by mean probe position cur_mpp_atom_position_1 = ihm.flr.FPSMPPAtomPosition( atom=asym1.residue(1).atom('CA'), x=1.0, y=1.0, z=1.0) cur_mpp_atom_position_2 = ihm.flr.FPSMPPAtomPosition( atom=asym1.residue(2).atom('CA'), x=2.0, y=2.0, z=2.0) cur_mpp_atom_position_group = ihm.flr.FPSMPPAtomPositionGroup() cur_mpp_atom_position_group.add_atom_position(cur_mpp_atom_position_1) cur_mpp_atom_position_group.add_atom_position(cur_mpp_atom_position_2) cur_mean_probe_position_2 = ihm.flr.FPSMeanProbePosition( sample_probe=cur_sample_probe_details_2, x=1.0, y=2.0, z=3.0) cur_mean_probe_position_4 = ihm.flr.FPSMeanProbePosition( sample_probe=cur_sample_probe_details_4, x=1.0, y=2.0, z=3.0) cur_fps_mpp_modeling_2 = ihm.flr.FPSMPPModeling( fps_modeling=cur_fps_modeling_2, mpp=cur_mean_probe_position_2, mpp_atom_position_group=cur_mpp_atom_position_group) cur_fps_mpp_modeling_4 = ihm.flr.FPSMPPModeling( fps_modeling=cur_fps_modeling_2, mpp=cur_mean_probe_position_4, mpp_atom_position_group=cur_mpp_atom_position_group) cur_flr_data.fps_modeling.append(cur_fps_mpp_modeling_2) cur_flr_data.fps_modeling.append(cur_fps_mpp_modeling_4) # KineticRateFretAnalysisConnection cur_kinetic_rate1 = ihm.multi_state_scheme.KineticRate( transition_rate_constant=1.0) cur_kinetic_rate2 = ihm.multi_state_scheme.KineticRate( transition_rate_constant=2.0) cur_kinetic_rate_fret_analysis_connection1 = \ ihm.flr.KineticRateFretAnalysisConnection( fret_analysis=cur_fret_analysis_1, kinetic_rate=cur_kinetic_rate1, details='connection1') cur_kinetic_rate_fret_analysis_connection2 = \ ihm.flr.KineticRateFretAnalysisConnection( fret_analysis=cur_fret_analysis_2, kinetic_rate=cur_kinetic_rate2, details='connection2') cur_flr_data.kinetic_rate_fret_analysis_connections.append( cur_kinetic_rate_fret_analysis_connection1) cur_flr_data.kinetic_rate_fret_analysis_connections.append( cur_kinetic_rate_fret_analysis_connection2) # RelaxationTimeFretAnalysisConnection cur_relaxation_time1 = \ 
ihm.multi_state_scheme.RelaxationTime(value=3.0, unit='seconds') cur_relaxation_time2 = \ ihm.multi_state_scheme.RelaxationTime(value=4.0, unit='milliseconds') cur_relaxation_time_fret_analysis_connection1 = \ ihm.flr.RelaxationTimeFretAnalysisConnection( fret_analysis=cur_fret_analysis_1, relaxation_time=cur_relaxation_time1, details='connection3') cur_relaxation_time_fret_analysis_connection2 = \ ihm.flr.RelaxationTimeFretAnalysisConnection( fret_analysis=cur_fret_analysis_3, relaxation_time=cur_relaxation_time2, details='connection4') cur_flr_data.relaxation_time_fret_analysis_connections.append( cur_relaxation_time_fret_analysis_connection1) cur_flr_data.relaxation_time_fret_analysis_connections.append( cur_relaxation_time_fret_analysis_connection2) system.flr_data = [cur_flr_data] ihm.dumper._EntityDumper().finalize(system) # assign entity IDs ihm.dumper._StructAsymDumper().finalize(system) # assign asym IDs ihm.dumper._ChemCompDumper().finalize(system) experiment_dumper = ihm.dumper._FLRExperimentDumper() experiment_dumper.finalize(system) inst_setting_dumper = ihm.dumper._FLRInstSettingDumper() inst_setting_dumper.finalize(system) exp_condition_dumper = ihm.dumper._FLRExpConditionDumper() exp_condition_dumper.finalize(system) instrument_dumper = ihm.dumper._FLRInstrumentDumper() instrument_dumper.finalize(system) entity_assembly_dumper = ihm.dumper._FLREntityAssemblyDumper() entity_assembly_dumper.finalize(system) sample_condition_dumper = ihm.dumper._FLRSampleConditionDumper() sample_condition_dumper.finalize(system) sample_dumper = ihm.dumper._FLRSampleDumper() sample_dumper.finalize(system) probe_dumper = ihm.dumper._FLRProbeDumper() probe_dumper.finalize(system) sample_probe_details_dumper = ihm.dumper._FLRSampleProbeDetailsDumper() sample_probe_details_dumper.finalize(system) poly_probe_pos_dumper = ihm.dumper._FLRPolyProbePositionDumper() poly_probe_pos_dumper.finalize(system) conjugate_dumper = ihm.dumper._FLRConjugateDumper() conjugate_dumper.finalize(system) radii_dumper = ihm.dumper._FLRForsterRadiusDumper() radii_dumper.finalize(system) parameters_dumper = ihm.dumper._FLRCalibrationParametersDumper() parameters_dumper.finalize(system) lifetime_fit_model_dumper = ihm.dumper._FLRLifetimeFitModelDumper() lifetime_fit_model_dumper.finalize(system) ref_measurement_dumper = ihm.dumper._FLRRefMeasurementDumper() ref_measurement_dumper.finalize(system) analysis_dumper = ihm.dumper._FLRAnalysisDumper() analysis_dumper.finalize(system) peak_assignment_dumper = ihm.dumper._FLRPeakAssignmentDumper() peak_assignment_dumper.finalize(system) distance_restraint_dumper = ihm.dumper._FLRDistanceRestraintDumper() distance_restraint_dumper.finalize(system) model_quality_dumper = ihm.dumper._FLRModelQualityDumper() model_quality_dumper.finalize(system) model_distance_dumper = ihm.dumper._FLRModelDistanceDumper() model_distance_dumper.finalize(system) fps_modeling_dumper = ihm.dumper._FLRFPSModelingDumper() fps_modeling_dumper.finalize(system) av_dumper = ihm.dumper._FLRFPSAVModelingDumper() av_dumper.finalize(system) mpp_dumper = ihm.dumper._FLRFPSMPPModelingDumper() mpp_dumper.finalize(system) # assign IDs # Assign IDs to the kinetic rates ihm.dumper._KineticRateDumper().finalize(system) # Assign IDs to the relaxation times ihm.dumper._RelaxationTimeDumper().finalize(system) kinetic_rate_fret_analysis_connection_dumper = \ ihm.dumper._FLRKineticRateFretAnalysisConnectionDumper() kinetic_rate_fret_analysis_connection_dumper.finalize(system) relaxation_time_fret_analysis_connection_dumper = \ 
ihm.dumper._FLRRelaxationTimeFretAnalysisConnectionDumper() relaxation_time_fret_analysis_connection_dumper.finalize(system) out = _get_dumper_output(experiment_dumper, system) self.assertEqual(out, """# loop_ _flr_experiment.ordinal_id _flr_experiment.id _flr_experiment.instrument_id _flr_experiment.inst_setting_id _flr_experiment.exp_condition_id _flr_experiment.sample_id _flr_experiment.details 1 1 1 1 1 1 . 2 1 1 2 2 2 . 3 1 1 1 1 3 . # """) out = _get_dumper_output(inst_setting_dumper, system) self.assertEqual(out, """# loop_ _flr_inst_setting.id _flr_inst_setting.details 1 My_Inst_setting_1 2 My_Inst_setting_2 # """) out = _get_dumper_output(exp_condition_dumper, system) self.assertEqual(out, """# loop_ _flr_exp_condition.id _flr_exp_condition.details 1 My_Exp_condition_1 2 My_Exp_condition_2 # """) out = _get_dumper_output(instrument_dumper, system) self.assertEqual(out, """# loop_ _flr_instrument.id _flr_instrument.details 1 My_Instrument # """) out = _get_dumper_output(entity_assembly_dumper, system) self.assertEqual(out, """# loop_ _flr_entity_assembly.ordinal_id _flr_entity_assembly.assembly_id _flr_entity_assembly.entity_id _flr_entity_assembly.num_copies _flr_entity_assembly.entity_description 1 1 1 1 Entity_1 2 1 2 2 Entity_2 # """) out = _get_dumper_output(sample_condition_dumper, system) self.assertEqual(out, """# loop_ _flr_sample_condition.id _flr_sample_condition.details 1 My_Sample_condition_1 2 My_Sample_condition_2 # """) out = _get_dumper_output(sample_dumper, system) self.assertEqual(out, """# loop_ _flr_sample.id _flr_sample.entity_assembly_id _flr_sample.num_of_probes _flr_sample.sample_condition_id _flr_sample.sample_description _flr_sample.sample_details _flr_sample.solvent_phase 1 1 2 1 Sample_1 'Details sample 1' liquid 2 1 2 2 Sample_2 'Details sample 2' liquid 3 1 1 1 'Reference Sample' 'Details Reference Sample' liquid # """) out = _get_dumper_output(probe_dumper, system) self.assertEqual(out, """# loop_ _flr_probe_list.probe_id _flr_probe_list.chromophore_name _flr_probe_list.reactive_probe_flag _flr_probe_list.reactive_probe_name _flr_probe_list.probe_origin _flr_probe_list.probe_link_type 1 Donor1 NO . extrinsic covalent 2 Acceptor2 YES 'Acceptor1 reactive' extrinsic covalent # # loop_ _flr_probe_descriptor.probe_id _flr_probe_descriptor.reactive_probe_chem_descriptor_id _flr_probe_descriptor.chromophore_chem_descriptor_id _flr_probe_descriptor.chromophore_center_atom 1 . 1 CB 2 3 2 CB # """) out = _get_dumper_output(sample_probe_details_dumper, system) self.assertEqual(out, """# loop_ _flr_sample_probe_details.sample_probe_id _flr_sample_probe_details.sample_id _flr_sample_probe_details.probe_id _flr_sample_probe_details.fluorophore_type _flr_sample_probe_details.description _flr_sample_probe_details.poly_probe_position_id 1 1 1 donor 'Donor in position1-position3' 1 2 1 2 acceptor 'Acceptor in position1-position3' 2 3 2 1 donor 'Donor in position2-position3' 3 4 2 2 acceptor 'Acceptor in position2-position3' 2 5 3 1 donor 'Donor-only on reference sample' 1 # """) out = _get_dumper_output(poly_probe_pos_dumper, system) self.assertEqual(out, """# loop_ _flr_poly_probe_position.id _flr_poly_probe_position.entity_id _flr_poly_probe_position.entity_description _flr_poly_probe_position.asym_id _flr_poly_probe_position.seq_id _flr_poly_probe_position.comp_id _flr_poly_probe_position.atom_id _flr_poly_probe_position.mutation_flag _flr_poly_probe_position.modification_flag _flr_poly_probe_position.auth_name 1 1 Entity_1 . 1 ALA . 
YES YES Position_1 2 2 Entity_2 . 10 CYS CB YES YES Position_3 3 1 Entity_1 C 2 GLY CB NO NO Position_2 # # loop_ _flr_poly_probe_position_mutated.id _flr_poly_probe_position_mutated.chem_comp_id _flr_poly_probe_position_mutated.atom_id 1 Cys . 2 Cys CB # # loop_ _flr_poly_probe_position_modified.id _flr_poly_probe_position_modified.chem_descriptor_id _flr_poly_probe_position_modified.atom_id 1 4 . 2 4 CB # """) out = _get_dumper_output(conjugate_dumper, system) self.assertEqual(out, """# loop_ _flr_poly_probe_conjugate.id _flr_poly_probe_conjugate.sample_probe_id _flr_poly_probe_conjugate.chem_descriptor_id _flr_poly_probe_conjugate.ambiguous_stoichiometry_flag _flr_poly_probe_conjugate.probe_stoichiometry 1 1 5 NO . 2 2 5 NO . 3 3 5 NO . 4 4 5 NO . 5 5 5 NO . # """) out = _get_dumper_output(radii_dumper, system) self.assertEqual(out, """# loop_ _flr_fret_forster_radius.id _flr_fret_forster_radius.donor_probe_id _flr_fret_forster_radius.acceptor_probe_id _flr_fret_forster_radius.forster_radius _flr_fret_forster_radius.reduced_forster_radius 1 1 2 52.000 53.200 # """) out = _get_dumper_output(parameters_dumper, system) self.assertEqual(out, """# loop_ _flr_fret_calibration_parameters.id _flr_fret_calibration_parameters.phi_acceptor _flr_fret_calibration_parameters.alpha _flr_fret_calibration_parameters.alpha_sd _flr_fret_calibration_parameters.gG_gR_ratio _flr_fret_calibration_parameters.beta _flr_fret_calibration_parameters.gamma _flr_fret_calibration_parameters.delta _flr_fret_calibration_parameters.a_b 1 0.350 2.400 . 0.400 . . . 0.800 2 0.350 2.400 . 0.380 . . . 0.800 # """) out = _get_dumper_output(ref_measurement_dumper, system) self.assertEqual(out, """# loop_ _flr_reference_measurement_group.id _flr_reference_measurement_group.num_measurements _flr_reference_measurement_group.details 1 1 'Reference measurement group 1' # # loop_ _flr_reference_measurement_group_link.group_id _flr_reference_measurement_group_link.reference_measurement_id 1 1 # # loop_ _flr_reference_measurement.id _flr_reference_measurement.reference_sample_probe_id _flr_reference_measurement.num_species _flr_reference_measurement.details 1 5 2 'Reference Measurement 1' # # loop_ _flr_reference_measurement_lifetime.ordinal_id _flr_reference_measurement_lifetime.reference_measurement_id _flr_reference_measurement_lifetime.species_name _flr_reference_measurement_lifetime.species_fraction _flr_reference_measurement_lifetime.lifetime 1 1 . 0.600 3.200 2 1 . 0.400 1.400 # """) out = _get_dumper_output(lifetime_fit_model_dumper, system) self.assertEqual(out, """# loop_ _flr_lifetime_fit_model.id _flr_lifetime_fit_model.name _flr_lifetime_fit_model.description _flr_lifetime_fit_model.external_file_id _flr_lifetime_fit_model.citation_id 1 'Lifetime fit model 1' 'Description of model' . . # """) out = _get_dumper_output(analysis_dumper, system) self.assertEqual(out, """# loop_ _flr_fret_analysis.id _flr_fret_analysis.experiment_id _flr_fret_analysis.type _flr_fret_analysis.sample_probe_id_1 _flr_fret_analysis.sample_probe_id_2 _flr_fret_analysis.forster_radius_id _flr_fret_analysis.dataset_list_id _flr_fret_analysis.external_file_id _flr_fret_analysis.software_id 1 1 intensity-based 1 2 1 1 . . 2 1 intensity-based 3 4 1 1 . . 3 1 lifetime-based 1 2 1 1 . . 
# # loop_ _flr_fret_analysis_intensity.ordinal_id _flr_fret_analysis_intensity.analysis_id _flr_fret_analysis_intensity.calibration_parameters_id _flr_fret_analysis_intensity.donor_only_fraction _flr_fret_analysis_intensity.chi_square_reduced _flr_fret_analysis_intensity.method_name _flr_fret_analysis_intensity.details 1 1 1 . 1.500 PDA . 2 2 2 . 1.800 PDA . # # loop_ _flr_fret_analysis_lifetime.ordinal_id _flr_fret_analysis_lifetime.analysis_id _flr_fret_analysis_lifetime.reference_measurement_group_id _flr_fret_analysis_lifetime.lifetime_fit_model_id _flr_fret_analysis_lifetime.donor_only_fraction _flr_fret_analysis_lifetime.chi_square_reduced _flr_fret_analysis_lifetime.method_name _flr_fret_analysis_lifetime.details 1 3 1 1 . 1.600 'Lifetime fit' . # """) out = _get_dumper_output(peak_assignment_dumper, system) self.assertEqual(out, """# loop_ _flr_peak_assignment.id _flr_peak_assignment.method_name _flr_peak_assignment.details 1 Population 'Peaks were assigned by population fractions.' # """) out = _get_dumper_output(distance_restraint_dumper, system) self.assertEqual(out, """# loop_ _flr_fret_distance_restraint.ordinal_id _flr_fret_distance_restraint.id _flr_fret_distance_restraint.group_id _flr_fret_distance_restraint.sample_probe_id_1 _flr_fret_distance_restraint.sample_probe_id_2 _flr_fret_distance_restraint.state_id _flr_fret_distance_restraint.analysis_id _flr_fret_distance_restraint.distance _flr_fret_distance_restraint.distance_error_plus _flr_fret_distance_restraint.distance_error_minus _flr_fret_distance_restraint.distance_type _flr_fret_distance_restraint.population_fraction _flr_fret_distance_restraint.peak_assignment_id 1 1 1 1 2 1 1 53.500 2.500 2.300 _E 0.800 1 2 2 1 3 4 1 2 49.000 2.000 2.100 _E 0.800 1 3 3 1 1 2 1 3 53.500 2.500 2.300 _E 0.800 1 # """) out = _get_dumper_output(model_quality_dumper, system) self.assertEqual(out, """# loop_ _flr_fret_model_quality.model_id _flr_fret_model_quality.chi_square_reduced _flr_fret_model_quality.dataset_group_id _flr_fret_model_quality.method _flr_fret_model_quality.details 1 1.300 1 . . 2 1.900 1 . . # """) out = _get_dumper_output(model_distance_dumper, system) self.assertEqual(out, """# loop_ _flr_fret_model_distance.id _flr_fret_model_distance.restraint_id _flr_fret_model_distance.model_id _flr_fret_model_distance.distance _flr_fret_model_distance.distance_deviation 1 1 1 52.000 1.500 2 2 1 50.000 -1.000 3 1 2 53.800 -0.300 4 2 2 49.400 -0.400 # """) out = _get_dumper_output(fps_modeling_dumper, system) self.assertEqual(out, """# loop_ _flr_FPS_modeling.id _flr_FPS_modeling.ihm_modeling_protocol_ordinal_id _flr_FPS_modeling.restraint_group_id _flr_FPS_modeling.global_parameter_id _flr_FPS_modeling.probe_modeling_method _flr_FPS_modeling.details 1 1 1 1 AV3 . 2 1 1 1 MPP . 
# # loop_ _flr_FPS_global_parameter.id _flr_FPS_global_parameter.forster_radius_value _flr_FPS_global_parameter.conversion_function_polynom_order _flr_FPS_global_parameter.repetition _flr_FPS_global_parameter.AV_grid_rel _flr_FPS_global_parameter.AV_min_grid_A _flr_FPS_global_parameter.AV_allowed_sphere _flr_FPS_global_parameter.AV_search_nodes _flr_FPS_global_parameter.AV_E_samples_k _flr_FPS_global_parameter.sim_viscosity_adjustment _flr_FPS_global_parameter.sim_dt_adjustment _flr_FPS_global_parameter.sim_max_iter_k _flr_FPS_global_parameter.sim_max_force _flr_FPS_global_parameter.sim_clash_tolerance_A _flr_FPS_global_parameter.sim_reciprocal_kT _flr_FPS_global_parameter.sim_clash_potential _flr_FPS_global_parameter.convergence_E _flr_FPS_global_parameter.convergence_K _flr_FPS_global_parameter.convergence_F _flr_FPS_global_parameter.convergence_T 1 52 3 1000 0.200 0.400 0.500 3 200 1 1 200 400 1 10 ^2 100 0.001 0.001 0.002 # """) out = _get_dumper_output(av_dumper, system) self.assertEqual(out, """# loop_ _flr_FPS_AV_parameter.id _flr_FPS_AV_parameter.num_linker_atoms _flr_FPS_AV_parameter.linker_length _flr_FPS_AV_parameter.linker_width _flr_FPS_AV_parameter.probe_radius_1 _flr_FPS_AV_parameter.probe_radius_2 _flr_FPS_AV_parameter.probe_radius_3 1 15 20.000 3.500 10.000 5.000 3.500 # # loop_ _flr_FPS_AV_modeling.id _flr_FPS_AV_modeling.sample_probe_id _flr_FPS_AV_modeling.FPS_modeling_id _flr_FPS_AV_modeling.parameter_id 1 1 1 1 2 3 1 1 # """) out = _get_dumper_output(mpp_dumper, system) self.assertEqual(out, """# loop_ _flr_FPS_mean_probe_position.id _flr_FPS_mean_probe_position.sample_probe_id _flr_FPS_mean_probe_position.mpp_xcoord _flr_FPS_mean_probe_position.mpp_ycoord _flr_FPS_mean_probe_position.mpp_zcoord 1 2 1.000 2.000 3.000 2 4 1.000 2.000 3.000 # # loop_ _flr_FPS_MPP_atom_position.id _flr_FPS_MPP_atom_position.entity_id _flr_FPS_MPP_atom_position.seq_id _flr_FPS_MPP_atom_position.comp_id _flr_FPS_MPP_atom_position.atom_id _flr_FPS_MPP_atom_position.asym_id _flr_FPS_MPP_atom_position.xcoord _flr_FPS_MPP_atom_position.ycoord _flr_FPS_MPP_atom_position.zcoord _flr_FPS_MPP_atom_position.group_id 1 1 1 ALA CA C 1.000 1.000 1.000 1 2 1 2 GLY CA C 2.000 2.000 2.000 1 # # loop_ _flr_FPS_MPP_modeling.ordinal_id _flr_FPS_MPP_modeling.FPS_modeling_id _flr_FPS_MPP_modeling.mpp_id _flr_FPS_MPP_modeling.mpp_atom_position_group_id 1 2 1 1 2 2 2 1 # """) out = _get_dumper_output( kinetic_rate_fret_analysis_connection_dumper, system) self.assertEqual(out, """# loop_ _flr_kinetic_rate_analysis.id _flr_kinetic_rate_analysis.fret_analysis_id _flr_kinetic_rate_analysis.kinetic_rate_id _flr_kinetic_rate_analysis.details 1 1 1 connection1 2 2 2 connection2 # """) out = _get_dumper_output( relaxation_time_fret_analysis_connection_dumper, system) self.assertEqual(out, """# loop_ _flr_relaxation_time_analysis.id _flr_relaxation_time_analysis.fret_analysis_id _flr_relaxation_time_analysis.relaxation_time_id _flr_relaxation_time_analysis.details 1 1 1 connection3 2 3 2 connection4 # """) def test_variant_base(self): """Test Variant base class""" v = ihm.dumper.Variant() self.assertIsNone(v.get_dumpers()) self.assertEqual( v.get_system_writer('system', 'writer_class', 'writer'), 'writer') def test_write_variant(self): """Test write() function with Variant object""" sys1 = ihm.System(id='system1') fh = StringIO() ihm.dumper.write(fh, [sys1], variant=ihm.dumper.IHMVariant()) def test_ignore_writer(self): """Test _IgnoreWriter utility class""" class BaseWriter: def flush(self): return 'flush called' def 
write_comment(self, comment): return 'write comment ' + comment s = ihm.dumper._IgnoreWriter(BaseWriter(), []) # These methods are not usually called in ordinary operation, but # we should provide them for Writer compatibility self.assertEqual(s.flush(), 'flush called') self.assertEqual(s.write_comment('foo'), 'write comment foo') def test_write_ignore_variant(self): """Test write() function with IgnoreVariant object""" sys1 = ihm.System(id='system1') fh = StringIO() ihm.dumper.write(fh, [sys1]) self.assertIn('_ihm_struct_assembly', fh.getvalue()) # Test exclude of ihm_struct_assembly category fh = StringIO() ihm.dumper.write( fh, [sys1], variant=ihm.dumper.IgnoreVariant(['_ihm_struct_assembly'])) self.assertNotIn('_ihm_struct_assembly', fh.getvalue()) # Should be case-insensitive and tolerant of missing underscore fh = StringIO() ihm.dumper.write( fh, [sys1], variant=ihm.dumper.IgnoreVariant(['IHM_STRUCT_ASSEMBLY', 'AUDIT_CONFORM'])) self.assertNotIn('_ihm_struct_assembly', fh.getvalue()) def test_dumper_unwrapped(self): """Test dumper output with line wrapping disabled""" system = ihm.System() system.software.append(ihm.Software( name='long-software-name', classification='test code', description='Some test program', version=1, location='http://some-long-url.org')) dumper = ihm.dumper._SoftwareDumper() dumper.finalize(system) try: ihm.dumper.set_line_wrap(False) out = _get_dumper_output(dumper, system) finally: ihm.dumper.set_line_wrap(True) self.assertEqual(out, """# loop_ _software.pdbx_ordinal _software.name _software.classification _software.description _software.version _software.type _software.location _software.citation_id 1 long-software-name 'test code' 'Some test program' 1 program http://some-long-url.org . # """) # noqa: E501 def test_entity_branch_list_dumper(self): """Test EntityBranchListDumper""" system = ihm.System() system.entities.append(ihm.Entity( [ihm.SaccharideChemComp('NAG'), ihm.SaccharideChemComp('FUC')])) # Non-branched entity system.entities.append(ihm.Entity('ACGT')) ed = ihm.dumper._EntityDumper() ed.finalize(system) # Assign IDs dumper = ihm.dumper._EntityBranchListDumper() out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _pdbx_entity_branch_list.entity_id _pdbx_entity_branch_list.num _pdbx_entity_branch_list.comp_id _pdbx_entity_branch_list.hetero 1 1 NAG . 1 2 FUC . 
# """) def test_entity_branch_dumper(self): """Test EntityBranchDumper""" system = ihm.System() system.entities.append(ihm.Entity( [ihm.SaccharideChemComp('NAG'), ihm.SaccharideChemComp('FUC')])) # Non-branched entity system.entities.append(ihm.Entity('ACGT')) ed = ihm.dumper._EntityDumper() ed.finalize(system) # Assign IDs dumper = ihm.dumper._EntityBranchDumper() out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _pdbx_entity_branch.entity_id _pdbx_entity_branch.type 1 oligosaccharide # """) def test_branch_scheme_dumper(self): """Test BranchSchemeDumper""" system = ihm.System() e1 = ihm.Entity([ihm.SaccharideChemComp('NAG'), ihm.SaccharideChemComp('FUC')]) e2 = ihm.Entity([ihm.SaccharideChemComp('FUC'), ihm.SaccharideChemComp('BGC')]) e3 = ihm.Entity([ihm.SaccharideChemComp('NAG'), ihm.SaccharideChemComp('BGC')]) # Non-branched entity e4 = ihm.Entity('ACT') system.entities.extend((e1, e2, e3, e4)) system.asym_units.append(ihm.AsymUnit(e1, 'foo')) system.asym_units.append(ihm.AsymUnit(e2, 'bar', auth_seq_id_map=5)) system.asym_units.append(ihm.AsymUnit( e3, 'bar', auth_seq_id_map={1: 6, 2: (7, 'A')}, orig_auth_seq_id_map={1: 100})) system.asym_units.append(ihm.AsymUnit(e4, 'baz')) ihm.dumper._EntityDumper().finalize(system) ihm.dumper._StructAsymDumper().finalize(system) dumper = ihm.dumper._BranchSchemeDumper() out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _pdbx_branch_scheme.asym_id _pdbx_branch_scheme.entity_id _pdbx_branch_scheme.mon_id _pdbx_branch_scheme.num _pdbx_branch_scheme.pdb_seq_num _pdbx_branch_scheme.pdb_ins_code _pdbx_branch_scheme.auth_seq_num _pdbx_branch_scheme.auth_mon_id _pdbx_branch_scheme.pdb_mon_id _pdbx_branch_scheme.pdb_asym_id A 1 NAG 1 1 . 1 NAG NAG A A 1 FUC 2 2 . 2 FUC FUC A B 2 FUC 1 6 . 6 FUC FUC B B 2 BGC 2 7 . 7 BGC BGC B C 3 NAG 1 6 . 100 NAG NAG C C 3 BGC 2 7 A 7 BGC BGC C # """) def test_branch_descriptor_dumper(self): """Test BranchDescriptorDumper""" system = ihm.System() e1 = ihm.Entity([ihm.SaccharideChemComp('NAG')]) bd1 = ihm.BranchDescriptor('foo', type='typ1', program='prog', program_version='1.0') bd2 = ihm.BranchDescriptor('bar', type='typ2') e1.branch_descriptors.extend((bd1, bd2)) system.entities.append(e1) ihm.dumper._EntityDumper().finalize(system) dumper = ihm.dumper._BranchDescriptorDumper() out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _pdbx_entity_branch_descriptor.ordinal _pdbx_entity_branch_descriptor.entity_id _pdbx_entity_branch_descriptor.descriptor _pdbx_entity_branch_descriptor.type _pdbx_entity_branch_descriptor.program _pdbx_entity_branch_descriptor.program_version 1 1 foo typ1 prog 1.0 2 1 bar typ2 . . 
# """) def test_branch_link_dumper(self): """Test BranchLinkDumper""" system = ihm.System() e1 = ihm.Entity([ihm.SaccharideChemComp('NAG'), ihm.SaccharideChemComp('BMC'), ihm.SaccharideChemComp('FUC')]) lnk1 = ihm.BranchLink(num1=1, atom_id1='CA', leaving_atom_id1='H1', num2=2, atom_id2='N', leaving_atom_id2='H2', order='sing', details='foo') lnk2 = ihm.BranchLink(num1=2, atom_id1='CA', leaving_atom_id1='H1', num2=3, atom_id2='N', leaving_atom_id2='H2') e1.branch_links.extend((lnk1, lnk2)) system.entities.append(e1) ihm.dumper._EntityDumper().finalize(system) dumper = ihm.dumper._BranchLinkDumper() out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _pdbx_entity_branch_link.link_id _pdbx_entity_branch_link.entity_id _pdbx_entity_branch_link.entity_branch_list_num_1 _pdbx_entity_branch_link.comp_id_1 _pdbx_entity_branch_link.atom_id_1 _pdbx_entity_branch_link.leaving_atom_id_1 _pdbx_entity_branch_link.entity_branch_list_num_2 _pdbx_entity_branch_link.comp_id_2 _pdbx_entity_branch_link.atom_id_2 _pdbx_entity_branch_link.leaving_atom_id_2 _pdbx_entity_branch_link.value_order _pdbx_entity_branch_link.details 1 1 1 NAG CA H1 2 BMC N H2 sing foo 2 1 2 BMC CA H1 3 FUC N H2 . . # """) def test_database_dumper(self): """Test DatabaseDumper""" system = ihm.System() dumper = ihm.dumper._DatabaseDumper() out = _get_dumper_output(dumper, system) self.assertEqual(out, '') system = ihm.System( databases=[ihm.Database(id='foo', code='bar'), ihm.Database(id='baz', code='1abc', accession='1abcxyz', doi='1.2.3.4')]) dumper = ihm.dumper._DatabaseDumper() out = _get_dumper_output(dumper, system) self.assertEqual(out, """# loop_ _database_2.database_id _database_2.database_code _database_2.pdbx_database_accession _database_2.pdbx_DOI foo bar . . baz 1abc 1abcxyz 1.2.3.4 # """) def test_database_status_dumper(self): """Test DatabaseStatusDumper""" system = ihm.System() self.assertEqual(system.database_status._map, {}) system.database_status._map = { 'status_code': 'REL', 'entry_id': '5FD1', 'recvd_initial_deposition_date': '1993-06-29', 'deposit_site': ihm.unknown, 'process_site': 'BNL', 'sg_entry': None} dumper = ihm.dumper._DatabaseStatusDumper() out = _get_dumper_output(dumper, system) # sort to remove dict order self.assertEqual("\n".join(sorted(out.split('\n'))), """ _pdbx_database_status.deposit_site ? _pdbx_database_status.entry_id 5FD1 _pdbx_database_status.process_site BNL _pdbx_database_status.recvd_initial_deposition_date 1993-06-29 _pdbx_database_status.sg_entry . _pdbx_database_status.status_code REL""") if __name__ == '__main__': unittest.main() python-ihm-2.7/test/test_edit.py000066400000000000000000000025241503573337200170060ustar00rootroot00000000000000import utils import os import unittest from io import StringIO TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) utils.set_search_paths(TOPDIR) import ihm.reader import ihm.dumper class Tests(unittest.TestCase): def test_entity(self): """Test Entity read followed by write""" sin = StringIO(""" loop_ _entity.id _entity.type _entity.pdbx_description _entity.pdbx_number_of_molecules _entity.formula_weight _entity.details 1 polymer Nup84 2 100.0 . # loop_ _entity_poly_seq.entity_id _entity_poly_seq.num _entity_poly_seq.mon_id _entity_poly_seq.hetero 1 1 ALA . 1 2 CYS . 
""") s, = ihm.reader.read(sin) sout = StringIO() ihm.dumper.write(sout, [s]) def test_orphan(self): """Make sure orphaned objects are preserved""" incif = utils.get_input_file_name(TOPDIR, 'orphan.cif') with open(incif) as fh: s, = ihm.reader.read(fh) sout = StringIO() ihm.dumper.write(sout, [s]) newcif = sout.getvalue() # Make sure orphan object tables show up in the output self.assertIn('_ihm_geometric_object_center', newcif) self.assertIn('_ihm_relaxation_time', newcif) self.assertIn('_ihm_external_reference_info', newcif) self.assertIn('_chem_comp.', newcif) if __name__ == '__main__': unittest.main() python-ihm-2.7/test/test_examples.py000066400000000000000000000074661503573337200177110ustar00rootroot00000000000000import utils import os import unittest import sys import subprocess import pickle TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) utils.set_search_paths(TOPDIR) import ihm.reader def get_example_dir(): return os.path.join(TOPDIR, "examples") def get_example_path(fname): return os.path.join(get_example_dir(), fname) class Tests(unittest.TestCase): @unittest.skipIf('APPVEYOR' in os.environ, "AppVeyor environments have old SSL certs") @unittest.skipIf('GITHUB_ACTIONS' in os.environ, "Example is slow and fails when PDB-IHM is down") def test_validator_example(self): """Test validator example""" subprocess.check_call([sys.executable, get_example_path("validate_pdb_ihm.py")]) def test_simple_docking_example(self): """Test simple-docking example""" with utils.temporary_directory() as tmpdir: subprocess.check_call([sys.executable, get_example_path("simple-docking.py")], cwd=tmpdir) # Make sure that a complete output file was produced and that we # can read it with open(os.path.join(tmpdir, 'output.cif')) as fh: contents = fh.readlines() self.assertEqual(len(contents), 321) with open(os.path.join(tmpdir, 'output.cif')) as fh: s, = ihm.reader.read(fh) def test_locations_example(self): """Test locations example""" subprocess.check_call([sys.executable, "locations.py"], cwd=get_example_dir()) out = get_example_path("output.cif") # Make sure that a complete output file was produced and that we # can read it with open(out) as fh: contents = fh.readlines() self.assertEqual(len(contents), 71) with open(out) as fh: s, = ihm.reader.read(fh) os.unlink(out) def test_ligands_water_example(self): """Test ligands_water example""" subprocess.check_call([sys.executable, "ligands_water.py"], cwd=get_example_dir()) out = get_example_path("output.cif") # Make sure that a complete output file was produced and that we # can read it with open(out) as fh: contents = fh.readlines() self.assertEqual(len(contents), 255) with open(out) as fh: s, = ihm.reader.read(fh) # Make sure that resulting Python objects are picklable testpck = 'test-lig-wat.pck' with open(testpck, 'wb') as fh: pickle.dump(s, fh, protocol=-1) with open(testpck, 'rb') as fh: _ = pickle.load(fh) os.unlink(out) os.unlink(testpck) def test_non_standard_residues_example(self): """Test non_standard_residues example""" subprocess.check_call([sys.executable, "non_standard_residues.py"], cwd=get_example_dir()) out = get_example_path("output.cif") # Make sure that a complete output file was produced and that we # can read it with open(out) as fh: contents = fh.readlines() self.assertEqual(len(contents), 66) with open(out) as fh: s, = ihm.reader.read(fh) os.unlink(out) def test_stream_parser_example(self): """Test stream_parser example""" subprocess.check_call([sys.executable, "stream_parser.py"], cwd=get_example_dir()) def 
test_token_reader_example(self): """Test token_reader example""" subprocess.check_call([sys.executable, "token_reader.py"], cwd=get_example_dir()) if __name__ == '__main__': unittest.main() python-ihm-2.7/test/test_flr.py000066400000000000000000002130441503573337200166450ustar00rootroot00000000000000import utils import os import unittest TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) utils.set_search_paths(TOPDIR) import ihm.flr class Tests(unittest.TestCase): def test_probe_init(self): """Test initialization of probe_list_entry and probe_descriptor.""" p = ihm.flr.Probe(probe_list_entry='foo', probe_descriptor='bar') self.assertEqual(p.probe_list_entry, 'foo') self.assertEqual(p.probe_descriptor, 'bar') def test_probe_eq(self): """Test equality and inequality of Probe objects""" p_ref = ihm.flr.Probe(probe_list_entry='foo', probe_descriptor='bar') p_equal = ihm.flr.Probe(probe_list_entry='foo', probe_descriptor='bar') p_unequal = ihm.flr.Probe( probe_list_entry='foo2', probe_descriptor='bar') self.assertTrue(p_ref == p_equal) self.assertFalse(p_ref == p_unequal) self.assertTrue(p_ref != p_unequal) def test_probe_descriptor_init(self): """ Test initialization of ProbeDescriptor """ p = ihm.flr.ProbeDescriptor(reactive_probe_chem_descriptor='foo', chromophore_chem_descriptor='bar', chromophore_center_atom='foo2') self.assertEqual(p.reactive_probe_chem_descriptor, 'foo') self.assertEqual(p.chromophore_chem_descriptor, 'bar') self.assertEqual(p.chromophore_center_atom, 'foo2') def test_probe_descriptor_eq(self): """Test equality and inequality of ProbeDescriptor objects.""" p_ref = ihm.flr.ProbeDescriptor(reactive_probe_chem_descriptor='foo', chromophore_chem_descriptor='bar', chromophore_center_atom='foo2') p_equal = ihm.flr.ProbeDescriptor(reactive_probe_chem_descriptor='foo', chromophore_chem_descriptor='bar', chromophore_center_atom='foo2') p_unequal = ihm.flr.ProbeDescriptor( reactive_probe_chem_descriptor='foo', chromophore_chem_descriptor='bar2', chromophore_center_atom='foo2') self.assertTrue(p_ref == p_equal) self.assertFalse(p_ref == p_unequal) self.assertTrue(p_ref != p_unequal) def test_probe_list_init(self): """ Test initialization of ProbeList. """ p = ihm.flr.ProbeList(chromophore_name='foo', reactive_probe_flag=False, reactive_probe_name='bar', probe_origin='foo2', probe_link_type='bar2') self.assertEqual(p.chromophore_name, 'foo') self.assertEqual(p.reactive_probe_flag, False) self.assertEqual(p.reactive_probe_name, 'bar') self.assertEqual(p.probe_origin, 'foo2') self.assertEqual(p.probe_link_type, 'bar2') def test_probe_list_eq(self): """Test equality and inequality of ProbeList objects.""" p_ref = ihm.flr.ProbeList(chromophore_name='foo', reactive_probe_flag=False, reactive_probe_name='bar', probe_origin='foo2', probe_link_type='bar2') p_equal = ihm.flr.ProbeList(chromophore_name='foo', reactive_probe_flag=False, reactive_probe_name='bar', probe_origin='foo2', probe_link_type='bar2') p_unequal = ihm.flr.ProbeList(chromophore_name='foo2', reactive_probe_flag=True, reactive_probe_name='bar', probe_origin='foo2', probe_link_type='bar2') self.assertTrue(p_ref == p_equal) self.assertFalse(p_ref == p_unequal) self.assertTrue(p_ref != p_unequal) def test_sample_probe_details_init(self): """ Test initialization of SampleProbeDetails. 
""" s = ihm.flr.SampleProbeDetails(sample='foo', probe='bar', description='foo2', poly_probe_position='bar2') self.assertEqual(s.sample, 'foo') self.assertEqual(s.probe, 'bar') self.assertEqual(s.fluorophore_type, 'unspecified') self.assertEqual(s.description, 'foo2') self.assertEqual(s.poly_probe_position, 'bar2') def test_sample_probe_details_eq(self): """Test equality and inequality of SampleProbeDetails objects.""" s_ref = ihm.flr.SampleProbeDetails( sample='foo', probe='bar', description='foo2', poly_probe_position='bar2') s_equal = ihm.flr.SampleProbeDetails( sample='foo', probe='bar', description='foo2', poly_probe_position='bar2') s_unequal = ihm.flr.SampleProbeDetails( sample='foo', probe='bar3', description='foo2', poly_probe_position='bar2') self.assertTrue(s_ref == s_equal) self.assertFalse(s_ref == s_unequal) self.assertTrue(s_ref != s_unequal) def test_poly_probe_conjugate_init(self): """ Test initialization of PolyProbeConjugate. """ p = ihm.flr.PolyProbeConjugate(sample_probe='foo', chem_descriptor='bar', ambiguous_stoichiometry=True, probe_stoichiometry=0.5) self.assertEqual(p.sample_probe, 'foo') self.assertEqual(p.chem_descriptor, 'bar') self.assertEqual(p.ambiguous_stoichiometry, True) self.assertEqual(p.probe_stoichiometry, 0.5) def test_poly_probe_conjugate_eq(self): """ Test equality and inequality of PolyProbeConjugate objects. """ p_ref = ihm.flr.PolyProbeConjugate(sample_probe='foo', chem_descriptor='bar', ambiguous_stoichiometry=True, probe_stoichiometry=0.5) p_equal = ihm.flr.PolyProbeConjugate(sample_probe='foo', chem_descriptor='bar', ambiguous_stoichiometry=True, probe_stoichiometry=0.5) p_unequal = ihm.flr.PolyProbeConjugate( sample_probe='foo2', chem_descriptor='bar', ambiguous_stoichiometry=True, probe_stoichiometry=0.5) self.assertTrue(p_ref == p_equal) self.assertFalse(p_ref == p_unequal) self.assertTrue(p_ref != p_unequal) def test_poly_probe_position_init(self): """ Test initialization of PolyProbePosition. 
""" p = ihm.flr.PolyProbePosition(resatom='foo', mutation_flag=True, modification_flag=True, auth_name='foo3', mutated_chem_comp_id='foobar', modified_chem_descriptor='foobar2') self.assertEqual(p.resatom, 'foo') self.assertEqual(p.mutation_flag, True) self.assertEqual(p.modification_flag, True) self.assertEqual(p.auth_name, 'foo3') self.assertEqual(p.mutated_chem_comp_id, 'foobar') self.assertEqual(p.modified_chem_descriptor, 'foobar2') def test_poly_probe_position_eq(self): """Test equality and inequality of PolyProbePosition objects.""" p_ref = ihm.flr.PolyProbePosition( resatom='foo', mutation_flag=True, modification_flag=True, auth_name='foo3', mutated_chem_comp_id='foobar', modified_chem_descriptor='foobar2') p_equal = ihm.flr.PolyProbePosition( resatom='foo', mutation_flag=True, modification_flag=True, auth_name='foo3', mutated_chem_comp_id='foobar', modified_chem_descriptor='foobar2') p_unequal = ihm.flr.PolyProbePosition( resatom='bar', mutation_flag=True, modification_flag=True, auth_name='foo3', mutated_chem_comp_id='foobar', modified_chem_descriptor='foobar2') self.assertTrue(p_ref == p_equal) self.assertFalse(p_ref == p_unequal) self.assertTrue(p_ref != p_unequal) def test_sample_init(self): """Test initialization of Sample.""" s = ihm.flr.Sample(entity_assembly='foo', num_of_probes='bar', condition='foo2', description='foo3', details='foo4', solvent_phase='foobar') self.assertEqual(s.entity_assembly, 'foo') self.assertEqual(s.num_of_probes, 'bar') self.assertEqual(s.condition, 'foo2') self.assertEqual(s.description, 'foo3') self.assertEqual(s.details, 'foo4') self.assertEqual(s.solvent_phase, 'foobar') def test_sample_eq(self): """Test equality and inequality of Sample objects.""" s_ref = ihm.flr.Sample(entity_assembly='foo', num_of_probes='foo2', condition='foo3', description='foo4', details='foo5', solvent_phase='foo6') s_equal = ihm.flr.Sample(entity_assembly='foo', num_of_probes='foo2', condition='foo3', description='foo4', details='foo5', solvent_phase='foo6') s_unequal = ihm.flr.Sample(entity_assembly='bar', num_of_probes='bar2', condition='bar3', description='bar4', details='bar5', solvent_phase='bar6') self.assertTrue(s_ref == s_equal) self.assertFalse(s_ref == s_unequal) self.assertTrue(s_ref != s_unequal) def test_entity_assembly_init(self): """ Test initialization of EntityAssembly. """ e = ihm.flr.EntityAssembly(entity='foo', num_copies=1) self.assertEqual(len(e.entity_list), 1) self.assertEqual(e.entity_list[0], 'foo') self.assertEqual(e.num_copies_list[0], 1) def test_entity_assembly_add_entity(self): """Test addition of entities to the assembly.""" e = ihm.flr.EntityAssembly(entity='foo', num_copies=1) e.add_entity(entity='foo2', num_copies=2) self.assertRaises(ValueError, e.add_entity, entity='foo2', num_copies=-1) self.assertEqual(len(e.entity_list), 2) self.assertEqual(e.entity_list, ['foo', 'foo2']) self.assertEqual(e.num_copies_list, [1, 2]) def test_entity_assembly_eq(self): """ Test equality and inequality of EntityAssembly objects. """ e_ref = ihm.flr.EntityAssembly(entity='foo', num_copies=1) e_equal = ihm.flr.EntityAssembly(entity='foo', num_copies=1) e_unequal = ihm.flr.EntityAssembly(entity='foo2', num_copies=1) self.assertTrue(e_ref == e_equal) self.assertFalse(e_ref == e_unequal) self.assertTrue(e_ref != e_unequal) def test_sample_condition_init(self): """ Test initialization of SampleCondition. 
""" s = ihm.flr.SampleCondition(details='foo') self.assertEqual(s.details, 'foo') def test_sample_condition_eq(self): """ Test equality and inequality of SampleCondition objects. """ s_ref = ihm.flr.SampleCondition(details='foo') s_equal = ihm.flr.SampleCondition(details='foo') s_unequal = ihm.flr.SampleCondition(details='bar') self.assertTrue(s_ref == s_equal) self.assertFalse(s_ref == s_unequal) self.assertTrue(s_ref != s_unequal) def test_experiment_init(self): """Test initialization of Experiment.""" # Initialization with only one parameter given should not add an entry e1 = ihm.flr.Experiment(instrument='foo') self.assertEqual(len(e1.instrument_list), 0) self.assertEqual(len(e1.inst_setting_list), 0) self.assertEqual(len(e1.exp_condition_list), 0) self.assertEqual(len(e1.sample_list), 0) # Correct initialization should fill the lists e2 = ihm.flr.Experiment(instrument='foo', inst_setting='bar', exp_condition='foo2', sample='foo3', details='bar2') self.assertEqual(len(e2.instrument_list), 1) self.assertEqual(e2.instrument_list[0], 'foo') self.assertEqual(e2.inst_setting_list[0], 'bar') self.assertEqual(e2.exp_condition_list[0], 'foo2') self.assertEqual(e2.sample_list[0], 'foo3') self.assertEqual(e2.details_list[0], 'bar2') # Initialization without details given should still have an entry # in the list e3 = ihm.flr.Experiment(instrument='foo', inst_setting='bar', exp_condition='bar2', sample='foo2') self.assertEqual(len(e3.details_list), 1) self.assertIsNone(e3.details_list[0]) def test_experiment_add_entry(self): """ Test addition of an entry to the experiment. """ # Adding to an empty Experiment e1 = ihm.flr.Experiment() e1.add_entry(instrument='foo', inst_setting='bar', exp_condition='foo2', sample='foo3', details='bar2') self.assertEqual(e1.instrument_list[0], 'foo') self.assertEqual(e1.inst_setting_list[0], 'bar') self.assertEqual(e1.exp_condition_list[0], 'foo2') self.assertEqual(e1.sample_list[0], 'foo3') self.assertEqual(e1.details_list[0], 'bar2') # adding to an existing Experiment e2 = ihm.flr.Experiment(instrument='foo', inst_setting='foo2', exp_condition='foo3', sample='foo4', details='foo5') e2.add_entry(instrument='bar', inst_setting='bar2', exp_condition='bar3', sample='bar4', details='bar5') self.assertEqual(e2.instrument_list, ['foo', 'bar']) self.assertEqual(e2.inst_setting_list, ['foo2', 'bar2']) self.assertEqual(e2.exp_condition_list, ['foo3', 'bar3']) self.assertEqual(e2.sample_list, ['foo4', 'bar4']) self.assertEqual(e2.details_list, ['foo5', 'bar5']) def test_experiment_get_entry_by_index(self): """ Test access to entries by index. 
""" e = ihm.flr.Experiment() e.add_entry(instrument='foo', inst_setting='foo2', exp_condition='foo3', sample='foo4', details='foo5') e.add_entry(instrument='bar', inst_setting='bar2', exp_condition='bar3', sample='bar4', details='bar5') e.add_entry(instrument='foobar', inst_setting='foobar2', exp_condition='foobar3', sample='foobar4', details='foobar5') return_value_index0 = e.get_entry_by_index(0) return_value_index1 = e.get_entry_by_index(1) return_value_index2 = e.get_entry_by_index(2) self.assertEqual(return_value_index0, ('foo', 'foo2', 'foo3', 'foo4', 'foo5')) self.assertEqual(return_value_index1, ('bar', 'bar2', 'bar3', 'bar4', 'bar5')) self.assertEqual(return_value_index2, ('foobar', 'foobar2', 'foobar3', 'foobar4', 'foobar5')) def test_experiment_contains(self): """Test whether experiment contains a combination of instrument, exp_setting, and sample.""" # An empty experiment should not contain anything e1 = ihm.flr.Experiment() self.assertFalse(e1.contains('foo', 'foo2', 'foo3', 'foo4')) # After addition, the entry should be contained e1.add_entry(instrument='foo', inst_setting='foo2', exp_condition='foo3', sample='foo4') e1.add_entry(instrument='bar', inst_setting='bar2', exp_condition='bar3', sample='bar4') self.assertTrue(e1.contains('foo', 'foo2', 'foo3', 'foo4')) # If one of the entries is not contained, then False self.assertFalse(e1.contains('foo2', 'foo2', 'foo4', 'foo5')) self.assertFalse(e1.contains('foobar', 'foobar2', 'foobar3', 'foobar4')) def test_experiment_eq(self): """ Test equality and inequality of Experiment objects. """ e_ref = ihm.flr.Experiment() e_ref.add_entry(instrument='foo', inst_setting='foo2', exp_condition='foo3', sample='foo4') e_equal = ihm.flr.Experiment() e_equal.add_entry(instrument='foo', inst_setting='foo2', exp_condition='foo3', sample='foo4') e_unequal = ihm.flr.Experiment() e_unequal.add_entry(instrument='bar', inst_setting='bar2', exp_condition='bar3', sample='bar4') self.assertTrue(e_ref == e_equal) self.assertFalse(e_ref == e_unequal) self.assertTrue(e_ref != e_unequal) def test_instrument_init(self): """ Test initialization of Instrument. 
""" i = ihm.flr.Instrument(details='foo') self.assertEqual(i.details, 'foo') def test_instrument_eq(self): """Test equality and inequality of Instrument objects.""" i_ref = ihm.flr.Instrument(details='foo') i_equal = ihm.flr.Instrument(details='foo') i_unequal = ihm.flr.Instrument(details='bar') self.assertTrue(i_ref == i_equal) self.assertFalse(i_ref == i_unequal) self.assertTrue(i_ref != i_unequal) def test_inst_setting_init(self): """Test initialization of InstSetting.""" e = ihm.flr.InstSetting(details='foo') self.assertEqual(e.details, 'foo') def test_inst_setting_eq(self): """Test equality and inequality of InstSetting objects.""" e_ref = ihm.flr.InstSetting(details='foo') e_equal = ihm.flr.InstSetting(details='foo') e_unequal = ihm.flr.InstSetting(details='bar') self.assertTrue(e_ref == e_equal) self.assertFalse(e_ref == e_unequal) self.assertTrue(e_ref != e_unequal) def test_exp_condition_init(self): """Test initialization of ExpCondition.""" e = ihm.flr.ExpCondition(details='foo') self.assertEqual(e.details, 'foo') def test_exp_condition_eq(self): """Test equality and inequality of ExpCondition objects.""" e_ref = ihm.flr.ExpCondition(details='foo') e_equal = ihm.flr.ExpCondition(details='foo') e_unequal = ihm.flr.ExpCondition(details='bar') self.assertTrue(e_ref == e_equal) self.assertFalse(e_ref == e_unequal) self.assertTrue(e_ref != e_unequal) def test_fret_analysis_init(self): """Test initialization of FRETAnalysis.""" f = ihm.flr.FRETAnalysis( experiment='this_experiment', sample_probe_1='this_sample_probe_1', sample_probe_2='this_sample_probe_2', forster_radius='this_forster_radius', type='intensity-based', calibration_parameters='this_calibration_parameters', lifetime_fit_model='this_lifetime_fit_model', ref_measurement_group='this_ref_measurement_group', method_name='this_method_name', chi_square_reduced='this_chi_square_reduced', donor_only_fraction='this_donly_fraction', dataset='this_dataset_list_id', file='this_external_file', software='this_software') self.assertEqual(f.experiment, 'this_experiment') self.assertEqual(f.sample_probe_1, 'this_sample_probe_1') self.assertEqual(f.sample_probe_2, 'this_sample_probe_2') self.assertEqual(f.forster_radius, 'this_forster_radius') self.assertEqual(f.type, 'intensity-based') self.assertEqual(f.calibration_parameters, 'this_calibration_parameters') self.assertEqual(f.lifetime_fit_model, 'this_lifetime_fit_model') self.assertEqual(f.ref_measurement_group, 'this_ref_measurement_group') self.assertEqual(f.method_name, 'this_method_name') self.assertEqual(f.chi_square_reduced, 'this_chi_square_reduced') self.assertEqual(f.donor_only_fraction, 'this_donly_fraction') self.assertEqual(f.dataset, 'this_dataset_list_id') self.assertEqual(f.external_file, 'this_external_file') self.assertEqual(f.software, 'this_software') self.assertRaises( ValueError, ihm.flr.FRETAnalysis, experiment='this_experiment', sample_probe_1='this_sample_probe_1', sample_probe_2='this_sample_probe_2', forster_radius='this_forster_radius', type='garbage') def test_fret_analysis_eq(self): """Test equality and inequality of FRETAnalysis objects.""" f_ref = ihm.flr.FRETAnalysis( experiment='this_experiment', sample_probe_1='this_sample_probe_1', sample_probe_2='this_sample_probe_2', forster_radius='this_forster_radius', type='intensity-based', calibration_parameters='this_calibration_parameters', method_name='this_method_name', chi_square_reduced='this_chi_square_reduced', donor_only_fraction='this_donly_fraction', dataset='this_dataset_list_id', 
file='this_external_file', software='this_software') f_equal = ihm.flr.FRETAnalysis( experiment='this_experiment', sample_probe_1='this_sample_probe_1', sample_probe_2='this_sample_probe_2', forster_radius='this_forster_radius', type='intensity-based', calibration_parameters='this_calibration_parameters', method_name='this_method_name', chi_square_reduced='this_chi_square_reduced', donor_only_fraction='this_donly_fraction', dataset='this_dataset_list_id', file='this_external_file', software='this_software') f_unequal = ihm.flr.FRETAnalysis( experiment='this_experiment', sample_probe_1='foo', sample_probe_2='this_sample_probe_2', forster_radius='this_forster_radius', type='intensity-based', calibration_parameters='this_calibration_parameters', method_name='this_method_name', chi_square_reduced='this_chi_square_reduced', donor_only_fraction='this_donly_fraction', dataset='this_dataset_list_id', file='this_external_file', software='this_software') f_unequal_type = ihm.flr.FRETAnalysis( experiment='this_experiment', sample_probe_1='this_sample_probe_1', sample_probe_2='this_sample_probe_2', forster_radius='this_forster_radius', type='lifetime-based', calibration_parameters='this_calibration_parameters', method_name='this_method_name', chi_square_reduced='this_chi_square_reduced', donor_only_fraction='this_donly', dataset='this_dataset_list_id', file='this_external_file', software='this_software') self.assertTrue(f_ref == f_equal) self.assertFalse(f_ref == f_unequal) self.assertTrue(f_ref != f_unequal) self.assertFalse(f_ref == f_unequal_type) self.assertTrue(f_ref != f_unequal_type) def test_lifetime_fit_model_init(self): """ Test initialization of LifetimeFitModel.""" f = ihm.flr.LifetimeFitModel(name='this_name', description='this_description', file='this_ext_file', citation='this_citation') self.assertEqual(f.name, 'this_name') self.assertEqual(f.description, 'this_description') self.assertEqual(f.external_file, 'this_ext_file') self.assertEqual(f.citation, 'this_citation') def test_lifetime_fit_model_eq(self): """ Test equality and inequality of LifeTimeFitModel objects.""" f_ref = ihm.flr.LifetimeFitModel( name='this_name', description='this_desc') f_equal = ihm.flr.LifetimeFitModel( name='this_name', description='this_desc') f_unequal = ihm.flr.LifetimeFitModel( name='other_name', description='this_desc') self.assertTrue(f_ref == f_equal) self.assertFalse(f_ref == f_unequal) self.assertTrue(f_ref != f_unequal) def test_ref_measurement_group_init(self): """ Test initialization of RefMeasurementGroup.""" r = ihm.flr.RefMeasurementGroup() self.assertEqual(r.ref_measurement_list, []) def test_ref_measurement_group_add_ref_measurement(self): """ Test the addition of a RefMeasurement to the group.""" r = ihm.flr.RefMeasurementGroup() r.add_ref_measurement('foo') r.add_ref_measurement('bar') self.assertEqual(r.ref_measurement_list, ['foo', 'bar']) def test_ref_measurement_group_get_info(self): """ Test the retrieval of the ref_measurement_list.""" r = ihm.flr.RefMeasurementGroup() r.add_ref_measurement('foo') r.add_ref_measurement('bar') return_value = r.get_info() self.assertEqual(return_value, ['foo', 'bar']) def test_ref_measurement_group_eq(self): """ Test equality and inequality of RefMeasurementGroup objects.""" r_ref = ihm.flr.RefMeasurementGroup() r_ref.add_ref_measurement('foo') r_equal = ihm.flr.RefMeasurementGroup() r_equal.add_ref_measurement('foo') r_unequal = ihm.flr.RefMeasurementGroup() r_unequal.add_ref_measurement('foo2') self.assertTrue(r_ref == r_equal) 
self.assertFalse(r_ref == r_unequal) self.assertTrue(r_ref != r_unequal) def test_ref_measurement_init(self): """Test initialization of RefMeasurement.""" r1 = ihm.flr.RefMeasurement(ref_sample_probe='this_ref_sample_probe', details='this_details') self.assertEqual(r1.ref_sample_probe, 'this_ref_sample_probe') self.assertEqual(r1.details, 'this_details') self.assertEqual(r1.list_of_lifetimes, []) r2 = ihm.flr.RefMeasurement(ref_sample_probe='this_ref_sample_probe_2', details='this_details_2', list_of_lifetimes=['foo', 'bar']) self.assertEqual(r2.ref_sample_probe, 'this_ref_sample_probe_2') self.assertEqual(r2.details, 'this_details_2') self.assertEqual(r2.list_of_lifetimes, ['foo', 'bar']) def test_ref_measurement_add_lifetime(self): """ Test addition of a lifetime to the list_of_lifetimes.""" r = ihm.flr.RefMeasurement(ref_sample_probe='this_ref_sample_probe', details='this_details') r.add_lifetime('foo') r.add_lifetime('bar') self.assertEqual(r.list_of_lifetimes, ['foo', 'bar']) def test_ref_measurement_eq(self): """ Test equality and inequality of RefMeasurement objects.""" r_ref = ihm.flr.RefMeasurement( ref_sample_probe='this_ref_sample_probe_1', details='this_details_1') r_equal = ihm.flr.RefMeasurement( ref_sample_probe='this_ref_sample_probe_1', details='this_details_1') r_unequal = ihm.flr.RefMeasurement( ref_sample_probe='this_ref_sample_probe_2', details='this_details_2') r_unequal_list = ihm.flr.RefMeasurement( ref_sample_probe='this_ref_sample_probe_1', details='this_details_1', list_of_lifetimes=['foo']) self.assertTrue(r_ref == r_equal) self.assertTrue(r_ref != r_unequal) self.assertTrue(r_ref != r_unequal_list) self.assertFalse(r_ref == r_unequal) self.assertFalse(r_ref == r_unequal_list) def test_ref_measurement_lifetime_init(self): """ Test initialization of RefMeasurementLifetime objects.""" f = ihm.flr.RefMeasurementLifetime(species_fraction='this_frac', lifetime='this_lifetime', species_name='foo') self.assertEqual(f.species_fraction, 'this_frac') self.assertEqual(f.lifetime, 'this_lifetime') self.assertEqual(f.species_name, 'foo') def test_ref_measurement_lifetime_eq(self): """ Test equality and inequality of RefMeasurementLifetime objects.""" f_ref = ihm.flr.RefMeasurementLifetime(species_fraction='this_frac_1', lifetime='this_lifetime_1') f_equal = ihm.flr.RefMeasurementLifetime( species_fraction='this_frac_1', lifetime='this_lifetime_1') f_unequal = ihm.flr.RefMeasurementLifetime( species_fraction='this_frac_2', lifetime='this_lifetime_1') self.assertTrue(f_ref == f_equal) self.assertFalse(f_ref == f_unequal) self.assertTrue(f_ref != f_unequal) def test_fret_distance_restraint_group_init(self): """ Test initialization of FRETDistanceRestraintGroup. """ f = ihm.flr.FRETDistanceRestraintGroup() self.assertEqual(f.distance_restraint_list, []) def test_fret_distance_restraint_group_add_distance_restraint(self): """ Test the addition of a FRETDistanceRestraint to the group. """ f = ihm.flr.FRETDistanceRestraintGroup() f.add_distance_restraint('foo') f.add_distance_restraint('bar') self.assertEqual(f.distance_restraint_list, ['foo', 'bar']) def test_fret_distance_restraint_group_get_info(self): """ Test the retrieval of the distance_restraint_list.
""" f = ihm.flr.FRETDistanceRestraintGroup() f.add_distance_restraint('foo') f.add_distance_restraint('bar') return_value = f.get_info() self.assertEqual(return_value, ['foo', 'bar']) def test_fret_distance_restraint_group_eq(self): """Test equality and inequality of FRETDistanceRestraintGroup objects.""" f_ref = ihm.flr.FRETDistanceRestraintGroup() f_ref.add_distance_restraint('foo') f_equal = ihm.flr.FRETDistanceRestraintGroup() f_equal.add_distance_restraint('foo') f_unequal = ihm.flr.FRETDistanceRestraintGroup() f_unequal.add_distance_restraint('bar') self.assertTrue(f_ref == f_equal) self.assertFalse(f_ref == f_unequal) self.assertTrue(f_ref != f_unequal) def test_fret_distance_restraint_init(self): """Test initialization of FRETDistanceRestraint.""" f = ihm.flr.FRETDistanceRestraint( sample_probe_1='this_sample_probe_1', sample_probe_2='this_sample_probe_2', state='this_state', analysis='this_analysis', distance='this_distance', distance_error_plus='this_distance_error_plus', distance_error_minus='this_distance_error_minus', distance_type='this_distance_type', population_fraction='this_population_fraction', peak_assignment='this_peak_assignment') self.assertEqual(f.sample_probe_1, 'this_sample_probe_1') self.assertEqual(f.sample_probe_2, 'this_sample_probe_2') self.assertEqual(f.state, 'this_state') self.assertEqual(f.analysis, 'this_analysis') self.assertEqual(f.distance, 'this_distance') self.assertEqual(f.distance_error_plus, 'this_distance_error_plus') self.assertEqual(f.distance_error_minus, 'this_distance_error_minus') self.assertEqual(f.distance_type, 'this_distance_type') self.assertEqual(f.population_fraction, 'this_population_fraction') self.assertEqual(f.peak_assignment, 'this_peak_assignment') def test_fret_distance_restraint_eq(self): """Test equality and inequality of FRETDistanceRestraint objects.""" f_ref = ihm.flr.FRETDistanceRestraint( sample_probe_1='this_sample_probe_1', sample_probe_2='this_sample_probe_2', state='this_state', analysis='this_analysis', distance='this_distance', distance_error_plus='this_distance_error_plus', distance_error_minus='this_distance_error_minus', distance_type='this_distance_type', population_fraction='this_population_fraction', peak_assignment='this_peak_assignment') f_equal = ihm.flr.FRETDistanceRestraint( sample_probe_1='this_sample_probe_1', sample_probe_2='this_sample_probe_2', state='this_state', analysis='this_analysis', distance='this_distance', distance_error_plus='this_distance_error_plus', distance_error_minus='this_distance_error_minus', distance_type='this_distance_type', population_fraction='this_population_fraction', peak_assignment='this_peak_assignment') f_unequal = ihm.flr.FRETDistanceRestraint( sample_probe_1='this_sample_probe_1', sample_probe_2='this_sample_probe_2', state='foo', analysis='this_analysis', distance='this_distance', distance_error_plus='this_distance_error_plus', distance_error_minus='this_distance_error_minus', distance_type='this_distance_type', population_fraction='this_population_fraction', peak_assignment='this_peak_assignment') self.assertTrue(f_ref == f_equal) self.assertFalse(f_ref == f_unequal) self.assertTrue(f_ref != f_unequal) def test_fret_forster_radius_init(self): """ Test initialization of FRETForsterRadius. 
""" f = ihm.flr.FRETForsterRadius( donor_probe='foo', acceptor_probe='bar', forster_radius='foo2', reduced_forster_radius='bar2') self.assertEqual(f.donor_probe, 'foo') self.assertEqual(f.acceptor_probe, 'bar') self.assertEqual(f.forster_radius, 'foo2') self.assertEqual(f.reduced_forster_radius, 'bar2') def test_fret_forster_radius_eq(self): """ Test equality and inequality of FRETForsterRadius objects. """ f_ref = ihm.flr.FRETForsterRadius( donor_probe='foo', acceptor_probe='bar', forster_radius='foo2', reduced_forster_radius='bar2') f_equal = ihm.flr.FRETForsterRadius( donor_probe='foo', acceptor_probe='bar', forster_radius='foo2', reduced_forster_radius='bar2') f_unequal = ihm.flr.FRETForsterRadius( donor_probe='foobar', acceptor_probe='bar', forster_radius='foo2', reduced_forster_radius='bar2') self.assertTrue(f_ref == f_equal) self.assertFalse(f_ref == f_unequal) self.assertTrue(f_ref != f_unequal) def test_fret_calibration_parameters_init(self): """Test initialization of FRETCalibrationParameters.""" f = ihm.flr.FRETCalibrationParameters(phi_acceptor='this_phi_acceptor', alpha='this_alpha', alpha_sd='this_alpha_sd', gg_gr_ratio='this_gG_gR_ratio', beta='this_beta', gamma='this_gamma', delta='this_delta', a_b='this_a_b') self.assertEqual(f.phi_acceptor, 'this_phi_acceptor') self.assertEqual(f.alpha, 'this_alpha') self.assertEqual(f.alpha_sd, 'this_alpha_sd') self.assertEqual(f.gg_gr_ratio, 'this_gG_gR_ratio') self.assertEqual(f.beta, 'this_beta') self.assertEqual(f.gamma, 'this_gamma') self.assertEqual(f.delta, 'this_delta') self.assertEqual(f.a_b, 'this_a_b') def test_fret_calibration_parameters_eq(self): """Test equality and inequality of FRETCalibrationParameters objects.""" f_ref = ihm.flr.FRETCalibrationParameters( phi_acceptor='this_phi_acceptor', alpha='this_alpha', alpha_sd='this_alpha_sd', gg_gr_ratio='this_gG_gR_ratio', beta='this_beta', gamma='this_gamma', delta='this_delta', a_b='this_a_b') f_equal = ihm.flr.FRETCalibrationParameters( phi_acceptor='this_phi_acceptor', alpha='this_alpha', alpha_sd='this_alpha_sd', gg_gr_ratio='this_gG_gR_ratio', beta='this_beta', gamma='this_gamma', delta='this_delta', a_b='this_a_b') f_unequal = ihm.flr.FRETCalibrationParameters( phi_acceptor='foo', alpha='this_alpha', alpha_sd='this_alpha_sd', gg_gr_ratio='this_gG_gR_ratio', beta='this_beta', gamma='this_gamma', delta='this_delta', a_b='this_a_b') self.assertTrue(f_ref == f_equal) self.assertFalse(f_ref == f_unequal) self.assertTrue(f_ref != f_unequal) def test_peak_assignment_init(self): """ Test initialization of PeakAssignment. """ p = ihm.flr.PeakAssignment(method_name='foo', details='bar') self.assertEqual(p.method_name, 'foo') self.assertEqual(p.details, 'bar') def test_peak_assignment_eq(self): """ Test equality and inequality of PeakAssignment objects. """ p_ref = ihm.flr.PeakAssignment(method_name='foo', details='bar') p_equal = ihm.flr.PeakAssignment(method_name='foo', details='bar') p_unequal = ihm.flr.PeakAssignment(method_name='foobar', details='bar') self.assertTrue(p_ref == p_equal) self.assertFalse(p_ref == p_unequal) self.assertTrue(p_ref != p_unequal) def test_fret_model_quality_init(self): """ Test initialization of FRETModelQuality. 
""" f = ihm.flr.FRETModelQuality( model='this_model_id', chi_square_reduced='this_chi_square_reduced', dataset_group='this_dataset_group_id', method='this_method', details='this_details') self.assertEqual(f.model, 'this_model_id') self.assertEqual(f.chi_square_reduced, 'this_chi_square_reduced') self.assertEqual(f.dataset_group, 'this_dataset_group_id') self.assertEqual(f.method, 'this_method') self.assertEqual(f.details, 'this_details') def test_fret_model_quality_eq(self): """ Test equality and inequality of FRETModelQuality objects. """ f_ref = ihm.flr.FRETModelQuality( model='this_model_id', chi_square_reduced='this_chi_square_reduced', dataset_group='this_dataset_group_id', method='this_method', details='this_details') f_equal = ihm.flr.FRETModelQuality( model='this_model_id', chi_square_reduced='this_chi_square_reduced', dataset_group='this_dataset_group_id', method='this_method', details='this_details') f_unequal = ihm.flr.FRETModelQuality( model='foo', chi_square_reduced='this_chi_square_reduced', dataset_group='this_dataset_group_id', method='this_method', details='this_details') self.assertTrue(f_ref == f_equal) self.assertFalse(f_ref == f_unequal) self.assertTrue(f_ref != f_unequal) def test_fret_model_distance_init(self): """Test initialization of FRETModelDistance. Also test the implicit calculation of the distance deviation""" # Initialization with explicit setting of distance deviation f1 = ihm.flr.FRETModelDistance(restraint='foo', model='bar', distance=50, distance_deviation=4.0) self.assertEqual(f1.restraint, 'foo') self.assertEqual(f1.model, 'bar') self.assertEqual(f1.distance, 50) self.assertEqual(f1.distance_deviation, 4.0) # Initialization with calculation of distance deviation class DummyRestraint(): def __init__(self, distance): self.distance = distance f2 = ihm.flr.FRETModelDistance(restraint=DummyRestraint(40), model='bar2', distance=30) self.assertEqual(f2.model, 'bar2') self.assertEqual(f2.distance, 30) self.assertEqual(f2.distance_deviation, 10.0) def test_fret_model_distance_calculate_deviation(self): """Test FRETModelDistance.calculate_deviation()""" class DummyRestraint(): def __init__(self, distance): self.distance = distance f1 = ihm.flr.FRETModelDistance(restraint=DummyRestraint(40), model='foo', distance=30) self.assertEqual(f1.distance_deviation, 10.0) # Directly changing the distance should not change the deviation f1.distance = 25 self.assertEqual(f1.distance_deviation, 10.0) # Calculation of the distance deviation should update the deviation f1.update_deviation() self.assertEqual(f1.distance_deviation, 15.0) # update_deviation() is a noop if restraint is None f2 = ihm.flr.FRETModelDistance(restraint=None, model='foo', distance=30) f2.update_deviation() self.assertIsNone(f2.distance_deviation) def test_fret_model_distance_eq(self): """ Test equality and inequality of FRETModelDistance objects. """ f_ref = ihm.flr.FRETModelDistance(restraint='foo', model='bar', distance=50, distance_deviation=4.0) f_equal = ihm.flr.FRETModelDistance(restraint='foo', model='bar', distance=50, distance_deviation=4.0) f_unequal = ihm.flr.FRETModelDistance(restraint='foo2', model='bar', distance=50, distance_deviation=4.0) self.assertTrue(f_ref == f_equal) self.assertFalse(f_ref == f_unequal) self.assertTrue(f_ref != f_unequal) def test_fps_modeling_init(self): """ Test initialization of FPSModeling. 
""" f = ihm.flr.FPSModeling(protocol='foo', restraint_group='bar', global_parameter='foo2', probe_modeling_method='foo3', details='bar2') self.assertEqual(f.protocol, 'foo') self.assertEqual(f.restraint_group, 'bar') self.assertEqual(f.global_parameter, 'foo2') self.assertEqual(f.probe_modeling_method, 'foo3') self.assertEqual(f.details, 'bar2') def test_fps_modeling_eq(self): """Test equality and inequality of FPSModeling objects.""" f_ref = ihm.flr.FPSModeling(protocol='foo', restraint_group='bar', global_parameter='foo2', probe_modeling_method='foo3', details='bar2') f_equal = ihm.flr.FPSModeling(protocol='foo', restraint_group='bar', global_parameter='foo2', probe_modeling_method='foo3', details='bar2') f_unequal = ihm.flr.FPSModeling(protocol='foo', restraint_group='foobar', global_parameter='foo2', probe_modeling_method='foo3', details='bar2') self.assertTrue(f_ref == f_equal) self.assertFalse(f_ref == f_unequal) self.assertTrue(f_ref != f_unequal) def test_fps_global_parameters_init(self): """Test initialization of FPSGlobalParameters.""" f = ihm.flr.FPSGlobalParameters( forster_radius='this_forster_radius', conversion_function_polynom_order='this_cfpo', repetition='this_repetition', av_grid_rel='this_AV_grid_rel', av_min_grid_a='this_AV_min_grid_A', av_allowed_sphere='this_AV_allowed_sphere', av_search_nodes='this_AV_search_nodes', av_e_samples_k='this_AV_E_samples_k', sim_viscosity_adjustment='this_sim_viscosity_adjustment', sim_dt_adjustment='this_sim_dt_adjustment', sim_max_iter_k='this_sim_max_iter_k', sim_max_force='this_sim_max_force', sim_clash_tolerance_a='this_sim_clash_tolerance_A', sim_reciprocal_kt='this_sim_reciprocal_kT', sim_clash_potential='this_sim_clash_potential', convergence_e='this_convergence_E', convergence_k='this_convergence_K', convergence_f='this_convergence_F', convergence_t='this_convergence_T', optimized_distances='this_optimized_distances') self.assertEqual(f.forster_radius, 'this_forster_radius') self.assertEqual(f.conversion_function_polynom_order, 'this_cfpo') self.assertEqual(f.repetition, 'this_repetition') self.assertEqual(f.av_grid_rel, 'this_AV_grid_rel') self.assertEqual(f.av_min_grid_a, 'this_AV_min_grid_A') self.assertEqual(f.av_allowed_sphere, 'this_AV_allowed_sphere') self.assertEqual(f.av_search_nodes, 'this_AV_search_nodes') self.assertEqual(f.av_e_samples_k, 'this_AV_E_samples_k') self.assertEqual(f.sim_viscosity_adjustment, 'this_sim_viscosity_adjustment') self.assertEqual(f.sim_dt_adjustment, 'this_sim_dt_adjustment') self.assertEqual(f.sim_max_iter_k, 'this_sim_max_iter_k') self.assertEqual(f.sim_max_force, 'this_sim_max_force') self.assertEqual(f.sim_clash_tolerance_a, 'this_sim_clash_tolerance_A') self.assertEqual(f.sim_reciprocal_kt, 'this_sim_reciprocal_kT') self.assertEqual(f.sim_clash_potential, 'this_sim_clash_potential') self.assertEqual(f.convergence_e, 'this_convergence_E') self.assertEqual(f.convergence_k, 'this_convergence_K') self.assertEqual(f.convergence_f, 'this_convergence_F') self.assertEqual(f.convergence_t, 'this_convergence_T') self.assertEqual(f.optimized_distances, 'this_optimized_distances') def test_fps_global_parameters_eq(self): """Test equality and inequality of FPSGlobalParameters objects.""" f_ref = ihm.flr.FPSGlobalParameters( forster_radius='this_forster_radius', conversion_function_polynom_order='this_cfpo', repetition='this_repetition', av_grid_rel='this_AV_grid_rel', av_min_grid_a='this_AV_min_grid_A', av_allowed_sphere='this_AV_allowed_sphere', av_search_nodes='this_AV_search_nodes', 
av_e_samples_k='this_AV_E_samples_k', sim_viscosity_adjustment='this_sim_viscosity_adjustment', sim_dt_adjustment='this_sim_dt_adjustment', sim_max_iter_k='this_sim_max_iter_k', sim_max_force='this_sim_max_force', sim_clash_tolerance_a='this_sim_clash_tolerance_A', sim_reciprocal_kt='this_sim_reciprocal_kT', sim_clash_potential='this_sim_clash_potential', convergence_e='this_convergence_E', convergence_k='this_convergence_K', convergence_f='this_convergence_F', convergence_t='this_convergence_T', optimized_distances='this_optimized_distances') f_equal = ihm.flr.FPSGlobalParameters( forster_radius='this_forster_radius', conversion_function_polynom_order='this_cfpo', repetition='this_repetition', av_grid_rel='this_AV_grid_rel', av_min_grid_a='this_AV_min_grid_A', av_allowed_sphere='this_AV_allowed_sphere', av_search_nodes='this_AV_search_nodes', av_e_samples_k='this_AV_E_samples_k', sim_viscosity_adjustment='this_sim_viscosity_adjustment', sim_dt_adjustment='this_sim_dt_adjustment', sim_max_iter_k='this_sim_max_iter_k', sim_max_force='this_sim_max_force', sim_clash_tolerance_a='this_sim_clash_tolerance_A', sim_reciprocal_kt='this_sim_reciprocal_kT', sim_clash_potential='this_sim_clash_potential', convergence_e='this_convergence_E', convergence_k='this_convergence_K', convergence_f='this_convergence_F', convergence_t='this_convergence_T', optimized_distances='this_optimized_distances') f_unequal = ihm.flr.FPSGlobalParameters( forster_radius='foo', conversion_function_polynom_order='this_cfpo', repetition='this_repetition', av_grid_rel='this_AV_grid_rel', av_min_grid_a='this_AV_min_grid_A', av_allowed_sphere='this_AV_allowed_sphere', av_search_nodes='this_AV_search_nodes', av_e_samples_k='this_AV_E_samples_k', sim_viscosity_adjustment='this_sim_viscosity_adjustment', sim_dt_adjustment='this_sim_dt_adjustment', sim_max_iter_k='this_sim_max_iter_k', sim_max_force='this_sim_max_force', sim_clash_tolerance_a='this_sim_clash_tolerance_A', sim_reciprocal_kt='this_sim_reciprocal_kT', sim_clash_potential='this_sim_clash_potential', convergence_e='this_convergence_E', convergence_k='this_convergence_K', convergence_f='this_convergence_F', convergence_t='this_convergence_T', optimized_distances='this_optimized_distances') self.assertTrue(f_ref == f_equal) self.assertFalse(f_ref == f_unequal) self.assertTrue(f_ref != f_unequal) def test_fps_av_modeling_init(self): """ Test initialization of FPSAVModeling. 
""" f = ihm.flr.FPSAVModeling(fps_modeling='foo', sample_probe='bar', parameter='foobar') self.assertEqual(f.fps_modeling, 'foo') self.assertEqual(f.sample_probe, 'bar') self.assertEqual(f.parameter, 'foobar') def test_fps_av_modeling_eq(self): """Test equality and inequality of FPSAVModeling objects.""" f_ref = ihm.flr.FPSAVModeling(fps_modeling='foo', sample_probe='bar', parameter='foobar') f_equal = ihm.flr.FPSAVModeling(fps_modeling='foo', sample_probe='bar', parameter='foobar') f_unequal = ihm.flr.FPSAVModeling(fps_modeling='foo', sample_probe='bar2', parameter='foobar') self.assertTrue(f_ref == f_equal) self.assertFalse(f_ref == f_unequal) self.assertTrue(f_ref != f_unequal) def test_fps_av_parameter_init(self): """Test initialization of FPSAVParameter.""" # Initialization with AV1 f1 = ihm.flr.FPSAVParameter(num_linker_atoms='this_num_linker_atoms_1', linker_length='this_linker_length_1', linker_width='this_linker_width_1', probe_radius_1='this_probe_radius_1_1') self.assertEqual(f1.num_linker_atoms, 'this_num_linker_atoms_1') self.assertEqual(f1.linker_length, 'this_linker_length_1') self.assertEqual(f1.linker_width, 'this_linker_width_1') self.assertEqual(f1.probe_radius_1, 'this_probe_radius_1_1') self.assertIsNone(f1.probe_radius_2) self.assertIsNone(f1.probe_radius_3) # Initialization with AV3 f2 = ihm.flr.FPSAVParameter(num_linker_atoms='this_num_linker_atoms_2', linker_length='this_linker_length_2', linker_width='this_linker_width_2', probe_radius_1='this_probe_radius_1_2', probe_radius_2='this_probe_radius_2_2', probe_radius_3='this_probe_radius_3_2') self.assertEqual(f2.num_linker_atoms, 'this_num_linker_atoms_2') self.assertEqual(f2.linker_length, 'this_linker_length_2') self.assertEqual(f2.linker_width, 'this_linker_width_2') self.assertEqual(f2.probe_radius_1, 'this_probe_radius_1_2') self.assertEqual(f2.probe_radius_2, 'this_probe_radius_2_2') self.assertEqual(f2.probe_radius_3, 'this_probe_radius_3_2') def test_fps_av_parameter_eq(self): """ Test equality and inequality of FPSAVParameter objects. 
""" f_ref = ihm.flr.FPSAVParameter( num_linker_atoms='this_num_linker_atoms_1', linker_length='this_linker_length_1', linker_width='this_linker_width_1', probe_radius_1='this_probe_radius_1_1') f_equal = ihm.flr.FPSAVParameter( num_linker_atoms='this_num_linker_atoms_1', linker_length='this_linker_length_1', linker_width='this_linker_width_1', probe_radius_1='this_probe_radius_1_1') f_unequal = ihm.flr.FPSAVParameter( num_linker_atoms='this_num_linker_atoms_1', linker_length='foo', linker_width='this_linker_width_1', probe_radius_1='this_probe_radius_1_1') self.assertTrue(f_ref == f_equal) self.assertFalse(f_ref == f_unequal) self.assertTrue(f_ref != f_unequal) def test_fps_mpp_modeling_init(self): """Test initialization of FPSMPPModeling.""" f = ihm.flr.FPSMPPModeling(fps_modeling='foo', mpp='bar', mpp_atom_position_group='foobar') self.assertEqual(f.fps_modeling, 'foo') self.assertEqual(f.mpp, 'bar') self.assertEqual(f.mpp_atom_position_group, 'foobar') def test_fps_mpp_modeling_eq(self): """Test equality and inequality of FPSMPPModeling objects.""" f_ref = ihm.flr.FPSMPPModeling(fps_modeling='foo', mpp='bar', mpp_atom_position_group='foobar') f_equal = ihm.flr.FPSMPPModeling(fps_modeling='foo', mpp='bar', mpp_atom_position_group='foobar') f_unequal = ihm.flr.FPSMPPModeling(fps_modeling='foo2', mpp='bar', mpp_atom_position_group='foobar') self.assertTrue(f_ref == f_equal) self.assertFalse(f_ref == f_unequal) self.assertTrue(f_ref != f_unequal) def test_fps_mean_probe_position_init(self): """Test initialization of FPSMeanProbePosition.""" f = ihm.flr.FPSMeanProbePosition(sample_probe='foo', x='bar', y='bar2', z='bar3') self.assertEqual(f.sample_probe, 'foo') self.assertEqual(f.x, 'bar') self.assertEqual(f.y, 'bar2') self.assertEqual(f.z, 'bar3') def test_fps_mean_probe_position_eq(self): """Test equality and inequality of FPSMeanProbePosition objects.""" f_ref = ihm.flr.FPSMeanProbePosition(sample_probe='foo', x='bar', y='bar2', z='bar3') f_equal = ihm.flr.FPSMeanProbePosition(sample_probe='foo', x='bar', y='bar2', z='bar3') f_unequal = ihm.flr.FPSMeanProbePosition( sample_probe='foobar', x='bar', y='bar2', z='bar3') self.assertTrue(f_ref == f_equal) self.assertFalse(f_ref == f_unequal) self.assertTrue(f_ref != f_unequal) def test_fps_mpp_atom_position_group_init(self): """ Test initialization of FPSMPPAtomPositionGroup. """ f = ihm.flr.FPSMPPAtomPositionGroup() self.assertEqual(f.mpp_atom_position_list, []) def test_fps_mpp_atom_position_group_add_atom_position(self): """ Test addition of an atom position to FPSMPPAtomPositionGroup. 
""" f = ihm.flr.FPSMPPAtomPositionGroup() f.add_atom_position('foo') f.add_atom_position('bar') self.assertEqual(f.mpp_atom_position_list, ['foo', 'bar']) def test_fps_mpp_atom_position_group_eq(self): """Test equality and inequality of FPSMPPAtomPositionGroup objects.""" f_ref = ihm.flr.FPSMPPAtomPositionGroup() f_ref.add_atom_position('foo') f_equal = ihm.flr.FPSMPPAtomPositionGroup() f_equal.add_atom_position('foo') f_unequal = ihm.flr.FPSMPPAtomPositionGroup() f_unequal.add_atom_position('bar') self.assertTrue(f_ref == f_equal) self.assertFalse(f_ref == f_unequal) self.assertTrue(f_ref != f_unequal) def test_fps_mpp_atom_position_init(self): """Test initialization of FPSMPPAtomPosition.""" f = ihm.flr.FPSMPPAtomPosition(atom='this_atom_id', x='this_xcoord', y='this_ycoord', z='this_zcoord') self.assertEqual(f.atom, 'this_atom_id') self.assertEqual(f.x, 'this_xcoord') self.assertEqual(f.y, 'this_ycoord') self.assertEqual(f.z, 'this_zcoord') def test_fps_mpp_atom_position_eq(self): """Test equality and inequality of FPSMPPAtomPosition objects.""" f_ref = ihm.flr.FPSMPPAtomPosition(atom='this_atom_id', x='this_xcoord', y='this_ycoord', z='this_zcoord') f_equal = ihm.flr.FPSMPPAtomPosition(atom='this_atom_id', x='this_xcoord', y='this_ycoord', z='this_zcoord') f_unequal = ihm.flr.FPSMPPAtomPosition( atom='other_atom_id', x='this_xcoord', y='this_ycoord', z='this_zcoord') self.assertTrue(f_ref == f_equal) self.assertFalse(f_ref == f_unequal) self.assertTrue(f_ref != f_unequal) def test_kinetic_rate_fret_analysis_connection_init(self): """Test initialization of KineticRateFretAnalysisConnection objects.""" c = ihm.flr.KineticRateFretAnalysisConnection(fret_analysis='f', kinetic_rate='k', details='d') self.assertEqual(c.fret_analysis, 'f') self.assertEqual(c.kinetic_rate, 'k') self.assertEqual(c.details, 'd') def test_kinetic_rate_fret_analysis_connection_eq(self): """Test equality of KineticRateFretAnalysisConnection objects.""" class MockObject: pass f1 = MockObject() f1._id = '1' f1.name = 'fret_analysis1' f2 = MockObject() f2._id = '2' f2.name = 'fret_analysis2' k1 = MockObject() k1._id = '11' k1.name = 'kinetic_rate1' k2 = MockObject() k2._id = '12' k2.name = 'kinetic_rate2' c_ref = ihm.flr.KineticRateFretAnalysisConnection( fret_analysis=f1, kinetic_rate=k1, details='details1') c_equal = ihm.flr.KineticRateFretAnalysisConnection( fret_analysis=f1, kinetic_rate=k1, details='details1') c_unequal = ihm.flr.KineticRateFretAnalysisConnection( fret_analysis=f2, kinetic_rate=k2, details='details2') self.assertTrue(c_ref == c_equal) self.assertFalse(c_ref == c_unequal) self.assertTrue(c_ref != c_unequal) def test_relaxation_time_fret_analysis_connection_init(self): """Test initialization of RelaxationTimeFretAnalysisConnection objects.""" c = ihm.flr.RelaxationTimeFretAnalysisConnection( fret_analysis='f', relaxation_time='r', details='d') self.assertEqual(c.fret_analysis, 'f') self.assertEqual(c.relaxation_time, 'r') self.assertEqual(c.details, 'd') def test_relaxation_time_fret_analysis_connection_eq(self): """Test equality of RelaxationTimeFretAnalysisConnection objects.""" class MockObject: pass f1 = MockObject() f1._id = '1' f1.name = 'fret_analysis1' f2 = MockObject() f2._id = '2' f2.name = 'fret_analysis2' r1 = MockObject() r1._id = '11' r1.name = 'relaxation_time1' r2 = MockObject() r2._id = '12' r2.name = 'relaxation_time2' c_ref = ihm.flr.RelaxationTimeFretAnalysisConnection( fret_analysis=f1, relaxation_time=r1, details='details1') c_equal = 
ihm.flr.RelaxationTimeFretAnalysisConnection( fret_analysis=f1, relaxation_time=r1, details='details1') c_unequal = ihm.flr.RelaxationTimeFretAnalysisConnection( fret_analysis=f2, relaxation_time=r2, details='details2') self.assertTrue(c_ref == c_equal) self.assertFalse(c_ref == c_unequal) self.assertTrue(c_ref != c_unequal) def test_flr_data_init(self): """ Test initialization of FLRData. """ f = ihm.flr.FLRData() self.assertEqual(f.distance_restraint_groups, []) self.assertEqual(f.poly_probe_conjugates, []) self.assertEqual(f.fret_model_qualities, []) self.assertEqual(f.fret_model_distances, []) self.assertEqual(f.fps_modeling, []) self.assertEqual(f.kinetic_rate_fret_analysis_connections, []) def test_flr_data_add_distance_restraint_group(self): """Test addition of a distance restraint group.""" f = ihm.flr.FLRData() f.distance_restraint_groups.append('foo') f.distance_restraint_groups.append('bar') self.assertEqual(f.distance_restraint_groups, ['foo', 'bar']) def test_flr_data_add_poly_probe_conjugate(self): """Test addition of a poly_probe_conjugate.""" f = ihm.flr.FLRData() f.poly_probe_conjugates.extend(('foo', 'bar')) self.assertEqual(f.poly_probe_conjugates, ['foo', 'bar']) def test_flr_data_add_fret_model_quality(self): """Test addition of a fret_model_quality.""" f = ihm.flr.FLRData() f.fret_model_qualities.extend(('foo', 'bar')) self.assertEqual(f.fret_model_qualities, ['foo', 'bar']) def test_flr_data_add_fret_model_distance(self): """Test addition of a fret_model_distance.""" f = ihm.flr.FLRData() f.fret_model_distances.append('foo') f.fret_model_distances.append('bar') self.assertEqual(f.fret_model_distances, ['foo', 'bar']) def test_flr_data_add_flr_fps_modeling(self): """Test addition of flr_FPS_modeling.""" f = ihm.flr.FLRData() f.fps_modeling.append('foo') f.fps_modeling.append('bar') self.assertEqual(f.fps_modeling, ['foo', 'bar']) def test_flr_data_add_kinetic_rate_fret_analysis_connection(self): """Test addition of object to kinetic_rate_fret_analysis_connections.""" f = ihm.flr.FLRData() f.kinetic_rate_fret_analysis_connections.append('foo') f.kinetic_rate_fret_analysis_connections.append('bar') self.assertEqual(f.kinetic_rate_fret_analysis_connections, ['foo', 'bar']) def test_flr_data_add_relaxation_time_fret_analysis_connection(self): """Test addition of object to relaxation_time_fret_analysis_connections.""" f = ihm.flr.FLRData() f.relaxation_time_fret_analysis_connections.append('foo') f.relaxation_time_fret_analysis_connections.append('bar') self.assertEqual(f.relaxation_time_fret_analysis_connections, ['foo', 'bar']) def test_flr_data_all_chemical_descriptors(self): """Test for collection of all chemical descriptors.""" f = ihm.flr.FLRData() # Define probe descriptors this_probe_descriptor_1 = ihm.flr.ProbeDescriptor( reactive_probe_chem_descriptor='This_reactive_probe_desc_1', chromophore_chem_descriptor='This_chromophore_desc_1') this_probe_descriptor_2 = ihm.flr.ProbeDescriptor( reactive_probe_chem_descriptor='This_reactive_probe_desc_2', chromophore_chem_descriptor='This_chromophore_desc_2') this_probe_descriptor_3 = ihm.flr.ProbeDescriptor( reactive_probe_chem_descriptor='This_reactive_probe_desc_3', chromophore_chem_descriptor='This_chromophore_desc_3') this_probe_descriptor_4 = ihm.flr.ProbeDescriptor( reactive_probe_chem_descriptor='Ref_reactive_probe_desc', chromophore_chem_descriptor='Ref_chromophore_desc') # Define probes this_probe_1 = ihm.flr.Probe(probe_list_entry='foo', probe_descriptor=this_probe_descriptor_1) this_probe_2 = 
ihm.flr.Probe(probe_list_entry='foo', probe_descriptor=this_probe_descriptor_2) this_probe_3 = ihm.flr.Probe(probe_list_entry='foo', probe_descriptor=this_probe_descriptor_3) this_probe_4 = ihm.flr.Probe(probe_list_entry='foo', probe_descriptor=this_probe_descriptor_4) # Define poly probe positions this_poly_probe_position_1 = ihm.flr.PolyProbePosition( resatom='foo', mutation_flag=True, mutated_chem_comp_id='Mutated_Chem_comp_id_1', modification_flag=False) this_poly_probe_position_2 = ihm.flr.PolyProbePosition( resatom='foo', mutation_flag=False, modification_flag=True, modified_chem_descriptor='Modified_Chem_descriptor_1') this_poly_probe_position_3 = ihm.flr.PolyProbePosition( resatom='foo', mutation_flag=False, modification_flag=False) this_poly_probe_position_4 = ihm.flr.PolyProbePosition( resatom='foo', mutation_flag=True, mutated_chem_comp_id='Mutated_Chem_comp_id_2') this_poly_probe_position_5 = ihm.flr.PolyProbePosition( resatom='foo', modification_flag=True, modified_chem_descriptor='Modified_Chem_descriptor_2') this_sample_probe_1 = ihm.flr.SampleProbeDetails( sample='foo', probe=this_probe_1, fluorophore_type='donor', poly_probe_position=this_poly_probe_position_1) this_sample_probe_2 = ihm.flr.SampleProbeDetails( sample='foo2', probe=this_probe_2, fluorophore_type='donor', poly_probe_position=this_poly_probe_position_2) this_sample_probe_3 = ihm.flr.SampleProbeDetails( sample='foo3', probe=this_probe_3, fluorophore_type='donor', poly_probe_position=this_poly_probe_position_3) this_sample_probe_4 = ihm.flr.SampleProbeDetails( sample='foo4', probe=this_probe_4, fluorophore_type='donor', poly_probe_position=this_poly_probe_position_4) this_sample_probe_5 = ihm.flr.SampleProbeDetails( sample='foo5', probe=this_probe_4, fluorophore_type='donor', poly_probe_position=this_poly_probe_position_5) this_reference_measurement_1 = ihm.flr.RefMeasurement( ref_sample_probe=this_sample_probe_4) this_reference_measurement_2 = ihm.flr.RefMeasurement( ref_sample_probe=this_sample_probe_5) this_reference_measurement_group_1 = ihm.flr.RefMeasurementGroup() this_reference_measurement_group_1.add_ref_measurement( this_reference_measurement_1) this_reference_measurement_group_1.add_ref_measurement( this_reference_measurement_2) this_analysis_1 = ihm.flr.FRETAnalysis( experiment='foo', sample_probe_1=this_sample_probe_1, sample_probe_2=this_sample_probe_2, forster_radius='foo', type='lifetime-based', ref_measurement_group=this_reference_measurement_group_1) this_analysis_2 = ihm.flr.FRETAnalysis( experiment='bar', sample_probe_1=this_sample_probe_1, sample_probe_2=this_sample_probe_2, forster_radius='bar', type='intensity-based') this_distance_restraint_1 = ihm.flr.FRETDistanceRestraint( sample_probe_1=this_sample_probe_1, sample_probe_2=this_sample_probe_2, analysis=this_analysis_1, distance=50) this_distance_restraint_2 = ihm.flr.FRETDistanceRestraint( sample_probe_1=this_sample_probe_1, sample_probe_2=this_sample_probe_3, analysis=this_analysis_2, distance=50) this_distance_restraint_group = ihm.flr.FRETDistanceRestraintGroup() this_distance_restraint_group.add_distance_restraint( this_distance_restraint_1) this_distance_restraint_group.add_distance_restraint( this_distance_restraint_2) f.distance_restraint_groups.append(this_distance_restraint_group) conj = ihm.flr.PolyProbeConjugate( sample_probe=this_sample_probe_1, chem_descriptor='Conjugate_probe_desc', ambiguous_stoichiometry=False) f.poly_probe_conjugates.append(conj) descs = list(f._all_flr_chemical_descriptors()) self.assertEqual( descs, 
['This_reactive_probe_desc_1', 'This_chromophore_desc_1', 'This_reactive_probe_desc_2', 'This_chromophore_desc_2', 'Modified_Chem_descriptor_1', 'Ref_reactive_probe_desc', 'Ref_chromophore_desc', 'Ref_reactive_probe_desc', 'Ref_chromophore_desc', 'Modified_Chem_descriptor_2', 'This_reactive_probe_desc_1', 'This_chromophore_desc_1', 'This_reactive_probe_desc_3', 'This_chromophore_desc_3', 'Conjugate_probe_desc']) if __name__ == '__main__': unittest.main() python-ihm-2.7/test/test_format.py000066400000000000000000001523661503573337200173630ustar00rootroot00000000000000import utils import os import unittest import sys try: import numpy except ImportError: numpy = None from io import StringIO TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) utils.set_search_paths(TOPDIR) import ihm.format import ihm.dumper try: from ihm import _format except ImportError: _format = None class GenericHandler: """Capture mmCIF data as a simple list of dicts""" not_in_file = None omitted = None unknown = ihm.unknown _keys = ('method', 'foo', 'bar', 'baz', 'pdbx_keywords', 'var1', 'var2', 'var3', 'intkey1', 'intkey2', 'floatkey1', 'floatkey2', 'boolkey1') _int_keys = frozenset(('intkey1', 'intkey2')) _float_keys = frozenset(('floatkey1', 'floatkey2')) _bool_keys = frozenset(('boolkey1',)) def __init__(self): self.data = [] def __call__(self, *args): d = {} for k, v in zip(self._keys, args): if v is not None: d[k] = v self.data.append(d) def end_save_frame(self): self.data.append('SAVE') class _TestFinalizeHandler(GenericHandler): if _format is not None: _add_c_handler = _format._test_finalize_callback class StringWriter: def __init__(self): self.fh = StringIO() def _repr(self, val): return repr(val) def getvalue(self): return self.fh.getvalue() class Tests(unittest.TestCase): def test_line_writer_wrap(self): """Test LineWriter class line wrap""" writer = StringWriter() lw = ihm.format._LineWriter(writer, line_len=15) lw.write("foo") self.assertEqual(writer.getvalue(), "'foo'") lw.write("bar") self.assertEqual(writer.getvalue(), "'foo' 'bar'") lw.write("baz") self.assertEqual(writer.getvalue(), "'foo' 'bar'\n'baz'") def test_line_writer_multiline(self): """Test LineWriter class given a multiline string""" writer = StringWriter() lw = ihm.format._LineWriter(writer, line_len=15) lw.write("foo\nbar\nbaz") self.assertEqual(writer.getvalue(), "\n;foo\nbar\nbaz\n;\n") def test_line_writer_multiline_nl_term(self): """Test LineWriter class given a newline-terminated multiline string""" writer = StringWriter() lw = ihm.format._LineWriter(writer, line_len=15) lw.write("foo\nbar\nbaz\n") self.assertEqual(writer.getvalue(), "\n;foo\nbar\nbaz\n;\n") def test_category(self): """Test CategoryWriter class""" fh = StringIO() writer = ihm.format.CifWriter(fh) with writer.category('foo') as loc: loc.write(bar='baz') self.assertEqual(fh.getvalue(), "foo.bar baz\n") def test_category_none(self): """Test CategoryWriter class with value=None""" fh = StringIO() writer = ihm.format.CifWriter(fh) with writer.category('foo') as loc: loc.write(bar=None) self.assertEqual(fh.getvalue(), "foo.bar .\n") def test_category_literal_dot(self): """Test CategoryWriter class with literal value=.""" fh = StringIO() writer = ihm.format.CifWriter(fh) with writer.category('foo') as loc: loc.write(bar='.') self.assertEqual(fh.getvalue(), "foo.bar '.'\n") def test_category_unknown(self): """Test CategoryWriter class with value=unknown""" fh = StringIO() writer = ihm.format.CifWriter(fh) with writer.category('foo') as loc: 
loc.write(bar=ihm.unknown) self.assertEqual(fh.getvalue(), "foo.bar ?\n") def test_category_literal_question(self): """Test CategoryWriter class with literal value=?""" fh = StringIO() writer = ihm.format.CifWriter(fh) with writer.category('foo') as loc: loc.write(bar='?') self.assertEqual(fh.getvalue(), "foo.bar '?'\n") def test_category_multiline(self): """Test CategoryWriter class with multiline value""" fh = StringIO() writer = ihm.format.CifWriter(fh) with writer.category('foo') as loc: loc.write(bar='line1\nline2') self.assertEqual(fh.getvalue(), "foo.bar\n;line1\nline2\n;\n") def test_empty_loop(self): """Test LoopWriter class with no values""" fh = StringIO() writer = ihm.format.CifWriter(fh) with writer.loop('foo', ["bar", "baz"]): pass self.assertEqual(fh.getvalue(), "") def test_loop(self): """Test LoopWriter class""" fh = StringIO() writer = ihm.format.CifWriter(fh) with writer.loop('foo', ["bar", "baz"]) as loc: loc.write(bar='x') loc.write(bar=None, baz='z') loc.write(baz='y') loc.write(bar=ihm.unknown, baz='z') loc.write(bar="?", baz=".") self.assertEqual(fh.getvalue(), """# loop_ foo.bar foo.baz x . . z . y ? z '?' '.' # """) def test_loop_special_chars(self): """Test LoopWriter class with keys containing special characters""" fh = StringIO() writer = ihm.format.CifWriter(fh) with writer.loop('foo', ["matrix[1][1]"]) as loc: loc.write(matrix11='x') self.assertEqual(fh.getvalue(), """# loop_ foo.matrix[1][1] x # """) def test_write_comment(self): """Test CifWriter.write_comment()""" fh = StringIO() writer = ihm.format.CifWriter(fh) writer.write_comment('X' * 85) self.assertEqual(fh.getvalue(), "# " + "X" * 78 + '\n# ' + "X" * 7 + '\n') def test_write_comment_unwrapped(self): """Test CifWriter.write_comment() with line wrapping disabled""" fh = StringIO() try: ihm.format.CifWriter._set_line_wrap(False) writer = ihm.format.CifWriter(fh) writer.write_comment('X' * 85) finally: ihm.format.CifWriter._set_line_wrap(True) self.assertEqual(fh.getvalue(), "# " + "X" * 85 + '\n') def test_repr(self): """Test CifWriter._repr()""" w = ihm.format.CifWriter(None) self.assertEqual(w._repr('foo'), 'foo') self.assertEqual(w._repr('fo"o'), "'fo\"o'") self.assertEqual(w._repr("fo'o"), '"fo\'o"') self.assertEqual(w._repr('foo bar'), "'foo bar'") self.assertEqual(w._repr(42.123456), '42.123') self.assertEqual(w._repr(0.000123456), '0.000123') self.assertEqual(w._repr(0.00000123456), '1.23e-06') self.assertEqual(w._repr(False), 'NO') self.assertEqual(w._repr(True), 'YES') # data_ should be quoted to distinguish from data blocks self.assertEqual(w._repr('data_foo'), "'data_foo'") self.assertEqual(w._repr('data_'), "'data_'") # [ is a reserved character and cannot start a nonquoted string self.assertEqual(w._repr('[foo'), "'[foo'") # _ indicates an identifier and cannot start a nonquoted string self.assertEqual(w._repr('_foo'), "'_foo'") # Empty string must be quoted self.assertEqual(w._repr(""), "''") # Reserved words cannot start a nonquoted string for word in ('save', 'loop', 'stop', 'global'): self.assertEqual(w._repr('%s_foo' % word), "'%s_foo'" % word) self.assertEqual(w._repr('%s_' % word), "'%s_'" % word) # Literal ? must be quoted to distinguish from the unknown value self.assertEqual(w._repr('?foo'), "?foo") self.assertEqual(w._repr('?'), "'?'") # Literal . 
must be quoted to distinguish from the omitted value self.assertEqual(w._repr('.foo'), ".foo") self.assertEqual(w._repr('.'), "'.'") # Make sure that numpy ints are treated like plain ints, # not rendered as "np.int32(42)" or similar if numpy is not None: self.assertEqual(w._repr(numpy.int32(42)), '42') self.assertEqual(w._repr(numpy.int64(42)), '42') def test_reader_base(self): """Test Reader base class""" class _MockHandler: def __call__(self, a, b): pass # Test handler with no _int_keys, _float_keys r = ihm.format._Reader() m = _MockHandler() r.category_handler = {'foo': m} r._add_category_keys() self.assertEqual(m._keys, ['a', 'b']) self.assertEqual(m._int_keys, frozenset()) self.assertEqual(m._float_keys, frozenset()) # Test handler with typos in _int_keys r = ihm.format._Reader() m = _MockHandler() m._int_keys = ['bar'] r.category_handler = {'foo': m} self.assertRaises(ValueError, r._add_category_keys) # Test handler with typos in _float_keys r = ihm.format._Reader() m = _MockHandler() m._float_keys = ['bar'] r.category_handler = {'foo': m} self.assertRaises(ValueError, r._add_category_keys) def test_handler_annotations(self): """Test Reader using Handler annotations""" class _OKHandler: def __call__(self, a: int, b: float, c: bool, d): pass class _BadHandler: def __call__(self, a: set, b, c): pass # Test that handler _int_keys, _float_keys, _bool_keys are filled in r = ihm.format._Reader() m = _OKHandler() r.category_handler = {'foo': m} r._add_category_keys() self.assertEqual(m._keys, ['a', 'b', 'c', 'd']) self.assertEqual(m._int_keys, frozenset(['a'])) self.assertEqual(m._float_keys, frozenset(['b'])) self.assertEqual(m._bool_keys, frozenset(['c'])) # Check handling of unsupported annotations r = ihm.format._Reader() m = _BadHandler() r.category_handler = {'foo': m} self.assertRaises(ValueError, r._add_category_keys) def _check_bad_cif(self, cif, real_file, category_handlers={}): """Ensure that the given bad cif results in a parser error""" if real_file: with utils.temporary_directory() as tmpdir: fname = os.path.join(tmpdir, 'test') with open(fname, 'w') as fh: fh.write(cif) with open(fname) as fh: r = ihm.format.CifReader(fh, category_handlers) self.assertRaises(ihm.format.CifParserError, r.read_file) else: r = ihm.format.CifReader(StringIO(cif), category_handlers) self.assertRaises(ihm.format.CifParserError, r.read_file) def test_comments_start_line_skipped(self): """Make sure that comments at start of line are skipped""" for real_file in (True, False): self._read_cif("# _exptl.method\n# ;foo\n", real_file, {}) def test_comments_mid_line_skipped(self): """Make sure that comments part way through line are skipped""" for real_file in (True, False): h = GenericHandler() self._read_cif('_exptl.method #bar baz\nfoo', real_file, {'_exptl': h}) self.assertEqual(h.data, [{'method': 'foo'}]) def test_missing_semicolon(self): """Make sure that missing semicolon is handled in multiline strings""" for real_file in (True, False): self._check_bad_cif("_exptl.method\n;foo\n", real_file) def test_missing_single_quote(self): """Make sure that missing single quote is handled""" for real_file in (True, False): self._check_bad_cif("_exptl.method 'foo\n", real_file) self._check_bad_cif("_exptl.method\n'foo'bar\n", real_file) self._check_bad_cif("loop_\n_exptl.method\n'foo\n", real_file) def test_missing_double_quote(self): """Make sure that missing double quote is handled""" for real_file in (True, False): self._check_bad_cif('_exptl.method "foo\n', real_file) self._check_bad_cif('_exptl.method 
"foo"bar\n', real_file) self._check_bad_cif('loop_\n_exptl.method\n"foo\n', real_file) def test_nested_loop(self): """Loop constructs cannot be nested""" for real_file in (True, False): self._check_bad_cif('loop_ loop_\n', real_file) def test_malformed_key(self): """Keys must be of the form _abc.xyz""" for real_file in (True, False): self._check_bad_cif('_category\n', real_file) self._check_bad_cif('loop_\n_atom_site\n', real_file) def test_missing_value(self): """Key without a value should be an error""" for real_file in (True, False): h = GenericHandler() # Checks aren't done unless we have a handler for the category self._check_bad_cif('_exptl.method\n', real_file, {'_exptl': h}) def test_loop_mixed_categories(self): """Test bad mmCIF loop with a mix of categories""" for real_file in (True, False): h = GenericHandler() self._check_bad_cif('loop_\n_atom_site.id\n_foo.bar\n', real_file, {'_atom_site': h}) self._check_bad_cif('loop_\n_foo.bar\n_atom_site.id\n', real_file, {'_foo': h}) def _read_cif(self, cif, real_file, category_handlers, unknown_category_handler=None, unknown_keyword_handler=None): if real_file: with utils.temporary_directory() as tmpdir: fname = os.path.join(tmpdir, 'test') with open(fname, 'w') as fh: fh.write(cif) with open(fname) as fh: r = ihm.format.CifReader(fh, category_handlers, unknown_category_handler, unknown_keyword_handler) r.read_file() else: r = ihm.format.CifReader(StringIO(cif), category_handlers, unknown_category_handler, unknown_keyword_handler) r.read_file() def test_category_case_insensitive(self): """Categories and keywords should be case insensitive""" for real_file in (True, False): for cat in ('_exptl.method', '_Exptl.METHod'): h = GenericHandler() self._read_cif(cat + ' foo', real_file, {'_exptl': h}) self.assertEqual(h.data, [{'method': 'foo'}]) def test_duplicated_key(self): """If a key is duplicated, we take the final value""" cif = "_exptl.method foo\n_exptl.method bar\n" for real_file in (True, False): h = GenericHandler() self._read_cif(cif, real_file, {'_exptl': h}) self.assertEqual(h.data, [{'method': 'bar'}]) def test_duplicated_key_omitted(self): """If a key is duplicated, we take the final (omitted) value""" cif = "_exptl.method foo\n_exptl.method .\n" for real_file in (True, False): h = GenericHandler() h.omitted = 'OMIT' self._read_cif(cif, real_file, {'_exptl': h}) self.assertEqual(h.data, [{'method': 'OMIT'}]) def test_duplicated_key_unknown(self): """If a key is duplicated, we take the final (unknown) value""" cif = "_exptl.method foo\n_exptl.method ?\n" for real_file in (True, False): h = GenericHandler() self._read_cif(cif, real_file, {'_exptl': h}) self.assertEqual(h.data, [{'method': ihm.unknown}]) def test_save_frames(self): """Category handlers should be called for each save frame""" cif = """ save_foo _exptl.method foo save_ save_bar _exptl.method bar save_ """ for real_file in (True, False): h = GenericHandler() self._read_cif(cif, real_file, {'_exptl': h}) self.assertEqual(h.data, [{'method': 'foo'}, 'SAVE', {'method': 'bar'}, 'SAVE']) def test_omitted_ignored(self): """CIF omitted value ('.') should be ignored""" for real_file in (True, False): h = GenericHandler() # Omitted value is a literal . 
- anything else (quoted, or # a longer string) should be reported as a string self._read_cif("_foo.bar .1\n_foo.baz .\n" "_foo.var1 '.'\n_foo.var2 \".\"\n", real_file, {'_foo': h}) self.assertEqual(h.data, [{'bar': '.1', 'var1': '.', 'var2': '.'}]) h = GenericHandler() self._read_cif("loop_\n_foo.bar\n_foo.baz\n_foo.var1\n_foo.var2\n" ".1 . '.' \".\"\n", real_file, {'_foo': h}) self.assertEqual(h.data, [{'bar': '.1', 'var1': '.', 'var2': '.'}]) def test_omitted_explicit(self): """Check explicit handling of CIF omitted value ('.')""" for real_file in (True, False): h = GenericHandler() h.omitted = 'OMIT' self._read_cif("_foo.bar .1\n_foo.baz .\n" "_foo.var1 '.'\n_foo.var2 \".\"\n", real_file, {'_foo': h}) self.assertEqual(h.data, [{'baz': 'OMIT', 'bar': '.1', 'var1': '.', 'var2': '.'}]) h = GenericHandler() h.omitted = 'OMIT' self._read_cif("loop_\n_foo.bar\n_foo.baz\n_foo.var1\n_foo.var2\n" ".1 . '.' \".\"\n", real_file, {'_foo': h}) self.assertEqual(h.data, [{'baz': 'OMIT', 'bar': '.1', 'var1': '.', 'var2': '.'}]) def test_not_in_file_explicit(self): """Check explicit handling of keywords not in the file""" for real_file in (True, False): h = GenericHandler() h.not_in_file = 'NOT' self._read_cif("_foo.bar .1\n_foo.baz x\n", real_file, {'_foo': h}) self.assertEqual( h.data, [{'var1': 'NOT', 'var3': 'NOT', 'var2': 'NOT', 'pdbx_keywords': 'NOT', 'bar': '.1', 'foo': 'NOT', 'method': 'NOT', 'baz': 'x', 'intkey1': 'NOT', 'intkey2': 'NOT', 'floatkey1': 'NOT', 'floatkey2': 'NOT', 'boolkey1': 'NOT'}]) h = GenericHandler() h.not_in_file = 'NOT' self._read_cif("loop_\n_foo.bar\n_foo.baz\n.1 x\n", real_file, {'_foo': h}) self.assertEqual( h.data, [{'var1': 'NOT', 'var3': 'NOT', 'var2': 'NOT', 'pdbx_keywords': 'NOT', 'bar': '.1', 'foo': 'NOT', 'method': 'NOT', 'baz': 'x', 'intkey1': 'NOT', 'intkey2': 'NOT', 'floatkey1': 'NOT', 'floatkey2': 'NOT', 'boolkey1': 'NOT'}]) def test_loop_linebreak(self): """Make sure that linebreaks are ignored in loop data""" for real_file in (True, False): h = GenericHandler() self._read_cif("loop_\n_foo.bar\n_foo.baz\n1\n2\n", real_file, {'_foo': h}) self.assertEqual(h.data, [{'bar': '1', 'baz': '2'}]) def test_keyword_free(self): """Make sure keyword data is cleaned up""" for real_file in (True, False): h = GenericHandler() # The unterminated single quote will cause an exception so # the _exptl category is never handled, so the C parser relies # on ihm_keyword_free to free the memory self._check_bad_cif("_exptl.method foo\n'", real_file, {'_exptl': h}) def test_unknown(self): """CIF unknown value ('?') should be reported as-is""" for real_file in (True, False): h = GenericHandler() # Unknown value is a literal ? - anything else (quoted, or # a longer string) should be reported as a string self._read_cif("_foo.bar ?1\n_foo.baz ?\n" "_foo.var1 '?'\n_foo.var2 \"?\"\n", real_file, {'_foo': h}) self.assertEqual(h.data, [{'bar': '?1', 'baz': ihm.unknown, 'var1': '?', 'var2': '?'}]) h = GenericHandler() self._read_cif("loop_\n_foo.bar\n_foo.baz\n_foo.var1\n_foo.var2\n" "?1 ? '?' 
\"?\"\n", real_file, {'_foo': h}) self.assertEqual(h.data, [{'bar': '?1', 'baz': ihm.unknown, 'var1': '?', 'var2': '?'}]) def test_unknown_explicit(self): """Check explicit handling of CIF unknown value""" for real_file in (True, False): h = GenericHandler() h.unknown = 'UNK' self._read_cif("_foo.bar ?1\n_foo.baz ?\n" "_foo.var1 '?'\n_foo.var2 \"?\"\n", real_file, {'_foo': h}) self.assertEqual(h.data, [{'bar': '?1', 'baz': 'UNK', 'var1': '?', 'var2': '?'}]) h = GenericHandler() h.unknown = 'UNK' self._read_cif("loop_\n_foo.bar\n_foo.baz\n_foo.var1\n_foo.var2\n" "?1 ? '?' \"?\"\n", real_file, {'_foo': h}) self.assertEqual(h.data, [{'bar': '?1', 'baz': 'UNK', 'var1': '?', 'var2': '?'}]) def test_multiline(self): """Check that multiline strings are handled correctly""" for real_file in (True, False): self._check_bad_cif("_struct_keywords.pdbx_keywords\n" ";COMPLEX \n(HYDROLASE/PEPTIDE)\n", real_file) # multiline in category h = GenericHandler() self._read_cif("_struct_keywords.pdbx_keywords\n" ";COMPLEX \n(HYDROLASE/PEPTIDE)\n;", real_file, {'_struct_keywords': h}) self.assertEqual( h.data, [{'pdbx_keywords': 'COMPLEX \n(HYDROLASE/PEPTIDE)'}]) # multiline in loop h = GenericHandler() self._read_cif("loop_ _struct_keywords.pdbx_keywords\n" "_struct_keywords.foo\n" ";COMPLEX \n(HYDROLASE/PEPTIDE)\n;\nbar\n", real_file, {'_struct_keywords': h}) self.assertEqual( h.data, [{'pdbx_keywords': 'COMPLEX \n(HYDROLASE/PEPTIDE)', 'foo': 'bar'}]) def test_ignored_loop(self): """Check that loops are ignored if they don't have a handler""" for real_file in (True, False): h = GenericHandler() self._read_cif("loop_\n_struct_keywords.pdbx_keywords\nfoo", real_file, {'_atom_site': h}) self.assertEqual(h.data, []) def test_quotes_in_strings(self): """Check that quotes in strings are handled""" for real_file in (True, False): h = GenericHandler() self._read_cif("_struct_keywords.pdbx_keywords 'foo'bar'", real_file, {'_struct_keywords': h}) self.assertEqual(h.data, [{'pdbx_keywords': "foo'bar"}]) h = GenericHandler() self._read_cif('_struct_keywords.pdbx_keywords "foo"bar" ', real_file, {'_struct_keywords': h}) self.assertEqual(h.data, [{'pdbx_keywords': 'foo"bar'}]) def test_wrong_loop_data_num(self): """Check wrong number of loop data elements""" for real_file in (True, False): h = GenericHandler() self._check_bad_cif(""" loop_ _atom_site.x _atom_site.y oneval """, real_file, {'_atom_site': h}) def test_int_keys(self): """Check handling of integer keywords""" for real_file in (True, False): h = GenericHandler() # intkey1, intkey2 should be returned as ints, not strings self._read_cif("_foo.var1 42\n_foo.intkey1 42", real_file, {'_foo': h}) self.assertEqual(h.data, [{'var1': "42", 'intkey1': 42}]) # float cannot be coerced to int self.assertRaises(ValueError, self._read_cif, "_foo.intkey1 42.34", real_file, {'_foo': h}) # string cannot be coerced to int self.assertRaises(ValueError, self._read_cif, "_foo.intkey1 str", real_file, {'_foo': h}) def test_int_keys_loop(self): """Check handling of integer keywords in loop construct""" for real_file in (True, False): h = GenericHandler() self._read_cif("loop_\n_foo.intkey1\n_foo.x\n_foo.bar\n" "42 xval barval", real_file, {'_foo': h}) self.assertEqual(h.data, [{'bar': 'barval', 'intkey1': 42}]) def test_float_keys(self): """Check handling of floating-point keywords""" for real_file in (True, False): h = GenericHandler() # floatkey1, floatkey2 should be returned as floats, not strings self._read_cif("_foo.floatkey1 42.340", real_file, {'_foo': h}) val = 
h.data[0]['floatkey1'] self.assertIsInstance(val, float) self.assertAlmostEqual(val, 42.34, delta=0.01) # int will be coerced to float h = GenericHandler() self._read_cif("_foo.floatkey1 42", real_file, {'_foo': h}) val = h.data[0]['floatkey1'] self.assertIsInstance(val, float) self.assertAlmostEqual(val, 42.0, delta=0.01) # string cannot be coerced to float h = GenericHandler() self.assertRaises(ValueError, self._read_cif, "_foo.floatkey1 str", real_file, {'_foo': h}) def test_float_keys_loop(self): """Check handling of float keywords in loop construct""" for real_file in (True, False): h = GenericHandler() self._read_cif("loop_\n_foo.x\n_foo.bar\n_foo.floatkey1\n" "xval barval 42.34", real_file, {'_foo': h}) val = h.data[0]['floatkey1'] self.assertIsInstance(val, float) self.assertAlmostEqual(val, 42.34, delta=0.01) def test_bool_keys(self): """Check handling of bool keywords""" for real_file in (True, False): h = GenericHandler() # boolkey1 should be returned as bool, not str self._read_cif("_foo.var1 YES\n_foo.boolkey1 YES", real_file, {'_foo': h}) self.assertEqual(h.data, [{'var1': "YES", 'boolkey1': True}]) h = GenericHandler() self._read_cif("_foo.var1 no\n_foo.boolkey1 no", real_file, {'_foo': h}) self.assertEqual(h.data, [{'var1': "no", 'boolkey1': False}]) # Anything else should map to omitted (None, or handler.omitted) for val in ('GARBAGE', '42', '42.34'): h = GenericHandler() self._read_cif("_foo.var1 no\n_foo.boolkey1 %s" % val, real_file, {'_foo': h}) self.assertEqual(h.data, [{'var1': 'no'}]) h = GenericHandler() h.omitted = 'OM' self._read_cif("_foo.var1 no\n_foo.boolkey1 %s" % val, real_file, {'_foo': h}) self.assertEqual(h.data, [{'var1': 'no', 'boolkey1': 'OM'}]) def test_bool_keys_loop(self): """Check handling of bool keywords in loop construct""" for real_file in (True, False): h = GenericHandler() self._read_cif("loop_\n_foo.boolkey1\n_foo.x\n_foo.bar\n" "YES xval barval", real_file, {'_foo': h}) self.assertEqual(h.data, [{'bar': 'barval', 'boolkey1': True}]) def test_first_data_block(self): """Only information from the first data block should be read""" cif = """ _foo.var1 test1 data_model _foo.var2 test2 data_model2 _foo.var3 test3 """ h = GenericHandler() r = ihm.format.CifReader(StringIO(cif), {'_foo': h}) self._check_first_data(r, h) with utils.temporary_directory() as tmpdir: fname = os.path.join(tmpdir, 'test') with open(fname, 'w') as fh: fh.write(cif) with open(fname) as fh: h = GenericHandler() r = ihm.format.CifReader(fh, {'_foo': h}) self._check_first_data(r, h) def _check_first_data(self, r, h): # Read to end of first data block self.assertTrue(r.read_file()) self.assertEqual(h.data, [{'var1': 'test1', 'var2': 'test2'}]) # Read to end of second data block h.data = [] self.assertFalse(r.read_file()) self.assertEqual(h.data, [{'var3': 'test3'}]) # No more data blocks h.data = [] self.assertFalse(r.read_file()) self.assertEqual(h.data, []) def test_eof_after_loop_data(self): """Make sure EOF after loop data is handled""" for real_file in (True, False): h = GenericHandler() self._read_cif(""" loop_ _foo.bar _foo.baz x y # """, real_file, {'_foo': h}) def test_finalize_handler(self): """Make sure that C parser finalize callback works""" for real_file in (True, False): h = _TestFinalizeHandler() self._read_cif("# _exptl.method foo\n", real_file, {'_exptl': h}) @unittest.skipIf(_format is None, "No C tokenizer") def test_file_new_python_no_read_method(self): """Test ihm_file_new_from_python with object with no read method""" self.assertRaises(AttributeError, 
_format.ihm_file_new_from_python, None, False) @unittest.skipIf(_format is None, "No C tokenizer") def test_python_read_exception(self): """Test exception in read callback is handled""" class MyError(Exception): pass class MyFileLike: def read(self, numbytes): raise MyError("some error") fh = MyFileLike() f = _format.ihm_file_new_from_python(fh, False) reader = _format.ihm_reader_new(f, False) self.assertRaises(MyError, _format.ihm_read_file, reader) _format.ihm_reader_free(reader) @unittest.skipIf(_format is None, "No C tokenizer") def test_python_read_not_string(self): """Test that read() returning an invalid type is handled""" class MyFileLike: def read(self, numbytes): return 42 fh = MyFileLike() f = _format.ihm_file_new_from_python(fh, False) reader = _format.ihm_reader_new(f, False) self.assertRaises(ValueError, _format.ihm_read_file, reader) _format.ihm_reader_free(reader) @unittest.skipIf(_format is None, "No C tokenizer") def test_python_read_too_long(self): """Test that read() returning too many bytes is handled""" class MyFileLike: def read(self, numbytes): return " " * (numbytes * 4 + 10) fh = MyFileLike() f = _format.ihm_file_new_from_python(fh, False) reader = _format.ihm_reader_new(f, False) self.assertRaises(ValueError, _format.ihm_read_file, reader) _format.ihm_reader_free(reader) @unittest.skipIf(_format is None, "No C tokenizer") def test_python_read_binary_exception(self): """Test exception in binary read callback is handled""" class MyError(Exception): pass class MyFileLike: def read(self, numbytes): raise MyError("some error") fh = MyFileLike() f = _format.ihm_file_new_from_python(fh, True) reader = _format.ihm_reader_new(f, True) self.assertRaises(MyError, _format.ihm_read_file, reader) _format.ihm_reader_free(reader) @unittest.skipIf(_format is None, "No C tokenizer") def test_python_read_binary_not_string(self): """Test that binary read() returning an invalid type is handled""" class MyFileLike: def read(self, numbytes): return 42 fh = MyFileLike() f = _format.ihm_file_new_from_python(fh, True) reader = _format.ihm_reader_new(f, True) self.assertRaises(ValueError, _format.ihm_read_file, reader) _format.ihm_reader_free(reader) @unittest.skipIf(_format is None, "No C tokenizer") def test_python_read_binary_too_long(self): """Test that binary read() returning too many bytes is handled""" class MyFileLike: def read(self, numbytes): return b" " * (numbytes + 10) fh = MyFileLike() f = _format.ihm_file_new_from_python(fh, True) reader = _format.ihm_reader_new(f, True) self.assertRaises(ValueError, _format.ihm_read_file, reader) _format.ihm_reader_free(reader) @unittest.skipIf(_format is None, "No C tokenizer") def test_python_readinto_exception(self): """Test exception in readinto callback is handled""" class MyError(Exception): pass class MyFileLike: def readinto(self, buffer): raise MyError("some error") fh = MyFileLike() f = _format.ihm_file_new_from_python(fh, True) reader = _format.ihm_reader_new(f, True) self.assertRaises(MyError, _format.ihm_read_file, reader) _format.ihm_reader_free(reader) @unittest.skipIf(_format is None, "No C tokenizer") def test_python_readinto_not_length(self): """Test that readinto() returning an invalid type is handled""" class MyFileLike: def readinto(self, buffer): return "garbage" fh = MyFileLike() f = _format.ihm_file_new_from_python(fh, True) reader = _format.ihm_reader_new(f, True) self.assertRaises(ValueError, _format.ihm_read_file, reader) _format.ihm_reader_free(reader) @unittest.skipIf(_format is None, "No C tokenizer") def 
test_python_readinto_too_long(self): """Test that readinto() returning too many bytes is handled""" class MyFileLike: def readinto(self, buffer): return len(buffer) + 10 fh = MyFileLike() f = _format.ihm_file_new_from_python(fh, True) reader = _format.ihm_reader_new(f, True) self.assertRaises(ValueError, _format.ihm_read_file, reader) _format.ihm_reader_free(reader) @unittest.skipIf(_format is None or sys.platform == 'win32', "No C tokenizer, or Windows") def test_fd_read_failure(self): """Test handling of C read() failure""" f = open('/dev/null') os.close(f.fileno()) # Force read from file descriptor to fail r = ihm.format.CifReader(f, {}) self.assertRaises(IOError, r.read_file) @unittest.skipIf(_format is None, "No C tokenizer") def test_python_read_bytes(self): """Test read() returning bytes (binary file)""" class MyFileLike: def __init__(self): self.calls = 0 def read(self, numbytes): self.calls += 1 if self.calls == 1: return b"_exptl.method foo" else: return b"" h = GenericHandler() r = ihm.format.CifReader(MyFileLike(), {'_exptl': h}) r.read_file() self.assertEqual(h.data, [{'method': 'foo'}]) @unittest.skipIf(_format is None, "No C tokenizer") def test_python_read_unicode(self): """Test read() returning Unicode (text file)""" class MyFileLike: def __init__(self): self.calls = 0 def read(self, numbytes): self.calls += 1 if self.calls == 1: return "_exptl.method foo" else: return "" h = GenericHandler() r = ihm.format.CifReader(MyFileLike(), {'_exptl': h}) r.read_file() self.assertEqual(h.data, [{'method': 'foo'}]) @unittest.skipIf(_format is None, "No C tokenizer") def test_line_endings(self): """Check that C tokenizer works with different line endings""" # todo: the Python tokenizer should handle the same endings for end in ('\n', '\r', '\r\n', '\0'): h = GenericHandler() self._read_cif("_struct_keywords.pdbx_keywords\n" ";COMPLEX %s(HYDROLASE/PEPTIDE)%s;" % (end, end), False, {'_struct_keywords': h}) self.assertEqual( h.data, [{'pdbx_keywords': 'COMPLEX \n(HYDROLASE/PEPTIDE)'}]) def test_unknown_category_ignored(self): """Test that unknown categories are just ignored""" h = GenericHandler() self._read_cif(""" _cat1.foo baz _cat2.bar test # loop_ _foo.bar _foo.baz x y """, False, {'_cat1': h}) self.assertEqual(h.data, [{'foo': 'baz'}]) def test_unknown_category_handled(self): """Test that unknown categories are handled if requested""" class CatHandler: def __init__(self): self.warns = [] def __call__(self, cat, line): self.warns.append((cat, line)) ch = CatHandler() h = GenericHandler() self._read_cif(""" _cat1.foo baz _cat2.bar test # loop_ _foo.bar _foo.baz x y """, False, {'_cat1': h}, unknown_category_handler=ch) self.assertEqual(h.data, [{'foo': 'baz'}]) self.assertEqual(ch.warns, [('_cat2', 3), ('_foo', 6)]) def test_unknown_keyword_ignored(self): """Test that unknown keywords are just ignored""" h = GenericHandler() self._read_cif(""" _cat1.foo baz _cat1.unknown_keyword1 test # loop_ _foo.bar _foo.unknown_keyword2 x y """, False, {'_cat1': h, '_foo': h}) self.assertEqual(h.data, [{'bar': 'x'}, {'foo': 'baz'}]) def test_unknown_keyword_handled(self): """Test that unknown keywords are handled if requested""" class KeyHandler: def __init__(self): self.warns = [] def __call__(self, cat, key, line): self.warns.append((cat, key, line)) kh = KeyHandler() h = GenericHandler() self._read_cif(""" _cat1.foo baz _cat1.unknown_keyword1 test # loop_ _foo.bar _foo.unknown_keyword2 x y """, False, {'_cat1': h, '_foo': h}, unknown_keyword_handler=kh) self.assertEqual(h.data, [{'bar': 'x'}, 
{'foo': 'baz'}]) self.assertEqual(kh.warns, [('_cat1', 'unknown_keyword1', 3), ('_foo', 'unknown_keyword2', 7)]) @unittest.skipIf(_format is None, "No C tokenizer") def test_multiple_set_unknown_handler(self): """Test setting unknown handler multiple times""" class Handler: def __call__(self): pass uc = Handler() fh = StringIO() c_file = _format.ihm_file_new_from_python(fh, False) reader = _format.ihm_reader_new(c_file, False) # Handler must be a callable object self.assertRaises(ValueError, _format.add_unknown_category_handler, reader, None) self.assertRaises(ValueError, _format.add_unknown_keyword_handler, reader, None) _format.add_unknown_category_handler(reader, uc) _format.add_unknown_category_handler(reader, uc) _format.add_unknown_keyword_handler(reader, uc) _format.add_unknown_keyword_handler(reader, uc) _format.ihm_reader_remove_all_categories(reader) _format.ihm_reader_remove_all_categories(reader) _format.ihm_reader_free(reader) fh.close() def test_preserving_tokenizer_get_token(self): """Test _PreservingCifTokenizer._get_token()""" cif = """ # Full line comment _cat1.Foo baz # End of line comment """ t = ihm.format._PreservingCifTokenizer(StringIO(cif)) tokens = [t._get_token() for _ in range(11)] self.assertIsInstance(tokens[0], ihm.format._EndOfLineToken) self.assertIsInstance(tokens[1], ihm.format._CommentToken) self.assertEqual(tokens[1].txt, ' Full line comment') self.assertIsInstance(tokens[2], ihm.format._EndOfLineToken) self.assertIsInstance(tokens[3], ihm.format._PreservingVariableToken) self.assertEqual(tokens[3].category, '_cat1') self.assertEqual(tokens[3].keyword, 'foo') self.assertEqual(tokens[3].orig_keyword, 'Foo') self.assertIsInstance(tokens[4], ihm.format._WhitespaceToken) self.assertEqual(tokens[4].txt, ' ') self.assertIsInstance(tokens[5], ihm.format._TextValueToken) self.assertEqual(tokens[5].txt, 'baz') self.assertIsInstance(tokens[6], ihm.format._WhitespaceToken) self.assertEqual(tokens[6].txt, ' ') self.assertIsInstance(tokens[7], ihm.format._CommentToken) self.assertEqual(tokens[7].txt, ' End of line comment') self.assertIsInstance(tokens[8], ihm.format._EndOfLineToken) self.assertIsNone(tokens[9]) self.assertIsNone(tokens[10]) # Make sure we can reconstruct the original mmCIF from the tokens new_cif = "".join(x.as_mmcif() for x in tokens[:-2]) self.assertEqual(new_cif, cif) def test_preserving_tokenizer_reconstruct(self): """Make sure _PreservingCifTokenizer can reconstruct original mmCIF""" cif = """ data_foo_bar # _cat1.foo ? # _cat2.BaR . # loop_ foo.bar foo.baz foo.single foo.double foo.multi x . 'single' "double" ;multi ; """ t = ihm.format._PreservingCifTokenizer(StringIO(cif)) tokens = [] while True: tok = t._get_token() if tok is None: break else: tokens.append(tok) new_cif = "".join(x.as_mmcif() for x in tokens) self.assertEqual(new_cif, cif) def test_preserving_variable_token(self): """Test _PreservingVariableToken class""" t = ihm.format._PreservingVariableToken("foo.BAR", 1) self.assertEqual(t.keyword, 'bar') self.assertEqual(t.orig_keyword, 'BAR') self.assertEqual(t.as_mmcif(), 'foo.BAR') t.keyword = 'baz' self.assertEqual(t.as_mmcif(), 'foo.baz') def test_cif_token_reader(self): """Test CifTokenReader class""" cif = """ data_foo_bar # _cat1.foo ? 
# loop_ _foo.bar _foo.baz a b c d x y """ r = ihm.format.CifTokenReader(StringIO(cif)) tokens = list(r.read_file()) self.assertIsInstance(tokens[5], ihm.format._CategoryTokenGroup) self.assertIsInstance(tokens[8], ihm.format._LoopHeaderTokenGroup) self.assertIsInstance(tokens[9], ihm.format._LoopRowTokenGroup) self.assertIsInstance(tokens[10], ihm.format._LoopRowTokenGroup) self.assertIsInstance(tokens[11], ihm.format._LoopRowTokenGroup) new_cif = "".join(x.as_mmcif() for x in tokens) self.assertEqual(new_cif, cif) def test_cif_token_reader_missing_value(self): """Key without a value should be an error""" cif = "_exptl.method\n" r = ihm.format.CifTokenReader(StringIO(cif)) self.assertRaises(ihm.format.CifParserError, list, r.read_file()) def test_cif_token_reader_loop_mixed_categories(self): """Test bad mmCIF loop with a mix of categories""" cif = 'loop_\n_atom_site.id\n_foo.bar\n' r = ihm.format.CifTokenReader(StringIO(cif)) self.assertRaises(ihm.format.CifParserError, list, r.read_file()) def test_cif_token_reader_loop_header(self): """Loop constructs cannot be nested""" cif = 'loop_\nloop_\n' r = ihm.format.CifTokenReader(StringIO(cif)) self.assertRaises(ihm.format.CifParserError, list, r.read_file()) def test_cif_token_reader_loop_data_num(self): """Check wrong number of loop data elements""" cif = "loop_\n_atom_site.x\n_atom_site.y\noneval\n" r = ihm.format.CifTokenReader(StringIO(cif)) self.assertRaises(ihm.format.CifParserError, list, r.read_file()) def test_cif_token_reader_filter(self): """Test CifTokenReader class with filters""" cif = """ data_foo_bar # _cat1.bar old # loop_ _foo.bar _foo.baz a b c d x y """ r = ihm.format.CifTokenReader(StringIO(cif)) filters = [ihm.format.ChangeValueFilter(".bar", old='old', new='new'), ihm.format.ChangeValueFilter(".bar", old='a', new='newa'), ihm.format.ChangeValueFilter(".foo", old='old', new='new')] tokens = list(r.read_file(filters)) new_cif = "".join(x.as_mmcif() for x in tokens) self.assertEqual(new_cif, """ data_foo_bar # _cat1.bar new # loop_ _foo.bar _foo.baz newa b c d x y """) def test_cif_token_reader_change_func_value_filter(self): """Test CifTokenReader class with ChangeFuncValueFilter""" class MyFunc: def __init__(self): self.calls = [] def __call__(self, value, category, keyword): self.calls.append((value, category, keyword)) return value.upper() cif = """ data_foo_bar # _cat1.bar old _cat2.baz old2 # # loop_ _cat3.baz a b c # loop_ _foo.bar _foo.baz a b c d x y """ f = MyFunc() r = ihm.format.CifTokenReader(StringIO(cif)) filters = [ihm.format.ChangeFuncValueFilter(".bar", f), ihm.format.ChangeFuncValueFilter("_cat4.foo", f)] tokens = list(r.read_file(filters)) new_cif = "".join(x.as_mmcif() for x in tokens) self.assertEqual(f.calls, [('old', '_cat1', 'bar'), ('a', '_foo', 'bar'), ('c', '_foo', 'bar'), ('x', '_foo', 'bar')]) self.assertEqual(new_cif, """ data_foo_bar # _cat1.bar OLD _cat2.baz old2 # # loop_ _cat3.baz a b c # loop_ _foo.bar _foo.baz A b C d X y """) def test_cif_token_reader_replace_category_filter(self): """Test CifTokenReader class with ReplaceCategoryFilter""" cif = """ data_foo_bar # _cat1.bar old # loop_ _cat2.bar _cat2.baz a b c d x y # _cat3.x 1 _cat3.y 2 # _cat4.z 1 # loop_ _cat5.bar _cat5.baz a b """ d = ihm.dumper._CommentDumper() s = ihm.System() s.comments.extend(['comment1', 'comment2']) r = ihm.format.CifTokenReader(StringIO(cif)) filters = [ihm.format.ReplaceCategoryFilter("cat1"), ihm.format.ReplaceCategoryFilter("_cat2", raw_cif='FOO'), ihm.format.ReplaceCategoryFilter("cat3", dumper=d, 
system=s)] tokens = list(r.read_file(filters)) new_cif = "".join(x.as_mmcif() for x in tokens) self.assertEqual(new_cif, """ data_foo_bar # # FOO # # comment1 # comment2 # _cat4.z 1 # loop_ _cat5.bar _cat5.baz a b """) def test_category_token_group(self): """Test CategoryTokenGroup class""" var = ihm.format._PreservingVariableToken("_foo.bar", 1) space = ihm.format._WhitespaceToken(" ") val = ihm.format._TextValueToken("baz", quote=None) tg = ihm.format._CategoryTokenGroup( var, ihm.format._SpacedToken([space], val)) self.assertEqual(str(tg), "<_CategoryTokenGroup(_foo.bar, baz)>") self.assertEqual(tg.as_mmcif(), '_foo.bar baz\n') self.assertEqual(tg.category, "_foo") self.assertEqual(tg.keyword, "bar") self.assertEqual(tg.value, "baz") tg.value = None self.assertIsNone(tg.value) def test_spaced_token(self): """Test SpacedToken class""" space = ihm.format._WhitespaceToken(" ") val = ihm.format._TextValueToken("baz", quote=None) sp = ihm.format._SpacedToken([space], val) self.assertEqual(sp.as_mmcif(), " baz") self.assertEqual(sp.value, 'baz') sp.value = None self.assertIsNone(sp.value) self.assertEqual(sp.as_mmcif(), ' .') sp.value = ihm.unknown self.assertIs(sp.value, ihm.unknown) self.assertEqual(sp.as_mmcif(), ' ?') sp.value = "test value" self.assertEqual(sp.as_mmcif(), ' "test value"') def test_loop_header_token_group(self): """Test LoopHeaderTokenGroup class""" cif = """ loop_ _foo.bar _foo.baz x y """ r = ihm.format.CifTokenReader(StringIO(cif)) token = list(r.read_file())[1] self.assertIsInstance(token, ihm.format._LoopHeaderTokenGroup) self.assertEqual(str(token), "<_LoopHeaderTokenGroup(_foo, ['bar', 'baz'])>") self.assertEqual(token.keyword_index("bar"), 0) self.assertEqual(token.keyword_index("baz"), 1) self.assertRaises(ValueError, token.keyword_index, "foo") def test_filter(self): """Test Filter base class""" f = ihm.format.Filter("_citation.id") self.assertEqual(f.category, '_citation') self.assertEqual(f.keyword, 'id') f = ihm.format.Filter("CITATION.ID") self.assertEqual(f.category, '_citation') self.assertEqual(f.keyword, 'id') f = ihm.format.Filter(".bar") self.assertIsNone(f.category) self.assertEqual(f.keyword, 'bar') f = ihm.format.Filter("bar") self.assertIsNone(f.category) self.assertEqual(f.keyword, 'bar') self.assertRaises(NotImplementedError, f.filter_category, None) self.assertRaises(NotImplementedError, f.get_loop_filter, None) def test_change_value_filter_category(self): """Test ChangeValueFilter.filter_category""" var = ihm.format._PreservingVariableToken("_foo.bar", 1) space = ihm.format._WhitespaceToken(" ") val = ihm.format._TextValueToken("baz", quote=None) tg = ihm.format._CategoryTokenGroup( var, ihm.format._SpacedToken([space], val)) # Value does not match f = ihm.format.ChangeValueFilter("_foo.bar", old='old', new='new') new_tg = f.filter_category(tg) self.assertEqual(new_tg.value, 'baz') # Keyword does not match f = ihm.format.ChangeValueFilter("_foo.foo", old='baz', new='new') new_tg = f.filter_category(tg) self.assertEqual(new_tg.value, 'baz') # Category does not match f = ihm.format.ChangeValueFilter("_bar.bar", old='baz', new='new') new_tg = f.filter_category(tg) self.assertEqual(new_tg.value, 'baz') # Category matches exactly f = ihm.format.ChangeValueFilter("_foo.bar", old='baz', new='new') new_tg = f.filter_category(tg) self.assertEqual(new_tg.value, 'new') # All-category match f = ihm.format.ChangeValueFilter(".bar", old='new', new='new2') new_tg = f.filter_category(tg) self.assertEqual(new_tg.value, 'new2') def 
test_change_value_filter_loop(self): """Test ChangeValueFilter.get_loop_filter""" cif = """ loop_ _foo.bar _foo.baz x y """ r = ihm.format.CifTokenReader(StringIO(cif)) tokens = list(r.read_file()) header = tokens[1] row = tokens[2] # Keyword does not match f = ihm.format.ChangeValueFilter("_foo.foo", old='x', new='new') self.assertIsNone(f.get_loop_filter(header)) # Category does not match f = ihm.format.ChangeValueFilter("_bar.bar", old='x', new='new') self.assertIsNone(f.get_loop_filter(header)) # Value does not match f = ihm.format.ChangeValueFilter("_foo.bar", old='notx', new='new') lf = f.get_loop_filter(header) self.assertEqual(lf(row).as_mmcif(), "x y") # Category matches exactly f = ihm.format.ChangeValueFilter("_foo.bar", old='x', new='new') lf = f.get_loop_filter(header) self.assertEqual(lf(row).as_mmcif(), "new y") # All-category match f = ihm.format.ChangeValueFilter(".bar", old='new', new='new2') lf = f.get_loop_filter(header) self.assertEqual(lf(row).as_mmcif(), "new2 y") def test_remove_item_filter(self): """Test RemoveItemFilter""" cif = """ _bar.id 1 _bar.bar 42 # loop_ _foo.a _foo.b _foo.bar _foo.baz a b c d 1 2 3 4 A B C D """ r = ihm.format.CifTokenReader(StringIO(cif)) filters = [ihm.format.RemoveItemFilter(".bar"), ihm.format.RemoveItemFilter("_foo.a"), ihm.format.RemoveItemFilter("_foo.z")] new_cif = "".join(t.as_mmcif() for t in r.read_file(filters)) self.assertEqual(new_cif, """ _bar.id 1 # loop_ _foo.b _foo.baz b d 2 4 B D """) def test_remove_item_all_loop_keywords(self): """Test RemoveItemFilter removing all keywords from a loop""" cif = """ # start loop_ _foo.a _foo.b a b c d # end """ r = ihm.format.CifTokenReader(StringIO(cif)) filters = [ihm.format.RemoveItemFilter(".a"), ihm.format.RemoveItemFilter(".b")] new_cif = "".join(t.as_mmcif() for t in r.read_file(filters)) self.assertEqual(new_cif, '\n# start\n\n# end\n') def test_change_keyword_filter(self): """Test ChangeKeywordFilter""" cif = """ _bar.id 1 _bar.bar 42 # loop_ _foo.foo _foo.bar a b c d """ r = ihm.format.CifTokenReader(StringIO(cif)) filters = [ihm.format.ChangeKeywordFilter(".bar", "newbar"), ihm.format.ChangeKeywordFilter(".baz", "newbaz"), ihm.format.ChangeKeywordFilter("x.y", "newy")] new_cif = "".join(t.as_mmcif() for t in r.read_file(filters)) self.assertEqual(new_cif, """ _bar.id 1 _bar.newbar 42 # loop_ _foo.foo _foo.newbar a b c d """) if __name__ == '__main__': unittest.main() python-ihm-2.7/test/test_format_bcif.py000066400000000000000000002162541503573337200203430ustar00rootroot00000000000000import utils import os import unittest import sys import struct from io import BytesIO TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) utils.set_search_paths(TOPDIR) import ihm.format_bcif try: from ihm import _format except ImportError: _format = None # Provide dummy implementations of msgpack.unpack() and msgpack.pack() which # just return the data unchanged. We can use these to test the Python BinaryCIF # parser with Python objects rather than having to install msgpack and # generate real binary files class MockMsgPack: @staticmethod def unpack(fh, raw=False): return fh @staticmethod def pack(data, fh, use_bin_type=True): fh.data = data class MockFh: pass class GenericHandler: """Capture BinaryCIF data as a simple list of dicts""" not_in_file = None omitted = None unknown = "?" 
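# Explanatory note (added comment, not part of the original test suite):
# the three class attributes above mirror the handler convention that the
# BinaryCIF tests below exercise -- keywords absent from a category are
# reported as ``not_in_file``, values masked as omitted are reported as
# ``omitted``, and values masked as unknown are reported as ``unknown``.
# Individual tests such as test_omitted_unknown_not_in_file_explicit
# override these (e.g. h.unknown = 'UNK', h.omitted = 'OMIT') to check the
# explicit-handling code paths.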
_keys = ('method', 'foo', 'bar', 'baz', 'pdbx_keywords', 'var1', 'var2', 'var3', 'intkey1', 'intkey2', 'floatkey1', 'floatkey2', 'boolkey1') _int_keys = frozenset(('intkey1', 'intkey2')) _float_keys = frozenset(('floatkey1', 'floatkey2')) _bool_keys = frozenset(('boolkey1',)) def __init__(self): self.data = [] def __call__(self, *args): d = {} for k, v in zip(self._keys, args): if v is not None: d[k] = v self.data.append(d) def _encode_int(rows): d = {'data': struct.pack('%db' % len(rows), *rows), 'encoding': [{'kind': 'ByteArray', 'type': ihm.format_bcif._Uint8}]} return d, None def _encode_float(rows): d = {'data': struct.pack('<%dd' % len(rows), *rows), 'encoding': [{'kind': 'ByteArray', 'type': ihm.format_bcif._Float64}]} return d, None def _encode(rows): # Assume rows is a list of strings; make simple BinaryCIF encoding mask = [0] * len(rows) need_mask = False for i, row in enumerate(rows): if row is None: need_mask = True mask[i] = 1 elif row == '?': need_mask = True mask[i] = 2 if need_mask: rows = ['' if r == '?' or r is None else r for r in rows] mask = {'data': ''.join(chr(i) for i in mask).encode('ascii'), 'encoding': [{'kind': 'ByteArray', 'type': ihm.format_bcif._Uint8}]} else: mask = None string_data = "".join(rows) offsets = [] total_len = 0 for row in rows: offsets.append(total_len) total_len += len(row) offsets.append(total_len) offsets = ''.join(chr(i) for i in offsets).encode('ascii') indices = ''.join(chr(i) for i in range(len(rows))).encode('ascii') string_array_encoding = { 'kind': 'StringArray', 'dataEncoding': [{'kind': 'ByteArray', 'type': ihm.format_bcif._Uint8}], 'stringData': string_data, 'offsetEncoding': [{'kind': 'ByteArray', 'type': ihm.format_bcif._Uint8}], 'offsets': offsets} d = {'data': indices, 'encoding': [string_array_encoding]} return d, mask class Category: def __init__(self, name, data): self.name = name self.data = data def _encode_rows(self, rows): if len(rows) and isinstance(rows[0], int): return _encode_int(rows) elif len(rows) and isinstance(rows[0], float): return _encode_float(rows) else: return _encode(rows) def get_bcif(self): nrows = 0 cols = [] for name, rows in self.data.items(): nrows = len(rows) data, mask = self._encode_rows(rows) cols.append({'mask': mask, 'name': name, 'data': data}) return {'name': self.name, 'columns': cols, 'rowCount': nrows} class Block(list): pass class _BadMsgPackType: pass class _RawMsgPack: def __init__(self, val): self._val = val BAD_MSGPACK_TYPE = _BadMsgPackType() def _add_msgpack(d, fh): """Add `d` to filelike object `fh` in msgpack format""" if isinstance(d, dict): fh.write(struct.pack('>Bi', 0xdf, len(d))) for key, val in d.items(): _add_msgpack(key, fh) _add_msgpack(val, fh) elif isinstance(d, list): fh.write(struct.pack('>Bi', 0xdd, len(d))) for val in d: _add_msgpack(val, fh) elif isinstance(d, str): b = d.encode('utf8') fh.write(struct.pack('>Bi', 0xdb, len(b))) fh.write(b) elif isinstance(d, bytes): fh.write(struct.pack('>Bi', 0xc6, len(d))) fh.write(d) elif isinstance(d, int): fh.write(struct.pack('>Bi', 0xce, d)) elif isinstance(d, float): fh.write(struct.pack('>Bf', 0xca, d)) elif isinstance(d, _RawMsgPack): fh.write(d._val) elif d is None: fh.write(b'\xc0') elif d is BAD_MSGPACK_TYPE: # 0xc1 is not used in msgpack fh.write(b'\xc1') else: raise TypeError("Cannot handle %s" % type(d)) def _make_bcif_file(blocks): blocks = [{'header': 'ihm', 'categories': [c.get_bcif() for c in block]} for block in blocks] d = {'version': '0.1', 'encoder': 'python-ihm test suite', 'dataBlocks': blocks} return 
_python_to_msgpack(d) def _python_to_msgpack(d): if _format: # Convert Python object `d` into msgpack format for the C-accelerated # parser fh = BytesIO() _add_msgpack(d, fh) fh.seek(0) return fh else: # Pure Python reader uses mocked-out msgpack to work on `d` directly return d class Tests(unittest.TestCase): def test_decoder_base(self): """Test Decoder base class""" d = ihm.format_bcif._Decoder() self.assertIsNone(d._kind) d(enc=None, data=None) # noop def test_string_array_decoder(self): """Test StringArray decoder""" d = ihm.format_bcif._StringArrayDecoder() self.assertEqual(d._kind, 'StringArray') # Int8 is signed char (so FF is -1) enc = {'stringData': 'aAB', 'dataEncoding': [{'kind': 'ByteArray', 'type': ihm.format_bcif._Int8}], 'offsetEncoding': [{'kind': 'ByteArray', 'type': ihm.format_bcif._Int8}], 'offsets': b'\x00\x01\x03'} data = b'\x00\x01\x00\xFF' data = d(enc, data) self.assertEqual(list(data), ['a', 'AB', 'a', None]) def test_byte_array_decoder(self): """Test ByteArray decoder""" d = ihm.format_bcif._ByteArrayDecoder() self.assertEqual(d._kind, 'ByteArray') # type 1 (signed char) data = d({'type': ihm.format_bcif._Int8}, b'\x00\x01\xFF') self.assertEqual(list(data), [0, 1, -1]) # type 2 (signed short) data = d({'type': ihm.format_bcif._Int16}, b'\x00\x01\x01\xAC') self.assertEqual(list(data), [256, -21503]) # type 3 (signed int) data = d({'type': ihm.format_bcif._Int32}, b'\x00\x01\x01\x05') self.assertEqual(list(data), [83951872]) # type 4 (unsigned char) data = d({'type': ihm.format_bcif._Uint8}, b'\x00\xFF') self.assertEqual(list(data), [0, 255]) # type 5 (unsigned short) data = d({'type': ihm.format_bcif._Uint16}, b'\x00\x01\x01\xAC') self.assertEqual(list(data), [256, 44033]) # type 6 (unsigned int) data = d({'type': ihm.format_bcif._Uint32}, b'\x00\x01\x01\xFF') self.assertEqual(list(data), [4278255872]) # type 32 (32-bit float) data = d({'type': ihm.format_bcif._Float32}, b'\x00\x00(B') self.assertAlmostEqual(list(data)[0], 42.0, delta=0.1) # type 33 (64-bit float) data = d({'type': ihm.format_bcif._Float64}, b'\x00\x00\x00\x00\x00\x00E@') self.assertAlmostEqual(list(data)[0], 42.0, delta=0.1) @unittest.skipIf(_format is None, "No C tokenizer") def test_byte_array_decoder_full_file(self): """Test ByteArray decoder working on full BinaryCIF file""" class MyCategory(Category): def __init__(self, name, data, raw_data, data_type, enc=None): Category.__init__(self, name, data) self.raw_data, self.data_type = raw_data, data_type self.enc = enc def _encode_rows(self, rows): if self.enc is not None: enc = self.enc else: enc = [{'kind': 'ByteArray', 'type': self.data_type}] return {'data': self.raw_data, 'encoding': enc}, None def get_decoded(data_type, raw_data, enc=None): cat = MyCategory('_exptl', {'method': []}, raw_data, data_type, enc=enc) h = GenericHandler() self._read_bcif([Block([cat])], {'_exptl': h}) return [x['method'] for x in h.data] # type 3 (signed int) data = get_decoded(ihm.format_bcif._Int32, b'\x00\x01\x01\x05') self.assertEqual(data, ['83951872']) # Raw data not a multiple of type size self.assertRaises(_format.FileFormatError, get_decoded, ihm.format_bcif._Int16, b'\x00\x01\x01') self.assertRaises(_format.FileFormatError, get_decoded, ihm.format_bcif._Uint16, b'\x00\x01\x01') self.assertRaises(_format.FileFormatError, get_decoded, ihm.format_bcif._Int32, b'\x00\x01\x01') self.assertRaises(_format.FileFormatError, get_decoded, ihm.format_bcif._Uint32, b'\x00\x01\x01') self.assertRaises(_format.FileFormatError, get_decoded, ihm.format_bcif._Float64, 
b'\x00\x00\x00\x00') self.assertRaises(_format.FileFormatError, get_decoded, ihm.format_bcif._Float32, b'\x00\x00\x00') # ByteArray must take raw data, not the output of another decoder self.assertRaises(_format.FileFormatError, get_decoded, ihm.format_bcif._Int32, b'\x00\x01\x01\x05', enc=[{'kind': 'ByteArray', 'type': ihm.format_bcif._Int32}] * 2) # type 1 (signed char) data = get_decoded(ihm.format_bcif._Int8, struct.pack('2b', -10, 100)) self.assertEqual(data, ['-10', '100']) # type 4 (unsigned char) data = get_decoded(ihm.format_bcif._Uint8, b'\x00\xFF') self.assertEqual(data, ['0', '255']) # type 2 (16-bit signed int) data = get_decoded(ihm.format_bcif._Int16, struct.pack('<2h', -5, 10)) self.assertEqual(data, ['-5', '10']) # type 5 (16-bit unsigned int) data = get_decoded(ihm.format_bcif._Uint16, struct.pack('BH', 0xcd, 100), True), # uint16 (struct.pack('>BI', 0xce, 100), True), # uint32 (struct.pack('Bb', 0xd0, 100), True), # int8 (struct.pack('Bb', 0xd0, -10), False), (struct.pack('>Bh', 0xd1, 100), True), # int16 (struct.pack('>Bh', 0xd1, -10), False), (struct.pack('>Bi', 0xd2, 100), True), # int32 (struct.pack('>Bi', 0xd2, -10), False), (struct.pack('>Bf', 0xca, 100.0), True), # float32 (struct.pack('>Bd', 0xcb, 100.0), True)): # float64 d = make_bcif(data=struct.pack('b', 0), data_type=ihm.format_bcif._Int8, minval=_RawMsgPack(raw), maxval=200, numsteps=3) h = GenericHandler() self._read_bcif_raw(d, {'_foo': h}) bar = h.data[0]['bar'] self.assertIsInstance(bar, str) self.assertAlmostEqual(float(bar), 100.0 if positive else -10.0, delta=0.01) # Bad number type d = make_bcif(data=struct.pack('b', 0), data_type=ihm.format_bcif._Int8, minval='foo', maxval=200, numsteps=3) h = GenericHandler() self.assertRaises(_format.FileFormatError, self._read_bcif_raw, d, {'_foo': h}) # Bad input type d = make_bcif(data=struct.pack('<3f', 0.0, 1.0, 2.0), data_type=ihm.format_bcif._Float32) h = GenericHandler() self.assertRaises(_format.FileFormatError, self._read_bcif_raw, d, {'_foo': h}) # Error trying to read encoding min parameter d = make_bcif(data=struct.pack('3b', 0, 1, 2), data_type=ihm.format_bcif._Int8, minval=BAD_MSGPACK_TYPE) h = GenericHandler() self.assertRaises(_format.FileFormatError, self._read_bcif_raw, d, {'_foo': h}) # Error trying to read encoding max parameter d = make_bcif(data=struct.pack('3b', 0, 1, 2), data_type=ihm.format_bcif._Int8, maxval=BAD_MSGPACK_TYPE) h = GenericHandler() self.assertRaises(_format.FileFormatError, self._read_bcif_raw, d, {'_foo': h}) # Error trying to read encoding numsteps parameter d = make_bcif(data=struct.pack('3b', 0, 1, 2), data_type=ihm.format_bcif._Int8, numsteps='foo') h = GenericHandler() self.assertRaises(_format.FileFormatError, self._read_bcif_raw, d, {'_foo': h}) # Bad number of steps d = make_bcif(data=struct.pack('3b', 0, 1, 2), data_type=ihm.format_bcif._Int8, numsteps=1) h = GenericHandler() self.assertRaises(_format.FileFormatError, self._read_bcif_raw, d, {'_foo': h}) @unittest.skipIf(_format is None, "No C tokenizer") def test_process_bcif_category_c(self): """Test processing of BinaryCIF category""" def make_bcif(data1, data2, data_type): c1 = {'name': 'bar', 'data': {'data': data1, 'encoding': [{'kind': 'IntegerPacking'}, {'kind': 'ByteArray', 'type': data_type}]}} c2 = {'name': 'baz', 'data': {'data': data2, 'encoding': [{'kind': 'IntegerPacking'}, {'kind': 'ByteArray', 'type': data_type}]}} return {'dataBlocks': [{'categories': [{'name': '_foo', 'columns': [c1, c2]}]}]} class _ThrowHandler(GenericHandler): def 
__call__(self, *args): raise ValueError("some error") # Normal operation d = make_bcif(data1=struct.pack('2b', 1, 42), data2=struct.pack('2b', 8, 4), data_type=ihm.format_bcif._Int8) h = GenericHandler() self._read_bcif_raw(d, {'_foo': h}) self.assertEqual(h.data, [{'bar': '1', 'baz': '8'}, {'bar': '42', 'baz': '4'}]) # Handler errors should be propagated h = _ThrowHandler() self.assertRaises(ValueError, self._read_bcif_raw, d, {'_foo': h}) # Mismatched column size d = make_bcif(data1=struct.pack('3b', 1, 42, 9), data2=struct.pack('2b', 8, 4), data_type=ihm.format_bcif._Int8) h = GenericHandler() self.assertRaises(_format.FileFormatError, self._read_bcif_raw, d, {'_foo': h}) @unittest.skipIf(_format is None, "No C tokenizer") def test_read_error(self): """Test handling of errors from filelike read()""" def make_bcif(): c1 = {'name': 'bar', 'data': {'data': struct.pack('2b', 1, 42), 'encoding': [{'kind': 'IntegerPacking'}, {'kind': 'ByteArray', 'type': ihm.format_bcif._Int8, 'min': 1.0}]}, 'mask': None} return {'dataBlocks': [{'categories': [{'name': '_foo', 'columns': [c1]}]}]} class ReadError: """Filelike object that returns defined-size blocks from read, or errors out if empty""" def __init__(self, read_sz): self.read_sz = read_sz self.data = b'' def write(self, b): self.data += b def read(self, sz): if not self.read_sz: raise IndexError("foo") rsz = self.read_sz.pop(0) assert sz >= rsz ret = self.data[:rsz] self.data = self.data[rsz:] return ret def run_test(read_sz, ind=-1, cat='_foo'): d = make_bcif() h = GenericHandler() fh = ReadError(read_sz) _add_msgpack(d, fh) # If ind is given, just read up to the first instance of that # msgpack type if ind > 0: fh.read_sz = [fh.data.index(ind)] r = ihm.format_bcif.BinaryCifReader(fh, {cat: h}) r.read_file() # Less data read than requested with self.assertRaises(OSError) as cm: run_test([0]) self.assertIn('Less data read than requested', str(cm.exception)) # Exception in read_map_or_nil (None=0xc0 in msgpack) self.assertRaises(IndexError, run_test, [], ind=0xc0) # Exception in read_array (array=0xdd) self.assertRaises(IndexError, run_test, [], ind=0xdd) # Exception in read_int (int=0xce) self.assertRaises(IndexError, run_test, [], ind=0xce) # Exception in read_bcif_exact_string (str=0xdb) self.assertRaises(IndexError, run_test, [], ind=0xdb) # Exception in read_bcif_any_double (float=0xca) self.assertRaises(IndexError, run_test, [], ind=0xca) # Exception in read_bcif_string_dup (107=index of column name's # string size) self.assertRaises(IndexError, run_test, [107]) # Exception in read_bcif_string (168=index of encoding name's # string size) self.assertRaises(IndexError, run_test, [168]) # Exception in read_map (25=index of second map) self.assertRaises(IndexError, run_test, [25]) # Exception in read_binary (137=index of data's binary size) self.assertRaises(IndexError, run_test, [137]) # Exception in skip_no_limit self.assertRaises(IndexError, run_test, [147], cat='_bar') def test_omitted_unknown_not_in_file_explicit(self): """Test explicit handling of omitted/unknown/not in file data""" cat = Category('_foo', {'var1': ['test1', '?', 'test2', None, 'test3']}) h = GenericHandler() h.omitted = 'OMIT' h.unknown = 'UNK' h.not_in_file = 'NOT' h._keys = ('var1', 'var2') h._int_keys = frozenset() h._float_keys = frozenset() h._bool_keys = frozenset() self._read_bcif([Block([cat])], {'_foo': h}) self.assertEqual(h.data, [{'var1': 'test1', 'var2': 'NOT'}, {'var1': 'UNK', 'var2': 'NOT'}, {'var1': 'test2', 'var2': 'NOT'}, {'var1': 'OMIT', 'var2': 'NOT'}, 
{'var1': 'test3', 'var2': 'NOT'}]) def test_unknown_categories_ignored(self): """Check that unknown categories are just ignored""" cat1 = Category('_foo', {'var1': ['test1']}) cat2 = Category('_bar', {'var2': ['test2']}) h = GenericHandler() self._read_bcif([Block([cat1, cat2])], {'_foo': h}) self.assertEqual(h.data, [{'var1': 'test1'}]) def test_unknown_categories_handled(self): """Check that unknown categories are handled if requested""" class CatHandler: def __init__(self): self.warns = [] def __call__(self, cat, line): self.warns.append((cat, line)) ch = CatHandler() cat1 = Category('_foo', {'var1': ['test1']}) cat2 = Category('_bar', {'var2': ['test2']}) h = GenericHandler() self._read_bcif([Block([cat1, cat2])], {'_foo': h}, unknown_category_handler=ch) self.assertEqual(h.data, [{'var1': 'test1'}]) self.assertEqual(ch.warns, [('_bar', 0)]) def test_unknown_keywords_ignored(self): """Check that unknown keywords are ignored""" cat = Category('_foo', {'var1': ['test1'], 'othervar': ['test2']}) h = GenericHandler() self._read_bcif([Block([cat])], {'_foo': h}) self.assertEqual(h.data, [{'var1': 'test1'}]) def test_unknown_keywords_handled(self): """Check that unknown keywords are handled if requested""" class KeyHandler: def __init__(self): self.warns = [] def __call__(self, cat, key, line): self.warns.append((cat, key, line)) kh = KeyHandler() cat = Category('_foo', {'var1': ['test1'], 'othervar': ['test2']}) h = GenericHandler() self._read_bcif([Block([cat])], {'_foo': h}, unknown_keyword_handler=kh) self.assertEqual(h.data, [{'var1': 'test1'}]) self.assertEqual(kh.warns, [('_foo', 'othervar', 0)]) def test_multiple_data_blocks(self): """Test handling of multiple data blocks""" block1 = Block([Category('_foo', {'var1': ['test1'], 'var2': ['test2']})]) block2 = Block([Category('_foo', {'var3': ['test3']})]) fh = _make_bcif_file([block1, block2]) h = GenericHandler() r = ihm.format_bcif.BinaryCifReader(fh, {'_foo': h}) sys.modules['msgpack'] = MockMsgPack # Read first data block self.assertTrue(r.read_file()) self.assertEqual(h.data, [{'var1': 'test1', 'var2': 'test2'}]) # Read second data block h.data = [] self.assertFalse(r.read_file()) self.assertEqual(h.data, [{'var3': 'test3'}]) # No more data blocks h.data = [] self.assertFalse(r.read_file()) self.assertEqual(h.data, []) def test_encoder(self): """Test _Encoder base class""" e = ihm.format_bcif._Encoder() e(None) # noop def test_byte_array_encoder(self): """Test ByteArray encoder""" d = ihm.format_bcif._ByteArrayEncoder() # type 1 (signed char) data, encd = d([0, 1, -1]) self.assertEqual(data, b'\x00\x01\xFF') self.assertEqual(encd, {'kind': 'ByteArray', 'type': ihm.format_bcif._Int8}) # type 2 (signed short) data, encd = d([256, -21503]) self.assertEqual(data, b'\x00\x01\x01\xAC') self.assertEqual(encd, {'kind': 'ByteArray', 'type': ihm.format_bcif._Int16}) # type 3 (signed int) data, encd = d([-83951872]) self.assertEqual(data, b'\x00\xff\xfe\xfa') self.assertEqual(encd, {'kind': 'ByteArray', 'type': ihm.format_bcif._Int32}) # type 4 (unsigned char) data, encd = d([0, 255]) self.assertEqual(data, b'\x00\xFF') self.assertEqual(encd, {'kind': 'ByteArray', 'type': ihm.format_bcif._Uint8}) # type 5 (unsigned short) data, encd = d([256, 44033]) self.assertEqual(data, b'\x00\x01\x01\xAC') self.assertEqual(encd, {'kind': 'ByteArray', 'type': ihm.format_bcif._Uint16}) # type 6 (unsigned int) data, encd = d([4278255872]) self.assertEqual(data, b'\x00\x01\x01\xFF') self.assertEqual(encd, {'kind': 'ByteArray', 'type': 
ihm.format_bcif._Uint32}) # type 32 (32-bit float) data, encd = d([42.0]) self.assertEqual(len(data), 4) self.assertEqual(encd, {'kind': 'ByteArray', 'type': ihm.format_bcif._Float32}) # Too-large ints should cause an error self.assertRaises(TypeError, d, [2**34]) self.assertRaises(TypeError, d, [-2**34]) def test_delta_encoder(self): """Test Delta encoder""" d = ihm.format_bcif._DeltaEncoder() # too-small data is returned unchanged data = [0, 1, -1] encdata, encdict = d(data) self.assertEqual(data, encdata) self.assertIsNone(encdict) # large data is encoded data = [0, 1, -1] + [-1] * 40 encdata, encdict = d(data) self.assertEqual(encdata, [0, 1, -2] + [0] * 40) self.assertEqual(encdict, {'origin': 0, 'kind': 'Delta', 'srcType': ihm.format_bcif._Int8}) def test_run_length_encoder(self): """Test RunLength encoder""" d = ihm.format_bcif._RunLengthEncoder() # too-small data is returned unchanged data = [0, 1, -1] encdata, encdict = d(data) self.assertEqual(data, encdata) self.assertIsNone(encdict) # large data that can't be compressed is returned unchanged data = list(range(50)) encdata, encdict = d(data) self.assertEqual(data, encdata) self.assertIsNone(encdict) # large data that can be compressed data = [0] * 30 + [1] * 40 encdata, encdict = d(data) self.assertEqual(encdata, [0, 30, 1, 40]) self.assertEqual(encdict, {'kind': 'RunLength', 'srcSize': 70, 'srcType': ihm.format_bcif._Uint8}) def test_encode(self): """Test _encode function""" data = [1, 1, 1, 2, 3, 3] encoders = [ihm.format_bcif._ByteArrayEncoder()] encdata, encds = ihm.format_bcif._encode(data, encoders) self.assertEqual(encdata, b'\x01\x01\x01\x02\x03\x03') self.assertEqual(encds, [{'kind': 'ByteArray', 'type': ihm.format_bcif._Uint8}]) # DeltaEncoder will be a noop here since data is small encoders = [ihm.format_bcif._DeltaEncoder(), ihm.format_bcif._ByteArrayEncoder()] encdata, encds = ihm.format_bcif._encode(data, encoders) self.assertEqual(encdata, b'\x01\x01\x01\x02\x03\x03') self.assertEqual(encds, [{'kind': 'ByteArray', 'type': ihm.format_bcif._Uint8}]) def test_mask_type_no_mask(self): """Test get_mask_and_type with no mask""" data = [1, 2, 3, 4] mask, typ = ihm.format_bcif._get_mask_and_type(data) self.assertIsNone(mask) self.assertEqual(typ, int) def test_mask_type_masked_int(self): """Test get_mask_and_type with masked int data""" data = [1, 2, 3, None, ihm.unknown, 4] mask, typ = ihm.format_bcif._get_mask_and_type(data) self.assertEqual(mask, [0, 0, 0, 1, 2, 0]) self.assertEqual(typ, int) def test_mask_type_masked_float(self): """Test get_mask_and_type with masked float data""" data = [1.0, 2.0, 3.0, None, ihm.unknown, 4.0] mask, typ = ihm.format_bcif._get_mask_and_type(data) self.assertEqual(mask, [0, 0, 0, 1, 2, 0]) self.assertEqual(typ, float) def test_mask_type_masked_numpy_float(self): """Test get_mask_and_type with masked numpy float data""" try: import numpy except ImportError: self.skipTest("this test requires numpy") data = [numpy.float64(4.2), None, ihm.unknown] mask, typ = ihm.format_bcif._get_mask_and_type(data) self.assertEqual(mask, [0, 1, 2]) self.assertEqual(typ, float) def test_mask_type_masked_str(self): """Test get_mask_and_type with masked str data""" # Literal . and ? 
should not be masked data = ['a', 'b', None, ihm.unknown, 'c', '.', '?'] mask, typ = ihm.format_bcif._get_mask_and_type(data) self.assertEqual(mask, [0, 0, 1, 2, 0, 0, 0]) self.assertEqual(typ, str) def test_mask_type_mix_int_float(self): """Test get_mask_and_type with a mix of int and float data""" data = [1, 2, 3, 4.0] mask, typ = ihm.format_bcif._get_mask_and_type(data) self.assertIsNone(mask) self.assertEqual(typ, float) # int/float is coerced to float def test_mask_type_mix_int_float_str(self): """Test get_mask_and_type with a mix of int/float/str data""" data = [1, 2, 3, 4.0, 'foo'] mask, typ = ihm.format_bcif._get_mask_and_type(data) self.assertIsNone(mask) self.assertEqual(typ, str) # int/float/str is coerced to str def test_mask_type_bad_type(self): """Test get_mask_and_type with unknown type data""" class MockObject: pass data = [MockObject()] self.assertRaises(ValueError, ihm.format_bcif._get_mask_and_type, data) def test_masked_encoder(self): """Test MaskedEncoder base class""" e = ihm.format_bcif._MaskedEncoder() e(None, None) # noop def test_string_array_encoder_no_mask(self): """Test StringArray encoder with no mask""" d = ihm.format_bcif._StringArrayMaskedEncoder() indices, encs = d(['a', 'AB', 'a'], None) self.assertEqual(indices, b'\x00\x01\x00') enc, = encs self.assertEqual(enc['dataEncoding'], [{'kind': 'ByteArray', 'type': ihm.format_bcif._Uint8}]) self.assertEqual(enc['offsetEncoding'], [{'kind': 'ByteArray', 'type': ihm.format_bcif._Uint8}]) self.assertEqual(enc['offsets'], b'\x00\x01\x03') self.assertEqual(enc['stringData'], 'aAB') def test_string_array_encoder_mask(self): """Test StringArray encoder with mask""" d = ihm.format_bcif._StringArrayMaskedEncoder() # True should be mapped to 'YES'; int 3 to str '3' # Unmasked literal . and ? 
should be kept as-is indices, encs = d(['a', 'AB', True, ihm.unknown, None, 'a', 3, '.', '?'], [0, 0, 0, 2, 1, 0, 0, 0, 0]) # \xff is -1 (masked value) as a signed char (Int8) self.assertEqual(indices, b'\x00\x01\x02\xff\xff\x00\x03\x04\x05') enc, = encs self.assertEqual(enc['dataEncoding'], [{'kind': 'ByteArray', 'type': ihm.format_bcif._Int8}]) self.assertEqual(enc['offsetEncoding'], [{'kind': 'ByteArray', 'type': ihm.format_bcif._Uint8}]) self.assertEqual(enc['offsets'], b'\x00\x01\x03\x06\x07\x08\t') self.assertEqual(enc['stringData'], 'aABYES3.?') def test_int_array_encoder_no_mask(self): """Test IntArray encoder with no mask""" d = ihm.format_bcif._IntArrayMaskedEncoder() data, encs = d([5, 7, 8], None) self.assertEqual(data, b'\x05\x07\x08') self.assertEqual(encs, [{'kind': 'ByteArray', 'type': ihm.format_bcif._Uint8}]) def test_int_array_encoder_mask(self): """Test IntArray encoder with mask""" d = ihm.format_bcif._IntArrayMaskedEncoder() data, encs = d([5, 7, '?', 8, None], [0, 0, 2, 0, 1]) # \xff is -1 (masked value) as a signed char (Int8) self.assertEqual(data, b'\x05\x07\xff\x08\xff') self.assertEqual(encs, [{'kind': 'ByteArray', 'type': ihm.format_bcif._Int8}]) def test_float_array_encoder_no_mask(self): """Test FloatArray encoder with no mask""" d = ihm.format_bcif._FloatArrayMaskedEncoder() # int data should be coerced to float data, encs = d([5.0, 7.0, 8.0, 4], None) self.assertEqual(len(data), 4 * 4) self.assertEqual(encs, [{'kind': 'ByteArray', 'type': ihm.format_bcif._Float32}]) def test_float_array_encoder_mask(self): """Test FloatArray encoder with mask""" d = ihm.format_bcif._FloatArrayMaskedEncoder() data, encs = d([5., 7., '?', 8., None], [0, 0, 2, 0, 1]) self.assertEqual(len(data), 5 * 4) self.assertEqual(encs, [{'kind': 'ByteArray', 'type': ihm.format_bcif._Float32}]) def test_category(self): """Test CategoryWriter class""" fh = MockFh() sys.modules['msgpack'] = MockMsgPack writer = ihm.format_bcif.BinaryCifWriter(fh) writer.start_block('ihm') with writer.category('foo') as loc: loc.write(bar='baz') writer.flush() block, = fh.data['dataBlocks'] category, = block['categories'] column, = category['columns'] self.assertEqual(block['header'], 'ihm') self.assertEqual(category['name'], 'foo') self.assertEqual(category['rowCount'], 1) self.assertEqual(column['name'], 'bar') self.assertEqual(column['data']['encoding'][0]['stringData'], 'baz') def test_empty_loop(self): """Test LoopWriter class with no values""" fh = MockFh() sys.modules['msgpack'] = MockMsgPack writer = ihm.format_bcif.BinaryCifWriter(fh) writer.start_block('ihm') with writer.loop('foo', ["bar", "baz"]): pass writer.flush() self.assertEqual(fh.data['dataBlocks'][0]['categories'], []) def test_loop(self): """Test LoopWriter class""" fh = MockFh() sys.modules['msgpack'] = MockMsgPack writer = ihm.format_bcif.BinaryCifWriter(fh) writer.start_block('ihm') with writer.loop('foo', ["bar", "baz"]) as lp: lp.write(bar='x') lp.write(bar=None, baz='z') lp.write(bar=ihm.unknown, baz='z') lp.write(bar='.', baz='z') lp.write(bar='?', baz='z') lp.write(baz='y') writer.flush() block, = fh.data['dataBlocks'] category, = block['categories'] self.assertEqual(category['name'], 'foo') self.assertEqual(category['rowCount'], 6) cols = sorted(category['columns'], key=lambda x: x['name']) self.assertEqual(len(cols), 2) # Check mask for bar column; literal . and ? 
should not be masked (=0) self.assertEqual(cols[0]['mask']['data'], b'\x00\x01\x02\x00\x00\x01') if __name__ == '__main__': unittest.main() python-ihm-2.7/test/test_geometry.py000066400000000000000000000120701503573337200177110ustar00rootroot00000000000000import utils import os import unittest TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) utils.set_search_paths(TOPDIR) import ihm.geometry class Tests(unittest.TestCase): def test_center(self): """Test Center class""" c = ihm.geometry.Center(1, 2, 3) self.assertEqual(c.x, 1) self.assertEqual(c.y, 2) self.assertEqual(c.z, 3) def test_transformation(self): """Test Transformation class""" _ = ihm.geometry.Transformation([[1, 0, 0], [0, 1, 0], [0, 0, 1]], [1, 2, 3]) def test_identity_transformation(self): """Test identity transformation""" t = ihm.geometry.Transformation.identity() for i in range(3): self.assertAlmostEqual(t.tr_vector[i], 0., delta=0.1) for j in range(3): self.assertAlmostEqual(t.rot_matrix[i][j], 1. if i == j else 0., delta=0.1) def test_geometric_object(self): """Test GeometricObject bass class""" g = ihm.geometry.GeometricObject(name='foo', description='bar') self.assertEqual(g.name, 'foo') self.assertEqual(g.description, 'bar') self.assertEqual(g.type, 'other') def test_sphere(self): """Test Sphere class""" c = ihm.geometry.Center(1, 2, 3) t = ihm.geometry.Transformation([[1, 0, 0], [0, 1, 0], [0, 0, 1]], [1, 2, 3]) g = ihm.geometry.Sphere(center=c, transformation=t, radius=5) self.assertIsNone(g.name) self.assertEqual(g.radius, 5) self.assertEqual(g.type, 'sphere') g = ihm.geometry.Sphere(center=c, radius=5) self.assertIsNone(g.transformation) def test_torus(self): """Test Torus class""" c = ihm.geometry.Center(1, 2, 3) t = ihm.geometry.Transformation([[1, 0, 0], [0, 1, 0], [0, 0, 1]], [1, 2, 3]) g = ihm.geometry.Torus(center=c, transformation=t, major_radius=5, minor_radius=1) self.assertIsNone(g.name) self.assertEqual(g.major_radius, 5) self.assertEqual(g.type, 'torus') g = ihm.geometry.Torus(center=c, major_radius=5, minor_radius=1) self.assertIsNone(g.transformation) def test_half_torus(self): """Test HalfTorus class""" c = ihm.geometry.Center(1, 2, 3) t = ihm.geometry.Transformation([[1, 0, 0], [0, 1, 0], [0, 0, 1]], [1, 2, 3]) g = ihm.geometry.HalfTorus(center=c, transformation=t, major_radius=5, minor_radius=1, thickness=0.1, inner=True) self.assertIsNone(g.name) self.assertEqual(g.major_radius, 5) self.assertEqual(g.inner, True) self.assertEqual(g.type, 'half-torus') g = ihm.geometry.HalfTorus(center=c, major_radius=5, minor_radius=1, thickness=0.1, inner=True) self.assertIsNone(g.transformation) def test_axis(self): """Test Axis base class""" g = ihm.geometry.Axis(name='foo', description='bar') self.assertIsNone(g.transformation) self.assertEqual(g.type, 'axis') def test_x_axis(self): """Test XAxis class""" g = ihm.geometry.XAxis(name='foo', description='bar') self.assertIsNone(g.transformation) self.assertEqual(g.type, 'axis') self.assertEqual(g.axis_type, 'x-axis') def test_y_axis(self): """Test YAxis class""" g = ihm.geometry.YAxis(name='foo', description='bar') self.assertIsNone(g.transformation) self.assertEqual(g.type, 'axis') self.assertEqual(g.axis_type, 'y-axis') def test_z_axis(self): """Test ZAxis class""" g = ihm.geometry.ZAxis(name='foo', description='bar') self.assertIsNone(g.transformation) self.assertEqual(g.type, 'axis') self.assertEqual(g.axis_type, 'z-axis') def test_plane(self): """Test Plane base class""" g = ihm.geometry.Plane(name='foo', description='bar') 
self.assertIsNone(g.transformation) self.assertEqual(g.type, 'plane') def test_xy_plane(self): """Test XYPlane base class""" g = ihm.geometry.XYPlane(name='foo', description='bar') self.assertIsNone(g.transformation) self.assertEqual(g.type, 'plane') self.assertEqual(g.plane_type, 'xy-plane') def test_yz_plane(self): """Test YZPlane base class""" g = ihm.geometry.YZPlane(name='foo', description='bar') self.assertIsNone(g.transformation) self.assertEqual(g.type, 'plane') self.assertEqual(g.plane_type, 'yz-plane') def test_xz_plane(self): """Test XZPlane base class""" g = ihm.geometry.XZPlane(name='foo', description='bar') self.assertIsNone(g.transformation) self.assertEqual(g.type, 'plane') self.assertEqual(g.plane_type, 'xz-plane') if __name__ == '__main__': unittest.main() python-ihm-2.7/test/test_location.py000066400000000000000000000316571503573337200177020ustar00rootroot00000000000000import utils import os import unittest TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) utils.set_search_paths(TOPDIR) import ihm.location def _make_test_file(fname): with open(fname, 'w') as fh: fh.write('contents') class Tests(unittest.TestCase): def test_database_location(self): """Test DatabaseLocation""" dl1 = ihm.location.DatabaseLocation('abc', version=1) dl2 = ihm.location.DatabaseLocation('abc', version=1) self.assertEqual(dl1, dl2) dl3 = ihm.location.DatabaseLocation('abc', version=2) self.assertNotEqual(dl1, dl3) # details can change without affecting equality dl4 = ihm.location.DatabaseLocation('abc', version=1, details='foo') self.assertEqual(dl1, dl4) self.assertEqual(dl1.db_name, 'Other') self.assertEqual(dl1.access_code, 'abc') self.assertEqual(dl1.version, 1) self.assertIsNone(dl1.details) self.assertEqual(str(dl1), "") def test_pdb_location(self): """Test PDBLocation""" loc = ihm.location.PDBLocation('1abc', version='foo', details='bar') self.assertEqual(loc.db_name, 'PDB') self.assertEqual(loc.access_code, '1abc') self.assertEqual(loc.version, 'foo') self.assertEqual(loc.details, 'bar') def test_pdb_dev_location(self): """Test PDBDevLocation""" loc = ihm.location.PDBDevLocation( 'PDBDEV_00000029', version='foo', details='bar') self.assertEqual(loc.db_name, 'PDB-Dev') self.assertEqual(loc.access_code, 'PDBDEV_00000029') self.assertEqual(loc.version, 'foo') self.assertEqual(loc.details, 'bar') def test_model_archive_location(self): """Test ModelArchiveLocation""" loc = ihm.location.ModelArchiveLocation( 'ma-bak-cepc-0250', version='foo', details='bar') self.assertEqual(loc.db_name, 'MODEL ARCHIVE') self.assertEqual(loc.access_code, 'ma-bak-cepc-0250') self.assertEqual(loc.version, 'foo') self.assertEqual(loc.details, 'bar') def test_bmrb_location(self): """Test BMRBLocation""" loc = ihm.location.BMRBLocation('27600', version='foo', details='bar') self.assertEqual(loc.db_name, 'BMRB') self.assertEqual(loc.access_code, '27600') self.assertEqual(loc.version, 'foo') self.assertEqual(loc.details, 'bar') def test_emdb_location(self): """Test EMDBLocation""" loc = ihm.location.EMDBLocation('EMDB-123', version='foo', details='bar') self.assertEqual(loc.db_name, 'EMDB') self.assertEqual(loc.access_code, 'EMDB-123') self.assertEqual(loc.version, 'foo') self.assertEqual(loc.details, 'bar') def test_massive_location(self): """Test MassIVELocation class""" d = ihm.location.MassIVELocation('abc', version=1, details='foo') self.assertEqual(d.db_name, 'MASSIVE') self.assertEqual(d.access_code, 'abc') self.assertEqual(d.version, 1) self.assertEqual(d.details, 'foo') def 
test_proxl_location(self): """Test ProXLLocation class""" d = ihm.location.ProXLLocation('abc', version=1, details='foo') self.assertEqual(d.db_name, 'ProXL') self.assertEqual(d.access_code, 'abc') self.assertEqual(d.version, 1) self.assertEqual(d.details, 'foo') def test_jpost_location(self): """Test JPOSTLocation class""" d = ihm.location.JPOSTLocation('abc', version=1, details='foo') self.assertEqual(d.db_name, 'jPOSTrepo') self.assertEqual(d.access_code, 'abc') self.assertEqual(d.version, 1) self.assertEqual(d.details, 'foo') def test_iprox_location(self): """Test IProXLocation class""" d = ihm.location.IProXLocation('abc', version=1, details='foo') self.assertEqual(d.db_name, 'iProX') self.assertEqual(d.access_code, 'abc') self.assertEqual(d.version, 1) self.assertEqual(d.details, 'foo') def test_alpha_fold_db_location(self): """Test AlphaFoldDBLocation class""" d = ihm.location.AlphaFoldDBLocation('abc', version=1, details='foo') self.assertEqual(d.db_name, 'AlphaFoldDB') self.assertEqual(d.access_code, 'abc') self.assertEqual(d.version, 1) self.assertEqual(d.details, 'foo') def test_proteome_xchange_location(self): """Test ProteomeXchangeLocation class""" d = ihm.location.ProteomeXchangeLocation( 'abc', version=1, details='foo') self.assertEqual(d.db_name, 'ProteomeXchange') self.assertEqual(d.access_code, 'abc') self.assertEqual(d.version, 1) self.assertEqual(d.details, 'foo') def test_bmr_big_location(self): """Test BMRbigLocation class""" d = ihm.location.BMRbigLocation( 'abc', version=1, details='foo') self.assertEqual(d.db_name, 'BMRbig') self.assertEqual(d.access_code, 'abc') self.assertEqual(d.version, 1) self.assertEqual(d.details, 'foo') def test_empiar_location(self): """Test EMPIARLocation class""" d = ihm.location.EMPIARLocation('abc', version=1, details='foo') self.assertEqual(d.db_name, 'EMPIAR') self.assertEqual(d.access_code, 'abc') self.assertEqual(d.version, 1) self.assertEqual(d.details, 'foo') def test_sasbdb_location(self): """Test SASBDBLocation class""" d = ihm.location.SASBDBLocation('abc', version=1, details='foo') self.assertEqual(d.db_name, 'SASBDB') self.assertEqual(d.access_code, 'abc') self.assertEqual(d.version, 1) self.assertEqual(d.details, 'foo') def test_pride_location(self): """Test PRIDELocation class""" d = ihm.location.PRIDELocation('abc', version=1, details='foo') self.assertEqual(d.db_name, 'PRIDE') self.assertEqual(d.access_code, 'abc') self.assertEqual(d.version, 1) self.assertEqual(d.details, 'foo') def test_biogrid_location(self): """Test BioGRIDLocation class""" d = ihm.location.BioGRIDLocation('abc', version=1, details='foo') self.assertEqual(d.db_name, 'BioGRID') self.assertEqual(d.access_code, 'abc') self.assertEqual(d.version, 1) self.assertEqual(d.details, 'foo') def test_location(self): """Test Location base class""" loc = ihm.location.Location(details='foo') loc._allow_duplicates = True self.assertEqual(loc._eq_vals(), id(loc)) # Locations should never compare equal to None self.assertNotEqual(loc, None) def test_file_location_local(self): """Test InputFileLocation with a local file""" # Make tmpdir under current directory, as it's not always possible to # get a relative path from cwd to /tmp (e.g. 
on Windows where they may # be on different drives) with utils.temporary_directory('.') as tmpdir: fname = os.path.join(tmpdir, 'test.pdb') _make_test_file(fname) loc = ihm.location.InputFileLocation(fname) self.assertEqual(loc.path, os.path.abspath(fname)) self.assertIsNone(loc.repo) self.assertEqual(loc.file_size, 8) def test_file_location_local_not_exist(self): """Test InputFileLocation with a local file that doesn't exist""" with utils.temporary_directory() as tmpdir: fname = os.path.join(tmpdir, 'test.pdb') self.assertRaises( ValueError, ihm.location.InputFileLocation, fname) def test_file_location_repo(self): """Test InputFileLocation with a file in a repository""" r = ihm.location.Repository(doi='1.2.3.4') loc = ihm.location.InputFileLocation('foo/bar', repo=r) self.assertEqual(loc.path, 'foo/bar') self.assertEqual(loc.repo, r) self.assertIsNone(loc.file_size) self.assertEqual(str(loc), "") self.assertEqual(str(r), "") # locations should only compare equal if path and repo both match loc2 = ihm.location.InputFileLocation('foo/bar', repo=r) self.assertEqual(loc, loc2) loc3 = ihm.location.InputFileLocation('otherpath', repo=r) self.assertNotEqual(loc, loc3) r2 = ihm.location.Repository(doi='5.6.7.8') loc4 = ihm.location.InputFileLocation('foo/bar', repo=r2) self.assertNotEqual(loc, loc4) loc5 = ihm.location.InputFileLocation(None, repo=r) self.assertNotEqual(loc, loc5) loc6 = ihm.location.InputFileLocation(None, repo=r2) self.assertNotEqual(loc, loc6) def test_repository_equality(self): """Test Repository equality""" r1 = ihm.location.Repository(doi='foo') r2 = ihm.location.Repository(doi='foo') r3 = ihm.location.Repository(doi='foo', url='bar') r4 = ihm.location.Repository(doi='bar') self.assertEqual(r1, r2) self.assertEqual(hash(r1), hash(r2)) self.assertNotEqual(r1, r3) self.assertNotEqual(r1, r4) def test_repository(self): """Test Repository""" # Make tmpdir under current directory, as it's not always possible to # get a relative path from cwd to /tmp (e.g. 
on Windows where they may # be on different drives) with utils.temporary_directory(os.getcwd()) as tmpdir: subdir = os.path.join(tmpdir, 'subdir') subdir2 = os.path.join(tmpdir, 'subdir2') os.mkdir(subdir) _make_test_file(os.path.join(subdir, 'bar')) s = ihm.location.Repository(doi='10.5281/zenodo.46266', root=os.path.relpath(tmpdir), url='foo', top_directory='baz') self.assertEqual(s._root, tmpdir) self.assertEqual(s.url, 'foo') self.assertEqual(s.top_directory, 'baz') loc = ihm.location.InputFileLocation( os.path.relpath(os.path.join(subdir, 'bar'))) self.assertIsNone(loc.repo) ihm.location.Repository._update_in_repos(loc, [s]) self.assertEqual(loc.repo.doi, '10.5281/zenodo.46266') self.assertEqual(loc.path, os.path.join('subdir', 'bar')) # Shouldn't touch locations that are already in repos loc = ihm.location.InputFileLocation(repo='foo', path='bar') self.assertEqual(loc.repo, 'foo') ihm.location.Repository._update_in_repos(loc, [s]) self.assertEqual(loc.repo, 'foo') # Shortest match should win loc = ihm.location.InputFileLocation( os.path.relpath(os.path.join(subdir, 'bar'))) s2 = ihm.location.Repository(doi='10.5281/zenodo.46280', root=os.path.relpath(subdir), url='foo', top_directory='baz') # Repositories that aren't above the file shouldn't count s3 = ihm.location.Repository(doi='10.5281/zenodo.56280', root=os.path.relpath(subdir2), url='foo', top_directory='baz') ihm.location.Repository._update_in_repos(loc, [s2, s3, s]) self.assertEqual(loc.repo.doi, '10.5281/zenodo.46280') self.assertEqual(loc.path, 'bar') def test_repository_no_checkout(self): """Test Repository with no checkout""" r = ihm.location.Repository(doi='10.5281/zenodo.46266') f = ihm.location.InputFileLocation(repo=r, path='foo') self.assertEqual(f.repo.doi, '10.5281/zenodo.46266') self.assertEqual(f.path, 'foo') self.assertEqual(str(f), "") def test_repository_get_full_path(self): """Test Repository._get_full_path""" r = ihm.location.Repository(doi='10.5281/zenodo.46266', top_directory='/foo') self.assertEqual(r._get_full_path('bar'), '/foo%sbar' % os.sep) def test_file_locations(self): """Test FileLocation derived classes""" r = ihm.location.Repository(doi='10.5281/zenodo.46266') loc = ihm.location.InputFileLocation(repo=r, path='foo') self.assertEqual(loc.content_type, 'Input data or restraints') loc = ihm.location.OutputFileLocation(repo=r, path='foo') self.assertEqual(loc.content_type, 'Modeling or post-processing output') loc = ihm.location.WorkflowFileLocation(repo=r, path='foo') self.assertEqual(loc.content_type, 'Modeling workflow or script') loc = ihm.location.VisualizationFileLocation(repo=r, path='foo') self.assertEqual(loc.content_type, 'Visualization script') if __name__ == '__main__': unittest.main() python-ihm-2.7/test/test_main.py000066400000000000000000001506021503573337200170060ustar00rootroot00000000000000import utils import os import unittest import urllib.request TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) utils.set_search_paths(TOPDIR) import ihm import ihm.location import ihm.representation import ihm.model import ihm.source import ihm.flr import ihm.multi_state_scheme class Tests(unittest.TestCase): def test_system(self): """Test System class""" s = ihm.System(title='test system') self.assertEqual(s.title, 'test system') self.assertEqual(s.id, 'model') self.assertEqual(s.databases, []) def test_chem_comp(self): """Test ChemComp class""" cc1 = ihm.ChemComp(id='GLY', code='G', code_canonical='G') self.assertEqual(cc1.id, 'GLY') self.assertEqual(cc1.code, 'G') 
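# Minimal usage sketch of the ihm.location pattern exercised in the tests
# above: a Repository identified by DOI, plus file locations that reference
# it.  The DOI matches the one used in the tests; the URL and file paths are
# placeholder values.
import ihm.location

repo = ihm.location.Repository(doi='10.5281/zenodo.46266',
                               url='https://example.com/archive.zip')
data = ihm.location.InputFileLocation(path='inputs/restraints.csv', repo=repo)
script = ihm.location.WorkflowFileLocation(path='scripts/model.py', repo=repo)
assert data.content_type == 'Input data or restraints'
assert script.content_type == 'Modeling workflow or script'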
self.assertEqual(cc1.code_canonical, 'G') self.assertEqual(cc1.type, 'other') self.assertEqual(str(cc1), '') self.assertIsNone(cc1.formula) self.assertIsNone(cc1.formula_weight) cc2 = ihm.ChemComp(id='GLY', code='G', code_canonical='G') cc3 = ihm.ChemComp(id='G', code='G', code_canonical='G') self.assertEqual(cc1, cc2) self.assertEqual(hash(cc1), hash(cc2)) self.assertNotEqual(cc1, cc3) cc4 = ihm.ChemComp(id='GLY', code='G', code_canonical='G', formula=ihm.unknown) self.assertIsNone(cc4.formula_weight) def test_chem_comp_id_5(self): """Test new-style 5-character CCD IDs in ChemComp""" cc1 = ihm.ChemComp(id='MYGLY', code='G', code_canonical='G') self.assertEqual(cc1.id, 'MYGLY') self.assertEqual(cc1.code, 'G') self.assertEqual(cc1.code_canonical, 'G') self.assertEqual(cc1.type, 'other') def test_chem_comp_weight(self): """Test ChemComp.formula_weight""" # No formula cc = ihm.ChemComp('X', 'X', 'X', formula=None) self.assertIsNone(cc.formula_weight) # Bad formula cc = ihm.ChemComp('X', 'X', 'X', formula='C90H') self.assertRaises(ValueError, lambda x: x.formula_weight, cc) # Formula with unknown element cc = ihm.ChemComp('X', 'X', 'X', formula='C5 Es') self.assertIsNone(cc.formula_weight) # Formula with known elements and no charge cc = ihm.ChemComp('X', 'X', 'X', formula='C6 H12 P') self.assertAlmostEqual(cc.formula_weight, 115.136, delta=0.01) # Formula with element 'X' (e.g. GLX, ASX) cc = ihm.ChemComp('X', 'X', 'X', formula='C6 H12 P X2') self.assertAlmostEqual(cc.formula_weight, 115.136, delta=0.01) # Formula with known elements and formal charge cc = ihm.ChemComp('X', 'X', 'X', formula='C6 H12 P 1') self.assertAlmostEqual(cc.formula_weight, 115.136, delta=0.01) def test_peptide_chem_comp(self): """Test PeptideChemComp class""" cc1 = ihm.PeptideChemComp(id='GLY', code='G', code_canonical='G') self.assertEqual(cc1.type, 'peptide linking') def test_l_peptide_chem_comp(self): """Test LPeptideChemComp class""" cc1 = ihm.LPeptideChemComp(id='MET', code='M', code_canonical='M') self.assertEqual(cc1.type, 'L-peptide linking') def test_rna_chem_comp(self): """Test RNAChemComp class""" cc1 = ihm.RNAChemComp(id='G', code='G', code_canonical='G') self.assertEqual(cc1.type, 'RNA linking') def test_dna_chem_comp(self): """Test DNAChemComp class""" cc1 = ihm.DNAChemComp(id='DG', code='DG', code_canonical='G') self.assertEqual(cc1.type, 'DNA linking') def test_sugar_chem_comp(self): """Test SaccharideChemComp class and subclasses""" cc = ihm.SaccharideChemComp('NAG') self.assertEqual(cc.type, 'saccharide') cc = ihm.LSaccharideChemComp('NAG') self.assertEqual(cc.type, 'L-saccharide') cc = ihm.DSaccharideChemComp('NAG') self.assertEqual(cc.type, 'D-saccharide') cc = ihm.LSaccharideAlphaChemComp('NAG') self.assertEqual(cc.type, 'L-saccharide, alpha linking') cc = ihm.LSaccharideBetaChemComp('NAG') self.assertEqual(cc.type, 'L-saccharide, beta linking') cc = ihm.DSaccharideAlphaChemComp('NAG') self.assertEqual(cc.type, 'D-saccharide, alpha linking') cc = ihm.DSaccharideBetaChemComp('NAG') self.assertEqual(cc.type, 'D-saccharide, beta linking') def test_non_polymer_chem_comp(self): """Test NonPolymerChemComp class""" cc1 = ihm.NonPolymerChemComp('HEM') self.assertEqual(cc1.type, 'non-polymer') self.assertEqual(cc1.code_canonical, 'X') cc2 = ihm.NonPolymerChemComp('HEM', code_canonical='G') self.assertEqual(cc2.code_canonical, 'G') def test_water_chem_comp(self): """Test WaterChemComp class""" cc1 = ihm.WaterChemComp() self.assertEqual(cc1.type, 'non-polymer') def test_l_peptide_alphabet(self): """Test 
LPeptideAlphabet class""" a = ihm.LPeptideAlphabet self.assertEqual(a._comps['G'].type, 'peptide linking') self.assertEqual(a._comps['M'].id, 'MET') self.assertEqual(a._comps['M'].code, 'M') self.assertEqual(a._comps['M'].code_canonical, 'M') self.assertEqual(a._comps['M'].type, 'L-peptide linking') self.assertEqual(a._comps['M'].name, "METHIONINE") self.assertEqual(a._comps['M'].formula, 'C5 H11 N O2 S') self.assertAlmostEqual(a._comps['M'].formula_weight, 149.211, delta=0.01) self.assertEqual(a._comps['Z'].id, 'GLX') self.assertEqual(a._comps['Z'].name, "GLU/GLN AMBIGUOUS") self.assertEqual(a._comps['Z'].formula, 'C5 H8 N O2 X2') self.assertAlmostEqual(a._comps['Z'].formula_weight, 114.124, delta=0.01) a = ihm.LPeptideAlphabet() self.assertIn('MSE', a) self.assertNotIn('DG', a) self.assertEqual(len(a.keys), 25) self.assertEqual(len(a.values), 25) self.assertEqual(sorted(a.keys)[0], 'A') self.assertEqual(len(a.items), 25) item0 = sorted(a.items)[0] self.assertEqual(item0[0], 'A') self.assertEqual(item0[1].id, 'ALA') self.assertEqual(a['MSE'].id, 'MSE') self.assertEqual(a['MSE'].code, 'MSE') self.assertEqual(a['MSE'].code_canonical, 'M') self.assertEqual(a['MSE'].type, 'L-peptide linking') self.assertEqual(a['UNK'].id, 'UNK') self.assertEqual(a['UNK'].code, 'UNK') self.assertEqual(a['UNK'].code_canonical, 'X') self.assertEqual(a['UNK'].type, 'L-peptide linking') def test_d_peptide_alphabet(self): """Test DPeptideAlphabet class""" dcode_from_canon = {'A': 'DAL', 'C': 'DCY', 'D': 'DAS', 'E': 'DGL', 'F': 'DPN', 'H': 'DHI', 'I': 'DIL', 'K': 'DLY', 'L': 'DLE', 'M': 'MED', 'N': 'DSG', 'P': 'DPR', 'Q': 'DGN', 'R': 'DAR', 'S': 'DSN', 'T': 'DTH', 'V': 'DVA', 'W': 'DTR', 'Y': 'DTY', 'G': 'G'} da = ihm.DPeptideAlphabet la = ihm.LPeptideAlphabet # Weights and formulae of all standard amino acids should be identical # between L- and D- forms (except for lysine, where the formal charge # differs between the two forms) for canon in 'ACDEFGHILMNPQRSTVWY': lcode = canon dcode = dcode_from_canon[canon] self.assertEqual(da._comps[dcode].formula, la._comps[lcode].formula) self.assertAlmostEqual(da._comps[dcode].formula_weight, la._comps[lcode].formula_weight, delta=0.01) def test_rna_alphabet(self): """Test RNAAlphabet class""" a = ihm.RNAAlphabet self.assertEqual(a._comps['A'].id, 'A') self.assertEqual(a._comps['A'].code, 'A') self.assertEqual(a._comps['A'].code_canonical, 'A') def test_dna_alphabet(self): """Test DNAAlphabet class""" a = ihm.DNAAlphabet self.assertEqual(a._comps['DA'].id, 'DA') self.assertEqual(a._comps['DA'].code, 'DA') self.assertEqual(a._comps['DA'].code_canonical, 'A') def test_chem_descriptor(self): """Test ChemDescriptor class""" d1 = ihm.ChemDescriptor( 'EDC', chemical_name='test-EDC', chem_comp_id='test-chem-comp', common_name='test-common-EDC', smiles='CCN=C=NCCCN(C)C') self.assertEqual(d1.auth_name, 'EDC') self.assertEqual(d1.chem_comp_id, 'test-chem-comp') self.assertEqual(d1.chemical_name, 'test-EDC') self.assertEqual(d1.common_name, 'test-common-EDC') self.assertEqual(d1.smiles, 'CCN=C=NCCCN(C)C') self.assertIsNone(d1.inchi) self.assertIsNone(d1.inchi_key) def test_entity(self): """Test Entity class""" e1 = ihm.Entity('AHCD', description='foo') # Should compare identical if sequences are the same, if not branched e2 = ihm.Entity('AHCD', description='bar') e3 = ihm.Entity('AHCDE', description='foo') heme = ihm.Entity([ihm.NonPolymerChemComp('HEM')]) sugar = ihm.Entity([ihm.SaccharideChemComp('NAG'), ihm.SaccharideChemComp('FUC')]) self.assertEqual(e1, e2) self.assertNotEqual(e1, 
e3) self.assertEqual(e1.seq_id_range, (1, 4)) self.assertEqual(e3.seq_id_range, (1, 5)) sugar2 = ihm.Entity([ihm.SaccharideChemComp('NAG'), ihm.SaccharideChemComp('FUC')]) # Branched entities never compare equal unless they are the same object self.assertEqual(sugar, sugar) self.assertNotEqual(sugar, sugar2) # seq_id does not exist for nonpolymers self.assertEqual(heme.seq_id_range, (None, None)) # We do have an internal seq_id_range for branched entities self.assertEqual(sugar.seq_id_range, (1, 2)) def test_entity_weight(self): """Test Entity.formula_weight""" e1 = ihm.Entity('AHCD') self.assertAlmostEqual(e1.formula_weight, 499.516, delta=0.1) # Entity containing a component with unknown weight heme = ihm.Entity([ihm.NonPolymerChemComp('HEM')]) self.assertIsNone(heme.formula_weight) def test_entity_type(self): """Test Entity.type""" protein = ihm.Entity('AHCD') heme = ihm.Entity([ihm.NonPolymerChemComp('HEM')]) water = ihm.Entity([ihm.WaterChemComp()]) sugar = ihm.Entity([ihm.SaccharideChemComp('NAG'), ihm.SaccharideChemComp('FUC')]) self.assertEqual(protein.type, 'polymer') self.assertTrue(protein.is_polymeric()) self.assertFalse(protein.is_branched()) self.assertEqual(heme.type, 'non-polymer') self.assertFalse(heme.is_polymeric()) self.assertFalse(heme.is_branched()) self.assertEqual(water.type, 'water') self.assertFalse(water.is_polymeric()) self.assertFalse(water.is_branched()) self.assertEqual(sugar.type, 'branched') self.assertFalse(sugar.is_polymeric()) self.assertTrue(sugar.is_branched()) # A single sugar should be classified non-polymer single_sugar = ihm.Entity([ihm.SaccharideChemComp('NAG')]) self.assertEqual(single_sugar.type, 'non-polymer') self.assertFalse(single_sugar.is_polymeric()) self.assertFalse(single_sugar.is_branched()) # A single amino acid should be classified non-polymer single_aa = ihm.Entity('A') self.assertEqual(single_aa.type, 'non-polymer') self.assertFalse(single_aa.is_polymeric()) self.assertFalse(single_aa.is_branched()) # ... unless forced polymer single_aa._force_polymer = True self.assertEqual(single_aa.type, 'polymer') self.assertTrue(single_aa.is_polymeric()) self.assertFalse(single_aa.is_branched()) # An entity with no sequence is a polymer empty = ihm.Entity([]) self.assertEqual(empty.type, 'polymer') self.assertTrue(empty.is_polymeric()) self.assertFalse(empty.is_branched()) # ... 
unless hint branched empty._hint_branched = True self.assertEqual(empty.type, 'branched') self.assertFalse(empty.is_polymeric()) self.assertTrue(empty.is_branched()) def test_entity_src_method_default(self): """Test default values of Entity.src_method""" protein = ihm.Entity('AHCD') water = ihm.Entity([ihm.WaterChemComp()]) self.assertEqual(protein.src_method, "man") self.assertEqual(water.src_method, "nat") # src_method is readonly def try_set(): protein.src_method = 'foo' self.assertRaises(TypeError, try_set) def test_entity_source(self): """Test setting Entity source""" man = ihm.Entity('AHCD', source=ihm.source.Manipulated()) self.assertEqual(man.src_method, "man") nat = ihm.Entity('AHCD', source=ihm.source.Natural()) self.assertEqual(nat.src_method, "nat") syn = ihm.Entity('AHCD', source=ihm.source.Synthetic()) self.assertEqual(syn.src_method, "syn") def test_software(self): """Test Software class""" s1 = ihm.Software(name='foo', version='1.0', classification='1', description='2', location='3') s2 = ihm.Software(name='foo', version='2.0', classification='4', description='5', location='6') s3 = ihm.Software(name='foo', version='1.0', classification='7', description='8', location='9') s4 = ihm.Software(name='bar', version='1.0', classification='a', description='b', location='c') s5 = ihm.Software(name='bar', version=ihm.unknown, classification='a', description='b', location='c', citation='foo') # Should compare equal iff name and version both match self.assertEqual(s1, s3) self.assertEqual(hash(s1), hash(s3)) self.assertNotEqual(s1, s2) self.assertNotEqual(s1, s4) # Unknown values should not compare equal to known self.assertNotEqual(s4, s5) self.assertNotEqual(hash(s4), hash(s5)) def test_citation(self): """Test Citation class""" s = ihm.Citation(title='Test paper', journal='J Mol Biol', volume=45, page_range=(1, 20), year=2016, authors=['Smith A', 'Jones B'], doi='10.2345/S1384107697000225', pmid='1234') self.assertEqual(s.title, 'Test paper') def _get_from_pubmed_id(self, json_fname, is_primary=False): def mock_urlopen(url): self.assertTrue(url.endswith('&id=29539637')) fname = utils.get_input_file_name(TOPDIR, json_fname) return open(fname) # Need to mock out urllib.request so we don't hit the network # (expensive) every time we test try: orig_urlopen = urllib.request.urlopen urllib.request.urlopen = mock_urlopen return ihm.Citation.from_pubmed_id(29539637, is_primary=is_primary) finally: urllib.request.urlopen = orig_urlopen def test_citation_from_pubmed_id(self): """Test Citation.from_pubmed_id()""" c = self._get_from_pubmed_id('pubmed_api.json') self.assertEqual( c.title, 'Integrative structure and functional anatomy of a nuclear ' 'pore complex (test of python-ihm lib).') self.assertEqual(c.journal, 'Nature') self.assertEqual(c.volume, '555') self.assertEqual(c.page_range, ['475', '482']) self.assertEqual(c.year, '2018') self.assertEqual(c.pmid, 29539637) self.assertEqual(c.doi, '10.1038/nature26003') self.assertEqual(len(c.authors), 32) self.assertEqual(c.authors[0], 'Kim, S.J.') self.assertFalse(c.is_primary) c = self._get_from_pubmed_id('pubmed_api.json', is_primary=True) self.assertTrue(c.is_primary) def test_citation_from_pubmed_id_one_page(self): """Test Citation.from_pubmed_id() with page rather than range""" c = self._get_from_pubmed_id('pubmed_api_one_page.json') self.assertEqual(c.page_range, '475') def test_citation_from_pubmed_id_no_volume_page(self): """Test Citation.from_pubmed_id() with no volume or page info""" c = 
self._get_from_pubmed_id('pubmed_api_no_pages.json') self.assertIsNone(c.page_range) self.assertIsNone(c.volume) def test_citation_from_pubmed_id_no_doi(self): """Test Citation.from_pubmed_id() with no DOI""" c = self._get_from_pubmed_id('pubmed_api_no_doi.json') self.assertEqual( c.title, 'Integrative structure and functional anatomy of a nuclear ' 'pore complex (test of python-ihm lib).') self.assertIsNone(c.doi) # Make sure that page range "475-82" is handled as 475,482 self.assertEqual(c.page_range, ['475', '482']) def test_entity_residue(self): """Test Residue derived from an Entity""" e = ihm.Entity('AHCDAH') r = e.residue(3) self.assertEqual(r.entity, e) self.assertIsNone(r.asym) self.assertEqual(r.seq_id, 3) self.assertEqual(r.comp.id, 'CYS') self.assertRaises(IndexError, e.residue, -3) self.assertRaises(IndexError, e.residue, 30) def test_water_asym(self): """Test WaterAsymUnit class""" e = ihm.Entity('AHCDAH') water = ihm.Entity([ihm.WaterChemComp()]) a = ihm.AsymUnit(e) self.assertEqual(a.seq_id_range, (1, 6)) self.assertEqual(len(a.sequence), 6) self.assertEqual(a.number_of_molecules, 1) a = ihm.WaterAsymUnit(water, number=3) self.assertEqual(a.seq_id_range, (1, 3)) self.assertEqual(len(a.sequence), 3) self.assertEqual(a.number_of_molecules, 3) self.assertRaises(TypeError, ihm.AsymUnit, water) self.assertRaises(TypeError, ihm.WaterAsymUnit, e, number=3) # Residue range checks are not done for waters, currently _ = a.residue(-3) _ = a.residue(30) def test_asym_unit_residue(self): """Test Residue derived from an AsymUnit""" e = ihm.Entity('AHCDAH') a = ihm.AsymUnit(e, auth_seq_id_map=5) r = a.residue(3) self.assertEqual(r.entity, e) self.assertEqual(r.asym, a) self.assertEqual(r.seq_id, 3) self.assertEqual(r.auth_seq_id, 8) self.assertIsNone(r.ins_code) self.assertEqual(r.comp.id, 'CYS') self.assertRaises(IndexError, e.residue, -3) self.assertRaises(IndexError, e.residue, 30) def test_atom_entity(self): """Test Atom class built from an Entity""" e = ihm.Entity('AHCDAH') a = e.residue(3).atom('CA') self.assertEqual(a.id, 'CA') self.assertEqual(a.residue.entity, e) self.assertEqual(a.residue.seq_id, 3) self.assertEqual(a.entity, e) self.assertIsNone(a.asym) self.assertEqual(a.seq_id, 3) def test_atom_asym(self): """Test Atom class built from an AsymUnit""" e = ihm.Entity('AHCDAH') asym = ihm.AsymUnit(e) a = asym.residue(3).atom('CA') self.assertEqual(a.id, 'CA') self.assertEqual(a.residue.seq_id, 3) self.assertEqual(a.entity, e) self.assertEqual(a.asym, asym) self.assertEqual(a.seq_id, 3) def test_entity_range(self): """Test EntityRange class""" e = ihm.Entity('AHCDAH') heme = ihm.Entity([ihm.NonPolymerChemComp('HEM')]) sugar = ihm.Entity([ihm.SaccharideChemComp('NAG')]) e._id = 42 self.assertEqual(e.seq_id_range, (1, 6)) r = e(3, 4) self.assertEqual(r.seq_id_range, (3, 4)) self.assertEqual(r._id, 42) # Cannot create ranges for nonpolymeric or branched entities self.assertRaises(TypeError, heme.__call__, 1, 1) self.assertRaises(TypeError, sugar.__call__, 1, 1) samer = e(3, 4) otherr = e(2, 4) self.assertEqual(r, samer) self.assertEqual(hash(r), hash(samer)) self.assertNotEqual(r, otherr) self.assertNotEqual(r, e) # entity_range != entity # Cannot create reversed range self.assertRaises(ValueError, e.__call__, 3, 1) # Cannot create out-of-range range self.assertRaises(IndexError, e.__call__, -3, 1) self.assertRaises(IndexError, e.__call__, 1, 10) def test_asym_range(self): """Test AsymUnitRange class""" e = ihm.Entity('AHCDAH') heme = ihm.Entity([ihm.NonPolymerChemComp('HEM')]) 
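# Minimal sketch of the Entity / AsymUnit API tested above, assuming
# python-ihm is installed; the sequence and chain details are placeholders.
import ihm

entity = ihm.Entity('AHCDAH', description='example protein')
asym = ihm.AsymUnit(entity, 'example chain')
res = asym.residue(3)                  # 1-based seq_id; position 3 is CYS
assert res.comp.id == 'CYS'
atom = res.atom('CA')
assert atom.seq_id == 3 and atom.asym is asym
rng = asym(3, 4)                       # AsymUnitRange over seq_ids 3-4
assert rng.seq_id_range == (3, 4)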
sugar = ihm.Entity([ihm.SaccharideChemComp('NAG'), ihm.SaccharideChemComp('FUC')]) a = ihm.AsymUnit(e, "testdetail") aheme = ihm.AsymUnit(heme) asugar = ihm.AsymUnit(sugar) a._id = 42 self.assertEqual(a.seq_id_range, (1, 6)) # seq_id is not defined for nonpolymers self.assertEqual(aheme.seq_id_range, (None, None)) # We use seq_id internally for branched entities self.assertEqual(asugar.seq_id_range, (1, 2)) r = a(3, 4) self.assertEqual(r.seq_id_range, (3, 4)) self.assertEqual(r._id, 42) self.assertEqual(r.entity, e) self.assertEqual(r.details, "testdetail") # Cannot create ranges for nonpolymeric or branched entities self.assertRaises(TypeError, aheme.__call__, 1, 1) self.assertRaises(TypeError, asugar.__call__, 1, 1) samer = a(3, 4) otherr = a(2, 4) self.assertEqual(r, samer) self.assertEqual(hash(r), hash(samer)) self.assertNotEqual(r, otherr) self.assertNotEqual(r, a) # asym_range != asym self.assertNotEqual(r, e(3, 4)) # asym_range != entity_range self.assertNotEqual(r, e) # asym_range != entity # Cannot create reversed range self.assertRaises(ValueError, a.__call__, 3, 1) # Cannot create out-of-range range self.assertRaises(IndexError, a.__call__, -3, 1) self.assertRaises(IndexError, a.__call__, 1, 10) def test_asym_segment(self): """Test AsymUnitSegment class""" e = ihm.Entity('AHCDAH') a = ihm.AsymUnit(e) seg = a.segment('AH--CD', 1, 4) self.assertEqual(seg.gapped_sequence, 'AH--CD') self.assertEqual(seg.seq_id_range, (1, 4)) def test_auth_seq_id_offset(self): """Test auth_seq_id offset from seq_id""" e = ihm.Entity('AHCDAH') a = ihm.AsymUnit(e, auth_seq_id_map=5) self.assertEqual(a._get_auth_seq_id_ins_code(1), (6, None)) def test_auth_seq_id_dict(self): """Test auth_seq_id dict map from seq_id""" e = ihm.Entity('AHCDAH') a = ihm.AsymUnit(e, auth_seq_id_map={1: 0, 2: (4, 'A')}) self.assertEqual(a._get_auth_seq_id_ins_code(1), (0, None)) self.assertEqual(a._get_auth_seq_id_ins_code(2), (4, 'A')) self.assertEqual(a._get_auth_seq_id_ins_code(3), (3, None)) def test_auth_seq_id_list(self): """Test auth_seq_id list map from seq_id""" e = ihm.Entity('AHCDAH') a = ihm.AsymUnit(e, auth_seq_id_map=[None, 0, 4]) self.assertEqual(a._get_auth_seq_id_ins_code(1), (0, None)) self.assertEqual(a._get_auth_seq_id_ins_code(2), (4, None)) self.assertEqual(a._get_auth_seq_id_ins_code(3), (3, None)) def test_orig_auth_seq_id_none(self): """Test default orig_auth_seq_idm_map (None)""" e = ihm.Entity('AHCDAH') a = ihm.AsymUnit(e, auth_seq_id_map={1: 0, 2: (4, 'A')}) self.assertIsNone(a.orig_auth_seq_id_map) self.assertEqual(a._get_pdb_auth_seq_id_ins_code(1), (0, 0, None)) self.assertEqual(a._get_pdb_auth_seq_id_ins_code(2), (4, 4, 'A')) self.assertEqual(a._get_pdb_auth_seq_id_ins_code(3), (3, 3, None)) def test_orig_auth_seq_id_dict(self): """Test orig_auth_seq_id_map as dict""" e = ihm.Entity('AHCDAH') a = ihm.AsymUnit(e, auth_seq_id_map={1: 0, 2: (4, 'A')}, orig_auth_seq_id_map={1: 5}) self.assertEqual(a._get_pdb_auth_seq_id_ins_code(1), (0, 5, None)) self.assertEqual(a._get_pdb_auth_seq_id_ins_code(2), (4, 4, 'A')) self.assertEqual(a._get_pdb_auth_seq_id_ins_code(3), (3, 3, None)) def test_water_orig_auth_seq_id_none(self): """Test default water orig_auth_seq_id_map (None)""" water = ihm.Entity([ihm.WaterChemComp()]) a = ihm.WaterAsymUnit(water, number=3, auth_seq_id_map={1: 0, 2: (4, 'A')}) self.assertIsNone(a.orig_auth_seq_id_map) self.assertEqual(a._get_pdb_auth_seq_id_ins_code(1), (0, 0, None)) self.assertEqual(a._get_pdb_auth_seq_id_ins_code(2), (4, 4, 'A')) 
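# Sketch of the auth_seq_id_map forms accepted by AsymUnit, as exercised
# above: a constant offset, or a dict (optionally mapping to a
# (number, insertion-code) tuple); a list indexed by seq_id also works.
# The sequence and numbering values are placeholders.
import ihm

entity = ihm.Entity('AHCDAH')
offset_asym = ihm.AsymUnit(entity, auth_seq_id_map=5)    # auth = seq_id + 5
assert offset_asym.residue(3).auth_seq_id == 8
dict_asym = ihm.AsymUnit(entity, auth_seq_id_map={1: 0, 2: (4, 'A')})
assert dict_asym.residue(1).auth_seq_id == 0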
self.assertEqual(a._get_pdb_auth_seq_id_ins_code(3), (3, 3, None)) def test_water_orig_auth_seq_id_dict(self): """Test water orig_auth_seq_id_map as dict""" water = ihm.Entity([ihm.WaterChemComp()]) a = ihm.WaterAsymUnit(water, number=3, auth_seq_id_map={1: 0, 2: (4, 'A')}, orig_auth_seq_id_map={1: 5}) self.assertEqual(a._get_pdb_auth_seq_id_ins_code(1), (0, 5, None)) self.assertEqual(a._get_pdb_auth_seq_id_ins_code(2), (4, 4, 'A')) self.assertEqual(a._get_pdb_auth_seq_id_ins_code(3), (3, 3, None)) def test_assembly(self): """Test Assembly class""" e1 = ihm.Entity('AHCD') e2 = ihm.Entity('AHC') a = ihm.Assembly([e1, e2], name='foo', description='bar') self.assertEqual(a.name, 'foo') self.assertEqual(a.description, 'bar') def test_assembly_signature(self): """Test Assembly._signature method""" e1 = ihm.Entity('AHCD') a1 = ihm.AsymUnit(e1) e2 = ihm.Entity('AHC') a2 = ihm.AsymUnit(e2) enonpol = ihm.Entity([ihm.NonPolymerChemComp('HEM')], description='heme') anonpol = ihm.AsymUnit(enonpol) asm = ihm.Assembly([a1, a2, anonpol], name='foo', description='bar') # Component order, name, description do not affect signature asm2 = ihm.Assembly([a2, a1, anonpol], name='other', description='other') self.assertEqual(asm._signature(), asm2._signature()) # Different components, different signatures asm2 = ihm.Assembly([a1, anonpol]) self.assertNotEqual(asm._signature(), asm2._signature()) # Different component ranges, different signatures asm2 = ihm.Assembly([a1, a2(1, 2), anonpol]) self.assertNotEqual(asm._signature(), asm2._signature()) # Same signature if ranges completely overlap asm2 = ihm.Assembly([a1, a2(1, 2), a2(3, 3), anonpol]) self.assertEqual(asm._signature(), asm2._signature()) def test_remove_identical(self): """Test remove_identical function""" x = {} y = {} all_objs = ihm._remove_identical([x, x, y]) # Order should be preserved, but only one x should be returned self.assertEqual(list(all_objs), [x, y]) def test_all_model_groups(self): """Test _all_model_groups() method""" model_group1 = 'mg1' model_group2 = 'mg2' model_group3 = 'mg3' model_group4 = 'mg4' state1 = [model_group1, model_group2] state2 = [model_group2, model_group2] s = ihm.System() s.state_groups.append([state1, state2]) proc = ihm.model.OrderedProcess("time steps") edge = ihm.model.ProcessEdge(model_group1, model_group2) step = ihm.model.ProcessStep([edge], "Linear reaction") proc.steps.append(step) s.ordered_processes.append(proc) e1 = ihm.model.Ensemble(model_group=model_group3, num_models=10, post_process=None, name='cluster1', clustering_method='Hierarchical', clustering_feature='RMSD', precision=4.2) ss1 = ihm.model.Subsample(name='foo', num_models=1) ss2 = ihm.model.Subsample(name='foo', num_models=1, model_group=model_group4) e1.subsamples.extend((ss1, ss2)) s.ensembles.append(e1) # Ensemble without a model_group e2 = ihm.model.Ensemble(model_group=None, num_models=10, post_process=None, name='cluster1', clustering_method='Hierarchical', clustering_feature='RMSD', precision=4.2) s.ensembles.append(e2) mg = s._all_model_groups() # List may contain duplicates but only includes states self.assertEqual(list(mg), [model_group1, model_group2, model_group2, model_group2]) mg = s._all_model_groups(only_in_states=False) # List contains all model groups self.assertEqual(list(mg), [model_group1, model_group2, model_group2, model_group2, model_group3, model_group4, model_group1, model_group2]) model_group5 = 'mg5' model_group6 = 'mg6' model_group7 = 'mg7' state3 = [model_group5] state4 = [model_group6] state5 = [model_group7] 
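# Sketch of building an Assembly from the entities, asymmetric units and
# ranges tested above; names and sequences are placeholder values.
import ihm

e1 = ihm.Entity('AHCD')
e2 = ihm.Entity('AHC')
a1 = ihm.AsymUnit(e1)
a2 = ihm.AsymUnit(e2)
# An assembly can mix whole asyms with ranges such as a2(1, 2)
assembly = ihm.Assembly([a1, a2(1, 2)], name='partial assembly',
                        description='chain 1 plus residues 1-2 of chain 2')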
mssc1 = ihm.multi_state_scheme.Connectivity( begin_state=state3, end_state=state4) mssc2 = ihm.multi_state_scheme.Connectivity( begin_state=state5) mssc3 = ihm.multi_state_scheme.Connectivity( begin_state=state3, end_state=state4) mss = ihm.multi_state_scheme.MultiStateScheme( name='mss', connectivities=[mssc1, mssc2, mssc3]) s.multi_state_schemes.append(mss) mg = s._all_model_groups() self.assertEqual(list(mg), [model_group1, model_group2, model_group2, model_group2, model_group5, model_group6, model_group7]) def test_all_models(self): """Test _all_models() method""" class MockModel: pass model1 = MockModel() model2 = MockModel() model_group1 = [model1, model2] model_group2 = [model1, model1] s = ihm.System() s.state_groups.append([[model_group1, model_group2]]) ms = s._all_models() models = [model for group, model in ms] # duplicates should be filtered within groups, but not between groups self.assertEqual(models, [model1, model2, model1]) def test_all_protocols(self): """Test _all_protocols() method""" class MockObject: pass model1 = MockObject() model2 = MockObject() model3 = MockObject() model_group1 = [model1, model2, model3] s = ihm.System() s.state_groups.append([[model_group1]]) p1 = MockObject() p2 = MockObject() s.orphan_protocols.append(p1) model1.protocol = None model2.protocol = p2 model3.protocol = p1 # duplicates should be filtered globally self.assertEqual(list(s._all_protocols()), [p1, p2]) def test_all_representations(self): """Test _all_representations() method""" class MockObject: pass model1 = MockObject() model2 = MockObject() model3 = MockObject() model_group1 = [model1, model2, model3] s = ihm.System() s.state_groups.append([[model_group1]]) r1 = MockObject() r2 = MockObject() s.orphan_representations.append(r1) model1.representation = None model2.representation = r2 model3.representation = r1 # duplicates should be filtered globally self.assertEqual(list(s._all_representations()), [r1, r2]) def test_all_assemblies(self): """Test _all_assemblies() method""" class MockObject: pass model1 = MockObject() model2 = MockObject() model_group1 = [model1, model2] s = ihm.System() s.state_groups.append([[model_group1]]) asmb1 = MockObject() asmb2 = MockObject() s.orphan_assemblies.append(asmb1) model1.assembly = None model1.protocol = None model2.assembly = asmb2 step = MockObject() step.assembly = asmb1 prot = MockObject() prot.steps = [step] analysis1 = MockObject() astep1 = MockObject() astep1.assembly = asmb2 analysis1.steps = [astep1] prot.analyses = [analysis1] model2.protocol = prot rsr1 = MockObject() rsr1.assembly = asmb2 rsr2 = MockObject() rsr2.assembly = None s.restraints.extend((rsr1, rsr2)) # duplicates should be present; complete assembly is always first self.assertEqual(list(s._all_assemblies()), [s.complete_assembly, asmb1, asmb2, asmb1, asmb2, asmb2]) def test_all_citations(self): """Test _all_citations() method""" class MockObject: pass c1 = ihm.Citation(title='Test paper', journal='J Mol Biol', volume=45, page_range=(1, 20), year=2016, authors=['Smith A', 'Jones B'], doi='10.2345/S1384107697000225', pmid='1234') c2 = ihm.Citation(title='Test paper', journal='J Mol Biol', volume=45, page_range=(1, 20), year=2016, authors=['Smith A', 'Jones B'], doi='1.2.3.4', pmid='1234') c3 = ihm.Citation(title='Test paper2', journal='J Mol Biol2', volume=46, page_range=(1, 20), year=2017, authors=['Smith A', 'Jones B'], doi='5.6.7.8', pmid='5678') rsr1 = MockObject() # Not a 3dem restraint rsr2 = MockObject() # 3dem but with no provided citation 
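# Sketch of a multi-state scheme assembled from connectivities, following
# the pattern above.  The state objects here are simple placeholders; real
# code would use the State objects stored in System.state_groups.
import ihm
import ihm.multi_state_scheme

state_a, state_b = 'state A', 'state B'          # placeholder states
conn = ihm.multi_state_scheme.Connectivity(begin_state=state_a,
                                           end_state=state_b)
scheme = ihm.multi_state_scheme.MultiStateScheme(name='two-state scheme',
                                                 connectivities=[conn])
system = ihm.System()
system.multi_state_schemes.append(scheme)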
rsr2.fitting_method_citation_id = None rsr3 = MockObject() rsr2.fitting_method_citation_id = c1 s1 = ihm.Software(name='test', classification='test code', description='Some test program', version=1, location='http://test.org') s2 = ihm.Software(name='test', classification='test code', description='Some test program', version=1, location='http://test.org', citation=c3) s = ihm.System() s.restraints.extend((rsr1, rsr2, rsr3)) s.citations.extend((c2, c2)) s.software.extend((s1, s2)) # duplicates should be filtered globally self.assertEqual(list(s._all_citations()), [c2, c3, c1]) def test_all_software(self): """Test _all_software() method""" class MockObject: pass s1 = ihm.Software(name='test', classification='test code', description='Some test program', version=1, location='http://test.org') s2 = ihm.Software(name='foo', classification='test code', description='Other test program', location='http://test2.org') sm1 = MockObject() sm1.software = None sm2 = MockObject() sm2.software = s1 s = ihm.System() s.orphan_starting_models.extend((sm1, sm2)) s.software.extend((s2, s2)) step1 = MockObject() step2 = MockObject() step1.software = None step2.software = s2 protocol1 = MockObject() protocol1.steps = [step1, step2] analysis1 = MockObject() astep1 = MockObject() astep1.software = s2 analysis1.steps = [astep1] protocol1.analyses = [analysis1] s.orphan_protocols.append(protocol1) r1 = MockObject() r2 = MockObject() r3 = MockObject() r2.software = None r3.software = s1 s.restraints.extend((r1, r2, r3)) # duplicates are kept self.assertEqual(list(s._all_software()), [s2, s2, s1, s2, s2, s1]) def test_all_dataset_groups(self): """Test _all_dataset_groups() method""" class MockObject: pass dg1 = MockObject() dg2 = MockObject() s = ihm.System() s.orphan_dataset_groups.append(dg1) step1 = MockObject() step2 = MockObject() step3 = MockObject() step1.dataset_group = None step2.dataset_group = dg2 step3.dataset_group = dg1 protocol1 = MockObject() protocol1.steps = [step1, step2, step3] analysis1 = MockObject() astep1 = MockObject() astep1.dataset_group = dg2 analysis1.steps = [astep1] protocol1.analyses = [analysis1] s.orphan_protocols.append(protocol1) # duplicates should not be filtered self.assertEqual(list(s._all_dataset_groups()), [dg1, dg2, dg1, dg2]) def test_all_locations(self): """Test _all_locations() method""" class MockObject: pass class MockDataset: parents = [] loc1 = MockObject() loc2 = MockObject() loc3 = MockObject() loc4 = MockObject() s = ihm.System() dataset1 = MockDataset() dataset2 = MockDataset() dataset2.location = None dataset3 = MockDataset() dataset3.location = loc1 s.locations.append(loc1) s.orphan_datasets.extend((dataset1, dataset2, dataset3)) ensemble = MockObject() ensemble.file = loc2 density = MockObject() density.file = loc1 ss1 = MockObject() ss1.file = None ss2 = MockObject() ss2.file = loc4 ensemble.densities = [density] ensemble.subsamples = [ss1, ss2] ensemble2 = MockObject() ensemble2.file = None ensemble2.densities = [] ensemble2.subsamples = [] s.ensembles.extend((ensemble, ensemble2)) start_model = MockObject() start_model.dataset = None start_model.script_file = loc2 template = MockObject() template.dataset = None template.alignment_file = loc3 start_model.templates = [template] s.orphan_starting_models.append(start_model) step1 = MockObject() step2 = MockObject() step1.dataset_group = None step2.dataset_group = None step1.script_file = None step2.script_file = loc2 protocol1 = MockObject() protocol1.steps = [step1, step2] analysis1 = MockObject() astep1 = 
MockObject() astep1.dataset_group = None astep1.script_file = loc2 analysis1.steps = [astep1] protocol1.analyses = [analysis1] s.orphan_protocols.append(protocol1) # duplicates should not be filtered self.assertEqual(list(s._all_locations()), [loc1, loc1, loc2, loc4, loc1, loc2, loc3, loc2, loc2]) def test_all_datasets(self): """Test _all_datasets() method""" class MockObject: pass class MockDataset: parents = [] s = ihm.System() d1 = MockDataset() d2 = MockDataset() d3 = MockDataset() d4 = MockDataset() s.orphan_datasets.append(d1) dg1 = [d2] s.orphan_dataset_groups.append(dg1) start_model1 = MockObject() start_model1.dataset = None start_model2 = MockObject() start_model2.dataset = d3 template = MockObject() template.dataset = None start_model1.templates = [template] start_model2.templates = [] s.orphan_starting_models.extend((start_model1, start_model2)) rsr1 = MockObject() rsr1.dataset = d4 d4.parents = [d2] # Handle parent being a TransformedDataset trand = MockObject() trand.transform = None trand.dataset = d1 d2.parents = [trand] d1.parents = d3.parents = [] s.restraints.append(rsr1) # duplicates should not be filtered self.assertEqual(list(s._all_datasets()), [d1, d1, d2, d3, d1, d2, d4]) def test_all_starting_models(self): """Test _all_starting_models() method""" class MockObject: pass s = ihm.System() sm1 = MockObject() sm2 = MockObject() s.orphan_starting_models.append(sm1) rep = ihm.representation.Representation() seg1 = ihm.representation.Segment() seg1.starting_model = None seg2 = ihm.representation.Segment() seg2.starting_model = sm2 seg3 = ihm.representation.Segment() seg3.starting_model = sm2 rep.extend((seg1, seg2, seg3)) s.orphan_representations.append(rep) # duplicates should be filtered self.assertEqual(list(s._all_starting_models()), [sm1, sm2]) def test_all_geometric_objects(self): """Test _all_geometric_objects() method""" class MockObject: pass geom1 = MockObject() geom2 = MockObject() s = ihm.System() r1 = MockObject() r2 = MockObject() r2.geometric_object = None r3 = MockObject() r3.geometric_object = geom1 s.orphan_geometric_objects.extend((geom1, geom2)) s.restraints.extend((r1, r2, r3)) # duplicates should not be filtered self.assertEqual(list(s._all_geometric_objects()), [geom1, geom2, geom1]) def test_all_features(self): """Test _all_features() method""" class MockObject: pass f1 = MockObject() f2 = MockObject() s = ihm.System() r1 = MockObject() r2 = MockObject() r2._all_features = (None,) r3 = MockObject() r3._all_features = (f1,) s.orphan_features.extend((f1, f2)) s.restraints.extend((r1, r2, r3)) # duplicates should not be filtered self.assertEqual(list(s._all_features()), [f1, f2, f1]) def test_all_pseudo_sites(self): """Test _all_pseudo_sites() method""" class MockObject: pass s1 = MockObject() s2 = MockObject() r1 = MockObject() xl = MockObject() ps = MockObject() ps.site = s2 xl.pseudo1 = [ps] xl.pseudo2 = None r1.cross_links = [xl] r2 = MockObject() xl = MockObject() xl.pseudo1 = None ps = MockObject() ps.site = s1 xl.pseudo2 = [ps] r2.cross_links = [xl] s = ihm.System() s.orphan_pseudo_sites.extend((s1, s2)) s.restraints.extend((r1, r2)) f1 = MockObject() f1.site = s2 s.orphan_features.append(f1) # duplicates should not be filtered self.assertEqual(list(s._all_pseudo_sites()), [s1, s2, s2, s1, s2]) def test_all_chem_descriptors(self): """Test _all_chem_descriptors() method""" class MockObject: pass d1 = ihm.ChemDescriptor("d1") d2 = ihm.ChemDescriptor("d2") d3 = ihm.ChemDescriptor("d3") d4 = ihm.ChemDescriptor("d4") s = ihm.System() f = 
ihm.flr.FLRData() s.flr_data.append(f) r1 = MockObject() r2 = MockObject() r2.linker = d3 s.restraints.extend((r1, r2)) r2.feature = None s.orphan_chem_descriptors.extend((d1, d2, d1)) # FLR chemical descriptors conj = ihm.flr.PolyProbeConjugate( sample_probe=None, chem_descriptor=d4, ambiguous_stoichiometry=False) f.poly_probe_conjugates.append(conj) # duplicates should not be filtered self.assertEqual(list(s._all_chem_descriptors()), [d1, d2, d1, d3, d4]) def test_all_entity_ranges(self): """Test _all_entity_ranges() method""" class MockObject: pass s = ihm.System() e1 = ihm.Entity('AHCD', description='foo') a1 = ihm.AsymUnit(e1) s.entities.append(e1) s.asym_units.append(a1) e1rng = e1(1, 3) a1rng = a1(1, 2) sm1 = MockObject() sm1.asym_unit = e1rng s.orphan_starting_models.append(sm1) rep = ihm.representation.Representation() seg1 = ihm.representation.Segment() seg1.starting_model = None seg1.asym_unit = a1 rep.append(seg1) s.orphan_representations.append(rep) asmb1 = ihm.Assembly([e1, a1]) s.orphan_assemblies.append(asmb1) ensemble = MockObject() density = MockObject() density.asym_unit = a1rng ensemble.densities = [density] s.ensembles.append(ensemble) # duplicates should not be filtered self.assertEqual(list(s._all_entity_ranges()), [e1rng, a1, e1, a1, a1rng]) def test_all_multi_state_schemes(self): """Test _all_multi_state_schemes() method""" class MockObject: pass s = ihm.System() m1 = MockObject() m2 = MockObject() m3 = MockObject() s.multi_state_schemes.append(m1) s.multi_state_schemes.append(m2) s.multi_state_schemes.append(m3) self.assertEqual(list(s._all_multi_state_schemes()), [m1, m2, m3]) def test_all_multi_state_scheme_connectivities(self): """Test _all_multi_state_scheme_connectivities() method""" class MockObject: pass s = ihm.System() # Multi-state schemes mss1 = ihm.multi_state_scheme.MultiStateScheme('mss1') mss2 = ihm.multi_state_scheme.MultiStateScheme('mss2') # States s1 = MockObject() s2 = MockObject() s3 = MockObject() # Connectivities c1 = ihm.multi_state_scheme.Connectivity(s1) c2 = ihm.multi_state_scheme.Connectivity(s2) c3 = ihm.multi_state_scheme.Connectivity(s3) mss1.add_connectivity(c1) mss1.add_connectivity(c2) mss2.add_connectivity(c1) mss2.add_connectivity(c3) s.multi_state_schemes.append(mss1) s.multi_state_schemes.append(mss2) # Duplicates are kept self.assertEqual(list(s._all_multi_state_scheme_connectivities()), [c1, c2, c1, c3]) def test_all_kinetic_rates(self): """Test _all_kinetic_rates() method""" class MockObject: pass s = ihm.System() # Multi-state schemes mss1 = ihm.multi_state_scheme.MultiStateScheme('mss1') mss2 = ihm.multi_state_scheme.MultiStateScheme('mss2') # States s1 = MockObject() s2 = MockObject() s3 = MockObject() # Kinetic rates k1 = MockObject() k2 = MockObject() k3 = MockObject() # Connectivities c1 = ihm.multi_state_scheme.Connectivity( begin_state=s1, kinetic_rate=k1) c2 = ihm.multi_state_scheme.Connectivity( begin_state=s2, kinetic_rate=k2) c3 = ihm.multi_state_scheme.Connectivity( begin_state=s3, kinetic_rate=k3) c4 = ihm.multi_state_scheme.Connectivity( begin_state=s3, kinetic_rate=None) mss1.add_connectivity(c1) mss1.add_connectivity(c2) mss2.add_connectivity(c1) mss2.add_connectivity(c3) mss2.add_connectivity(c4) s.multi_state_schemes.append(mss1) s.multi_state_schemes.append(mss2) # Does not contain duplicates self.assertEqual(list(s._all_kinetic_rates()), [k1, k2, k3]) # From kinetic_rate_fret_analysis_connections in FLRData k4 = MockObject() kfc = MockObject() kfc.kinetic_rate = k4 f = ihm.flr.FLRData() 
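# Sketch of attaching a kinetic rate to a connectivity and registering FLR
# data on the system, the structures the collection methods above iterate
# over.  The rate value and state names are placeholders (the tests use
# MockObject or plain strings for them).
import ihm
import ihm.flr
import ihm.multi_state_scheme

conn = ihm.multi_state_scheme.Connectivity(begin_state='state A',
                                           kinetic_rate='placeholder rate')
scheme = ihm.multi_state_scheme.MultiStateScheme(name='scheme',
                                                 connectivities=[conn])
system = ihm.System()
system.multi_state_schemes.append(scheme)
system.flr_data.append(ihm.flr.FLRData())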
f.kinetic_rate_fret_analysis_connections.append(kfc) s.flr_data.append(f) # Does not contain duplicates self.assertEqual(list(s._all_kinetic_rates()), [k1, k2, k3, k4]) def test_all_relaxation_times(self): """Test _all_relaxation_times() method""" class MockObject: pass s = ihm.System() # Multi-state schemes mss1 = ihm.multi_state_scheme.MultiStateScheme('mss1') mss2 = ihm.multi_state_scheme.MultiStateScheme('mss2') # States s1 = MockObject() s2 = MockObject() s3 = MockObject() # Kinetic rates r1 = MockObject() r2 = MockObject() r3 = MockObject() r4 = MockObject() r5 = MockObject() # Connectivities c1 = ihm.multi_state_scheme.Connectivity( begin_state=s1, relaxation_time=r1) c2 = ihm.multi_state_scheme.Connectivity( begin_state=s2, relaxation_time=r2) c3 = ihm.multi_state_scheme.Connectivity( begin_state=s3, relaxation_time=r3) c4 = ihm.multi_state_scheme.Connectivity( begin_state=s3, relaxation_time=None, kinetic_rate='rate' ) mss1.add_relaxation_time(r4) mss2.add_relaxation_time(r5) mss1.add_connectivity(c1) mss1.add_connectivity(c2) mss2.add_connectivity(c1) mss2.add_connectivity(c3) mss2.add_connectivity(c4) s.multi_state_schemes.append(mss1) s.multi_state_schemes.append(mss2) # Does not contain duplicates self.assertEqual(list(s._all_relaxation_times()), [r4, r5, r1, r2, r3]) # From relaxation_time_fret_analysis_connections in FLRData r6 = MockObject() rfc = MockObject() rfc.relaxation_time = r6 f = ihm.flr.FLRData() f.relaxation_time_fret_analysis_connections.append(rfc) s.flr_data.append(f) # Does not contain duplicates self.assertEqual(list(s._all_relaxation_times()), [r4, r5, r1, r2, r3, r6]) def test_update_locations_in_repositories(self): """Test update_locations_in_repositories() method""" s = ihm.System() loc = ihm.location.InputFileLocation(path='foo', repo='bar') s.locations.append(loc) r = ihm.location.Repository(doi='foo', root='..') s.update_locations_in_repositories([r]) def test_unknown(self): """Test the 'unknown' special object""" u = ihm.unknown self.assertEqual(str(u), '?') self.assertEqual(repr(u), '?') # Should only be equal to itself self.assertEqual(u, u) self.assertLessEqual(u, u) self.assertGreaterEqual(u, u) self.assertNotEqual(u, '?') self.assertNotEqual(u, None) self.assertFalse(u < u) self.assertFalse(u > u) # Should act like False self.assertFalse(u) def test_branch_descriptor(self): """Test the BranchDescriptor class""" bd = ihm.BranchDescriptor(text='foo', type='bar', program='baz', program_version="1.0") self.assertEqual(bd.text, 'foo') self.assertEqual(bd.type, 'bar') self.assertEqual(bd.program, 'baz') self.assertEqual(bd.program_version, '1.0') def test_branch_link(self): """Test the BranchLink class""" lnk = ihm.BranchLink(num1=1, atom_id1='CA', leaving_atom_id1='H1', num2=2, atom_id2='N', leaving_atom_id2='H2', order='sing', details='foo') self.assertEqual(lnk.num1, 1) self.assertEqual(lnk.atom_id1, 'CA') self.assertEqual(lnk.leaving_atom_id1, 'H1') self.assertEqual(lnk.num2, 2) self.assertEqual(lnk.atom_id2, 'N') self.assertEqual(lnk.leaving_atom_id2, 'H2') self.assertEqual(lnk.order, 'sing') self.assertEqual(lnk.details, 'foo') def test_data_usage(self): """Test DataUsage classes""" d = ihm.DataUsage("foo", name="fooname", url="foourl") d = ihm.License("foo", name="fooname", url="foourl") self.assertEqual(d.type, "license") d = ihm.Disclaimer("foo", name="fooname", url="foourl") self.assertEqual(d.type, "disclaimer") if __name__ == '__main__': unittest.main() 
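# Sketch of System.update_locations_in_repositories(): local file locations
# found under a Repository's root are rewritten to reference that repository
# (locations that already carry a repo, as below, are left untouched).  The
# DOI, root and path values are placeholders.
import ihm
import ihm.location

system = ihm.System()
repo = ihm.location.Repository(doi='10.5281/zenodo.46266', root='..')
loc = ihm.location.InputFileLocation(path='inputs/data.csv', repo='tmp-repo')
system.locations.append(loc)
system.update_locations_in_repositories([repo])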
python-ihm-2.7/test/test_make_mmcif.py000066400000000000000000000353241503573337200201550ustar00rootroot00000000000000import utils import os import sys import unittest import subprocess TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) utils.set_search_paths(TOPDIR) import ihm.reader import ihm.util.make_mmcif # Script should also be importable MAKE_MMCIF = os.path.join(TOPDIR, 'ihm', 'util', 'make_mmcif.py') class Tests(unittest.TestCase): def test_simple(self): """Simple test of make_mmcif utility script""" incif = utils.get_input_file_name(TOPDIR, 'struct_only.cif') subprocess.check_call([sys.executable, MAKE_MMCIF, incif]) with open('output.cif') as fh: s, = ihm.reader.read(fh) self.assertEqual(s.title, 'Architecture of Pol II(G) and molecular mechanism ' 'of transcription regulation by Gdown1') os.unlink('output.cif') def test_non_default_output(self): """Simple test of make_mmcif with non-default output name""" incif = utils.get_input_file_name(TOPDIR, 'struct_only.cif') subprocess.check_call([sys.executable, MAKE_MMCIF, incif, 'non-default-output.cif']) with open('non-default-output.cif') as fh: s, = ihm.reader.read(fh) self.assertEqual(s.title, 'Architecture of Pol II(G) and molecular mechanism ' 'of transcription regulation by Gdown1') os.unlink('non-default-output.cif') def test_no_title(self): """Check that make_mmcif adds missing title""" incif = utils.get_input_file_name(TOPDIR, 'no_title.cif') subprocess.check_call([sys.executable, MAKE_MMCIF, incif]) with open('output.cif') as fh: s, = ihm.reader.read(fh) self.assertEqual(s.title, 'Auto-generated system') os.unlink('output.cif') def test_bad_usage(self): """Bad usage of make_mmcif utility script""" ret = subprocess.call([sys.executable, MAKE_MMCIF]) self.assertEqual(ret, 2) def test_same_file(self): """Check that make_mmcif fails if input and output are the same""" incif = utils.get_input_file_name(TOPDIR, 'struct_only.cif') ret = subprocess.call([sys.executable, MAKE_MMCIF, incif, incif]) self.assertEqual(ret, 1) def test_mini(self): """Check that make_mmcif works given only basic atom info""" incif = utils.get_input_file_name(TOPDIR, 'mini.cif') subprocess.check_call([sys.executable, MAKE_MMCIF, incif]) with open('output.cif') as fh: s, = ihm.reader.read(fh) self.assertEqual(len(s.state_groups), 1) self.assertEqual(len(s.state_groups[0]), 1) self.assertEqual(len(s.state_groups[0][0]), 1) self.assertEqual(len(s.state_groups[0][0][0]), 1) m = s.state_groups[0][0][0][0] self.assertEqual(m.protocol.name, 'modeling') self.assertEqual(m.assembly.name, 'Complete assembly') chain_a, chain_b, = m.representation for chain in chain_a, chain_b: self.assertIsInstance(chain, ihm.representation.AtomicSegment) self.assertFalse(chain.rigid) self.assertEqual(s.title, 'Auto-generated system') os.unlink('output.cif') def test_pass_through(self): """Check that make_mmcif passes through already-compliant files""" incif = utils.get_input_file_name(TOPDIR, 'docking.cif') subprocess.check_call([sys.executable, MAKE_MMCIF, incif]) with open('output.cif') as fh: s, = ihm.reader.read(fh) self.assertEqual(len(s.state_groups), 1) self.assertEqual(len(s.state_groups[0]), 1) self.assertEqual(len(s.state_groups[0][0]), 1) self.assertEqual(len(s.state_groups[0][0][0]), 1) m = s.state_groups[0][0][0][0] self.assertEqual(m.protocol.name, 'Modeling') self.assertEqual(m.assembly.name, 'Our complete assembly') self.assertEqual(m.assembly.description, 'All our known components') chain_a, chain_b, = m.representation 
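# Sketch of driving the make_mmcif utility the way the tests above do: run
# it on an input mmCIF, then read the compliant output back with ihm.reader.
# The script path and input filename are placeholders.
import subprocess
import sys
import ihm.reader

make_mmcif = 'ihm/util/make_mmcif.py'      # placeholder path to the script
subprocess.check_call([sys.executable, make_mmcif, 'input.cif'])
with open('output.cif') as fh:             # default output filename
    system, = ihm.reader.read(fh)
print(system.title)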
self.assertIsInstance(chain_a, ihm.representation.AtomicSegment) self.assertTrue(chain_a.rigid) self.assertIsInstance(chain_b, ihm.representation.FeatureSegment) self.assertFalse(chain_b.rigid) self.assertEqual(s.title, 'Output from simple-docking example') os.unlink('output.cif') def test_complete_assembly_order(self): """Check that assembly order does not matter""" incif = utils.get_input_file_name(TOPDIR, 'docking_order.cif') subprocess.check_call([sys.executable, MAKE_MMCIF, incif]) with open('output.cif') as fh: s, = ihm.reader.read(fh) m = s.state_groups[0][0][0][0] self.assertEqual(m.assembly.name, 'Our complete assembly') self.assertEqual(m.assembly.description, 'All our known components') def test_add_polymers(self): """Check that make_mmcif combines polymer information""" # mini.cif contains two chains A, B incif = utils.get_input_file_name(TOPDIR, 'mini.cif') # mini_add.cif also contains A, B; A is the same sequence as mini.cif # but B is different (so should be renamed C when we add) addcif = utils.get_input_file_name(TOPDIR, 'mini_add.cif') subprocess.check_call([sys.executable, MAKE_MMCIF, incif, '--add', addcif]) with open('output.cif') as fh: s, = ihm.reader.read(fh) self.assertEqual(len(s.entities), 3) self.assertEqual(len(s.asym_units), 3) self.assertEqual(len(s.state_groups), 2) # Model from mini.cif self.assertEqual(len(s.state_groups[0]), 1) self.assertEqual(len(s.state_groups[0][0]), 1) self.assertEqual(len(s.state_groups[0][0][0]), 1) m = s.state_groups[0][0][0][0] self.assertEqual(m.protocol.name, 'modeling') self.assertEqual(m.assembly.name, 'Modeled assembly') chain_a, chain_b, = m.representation self.assertIs(chain_a.asym_unit.asym, s.asym_units[0]) self.assertIs(chain_b.asym_unit.asym, s.asym_units[1]) for chain in chain_a, chain_b: self.assertIsInstance(chain, ihm.representation.AtomicSegment) self.assertFalse(chain.rigid) # Model from mini_add.cif self.assertEqual(len(s.state_groups[1]), 1) self.assertEqual(len(s.state_groups[1][0]), 1) self.assertEqual(len(s.state_groups[1][0][0]), 1) m = s.state_groups[1][0][0][0] self.assertEqual(m.protocol.name, 'modeling') self.assertEqual(m.assembly.name, 'Modeled assembly') chain_a, chain_c, = m.representation self.assertIs(chain_a.asym_unit.asym, s.asym_units[0]) self.assertIs(chain_c.asym_unit.asym, s.asym_units[2]) for chain in chain_a, chain_c: self.assertIsInstance(chain, ihm.representation.AtomicSegment) self.assertFalse(chain.rigid) self.assertEqual(s.title, 'Auto-generated system') os.unlink('output.cif') def test_add_non_polymers(self): """Check that make_mmcif combines non-polymer information""" # mini_nonpoly.cif contains two hemes A, B incif = utils.get_input_file_name(TOPDIR, 'mini_nonpoly.cif') # mini_nonpoly_add.cif also contains A, B; A has the same author # provided residue number as mini_nonpoly.cif but B is different # (so should be renamed C when we add) addcif = utils.get_input_file_name(TOPDIR, 'mini_nonpoly_add.cif') subprocess.check_call([sys.executable, MAKE_MMCIF, incif, '--add', addcif]) with open('output.cif') as fh: s, = ihm.reader.read(fh) self.assertEqual(len(s.entities), 1) self.assertEqual(len(s.asym_units), 3) self.assertEqual(len(s.state_groups), 2) # Model from mini_nonpoly.cif self.assertEqual(len(s.state_groups[0]), 1) self.assertEqual(len(s.state_groups[0][0]), 1) self.assertEqual(len(s.state_groups[0][0][0]), 1) m = s.state_groups[0][0][0][0] self.assertEqual(m.protocol.name, 'modeling') self.assertEqual(m.assembly.name, 'Modeled assembly') chain_a, chain_b, = m.representation 
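# Sketch of combining two structures with make_mmcif's --add flag, as in the
# test above; the script path and filenames are placeholders.
import subprocess
import sys

make_mmcif = 'ihm/util/make_mmcif.py'      # placeholder path to the script
subprocess.check_call([sys.executable, make_mmcif, 'first.cif',
                       '--add', 'second.cif'])
# output.cif now holds both models as separate state groups; chains from the
# added file with new sequences receive fresh asym IDs.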
self.assertIs(chain_a.asym_unit, s.asym_units[0]) self.assertIs(chain_b.asym_unit, s.asym_units[1]) for chain in chain_a, chain_b: self.assertIsInstance(chain, ihm.representation.AtomicSegment) self.assertFalse(chain.rigid) # Model from mini_nonpoly_add.cif self.assertEqual(len(s.state_groups[1]), 1) self.assertEqual(len(s.state_groups[1][0]), 1) self.assertEqual(len(s.state_groups[1][0][0]), 1) m = s.state_groups[1][0][0][0] self.assertEqual(m.protocol.name, 'modeling') self.assertEqual(m.assembly.name, 'Modeled assembly') chain_a, chain_c, = m.representation self.assertIs(chain_a.asym_unit, s.asym_units[0]) self.assertIs(chain_c.asym_unit, s.asym_units[2]) for chain in chain_a, chain_c: self.assertIsInstance(chain, ihm.representation.AtomicSegment) self.assertFalse(chain.rigid) self.assertEqual(s.title, 'Auto-generated system') os.unlink('output.cif') def test_add_multi_data(self): """make_mmcif should fail to add system with multiple data blocks""" incif = utils.get_input_file_name(TOPDIR, 'mini.cif') addcif = utils.get_input_file_name(TOPDIR, 'mini_add.cif') with open(addcif) as fh: addcif_contents = fh.read() addcif_multi = 'addcif_multi.cif' with open(addcif_multi, 'w') as fh: fh.write(addcif_contents) fh.write(addcif_contents.replace('data_model', 'data_model2')) ret = subprocess.call([sys.executable, MAKE_MMCIF, incif, '--add', addcif_multi]) self.assertEqual(ret, 1) os.unlink(addcif_multi) def test_not_modeled(self): """Check addition of not-modeled residue information""" incif = utils.get_input_file_name(TOPDIR, 'not_modeled.cif') subprocess.check_call([sys.executable, MAKE_MMCIF, incif]) with open('output.cif') as fh: s, = ihm.reader.read(fh) # Residues 5 and 6 in chain A, and 2 in chain B, are missing from # atom_site. But the file already has an _ihm_residues_not_modeled # table listing residue 5:A, so we expect to see just 6:A and 2:B # added m = s.state_groups[0][0][0][0] r1, r2, r3 = m.not_modeled_residue_ranges self.assertEqual((r1.seq_id_begin, r1.seq_id_end), (5, 5)) self.assertEqual(r1.asym_unit._id, 'A') self.assertEqual((r2.seq_id_begin, r2.seq_id_end), (6, 6)) self.assertEqual(r2.asym_unit._id, 'A') self.assertEqual((r3.seq_id_begin, r3.seq_id_end), (2, 2)) self.assertEqual(r3.asym_unit._id, 'B') os.unlink('output.cif') def test_histidine(self): """Test handling multiple histidine protonation states""" incif = utils.get_input_file_name(TOPDIR, 'histidine.cif') with open(incif) as fh: s, = ihm.reader.read(fh) self.assertEqual([c.id for c in s.entities[0].sequence], ['ALA', 'HIS', 'HIE', 'HIP', 'ALA']) subprocess.check_call([sys.executable, MAKE_MMCIF, '--histidine', incif]) with open('output.cif') as fh: s, = ihm.reader.read(fh) # All histidines should now be HIS self.assertEqual([c.id for c in s.entities[0].sequence], ['ALA', 'HIS', 'HIS', 'HIS', 'ALA']) # All modified histidine atoms should now be ATOM not HETATM; # the last atom in the last ALA residue (which was marked HETATM in # the input) should still be HETATM. 
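# Sketch of make_mmcif's --histidine flag, which renames histidine
# protonation variants (HIE, HIP, ...) back to standard HIS, as checked
# above; the script path and input filename are placeholders.
import subprocess
import sys
import ihm.reader

make_mmcif = 'ihm/util/make_mmcif.py'      # placeholder path to the script
subprocess.check_call([sys.executable, make_mmcif, '--histidine',
                       'histidine.cif'])
with open('output.cif') as fh:
    system, = ihm.reader.read(fh)
print([comp.id for comp in system.entities[0].sequence])  # variants now 'HIS'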
for state_group in s.state_groups: for state in state_group: for model_group in state: for model in model_group: self.assertEqual([x.het for x in model._atoms], [False, False, False, False, True]) os.unlink('output.cif') def test_check_non_canon_atom_standard(self): """Test check for non-canonical atom names, standard restypes""" minicif = utils.get_input_file_name(TOPDIR, 'mini.cif') incif = utils.get_input_file_name(TOPDIR, 'non_canon_atom.cif') # Should work fine without check subprocess.check_call([sys.executable, MAKE_MMCIF, incif]) os.unlink('output.cif') # Should also work fine for file with canonical atom names subprocess.check_call([sys.executable, MAKE_MMCIF, "--check_atom_names=standard", minicif]) os.unlink('output.cif') # Should fail with check enabled r = subprocess.Popen([sys.executable, MAKE_MMCIF, "--check_atom_names=standard", incif], stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) out, err = r.communicate() self.assertEqual(r.returncode, 1) # Non-canonical atoms in standard residues should be reported # Non-standard residues (ZN, ...) are not checked self.assertIn("Non-canonical atom names found in the following " "residues: GLN: ['bad3']; VAL: ['bad1', 'bad2']", err) os.unlink('output.cif') def test_check_non_canon_atom_all(self): """Test check for non-canonical atom names, all restypes""" incif = utils.get_input_file_name(TOPDIR, 'non_canon_atom.cif') # Use mock urllib so we don't hit the network during this test env = os.environ.copy() mockdir = os.path.join(TOPDIR, 'test', 'mock', 'non_canon_atom') env['PYTHONPATH'] = mockdir + os.pathsep + env['PYTHONPATH'] r = subprocess.Popen([sys.executable, MAKE_MMCIF, "--check_atom_names=all", incif], stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, env=env) out, err = r.communicate() self.assertEqual(r.returncode, 1) # Non-canonical atoms in standard residues should be reported # Non-standard residue (ZN) should also be checked self.assertIn("Non-canonical atom names found in the following " "residues: GLN: ['bad3']; VAL: ['bad1', 'bad2']; " "ZN: ['bad4']", err) # Residues not in CCD should give a warning self.assertIn("Component invalid-comp-name could not be found in CCD", err) os.unlink('output.cif') if __name__ == '__main__': unittest.main() python-ihm-2.7/test/test_metadata.py000066400000000000000000000662321503573337200176470ustar00rootroot00000000000000import utils import os import unittest import warnings import urllib.request import urllib.error from io import StringIO TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) utils.set_search_paths(TOPDIR) import ihm.metadata try: from ihm import _format except ImportError: _format = None class Tests(unittest.TestCase): def test_parser(self): """Test Parser base class""" p = ihm.metadata.Parser() p.parse_file(None) # does nothing def test_mrc_parser_local_mrc(self): """Test MRCParser pointing to a locally-available MRC file""" p = ihm.metadata.MRCParser() # Note that this are not complete MRC files (only the headers), # to save space in the repository for input_file in ('Rpb8.mrc-header', 'emptylabel.mrc-header'): fname = utils.get_input_file_name(TOPDIR, input_file) d = p.parse_file(fname) self.assertEqual(list(d.keys()), ['dataset']) dataset = d['dataset'] self.assertEqual(dataset.data_type, '3DEM volume') self.assertEqual(dataset.location.path, fname) self.assertEqual(dataset.location.details, 'Electron microscopy density map') self.assertIsNone(dataset.location.repo) def test_mrc_parser_emdb_ok(self): 
"""Test MRCParser pointing to an MRC in EMDB, no network errors""" def mock_urlopen(url, timeout=None): return StringIO( '{"admin": {"key_dates": {"map_release": "2011-04-21"},' '"title": "test details"}}') p = ihm.metadata.MRCParser() fname = utils.get_input_file_name(TOPDIR, 'emd_1883.map.mrc-header') d = p.parse_file(fname) self.assertEqual(list(d.keys()), ['dataset']) dataset = d['dataset'] self.assertEqual(dataset.data_type, '3DEM volume') self.assertEqual(dataset.location.db_name, 'EMDB') self.assertEqual(dataset.location.access_code, 'EMD-1883') # Need to mock out urllib.request so we don't hit the network # (expensive) every time we test try: orig_urlopen = urllib.request.urlopen urllib.request.urlopen = mock_urlopen self.assertEqual(dataset.location.version, '2011-04-21') self.assertEqual(dataset.location.details, 'test details') dataset.location.version = 'my version' dataset.location.details = 'my details' self.assertEqual(dataset.location.version, 'my version') self.assertEqual(dataset.location.details, 'my details') finally: urllib.request.urlopen = orig_urlopen def test_mrc_parser_emdb_bad(self): """Test MRCParser pointing to an MRC in EMDB, with a network error""" def mock_urlopen(url, timeout=None): raise urllib.error.URLError("Mock network error") p = ihm.metadata.MRCParser() fname = utils.get_input_file_name(TOPDIR, 'emd_1883.map.mrc-header') d = p.parse_file(fname) self.assertEqual(list(d.keys()), ['dataset']) dataset = d['dataset'] self.assertEqual(dataset.data_type, '3DEM volume') self.assertEqual(dataset.location.db_name, 'EMDB') self.assertEqual(dataset.location.access_code, 'EMD-1883') # Mock out urllib.request to raise an error try: orig_urlopen = urllib.request.urlopen urllib.request.urlopen = mock_urlopen with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") self.assertIsNone(dataset.location.version) self.assertEqual(dataset.location.details, 'Electron microscopy density map') finally: urllib.request.urlopen = orig_urlopen self.assertEqual(len(w), 1) def test_mrc_parser_emdb_override(self): """Test MRCParser pointing to an MRC in EMDB with overridden metadata""" def mock_urlopen(url, timeout=None): raise ValueError("shouldn't be here") p = ihm.metadata.MRCParser() fname = utils.get_input_file_name(TOPDIR, 'emd_1883.map.mrc-header') d = p.parse_file(fname) self.assertEqual(list(d.keys()), ['dataset']) dataset = d['dataset'] self.assertEqual(dataset.data_type, '3DEM volume') self.assertEqual(dataset.location.db_name, 'EMDB') self.assertEqual(dataset.location.access_code, 'EMD-1883') # Set version manually; should prevent network access below dataset.location.version = 'foo' # Mock out urllib.request to raise an error try: orig_urlopen = urllib.request.urlopen urllib.request.urlopen = mock_urlopen self.assertEqual(dataset.location.version, 'foo') self.assertEqual(dataset.location.details, 'Electron microscopy density map') finally: urllib.request.urlopen = orig_urlopen def _parse_pdb(self, fname): p = ihm.metadata.PDBParser() return p.parse_file(fname) def test_official_pdb(self): """Test PDBParser when given an official PDB""" p = self._parse_pdb(utils.get_input_file_name(TOPDIR, 'official.pdb')) self.assertEqual(p['templates'], {}) self.assertEqual(len(p['metadata']), 1) self.assertEqual(p['metadata'][0].helix_id, '10') self.assertIsNone(p['script']) self._check_parsed_official_pdb(p, pdb_format=True) def test_bad_header(self): """Test PDBParser when given a non-official PDB with HEADER line""" pdbname = utils.get_input_file_name(TOPDIR, 
'bad_header.pdb') p = self._parse_pdb(pdbname) self.assertEqual(p['templates'], {}) self.assertEqual(p['software'], []) self.assertEqual(p['metadata'], []) dataset = p['dataset'] self.assertEqual(dataset.data_type, 'Comparative model') self.assertEqual(dataset.location.path, pdbname) self.assertIsNone(dataset.location.repo) self.assertEqual(dataset.location.details, 'Starting model structure') def test_derived_pdb(self): """Test PDBarser when given a file derived from a PDB""" pdbname = utils.get_input_file_name(TOPDIR, 'derived_pdb.pdb') p = self._parse_pdb(pdbname) self.assertEqual(p['templates'], {}) self.assertIsNone(p['script']) self.assertEqual(p['entity_source'], {}) dataset = p['dataset'] self.assertEqual(dataset.data_type, 'Experimental model') self.assertEqual(dataset.location.path, pdbname) self.assertIsNone(dataset.location.repo) self.assertEqual(dataset.location.details, 'MED7C AND MED21 STRUCTURES FROM PDB ENTRY 1YKH, ' 'ROTATED AND TRANSLATED TO ALIGN WITH THE ' 'MED4-MED9 MODEL') parent, = dataset.parents self.assertEqual(parent.data_type, 'Experimental model') self.assertEqual(parent.location.db_name, 'PDB') self.assertEqual(parent.location.access_code, '1YKH') self.assertIsNone(parent.location.version) self.assertIsNone(parent.location.details) def test_derived_comp_model(self): """Test PDBParser when given a file derived from a comparative model""" pdbname = utils.get_input_file_name(TOPDIR, 'derived_model.pdb') p = self._parse_pdb(pdbname) self.assertEqual(p['templates'], {}) self.assertIsNone(p['script']) dataset = p['dataset'] self.assertEqual(dataset.data_type, 'Comparative model') self.assertEqual(dataset.location.path, pdbname) self.assertIsNone(dataset.location.repo) self.assertEqual(dataset.location.details, 'MED4 AND MED9 STRUCTURE TAKEN FROM LARIVIERE ' 'ET AL, NUCLEIC ACIDS RESEARCH. 2013;41:9266-9273. ' 'DOI: 10.1093/nar/gkt704. THE MED10 STRUCTURE ALSO ' 'PROPOSED IN THAT WORK IS NOT USED IN THIS STUDY.') parent, = dataset.parents self.assertEqual(parent.data_type, 'Comparative model') self.assertEqual(parent.location.path, '.') self.assertEqual(parent.location.repo.doi, '10.1093/nar/gkt704') self.assertEqual(parent.location.details, 'Starting comparative model structure') def test_derived_int_model(self): """Test PDBParser when given a file derived from an integrative model""" pdbname = utils.get_input_file_name(TOPDIR, 'derived_int_model.pdb') p = self._parse_pdb(pdbname) self.assertEqual(p['templates'], {}) self.assertIsNone(p['script']) dataset = p['dataset'] self.assertEqual(dataset.data_type, 'Integrative model') self.assertEqual(dataset.location.path, pdbname) self.assertIsNone(dataset.location.repo) self.assertEqual(dataset.location.details, 'POM152 STRUCTURE TAKEN FROM UPLA ET AL, STRUCTURE ' '25(3) 434-445. 
DOI: 10.1016/j.str.2017.01.006.') parent, = dataset.parents self.assertEqual(parent.data_type, 'Integrative model') self.assertEqual(parent.location.path, '.') self.assertEqual(parent.location.repo.doi, '10.1016/j.str.2017.01.006') self.assertEqual(parent.location.details, 'Starting integrative model structure') def test_modeller_model_aln(self): """Test PDBParser when given a Modeller model with alignment/script""" pdbname = utils.get_input_file_name(TOPDIR, 'modeller_model.pdb') p = self.check_modeller_model(pdbname) aliname = utils.get_input_file_name(TOPDIR, 'modeller_model.ali') script = utils.get_input_file_name(TOPDIR, 'modeller_model.py') self.assertEqual(p['script'].path, script) for templates in p['templates'].values(): for t in templates: self.assertEqual(t.alignment_file.path, aliname) def test_modeller_model_no_aln(self): "Test PDBParser when given a Modeller model with no alignment/script" pdbname = utils.get_input_file_name(TOPDIR, 'modeller_model_no_aln.pdb') p = self.check_modeller_model(pdbname) for templates in p['templates'].values(): for t in templates: self.assertIsNone(t.alignment_file) def check_modeller_model(self, pdbname, cif=False): if cif: p = self._parse_cif(pdbname) else: p = self._parse_pdb(pdbname) dataset = p['dataset'] self.assertEqual(sorted(p['templates'].keys()), ['A', 'B']) s1, s2 = p['templates']['A'] s3, = p['templates']['B'] self.assertEqual(s1.asym_id, 'C') self.assertEqual(s1.seq_id_range, (33, 424)) self.assertEqual(s1.template_seq_id_range, (33, 424)) self.assertAlmostEqual(s1.sequence_identity.value, 100.0, delta=0.1) self.assertEqual( s1.sequence_identity.denominator, ihm.startmodel.SequenceIdentityDenominator.SHORTER_LENGTH) self.assertEqual(s2.asym_id, 'G') self.assertEqual(s2.seq_id_range, (429, 488)) self.assertEqual(s2.template_seq_id_range, (482, 551)) self.assertAlmostEqual(s2.sequence_identity.value, 10.0, delta=0.1) self.assertEqual( s2.sequence_identity.denominator, ihm.startmodel.SequenceIdentityDenominator.SHORTER_LENGTH) self.assertEqual(dataset.data_type, 'Comparative model') self.assertEqual(dataset.location.path, pdbname) self.assertIsNone(dataset.location.repo) self.assertEqual(dataset.location.details, 'Starting model structure') p1, p2, p3 = dataset.parents self.assertEqual(s1.dataset, p1) self.assertEqual(s2.dataset, p2) self.assertEqual(s3.dataset, p3) self.assertEqual(p1.data_type, 'Experimental model') self.assertEqual(p1.location.db_name, 'PDB') self.assertEqual(p1.location.access_code, '3JRO') self.assertIsNone(p1.location.version) self.assertIsNone(p1.location.details) self.assertEqual(p2.location.access_code, '3F3F') self.assertEqual(p3.location.access_code, '1ABC') s, = p['software'] self.assertEqual(len(s.citation.authors), 2) self.assertEqual(s.name, 'MODELLER') if cif: self.assertEqual(s.version, '10.4') self.assertEqual( s.description, 'Comparative modeling by satisfaction of spatial restraints, ' 'build 2023/10/23 11:26:12') else: self.assertEqual(s.version, '9.18') self.assertEqual( s.description, 'Comparative modeling by satisfaction of spatial restraints, ' 'build 2017/02/10 22:21:34') return p def test_modeller_local(self): "Test PDBParser when given a Modeller model with local template" pdbname = utils.get_input_file_name(TOPDIR, 'modeller_model_local.pdb') p = self._parse_pdb(pdbname) self.assertEqual(list(p['templates'].keys()), ['A']) s, = p['templates']['A'] self.assertEqual(s.asym_id, 'C') parent, = p['dataset'].parents self.assertEqual(parent.data_type, 'Experimental model') 
self.assertEqual(parent.location.details, 'Template for comparative modeling') self.assertEqual(parent.location.path, utils.get_input_file_name(TOPDIR, '15133C.pdb')) def test_phyre2_model(self): """Test PDBParser when given a Phyre2 model.""" pdbname = utils.get_input_file_name(TOPDIR, 'phyre2_model.pdb') p = self._parse_pdb(pdbname) s, = p['templates']['A'] self.assertEqual(s.asym_id, 'A') dataset = p['dataset'] self.assertEqual(dataset.data_type, 'Comparative model') self.assertEqual(dataset.location.path, pdbname) self.assertIsNone(dataset.location.repo) self.assertEqual(dataset.location.details, 'Starting model structure') parent, = dataset.parents self.assertEqual(parent.data_type, 'Experimental model') self.assertEqual(parent.location.db_name, 'PDB') self.assertEqual(parent.location.access_code, '4BZK') self.assertIsNone(parent.location.version) self.assertIsNone(parent.location.details) s, = p['software'] self.assertEqual(s.name, 'Phyre2') self.assertEqual(s.version, '2.0') def test_swiss_model_monomer(self): """Test PDBParser when given a SWISS-MODEL monomer model.""" pdbname = utils.get_input_file_name(TOPDIR, 'swiss_model.pdb') p = self._parse_pdb(pdbname) s, = p['software'] self.assertEqual(s.name, 'SWISS-MODEL') self.assertIn('using PROMOD3 engine', s.description) self.assertEqual(s.version, '1.3.0') s, = p['templates']['B'] self.assertEqual(s.asym_id, 'B') self.assertEqual(s.seq_id_range, (15, 244)) self.assertEqual(s.template_seq_id_range, (1, 229)) self.assertAlmostEqual(s.sequence_identity.value, 40.35, delta=0.1) denom = ihm.startmodel.SequenceIdentityDenominator self.assertEqual( s.sequence_identity.denominator, denom.NUM_ALIGNED_WITHOUT_GAPS) # alignment is also stored in the PDB file self.assertEqual(s.alignment_file.path, pdbname) dataset = p['dataset'] self.assertEqual(dataset.data_type, 'Comparative model') self.assertEqual(dataset.location.path, pdbname) self.assertIsNone(dataset.location.repo) self.assertEqual(dataset.location.details, 'Starting model structure') parent, = dataset.parents self.assertEqual(parent.data_type, 'Experimental model') self.assertEqual(parent.location.db_name, 'PDB') self.assertEqual(parent.location.access_code, '3j9w') self.assertIsNone(parent.location.version) self.assertIsNone(parent.location.details) def test_swiss_model_multimer(self): """Test PDBParser when given a SWISS-MODEL multimer model.""" pdbname = utils.get_input_file_name(TOPDIR, 'swiss_model_multimer.pdb') p = self._parse_pdb(pdbname) s, = p['software'] self.assertEqual(s.name, 'SWISS-MODEL') self.assertIn('using PROMOD3 engine', s.description) self.assertEqual(s.version, '2.0.0') self.assertEqual(sorted(p['templates'].keys()), ['A', 'B', 'C', 'D']) s, = p['templates']['C'] self.assertEqual(s.asym_id, 'C') self.assertEqual(s.seq_id_range, (14, 1356)) self.assertEqual(s.template_seq_id_range, (8, 1340)) self.assertAlmostEqual(s.sequence_identity.value, 40.95, delta=0.1) denom = ihm.startmodel.SequenceIdentityDenominator self.assertEqual( s.sequence_identity.denominator, denom.NUM_ALIGNED_WITHOUT_GAPS) # alignment is also stored in the PDB file self.assertEqual(s.alignment_file.path, pdbname) dataset = p['dataset'] self.assertEqual(dataset.data_type, 'Comparative model') self.assertEqual(dataset.location.path, pdbname) self.assertIsNone(dataset.location.repo) self.assertEqual(dataset.location.details, 'Starting model structure') parent, = dataset.parents self.assertEqual(parent.data_type, 'Experimental model') self.assertEqual(parent.location.db_name, 'PDB') 
self.assertEqual(parent.location.access_code, '6flq') self.assertIsNone(parent.location.version) self.assertIsNone(parent.location.details) def test_unknown_model(self): """Test PDBParser when given an unknown model.""" pdbname = utils.get_input_file_name(TOPDIR, 'unknown_model.pdb') p = self._parse_pdb(pdbname) self.assertEqual(p['templates'], {}) self.assertEqual(p['software'], []) self.assertEqual(p['metadata'], []) dataset = p['dataset'] self.assertEqual(dataset.data_type, 'Comparative model') self.assertEqual(dataset.location.path, pdbname) self.assertIsNone(dataset.location.repo) self.assertEqual(dataset.location.details, 'Starting model structure') def test_get_aligned_region_empty(self): """Test _get_aligned_region() with empty alignment""" self.assertRaises(ValueError, ihm.metadata._get_aligned_region, 'AAAA', '----') def _parse_cif(self, fname): p = ihm.metadata.CIFParser() return p.parse_file(fname) def test_cif_official_pdb(self): """Test CIFParser when given an mmCIF in the official PDB database""" p = self._parse_cif(utils.get_input_file_name(TOPDIR, 'official.cif')) self._check_parsed_official_pdb(p) @unittest.skipIf(_format is None, "No C tokenizer") def test_binary_cif_official_pdb(self): """Test BinaryCIFParser when given a BinaryCIF in the official PDB""" fname = utils.get_input_file_name(TOPDIR, 'official.bcif') parser = ihm.metadata.BinaryCIFParser() p = parser.parse_file(fname) self._check_parsed_official_pdb(p) def _check_parsed_official_pdb(self, p, pdb_format=False): dataset = p['dataset'] self.assertEqual(dataset.data_type, 'Experimental model') self.assertEqual(dataset.location.db_name, 'PDB') self.assertEqual(dataset.location.access_code, '2HBJ') if pdb_format: self.assertEqual(dataset.location.version, '14-JUN-06') else: self.assertEqual(dataset.location.version, '2021-11-10') details = ('Structure of the yeast nuclear exosome component, ' 'Rrp6p, reveals an interplay between the active ' 'site and the HRDC domain') if pdb_format: details = details.upper() self.assertEqual(dataset.location.details, details) es = p['entity_source'] self.assertEqual(sorted(es.keys()), ['A', 'B', 'C', 'D']) self.assertEqual(es['B'], es['C']) self.assertEqual(es['A'].src_method, 'man') self.assertEqual(es['A'].gene.scientific_name, 'MUS MUSCULUS') self.assertEqual(es['A'].gene.common_name, 'HOUSE MOUSE') self.assertEqual(es['A'].gene.strain, 'TEST STRAIN 1') self.assertEqual(es['A'].gene.ncbi_taxonomy_id, '10090') self.assertEqual(es['A'].host.scientific_name, 'ESCHERICHIA COLI') self.assertEqual(es['A'].host.common_name, 'TEST COMMON 1') self.assertEqual(es['A'].host.ncbi_taxonomy_id, '562') self.assertEqual(es['A'].host.strain, 'TEST STRAIN 2') self.assertEqual(es['B'].src_method, 'nat') self.assertEqual(es['B'].scientific_name, 'ESCHERICHIA COLI') self.assertEqual(es['B'].common_name, 'TEST COMMON 2') self.assertEqual(es['B'].ncbi_taxonomy_id, '562') self.assertEqual(es['B'].strain, 'TEST STRAIN 3') self.assertEqual(es['D'].src_method, 'syn') self.assertEqual(es['D'].scientific_name, 'HELIANTHUS ANNUUS') self.assertEqual(es['D'].common_name, 'COMMON SUNFLOWER') self.assertEqual(es['D'].ncbi_taxonomy_id, '4232') # _pdbx_entity_src_syn.strain is not used in current PDB entries if pdb_format: self.assertEqual(es['D'].strain, 'TEST STRAIN 4') else: self.assertIsNone(es['D'].strain) def test_cif_model_archive(self): """Test CIFParser when given an mmCIF in Model Archive""" p = self._parse_cif(utils.get_input_file_name(TOPDIR, 'modarchive.cif')) dataset = p['dataset'] 
self.assertEqual(dataset.data_type, 'De Novo model') self.assertEqual(dataset.location.db_name, 'MODEL ARCHIVE') self.assertEqual(dataset.location.access_code, 'ma-bak-cepc-0250') self.assertEqual(dataset.location.version, '2022-11-30') self.assertEqual(dataset.location.details, 'Predicted interaction between CWP1 and IKI1') def test_cif_unknown(self): """Test CIFParser when given an mmCIF not in a database""" fname = utils.get_input_file_name(TOPDIR, 'unknown_model.cif') p = self._parse_cif(fname) dataset = p['dataset'] self.assertEqual(dataset.data_type, 'Comparative model') self.assertIsInstance(dataset.location, ihm.location.FileLocation) self.assertEqual(dataset.location.path, fname) self.assertEqual(dataset.location.details, 'Starting model structure') def test_cif_modeller_model_old(self): """Test CIFParser when given a Modeller model, old style""" fname = utils.get_input_file_name(TOPDIR, 'modeller_model.cif') p = self.check_modeller_model(fname, cif=True) aliname = utils.get_input_file_name(TOPDIR, 'modeller_model.ali') script = utils.get_input_file_name(TOPDIR, 'modeller_model.py') self.assertEqual(p['script'].path, script) for templates in p['templates'].values(): for t in templates: self.assertEqual(t.alignment_file.path, aliname) def test_cif_modeller_incomplete_model(self): """Test CIFParser when given an incomplete Modeller model""" fname = utils.get_input_file_name(TOPDIR, 'modeller_incomplete.cif') p = self._parse_cif(fname) self.assertIsNone(p['script']) def test_cif_modeller_modelcif(self): """Test CIFParser when given a Modeller ModelCIF-compliant model""" # For new-style Modeller models, should read software info directly # from the _software table, not _exptl fname = utils.get_input_file_name(TOPDIR, 'modeller_modelcif.cif') p = self.check_modeller_model(fname, cif=True) aliname = utils.get_input_file_name(TOPDIR, 'modeller_model.ali') script = utils.get_input_file_name(TOPDIR, 'modeller_model.py') self.assertEqual(p['script'].path, script) for templates in p['templates'].values(): for t in templates: self.assertEqual(t.alignment_file.path, aliname) def test_cif_modbase_modelcif(self): """Test CIFParser when given a ModBase ModelCIF-compliant model""" fname = utils.get_input_file_name( TOPDIR, 'modbase-model_e224ef5d7f96947a99dd618618021328.cif') p = self._parse_cif(fname) dataset = p['dataset'] # ModBase isn't in IHMCIF's official set of databases, so should # be reported as "Other" self.assertEqual(dataset.location.db_name, 'Other') self.assertEqual(dataset.location.access_code, 'e224ef5d7f96947a99dd618618021328') self.assertEqual(dataset.location.details, "ModBase database of comparative protein " "structure models") self.assertEqual(sorted(p['templates'].keys()), ['A']) s1, = p['templates']['A'] self.assertEqual(s1.asym_id, 'B') self.assertEqual(s1.seq_id_range, (1, 149)) self.assertEqual(s1.template_seq_id_range, (18, 166)) self.assertAlmostEqual(s1.sequence_identity.value, 99.0, delta=0.1) self.assertEqual( s1.sequence_identity.denominator, ihm.startmodel.SequenceIdentityDenominator.SHORTER_LENGTH) self.assertEqual(dataset.data_type, 'Comparative model') p1, = dataset.parents self.assertEqual(p1.data_type, 'Experimental model') self.assertEqual(p1.location.db_name, 'PDB') self.assertEqual(p1.location.access_code, '2fom') self.assertEqual([s.name for s in p['software']], ['ModPipe', 'MODELLER', 'modbase_pdb_to_cif.py']) def test_cif_alphafold_modelcif(self): """Test CIFParser when given an AlphaFoldDB ModelCIF-compliant model""" fname = 
utils.get_input_file_name(TOPDIR, 'AF-O78126-F1-model_v4.cif') p = self._parse_cif(fname) dataset = p['dataset'] self.assertEqual(dataset.location.db_name, 'AlphaFoldDB') self.assertEqual(dataset.location.access_code, 'AF-O78126-F1') self.assertEqual(dataset.location.details, "Starting model structure") self.assertEqual(dataset.data_type, 'De Novo model') self.assertEqual(len(dataset.parents), 4) p1, p2, p3, p4 = dataset.parents self.assertEqual(p1.data_type, 'Experimental model') self.assertEqual(p1.location.db_name, 'PDB') self.assertEqual(p1.location.access_code, '6ENY') self.assertEqual(p2.location.access_code, '6FGB') self.assertEqual(p3.location.access_code, '1MHC') self.assertEqual(p4.location.access_code, '1K5N') self.assertEqual([s.name for s in p['software']], ['AlphaFold', 'dssp']) self.assertEqual(sorted(p['templates'].keys()), ['A']) s1, s2, s3, s4 = p['templates']['A'] self.assertEqual(s1.asym_id, 'F') self.assertEqual(s1.seq_id_range, (None, None)) self.assertEqual(s1.template_seq_id_range, (None, None)) self.assertIsNone(s1.sequence_identity) if __name__ == '__main__': unittest.main() python-ihm-2.7/test/test_model.py000066400000000000000000000174351503573337200171700ustar00rootroot00000000000000import utils import os import unittest TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) utils.set_search_paths(TOPDIR) import ihm.model class Tests(unittest.TestCase): def test_sphere(self): """Test Sphere class""" s = ihm.model.Sphere(asym_unit='foo', seq_id_range=(1, 5), x=1.0, y=2.0, z=3.0, radius=4.0) self.assertEqual(s.asym_unit, 'foo') self.assertEqual(s.seq_id_range, (1, 5)) def test_atom(self): """Test Atom class""" s = ihm.model.Atom(asym_unit='foo', seq_id=1, atom_id='N', type_symbol='N', x=1.0, y=2.0, z=3.0) self.assertEqual(s.asym_unit, 'foo') self.assertEqual(s.seq_id, 1) def test_model(self): """Test Model class""" m = ihm.model.Model(assembly='foo', protocol='bar', representation='baz') self.assertIsNone(m.name) self.assertEqual(m.protocol, 'bar') def test_model_get_spheres(self): """Test Model.get_spheres()""" m = ihm.model.Model(assembly='foo', protocol='bar', representation='baz') spheres = ['sphere1', 'sphere2'] m._spheres = spheres[:] new_spheres = [s for s in m.get_spheres()] self.assertEqual(new_spheres, spheres) def test_model_add_sphere(self): """Test Model.add_sphere()""" spheres = ['sphere1', 'sphere2'] m = ihm.model.Model(assembly='foo', protocol='bar', representation='baz') m.add_sphere(spheres[0]) m.add_sphere(spheres[1]) self.assertEqual(m._spheres, spheres) def test_model_get_atoms(self): """Test Model.get_atoms()""" m = ihm.model.Model(assembly='foo', protocol='bar', representation='baz') atoms = ['atom1', 'atom2'] m._atoms = atoms[:] new_atoms = [a for a in m.get_atoms()] self.assertEqual(new_atoms, atoms) def test_model_add_atom(self): """Test Model.add_atom()""" atoms = ['atom1', 'atom2'] m = ihm.model.Model(assembly='foo', protocol='bar', representation='baz') m.add_atom(atoms[0]) m.add_atom(atoms[1]) self.assertEqual(m._atoms, atoms) def test_model_group(self): """Test ModelGroup class""" m = ihm.model.Model(assembly='foo', protocol='bar', representation='baz') g = ihm.model.ModelGroup([m], name='foo', details='d') self.assertEqual(g.name, 'foo') self.assertEqual(g.details, 'd') self.assertEqual(g[0].protocol, 'bar') def test_ensemble(self): """Test Ensemble class""" e = ihm.model.Ensemble(model_group=['m1', 'm2', 'm3'], num_models=10, name='bar') self.assertEqual(e.name, 'bar') self.assertIsNone(e.precision) 
self.assertEqual(e.num_models, 10) self.assertEqual(e.num_models_deposited, 3) self.assertEqual(e.densities, []) def test_ensemble_invalid(self): """Test Ensemble class with invalid feaure/method""" self.assertRaises(ValueError, ihm.model.Ensemble, model_group=['m1', 'm2', 'm3'], num_models=10, name='bar', clustering_method="invalid") self.assertRaises(ValueError, ihm.model.Ensemble, model_group=['m1', 'm2', 'm3'], num_models=10, name='bar', clustering_feature="invalid") def test_density(self): """Test LocalizationDensity class""" e = ihm.model.LocalizationDensity(file='foo', asym_unit='bar') self.assertEqual(e.file, 'foo') self.assertEqual(e.asym_unit, 'bar') def test_subsample(self): """Test Subsample classes""" s = ihm.model.Subsample("foo", num_models=42) self.assertEqual(s.name, 'foo') self.assertEqual(s.sub_sampling_type, 'other') self.assertEqual(s.num_models, 42) self.assertIsNone(s.model_group) self.assertIsNone(s.file) self.assertEqual(s.num_models_deposited, 0) mg = ihm.model.ModelGroup(['model1'], name='foo') s = ihm.model.RandomSubsample("foo", num_models=4, model_group=mg) self.assertEqual(s.name, 'foo') self.assertEqual(s.sub_sampling_type, 'random') self.assertEqual(s.num_models, 4) self.assertIs(s.model_group, mg) self.assertIsNone(s.file) self.assertEqual(s.num_models_deposited, 1) f = 'foo' s = ihm.model.IndependentSubsample("foo", num_models=4, file=f) self.assertEqual(s.sub_sampling_type, 'independent') self.assertIs(s.file, f) def test_state(self): """Test State class""" s = ihm.model.State(name='foo') self.assertEqual(s.name, 'foo') self.assertEqual(s, []) def test_state_group(self): """Test StateGroup class""" s = ihm.model.StateGroup() self.assertEqual(s, []) def test_ordered_process(self): """Test OrderedProcess class""" p = ihm.model.OrderedProcess("time steps", "test process") self.assertEqual(p.ordered_by, "time steps") self.assertEqual(p.description, "test process") self.assertEqual(p.steps, []) s = ihm.model.ProcessStep(description="foo") self.assertEqual(s.description, "foo") p.steps.append(s) e = ihm.model.ProcessEdge('from', 'to') s.append(e) self.assertEqual(p.steps[0][0].group_begin, 'from') self.assertEqual(p.steps[0][0].group_end, 'to') self.assertIsNone(p.steps[0][0].description) def test_dcd_writer(self): """Test DCDWriter class""" m1 = ihm.model.Model(None, None, None) m1._atoms = [ihm.model.Atom(None, None, None, None, x=1, y=2, z=3), ihm.model.Atom(None, None, None, None, x=4, y=5, z=6)] with utils.temporary_directory() as tmpdir: dcd = os.path.join(tmpdir, 'out.dcd') with open(dcd, 'wb') as fh: d = ihm.model.DCDWriter(fh) d.add_model(m1) d.add_model(m1) with open(dcd, 'rb') as fh: contents = fh.read() self.assertEqual(len(contents), 452) def test_dcd_writer_framesize_mismatch(self): """Test DCDWriter class with framesize mismatch""" m1 = ihm.model.Model(None, None, None) m1._atoms = [ihm.model.Atom(None, None, None, None, x=1, y=2, z=3), ihm.model.Atom(None, None, None, None, x=4, y=5, z=6)] m2 = ihm.model.Model(None, None, None) m2._atoms = [ihm.model.Atom(None, None, None, None, x=1, y=2, z=3)] with utils.temporary_directory() as tmpdir: dcd = os.path.join(tmpdir, 'out.dcd') with open(dcd, 'wb') as fh: d = ihm.model.DCDWriter(fh) d.add_model(m1) self.assertRaises(ValueError, d.add_model, m2) def test_not_modeled_residue_range(self): """Test construction of NotModeledResidueRange""" e = ihm.Entity('ACGT') asym = ihm.AsymUnit(e, 'foo') rr = ihm.model.NotModeledResidueRange(asym, 1, 2) self.assertEqual(rr.seq_id_begin, 1) 
self.assertEqual(rr.seq_id_end, 2) # Cannot create reversed range self.assertRaises(ValueError, ihm.model.NotModeledResidueRange, asym, 3, 1) # Cannot create out-of-range range self.assertRaises(IndexError, ihm.model.NotModeledResidueRange, asym, -3, 1) self.assertRaises(IndexError, ihm.model.NotModeledResidueRange, asym, 1, 10) if __name__ == '__main__': unittest.main() python-ihm-2.7/test/test_multi_state_scheme.py000066400000000000000000000565041503573337200217460ustar00rootroot00000000000000import utils import os import unittest TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) utils.set_search_paths(TOPDIR) import ihm.multi_state_scheme class Tests(unittest.TestCase): def test_multistatescheme_init(self): """Test the initialization of MultiStateScheme""" class MockObject: pass s1 = MockObject() s2 = MockObject() s1.name = 's1' s2.name = 's2' mssc1 = ihm.multi_state_scheme.Connectivity( begin_state=s1, end_state=s2) mss1 = ihm.multi_state_scheme.MultiStateScheme( name='n', details='d', connectivities=[mssc1], relaxation_times=['lr']) self.assertEqual(mss1.name, 'n') self.assertEqual(mss1.details, 'd') self.assertEqual(mss1._connectivity_list, [mssc1]) self.assertEqual(mss1._relaxation_time_list, ['lr']) self.assertEqual(mss1._states, [s1, s2]) mss2 = ihm.multi_state_scheme.MultiStateScheme( name='n2', details='d2', connectivities=[], relaxation_times=[]) self.assertEqual(len(mss2._connectivity_list), 0) self.assertEqual(len(mss2._relaxation_time_list), 0) mss3 = ihm.multi_state_scheme.MultiStateScheme( name='n3', details='d3', connectivities=[mssc1, mssc1], relaxation_times=['lr', 'lr'] ) self.assertEqual(mss3._connectivity_list, [mssc1]) self.assertEqual(mss3._relaxation_time_list, ['lr']) # Handle empty states (i.e. without models assigned) # Both states should still be added s1 = ihm.model.State( elements=[], type='conformational change', name='s1', experiment_type='Single molecule', population_fraction=0.4) s2 = ihm.model.State( elements=[], type='conformational change', name='s2', experiment_type='Single molecule', population_fraction=0.6) mssc2 = ihm.multi_state_scheme.Connectivity(begin_state=s1, end_state=s2) mss4 = ihm.multi_state_scheme.MultiStateScheme( name='n4', details='d4', connectivities=[mssc2] ) self.assertEqual(len(mss4._states), 2) self.assertEqual(mss4._states, [s1, s2]) # Adding a connectivity with the same states again, should not # add the states again mssc3 = ihm.multi_state_scheme.Connectivity(begin_state=s2) mss4.add_connectivity(mssc3) self.assertEqual(len(mss4._states), 2) self.assertEqual(mss4._states, [s1, s2]) # Compare a state with and without additional information mssc4 = ihm.multi_state_scheme.Connectivity(begin_state=s2, end_state=[]) mss4.add_connectivity(mssc4) self.assertEqual(len(mss4._states), 3) self.assertEqual(mss4._states, [s1, s2, []]) def test_multistatescheme_add_connectivity(self): """Test addition of a connectivity to a MultiStateScheme""" class MockObject: pass mss1 = ihm.multi_state_scheme.MultiStateScheme(name='n', details='d') # The connectivity_list should be empty upon initialization self.assertEqual(len(mss1._connectivity_list), 0) # Add a connectivity should add it to the connectivity_list and the # states should be stored as well s1 = MockObject() s2 = MockObject() s1.name = 's1' s2.name = 's2' mssc1 = ihm.multi_state_scheme.Connectivity( begin_state=s1, end_state=s2) mss1.add_connectivity(mssc1) self.assertEqual(len(mss1._connectivity_list), 1) self.assertEqual(mss1._connectivity_list, [mssc1]) 
self.assertEqual(mss1._states, [s1, s2]) # add a connectivity without end_state s3 = MockObject() s3.name = 's3' mssc2 = ihm.multi_state_scheme.Connectivity( begin_state=s3) mss1.add_connectivity(mssc2) self.assertEqual(len(mss1._connectivity_list), 2) self.assertEqual(mss1._connectivity_list, [mssc1, mssc2]) self.assertEqual(mss1._states, [s1, s2, s3]) # add a connectivity with a previously known state should not add it # to the states s4 = MockObject() s4.name = 's4' mssc3 = ihm.multi_state_scheme.Connectivity( begin_state=s2, end_state=s4) mss1.add_connectivity(mssc3) self.assertEqual(len(mss1._connectivity_list), 3) self.assertEqual(mss1._connectivity_list, [mssc1, mssc2, mssc3]) self.assertEqual(mss1._states, [s1, s2, s3, s4]) mss1.add_connectivity(None) self.assertEqual(len(mss1._connectivity_list), 3) self.assertEqual(mss1._connectivity_list, [mssc1, mssc2, mssc3]) self.assertEqual(mss1._states, [s1, s2, s3, s4]) def test_multistatescheme_add_relaxation_time(self): """Test addition of a relaxation time to a MultiStateScheme""" mss1 = ihm.multi_state_scheme.MultiStateScheme(name='n') # The relaxation_time_list should be empty upon initialization self.assertEqual(len(mss1._relaxation_time_list), 0) # Add a relaxation time mss1.add_relaxation_time('r') self.assertEqual(mss1._relaxation_time_list, ['r']) def test_multistatesscheme_get_connectivities(self): """Test the return of connectivities from a MultiStateScheme""" class MockObject: pass s1 = MockObject() s2 = MockObject() s3 = MockObject() s4 = MockObject() s1.name = 's1' s2.name = 's2' s3.name = 's3' s4.name = 's4' mssc1 = ihm.multi_state_scheme.Connectivity( begin_state=s1, end_state=s2) mssc2 = ihm.multi_state_scheme.Connectivity( begin_state=s2, end_state=s3) mssc3 = ihm.multi_state_scheme.Connectivity( begin_state=s2, end_state=s4) mss1 = ihm.multi_state_scheme.MultiStateScheme( name='n', connectivities=[mssc1, mssc2]) self.assertEqual(mss1.get_connectivities(), [mssc1, mssc2]) mss1.add_connectivity(mssc3) # Test addition of the same connectivity multiple times self.assertEqual(mss1.get_connectivities(), [mssc1, mssc2, mssc3]) mss1.add_connectivity(mssc3) self.assertEqual(mss1.get_connectivities(), [mssc1, mssc2, mssc3]) mss1.add_connectivity(mssc1) self.assertEqual(mss1.get_connectivities(), [mssc1, mssc2, mssc3]) def test_multistatescheme_get_relaxation_times(self): """Test the return of relaxation times from a MultiStateScheme""" mss1 = ihm.multi_state_scheme.MultiStateScheme( name='n', relaxation_times=['r1', 'r2'] ) self.assertEqual(mss1.get_relaxation_times(), ['r1', 'r2']) mss1.add_relaxation_time('r3') self.assertEqual(mss1.get_relaxation_times(), ['r1', 'r2', 'r3']) mss1.add_relaxation_time('r3') self.assertEqual(mss1.get_relaxation_times(), ['r1', 'r2', 'r3', 'r3']) def test_multistatescheme_get_states(self): """Test the return of states from a MultiStateScheme""" class MockObject: pass s1 = MockObject() s2 = MockObject() s3 = MockObject() s1.name = 's1' s2.name = 's2' s3.name = 's3' mssc1 = ihm.multi_state_scheme.Connectivity( begin_state=s1, end_state=s2) mssc2 = ihm.multi_state_scheme.Connectivity( begin_state=s1) mssc3 = ihm.multi_state_scheme.Connectivity( begin_state=s2, end_state=s3) mss1 = ihm.multi_state_scheme.MultiStateScheme( name='n', connectivities=[mssc1]) self.assertEqual(mss1.get_states(), [s1, s2]) mss1.add_connectivity(mssc2) self.assertEqual(mss1.get_states(), [s1, s2]) mss1.add_connectivity(mssc3) self.assertEqual(mss1.get_states(), [s1, s2, s3]) def test_multistatescheme_eq(self): """Test 
equality of MultiStateScheme objects""" class MockObject: pass s1 = MockObject() s2 = MockObject() s3 = MockObject() s4 = MockObject() s1.name = 's1' s2.name = 's2' s3.name = 's3' s4.name = 's4' mssc1 = ihm.multi_state_scheme.Connectivity( begin_state=s1) mssc2 = ihm.multi_state_scheme.Connectivity( begin_state=s2) mssc3 = ihm.multi_state_scheme.Connectivity( begin_state=s3) mssc4 = ihm.multi_state_scheme.Connectivity( begin_state=s1, end_state=s4) mss_ref = ihm.multi_state_scheme.MultiStateScheme( name='name1', details='details1', connectivities=[mssc1]) mss_equal = ihm.multi_state_scheme.MultiStateScheme( name='name1', details='details1', connectivities=[mssc1]) mss_unequal = ihm.multi_state_scheme.MultiStateScheme( name='name2', details='details2', connectivities=[mssc2]) mss_unequal2 = ihm.multi_state_scheme.MultiStateScheme( name='name1', details='details1', connectivities=[mssc3]) mss_unequal4 = ihm.multi_state_scheme.MultiStateScheme( name='name1', details='details1', connectivities=[mssc4]) mss_unequal5 = ihm.multi_state_scheme.MultiStateScheme( name='name1', details='details1', connectivities=[mssc1], relaxation_times=['r1']) self.assertTrue(mss_ref == mss_equal) self.assertFalse(mss_ref == mss_unequal) self.assertTrue(mss_ref != mss_unequal) self.assertFalse(mss_ref == mss_unequal2) self.assertTrue(mss_ref != mss_unequal2) self.assertFalse(mss_ref == mss_unequal4) self.assertTrue(mss_ref != mss_unequal4) self.assertFalse(mss_ref == mss_unequal5) self.assertTrue(mss_ref != mss_unequal5) def test_multistateschemeconnectivity_init(self): """Test initialization of Connectivity""" class MockObject: pass s1 = MockObject() s2 = MockObject() s1.name = 's1' s2.name = 's2' mssc1 = ihm.multi_state_scheme.Connectivity( begin_state=s1, end_state=s2, details='details1', dataset_group='dataset_group1', kinetic_rate='kinetic_rate1', relaxation_time='relaxation_time1') self.assertEqual(mssc1.begin_state, s1) self.assertEqual(mssc1.end_state, s2) self.assertEqual(mssc1.details, 'details1') self.assertEqual(mssc1.dataset_group, 'dataset_group1') self.assertEqual(mssc1.kinetic_rate, 'kinetic_rate1') self.assertEqual(mssc1.relaxation_time, 'relaxation_time1') def test_multistateschemeconnectivity_eq(self): """Test equality of Connectivity objects""" class MockObject: pass s1 = MockObject() s2 = MockObject() s3 = MockObject() s1.name = 's1' s2.name = 's2' s3.name = 's3' mssc_ref = ihm.multi_state_scheme.Connectivity( begin_state=s1, end_state=s2, details='details1', dataset_group='dataset_group1', kinetic_rate='kinetic_rate1', relaxation_time='relaxation_time1') mssc_equal = ihm.multi_state_scheme.Connectivity( begin_state=s1, end_state=s2, details='details1', dataset_group='dataset_group1', kinetic_rate='kinetic_rate1', relaxation_time='relaxation_time1') mssc_unequal1 = ihm.multi_state_scheme.Connectivity( begin_state=s1, end_state=s3, details='details1', dataset_group='dataset_group1', kinetic_rate='kinetic_rate1', relaxation_time='relaxation_time1') mssc_unequal2 = ihm.multi_state_scheme.Connectivity( begin_state=s1, details='details1', dataset_group='dataset_group1', kinetic_rate='kinetic_rate1', relaxation_time='relaxation_time1') self.assertTrue(mssc_ref == mssc_equal) self.assertFalse(mssc_ref == mssc_unequal1) self.assertTrue(mssc_ref != mssc_unequal1) self.assertFalse(mssc_ref == mssc_unequal2) self.assertTrue(mssc_ref != mssc_unequal2) def test_equilibriumconstant_init(self): """Test initialization of EquilibriumConstant and the derived classes""" e1 = 
ihm.multi_state_scheme.EquilibriumConstant(value=1.0, unit='a') self.assertIsInstance(e1, ihm.multi_state_scheme.EquilibriumConstant) self.assertEqual(e1.value, 1.0) self.assertEqual(e1.unit, 'a') self.assertEqual( e1.method, 'equilibrium constant is determined from another method ' 'not listed') e2 = ihm.multi_state_scheme.EquilibriumConstant(value=2.0) self.assertIsInstance(e2, ihm.multi_state_scheme.EquilibriumConstant) self.assertEqual(e2.value, 2.0) self.assertIsNone(e2.unit) self.assertEqual( e2.method, 'equilibrium constant is determined from another method ' 'not listed') e3 = ihm.multi_state_scheme.PopulationEquilibriumConstant(value=3.0, unit='b') self.assertIsInstance( e3, ihm.multi_state_scheme.PopulationEquilibriumConstant) self.assertEqual(e3.value, 3.0) self.assertEqual(e3.unit, 'b') self.assertEqual( e3.method, 'equilibrium constant is determined from population') e4 = ihm.multi_state_scheme.KineticRateEquilibriumConstant(value=4.0, unit='c') self.assertIsInstance( e4, ihm.multi_state_scheme.KineticRateEquilibriumConstant) self.assertEqual(e4.value, 4.0) self.assertEqual(e4.unit, 'c') self.assertEqual( e4.method, 'equilibrium constant is determined from kinetic rates, kAB/kBA') def test_equilibrium_constant_eq(self): """Test equality of EquilibriumConstant objects""" e_ref1 = ihm.multi_state_scheme.EquilibriumConstant( value='1.0', unit='a') e_equal1 = ihm.multi_state_scheme.EquilibriumConstant( value='1.0', unit='a') eq_unequal1 = ihm.multi_state_scheme.EquilibriumConstant( value='1.0', unit='b') eq_unequal2 = ihm.multi_state_scheme.EquilibriumConstant( value='1.0') eq_unequal3 = ihm.multi_state_scheme.EquilibriumConstant( value='2.0', unit='b') eq_unequal4 = ihm.multi_state_scheme.PopulationEquilibriumConstant( value='1.0', unit='a') eq_unequal5 = ihm.multi_state_scheme.KineticRateEquilibriumConstant( value='1.0', unit='a') self.assertTrue(e_ref1 == e_equal1) self.assertFalse(e_ref1 == eq_unequal1) self.assertFalse(e_ref1 == eq_unequal2) self.assertFalse(e_ref1 == eq_unequal3) self.assertFalse(e_ref1 == eq_unequal4) self.assertFalse(e_ref1 == eq_unequal5) e_ref2 = ihm.multi_state_scheme.PopulationEquilibriumConstant( value='1.0', unit='a') e_equal2 = ihm.multi_state_scheme.PopulationEquilibriumConstant( value='1.0', unit='a') e_unequal6 = ihm.multi_state_scheme.PopulationEquilibriumConstant( value='2.0', unit='a') e_unequal7 = ihm.multi_state_scheme.EquilibriumConstant( value='1.0', unit='a') e_unequal8 = ihm.multi_state_scheme.KineticRateEquilibriumConstant( value='1.0', unit='a') self.assertTrue(e_ref2 == e_equal2) self.assertFalse(e_ref2 == e_unequal6) self.assertFalse(e_ref2 == e_unequal7) self.assertFalse(e_ref2 == e_unequal8) e_ref3 = ihm.multi_state_scheme.KineticRateEquilibriumConstant( value='1.0', unit='a') e_equal3 = ihm.multi_state_scheme.KineticRateEquilibriumConstant( value='1.0', unit='a') e_unequal9 = ihm.multi_state_scheme.KineticRateEquilibriumConstant( value='2.0', unit='a') e_unequal10 = ihm.multi_state_scheme.EquilibriumConstant( value='1.0', unit='a') self.assertTrue(e_ref3 == e_equal3) self.assertFalse(e_ref3 == e_unequal9) self.assertFalse(e_ref3 == e_unequal10) def test_kineticrate_init(self): """Test initialization of KineticRate""" # Initialization with only transition_rate_constant given k1 = ihm.multi_state_scheme.KineticRate(transition_rate_constant=1.0) self.assertEqual(k1.transition_rate_constant, 1.0) e1 = ihm.multi_state_scheme.PopulationEquilibriumConstant(value=1.0, unit='unit') # Initialization with equilibrium_constant k2 = 
ihm.multi_state_scheme.KineticRate( equilibrium_constant=e1) self.assertEqual(k2.equilibrium_constant.value, 1.0) self.assertEqual(k2.equilibrium_constant.unit, "unit") self.assertEqual(k2.equilibrium_constant.method, 'equilibrium constant is determined from population') # Initialization with all values given k3 = ihm.multi_state_scheme.KineticRate( transition_rate_constant=0.5, equilibrium_constant=e1, details="details1", dataset_group='dataset_group1', file='file1') self.assertEqual(k3.transition_rate_constant, 0.5) self.assertEqual(k3.equilibrium_constant.value, 1.0) self.assertEqual(k3.equilibrium_constant.unit, "unit") self.assertEqual(k3.equilibrium_constant.method, 'equilibrium constant is determined from population') self.assertEqual(k3.details, "details1") self.assertEqual(k3.dataset_group, 'dataset_group1') self.assertEqual(k3.external_file, 'file1') def test_kineticrate_eq(self): """Test equality of KineticRate objects""" e1 = ihm.multi_state_scheme.PopulationEquilibriumConstant( value=1.5, unit='unit1') e2 = ihm.multi_state_scheme.KineticRateEquilibriumConstant( value=1.5, unit='unit1') k_ref = ihm.multi_state_scheme.KineticRate( transition_rate_constant=1.0, equilibrium_constant=e1, details="details1", dataset_group="dataset_group1", file="file1" ) k_equal = ihm.multi_state_scheme.KineticRate( transition_rate_constant=1.0, equilibrium_constant=e1, details="details1", dataset_group="dataset_group1", file="file1" ) k_unequal1 = ihm.multi_state_scheme.KineticRate( transition_rate_constant=2.0, equilibrium_constant=e1, details="details1", dataset_group="dataset_group1", file="file1" ) k_unequal2 = ihm.multi_state_scheme.KineticRate( transition_rate_constant=1.0, equilibrium_constant=e2, details="details1", dataset_group="dataset_group1", file="file1" ) k_unequal3 = ihm.multi_state_scheme.KineticRate( transition_rate_constant=1.0, details="details1", dataset_group="dataset_group1", file="file1" ) self.assertTrue(k_ref == k_equal) self.assertFalse(k_ref == k_unequal1) self.assertTrue(k_ref != k_unequal1) self.assertFalse(k_ref == k_unequal2) self.assertTrue(k_ref != k_unequal2) self.assertFalse(k_ref == k_unequal3) self.assertTrue(k_ref != k_unequal3) def test_relaxationtime_init(self): """Test initialization of RelaxationTime""" r1 = ihm.multi_state_scheme.RelaxationTime( value=1.0, unit='milliseconds', details='details1', dataset_group='dataset_group1', file='file1' ) self.assertEqual(r1.value, 1.0) self.assertEqual(r1.unit, 'milliseconds') self.assertEqual(r1.details, 'details1') self.assertEqual(r1.dataset_group, 'dataset_group1') self.assertEqual(r1.external_file, 'file1') # Check whether a wrong unit raises a ValueError self.assertRaises( ValueError, ihm.multi_state_scheme.RelaxationTime, value=2.0, unit='wrong_unit') def test_relaxationtime_eq(self): """Test equality of RelaxationTime objects""" r_ref = ihm.multi_state_scheme.RelaxationTime( value=1.0, unit='milliseconds', details='details1', dataset_group='dataset_group1', file='file1' ) r_equal = ihm.multi_state_scheme.RelaxationTime( value=1.0, unit='milliseconds', details='details1', dataset_group='dataset_group1', file='file1' ) r_unequal1 = ihm.multi_state_scheme.RelaxationTime( value=2.0, unit='milliseconds', details='details1', dataset_group='dataset_group1', file='file1' ) r_unequal2 = ihm.multi_state_scheme.RelaxationTime( value=1.0, unit='seconds', details='details1', dataset_group='dataset_group1', file='file1' ) r_unequal3 = ihm.multi_state_scheme.RelaxationTime( value=1.0, unit='milliseconds', 
details='details2', dataset_group='dataset_group1', file='file1' ) r_unequal4 = ihm.multi_state_scheme.RelaxationTime( value=1.0, unit='milliseconds', details='details', dataset_group='dataset_group2', file='file1' ) r_unequal5 = ihm.multi_state_scheme.RelaxationTime( value=1.0, unit='milliseconds', details='details1', file='file1' ) self.assertTrue(r_ref == r_equal) self.assertFalse(r_ref == r_unequal1) self.assertTrue(r_ref != r_unequal1) self.assertFalse(r_ref == r_unequal2) self.assertTrue(r_ref != r_unequal2) self.assertFalse(r_ref == r_unequal3) self.assertTrue(r_ref != r_unequal3) self.assertFalse(r_ref == r_unequal4) self.assertTrue(r_ref != r_unequal4) self.assertFalse(r_ref == r_unequal5) self.assertTrue(r_ref != r_unequal5) if __name__ == '__main__': unittest.main() python-ihm-2.7/test/test_protocol.py000066400000000000000000000024761503573337200177300ustar00rootroot00000000000000import utils import os import unittest TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) utils.set_search_paths(TOPDIR) import ihm.protocol class Tests(unittest.TestCase): def test_step(self): """Test protocol Step class""" s = ihm.protocol.Step(assembly='foo', dataset_group='bar', method='baz', num_models_begin=0, num_models_end=42) self.assertIsNone(s.name) self.assertEqual(s.method, 'baz') self.assertEqual(s._get_report(), 'Unnamed step (baz) (0->42 models)') s = ihm.protocol.Step(assembly='foo', dataset_group='bar', method='baz', num_models_begin=0, num_models_end=42, name="myname", multi_scale=True, multi_state=True, ordered=True) self.assertEqual(s._get_report(), 'myname (baz; multi-scale; multi-state; ordered) ' '(0->42 models)') def test_protocol(self): """Test Protocol class""" s = ihm.protocol.Protocol(name='foo') self.assertEqual(s.name, 'foo') self.assertEqual(s.steps, []) self.assertEqual(s.analyses, []) if __name__ == '__main__': unittest.main() python-ihm-2.7/test/test_reader.py000066400000000000000000006307711503573337200173360ustar00rootroot00000000000000import utils import datetime import os import unittest import gzip import operator import warnings from io import StringIO, BytesIO TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) utils.set_search_paths(TOPDIR) import ihm.reader try: from ihm import _format except ImportError: _format = None def cif_file_handles(cif): """Yield both in-memory and real-file handles for the given mmCIF text. This allows us to test both the pure Python reader and the C-accelerated version with the same input file.""" yield StringIO(cif) with utils.temporary_directory() as tmpdir: fname = os.path.join(tmpdir, 'test') with open(fname, 'w') as fh: fh.write(cif) with open(fname) as fh: yield fh ASYM_ENTITY = """ loop_ _entity_poly_seq.entity_id _entity_poly_seq.num _entity_poly_seq.mon_id _entity_poly_seq.hetero 1 1 MET . 1 2 CYS . 1 3 MET . 1 4 SER . 
# loop_ _struct_asym.id _struct_asym.entity_id _struct_asym.details A 1 foo """ CENTERS_TRANSFORMS = """ loop_ _ihm_geometric_object_center.id _ihm_geometric_object_center.xcoord _ihm_geometric_object_center.ycoord _ihm_geometric_object_center.zcoord 1 1.000 2.000 3.000 # # loop_ _ihm_geometric_object_transformation.id _ihm_geometric_object_transformation.rot_matrix[1][1] _ihm_geometric_object_transformation.rot_matrix[2][1] _ihm_geometric_object_transformation.rot_matrix[3][1] _ihm_geometric_object_transformation.rot_matrix[1][2] _ihm_geometric_object_transformation.rot_matrix[2][2] _ihm_geometric_object_transformation.rot_matrix[3][2] _ihm_geometric_object_transformation.rot_matrix[1][3] _ihm_geometric_object_transformation.rot_matrix[2][3] _ihm_geometric_object_transformation.rot_matrix[3][3] _ihm_geometric_object_transformation.tr_vector[1] _ihm_geometric_object_transformation.tr_vector[2] _ihm_geometric_object_transformation.tr_vector[3] 1 1.000000 0.000000 0.000000 0.000000 1.000000 0.000000 0.000000 0.000000 1.000000 1.000 2.000 3.000 # """ class Tests(unittest.TestCase): def test_read(self): """Test read() function""" cif = "data_model\n_struct.entry_id testid\n" for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) self.assertEqual(s.id, 'testid') # Make sure we can read the file in binary mode too s, = ihm.reader.read(BytesIO(cif.encode('latin-1'))) self.assertEqual(s.id, 'testid') def test_read_unicode(self): """Test that Unicode characters are handled sensibly""" # mmCIF files should technically be ASCII, but try not to fall over # if we're fed a Unicode file cif = "data_model\n_struct.entry_id test\u00dc\U0001f600\n" s, = ihm.reader.read(StringIO(cif)) self.assertEqual(s.id, 'test\u00dc\U0001f600') s, = ihm.reader.read(BytesIO(cif.encode('utf-8'))) # Reading in binary mode should give us the raw text (latin-1) self.assertEqual(s.id, 'test\xc3\x9c\xf0\x9f\x98\x80') with utils.temporary_directory() as tmpdir: fname = os.path.join(tmpdir, 'test') with open(fname, 'w', encoding='utf-8') as fh: fh.write(cif) # Should get the input back if we use the right UTF-8 encoding with open(fname, encoding='utf-8') as fh: s, = ihm.reader.read(fh) self.assertEqual(s.id, 'test\u00dc\U0001f600') # Should get a decode error if we treat it as ASCII: with open(fname, encoding='ascii') as fh: self.assertRaises(UnicodeDecodeError, ihm.reader.read, fh) # A permissive 8-bit encoding should work but give us garbage with open(fname, encoding='latin-1') as fh: s, = ihm.reader.read(fh) self.assertEqual(s.id, 'test\xc3\x9c\xf0\x9f\x98\x80') def test_read_custom_handler(self): """Test read() function with custom Handler""" class MyHandler(ihm.reader.Handler): category = "_custom_category" def __call__(self, field1, myfield): self.system.custom_data = (field1, myfield) cif = "data_model\n_struct.entry_id testid\n" \ "_custom_category.field1 foo\n_custom_category.myfield bar\n" for fh in cif_file_handles(cif): s, = ihm.reader.read(fh, handlers=[MyHandler]) self.assertEqual(s.id, 'testid') self.assertEqual(s.custom_data, ('foo', 'bar')) def test_system_reader(self): """Test SystemReader class""" _ = ihm.reader.SystemReader(ihm.model.Model, ihm.startmodel.StartingModel) def test_id_mapper(self): """Test IDMapper class""" class MockObject: def __init__(self, x, y): self.x, self.y = x, y testlist = [] im = ihm.reader.IDMapper(testlist, MockObject, '1', y='2') a = im.get_by_id('ID1') b = im.get_by_id('ID1') self.assertEqual(id(a), id(b)) self.assertEqual(a.x, '1') self.assertEqual(a.y, '2') 
self.assertEqual(testlist, [a]) def test_handler(self): """Test Handler base class""" class MockObject: pass o = MockObject() o.system = 'foo' h = ihm.reader.Handler(o) self.assertEqual(h.system, 'foo') def test_handler_copy_if_present(self): """Test copy_if_present method""" class MockObject: pass # Keys = namedtuple('Keys', 'foo bar t test x') o = MockObject() h = ihm.reader.Handler(None) h.copy_if_present(o, {'foo': 'bar', 'bar': 'baz', 't': 'u'}, keys=['test', 'foo'], mapkeys={'bar': 'baro', 'x': 'y'}) self.assertEqual(o.foo, 'bar') self.assertEqual(o.baro, 'baz') self.assertFalse(hasattr(o, 't')) self.assertFalse(hasattr(o, 'x')) self.assertFalse(hasattr(o, 'bar')) def test_struct_handler(self): """Test StructHandler""" cif = """ _struct.entry_id eid _struct.title 'Test title' _struct.pdbx_model_details 'Test details' """ for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) self.assertEqual(s.id, 'eid') self.assertEqual(s.title, 'Test title') self.assertEqual(s.model_details, 'Test details') def test_multiple_systems(self): """Test multiple systems from data blocks""" cif = """ data_id1 _struct.entry_id id1 data_id2 _struct.entry_id id2 data_id3 _struct.entry_id id3 data_long-entry$#<> _struct.entry_id id4 """ for fh in cif_file_handles(cif): s1, s2, s3, s4 = ihm.reader.read(fh) self.assertEqual(s1.id, 'id1') self.assertEqual(s2.id, 'id2') self.assertEqual(s3.id, 'id3') # Should not be tripped up by odd characters in data_ block, # and the system ID should match entry_id self.assertEqual(s4.id, 'id4') def test_collection_handler(self): """Test CollectionHandler""" cif = """ loop_ _ihm_entry_collection.id _ihm_entry_collection.name _ihm_entry_collection.details foo bar 'more text' """ for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) c, = s.collections self.assertEqual(c.id, 'foo') self.assertEqual(c.name, 'bar') self.assertEqual(c.details, 'more text') def test_software_handler(self): """Test SoftwareHandler""" cif = """ loop_ _citation.id _citation.journal_abbrev _citation.journal_volume _citation.page_first _citation.page_last _citation.year _citation.pdbx_database_id_PubMed _citation.pdbx_database_id_DOI 1 'Mol Cell Proteomics' 9 2943 . 2014 1234 . # loop_ _software.pdbx_ordinal _software.name _software.classification _software.description _software.version _software.type _software.location _software.citation_id 1 'test software' 'test class' 'test desc' program 1.0.1 https://example.org . 
2 'other software' 'oth class' 'test desc' program 1.0.1 https://example.org 1 """ for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) software, s2 = s.software self.assertEqual(software._id, '1') self.assertEqual(software.name, 'test software') self.assertEqual(software.classification, 'test class') self.assertIsNone(software.citation) self.assertEqual(s2._id, '2') self.assertEqual(s2.name, 'other software') self.assertEqual(s2.classification, 'oth class') self.assertEqual(s2.citation.pmid, '1234') def test_audit_author_handler(self): """Test AuditAuthorHandler""" cif = """ loop_ _audit_author.name _audit_author.pdbx_ordinal auth1 1 auth2 2 auth3 3 """ for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) self.assertEqual(s.authors, ['auth1', 'auth2', 'auth3']) def test_audit_revision_handler(self): """Test AuditRevisionHistoryHandler and related handlers""" cif = """ loop_ _pdbx_audit_revision_history.ordinal _pdbx_audit_revision_history.data_content_type _pdbx_audit_revision_history.major_revision _pdbx_audit_revision_history.minor_revision _pdbx_audit_revision_history.revision_date 40 'Structure model' 1 0 ? 41 'Structure model' 1 0 . 42 'Structure model' 2 0 1979-05-03 # # loop_ _pdbx_audit_revision_details.ordinal _pdbx_audit_revision_details.revision_ordinal _pdbx_audit_revision_details.data_content_type _pdbx_audit_revision_details.provider _pdbx_audit_revision_details.type _pdbx_audit_revision_details.description 1 42 'Structure model' repository 'Initial release' 'Test desc' # # loop_ _pdbx_audit_revision_group.ordinal _pdbx_audit_revision_group.revision_ordinal _pdbx_audit_revision_group.data_content_type _pdbx_audit_revision_group.group 1 42 'Structure model' group1 2 42 'Structure model' group2 # # loop_ _pdbx_audit_revision_category.ordinal _pdbx_audit_revision_category.revision_ordinal _pdbx_audit_revision_category.data_content_type _pdbx_audit_revision_category.category 1 42 'Structure model' cat1 2 42 'Structure model' cat2 # # loop_ _pdbx_audit_revision_item.ordinal _pdbx_audit_revision_item.revision_ordinal _pdbx_audit_revision_item.data_content_type _pdbx_audit_revision_item.item 1 42 'Structure model' item1 """ for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) r1, r2, r = s.revisions self.assertIs(r1.date, ihm.unknown) self.assertEqual(len(r1.details), 0) self.assertEqual(len(r1.groups), 0) self.assertEqual(len(r1.categories), 0) self.assertEqual(len(r1.items), 0) self.assertIsNone(r2.date) self.assertIsInstance(r, ihm.Revision) self.assertEqual(r.data_content_type, "Structure model") self.assertEqual(r.major, 2) self.assertEqual(r.minor, 0) self.assertEqual(r.date, datetime.date(1979, 5, 3)) self.assertEqual(len(r.details), 1) self.assertEqual(r.details[0].provider, 'repository') self.assertEqual(r.details[0].type, 'Initial release') self.assertEqual(r.details[0].description, 'Test desc') self.assertEqual(r.groups, ['group1', 'group2']) self.assertEqual(r.categories, ['cat1', 'cat2']) self.assertEqual(r.items, ['item1']) def test_data_usage_handler(self): """Test DataUsageHandler""" cif = """ loop_ _pdbx_data_usage.id _pdbx_data_usage.type _pdbx_data_usage.details _pdbx_data_usage.url _pdbx_data_usage.name 1 license 'some license' someurl somename 2 disclaimer 'some disclaimer' . . 3 "some other type" 'misc usage' . 
.""" for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) d1, d2, d3 = s.data_usage self.assertIsInstance(d1, ihm.License) self.assertEqual(d1.details, "some license") self.assertEqual(d1.name, "somename") self.assertEqual(d1.url, "someurl") self.assertIsInstance(d2, ihm.Disclaimer) self.assertEqual(d2.details, "some disclaimer") self.assertIsNone(d2.name) self.assertIsNone(d2.url) self.assertEqual(d3.type, "other") self.assertEqual(d3.details, "misc usage") def test_grant_handler(self): """Test GrantHandler""" cif = """ loop_ _pdbx_audit_support.funding_organization _pdbx_audit_support.country _pdbx_audit_support.grant_number _pdbx_audit_support.ordinal NIH 'United States' foo 1 NSF 'United States' bar 2 """ for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) g1, g2 = s.grants self.assertEqual(g1.funding_organization, "NIH") self.assertEqual(g1.country, "United States") self.assertEqual(g1.grant_number, "foo") self.assertEqual(g2.funding_organization, "NSF") self.assertEqual(g2.country, "United States") self.assertEqual(g2.grant_number, "bar") def test_citation_handler(self): """Test CitationHandler and CitationAuthorHandler""" cif = """ loop_ _citation.id _citation.journal_abbrev _citation.journal_volume _citation.page_first _citation.page_last _citation.year _citation.pdbx_database_id_PubMed _citation.pdbx_database_id_DOI primary 'Mol Cell Proteomics' 9 2943 . 2014 1234 . 3 'Mol Cell Proteomics' 9 2943 2946 2014 1234 1.2.3.4 4 'Mol Cell Proteomics' 9 . . 2014 1234 1.2.3.4 # # loop_ _citation_author.citation_id _citation_author.name _citation_author.ordinal 3 'Foo A' 1 3 'Bar C' 2 3 . 3 5 'Baz X' 4 """ for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) citation1, citation2, citation3, citation4 = s.citations self.assertEqual(citation1._id, 'primary') self.assertTrue(citation1.is_primary) self.assertEqual(citation1.page_range, '2943') self.assertEqual(citation1.authors, []) self.assertEqual(citation1.pmid, '1234') self.assertIsNone(citation1.doi) self.assertEqual(citation2._id, '3') self.assertFalse(citation2.is_primary) self.assertEqual(citation2.page_range, ('2943', '2946')) self.assertEqual(citation2.authors, ['Foo A', 'Bar C']) self.assertEqual(citation2.doi, '1.2.3.4') self.assertEqual(citation3._id, '4') self.assertFalse(citation3.is_primary) self.assertEqual(citation3.authors, []) self.assertIsNone(citation3.page_range) # todo: should probably be an error, no _citation.id == 4 self.assertEqual(citation4._id, '5') self.assertEqual(citation4.authors, ['Baz X']) def test_chem_comp_handler(self): """Test ChemCompHandler and EntityPolySeqHandler""" chem_comp_cat = """ loop_ _chem_comp.id _chem_comp.type _chem_comp.name _chem_comp.formula MET 'L-peptide linking' . . CYS 'D-peptide linking' CYSTEINE . MYTYPE 'D-PEPTIDE LINKING' 'MY CUSTOM COMPONENT' 'C6 H12' """ entity_poly_cat = """ loop_ _entity_poly_seq.entity_id _entity_poly_seq.num _entity_poly_seq.mon_id _entity_poly_seq.hetero 1 1 MET . 1 4 MYTYPE . 1 5 CYS . 1 2 MET . 
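# Note the rows above are deliberately out of order (num 1, 4, 5, 2) and
# num 3 is absent; the reader is expected to order the sequence by num
# and leave the missing position empty.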
""" cif1 = chem_comp_cat + entity_poly_cat cif2 = entity_poly_cat + chem_comp_cat # Order of the two categories shouldn't matter for cif in cif1, cif2: for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) e1, = s.entities s = e1.sequence self.assertEqual(len(s), 5) lpeptide = ihm.LPeptideAlphabet() self.assertEqual(id(s[0]), id(lpeptide['M'])) self.assertEqual(id(s[1]), id(lpeptide['M'])) self.assertEqual(id(s[4]), id(lpeptide['C'])) self.assertEqual(s[0].name, 'METHIONINE') self.assertIsNone(s[2]) self.assertEqual(s[3].id, 'MYTYPE') self.assertEqual(s[3].type, 'D-peptide linking') self.assertEqual(s[3].name, 'MY CUSTOM COMPONENT') self.assertAlmostEqual(s[3].formula_weight, 84.162, delta=0.1) self.assertEqual(s[3].__class__, ihm.DPeptideChemComp) # Class of standard type shouldn't be changed self.assertEqual(s[4].type, 'L-peptide linking') self.assertEqual(s[4].__class__, ihm.LPeptideChemComp) def test_entity_poly_handler(self): """Test EntityPolyHandler""" fh = StringIO(""" loop_ _entity_poly_seq.entity_id _entity_poly_seq.num _entity_poly_seq.mon_id _entity_poly_seq.hetero 1 1 OCS . 1 2 MET . 1 3 ACE . 2 1 MET . 3 1 MET . # loop_ _entity_poly.entity_id _entity_poly.type _entity_poly.pdbx_seq_one_letter_code _entity_poly.pdbx_seq_one_letter_code_can 1 'polypeptide(L)' ;(OCS) M ; SM 3 other . . """) s, = ihm.reader.read(fh) e1, e2, e3 = s.entities c1, c2, c3 = e1.sequence self.assertEqual(c1.id, 'OCS') # Missing information should be filled in from entity_poly self.assertEqual(c1.code, 'OCS') self.assertEqual(c1.code_canonical, 'S') # No info in entity_poly for this component self.assertEqual(c3.id, 'ACE') self.assertIsNone(c3.code) self.assertIsNone(c3.code_canonical) def test_chem_comp_nonpoly_handler(self): """Test ChemCompHandler and EntityNonPolyHandler""" chem_comp_cat = """ loop_ _chem_comp.id _chem_comp.type HEM non-polymer HOH non-polymer """ entity_nonpoly_cat = """ loop_ _pdbx_entity_nonpoly.entity_id _pdbx_entity_nonpoly.name _pdbx_entity_nonpoly.comp_id 1 Heme HEM 2 Water HOH """ cif1 = chem_comp_cat + entity_nonpoly_cat cif2 = entity_nonpoly_cat + chem_comp_cat # Order of the two categories shouldn't matter for cif in cif1, cif2: for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) e1, e2 = s.entities s = e1.sequence self.assertEqual(len(s), 1) self.assertEqual(s[0].id, 'HEM') self.assertEqual(s[0].type, 'non-polymer') self.assertEqual(s[0].__class__, ihm.NonPolymerChemComp) s = e2.sequence self.assertEqual(len(s), 1) self.assertEqual(s[0].id, 'HOH') self.assertEqual(s[0].type, 'non-polymer') self.assertEqual(s[0].__class__, ihm.WaterChemComp) def test_chem_descriptor_handler(self): """Test ChemDescriptorHandler""" for cat in ("ihm_chemical_component_descriptor", "flr_chemical_descriptor"): cif = """ loop_ _%(cat)s.id _%(cat)s.auth_name _%(cat)s.chemical_name _%(cat)s.common_name _%(cat)s.smiles _%(cat)s.smiles_canonical _%(cat)s.inchi _%(cat)s.inchi_key 1 EDC "test-chem-EDC" . "CCN=C=NCCCN(C)C" . 
test-inchi test-inchi-key """ % {'cat': cat} for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) d1, = s.orphan_chem_descriptors self.assertEqual(d1.auth_name, 'EDC') self.assertIsNone(d1.chem_comp_id) self.assertEqual(d1.chemical_name, 'test-chem-EDC') self.assertEqual(d1.smiles, 'CCN=C=NCCCN(C)C') self.assertIsNone(d1.smiles_canonical) self.assertEqual(d1.inchi, 'test-inchi') self.assertEqual(d1.inchi_key, 'test-inchi-key') def test_entity_handler(self): """Test EntityHandler""" cif = """ loop_ _entity.id _entity.type _entity.src_method _entity.pdbx_description _entity.pdbx_number_of_molecules _entity.formula_weight _entity.details 1 polymer nat Nup84 2 100.0 . 2 polymer syn Nup85 3 200.0 . 3 polymer . Nup86 3 300.0 . 4 polymer unknown Nup87 3 300.0 . 5 branched unknown Nup88 3 300.0 . """ for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) e1, e2, e3, e4, e5 = s.entities self.assertEqual(e1.description, 'Nup84') self.assertTrue(e1._force_polymer) self.assertEqual( e1.number_of_molecules, '2') # todo: coerce to int self.assertEqual(e1.source.src_method, 'nat') self.assertEqual(e2.source.src_method, 'syn') self.assertIsNone(e3.source) self.assertIsNone(e4.source) self.assertFalse(e5._force_polymer) self.assertTrue(e5._hint_branched) def test_entity_handler_minimal(self): """Test EntityHandler with minimal entity category""" cif = """ loop_ _entity.id 1 """ for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) e1, = s.entities self.assertIsNone(e1.description) def test_entity_src_gen_handler(self): """Test EntitySrcGenHandler""" entity = """ loop_ _entity.id _entity.type _entity.src_method _entity.pdbx_description _entity.pdbx_number_of_molecules _entity.formula_weight _entity.details 1 polymer man Nup84 2 100.0 . """ src_gen = """ _entity_src_gen.entity_id 1 _entity_src_gen.pdbx_src_id 42 _entity_src_gen.pdbx_gene_src_scientific_name 'test latin name' _entity_src_gen.pdbx_gene_src_ncbi_taxonomy_id 1234 _entity_src_gen.gene_src_common_name 'test common name' _entity_src_gen.gene_src_strain 'test strain' _entity_src_gen.pdbx_host_org_scientific_name 'host latin name' _entity_src_gen.pdbx_host_org_ncbi_taxonomy_id 5678 _entity_src_gen.host_org_common_name 'other common name' _entity_src_gen.pdbx_host_org_strain 'other strain' """ # Order of the categories shouldn't matter cif1 = entity + src_gen cif2 = src_gen + entity for cif in cif1, cif2: for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) e, = s.entities self.assertEqual(e.source.src_method, 'man') self.assertEqual(e.source._id, '42') self.assertEqual(e.source.host.ncbi_taxonomy_id, '5678') self.assertEqual(e.source.host.scientific_name, 'host latin name') self.assertEqual(e.source.host.common_name, 'other common name') self.assertEqual(e.source.host.strain, 'other strain') self.assertEqual(e.source.gene.ncbi_taxonomy_id, '1234') self.assertEqual(e.source.gene.scientific_name, 'test latin name') self.assertEqual(e.source.gene.common_name, 'test common name') self.assertEqual(e.source.gene.strain, 'test strain') def test_entity_src_nat_handler(self): """Test EntitySrcNatHandler""" entity = """ loop_ _entity.id _entity.type _entity.src_method _entity.pdbx_description _entity.pdbx_number_of_molecules _entity.formula_weight _entity.details 1 polymer man Nup84 2 100.0 . 
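# Although _entity.src_method above says 'man', the _entity_src_nat
# category below should take precedence, so the source is read back
# with src_method 'nat'.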
""" src_nat = """ _entity_src_nat.entity_id 1 _entity_src_nat.pdbx_src_id 42 _entity_src_nat.pdbx_organism_scientific 'test latin name' _entity_src_nat.pdbx_ncbi_taxonomy_id 5678 _entity_src_nat.common_name 'test common name' _entity_src_nat.strain 'test strain' """ # Order of the categories shouldn't matter cif1 = entity + src_nat cif2 = src_nat + entity for cif in cif1, cif2: for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) e, = s.entities self.assertEqual(e.source.src_method, 'nat') self.assertEqual(e.source._id, '42') self.assertEqual(e.source.ncbi_taxonomy_id, '5678') self.assertEqual(e.source.scientific_name, 'test latin name') self.assertEqual(e.source.common_name, 'test common name') self.assertEqual(e.source.strain, 'test strain') def test_entity_src_syn_handler(self): """Test EntitySrcSynHandler""" entity = """ loop_ _entity.id _entity.type _entity.src_method _entity.pdbx_description _entity.pdbx_number_of_molecules _entity.formula_weight _entity.details 1 polymer man Nup84 2 100.0 . """ src_syn = """ _pdbx_entity_src_syn.entity_id 1 _pdbx_entity_src_syn.pdbx_src_id 42 _pdbx_entity_src_syn.organism_scientific 'test latin name' _pdbx_entity_src_syn.organism_common_name 'test common name' _pdbx_entity_src_syn.ncbi_taxonomy_id 5678 _pdbx_entity_src_syn.strain 'test strain' """ # Order of the categories shouldn't matter cif1 = entity + src_syn cif2 = src_syn + entity for cif in cif1, cif2: for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) e, = s.entities self.assertEqual(e.source.src_method, 'syn') self.assertEqual(e.source._id, '42') self.assertEqual(e.source.ncbi_taxonomy_id, '5678') self.assertEqual(e.source.scientific_name, 'test latin name') self.assertEqual(e.source.common_name, 'test common name') # _pdbx_entity_src_syn.strain is not used in current PDB self.assertIsNone(e.source.strain) def test_struct_ref_handler(self): """Test StructRefHandler""" entity = """ loop_ _entity.id _entity.type _entity.src_method _entity.pdbx_description _entity.pdbx_number_of_molecules _entity.formula_weight _entity.details 1 polymer man test 1 100.0 . """ struct_ref = """ loop_ _struct_ref.id _struct_ref.entity_id _struct_ref.db_name _struct_ref.db_code _struct_ref.pdbx_db_accession _struct_ref.pdbx_align_begin _struct_ref.pdbx_seq_one_letter_code _struct_ref.details 1 1 UNP NUP84_YEAST P52891 3 ;LSP TYQT ; 'test sequence' 2 1 MyDatabase testcode testacc 1 MEL 'other sequence' 3 1 MyDatabase testcode2 testacc2 1 . 'other sequence' 4 1 MyDatabase testcode3 testacc3 1 ? 'other sequence' # # loop_ _struct_ref_seq.align_id _struct_ref_seq.ref_id _struct_ref_seq.seq_align_beg _struct_ref_seq.seq_align_end _struct_ref_seq.db_align_beg _struct_ref_seq.db_align_end 1 1 1 4 3 6 2 1 5 5 8 8 # # loop_ _struct_ref_seq_dif.pdbx_ordinal _struct_ref_seq_dif.align_id _struct_ref_seq_dif.seq_num _struct_ref_seq_dif.db_mon_id _struct_ref_seq_dif.mon_id _struct_ref_seq_dif.details 1 1 2 TRP SER 'Test mutation' 2 1 2 . . 
'Test mutation' # """ # Order of the categories shouldn't matter cif1 = entity + struct_ref cif2 = struct_ref + entity for cif in cif1, cif2: for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) e, = s.entities r1, r2, r3, r4 = e.references self.assertIsInstance(r1, ihm.reference.UniProtSequence) self.assertEqual(r1.db_name, 'UNP') self.assertEqual(r1.db_code, 'NUP84_YEAST') self.assertEqual(r1.accession, 'P52891') self.assertEqual(r1.sequence, '--LSPTYQT') self.assertEqual(r1.details, 'test sequence') self.assertIsNone(r3.sequence) self.assertEqual(r4.sequence, ihm.unknown) a1, a2 = r1.alignments self.assertEqual(a1.db_begin, 3) self.assertEqual(a1.db_end, 6) self.assertEqual(a1.entity_begin, 1) self.assertEqual(a1.entity_end, 4) sd, sd2 = a1.seq_dif self.assertEqual(sd.seq_id, 2) self.assertIsInstance(sd.db_monomer, ihm.ChemComp) self.assertIsInstance(sd.monomer, ihm.ChemComp) self.assertEqual(sd.db_monomer.id, 'TRP') self.assertEqual(sd.monomer.id, 'SER') self.assertEqual(sd.details, 'Test mutation') # Both mon_id and db_mon_id are optional, so could be empty self.assertIsNone(sd2.db_monomer) self.assertIsNone(sd2.monomer) self.assertEqual(a2.db_begin, 8) self.assertEqual(a2.db_end, 8) self.assertEqual(a2.entity_begin, 5) self.assertEqual(a2.entity_end, 5) self.assertEqual(len(a2.seq_dif), 0) self.assertIsInstance(r2, ihm.reference.Sequence) self.assertEqual(r2.db_name, 'MyDatabase') self.assertEqual(r2.db_code, 'testcode') self.assertEqual(r2.accession, 'testacc') self.assertEqual(r2.sequence, 'MEL') self.assertEqual(r2.details, 'other sequence') self.assertEqual(len(r2.alignments), 0) def test_asym_unit_handler(self): """Test AsymUnitHandler""" cif = """ loop_ _struct_asym.id _struct_asym.entity_id _struct_asym.details A 1 Nup84 B 1 Nup85 """ for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) a1, a2 = s.asym_units self.assertEqual(a1._id, 'A') self.assertEqual(a1.id, 'A') self.assertEqual(a1.entity._id, '1') self.assertEqual(a1.details, 'Nup84') self.assertEqual(a2.entity._id, '1') self.assertEqual(a2._id, 'B') self.assertEqual(a2.id, 'B') self.assertEqual(a2.details, 'Nup85') self.assertEqual(id(a1.entity), id(a2.entity)) def test_assembly_handler(self): """Test AssemblyHandler""" cif = """ loop_ _ihm_struct_assembly.id _ihm_struct_assembly.name _ihm_struct_assembly.description 1 'Complete assembly' 'All known components' """ for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) a1, = s.orphan_assemblies self.assertEqual(a1._id, '1') self.assertEqual(a1.name, 'Complete assembly') self.assertEqual(a1.description, 'All known components') def test_assembly_details_handler(self): """Test AssemblyDetailsHandler""" entity_cif = """ loop_ _entity_poly_seq.entity_id _entity_poly_seq.num _entity_poly_seq.mon_id 1 1 ALA 1 2 ALA 2 1 ALA 2 2 ALA # loop_ _ihm_entity_poly_segment.id _ihm_entity_poly_segment.entity_id _ihm_entity_poly_segment.seq_id_begin _ihm_entity_poly_segment.seq_id_end 1 1 1 726 2 2 1 744 3 1 1 2 4 2 1 50 5 2 1 2 """ assembly_cif = """ loop_ _struct_asym.id _struct_asym.entity_id _struct_asym.details A 1 Nup84 B 2 Nup85 # loop_ _ihm_struct_assembly.id _ihm_struct_assembly.name _ihm_struct_assembly.description 1 . . 2 . . 3 . . 
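# Assemblies 1-3 have no name or description; assembly 4 below provides
# both, and since it covers all asym units its values are also expected
# to be copied to the automatically-created complete assembly.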
4 'user-provided name' 'user-provided desc' # loop_ _ihm_struct_assembly_details.id _ihm_struct_assembly_details.assembly_id _ihm_struct_assembly_details.parent_assembly_id _ihm_struct_assembly_details.entity_description _ihm_struct_assembly_details.entity_id _ihm_struct_assembly_details.asym_id _ihm_struct_assembly_details.entity_poly_segment_id 1 1 1 Nup84 1 A 1 2 1 1 Nup85 2 B 2 3 1 1 Nup84 1 A 3 4 2 1 Nup86 2 . 4 5 2 1 Nup85 2 . 5 6 3 1 Nup84 1 A . 7 3 1 Nup85 2 . . 7 4 1 Nup84 1 A . 8 4 1 Nup85 2 B . """ # Order of categories should not matter for cif in (entity_cif + assembly_cif, assembly_cif + entity_cif): for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) a1, a2, a3, a4 = s.orphan_assemblies self.assertEqual(a1._id, '1') self.assertIsNone(a1.parent) self.assertEqual(len(a1), 3) # AsymUnitRange self.assertIsInstance(a1[0], ihm.AsymUnitRange) self.assertEqual(a1[0]._id, 'A') self.assertEqual(a1[0].seq_id_range, (1, 726)) self.assertEqual(a1[1]._id, 'B') self.assertEqual(a1[1].seq_id_range, (1, 744)) self.assertEqual(a2._id, '2') self.assertEqual(a2.parent, a1) # AsymUnit self.assertIsInstance(a1[2], ihm.AsymUnit) # EntityRange self.assertEqual(len(a2), 2) self.assertIsInstance(a2[0], ihm.EntityRange) self.assertEqual(a2[0]._id, '2') self.assertEqual(a2[0].seq_id_range, (1, 50)) # Entity self.assertIsInstance(a2[1], ihm.Entity) # Assembly with no ranges given self.assertEqual(len(a3), 2) self.assertIsInstance(a3[0], ihm.AsymUnit) self.assertIsInstance(a3[1], ihm.Entity) # "Complete" assembly that covers all AsymUnits self.assertEqual(len(a4), 2) self.assertIsInstance(a4[0], ihm.AsymUnit) self.assertIsInstance(a4[1], ihm.AsymUnit) self.assertEqual(a4.name, 'user-provided name') self.assertEqual(a4.description, 'user-provided desc') # Should set name, description of system.complete_assembly self.assertEqual(s.complete_assembly.name, a4.name) self.assertEqual(s.complete_assembly.description, a4.description) def test_external_file_handler(self): """Test ExtRef and ExtFileHandler""" ext_ref_cat = """ loop_ _ihm_external_reference_info.reference_id _ihm_external_reference_info.reference_provider _ihm_external_reference_info.reference_type _ihm_external_reference_info.reference _ihm_external_reference_info.refers_to _ihm_external_reference_info.associated_url _ihm_external_reference_info.details 1 Zenodo DOI 10.5281/zenodo.1218053 Archive https://example.com/foo.zip 'test repo' 2 . 'Supplementary Files' . Other . . 3 Zenodo DOI 10.5281/zenodo.1218058 File https://example.com/foo.dcd . """ ext_file_cat = """ loop_ _ihm_external_files.id _ihm_external_files.reference_id _ihm_external_files.file_path _ihm_external_files.content_type _ihm_external_files.file_format _ihm_external_files.file_size_bytes _ihm_external_files.details 1 1 scripts/test.py 'Modeling workflow or script' TXT 180 'Test script' 2 2 foo/bar.txt 'Input data or restraints' TXT 42.0 'Test text' 3 3 . 'Modeling or post-processing output' . . 'Ensemble structures' 4 3 . . . . . 5 3 foo.txt Other . . 
'Other file' """ # Order of the categories shouldn't matter cif1 = ext_ref_cat + ext_file_cat cif2 = ext_file_cat + ext_ref_cat for cif in cif1, cif2: for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) l1, l2, l3, l4, l5 = s.locations self.assertEqual(l1.path, 'scripts/test.py') self.assertEqual(l1.details, 'Test script') self.assertEqual(l1.repo.doi, '10.5281/zenodo.1218053') self.assertIsInstance(l1.file_size, int) self.assertEqual(l1.file_size, 180) self.assertEqual(l1.file_format, 'TXT') self.assertEqual(l1.repo.details, 'test repo') self.assertEqual(l1.__class__, ihm.location.WorkflowFileLocation) self.assertEqual(l2.path, 'foo/bar.txt') self.assertEqual(l2.details, 'Test text') self.assertIsNone(l2.repo) self.assertIsInstance(l2.file_size, float) self.assertAlmostEqual(l2.file_size, 42.0, delta=0.01) self.assertEqual(l2.file_format, 'TXT') self.assertEqual(l2.__class__, ihm.location.InputFileLocation) self.assertEqual(l3.path, '.') self.assertEqual(l3.details, 'Ensemble structures') self.assertIsNone(l3.file_size) self.assertIsNone(l3.file_format) self.assertEqual(l3.repo.doi, '10.5281/zenodo.1218058') self.assertEqual(l3.__class__, ihm.location.OutputFileLocation) self.assertEqual(l4.path, '.') self.assertIsNone(l4.file_size) self.assertIsNone(l4.file_format) self.assertIsNone(l4.details) self.assertEqual(l4.repo.doi, '10.5281/zenodo.1218058') # Type is unspecified self.assertEqual(l4.__class__, ihm.location.FileLocation) self.assertEqual(l5.content_type, 'Other') self.assertEqual(l5.__class__, ihm.location.FileLocation) def test_dataset_list_handler(self): """Test DatasetListHandler""" cif = """ loop_ _ihm_dataset_list.id _ihm_dataset_list.data_type _ihm_dataset_list.database_hosted _ihm_dataset_list.details 1 'Experimental model' YES . 2 'COMPARATIVE MODEL' YES . 3 'EM raw micrographs' YES 'test details' 4 . YES . 5 'Crosslinking-MS data' YES . 6 'CX-MS data' YES . 
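# The last two rows use the newer 'Crosslinking-MS data' and the legacy
# 'CX-MS data' type strings; both are expected to map to CXMSDataset,
# while the row with no data_type falls back to the plain Dataset class.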
""" for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) d1, d2, d3, d4, d5, d6 = s.orphan_datasets self.assertEqual(d1.__class__, ihm.dataset.PDBDataset) self.assertEqual(d2.__class__, ihm.dataset.ComparativeModelDataset) self.assertEqual(d3.__class__, ihm.dataset.EMMicrographsDataset) self.assertTrue(d1._allow_duplicates) self.assertTrue(d2._allow_duplicates) self.assertTrue(d3._allow_duplicates) # No specified data type - use base class self.assertEqual(d4.__class__, ihm.dataset.Dataset) self.assertIsNone(d1.details) self.assertEqual(d3.details, 'test details') # Both new and old data_types should map to the same # crosslink class self.assertEqual(d5.__class__, ihm.dataset.CXMSDataset) self.assertEqual(d6.__class__, ihm.dataset.CXMSDataset) def test_dataset_group_handler(self): """Test DatasetGroupHandler""" cif = """ loop_ _ihm_dataset_group.id _ihm_dataset_group.name _ihm_dataset_group.application _ihm_dataset_group.details 1 "foo" "foo app" "foo details" """ for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) g1, = s.orphan_dataset_groups self.assertEqual(len(g1), 0) # no datasets read yet self.assertEqual(g1.name, 'foo') self.assertEqual(g1.application, 'foo app') self.assertEqual(g1.details, 'foo details') def test_dataset_group_link_handler(self): """Test DatasetGroupLinkHandler""" cif = """ loop_ _ihm_dataset_group_link.group_id _ihm_dataset_group_link.dataset_list_id 1 1 1 2 """ for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) d1, d2 = s.orphan_datasets g1, = s.orphan_dataset_groups self.assertEqual(len(g1), 2) self.assertEqual(g1[0], d1) self.assertEqual(g1[1], d2) # No type specified yet self.assertEqual(d1.__class__, ihm.dataset.Dataset) self.assertEqual(d2.__class__, ihm.dataset.Dataset) def test_dataset_multiple_locations(self): """Check handling of a dataset with multiple locations""" cif = """ loop_ _ihm_dataset_external_reference.id _ihm_dataset_external_reference.dataset_list_id _ihm_dataset_external_reference.file_id 1 1 11 2 1 12 # loop_ _ihm_dataset_related_db_reference.id _ihm_dataset_related_db_reference.dataset_list_id _ihm_dataset_related_db_reference.db_name _ihm_dataset_related_db_reference.accession_code _ihm_dataset_related_db_reference.version _ihm_dataset_related_db_reference.details 1 1 PDB 3JRO . . 2 1 PDB 1ABC . . 
""" for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) d, = s.orphan_datasets # "location" should match first _locations self.assertEqual(d.location._id, '11') loc1, loc2, loc3, loc4 = d._locations self.assertEqual(loc1._id, '11') self.assertEqual(loc2._id, '12') self.assertEqual(loc3._id, '1') self.assertEqual(loc4._id, '2') self.assertIsInstance(loc1, ihm.location.FileLocation) self.assertIsInstance(loc2, ihm.location.FileLocation) self.assertIsInstance(loc3, ihm.location.PDBLocation) self.assertIsInstance(loc4, ihm.location.PDBLocation) def test_dataset_extref_handler(self): """Test DatasetExtRefHandler""" cif = """ loop_ _ihm_dataset_external_reference.id _ihm_dataset_external_reference.dataset_list_id _ihm_dataset_external_reference.file_id 1 4 11 2 6 12 """ for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) d1, d2 = s.orphan_datasets self.assertEqual(d1._id, '4') self.assertEqual(d1.location._id, '11') self.assertEqual(d2._id, '6') self.assertEqual(d2.location._id, '12') def test_dataset_dbref_handler(self): """Test DatasetDBRefHandler""" cif = """ loop_ _ihm_dataset_related_db_reference.id _ihm_dataset_related_db_reference.dataset_list_id _ihm_dataset_related_db_reference.db_name _ihm_dataset_related_db_reference.accession_code _ihm_dataset_related_db_reference.version _ihm_dataset_related_db_reference.details 1 1 PDB 3JRO . . 2 3 PDB 3F3F 30-OCT-08 'CRYSTAL STRUCTURE' 3 5 emdb EMD-123 . . 4 6 . . . . 5 7 testDB testcode testver testdetails """ for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) d1, d2, d3, d4, d5 = s.orphan_datasets self.assertEqual(d1.location.db_name, 'PDB') self.assertEqual(d1.location.__class__, ihm.location.PDBLocation) self.assertEqual(d1.location.access_code, '3JRO') self.assertEqual(d2.location.db_name, 'PDB') self.assertEqual(d2.location.__class__, ihm.location.PDBLocation) self.assertEqual(d2.location.access_code, '3F3F') self.assertEqual(d2.location.version, '30-OCT-08') self.assertEqual(d2.location.details, 'CRYSTAL STRUCTURE') self.assertEqual(d3.location.db_name, 'EMDB') self.assertEqual(d3.location.__class__, ihm.location.EMDBLocation) self.assertEqual(d3.location.access_code, 'EMD-123') self.assertIsNone(d3.location.version) self.assertIsNone(d3.location.details) self.assertEqual(d4.location.db_name, 'Other') self.assertEqual(d4.location.__class__, ihm.location.DatabaseLocation) self.assertIsNone(d4.location.access_code) self.assertEqual(d5.location.__class__, ihm.location.DatabaseLocation) self.assertEqual(d5.location.db_name, "testDB") self.assertEqual(d5.location.access_code, "testcode") self.assertEqual(d5.location.version, "testver") self.assertEqual(d5.location.details, "testdetails") def test_related_datasets_handler(self): """Test RelatedDatasetsHandler""" cif = """ loop_ _ihm_data_transformation.id _ihm_data_transformation.rot_matrix[1][1] _ihm_data_transformation.rot_matrix[2][1] _ihm_data_transformation.rot_matrix[3][1] _ihm_data_transformation.rot_matrix[1][2] _ihm_data_transformation.rot_matrix[2][2] _ihm_data_transformation.rot_matrix[3][2] _ihm_data_transformation.rot_matrix[1][3] _ihm_data_transformation.rot_matrix[2][3] _ihm_data_transformation.rot_matrix[3][3] _ihm_data_transformation.tr_vector[1] _ihm_data_transformation.tr_vector[2] _ihm_data_transformation.tr_vector[3] 42 -0.637588 0.089507 0.765160 0.755616 -0.120841 0.643771 0.150085 0.988628 0.009414 327.161 83.209 -227.800 # loop_ _ihm_related_datasets.dataset_list_id_derived _ihm_related_datasets.dataset_list_id_primary 
_ihm_related_datasets.transformation_id 4 1 . 7 1 42 """ for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) d1, d2, d3 = s.orphan_datasets self.assertEqual(d1._id, '4') self.assertEqual(d2._id, '1') self.assertEqual(d3._id, '7') self.assertEqual(d1.parents, [d2]) self.assertEqual(d2.parents, []) self.assertEqual(len(d3.parents), 1) self.assertIsInstance(d3.parents[0], ihm.dataset.TransformedDataset) self.assertEqual(d3.parents[0].dataset._id, '1') t = d3.parents[0].transform self.assertEqual(t._id, '42') self.assertAlmostEqual(t.tr_vector[0], 327.161, delta=0.01) self.assertAlmostEqual(t.rot_matrix[1][2], 0.988628, delta=0.01) def test_model_representation_handler(self): """Test ModelRepresentationHandler""" cif = """ loop_ _ihm_model_representation.id _ihm_model_representation.name _ihm_model_representation.details 1 "rep A" "rep A details" """ for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) r1, = s.orphan_representations self.assertEqual(len(r1), 0) # no segments read yet self.assertEqual(r1.name, 'rep A') self.assertEqual(r1.details, 'rep A details') def test_model_representation_details_handler(self): """Test ModelRepresentationDetailsHandler""" range_cif = """ loop_ _ihm_entity_poly_segment.id _ihm_entity_poly_segment.entity_id _ihm_entity_poly_segment.seq_id_begin _ihm_entity_poly_segment.seq_id_end 1 1 1 6 2 1 7 20 """ repr_cif = """ loop_ _ihm_model_representation_details.id _ihm_model_representation_details.representation_id _ihm_model_representation_details.entity_id _ihm_model_representation_details.entity_description _ihm_model_representation_details.entity_asym_id _ihm_model_representation_details.entity_poly_segment_id _ihm_model_representation_details.model_object_primitive _ihm_model_representation_details.starting_model_id _ihm_model_representation_details.model_mode _ihm_model_representation_details.model_granularity _ihm_model_representation_details.model_object_count _ihm_model_representation_details.description 1 1 1 Nup84 A 1 sphere . flexible by-feature 1 'test segment' 2 1 1 Nup84 A 2 sphere 1 rigid by-residue . . 3 2 1 Nup84 A . atomistic . flexible by-atom . . 4 3 2 Nup85 B . sphere . . multi-residue . . 
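# The four rows above exercise each model_granularity value; by-feature,
# by-residue, by-atom and multi-residue are expected to map to
# FeatureSegment, ResidueSegment, AtomicSegment and MultiResidueSegment.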
""" # Order of categories should not matter for cif in (range_cif + repr_cif, repr_cif + range_cif): for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) r1, r2, r3 = s.orphan_representations self.assertEqual(len(r1), 2) s1, s2 = r1 self.assertEqual(s1.__class__, ihm.representation.FeatureSegment) self.assertEqual(s1.primitive, 'sphere') self.assertEqual(s1.count, 1) self.assertEqual(s1.rigid, False) self.assertIsNone(s1.starting_model) self.assertEqual(s1.asym_unit.seq_id_range, (1, 6)) self.assertEqual(s1.description, 'test segment') self.assertEqual(s2.__class__, ihm.representation.ResidueSegment) self.assertEqual(s2.primitive, 'sphere') self.assertIsNone(s2.count) self.assertEqual(s2.rigid, True) self.assertEqual(s2.starting_model._id, '1') self.assertEqual(s2.asym_unit.seq_id_range, (7, 20)) self.assertIsNone(s2.description) self.assertEqual(len(r2), 1) s1, = r2 self.assertEqual(s1.__class__, ihm.representation.AtomicSegment) self.assertEqual(len(r3), 1) s1, = r3 self.assertEqual(s1.__class__, ihm.representation.MultiResidueSegment) def test_starting_model_details_handler(self): """Test StartingModelDetailsHandler""" ps_cif = """ loop_ _ihm_entity_poly_segment.id _ihm_entity_poly_segment.entity_id _ihm_entity_poly_segment.seq_id_begin _ihm_entity_poly_segment.seq_id_end 1 1 7 483 """ sm_cif = """ loop_ _ihm_starting_model_details.starting_model_id _ihm_starting_model_details.entity_id _ihm_starting_model_details.entity_description _ihm_starting_model_details.asym_id _ihm_starting_model_details.entity_poly_segment_id _ihm_starting_model_details.starting_model_source _ihm_starting_model_details.starting_model_auth_asym_id _ihm_starting_model_details.starting_model_sequence_offset _ihm_starting_model_details.dataset_list_id _ihm_starting_model_details.description 1 1 Nup84 A 1 'comparative model' Q 8 4 . 2 1 Nup84 A . 'comparative model' X . 6 'test desc' """ # Order of the two categories shouldn't matter for cif in ps_cif + sm_cif, sm_cif + ps_cif: for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) m1, m2 = s.orphan_starting_models self.assertEqual(m1.asym_unit._id, 'A') self.assertEqual(m1.asym_unit.seq_id_range, (7, 483)) self.assertEqual(m1.asym_id, 'Q') self.assertEqual(m1.offset, 8) self.assertEqual(m1.dataset._id, '4') self.assertIsNone(m1.description) self.assertEqual(m2.asym_unit._id, 'A') self.assertEqual(m2.asym_id, 'X') self.assertEqual(m2.offset, 0) self.assertEqual(m2.dataset._id, '6') self.assertEqual(m2.description, 'test desc') def test_starting_computational_models_handler(self): """Test StartingComputationModelsHandler""" cif = """ loop_ _ihm_starting_computational_models.starting_model_id _ihm_starting_computational_models.software_id _ihm_starting_computational_models.script_file_id 1 99 8 2 . . 
""" for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) m1, m2 = s.orphan_starting_models self.assertEqual(m1.script_file._id, '8') self.assertEqual(m1.software._id, '99') self.assertIsNone(m2.script_file) self.assertIsNone(m2.software) def test_starting_comparative_models_handler(self): """Test StartingComparativeModelsHandler""" cif = """ loop_ _ihm_starting_comparative_models.id _ihm_starting_comparative_models.starting_model_id _ihm_starting_comparative_models.starting_model_auth_asym_id _ihm_starting_comparative_models.starting_model_seq_id_begin _ihm_starting_comparative_models.starting_model_seq_id_end _ihm_starting_comparative_models.template_auth_asym_id _ihm_starting_comparative_models.template_seq_id_begin _ihm_starting_comparative_models.template_seq_id_end _ihm_starting_comparative_models.template_sequence_identity _ihm_starting_comparative_models.template_sequence_identity_denominator _ihm_starting_comparative_models.template_dataset_list_id _ihm_starting_comparative_models.alignment_file_id 1 1 A 7 436 C 9 438 90.000 1 3 2 2 1 A 33 424 C 33 424 100.000 1 1 . 3 1 A 33 424 C . ? 100.000 1 1 . 4 1 A . . C . . . . 1 . """ for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) m1, = s.orphan_starting_models t1, t2, t3, t4 = m1.templates self.assertEqual(t1.dataset._id, '3') self.assertEqual(t1.asym_id, 'C') self.assertEqual(t1.seq_id_range, (7, 436)) self.assertEqual(t1.template_seq_id_range, (9, 438)) self.assertAlmostEqual(t1.sequence_identity.value, 90.0, delta=0.1) self.assertEqual(t1.sequence_identity.denominator, 1) self.assertEqual(t1.alignment_file._id, '2') self.assertIsNone(t2.alignment_file) self.assertEqual(t3.template_seq_id_range, (None, ihm.unknown)) self.assertEqual(t4.seq_id_range, (None, None)) self.assertEqual(t4.template_seq_id_range, (None, None)) def test_protocol_handler(self): """Test ProtocolHandler""" cif = """ loop_ _ihm_modeling_protocol.id _ihm_modeling_protocol.protocol_name _ihm_modeling_protocol.num_steps _ihm_modeling_protocol.details 1 Prot1 5 'extra details' """ for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) p1, = s.orphan_protocols self.assertEqual(p1.name, "Prot1") self.assertEqual(p1.details, "extra details") # no step objects read yet, num_steps ignored self.assertEqual(len(p1.steps), 0) def test_protocol_details_handler(self): """Test ProtocolDetailsHandler""" cif = """ loop_ _ihm_modeling_protocol_details.id _ihm_modeling_protocol_details.protocol_id _ihm_modeling_protocol_details.step_id _ihm_modeling_protocol_details.struct_assembly_id _ihm_modeling_protocol_details.dataset_group_id _ihm_modeling_protocol_details.struct_assembly_description _ihm_modeling_protocol_details.step_name _ihm_modeling_protocol_details.step_method _ihm_modeling_protocol_details.num_models_begin _ihm_modeling_protocol_details.num_models_end _ihm_modeling_protocol_details.multi_scale_flag _ihm_modeling_protocol_details.multi_state_flag _ihm_modeling_protocol_details.ordered_flag _ihm_modeling_protocol_details.ensemble_flag _ihm_modeling_protocol_details.software_id _ihm_modeling_protocol_details.script_file_id _ihm_modeling_protocol_details.description 1 1 1 1 1 . Sampling 'Monte Carlo' 0 500 YES NO NO NO . . . 2 1 2 1 2 . Sampling 'Monte Carlo' 500 5000 YES . 
NO YES 401 501 'test step' """ for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) p1, = s.orphan_protocols self.assertEqual(len(p1.steps), 2) self.assertEqual(p1.steps[0]._id, '1') self.assertEqual(p1.steps[0].assembly._id, '1') self.assertEqual(p1.steps[0].dataset_group._id, '1') self.assertEqual(p1.steps[0].name, 'Sampling') self.assertEqual(p1.steps[0].method, 'Monte Carlo') self.assertEqual(p1.steps[0].num_models_begin, 0) self.assertEqual(p1.steps[0].num_models_end, 500) self.assertEqual(p1.steps[0].multi_scale, True) self.assertEqual(p1.steps[0].multi_state, False) self.assertEqual(p1.steps[0].ordered, False) self.assertEqual(p1.steps[0].ensemble, False) self.assertIsNone(p1.steps[0].software) self.assertIsNone(p1.steps[0].script_file) self.assertIsNone(p1.steps[0].description) self.assertEqual(p1.steps[1]._id, '2') self.assertEqual(p1.steps[1].multi_scale, True) self.assertIsNone(p1.steps[1].multi_state) self.assertEqual(p1.steps[1].ordered, False) self.assertEqual(p1.steps[1].ensemble, True) self.assertEqual(p1.steps[1].software._id, '401') self.assertEqual(p1.steps[1].script_file._id, '501') self.assertEqual(p1.steps[1].description, 'test step') def test_post_process_handler(self): """Test PostProcessHandler""" cif = """ loop_ _ihm_modeling_post_process.id _ihm_modeling_post_process.protocol_id _ihm_modeling_post_process.analysis_id _ihm_modeling_post_process.step_id _ihm_modeling_post_process.type _ihm_modeling_post_process.feature _ihm_modeling_post_process.num_models_begin _ihm_modeling_post_process.num_models_end _ihm_modeling_post_process.struct_assembly_id _ihm_modeling_post_process.dataset_group_id _ihm_modeling_post_process.software_id _ihm_modeling_post_process.script_file_id _ihm_modeling_post_process.details 1 1 1 1 'filter' 'energy/score' 15000 6520 . . 401 501 . 2 1 1 2 'cluster' 'invalid' 6520 6520 . . . . . 3 1 2 1 'filter' 'energy/score' 16000 7520 . . . . . 4 1 2 2 'filter' 'composition' 7520 5520 . . . . . 5 1 2 3 'cluster' 'dRMSD' 5520 6520 . . . . . 6 2 1 1 'filter' 'energy/score' 17000 6520 . . 401 501 . 7 3 1 1 'none' . . . . . . . 
'empty step' """ for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) p1, p2, p3 = s.orphan_protocols self.assertEqual(len(p1.analyses), 2) self.assertEqual(len(p2.analyses), 1) self.assertEqual(len(p3.analyses), 1) # Analysis IDs should match analysis_id self.assertEqual([a._id for a in p1.analyses], ['1', '2']) self.assertEqual([a._id for a in p2.analyses], ['1']) self.assertEqual([a._id for a in p3.analyses], ['1']) a1, a2 = p1.analyses self.assertEqual(len(a1.steps), 2) self.assertEqual(a1.steps[0].__class__, ihm.analysis.FilterStep) self.assertEqual(a1.steps[0].feature, 'energy/score') self.assertEqual(a1.steps[0].num_models_begin, 15000) self.assertEqual(a1.steps[0].num_models_end, 6520) self.assertEqual(a1.steps[0].software._id, '401') self.assertEqual(a1.steps[0].script_file._id, '501') self.assertEqual(a1.steps[1].__class__, ihm.analysis.ClusterStep) # invalid feature should be mapped to default self.assertEqual(a1.steps[1].feature, 'other') self.assertEqual(a1.steps[1].num_models_begin, 6520) self.assertIsNone(a1.steps[1].software) self.assertIsNone(a1.steps[1].script_file) self.assertIsNone(a1.steps[1].details) self.assertEqual(len(a2.steps), 3) self.assertEqual([step.num_models_begin for step in a2.steps], [16000, 7520, 5520]) a1, = p2.analyses self.assertEqual(len(a1.steps), 1) self.assertEqual(a1.steps[0].__class__, ihm.analysis.FilterStep) self.assertEqual(a1.steps[0].feature, 'energy/score') self.assertEqual(a1.steps[0].num_models_begin, 17000) a1, = p3.analyses self.assertEqual(len(a1.steps), 1) self.assertEqual(a1.steps[0].__class__, ihm.analysis.EmptyStep) self.assertEqual(a1.steps[0].feature, 'none') self.assertIsNone(a1.steps[0].num_models_begin) self.assertIsNone(a1.steps[0].num_models_end) self.assertEqual(a1.steps[0].details, 'empty step') def test_model_list_handler(self): """Test ModelListHandler and ModelGroupHandler""" cif = """ loop_ _ihm_model_list.model_id _ihm_model_list.model_name _ihm_model_list.assembly_id _ihm_model_list.protocol_id _ihm_model_list.representation_id 1 'Best scoring model' 1 2 3 2 'Best scoring model' 1 1 1 # loop_ _ihm_model_group.id _ihm_model_group.name _ihm_model_group.details 1 "Cluster 1" . 2 "Cluster 2" 'cluster 2 details' # loop_ _ihm_model_group_link.group_id _ihm_model_group_link.model_id 1 1 2 2 loop_ _atom_site.group_PDB _atom_site.id _atom_site.type_symbol _atom_site.label_atom_id _atom_site.label_alt_id _atom_site.label_comp_id _atom_site.label_seq_id _atom_site.label_asym_id _atom_site.Cartn_x _atom_site.Cartn_y _atom_site.Cartn_z _atom_site.occupancy _atom_site.label_entity_id _atom_site.auth_asym_id _atom_site.B_iso_or_equiv _atom_site.pdbx_PDB_model_num _atom_site.ihm_model_id ATOM 1 N N . 
MET 1 A 14.326 -2.326 8.122 1.000 1 A 0.000 42 42 """ for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) sg, sg2 = s.state_groups # sg should contain all models in groups but not explicitly # put in a State state, = sg self.assertIsNone(state.name) # auto-created state mg1, mg2 = state self.assertEqual(mg1.name, 'Cluster 1') self.assertEqual(mg1._id, '1') self.assertIsNone(mg1.details) m, = mg1 self.assertEqual(m._id, '1') self.assertEqual(m.name, 'Best scoring model') self.assertEqual(m.assembly._id, '1') self.assertEqual(m.protocol._id, '2') self.assertEqual(m.representation._id, '3') self.assertEqual(mg2.name, 'Cluster 2') self.assertEqual(mg2.details, 'cluster 2 details') self.assertEqual(mg2._id, '2') m, = mg2 self.assertEqual(m._id, '2') # sg2 should contain all models referenced by the file but not # put in groups (in this case, model ID 42 from atom_site) state, = sg2 self.assertIsNone(state.name) # auto-created state mg1, = state self.assertIsNone(mg1.name) # auto-created group m, = mg1 self.assertEqual(m._id, '42') def test_model_representative_handler(self): """Test ModelRepresentativeHandler""" cif = """ loop_ _ihm_model_representative.id _ihm_model_representative.model_group_id _ihm_model_representative.model_id _ihm_model_representative.selection_criteria 1 42 5 medoid 2 42 8 'lowest energy' 3 99 3 'some unknown criterion' """ for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) state, = s.state_groups[0] mg1, mg2 = state self.assertEqual(mg1._id, '42') self.assertEqual(len(mg1.representatives), 2) self.assertEqual(mg1.representatives[0].model._id, '5') self.assertEqual(mg1.representatives[0].selection_criteria, 'medoid') self.assertEqual(mg1.representatives[1].model._id, '8') self.assertEqual(mg1.representatives[1].selection_criteria, 'lowest energy') self.assertEqual(mg2._id, '99') self.assertEqual(len(mg2.representatives), 1) self.assertEqual(mg2.representatives[0].model._id, '3') self.assertEqual(mg2.representatives[0].selection_criteria, 'other selction criteria') def test_multi_state_handler(self): """Test MultiStateHandler and MultiStateLinkHandler""" cif = """ loop_ _ihm_model_list.model_id _ihm_model_list.model_name _ihm_model_list.assembly_id _ihm_model_list.protocol_id _ihm_model_list.representation_id 1 'Best scoring model' 1 2 3 # loop_ _ihm_model_group.id _ihm_model_group.name _ihm_model_group.details 1 "Cluster 1" . # loop_ _ihm_model_group_link.group_id _ihm_model_group_link.model_id 1 1 # loop_ _ihm_multi_state_modeling.state_id _ihm_multi_state_modeling.state_group_id _ihm_multi_state_modeling.population_fraction _ihm_multi_state_modeling.state_type _ihm_multi_state_modeling.state_name _ihm_multi_state_modeling.experiment_type _ihm_multi_state_modeling.details 1 1 0.4 'complex formation' 'unbound' 'Fraction of bulk' 'unbound molecule 1' 2 1 . 'complex formation' 'unbound' 'Fraction of bulk' 'unbound molecule 2' 3 1 . 
'complex formation' 'bound' 'Fraction of bulk' 'bound molecules 1 and 2' # loop_ _ihm_multi_state_model_group_link.state_id _ihm_multi_state_model_group_link.model_group_id 1 1 2 2 3 3 3 4 """ for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) sg, = s.state_groups s1, s2, s3, = sg self.assertAlmostEqual(s1.population_fraction, 0.4, delta=0.1) self.assertEqual(s1.type, 'complex formation') self.assertEqual(s1.name, 'unbound') self.assertEqual(len(s1), 1) mg1, = s1 self.assertEqual(mg1.name, 'Cluster 1') self.assertEqual(s1.experiment_type, 'Fraction of bulk') self.assertEqual(s1.details, 'unbound molecule 1') self.assertIsNone(s2.population_fraction) self.assertEqual(len(s2), 1) self.assertEqual(len(s3), 2) def test_not_modeled_residue_range_handler(self): """Test NotModeledResidueRangeHandler""" cif = """ loop_ _ihm_residues_not_modeled.id _ihm_residues_not_modeled.model_id _ihm_residues_not_modeled.entity_description _ihm_residues_not_modeled.entity_id _ihm_residues_not_modeled.asym_id _ihm_residues_not_modeled.seq_id_begin _ihm_residues_not_modeled.seq_id_end _ihm_residues_not_modeled.comp_id_begin _ihm_residues_not_modeled.comp_id_end _ihm_residues_not_modeled.reason 1 1 Nup84 9 X 1 2 ALA CYS . 2 1 Nup84 9 X 3 4 GLY THR 'Highly variable models with poor precision' 3 1 Nup84 9 X 5 6 ALA CYS INVALID # """ for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) m, = s.state_groups[0][0][0] rr1, rr2, rr3 = m.not_modeled_residue_ranges self.assertEqual(rr1.asym_unit._id, 'X') self.assertEqual(rr1.seq_id_begin, 1) self.assertEqual(rr1.seq_id_end, 2) self.assertIsNone(rr1.reason) self.assertEqual(rr2.asym_unit._id, 'X') self.assertEqual(rr2.seq_id_begin, 3) self.assertEqual(rr2.seq_id_end, 4) self.assertEqual(rr2.reason, "Highly variable models with poor precision") self.assertEqual(rr3.reason, "Other") def test_ensemble_handler(self): """Test EnsembleHandler""" cif = """ loop_ _ihm_ensemble_info.ensemble_id _ihm_ensemble_info.ensemble_name _ihm_ensemble_info.post_process_id _ihm_ensemble_info.model_group_id _ihm_ensemble_info.ensemble_clustering_method _ihm_ensemble_info.ensemble_clustering_feature _ihm_ensemble_info.num_ensemble_models _ihm_ensemble_info.num_ensemble_models_deposited _ihm_ensemble_info.ensemble_precision_value _ihm_ensemble_info.ensemble_file_id _ihm_ensemble_info.details _ihm_ensemble_info.model_group_superimposed_flag _ihm_ensemble_info.sub_sample_flag _ihm_ensemble_info.sub_sampling_type 1 'Cluster 1' 2 3 . dRMSD 1257 10 15.400 9 . . . . 2 'Cluster 2' 2 . . dRMSD 1257 10 15.400 9 'cluster details' NO YES independent 3 'Cluster 3' . . invalid_cluster invalid_feature 1 1 15.400 9 . YES . . # # loop_ _ihm_ensemble_sub_sample.id _ihm_ensemble_sub_sample.name _ihm_ensemble_sub_sample.ensemble_id _ihm_ensemble_sub_sample.num_models _ihm_ensemble_sub_sample.num_models_deposited _ihm_ensemble_sub_sample.model_group_id _ihm_ensemble_sub_sample.file_id 1 ss1 2 5 0 . . 
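# Both subsamples belong to ensemble 2, whose sub_sampling_type of
# 'independent' means they should be read back as IndependentSubsample
# objects.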
2 ss2 2 5 2 42 3 # """ for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) e, e2, e3 = s.ensembles self.assertEqual(e.model_group._id, '3') self.assertEqual(e.num_models, 1257) # model_group is empty self.assertEqual(e.num_models_deposited, 0) self.assertEqual(e.post_process._id, '2') self.assertIsNone(e.clustering_method) self.assertEqual(e.clustering_feature, 'dRMSD') self.assertEqual(e.name, 'Cluster 1') self.assertIsNone(e.details) self.assertAlmostEqual(e.precision, 15.4, delta=0.1) self.assertEqual(e.file._id, '9') self.assertIsNone(e.superimposed) self.assertIsNone(e2.model_group) self.assertEqual(e2.num_models_deposited, 10) self.assertEqual(e2.details, 'cluster details') s1, s2 = e2.subsamples self.assertEqual(s1.name, 'ss1') self.assertEqual(s1.num_models, 5) self.assertIsNone(s1.model_group) self.assertIsNone(s1.file) self.assertIsInstance(s1, ihm.model.IndependentSubsample) self.assertEqual(s2.name, 'ss2') self.assertEqual(s2.num_models, 5) self.assertEqual(s2.model_group._id, '42') self.assertEqual(s2.file._id, '3') self.assertIsInstance(s2, ihm.model.IndependentSubsample) self.assertFalse(e2.superimposed) # invalid cluster/feature should be mapped to default self.assertEqual(e3.clustering_method, 'Other') self.assertEqual(e3.clustering_feature, 'other') self.assertTrue(e3.superimposed) def test_density_handler(self): """Test DensityHandler""" segment_cif = """ loop_ _ihm_entity_poly_segment.id _ihm_entity_poly_segment.entity_id _ihm_entity_poly_segment.seq_id_begin _ihm_entity_poly_segment.seq_id_end 1 1 1 726 """ density_cif = """ loop_ _ihm_localization_density_files.id _ihm_localization_density_files.file_id _ihm_localization_density_files.ensemble_id _ihm_localization_density_files.entity_id _ihm_localization_density_files.asym_id _ihm_localization_density_files.entity_poly_segment_id 1 22 9 1 A 1 2 23 9 2 B . """ # Order should not matter for cif in (segment_cif + density_cif, density_cif + segment_cif): for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) e, = s.ensembles self.assertEqual(e._id, '9') d1, d2 = e.densities self.assertEqual(d1._id, '1') self.assertEqual(d1.file._id, '22') self.assertEqual(d1.asym_unit.__class__, ihm.AsymUnitRange) self.assertEqual(d1.asym_unit.seq_id_range, (1, 726)) self.assertEqual(d2._id, '2') self.assertEqual(d2.asym_unit.__class__, ihm.AsymUnit) def test_em3d_restraint_handler(self): """Test EM3DRestraintHandler""" fh = StringIO(""" loop_ _ihm_3dem_restraint.id _ihm_3dem_restraint.dataset_list_id _ihm_3dem_restraint.fitting_method _ihm_3dem_restraint.fitting_method_citation_id _ihm_3dem_restraint.struct_assembly_id _ihm_3dem_restraint.map_segment_flag _ihm_3dem_restraint.number_of_gaussians _ihm_3dem_restraint.model_id _ihm_3dem_restraint.cross_correlation_coefficient _ihm_3dem_restraint.details 1 26 'Gaussian mixture models' 9 2 YES 400 1 . 
details 2 26 'Gaussian mixture models' 9 2 YES 400 2 0.9 details 3 26 'Gaussian mixture models' 9 3 NO 400 1 0.8 'other assembly' 4 27 'Gaussian mixture models' 9 2 NO 400 1 0.8 'other dataset' """) s, = ihm.reader.read(fh) r1, r2, r3 = s.restraints self.assertEqual(r1.dataset._id, '26') self.assertEqual(r1.fitting_method, 'Gaussian mixture models') self.assertEqual(r1.fitting_method_citation._id, '9') self.assertEqual(r1.assembly._id, '2') self.assertTrue(r1.segment) self.assertEqual(r1.number_of_gaussians, 400) self.assertEqual(r1.details, 'details') # Sort fits by model ID fits = sorted(r1.fits.items(), key=lambda x: x[0]._id) self.assertEqual(len(fits), 2) self.assertEqual(fits[0][0]._id, '1') self.assertIsNone(fits[0][1].cross_correlation_coefficient) self.assertEqual(fits[1][0]._id, '2') self.assertAlmostEqual(fits[1][1].cross_correlation_coefficient, 0.9, delta=0.1) # r2 acts on same dataset but a different assembly, so should be a # distinct restraint object self.assertEqual(r2.details, 'other assembly') self.assertFalse(r2.segment) # r3 acts on different dataset, so should be a distinct # restraint object self.assertEqual(r3.details, 'other dataset') def test_get_int(self): """Test _get_int method""" h = ihm.reader.Handler(None) self.assertEqual(h.get_int('45'), 45) self.assertIsNone(h.get_int(None)) self.assertEqual(h.get_int(ihm.unknown), ihm.unknown) self.assertRaises(ValueError, h.get_int, ".") self.assertRaises(ValueError, h.get_int, "?") def test_get_int_or_string(self): """Test _get_int_or_string method""" h = ihm.reader.Handler(None) self.assertEqual(h.get_int_or_string('45A'), '45A') self.assertEqual(h.get_int_or_string('45'), 45) self.assertIsNone(h.get_int_or_string(None)) self.assertEqual(h.get_int_or_string(ihm.unknown), ihm.unknown) self.assertEqual(h.get_int_or_string('.'), '.') self.assertEqual(h.get_int_or_string('?'), '?') self.assertEqual(h.get_int_or_string(45), 45) def test_get_float(self): """Test _get_float method""" h = ihm.reader.Handler(None) self.assertAlmostEqual(h.get_float('45.3'), 45.3, delta=0.1) self.assertIsNone(h.get_float(None)) self.assertEqual(h.get_float(ihm.unknown), ihm.unknown) self.assertRaises(ValueError, h.get_float, ".") self.assertRaises(ValueError, h.get_float, "?") def test_get_bool(self): """Test _get_bool method""" h = ihm.reader.Handler(None) self.assertEqual(h.get_bool('YES'), True) self.assertEqual(h.get_bool('NO'), False) self.assertIsNone(h.get_bool('something else')) self.assertIsNone(h.get_bool(None)) self.assertEqual(h.get_bool(ihm.unknown), ihm.unknown) def test_get_lower(self): """Test _get_lower method""" h = ihm.reader.Handler(None) self.assertEqual(h.get_lower('Test String'), 'test string') self.assertIsNone(h.get_lower(None)) self.assertEqual(h.get_lower(ihm.unknown), ihm.unknown) self.assertEqual(h.get_lower('.'), '.') self.assertEqual(h.get_lower('?'), '?') def test_get_vector3(self): """Test _get_vector3 function""" d = {'tr_vector1': 4.0, 'tr_vector2': 6.0, 'tr_vector3': 9.0, 'omitted1': None, 'unknown1': ihm.unknown} r = ihm.reader._get_vector3(d, 'tr_vector') # Coerce to int so we can compare exactly self.assertEqual([int(x) for x in r], [4, 6, 9]) self.assertIsNone(ihm.reader._get_vector3(d, 'omitted')) self.assertEqual(ihm.reader._get_vector3(d, 'unknown'), ihm.unknown) def test_get_matrix33(self): """Test _get_matrix33 function""" d = {'m11': 4.0, 'm12': 6.0, 'm13': 9.0, 'm21': 1.0, 'm22': 2.0, 'm23': 3.0, 'm31': 8.0, 'm32': 1.0, 'm33': 7.0, 'omitted11': None, 'unknown11': ihm.unknown} r = 
ihm.reader._get_matrix33(d, 'm') # Coerce to int so we can compare exactly self.assertEqual([[int(x) for x in row] for row in r], [[4, 6, 9], [1, 2, 3], [8, 1, 7]]) self.assertIsNone(ihm.reader._get_matrix33(d, 'omitted')) self.assertEqual(ihm.reader._get_matrix33(d, 'unknown'), ihm.unknown) def test_unknown_omitted(self): """Test that Handlers handle unknown/omitted values correctly""" fh = StringIO(""" loop_ _audit_author.name _audit_author.pdbx_ordinal . 1 ? 2 '.' 3 '?' 4 # """) s, = ihm.reader.read(fh) self.assertEqual(s.authors, [None, ihm.unknown, '.', '?']) def test_em2d_restraint_handler(self): """Test EM2DRestraintHandler""" fh = StringIO(""" loop_ _ihm_2dem_class_average_restraint.id _ihm_2dem_class_average_restraint.dataset_list_id _ihm_2dem_class_average_restraint.number_raw_micrographs _ihm_2dem_class_average_restraint.pixel_size_width _ihm_2dem_class_average_restraint.pixel_size_height _ihm_2dem_class_average_restraint.image_resolution _ihm_2dem_class_average_restraint.image_segment_flag _ihm_2dem_class_average_restraint.number_of_projections _ihm_2dem_class_average_restraint.struct_assembly_id _ihm_2dem_class_average_restraint.details 1 65 800 2.030 4.030 35.000 NO 10000 42 . # loop_ _ihm_2dem_class_average_fitting.id _ihm_2dem_class_average_fitting.restraint_id _ihm_2dem_class_average_fitting.model_id _ihm_2dem_class_average_fitting.cross_correlation_coefficient _ihm_2dem_class_average_fitting.rot_matrix[1][1] _ihm_2dem_class_average_fitting.rot_matrix[2][1] _ihm_2dem_class_average_fitting.rot_matrix[3][1] _ihm_2dem_class_average_fitting.rot_matrix[1][2] _ihm_2dem_class_average_fitting.rot_matrix[2][2] _ihm_2dem_class_average_fitting.rot_matrix[3][2] _ihm_2dem_class_average_fitting.rot_matrix[1][3] _ihm_2dem_class_average_fitting.rot_matrix[2][3] _ihm_2dem_class_average_fitting.rot_matrix[3][3] _ihm_2dem_class_average_fitting.tr_vector[1] _ihm_2dem_class_average_fitting.tr_vector[2] _ihm_2dem_class_average_fitting.tr_vector[3] 1 1 9 0.853 -0.637588 0.089507 0.765160 0.755616 -0.120841 0.643771 0.150085 0.988628 0.009414 327.161 83.209 -227.800 """) s, = ihm.reader.read(fh) r, = s.restraints self.assertEqual(r._id, '1') self.assertEqual(r.dataset._id, '65') self.assertEqual(r.number_raw_micrographs, 800) self.assertAlmostEqual(r.pixel_size_width, 2.030, delta=0.01) self.assertAlmostEqual(r.pixel_size_height, 4.030, delta=0.01) self.assertAlmostEqual(r.image_resolution, 35.0, delta=0.1) self.assertEqual(r.segment, False) self.assertEqual(r.number_of_projections, 10000) self.assertEqual(r.assembly._id, '42') fit, = list(r.fits.items()) self.assertEqual(fit[0]._id, '9') self.assertAlmostEqual(fit[1].cross_correlation_coefficient, 0.853, delta=0.01) self.assertAlmostEqual(fit[1].tr_vector[0], 327.161, delta=0.01) self.assertAlmostEqual(fit[1].rot_matrix[1][2], 0.988628, delta=0.01) self.assertEqual([int(x) for x in fit[1].tr_vector], [327, 83, -227]) def test_sas_restraint_handler(self): """Test SASRestraintHandler""" fh = StringIO(""" loop_ _ihm_sas_restraint.id _ihm_sas_restraint.dataset_list_id _ihm_sas_restraint.model_id _ihm_sas_restraint.struct_assembly_id _ihm_sas_restraint.profile_segment_flag _ihm_sas_restraint.fitting_atom_type _ihm_sas_restraint.fitting_method _ihm_sas_restraint.fitting_state _ihm_sas_restraint.radius_of_gyration _ihm_sas_restraint.chi_value _ihm_sas_restraint.details 1 27 8 3 NO 'Heavy atoms' FoXS Single 27.9 1.36 . 
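# The rows below differ from the first only in assembly or dataset;
# each such difference is expected to produce a distinct restraint
# object rather than being merged with the first.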
2 27 8 4 NO 'Heavy atoms' FoXS Single 27.9 1.4 'different assembly' 2 28 8 3 NO 'Heavy atoms' FoXS Single 27.9 1.6 'different dataset' """) s, = ihm.reader.read(fh) r, r2, r3 = s.restraints self.assertEqual(r.dataset._id, '27') self.assertEqual(r.assembly._id, '3') self.assertEqual(r.segment, False) self.assertEqual(r.fitting_method, 'FoXS') self.assertEqual(r.fitting_atom_type, 'Heavy atoms') self.assertEqual(r.multi_state, False) self.assertAlmostEqual(r.radius_of_gyration, 27.9, delta=0.1) fit, = list(r.fits.items()) self.assertEqual(fit[0]._id, '8') self.assertAlmostEqual(fit[1].chi_value, 1.36, delta=0.01) # r2 acts on same dataset but a different assembly, so should be # a distinct restraint object self.assertEqual(r2.details, 'different assembly') # r3 acts on different dataset, so should be a distinct # restraint object self.assertEqual(r3.details, 'different dataset') def test_sphere_obj_site_handler(self): """Test SphereObjSiteHandler""" class MyModel(ihm.model.Model): def add_sphere(self, sphere): super().add_sphere(sphere) self.sphere_count = len(self._spheres) fh = StringIO(""" loop_ _ihm_model_list.model_id _ihm_model_list.model_name _ihm_model_list.assembly_id _ihm_model_list.protocol_id _ihm_model_list.representation_id 1 . 1 1 1 # loop_ _ihm_model_group.id _ihm_model_group.name _ihm_model_group.details 1 "Cluster 1" . # loop_ _ihm_model_group_link.group_id _ihm_model_group_link.model_id 1 1 # loop_ _ihm_sphere_obj_site.id _ihm_sphere_obj_site.entity_id _ihm_sphere_obj_site.seq_id_begin _ihm_sphere_obj_site.seq_id_end _ihm_sphere_obj_site.asym_id _ihm_sphere_obj_site.Cartn_x _ihm_sphere_obj_site.Cartn_y _ihm_sphere_obj_site.Cartn_z _ihm_sphere_obj_site.object_radius _ihm_sphere_obj_site.rmsf _ihm_sphere_obj_site.model_id 1 1 1 6 A 389.993 145.089 134.782 4.931 . 1 2 1 7 7 B 406.895 142.176 135.653 3.318 1.34 1 """) s, = ihm.reader.read(fh, model_class=MyModel) m = s.state_groups[0][0][0][0] self.assertEqual(m.sphere_count, 2) s1, s2 = m._spheres self.assertEqual(s1.asym_unit._id, 'A') self.assertEqual(s1.seq_id_range, (1, 6)) self.assertAlmostEqual(s1.x, 389.993, delta=0.01) self.assertAlmostEqual(s1.y, 145.089, delta=0.01) self.assertAlmostEqual(s1.z, 134.782, delta=0.01) self.assertAlmostEqual(s1.radius, 4.931, delta=0.01) self.assertIsNone(s1.rmsf) self.assertAlmostEqual(s2.rmsf, 1.34, delta=0.1) def test_atom_site_handler(self): """Test AtomSiteHandler""" fh = StringIO(""" loop_ _ihm_model_list.model_id _ihm_model_list.model_name _ihm_model_list.assembly_id _ihm_model_list.protocol_id _ihm_model_list.representation_id 1 . 1 1 1 # loop_ _ihm_model_group.id _ihm_model_group.name _ihm_model_group.details 1 "Cluster 1" . # loop_ _ihm_model_group_link.group_id _ihm_model_group_link.model_id 1 1 # loop_ _atom_site.group_PDB _atom_site.id _atom_site.type_symbol _atom_site.label_atom_id _atom_site.label_alt_id _atom_site.label_comp_id _atom_site.label_seq_id _atom_site.label_asym_id _atom_site.Cartn_x _atom_site.Cartn_y _atom_site.Cartn_z _atom_site.occupancy _atom_site.label_entity_id _atom_site.auth_asym_id _atom_site.B_iso_or_equiv _atom_site.pdbx_PDB_model_num _atom_site.ihm_model_id ATOM 1 N N . SER 1 A 54.401 -49.984 -35.287 . 1 A . 1 1 HETATM 2 C CA A SER . 
B 54.452 -48.492 -35.210 0.200 1 A 42.0 1 1 """) s, = ihm.reader.read(fh) m = s.state_groups[0][0][0][0] a1, a2 = m._atoms self.assertEqual(a1.asym_unit._id, 'A') self.assertEqual(a1.seq_id, 1) self.assertEqual(a1.atom_id, 'N') self.assertEqual(a1.type_symbol, 'N') self.assertAlmostEqual(a1.x, 54.401, delta=0.01) self.assertAlmostEqual(a1.y, -49.984, delta=0.01) self.assertAlmostEqual(a1.z, -35.287, delta=0.01) self.assertEqual(a1.het, False) self.assertIsNone(a1.biso) self.assertIsNone(a1.occupancy) self.assertIsNone(a1.alt_id) self.assertEqual(a2.asym_unit._id, 'B') self.assertEqual(a2.seq_id, 1) self.assertEqual(a2.atom_id, 'CA') self.assertEqual(a2.type_symbol, 'C') self.assertEqual(a2.het, True) self.assertAlmostEqual(a2.biso, 42.0, delta=1.0) self.assertAlmostEqual(a2.occupancy, 0.2, delta=0.1) self.assertEqual(a2.alt_id, 'A') def test_atom_site_handler_auth_seq_id(self): """Test AtomSiteHandler handling of auth_seq_id and ins_code""" fh = StringIO(ASYM_ENTITY + """ loop_ _ihm_model_list.model_id _ihm_model_list.model_name _ihm_model_list.assembly_id _ihm_model_list.protocol_id _ihm_model_list.representation_id 1 . 1 1 1 # loop_ _ihm_model_group.id _ihm_model_group.name _ihm_model_group.details 1 "Cluster 1" . # loop_ _ihm_model_group_link.group_id _ihm_model_group_link.model_id 1 1 # loop_ _atom_site.group_PDB _atom_site.id _atom_site.type_symbol _atom_site.label_atom_id _atom_site.label_alt_id _atom_site.label_comp_id _atom_site.label_seq_id _atom_site.auth_seq_id _atom_site.pdbx_PDB_ins_code _atom_site.label_asym_id _atom_site.Cartn_x _atom_site.Cartn_y _atom_site.Cartn_z _atom_site.label_entity_id _atom_site.auth_asym_id _atom_site.B_iso_or_equiv _atom_site.pdbx_PDB_model_num _atom_site.ihm_model_id ATOM 1 N N . SER 1 2 A A 54.401 -49.984 -35.287 1 A . 1 1 HETATM 2 C CA . SER 2 20A . A 54.452 -48.492 -35.210 1 A 42.0 1 1 ATOM 3 N N . SER 3 3 . A 54.401 -49.984 -35.287 1 A . 1 1 """) s, = ihm.reader.read(fh) asym, = s.asym_units self.assertEqual(asym.auth_seq_id_map, {1: (2, 'A'), 2: ('20A', None)}) def test_atom_site_handler_no_asym_id(self): """Test AtomSiteHandler with missing asym_id""" fh = StringIO(""" loop_ _entity_poly_seq.entity_id _entity_poly_seq.num _entity_poly_seq.mon_id _entity_poly_seq.hetero 5 1 MET . 5 2 CYS . 5 3 MET . 5 4 SER . # loop_ _struct_asym.id _struct_asym.entity_id _struct_asym.details D 5 foo # loop_ _atom_site.group_PDB _atom_site.id _atom_site.type_symbol _atom_site.label_atom_id _atom_site.label_alt_id _atom_site.label_comp_id _atom_site.label_asym_id _atom_site.label_entity_id _atom_site.label_seq_id _atom_site.pdbx_PDB_ins_code _atom_site.Cartn_x _atom_site.Cartn_y _atom_site.Cartn_z _atom_site.occupancy _atom_site.B_iso_or_equiv _atom_site.pdbx_formal_charge _atom_site.auth_asym_id _atom_site.pdbx_PDB_model_num ATOM 1 C CA . MET . . 1 ? 1.000 1.000 1.000 1.00 2.95 0 A 1 ATOM 2 C CA . ASP . . 2 ? 2.000 2.000 2.000 1.00 0.95 0 A 1 ATOM 3 C CA . CYS . . 3 ? 3.000 3.000 3.000 1.00 0.95 0 A 1 ATOM 4 C CA . MET . . 1 ? 1.000 1.000 1.000 1.00 2.95 0 B 1 ATOM 5 C CA . ASP . . 2 ? 2.000 2.000 2.000 1.00 0.95 0 B 1 ATOM 6 C CA . CYS . . 3 ? 3.000 3.000 3.000 1.00 0.95 0 B 1 ATOM 7 C CA . CYS . . 2 ? 1.000 1.000 1.000 1.00 0.95 0 C 1 ATOM 8 C CA . CYS . . 5 ? 3.000 3.000 3.000 1.00 0.95 0 C 1 ATOM 9 C CA . MET . . 1 ? 
3.000 3.000 3.000 1.00 0.95 0 D 1 """) s, = ihm.reader.read(fh) # No asym_id, so use auth_asym_id a1, a2, a3, a4 = s.asym_units self.assertEqual(a1._id, 'D') self.assertEqual(a2._id, 'A') self.assertEqual(a3._id, 'B') self.assertEqual(a4._id, 'C') # A and B should have same sequence, thus same entity self.assertIs(a2.entity, a3.entity) # Sequence should have been populated from comp_ids self.assertEqual("".join(c.code for c in a2.entity.sequence), "MDC") # C has different entity and sequence, with gaps self.assertEqual("".join(c.code_canonical for c in a4.entity.sequence), "XCXXC") # D is defined in struct_asym and entity_poly so should use that # sequence self.assertEqual("".join(c.code_canonical for c in a1.entity.sequence), "MCMS") def test_atom_site_handler_water(self): """Test AtomSiteHandler reading water molecules""" fh = StringIO(""" loop_ _entity.id _entity.type 1 water loop_ _struct_asym.id _struct_asym.entity_id _struct_asym.details A 1 Water B 1 Water # loop_ _pdbx_nonpoly_scheme.asym_id _pdbx_nonpoly_scheme.entity_id _pdbx_nonpoly_scheme.mon_id _pdbx_nonpoly_scheme.ndb_seq_num _pdbx_nonpoly_scheme.pdb_seq_num _pdbx_nonpoly_scheme.auth_seq_num _pdbx_nonpoly_scheme.auth_mon_id _pdbx_nonpoly_scheme.pdb_strand_id _pdbx_nonpoly_scheme.pdb_ins_code A 1 HOH 1 50 500 HOH A . # loop_ _atom_site.group_PDB _atom_site.id _atom_site.type_symbol _atom_site.label_atom_id _atom_site.label_alt_id _atom_site.label_comp_id _atom_site.label_seq_id _atom_site.auth_seq_id _atom_site.pdbx_PDB_ins_code _atom_site.label_asym_id _atom_site.Cartn_x _atom_site.Cartn_y _atom_site.Cartn_z _atom_site.occupancy _atom_site.label_entity_id _atom_site.auth_asym_id _atom_site.B_iso_or_equiv _atom_site.pdbx_PDB_model_num _atom_site.ihm_model_id HETATM 1 O O . HOH . 40 ? A 10.000 10.000 10.000 . 1 A . 1 1 HETATM 2 O O . HOH . 50 ? A 10.000 10.000 10.000 . 1 A . 1 1 HETATM 3 O O . HOH . 60 . A 20.000 20.000 20.000 . 1 A . 1 1 HETATM 4 O O . HOH . 70 . B 20.000 20.000 20.000 . 1 B . 1 1 """) s, = ihm.reader.read(fh) m = s.state_groups[0][0][0][0] a1, a2, a3, b1 = m._atoms # Should include info from both atom_site and scheme table self.assertEqual(a1.asym_unit.auth_seq_id_map, {1: (40, None), 2: (50, None), 3: (60, None)}) self.assertEqual(a1.asym_unit.orig_auth_seq_id_map, {2: 500}) self.assertEqual(b1.asym_unit.auth_seq_id_map, {1: (70, None)}) self.assertIsNone(b1.asym_unit.orig_auth_seq_id_map) # seq_id should be assigned based on atom_site self.assertEqual(a1.seq_id, 1) self.assertEqual(a2.seq_id, 2) self.assertEqual(a3.seq_id, 3) self.assertEqual(b1.seq_id, 1) def test_atom_site_handler_branched(self): """Test AtomSiteHandler reading branched molecules""" cif = """ loop_ _entity.id _entity.type 1 branched loop_ _struct_asym.id _struct_asym.entity_id _struct_asym.details A 1 . 
# loop_ _pdbx_branch_scheme.asym_id _pdbx_branch_scheme.entity_id _pdbx_branch_scheme.mon_id _pdbx_branch_scheme.num _pdbx_branch_scheme.pdb_seq_num _pdbx_branch_scheme.auth_seq_num _pdbx_branch_scheme.auth_mon_id _pdbx_branch_scheme.pdb_asym_id A 1 BGC 1 51 501 BGC A A 1 BGC 2 52 502 BGC A A 1 BGC 3 53 503 BGC A # loop_ _atom_site.group_PDB _atom_site.id _atom_site.type_symbol _atom_site.label_atom_id _atom_site.label_alt_id _atom_site.label_comp_id _atom_site.label_seq_id _atom_site.auth_seq_id _atom_site.pdbx_PDB_ins_code _atom_site.label_asym_id _atom_site.Cartn_x _atom_site.Cartn_y _atom_site.Cartn_z _atom_site.occupancy _atom_site.label_entity_id _atom_site.auth_asym_id _atom_site.B_iso_or_equiv _atom_site.pdbx_PDB_model_num _atom_site.ihm_model_id HETATM 1 C C . BGC . 52 ? A 10.000 10.000 10.000 . 1 A . 1 1 HETATM 2 C C . BGC . 53 ? A 10.000 10.000 10.000 . 1 A . 1 1 """ # Should fail since residue #60 is not in the scheme table badline = "HETATM 3 C C . BGC . 60 . A 20.00 20.00 20.00 . 1 A . 1 1" fh = StringIO(cif + badline) self.assertRaises(ValueError, ihm.reader.read, fh) fh = StringIO(cif) s, = ihm.reader.read(fh) m = s.state_groups[0][0][0][0] a1, a2 = m._atoms # seq_id should match num, i.e. start at 2 since residue 51 is missing self.assertEqual(a1.seq_id, 2) self.assertEqual(a2.seq_id, 3) self.assertEqual(a1.asym_unit.auth_seq_id_map, {1: (51, None), 2: (52, None), 3: (53, None)}) self.assertEqual(a1.asym_unit.orig_auth_seq_id_map, {1: 501, 2: 502, 3: 503}) self.assertEqual(a1.asym_unit.num_map, {1: 2, 2: 3}) def test_derived_distance_restraint_handler(self): """Test DerivedDistanceRestraintHandler""" feats = """ loop_ _ihm_poly_atom_feature.ordinal_id _ihm_poly_atom_feature.feature_id _ihm_poly_atom_feature.entity_id _ihm_poly_atom_feature.asym_id _ihm_poly_atom_feature.seq_id _ihm_poly_atom_feature.comp_id _ihm_poly_atom_feature.atom_id 1 1 1 A 1 ALA CA 2 1 1 . 1 ALA CB # loop_ _ihm_poly_residue_feature.ordinal_id _ihm_poly_residue_feature.feature_id _ihm_poly_residue_feature.entity_id _ihm_poly_residue_feature.asym_id _ihm_poly_residue_feature.seq_id_begin _ihm_poly_residue_feature.comp_id_begin _ihm_poly_residue_feature.seq_id_end _ihm_poly_residue_feature.comp_id_end 1 2 1 B 2 CYS 3 GLY 2 2 1 . 2 CYS 3 GLY # loop_ _ihm_non_poly_feature.ordinal_id _ihm_non_poly_feature.feature_id _ihm_non_poly_feature.entity_id _ihm_non_poly_feature.asym_id _ihm_non_poly_feature.comp_id _ihm_non_poly_feature.atom_id 1 3 3 C HEM FE 2 3 3 . HEM FE 3 4 3 C HEM . 4 4 3 . HEM . # loop_ _ihm_pseudo_site.id _ihm_pseudo_site.Cartn_x _ihm_pseudo_site.Cartn_y _ihm_pseudo_site.Cartn_z _ihm_pseudo_site.radius _ihm_pseudo_site.description 55 10.000 20.000 30.000 4.0 'centroid' # loop_ _ihm_pseudo_site_feature.feature_id _ihm_pseudo_site_feature.pseudo_site_id 5 55 """ rsr = """ loop_ _ihm_feature_list.feature_id _ihm_feature_list.feature_type _ihm_feature_list.entity_type _ihm_feature_list.details 1 atom polymer 'test feature' 2 'residue range' polymer . 3 atom non-polymer . 4 atom non-polymer . 5 'pseudo site' other . 
# loop_ _ihm_derived_distance_restraint.id _ihm_derived_distance_restraint.group_id _ihm_derived_distance_restraint.feature_id_1 _ihm_derived_distance_restraint.feature_id_2 _ihm_derived_distance_restraint.restraint_type _ihm_derived_distance_restraint.distance_lower_limit _ihm_derived_distance_restraint.distance_upper_limit _ihm_derived_distance_restraint.probability _ihm_derived_distance_restraint.mic_value _ihm_derived_distance_restraint.group_conditionality _ihm_derived_distance_restraint.dataset_list_id 1 . 1 2 'lower bound' 25.000 . 0.800 0.400 . 97 2 . 1 4 'upper bound' . 45.000 0.800 . ALL 98 3 1 1 2 'lower and upper bound' 22.000 45.000 0.800 . ANY 99 4 1 5 3 'harmonic' 35.000 35.000 0.800 . ALL . 5 . 5 3 . ? ? ? . ALL . """ # Test both ways to make sure features still work if they are # referenced by ID before their type is known for text in (feats + rsr, rsr + feats): fh = StringIO(text) s, = ihm.reader.read(fh) self.assertEqual(len(s.orphan_features), 5) r1, r2, r3, r4, r5 = s.restraints rg1, = s.restraint_groups self.assertEqual([r for r in rg1], [r3, r4]) self.assertEqual(r1.dataset._id, '97') self.assertIsInstance(r1.feature1, ihm.restraint.AtomFeature) self.assertEqual(len(r1.feature1.atoms), 2) self.assertEqual(r1.feature1.atoms[0].id, 'CA') self.assertEqual(r1.feature1.atoms[0].residue.seq_id, 1) self.assertIsNone(r1.feature1.atoms[0].residue.entity) self.assertEqual(r1.feature1.atoms[1].id, 'CB') self.assertEqual(r1.feature1.atoms[1].residue.seq_id, 1) self.assertIsNone(r1.feature1.atoms[1].residue.asym) self.assertEqual(r1.feature1.details, 'test feature') self.assertIsInstance(r1.feature2, ihm.restraint.ResidueFeature) self.assertEqual(len(r1.feature2.ranges), 2) self.assertEqual(r1.feature2.ranges[0].seq_id_range, (2, 3)) self.assertIsInstance(r1.feature2.ranges[0], ihm.AsymUnitRange) self.assertEqual(r1.feature2.ranges[1].seq_id_range, (2, 3)) self.assertIsInstance(r1.feature2.ranges[1], ihm.EntityRange) self.assertIsInstance(r1.distance, ihm.restraint.LowerBoundDistanceRestraint) self.assertAlmostEqual(r1.distance.distance, 25.000, delta=0.1) self.assertAlmostEqual(r1.probability, 0.8000, delta=0.1) self.assertAlmostEqual(r1.mic_value, 0.4000, delta=0.1) self.assertIsNone(r1.restrain_all) self.assertEqual(r2.restrain_all, True) self.assertEqual(r3.restrain_all, False) self.assertIsInstance(r2.feature2, ihm.restraint.NonPolyFeature) self.assertEqual(len(r2.feature2.objs), 2) self.assertIsInstance(r2.feature2.objs[0], ihm.AsymUnit) self.assertEqual(r2.feature2.objs[0]._id, 'C') self.assertIsInstance(r2.feature2.objs[1], ihm.Entity) self.assertIsInstance(r2.distance, ihm.restraint.UpperBoundDistanceRestraint) self.assertIsNone(r2.mic_value) self.assertIsInstance( r3.distance, ihm.restraint.LowerUpperBoundDistanceRestraint) self.assertIsInstance(r4.distance, ihm.restraint.HarmonicDistanceRestraint) self.assertIsInstance(r4.feature2, ihm.restraint.AtomFeature) self.assertIsNone(r4.feature2.atoms[0].residue.entity) self.assertIsNone(r4.feature2.atoms[1].residue.asym) self.assertIsInstance(r4.feature1, ihm.restraint.PseudoSiteFeature) self.assertAlmostEqual(r4.feature1.site.x, 10.0, delta=0.1) self.assertAlmostEqual(r4.feature1.site.y, 20.0, delta=0.1) self.assertAlmostEqual(r4.feature1.site.z, 30.0, delta=0.1) self.assertAlmostEqual(r4.feature1.site.radius, 4.0, delta=0.1) self.assertEqual(r4.feature1.site.description, 'centroid') def test_hdx_restraint_handler(self): """Test HDXRestraintHandler""" rsr = """ loop_ _ihm_hdx_restraint.id _ihm_hdx_restraint.feature_id 
_ihm_hdx_restraint.protection_factor _ihm_hdx_restraint.dataset_list_id _ihm_hdx_restraint.details 1 1 1.000 2 foo 2 2 . . . """ fh = StringIO(rsr) s, = ihm.reader.read(fh) self.assertEqual(len(s.orphan_features), 2) r1, r2 = s.restraints self.assertEqual(r1.feature._id, '1') self.assertAlmostEqual(r1.protection_factor, 1.0, delta=1e-4) self.assertEqual(r1.dataset._id, '2') self.assertEqual(r1.details, 'foo') self.assertEqual(r2.feature._id, '2') self.assertIsNone(r2.protection_factor) self.assertIsNone(r2.dataset) self.assertIsNone(r2.details) def test_sphere_handler(self): """Test SphereHandler""" obj_list = CENTERS_TRANSFORMS + """ loop_ _ihm_geometric_object_list.object_id _ihm_geometric_object_list.object_type _ihm_geometric_object_list.object_name _ihm_geometric_object_list.object_description 1 sphere 'my sphere' 'a test sphere' """ spheres = """ loop_ _ihm_geometric_object_sphere.object_id _ihm_geometric_object_sphere.center_id _ihm_geometric_object_sphere.transformation_id _ihm_geometric_object_sphere.radius_r 1 1 1 2.200 2 . . 3.200 """ # Order of categories shouldn't matter for text in (obj_list + spheres, spheres + obj_list): fh = StringIO(text) s, = ihm.reader.read(fh) s1, s2 = s.orphan_geometric_objects self.assertIsInstance(s1, ihm.geometry.Sphere) self.assertIsInstance(s2, ihm.geometry.Sphere) self.assertEqual(s1.name, 'my sphere') self.assertEqual(s1.description, 'a test sphere') self.assertAlmostEqual(s1.center.x, 1.000, delta=0.1) self.assertAlmostEqual(s1.center.y, 2.000, delta=0.1) self.assertAlmostEqual(s1.center.z, 3.000, delta=0.1) self.assertAlmostEqual(s1.transformation.tr_vector[1], 2.000, delta=0.1) self.assertIsNone(s2.name) self.assertIsNone(s2.center) self.assertIsNone(s2.transformation) def test_torus_handler(self): """Test TorusHandler""" obj_list = CENTERS_TRANSFORMS + """ loop_ _ihm_geometric_object_list.object_id _ihm_geometric_object_list.object_type _ihm_geometric_object_list.object_name _ihm_geometric_object_list.object_description 1 torus . . """ tori = """ loop_ _ihm_geometric_object_torus.object_id _ihm_geometric_object_torus.center_id _ihm_geometric_object_torus.transformation_id _ihm_geometric_object_torus.major_radius_R _ihm_geometric_object_torus.minor_radius_r 1 1 1 5.600 1.200 2 . . 3.600 2.200 """ # Order of categories shouldn't matter for text in (obj_list + tori, tori + obj_list): fh = StringIO(text) s, = ihm.reader.read(fh) t1, t2 = s.orphan_geometric_objects self.assertIsInstance(t1, ihm.geometry.Torus) self.assertIsInstance(t2, ihm.geometry.Torus) self.assertAlmostEqual(t1.center.x, 1.000, delta=0.1) self.assertAlmostEqual(t1.transformation.tr_vector[1], 2.000, delta=0.1) self.assertAlmostEqual(t1.major_radius, 5.600, delta=0.1) self.assertAlmostEqual(t1.minor_radius, 1.200, delta=0.1) self.assertIsNone(t2.center) self.assertIsNone(t2.transformation) def test_half_torus_handler(self): """Test HalfTorusHandler""" obj_list = CENTERS_TRANSFORMS + """ loop_ _ihm_geometric_object_list.object_id _ihm_geometric_object_list.object_type _ihm_geometric_object_list.object_name _ihm_geometric_object_list.object_description 1 half-torus . . 2 half-torus . . 3 half-torus . . """ tori = """ loop_ _ihm_geometric_object_torus.object_id _ihm_geometric_object_torus.center_id _ihm_geometric_object_torus.transformation_id _ihm_geometric_object_torus.major_radius_R _ihm_geometric_object_torus.minor_radius_r 1 1 1 5.600 1.200 2 . . 3.600 2.200 3 . . 
3.600 2.200 """ half_tori = """ loop_ _ihm_geometric_object_half_torus.object_id _ihm_geometric_object_half_torus.thickness_th _ihm_geometric_object_half_torus.section 1 0.100 'inner half' 2 0.200 'outer half' 3 0.200 . """ # Order of categories shouldn't matter for text in (obj_list + tori + half_tori, tori + half_tori + obj_list, obj_list + half_tori + tori, half_tori + tori + obj_list): fh = StringIO(text) s, = ihm.reader.read(fh) t1, t2, t3 = s.orphan_geometric_objects self.assertIsInstance(t1, ihm.geometry.HalfTorus) self.assertIsInstance(t2, ihm.geometry.HalfTorus) self.assertIsInstance(t3, ihm.geometry.HalfTorus) self.assertAlmostEqual(t1.center.x, 1.000, delta=0.1) self.assertAlmostEqual(t1.transformation.tr_vector[1], 2.000, delta=0.1) self.assertAlmostEqual(t1.major_radius, 5.600, delta=0.1) self.assertAlmostEqual(t1.minor_radius, 1.200, delta=0.1) self.assertAlmostEqual(t1.thickness, 0.100, delta=0.1) self.assertEqual(t1.inner, True) self.assertIsNone(t2.center) self.assertIsNone(t2.transformation) self.assertEqual(t2.inner, False) self.assertIsNone(t3.inner) def test_axis_handler(self): """Test AxisHandler""" obj_list = CENTERS_TRANSFORMS + """ loop_ _ihm_geometric_object_list.object_id _ihm_geometric_object_list.object_type _ihm_geometric_object_list.object_name _ihm_geometric_object_list.object_description 1 axis . . 2 axis . . """ axes = """ loop_ _ihm_geometric_object_axis.object_id _ihm_geometric_object_axis.axis_type _ihm_geometric_object_axis.transformation_id 1 x-axis 1 2 y-axis . """ # Order of categories shouldn't matter for text in (obj_list + axes, axes + obj_list): fh = StringIO(text) s, = ihm.reader.read(fh) a1, a2 = s.orphan_geometric_objects self.assertIsInstance(a1, ihm.geometry.XAxis) self.assertIsInstance(a2, ihm.geometry.YAxis) self.assertAlmostEqual(a1.transformation.tr_vector[1], 2.000, delta=0.1) self.assertIsNone(a2.transformation) def test_plane_handler(self): """Test PlaneHandler""" obj_list = CENTERS_TRANSFORMS + """ loop_ _ihm_geometric_object_list.object_id _ihm_geometric_object_list.object_type _ihm_geometric_object_list.object_name _ihm_geometric_object_list.object_description 1 plane . . 2 plane . . """ planes = """ loop_ _ihm_geometric_object_plane.object_id _ihm_geometric_object_plane.plane_type _ihm_geometric_object_plane.transformation_id 1 xy-plane 1 2 yz-plane . """ # Order of categories shouldn't matter for text in (obj_list + planes, planes + obj_list): fh = StringIO(text) s, = ihm.reader.read(fh) p1, p2 = s.orphan_geometric_objects self.assertIsInstance(p1, ihm.geometry.XYPlane) self.assertIsInstance(p2, ihm.geometry.YZPlane) self.assertAlmostEqual(p1.transformation.tr_vector[1], 2.000, delta=0.1) self.assertIsNone(p2.transformation) def test_geometric_restraint_handler(self): """Test GeometricRestraintHandler""" fh = StringIO(""" loop_ _ihm_geometric_object_distance_restraint.id _ihm_geometric_object_distance_restraint.object_id _ihm_geometric_object_distance_restraint.feature_id _ihm_geometric_object_distance_restraint.object_characteristic _ihm_geometric_object_distance_restraint.restraint_type _ihm_geometric_object_distance_restraint.harmonic_force_constant _ihm_geometric_object_distance_restraint.distance_lower_limit _ihm_geometric_object_distance_restraint.distance_upper_limit _ihm_geometric_object_distance_restraint.group_conditionality _ihm_geometric_object_distance_restraint.dataset_list_id 1 23 44 other 'upper bound' 2.000 . 25.000 ANY 97 2 23 44 center 'lower bound' 2.000 15.000 . ALL . 
3 23 44 'inner surface' 'lower and upper bound' 2.000 10.000 25.000 . 97 4 23 44 'outer surface' 'harmonic' 2.000 . 25.000 . 97 # """) s, = ihm.reader.read(fh) r1, r2, r3, r4 = s.restraints self.assertIsInstance(r1, ihm.restraint.GeometricRestraint) self.assertEqual(r1.dataset._id, '97') self.assertEqual(r1.geometric_object._id, '23') self.assertEqual(r1.feature._id, '44') self.assertIsInstance(r1.distance, ihm.restraint.UpperBoundDistanceRestraint) self.assertAlmostEqual(r1.distance.distance, 25.000, delta=0.1) self.assertAlmostEqual(r1.harmonic_force_constant, 2.000, delta=0.1) self.assertEqual(r1.restrain_all, False) self.assertEqual(r2.restrain_all, True) self.assertIsNone(r3.restrain_all) self.assertIsInstance(r2, ihm.restraint.CenterGeometricRestraint) self.assertIsInstance(r3, ihm.restraint.InnerSurfaceGeometricRestraint) self.assertIsInstance(r4, ihm.restraint.OuterSurfaceGeometricRestraint) self.assertIsInstance(r2.distance, ihm.restraint.LowerBoundDistanceRestraint) self.assertIsInstance(r3.distance, ihm.restraint.LowerUpperBoundDistanceRestraint) self.assertIsInstance(r4.distance, ihm.restraint.HarmonicDistanceRestraint) def test_poly_seq_scheme_handler_offset(self): """Test PolySeqSchemeHandler with constant offset""" fh = StringIO(ASYM_ENTITY + """ loop_ _pdbx_poly_seq_scheme.asym_id _pdbx_poly_seq_scheme.entity_id _pdbx_poly_seq_scheme.seq_id _pdbx_poly_seq_scheme.pdb_seq_num _pdbx_poly_seq_scheme.pdb_strand_id A 1 1 6 A A 1 2 7 A A 1 3 8 A A 1 4 9 A """) s, = ihm.reader.read(fh) asym, = s.asym_units self.assertEqual(asym.auth_seq_id_map, 5) self.assertIsNone(asym._strand_id) self.assertEqual([asym.residue(i).auth_seq_id for i in range(1, 5)], [6, 7, 8, 9]) self.assertIsNone(asym.orig_auth_seq_id_map) def test_poly_seq_scheme_handler_offset_ins_code(self): """Test PolySeqSchemeHandler with constant offset but inscodes""" fh = StringIO(ASYM_ENTITY + """ loop_ _pdbx_poly_seq_scheme.asym_id _pdbx_poly_seq_scheme.entity_id _pdbx_poly_seq_scheme.seq_id _pdbx_poly_seq_scheme.pdb_seq_num _pdbx_poly_seq_scheme.pdb_strand_id _pdbx_poly_seq_scheme.pdb_ins_code A 1 1 6 A . A 1 2 7 A . A 1 3 8 A . 
A 1 4 9 A A """) s, = ihm.reader.read(fh) asym, = s.asym_units self.assertEqual(asym.auth_seq_id_map, {1: (6, None), 2: (7, None), 3: (8, None), 4: (9, 'A')}) self.assertIsNone(asym._strand_id) self.assertEqual([asym.residue(i).auth_seq_id for i in range(1, 5)], [6, 7, 8, 9]) self.assertIsNone(asym.residue(1).ins_code) self.assertEqual(asym.residue(4).ins_code, 'A') self.assertIsNone(asym.orig_auth_seq_id_map) def test_poly_seq_scheme_handler_empty(self): """Test PolySeqSchemeHandler with no poly_seq_scheme""" fh = StringIO(ASYM_ENTITY) s, = ihm.reader.read(fh) asym, = s.asym_units self.assertEqual(asym.auth_seq_id_map, 0) self.assertEqual([asym.residue(i).auth_seq_id for i in range(1, 5)], [1, 2, 3, 4]) self.assertIsNone(asym.orig_auth_seq_id_map) def test_poly_seq_scheme_handler_nop(self): """Test PolySeqSchemeHandler with a do-nothing poly_seq_scheme""" fh = StringIO(ASYM_ENTITY + """ loop_ _pdbx_poly_seq_scheme.asym_id _pdbx_poly_seq_scheme.entity_id _pdbx_poly_seq_scheme.seq_id _pdbx_poly_seq_scheme.pdb_seq_num A 1 1 1 A 1 2 2 A 1 3 3 """) s, = ihm.reader.read(fh) asym, = s.asym_units self.assertEqual(asym.auth_seq_id_map, 0) self.assertEqual([asym.residue(i).auth_seq_id for i in range(1, 5)], [1, 2, 3, 4]) self.assertIsNone(asym.orig_auth_seq_id_map) def test_poly_seq_scheme_handler_partial(self): """Test PolySeqSchemeHandler with partial information""" fh = StringIO(ASYM_ENTITY + """ loop_ _pdbx_poly_seq_scheme.asym_id _pdbx_poly_seq_scheme.entity_id _pdbx_poly_seq_scheme.seq_id _pdbx_poly_seq_scheme.pdb_seq_num _pdbx_poly_seq_scheme.auth_seq_num A 1 1 6 . A 1 2 7 9 A 1 3 8 . """) s, = ihm.reader.read(fh) asym, = s.asym_units # No mapping for residue 4 (and no insertion codes at all) self.assertEqual(asym.auth_seq_id_map, {1: (6, None), 2: (7, None), 3: (8, None)}) self.assertEqual([asym.residue(i).auth_seq_id for i in range(1, 5)], [6, 7, 8, 4]) self.assertIsNone(asym.residue(1).ins_code) self.assertEqual(asym.orig_auth_seq_id_map, {2: 9}) def test_poly_seq_scheme_handler_incon_off(self): """Test PolySeqSchemeHandler with inconsistent offset""" fh = StringIO(ASYM_ENTITY + """ loop_ _pdbx_poly_seq_scheme.asym_id _pdbx_poly_seq_scheme.entity_id _pdbx_poly_seq_scheme.seq_id _pdbx_poly_seq_scheme.pdb_seq_num _pdbx_poly_seq_scheme.pdb_strand_id A 1 1 6 X A 1 2 7 X A 1 3 8 X A 1 4 10 X """) s, = ihm.reader.read(fh) asym, = s.asym_units self.assertEqual(asym._strand_id, 'X') self.assertEqual(asym.auth_seq_id_map, {1: (6, None), 2: (7, None), 3: (8, None), 4: (10, None)}) self.assertEqual([asym.residue(i).auth_seq_id for i in range(1, 5)], [6, 7, 8, 10]) self.assertIsNone(asym.residue(1).ins_code) self.assertIsNone(asym.orig_auth_seq_id_map) def test_poly_seq_scheme_handler_unknown_auth_seq(self): """Test PolySeqSchemeHandler with explicit unknown auth_seq_num""" fh = StringIO(ASYM_ENTITY + """ loop_ _pdbx_poly_seq_scheme.asym_id _pdbx_poly_seq_scheme.entity_id _pdbx_poly_seq_scheme.seq_id _pdbx_poly_seq_scheme.pdb_seq_num _pdbx_poly_seq_scheme.auth_seq_num _pdbx_poly_seq_scheme.pdb_strand_id A 1 1 1 1 A A 1 2 2 2 A A 1 3 3 ? 
A A 1 4 4 4 A """) s, = ihm.reader.read(fh) asym, = s.asym_units self.assertEqual(asym.auth_seq_id_map, 0) self.assertEqual(asym.orig_auth_seq_id_map, {3: ihm.unknown}) def test_poly_seq_scheme_handler_str_seq_id(self): """Test PolySeqSchemeHandler with a non-integer pdb_seq_num""" fh = StringIO(ASYM_ENTITY + """ loop_ _pdbx_poly_seq_scheme.asym_id _pdbx_poly_seq_scheme.entity_id _pdbx_poly_seq_scheme.seq_id _pdbx_poly_seq_scheme.pdb_seq_num _pdbx_poly_seq_scheme.auth_seq_num _pdbx_poly_seq_scheme.pdb_strand_id _pdbx_poly_seq_scheme.pdb_ins_code A 1 1 6 6 ? . A 1 2 7 12 ? . A 1 3 8 24 ? . A 1 4 9A 48A ? . """) s, = ihm.reader.read(fh) asym, = s.asym_units self.assertIsNone(asym._strand_id) self.assertEqual(asym.auth_seq_id_map, {1: (6, None), 2: (7, None), 3: (8, None), 4: ('9A', None)}) self.assertEqual([asym.residue(i).auth_seq_id for i in range(1, 5)], [6, 7, 8, '9A']) self.assertIsNone(asym.residue(1).ins_code) self.assertIsNone(asym.residue(3).ins_code) self.assertEqual(asym.orig_auth_seq_id_map, {2: 12, 3: 24, 4: '48A'}) def test_nonpoly_scheme_handler(self): """Test NonPolySchemeHandler""" fh = StringIO(""" loop_ _chem_comp.id _chem_comp.type _chem_comp.name CA non-polymer 'CALCIUM ION' # loop_ _entity.id _entity.type _entity.pdbx_description 1 non-polymer 'CALCIUM ION entity' 2 non-polymer 'no-chem-comp entity' 3 water 'no-chem-comp water' # loop_ _pdbx_entity_nonpoly.entity_id _pdbx_entity_nonpoly.name _pdbx_entity_nonpoly.comp_id 1 'CALCIUM ION' CA # loop_ _struct_asym.id _struct_asym.entity_id _struct_asym.details A 1 foo B 2 bar C 3 baz # loop_ _pdbx_nonpoly_scheme.asym_id _pdbx_nonpoly_scheme.entity_id _pdbx_nonpoly_scheme.mon_id _pdbx_nonpoly_scheme.ndb_seq_num _pdbx_nonpoly_scheme.pdb_seq_num _pdbx_nonpoly_scheme.auth_seq_num _pdbx_nonpoly_scheme.pdb_strand_id _pdbx_nonpoly_scheme.pdb_ins_code A 1 BAR 1 101 202 . . B 2 BAR 1 1 1 Q X C 3 HOH . 1 1 . . C 3 HOH 2 2 2 . . C 3 HOH 3 5 10 . . C 3 HOH 4 1 20 . . C 3 HOH 5 7 7 . . 
""") s, = ihm.reader.read(fh) e1, e2, e3 = s.entities # e1 should have sequence filled in by pdbx_entity_nonpoly self.assertEqual([cc.name for cc in e1.sequence], ['CALCIUM ION']) # e2,e3 should have sequence filled in by pdbx_nonpoly_scheme self.assertEqual([(cc.id, cc.name) for cc in e2.sequence], [('BAR', 'no-chem-comp entity')]) self.assertEqual([(cc.id, cc.name) for cc in e3.sequence], [('HOH', 'WATER')]) asym, a2, a3 = s.asym_units # non-polymers have no seq_id_range self.assertEqual(asym.seq_id_range, (None, None)) self.assertEqual(asym.auth_seq_id_map, {1: (101, None)}) self.assertEqual(asym.residue(1).auth_seq_id, 101) self.assertIsNone(asym.residue(1).ins_code) self.assertEqual(asym.strand_id, asym._id) self.assertIsNone(asym._strand_id) self.assertEqual(asym.orig_auth_seq_id_map, {1: 202}) self.assertEqual(a2.auth_seq_id_map, {1: (1, 'X')}) self.assertEqual(a2.residue(1).auth_seq_id, 1) self.assertEqual(a2.residue(1).ins_code, 'X') self.assertEqual(a2.strand_id, 'Q') self.assertEqual(a2._strand_id, 'Q') self.assertIsNone(a2.orig_auth_seq_id_map) self.assertEqual(a3.auth_seq_id_map, {1: (1, None), 2: (2, None), 3: (5, None), 4: (1, None), 5: (7, None)}) self.assertEqual(a3.orig_auth_seq_id_map, {3: 10, 4: 20}) def test_cross_link_list_handler(self): """Test CrossLinkListHandler""" fh = StringIO(""" loop_ _ihm_cross_link_list.id _ihm_cross_link_list.group_id _ihm_cross_link_list.entity_description_1 _ihm_cross_link_list.entity_id_1 _ihm_cross_link_list.seq_id_1 _ihm_cross_link_list.comp_id_1 _ihm_cross_link_list.entity_description_2 _ihm_cross_link_list.entity_id_2 _ihm_cross_link_list.seq_id_2 _ihm_cross_link_list.comp_id_2 _ihm_cross_link_list.linker_chem_comp_descriptor_id _ihm_cross_link_list.dataset_list_id _ihm_cross_link_list.details 1 1 foo 1 2 THR foo 1 3 CYS 44 97 . 2 2 foo 1 2 THR bar 2 3 PHE 44 97 'test xl' 3 2 foo 1 2 THR bar 2 2 GLU 44 97 . 4 3 foo 1 1 ALA bar 2 1 ASP 44 97 . 5 4 foo 1 1 ALA bar 2 1 ASP 88 97 . 6 5 foo 1 1 ALA bar 2 1 ASP 44 98 . 
""") s, = ihm.reader.read(fh) # Check grouping self.assertEqual([[len(g) for g in r.experimental_cross_links] for r in s.restraints], [[1, 2, 1], [1], [1]]) r1, r2, r3 = s.restraints self.assertEqual(r1.dataset._id, '97') self.assertEqual(r1.linker._id, '44') xl = r1.experimental_cross_links[1][0] self.assertEqual(xl.residue1.entity._id, '1') self.assertEqual(xl.residue2.entity._id, '2') self.assertEqual(xl.residue1.seq_id, 2) self.assertEqual(xl.residue2.seq_id, 3) self.assertEqual(xl.details, 'test xl') def test_cross_link_list_handler_linker_type(self): """Test CrossLinkListHandler with old-style linker_type""" fh = StringIO(""" loop_ _ihm_cross_link_list.id _ihm_cross_link_list.group_id _ihm_cross_link_list.entity_description_1 _ihm_cross_link_list.entity_id_1 _ihm_cross_link_list.seq_id_1 _ihm_cross_link_list.comp_id_1 _ihm_cross_link_list.entity_description_2 _ihm_cross_link_list.entity_id_2 _ihm_cross_link_list.seq_id_2 _ihm_cross_link_list.comp_id_2 _ihm_cross_link_list.linker_type _ihm_cross_link_list.dataset_list_id 1 1 foo 1 2 THR foo 1 3 CYS DSS 97 2 2 foo 1 2 THR bar 2 3 PHE DSS 97 3 2 foo 1 2 THR bar 2 2 GLU DSS 97 4 3 foo 1 1 ALA bar 2 1 ASP DSS 97 5 4 foo 1 1 ALA bar 2 1 ASP TST 97 6 5 foo 1 1 ALA bar 2 1 ASP DSS 98 """) s, = ihm.reader.read(fh) # Check grouping r1, r2, r3 = s.restraints self.assertEqual(r1.linker.auth_name, 'DSS') self.assertEqual(r1.linker.chemical_name, 'disuccinimidyl suberate') self.assertEqual(r2.linker.auth_name, 'TST') self.assertIsNone(r2.linker.chemical_name) self.assertEqual(r3.linker.auth_name, 'DSS') self.assertEqual(r3.linker.chemical_name, 'disuccinimidyl suberate') def test_cross_link_list_handler_empty_descriptor(self): """Test CrossLinkListHandler with empty descriptor name""" fh = StringIO(""" loop_ _ihm_chemical_component_descriptor.id _ihm_chemical_component_descriptor.auth_name 1 DSS 2 . 3 . # loop_ _ihm_cross_link_list.id _ihm_cross_link_list.group_id _ihm_cross_link_list.entity_description_1 _ihm_cross_link_list.entity_id_1 _ihm_cross_link_list.seq_id_1 _ihm_cross_link_list.comp_id_1 _ihm_cross_link_list.entity_description_2 _ihm_cross_link_list.entity_id_2 _ihm_cross_link_list.seq_id_2 _ihm_cross_link_list.comp_id_2 _ihm_cross_link_list.linker_chem_comp_descriptor_id _ihm_cross_link_list.linker_type _ihm_cross_link_list.dataset_list_id 1 1 foo 1 2 THR foo 1 3 CYS 1 NOTDSS 97 2 2 foo 1 2 THR bar 2 3 PHE 2 EDC 97 3 2 foo 1 2 THR bar 2 3 PHE 3 . 
97 """) s, = ihm.reader.read(fh) d1, d2, d3 = s.orphan_chem_descriptors # Descriptor name (DSS) should take precedence over # linker_type (NOTDSS) self.assertEqual(d1.auth_name, 'DSS') # If descriptor name is empty, fill it in using linker_type self.assertEqual(d2.auth_name, 'EDC') # If both names are empty, name is None self.assertIsNone(d3.auth_name) def test_cross_link_restraint_handler(self): """Test CrossLinkRestraintHandler""" xl_list = """ loop_ _ihm_cross_link_list.id _ihm_cross_link_list.group_id _ihm_cross_link_list.entity_description_1 _ihm_cross_link_list.entity_id_1 _ihm_cross_link_list.seq_id_1 _ihm_cross_link_list.comp_id_1 _ihm_cross_link_list.entity_description_2 _ihm_cross_link_list.entity_id_2 _ihm_cross_link_list.seq_id_2 _ihm_cross_link_list.comp_id_2 _ihm_cross_link_list.linker_type _ihm_cross_link_list.dataset_list_id 1 1 foo 1 2 THR foo 1 3 CYS DSS 97 2 2 foo 1 2 THR bar 2 3 PHE DSS 97 # loop_ _ihm_cross_link_pseudo_site.id _ihm_cross_link_pseudo_site.restraint_id _ihm_cross_link_pseudo_site.cross_link_partner _ihm_cross_link_pseudo_site.pseudo_site_id _ihm_cross_link_pseudo_site.model_id 1 2 1 44 . 2 2 2 88 99 3 2 2 880 990 """ xl_rsr = """ loop_ _ihm_cross_link_restraint.id _ihm_cross_link_restraint.group_id _ihm_cross_link_restraint.entity_id_1 _ihm_cross_link_restraint.asym_id_1 _ihm_cross_link_restraint.seq_id_1 _ihm_cross_link_restraint.comp_id_1 _ihm_cross_link_restraint.entity_id_2 _ihm_cross_link_restraint.asym_id_2 _ihm_cross_link_restraint.seq_id_2 _ihm_cross_link_restraint.comp_id_2 _ihm_cross_link_restraint.atom_id_1 _ihm_cross_link_restraint.atom_id_2 _ihm_cross_link_restraint.restraint_type _ihm_cross_link_restraint.conditional_crosslink_flag _ihm_cross_link_restraint.model_granularity _ihm_cross_link_restraint.distance_threshold _ihm_cross_link_restraint.psi _ihm_cross_link_restraint.sigma_1 _ihm_cross_link_restraint.sigma_2 1 1 1 A 2 THR 1 B 3 CYS . . 'upper bound' ALL by-residue 25.000 0.500 1.000 2.000 2 2 1 A 2 THR 2 B 2 GLU C N 'lower bound' ANY by-atom 34.000 . . .
""" # Order of categories shouldn't matter for text in (xl_list + xl_rsr, xl_rsr + xl_list): fh = StringIO(text) s, = ihm.reader.read(fh) r, = s.restraints xl1, xl2 = sorted(r.cross_links, key=operator.attrgetter('_id')) self.assertIsInstance(xl1, ihm.restraint.ResidueCrossLink) self.assertEqual(xl1.experimental_cross_link.residue1.seq_id, 2) self.assertEqual(xl1.experimental_cross_link.residue2.seq_id, 3) self.assertEqual(xl1.fits, {}) self.assertEqual(xl1.asym1._id, 'A') self.assertEqual(xl1.asym2._id, 'B') self.assertIsInstance(xl1.distance, ihm.restraint.UpperBoundDistanceRestraint) self.assertAlmostEqual(xl1.distance.distance, 25.000, delta=0.1) self.assertAlmostEqual(xl1.psi, 0.500, delta=0.1) self.assertAlmostEqual(xl1.sigma1, 1.000, delta=0.1) self.assertAlmostEqual(xl1.sigma2, 2.000, delta=0.1) self.assertIsNone(xl1.pseudo1) self.assertIsNone(xl1.pseudo2) self.assertIsInstance(xl2, ihm.restraint.AtomCrossLink) self.assertEqual(xl2.fits, {}) self.assertIsInstance(xl2.distance, ihm.restraint.LowerBoundDistanceRestraint) self.assertEqual(xl2.atom1, 'C') self.assertEqual(xl2.atom2, 'N') self.assertAlmostEqual(xl2.distance.distance, 34.000, delta=0.1) self.assertIsNone(xl2.psi) self.assertIsNone(xl2.sigma1) self.assertIsNone(xl2.sigma2) ps1, = xl2.pseudo1 self.assertEqual(ps1.site._id, '44') self.assertIsNone(ps1.model) ps21, ps22 = xl2.pseudo2 self.assertEqual(ps21.site._id, '88') self.assertEqual(ps21.model._id, '99') self.assertEqual(ps22.site._id, '880') self.assertEqual(ps22.model._id, '990') def test_cross_link_result_handler(self): """Test CrossLinkResultHandler""" xl_list = """ loop_ _ihm_cross_link_list.id _ihm_cross_link_list.group_id _ihm_cross_link_list.entity_description_1 _ihm_cross_link_list.entity_id_1 _ihm_cross_link_list.seq_id_1 _ihm_cross_link_list.comp_id_1 _ihm_cross_link_list.entity_description_2 _ihm_cross_link_list.entity_id_2 _ihm_cross_link_list.seq_id_2 _ihm_cross_link_list.comp_id_2 _ihm_cross_link_list.linker_type _ihm_cross_link_list.dataset_list_id 1 1 foo 1 2 THR foo 1 3 CYS DSS 97 """ xl_rsr = """ loop_ _ihm_cross_link_restraint.id _ihm_cross_link_restraint.group_id _ihm_cross_link_restraint.entity_id_1 _ihm_cross_link_restraint.asym_id_1 _ihm_cross_link_restraint.seq_id_1 _ihm_cross_link_restraint.comp_id_1 _ihm_cross_link_restraint.entity_id_2 _ihm_cross_link_restraint.asym_id_2 _ihm_cross_link_restraint.seq_id_2 _ihm_cross_link_restraint.comp_id_2 _ihm_cross_link_restraint.atom_id_1 _ihm_cross_link_restraint.atom_id_2 _ihm_cross_link_restraint.restraint_type _ihm_cross_link_restraint.conditional_crosslink_flag _ihm_cross_link_restraint.model_granularity _ihm_cross_link_restraint.distance_threshold _ihm_cross_link_restraint.psi _ihm_cross_link_restraint.sigma_1 _ihm_cross_link_restraint.sigma_2 1 1 1 A 2 THR 1 B 3 CYS . . 'upper bound' ALL by-residue 25.000 0.500 1.000 2.000 """ xl_fit = """ loop_ _ihm_cross_link_result_parameters.id _ihm_cross_link_result_parameters.restraint_id _ihm_cross_link_result_parameters.model_id _ihm_cross_link_result_parameters.psi _ihm_cross_link_result_parameters.sigma_1 _ihm_cross_link_result_parameters.sigma_2 1 1 201 0.100 4.200 2.100 2 1 301 . . . # loop_ _ihm_cross_link_result.id _ihm_cross_link_result.restraint_id _ihm_cross_link_result.ensemble_id _ihm_cross_link_result.model_group_id _ihm_cross_link_result.num_models _ihm_cross_link_result.distance_threshold _ihm_cross_link_result.median_distance _ihm_cross_link_result.details 1 1 401 . 10 99.0 10.0 'details 1' 2 1 . 501 20 99.0 20.0 .
""" # Order of categories shouldn't matter for text in (xl_list + xl_rsr + xl_fit, xl_fit + xl_rsr + xl_list): fh = StringIO(text) s, = ihm.reader.read(fh) r, = s.restraints xl, = r.cross_links # Sort fits by ID fits = sorted(xl.fits.items(), key=lambda x: x[0]._id) self.assertEqual(len(fits), 4) self.assertIsInstance(fits[0][0], ihm.model.Model) self.assertEqual(fits[0][0]._id, '201') self.assertAlmostEqual(fits[0][1].psi, 0.100, delta=0.1) self.assertAlmostEqual(fits[0][1].sigma1, 4.200, delta=0.1) self.assertAlmostEqual(fits[0][1].sigma2, 2.100, delta=0.1) self.assertEqual(fits[1][0]._id, '301') self.assertIsInstance(fits[1][0], ihm.model.Model) self.assertIsNone(fits[1][1].psi) self.assertIsNone(fits[1][1].sigma1) self.assertIsNone(fits[1][1].sigma2) self.assertEqual(fits[2][0]._id, '401') self.assertIsInstance(fits[2][0], ihm.model.Ensemble) self.assertEqual(fits[2][1].num_models, 10) self.assertAlmostEqual(fits[2][1].median_distance, 10.0, delta=0.01) self.assertEqual(fits[2][1].details, 'details 1') self.assertEqual(fits[3][0]._id, '501') self.assertIsInstance(fits[3][0], ihm.model.ModelGroup) self.assertEqual(fits[3][1].num_models, 20) self.assertAlmostEqual(fits[3][1].median_distance, 20.0, delta=0.01) self.assertIsNone(fits[3][1].details) def test_ordered_model_handler(self): """Test OrderedModelHandler""" # Test both old and new category names fh = StringIO(""" loop_ _ihm_ordered_model.process_id _ihm_ordered_model.process_description _ihm_ordered_model.ordered_by _ihm_ordered_model.step_id _ihm_ordered_model.step_description _ihm_ordered_model.edge_id _ihm_ordered_model.edge_description _ihm_ordered_model.model_group_id_begin _ihm_ordered_model.model_group_id_end 1 pdesc 'steps in a reaction pathway' 1 'step 1 desc' 1 . 1 2 1 pdesc 'steps in a reaction pathway' 2 'step 2 desc' 2 'edge 2 desc' 1 3 # loop_ _ihm_ordered_ensemble.process_id _ihm_ordered_ensemble.process_description _ihm_ordered_ensemble.ordered_by _ihm_ordered_ensemble.step_id _ihm_ordered_ensemble.step_description _ihm_ordered_ensemble.edge_id _ihm_ordered_ensemble.edge_description _ihm_ordered_ensemble.model_group_id_begin _ihm_ordered_ensemble.model_group_id_end 1 pdesc 'steps in a reaction pathway' 2 'step 2 desc' 3 . 
1 4 """) s, = ihm.reader.read(fh) op, = s.ordered_processes self.assertEqual(op.description, 'pdesc') self.assertEqual(len(op.steps), 2) s1, s2 = op.steps self.assertEqual(s1.description, 'step 1 desc') self.assertEqual(len(s1), 1) e1 = s1[0] self.assertIsNone(e1.description) self.assertEqual(e1.group_begin._id, '1') self.assertEqual(e1.group_end._id, '2') self.assertEqual(s2.description, 'step 2 desc') self.assertEqual(len(s2), 2) e1 = s2[0] self.assertEqual(e1.description, 'edge 2 desc') self.assertEqual(e1.group_begin._id, '1') self.assertEqual(e1.group_end._id, '3') e2 = s2[1] self.assertIsNone(e2.description) self.assertEqual(e2.group_begin._id, '1') self.assertEqual(e2.group_end._id, '4') def _check_pdbx(self, s): self.assertEqual( s.title, 'Enterococcus faecalis FIC protein in complex ' 'with AMP and calcium ion.') self.assertEqual(len(s.databases), 2) self.assertEqual(s.databases[0].code, '6EP0') self.assertEqual(s.databases[1].code, 'D_1200006994') self.assertEqual(s.authors, ['Veyron, S.', 'Cherfils, J.']) self.assertEqual(s.citations[0].doi, '10.1038/s41467-019-09023-1') self.assertEqual(s.grants[0].funding_organization, 'DIM Malinf') self.assertEqual(len(s.revisions), 4) self.assertEqual(len(s.entities), 5) self.assertEqual(len(s.asym_units), 14) self.assertEqual( [x.name for x in s.software], ['BUSTER', 'autoPROC', 'XDS', 'PHENIX']) m = s.state_groups[0][0][0][0] self.assertEqual(len(m._atoms), 3528) self.assertAlmostEqual(m._atoms[0].x, -23.51, delta=0.01) self.assertAlmostEqual(m._atoms[0].y, 15.583, delta=0.01) self.assertAlmostEqual(m._atoms[0].z, 17.773, delta=0.01) def test_read_full_pdbx_mmcif(self): """Test reading a full PDBx file in mmCIF format""" fname = utils.get_input_file_name(TOPDIR, '6ep0.cif.gz') with gzip.open(fname, 'rt') as f: s, = ihm.reader.read(f) self._check_pdbx(s) @unittest.skipIf(_format is None, "No C tokenizer") def test_read_full_pdbx_bcif(self): """Test reading a full PDBx file in BinaryCIF format""" fname = utils.get_input_file_name(TOPDIR, '6ep0.bcif.gz') with gzip.open(fname, 'rb') as f: s, = ihm.reader.read(f, format='BCIF') self._check_pdbx(s) def test_old_file_read_default(self): """Test default handling of old files""" cif = """ loop_ _audit_conform.dict_name _audit_conform.dict_version mmcif_pdbx.dic 5.311 ihm-extension.dic 0.14 """ s, = ihm.reader.read(StringIO(cif)) def test_old_file_read_fail(self): """Test failure reading old files""" cif = """ loop_ _audit_conform.dict_name _audit_conform.dict_version mmcif_pdbx.dic 5.311 ihm-extension.dic 0.14 """ self.assertRaises(ihm.reader.OldFileError, ihm.reader.read, StringIO(cif), reject_old_file=True) def test_new_file_read_ok(self): """Test success reading not-old files""" # File read is OK if version is new enough, or version cannot be parsed # because it is non-int or has too many elements for ver in ('1.0', '0.0.4', '0.0a'): cif = """ loop_ _audit_conform.dict_name _audit_conform.dict_version mmcif_pdbx.dic 5.311 ihm-extension.dic %s """ % ver s, = ihm.reader.read(StringIO(cif), reject_old_file=True) def test_warn_unknown_category(self): """Test warnings for unknown categories""" cif = """ _cat1.foo baz _cat1.bar baz # loop_ _cat2.foo _cat2.bar x y """ with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") # Test with no warnings s, = ihm.reader.read(StringIO(cif)) self.assertEqual(len(w), 0) s, = ihm.reader.read(StringIO(cif), warn_unknown_category=True) # Should only warn once per category self.assertEqual(len(w), 2) self.assertEqual(w[0].category, 
ihm.reader.UnknownCategoryWarning) self.assertTrue('Unknown category _cat1 encountered on line 2' in str(w[0].message)) self.assertEqual(w[1].category, ihm.reader.UnknownCategoryWarning) self.assertTrue('Unknown category _cat2 encountered on line 6' in str(w[1].message)) def test_warn_unknown_keyword(self): """Test warnings for unknown keywords""" cif = """ _cat1.foo baz _struct.unknown foo # loop_ _struct_asym.id _struct_asym.bar 1 y loop_ _audit_author.pdbx_ordinal _audit_author.name 1 "Smith J" """ with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") # Test with no warnings s, = ihm.reader.read(StringIO(cif)) self.assertEqual(len(w), 0) s, = ihm.reader.read(StringIO(cif), warn_unknown_keyword=True) # pdbx_ordinal is explicitly ignored, so should not trigger # a warning self.assertEqual(len(w), 2) self.assertEqual(w[0].category, ihm.reader.UnknownKeywordWarning) self.assertTrue('keyword _struct.unknown encountered on line 3' in str(w[0].message)) self.assertEqual(w[1].category, ihm.reader.UnknownKeywordWarning) self.assertTrue('keyword _struct_asym.bar encountered on line 7' in str(w[1].message)) def test_predicted_contact_restraint_handler(self): """Test PredictedContactRestraintHandler""" fh = StringIO(""" loop_ _ihm_predicted_contact_restraint.id _ihm_predicted_contact_restraint.group_id _ihm_predicted_contact_restraint.entity_id_1 _ihm_predicted_contact_restraint.asym_id_1 _ihm_predicted_contact_restraint.comp_id_1 _ihm_predicted_contact_restraint.seq_id_1 _ihm_predicted_contact_restraint.rep_atom_1 _ihm_predicted_contact_restraint.entity_id_2 _ihm_predicted_contact_restraint.asym_id_2 _ihm_predicted_contact_restraint.comp_id_2 _ihm_predicted_contact_restraint.seq_id_2 _ihm_predicted_contact_restraint.rep_atom_2 _ihm_predicted_contact_restraint.restraint_type _ihm_predicted_contact_restraint.distance_lower_limit _ihm_predicted_contact_restraint.distance_upper_limit _ihm_predicted_contact_restraint.probability _ihm_predicted_contact_restraint.model_granularity _ihm_predicted_contact_restraint.dataset_list_id _ihm_predicted_contact_restraint.software_id 1 . 1 A ALA 1 . 2 B TRP 2 . 'lower bound' 25.000 . 0.800 by-residue 97 34 2 1 1 A ALA 1 CA 2 B TRP 2 CB 'lower bound' 25.000 . 0.400 by-residue 97 . 3 1 1 A ALA 1 . 2 B TRP 2 . 'upper bound' . 14.000 0.600 by-feature 97 . 
""") s, = ihm.reader.read(fh) r1, r2, r3 = s.restraints rg1, = s.restraint_groups self.assertEqual([r for r in rg1], [r2, r3]) self.assertEqual(r1.dataset._id, '97') self.assertIsInstance(r1.resatom1, ihm.Residue) self.assertEqual(r1.resatom1.seq_id, 1) self.assertEqual(r1.resatom1.asym._id, 'A') self.assertIsInstance(r1.resatom2, ihm.Residue) self.assertEqual(r1.resatom2.seq_id, 2) self.assertEqual(r1.resatom2.asym._id, 'B') self.assertIsInstance(r1.distance, ihm.restraint.LowerBoundDistanceRestraint) self.assertAlmostEqual(r1.distance.distance, 25.000, delta=0.1) self.assertAlmostEqual(r1.probability, 0.8000, delta=0.1) self.assertEqual(r1.by_residue, True) self.assertEqual(r1.software._id, '34') self.assertIsInstance(r2.resatom1, ihm.Atom) self.assertEqual(r2.resatom1.seq_id, 1) self.assertEqual(r2.resatom1.asym._id, 'A') self.assertEqual(r2.resatom1.id, 'CA') self.assertIsInstance(r2.resatom2, ihm.Atom) self.assertEqual(r2.resatom2.seq_id, 2) self.assertEqual(r2.resatom2.asym._id, 'B') self.assertEqual(r2.resatom2.id, 'CB') self.assertIsInstance(r3.distance, ihm.restraint.UpperBoundDistanceRestraint) self.assertAlmostEqual(r3.distance.distance, 14.000, delta=0.1) self.assertIsNone(r3.software) def get_starting_model_coord(self): return """ loop_ _ihm_starting_model_coord.starting_model_id _ihm_starting_model_coord.group_PDB _ihm_starting_model_coord.id _ihm_starting_model_coord.type_symbol _ihm_starting_model_coord.atom_id _ihm_starting_model_coord.comp_id _ihm_starting_model_coord.entity_id _ihm_starting_model_coord.asym_id _ihm_starting_model_coord.seq_id _ihm_starting_model_coord.Cartn_x _ihm_starting_model_coord.Cartn_y _ihm_starting_model_coord.Cartn_z _ihm_starting_model_coord.B_iso_or_equiv _ihm_starting_model_coord.ordinal_id 1 ATOM 1 N N TYR 1 A 7 8.436 112.871 97.789 . 1 1 HETATM 2 C CA TYR 1 B . 
7.951 111.565 97.289 91.820 2 """ def test_starting_model_coord_handler(self): """Test StartingModelCoordHandler""" fh = StringIO(self.get_starting_model_coord()) s, = ihm.reader.read(fh) sm, = s.orphan_starting_models a1, a2 = sm._atoms self.assertEqual(a1.asym_unit._id, 'A') self.assertEqual(a1.seq_id, 7) self.assertEqual(a1.atom_id, 'N') self.assertEqual(a1.type_symbol, 'N') self.assertAlmostEqual(a1.x, 8.436, delta=0.01) self.assertAlmostEqual(a1.y, 112.871, delta=0.01) self.assertAlmostEqual(a1.z, 97.789, delta=0.01) self.assertEqual(a1.het, False) self.assertIsNone(a1.biso) self.assertEqual(a2.asym_unit._id, 'B') self.assertIsNone(a2.seq_id) self.assertEqual(a2.atom_id, 'CA') self.assertEqual(a2.type_symbol, 'C') self.assertEqual(a2.het, True) self.assertAlmostEqual(a2.biso, 91.820, delta=0.1) def test_starting_model_coord_ignored(self): """Test read, ignoring starting model coordinates""" fh = StringIO(self.get_starting_model_coord()) s, = ihm.reader.read(fh, read_starting_model_coord=False) self.assertEqual(len(s.orphan_starting_models), 0) def test_starting_model_seq_dif_handler(self): """Test StartingModelSeqDifHandler""" fh = StringIO(""" loop_ _ihm_starting_model_seq_dif.ordinal_id _ihm_starting_model_seq_dif.entity_id _ihm_starting_model_seq_dif.asym_id _ihm_starting_model_seq_dif.seq_id _ihm_starting_model_seq_dif.comp_id _ihm_starting_model_seq_dif.starting_model_id _ihm_starting_model_seq_dif.db_asym_id _ihm_starting_model_seq_dif.db_seq_id _ihm_starting_model_seq_dif.db_comp_id _ihm_starting_model_seq_dif.details 1 7 G 11 LEU 9 D 12 MSE 'Mutation of MSE to LEU' 2 7 G 17 LEU 9 D 18 MSE 'Mutation of MSE to LEU' """) s, = ihm.reader.read(fh) sm, = s.orphan_starting_models sd1, sd2 = sm._seq_difs self.assertEqual(sd1.seq_id, 11) self.assertEqual(sd1.db_seq_id, 12) self.assertEqual(sd1.db_comp_id, "MSE") self.assertEqual(sd1.details, "Mutation of MSE to LEU") def test_multi_state_scheme_handler(self): """Test MultiStateSchemeHandler""" fh = StringIO(""" loop_ _ihm_multi_state_scheme.id _ihm_multi_state_scheme.name _ihm_multi_state_scheme.details 1 'scheme1' 'details1' 2 'scheme2' . """) s, = ihm.reader.read(fh) schemes = s.multi_state_schemes mss1 = schemes[0] self.assertIsInstance(mss1, ihm.multi_state_scheme.MultiStateScheme) self.assertEqual(mss1._id, '1') self.assertEqual(mss1.name, 'scheme1') self.assertEqual(mss1.details, 'details1') mss2 = schemes[1] self.assertIsInstance(mss2, ihm.multi_state_scheme.MultiStateScheme) self.assertEqual(mss2._id, '2') self.assertEqual(mss2.name, 'scheme2') self.assertIsNone(mss2.details) def test_multi_state_scheme_connectivity_handler(self): """Test MultiStateSchemeConnectivityHandler""" mss_cif = """ loop_ _ihm_multi_state_scheme.id _ihm_multi_state_scheme.name _ihm_multi_state_scheme.details 1 'scheme1' 'details1' 2 'scheme2' 'details2' # """ mssc_cif = """ # loop_ _ihm_multi_state_scheme_connectivity.id _ihm_multi_state_scheme_connectivity.scheme_id _ihm_multi_state_scheme_connectivity.begin_state_id _ihm_multi_state_scheme_connectivity.end_state_id _ihm_multi_state_scheme_connectivity.dataset_group_id _ihm_multi_state_scheme_connectivity.details 1 1 1 2 10 'connectivity1' 2 1 2 1 11 . 3 1 3 . 12 'connectivity3' 4 2 2 1 11 . 
""" # Order of categories should not matter for cif in (mss_cif + mssc_cif, mssc_cif + mss_cif): for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) schemes = s.multi_state_schemes mss1 = schemes[0] # Connectivity 1 c1 = mss1._connectivity_list[0] self.assertIsInstance( c1, ihm.multi_state_scheme.Connectivity) self.assertEqual(c1._id, '1') self.assertIsInstance(c1.begin_state, ihm.model.State) self.assertEqual(c1.begin_state._id, '1') self.assertIsInstance(c1.end_state, ihm.model.State) self.assertEqual(c1.end_state._id, '2') self.assertIsInstance(c1.dataset_group, ihm.dataset.DatasetGroup) self.assertEqual(c1.dataset_group._id, '10') self.assertEqual(c1.details, 'connectivity1') # Connectivity 2 c2 = mss1._connectivity_list[1] self.assertIsInstance( c2, ihm.multi_state_scheme.Connectivity) self.assertEqual(c2._id, '2') self.assertIsInstance(c2.begin_state, ihm.model.State) self.assertEqual(c2.begin_state._id, '2') self.assertIsInstance(c1.end_state, ihm.model.State) self.assertEqual(c2.end_state._id, '1') self.assertIsInstance(c2.dataset_group, ihm.dataset.DatasetGroup) self.assertEqual(c2.dataset_group._id, '11') self.assertIsNone(c2.details) # Connectivity 1 is unequal to Connectivity 2 self.assertFalse(c1 == c2) # Connectivity 3 c3 = mss1._connectivity_list[2] self.assertIsInstance( c3, ihm.multi_state_scheme.Connectivity) self.assertEqual(c3._id, '3') self.assertIsInstance(c3.begin_state, ihm.model.State) self.assertEqual(c3.begin_state._id, '3') self.assertIsNone(c3.end_state) self.assertIsInstance(c3.dataset_group, ihm.dataset.DatasetGroup) self.assertEqual(c3.dataset_group._id, '12') self.assertEqual(c3.details, 'connectivity3') # Connectivity 4 - belongs to scheme 2 mss2 = schemes[1] c4 = mss2._connectivity_list[0] self.assertIsInstance( c4, ihm.multi_state_scheme.Connectivity) self.assertEqual(c4._id, '4') self.assertIsInstance(c4.begin_state, ihm.model.State) self.assertEqual(c4.begin_state._id, '2') self.assertIsInstance(c4.end_state, ihm.model.State) self.assertEqual(c4.end_state._id, '1') self.assertIsInstance(c4.dataset_group, ihm.dataset.DatasetGroup) self.assertEqual(c4.dataset_group._id, '11') self.assertIsNone(c4.details) def test_kinetic_rate_handler(self): """Test KineticRateHandler""" mss_cif = """ loop_ _ihm_multi_state_scheme.id _ihm_multi_state_scheme.name _ihm_multi_state_scheme.details 1 'scheme1' 'details1' 2 'scheme2' 'details2' # # loop_ _ihm_multi_state_scheme_connectivity.id _ihm_multi_state_scheme_connectivity.scheme_id _ihm_multi_state_scheme_connectivity.begin_state_id _ihm_multi_state_scheme_connectivity.end_state_id _ihm_multi_state_scheme_connectivity.dataset_group_id _ihm_multi_state_scheme_connectivity.details 1 1 1 2 10 'connectivity1' 2 1 2 1 11 'connectivity2' 3 2 1 2 10 'connectivity1' 4 1 1 2 10 'connectivity3' 5 1 1 2 10 'connectivity4' # """ rate_cif = """ # loop_ _ihm_kinetic_rate.id _ihm_kinetic_rate.transition_rate_constant _ihm_kinetic_rate.equilibrium_constant _ihm_kinetic_rate.equilibrium_constant_determination_method _ihm_kinetic_rate.equilibrium_constant_unit _ihm_kinetic_rate.details _ihm_kinetic_rate.scheme_connectivity_id _ihm_kinetic_rate.dataset_group_id _ihm_kinetic_rate.external_file_id 1 3.0 . . . 'rate1' 1 4 5 2 . 6.5 'equilibrium constant is determined from population' . 'rate2' 2 . . 3 7.0 . . . 'rate3' 3 8 9 4 . 8.5 'equilibrium constant is determined from kinetic rates, kAB/kBA' 'unit' 'rate4' 4 . . 5 . 9.5 'equilibrium constant is determined from another method not listed' . 'rate5' 5 . . 
""" # Order of categories should not matter for cif in (mss_cif + rate_cif, rate_cif + mss_cif): for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) schemes = s.multi_state_schemes mss1 = schemes[0] k1 = mss1._connectivity_list[0].kinetic_rate self.assertIsInstance(k1, ihm.multi_state_scheme.KineticRate) self.assertEqual(k1._id, '1') self.assertEqual(k1.transition_rate_constant, '3.0') self.assertIsNone(k1.equilibrium_constant) self.assertEqual(k1.details, 'rate1') self.assertIsInstance(k1.dataset_group, ihm.dataset.DatasetGroup) self.assertEqual(k1.dataset_group._id, '4') self.assertIsInstance(k1.external_file, ihm.location.Location) self.assertEqual(k1.external_file._id, '5') k2 = mss1._connectivity_list[1].kinetic_rate self.assertIsInstance(k2, ihm.multi_state_scheme.KineticRate) self.assertEqual(k2._id, '2') self.assertIsNone(k2.transition_rate_constant) self.assertIsInstance( k2.equilibrium_constant, ihm.multi_state_scheme.PopulationEquilibriumConstant) self.assertEqual(k2.equilibrium_constant.value, '6.5') self.assertEqual( k2.equilibrium_constant.method, 'equilibrium constant is determined from population') self.assertIsNone(k2.equilibrium_constant.unit) self.assertEqual(k2.details, 'rate2') self.assertIsNone(k2.dataset_group) self.assertIsNone(k2.external_file) mss2 = schemes[1] k3 = mss2._connectivity_list[0].kinetic_rate self.assertIsInstance(k3, ihm.multi_state_scheme.KineticRate) self.assertEqual(k3._id, '3') self.assertEqual(k3.transition_rate_constant, '7.0') self.assertIsNone(k3.equilibrium_constant) self.assertEqual(k3.details, 'rate3') self.assertIsInstance(k3.dataset_group, ihm.dataset.DatasetGroup) self.assertEqual(k3.dataset_group._id, '8') self.assertIsInstance(k3.external_file, ihm.location.Location) self.assertEqual(k3.external_file._id, '9') k4 = mss1._connectivity_list[2].kinetic_rate self.assertIsInstance(k4, ihm.multi_state_scheme.KineticRate) self.assertEqual(k4._id, '4') self.assertIsNone(k4.transition_rate_constant) self.assertIsInstance( k4.equilibrium_constant, ihm.multi_state_scheme.KineticRateEquilibriumConstant) self.assertEqual(k4.equilibrium_constant.value, '8.5') self.assertEqual( k4.equilibrium_constant.method, 'equilibrium constant is determined from kinetic ' 'rates, kAB/kBA') self.assertEqual(k4.equilibrium_constant.unit, 'unit') self.assertEqual(k4.details, 'rate4') self.assertIsNone(k4.dataset_group) self.assertIsNone(k4.external_file) k5 = mss1._connectivity_list[3].kinetic_rate self.assertIsInstance(k5, ihm.multi_state_scheme.KineticRate) self.assertEqual(k5._id, '5') self.assertIsNone(k5.transition_rate_constant) self.assertIsInstance( k5.equilibrium_constant, ihm.multi_state_scheme.EquilibriumConstant) self.assertEqual(k5.equilibrium_constant.value, '9.5') self.assertEqual( k5.equilibrium_constant.method, 'equilibrium constant is determined from another ' 'method not listed') self.assertIsNone(k5.equilibrium_constant.unit) self.assertEqual(k5.details, 'rate5') self.assertIsNone(k5.dataset_group) self.assertIsNone(k5.external_file) def test_relaxation_time_handler(self): """Test RelaxationTimeHandler and RelaxationTimeMultiStateSchemeHandler""" mss_cif = """ loop_ _ihm_multi_state_scheme.id _ihm_multi_state_scheme.name _ihm_multi_state_scheme.details 1 'scheme1' 'details1' # # loop_ _ihm_multi_state_scheme_connectivity.id _ihm_multi_state_scheme_connectivity.scheme_id _ihm_multi_state_scheme_connectivity.begin_state_id _ihm_multi_state_scheme_connectivity.end_state_id _ihm_multi_state_scheme_connectivity.dataset_group_id 
_ihm_multi_state_scheme_connectivity.details 1 1 1 2 10 'connectivity1' 2 1 2 1 11 'connectivity2' # """ relaxation_time_cif = """ # loop_ _ihm_relaxation_time.id _ihm_relaxation_time.value _ihm_relaxation_time.unit _ihm_relaxation_time.amplitude _ihm_relaxation_time.dataset_group_id _ihm_relaxation_time.external_file_id _ihm_relaxation_time.details 1 10.0 seconds 0.5 20 21 'relaxation_time1' 2 11.5 milliseconds . 22 23 'relaxation_time2' 3 12.0 seconds 0.4 24 25 'relaxation_time3' # # loop_ _ihm_relaxation_time_multi_state_scheme.id _ihm_relaxation_time_multi_state_scheme.relaxation_time_id _ihm_relaxation_time_multi_state_scheme.scheme_id _ihm_relaxation_time_multi_state_scheme.scheme_connectivity_id _ihm_relaxation_time_multi_state_scheme.details 1 1 1 1 . 2 2 1 2 . 3 3 1 . . """ # Order of categories should not matter for cif in (relaxation_time_cif + mss_cif, mss_cif + relaxation_time_cif, ): for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) schemes = s.multi_state_schemes mss1 = schemes[0] self.assertEqual(len(mss1._connectivity_list), 2) r1 = mss1._connectivity_list[0].relaxation_time self.assertIsInstance(r1, ihm.multi_state_scheme.RelaxationTime) self.assertEqual(r1._id, '1') self.assertEqual(r1.value, '10.0') self.assertEqual(r1.unit, 'seconds') self.assertEqual(r1.amplitude, '0.5') self.assertIsInstance(r1.dataset_group, ihm.dataset.DatasetGroup) self.assertEqual(r1.dataset_group._id, '20') self.assertIsInstance(r1.external_file, ihm.location.Location) self.assertEqual(r1.external_file._id, '21') self.assertEqual(r1.details, 'relaxation_time1') r2 = mss1._connectivity_list[1].relaxation_time self.assertIsInstance(r2, ihm.multi_state_scheme.RelaxationTime) self.assertEqual(r2._id, '2') self.assertEqual(r2.value, '11.5') self.assertEqual(r2.unit, 'milliseconds') self.assertIsNone(r2.amplitude) self.assertIsInstance(r2.dataset_group, ihm.dataset.DatasetGroup) self.assertEqual(r2.dataset_group._id, '22') self.assertIsInstance(r2.external_file, ihm.location.Location) self.assertEqual(r2.external_file._id, '23') self.assertEqual(r2.details, 'relaxation_time2') # Relaxation time 3 is only assigned to a scheme, # not to a connectivity self.assertEqual(len(mss1._relaxation_time_list), 1) r3 = mss1._relaxation_time_list[0] self.assertIsInstance(r3, ihm.multi_state_scheme.RelaxationTime) self.assertEqual(r3._id, '3') self.assertEqual(r3.value, '12.0') self.assertEqual(r3.unit, 'seconds') self.assertEqual(r3.amplitude, '0.4') self.assertIsInstance(r3.dataset_group, ihm.dataset.DatasetGroup) self.assertEqual(r3.dataset_group._id, '24') self.assertIsInstance(r3.external_file, ihm.location.Location) self.assertEqual(r3.external_file._id, '25') self.assertEqual(r3.details, 'relaxation_time3') def test_flr_experiment_handler(self): """Test FLRExperimentHandler""" fh = StringIO(""" loop_ _flr_experiment.ordinal_id _flr_experiment.id _flr_experiment.instrument_id _flr_experiment.inst_setting_id _flr_experiment.exp_condition_id _flr_experiment.sample_id _flr_experiment.details 1 1 1 12 22 42 "exp 1" 2 1 1 2 2 2 . 
""") s, = ihm.reader.read(fh) flr, = s.flr_data experiment, = list(flr._collection_flr_experiment.values()) self.assertIsInstance(experiment, ihm.flr.Experiment) self.assertIsInstance(experiment.instrument_list[0], ihm.flr.Instrument) self.assertIsInstance(experiment.inst_setting_list[0], ihm.flr.InstSetting) self.assertIsInstance(experiment.exp_condition_list[0], ihm.flr.ExpCondition) self.assertIsInstance(experiment.sample_list[0], ihm.flr.Sample) self.assertEqual([i._id for i in experiment.instrument_list], ['1', '1']) self.assertEqual([i._id for i in experiment.inst_setting_list], ['12', '2']) self.assertEqual([i._id for i in experiment.exp_condition_list], ['22', '2']) self.assertEqual([i._id for i in experiment.sample_list], ['42', '2']) self.assertEqual(experiment.details_list, ["exp 1", None]) def test_flr_inst_setting_handler(self): """Test FLRInstSettingHandler""" fh = StringIO(""" loop_ _flr_inst_setting.id _flr_inst_setting.details 1 My_Inst_setting_1 2 . """) s, = ihm.reader.read(fh) flr, = s.flr_data self.assertEqual(sorted(flr._collection_flr_inst_setting.keys()), ['1', '2']) is1 = flr._collection_flr_inst_setting['1'] self.assertIsInstance(is1, ihm.flr.InstSetting) self.assertEqual(is1.details, 'My_Inst_setting_1') is2 = flr._collection_flr_inst_setting['2'] self.assertIsInstance(is2, ihm.flr.InstSetting) self.assertIsNone(is2.details) def test_flr_exp_condition_handler(self): """Test FLRExpConditionHandler""" fh = StringIO(""" loop_ _flr_exp_condition.id _flr_exp_condition.details 1 My_Exp_condition_1 2 . """) s, = ihm.reader.read(fh) flr, = s.flr_data self.assertEqual(sorted(flr._collection_flr_exp_condition.keys()), ['1', '2']) ec1 = flr._collection_flr_exp_condition['1'] self.assertIsInstance(ec1, ihm.flr.ExpCondition) self.assertEqual(ec1.details, 'My_Exp_condition_1') ec2 = flr._collection_flr_exp_condition['2'] self.assertIsInstance(ec2, ihm.flr.ExpCondition) self.assertIsNone(ec2.details) def test_flr_instrument_handler(self): """Test FLRInstrumentHandler""" fh = StringIO(""" loop_ _flr_instrument.id _flr_instrument.details 1 test 2 . """) s, = ihm.reader.read(fh) flr, = s.flr_data self.assertEqual(sorted(flr._collection_flr_instrument.keys()), ['1', '2']) i1 = flr._collection_flr_instrument['1'] self.assertIsInstance(i1, ihm.flr.Instrument) self.assertEqual(i1.details, 'test') i2 = flr._collection_flr_instrument['2'] self.assertIsInstance(i2, ihm.flr.Instrument) self.assertIsNone(i2.details) def test_flr_entity_assembly_handler(self): """Test FLREntityAssemblyHandler""" fh = StringIO(""" loop_ _flr_entity_assembly.ordinal_id _flr_entity_assembly.assembly_id _flr_entity_assembly.entity_id _flr_entity_assembly.num_copies _flr_entity_assembly.entity_description 1 1 1 1 Entity_1 2 1 2 4 Entity_2 """) s, = ihm.reader.read(fh) self.assertEqual(len(s.entities), 2) flr, = s.flr_data self.assertEqual(sorted(flr._collection_flr_entity_assembly.keys()), ['1']) a1 = flr._collection_flr_entity_assembly['1'] self.assertIsInstance(a1, ihm.flr.EntityAssembly) self.assertEqual([x._id for x in a1.entity_list], ['1', '2']) self.assertEqual(a1.num_copies_list, [1, 4]) def test_flr_sample_condition_handler(self): """Test FLRSampleConditionHandler""" fh = StringIO(""" loop_ _flr_sample_condition.id _flr_sample_condition.details 1 test 2 . 
# """) s, = ihm.reader.read(fh) flr, = s.flr_data self.assertEqual(sorted(flr._collection_flr_sample_condition.keys()), ['1', '2']) s1 = flr._collection_flr_sample_condition['1'] self.assertIsInstance(s1, ihm.flr.SampleCondition) self.assertEqual(s1.details, 'test') s2 = flr._collection_flr_sample_condition['2'] self.assertIsInstance(s2, ihm.flr.SampleCondition) self.assertIsNone(s2.details) def test_flr_sample_handler(self): """Test FLRSampleHandler""" fh = StringIO(""" loop_ _flr_sample.id _flr_sample.entity_assembly_id _flr_sample.num_of_probes _flr_sample.sample_condition_id _flr_sample.sample_description _flr_sample.sample_details _flr_sample.solvent_phase 1 1 2 1 Sample_1 'Details sample 1' liquid 2 1 4 2 Sample_2 'Details sample 2' vitrified """) s, = ihm.reader.read(fh) flr, = s.flr_data self.assertEqual(sorted(flr._collection_flr_sample.keys()), ['1', '2']) s1 = flr._collection_flr_sample['1'] self.assertIsInstance(s1, ihm.flr.Sample) self.assertEqual(s1.entity_assembly._id, '1') self.assertEqual(s1.num_of_probes, 2) self.assertEqual(s1.condition._id, '1') self.assertEqual(s1.description, 'Sample_1') self.assertEqual(s1.details, 'Details sample 1') self.assertEqual(s1.solvent_phase, 'liquid') def test_flr_probe_list_handler(self): """Test FLRProbeListHandler""" fh = StringIO(""" loop_ _flr_probe_list.probe_id _flr_probe_list.chromophore_name _flr_probe_list.reactive_probe_flag _flr_probe_list.reactive_probe_name _flr_probe_list.probe_origin _flr_probe_list.probe_link_type 1 Donor1 NO . extrinsic covalent 2 Acceptor2 YES 'Acceptor1 reactive' extrinsic covalent """) s, = ihm.reader.read(fh) flr, = s.flr_data self.assertEqual(sorted(flr._collection_flr_probe.keys()), ['1', '2']) p1 = flr._collection_flr_probe['1'].probe_list_entry self.assertIsInstance(p1, ihm.flr.ProbeList) self.assertEqual(p1.chromophore_name, 'Donor1') self.assertEqual(p1.reactive_probe_flag, False) self.assertIsNone(p1.reactive_probe_name) self.assertEqual(p1.probe_origin, 'extrinsic') self.assertEqual(p1.probe_link_type, 'covalent') p2 = flr._collection_flr_probe['2'].probe_list_entry self.assertIsInstance(p2, ihm.flr.ProbeList) self.assertEqual(p2.chromophore_name, 'Acceptor2') self.assertEqual(p2.reactive_probe_flag, True) self.assertEqual(p2.reactive_probe_name, 'Acceptor1 reactive') self.assertEqual(p2.probe_origin, 'extrinsic') self.assertEqual(p2.probe_link_type, 'covalent') def test_flr_probe_descriptor_handler(self): """Test FLRProbeDescriptorHandler""" fh = StringIO(""" loop_ _flr_probe_descriptor.probe_id _flr_probe_descriptor.reactive_probe_chem_descriptor_id _flr_probe_descriptor.chromophore_chem_descriptor_id _flr_probe_descriptor.chromophore_center_atom 1 . 
1 CB 2 3 2 CB """) s, = ihm.reader.read(fh) flr, = s.flr_data self.assertEqual(sorted(flr._collection_flr_probe.keys()), ['1', '2']) p1 = flr._collection_flr_probe['1'].probe_descriptor self.assertIsInstance(p1, ihm.flr.ProbeDescriptor) self.assertIsNone(p1.reactive_probe_chem_descriptor) self.assertIsInstance(p1.chromophore_chem_descriptor, ihm.ChemDescriptor) self.assertEqual(p1.chromophore_chem_descriptor._id, '1') self.assertEqual(p1.chromophore_center_atom, 'CB') p2 = flr._collection_flr_probe['2'].probe_descriptor self.assertIsInstance(p2, ihm.flr.ProbeDescriptor) self.assertIsInstance(p2.reactive_probe_chem_descriptor, ihm.ChemDescriptor) self.assertEqual(p2.reactive_probe_chem_descriptor._id, '3') self.assertIsInstance(p2.chromophore_chem_descriptor, ihm.ChemDescriptor) self.assertEqual(p2.chromophore_chem_descriptor._id, '2') self.assertEqual(p2.chromophore_center_atom, 'CB') def test_flr_sample_probe_details_handler(self): """Test FLRSampleProbeDetailsHandler""" fh = StringIO(""" loop_ _flr_sample_probe_details.sample_probe_id _flr_sample_probe_details.sample_id _flr_sample_probe_details.probe_id _flr_sample_probe_details.fluorophore_type _flr_sample_probe_details.description _flr_sample_probe_details.poly_probe_position_id 1 42 99 donor 'Donor in position1-position3' 34 2 1 2 acceptor 'Acceptor in position1-position3' 2 """) s, = ihm.reader.read(fh) flr, = s.flr_data self.assertEqual( sorted(flr._collection_flr_sample_probe_details.keys()), ['1', '2']) p1 = flr._collection_flr_sample_probe_details['1'] self.assertIsInstance(p1, ihm.flr.SampleProbeDetails) self.assertIsInstance(p1.sample, ihm.flr.Sample) self.assertEqual(p1.sample._id, '42') self.assertIsInstance(p1.probe, ihm.flr.Probe) self.assertEqual(p1.probe._id, '99') self.assertEqual(p1.fluorophore_type, 'donor') self.assertEqual(p1.description, 'Donor in position1-position3') self.assertIsInstance(p1.poly_probe_position, ihm.flr.PolyProbePosition) self.assertEqual(p1.poly_probe_position._id, '34') def test_flr_poly_probe_position_handler(self): """Test FLRPolyProbePositionHandler""" fh = StringIO(""" loop_ _flr_poly_probe_position.id _flr_poly_probe_position.entity_id _flr_poly_probe_position.entity_description _flr_poly_probe_position.asym_id _flr_poly_probe_position.seq_id _flr_poly_probe_position.comp_id _flr_poly_probe_position.atom_id _flr_poly_probe_position.mutation_flag _flr_poly_probe_position.modification_flag _flr_poly_probe_position.auth_name 1 1 Entity_1 . 1 ALA . 
NO YES Position_1 2 2 Entity_2 C 10 CYS CB NO YES Position_3 """) s, = ihm.reader.read(fh) flr, = s.flr_data self.assertEqual(len(s.entities), 2) self.assertEqual( sorted(flr._collection_flr_poly_probe_position.keys()), ['1', '2']) p1 = flr._collection_flr_poly_probe_position['1'] self.assertIsInstance(p1, ihm.flr.PolyProbePosition) self.assertIsInstance(p1.resatom, ihm.Residue) self.assertEqual(p1.resatom.seq_id, 1) self.assertEqual(p1.resatom.entity._id, '1') self.assertIsNone(p1.resatom.asym) p2 = flr._collection_flr_poly_probe_position['2'] self.assertIsInstance(p2, ihm.flr.PolyProbePosition) self.assertIsInstance(p2.resatom, ihm.Atom) self.assertIsInstance(p2.resatom.asym, ihm.AsymUnit) self.assertEqual(p2.resatom.id, 'CB') self.assertEqual(p2.resatom.seq_id, 10) self.assertEqual(p2.resatom.entity._id, '2') self.assertEqual(p2.resatom.asym.entity._id, '2') self.assertEqual(p2.resatom.asym.id, 'C') def test_flr_poly_probe_position_modified_handler(self): """Test FLRPolyProbePositionModifiedHandler""" fh = StringIO(""" loop_ _flr_poly_probe_position_modified.id _flr_poly_probe_position_modified.chem_descriptor_id _flr_poly_probe_position_modified.atom_id 1 4 . 2 4 CB """) s, = ihm.reader.read(fh) flr, = s.flr_data self.assertEqual( sorted(flr._collection_flr_poly_probe_position.keys()), ['1', '2']) p1 = flr._collection_flr_poly_probe_position['1'] self.assertIsInstance(p1.modified_chem_descriptor, ihm.ChemDescriptor) self.assertEqual(p1.modified_chem_descriptor._id, '4') p2 = flr._collection_flr_poly_probe_position['2'] self.assertIsInstance(p2.modified_chem_descriptor, ihm.ChemDescriptor) self.assertEqual(p2.modified_chem_descriptor._id, '4') def test_flr_poly_probe_position_mutated_handler(self): """Test FLRPolyProbePositionMutatedHandler""" fh = StringIO(""" loop_ _flr_poly_probe_position_mutated.id _flr_poly_probe_position_mutated.chem_comp_id _flr_poly_probe_position_mutated.atom_id 1 Ala . 2 Cys CB """) s, = ihm.reader.read(fh) flr, = s.flr_data self.assertEqual( sorted(flr._collection_flr_poly_probe_position.keys()), ['1', '2']) p1 = flr._collection_flr_poly_probe_position['1'] self.assertIsInstance(p1.mutated_chem_comp_id, ihm.ChemComp) self.assertEqual(p1.mutated_chem_comp_id.id, 'Ala') p2 = flr._collection_flr_poly_probe_position['2'] self.assertIsInstance(p2.mutated_chem_comp_id, ihm.ChemComp) self.assertEqual(p2.mutated_chem_comp_id.id, 'Cys') def test_flr_poly_probe_conjugate_handler(self): """Test FLRPolyProbeConjugateHandler""" fh = StringIO(""" loop_ _flr_poly_probe_conjugate.id _flr_poly_probe_conjugate.sample_probe_id _flr_poly_probe_conjugate.chem_descriptor_id _flr_poly_probe_conjugate.ambiguous_stoichiometry_flag _flr_poly_probe_conjugate.probe_stoichiometry 1 1 5 NO . 
2 2 5 YES 2 """) s, = ihm.reader.read(fh) flr, = s.flr_data self.assertEqual( sorted(flr._collection_flr_poly_probe_conjugate.keys()), ['1', '2']) p1, p2 = s.flr_data[0].poly_probe_conjugates self.assertIsInstance(p1.sample_probe, ihm.flr.SampleProbeDetails) self.assertEqual(p1.sample_probe._id, '1') self.assertIsInstance(p1.chem_descriptor, ihm.ChemDescriptor) self.assertEqual(p1.chem_descriptor._id, '5') self.assertEqual(p1.ambiguous_stoichiometry, False) self.assertIsNone(p1.probe_stoichiometry) self.assertIsInstance(p2.sample_probe, ihm.flr.SampleProbeDetails) self.assertEqual(p2.sample_probe._id, '2') self.assertIsInstance(p2.chem_descriptor, ihm.ChemDescriptor) self.assertEqual(p2.chem_descriptor._id, '5') self.assertEqual(p2.ambiguous_stoichiometry, True) self.assertAlmostEqual(p2.probe_stoichiometry, 2.0, delta=1.0) def test_flr_fret_forster_radius_handler(self): """Test FLRFretForsterRadiusHandler""" fh = StringIO(""" loop_ _flr_fret_forster_radius.id _flr_fret_forster_radius.donor_probe_id _flr_fret_forster_radius.acceptor_probe_id _flr_fret_forster_radius.forster_radius _flr_fret_forster_radius.reduced_forster_radius 1 9 10 252.000 53.200 2 11 12 52.000 . """) s, = ihm.reader.read(fh) flr, = s.flr_data self.assertEqual( sorted(flr._collection_flr_fret_forster_radius.keys()), ['1', '2']) r1 = flr._collection_flr_fret_forster_radius['1'] self.assertIsInstance(r1.donor_probe, ihm.flr.Probe) self.assertEqual(r1.donor_probe._id, '9') self.assertIsInstance(r1.acceptor_probe, ihm.flr.Probe) self.assertEqual(r1.acceptor_probe._id, '10') self.assertAlmostEqual(r1.forster_radius, 252.000, delta=0.1) self.assertAlmostEqual(r1.reduced_forster_radius, 53.200, delta=0.1) r2 = flr._collection_flr_fret_forster_radius['2'] self.assertIsNone(r2.reduced_forster_radius) def test_flr_fret_calibration_parameters_handler(self): """Test FLRFretCalibrationParametersHandler""" fh = StringIO(""" loop_ _flr_fret_calibration_parameters.id _flr_fret_calibration_parameters.phi_acceptor _flr_fret_calibration_parameters.alpha _flr_fret_calibration_parameters.alpha_sd _flr_fret_calibration_parameters.gG_gR_ratio _flr_fret_calibration_parameters.beta _flr_fret_calibration_parameters.gamma _flr_fret_calibration_parameters.delta _flr_fret_calibration_parameters.a_b 1 0.350 2.400 0.1 0.400 1.0 2.0 3.0 0.800 """) s, = ihm.reader.read(fh) flr, = s.flr_data p1 = flr._collection_flr_fret_calibration_parameters['1'] self.assertAlmostEqual(p1.phi_acceptor, 0.350, delta=0.01) self.assertAlmostEqual(p1.alpha, 2.400, delta=0.1) self.assertAlmostEqual(p1.alpha_sd, 0.1, delta=0.1) self.assertAlmostEqual(p1.gg_gr_ratio, 0.4, delta=0.1) self.assertAlmostEqual(p1.beta, 1.0, delta=0.1) self.assertAlmostEqual(p1.gamma, 2.0, delta=0.1) self.assertAlmostEqual(p1.delta, 3.0, delta=0.1) self.assertAlmostEqual(p1.a_b, 0.8, delta=0.1) def test_flr_fret_analysis_handler(self): """Test FLRFretAnalysisHandler""" fh = StringIO(""" loop_ _flr_fret_analysis.id _flr_fret_analysis.experiment_id _flr_fret_analysis.type _flr_fret_analysis.sample_probe_id_1 _flr_fret_analysis.sample_probe_id_2 _flr_fret_analysis.forster_radius_id _flr_fret_analysis.dataset_list_id _flr_fret_analysis.external_file_id _flr_fret_analysis.software_id 1 8 intensity-based 9 2 11 18 42 99 2 13 lifetime-based 24 5 19 32 81 98 """) s, = ihm.reader.read(fh) flr, = s.flr_data a = flr._collection_flr_fret_analysis['1'] self.assertIsInstance(a.experiment, ihm.flr.Experiment) self.assertEqual(a.experiment._id, '8') self.assertIsInstance(a.sample_probe_1, 
ihm.flr.SampleProbeDetails) self.assertEqual(a.sample_probe_1._id, '9') self.assertIsInstance(a.sample_probe_2, ihm.flr.SampleProbeDetails) self.assertEqual(a.sample_probe_2._id, '2') self.assertIsInstance(a.forster_radius, ihm.flr.FRETForsterRadius) self.assertEqual(a.forster_radius._id, '11') self.assertEqual(a.type, 'intensity-based') self.assertIsInstance(a.dataset, ihm.dataset.Dataset) self.assertEqual(a.dataset._id, '18') self.assertIsInstance(a.external_file, ihm.location.Location) self.assertEqual(a.external_file._id, '42') self.assertIsInstance(a.software, ihm.Software) self.assertEqual(a.software._id, '99') b = flr._collection_flr_fret_analysis['2'] self.assertIsInstance(b.experiment, ihm.flr.Experiment) self.assertEqual(b.experiment._id, '13') self.assertIsInstance(b.sample_probe_1, ihm.flr.SampleProbeDetails) self.assertEqual(b.sample_probe_1._id, '24') self.assertIsInstance(b.sample_probe_2, ihm.flr.SampleProbeDetails) self.assertEqual(b.sample_probe_2._id, '5') self.assertIsInstance(b.forster_radius, ihm.flr.FRETForsterRadius) self.assertEqual(b.forster_radius._id, '19') self.assertEqual(b.type, 'lifetime-based') self.assertIsInstance(b.dataset, ihm.dataset.Dataset) self.assertEqual(b.dataset._id, '32') self.assertIsInstance(b.external_file, ihm.location.Location) self.assertEqual(b.external_file._id, '81') self.assertIsInstance(b.software, ihm.Software) self.assertEqual(b.software._id, '98') def test_flr_fret_analysis_intensity_handler(self): """Test FLRFretAnalysisIntensityHandler""" fh = StringIO(""" loop_ _flr_fret_analysis_intensity.ordinal_id _flr_fret_analysis_intensity.analysis_id _flr_fret_analysis_intensity.calibration_parameters_id _flr_fret_analysis_intensity.donor_only_fraction _flr_fret_analysis_intensity.chi_square_reduced _flr_fret_analysis_intensity.method_name _flr_fret_analysis_intensity.details 2 5 3 0.200 1.400 PDA Details """) s, = ihm.reader.read(fh) flr, = s.flr_data a = flr._collection_flr_fret_analysis['5'] self.assertEqual(a.type, 'intensity-based') self.assertIsInstance(a.calibration_parameters, ihm.flr.FRETCalibrationParameters) self.assertEqual(a.calibration_parameters._id, '3') self.assertAlmostEqual(a.donor_only_fraction, 0.2, delta=0.1) self.assertAlmostEqual(a.chi_square_reduced, 1.4, delta=0.1) self.assertEqual(a.method_name, 'PDA') self.assertEqual(a.details, 'Details') def test_flr_fret_analysis_lifetime_handler(self): """Test FLRFretAnalysisLifetimeHandler""" fh = StringIO(""" loop_ _flr_fret_analysis_lifetime.ordinal_id _flr_fret_analysis_lifetime.analysis_id _flr_fret_analysis_lifetime.reference_measurement_group_id _flr_fret_analysis_lifetime.lifetime_fit_model_id _flr_fret_analysis_lifetime.donor_only_fraction _flr_fret_analysis_lifetime.chi_square_reduced _flr_fret_analysis_lifetime.method_name _flr_fret_analysis_lifetime.details 4 2 19 23 0.300 1.500 'Lifetime fit' 'Details on lifetime fit' """) s, = ihm.reader.read(fh) flr, = s.flr_data a = flr._collection_flr_fret_analysis['2'] self.assertEqual(a.type, 'lifetime-based') self.assertIsInstance(a.ref_measurement_group, ihm.flr.RefMeasurementGroup) self.assertEqual(a.ref_measurement_group._id, '19') self.assertIsInstance(a.lifetime_fit_model, ihm.flr.LifetimeFitModel) self.assertEqual(a.lifetime_fit_model._id, '23') self.assertAlmostEqual(a.donor_only_fraction, 0.3, delta=0.1) self.assertAlmostEqual(a.chi_square_reduced, 1.5, delta=0.1) self.assertEqual(a.method_name, 'Lifetime fit') self.assertEqual(a.details, 'Details on lifetime fit') def test_flr_lifetime_fit_model_handler(self): 
"""Test FLRLifetimeFitModelHandler""" fh = StringIO(""" loop_ _flr_lifetime_fit_model.id _flr_lifetime_fit_model.name _flr_lifetime_fit_model.description _flr_lifetime_fit_model.external_file_id _flr_lifetime_fit_model.citation_id 1 'FitModel 15' 'Description of the fit model' 3 8 """) s, = ihm.reader.read(fh) flr, = s.flr_data f = flr._collection_flr_lifetime_fit_model['1'] self.assertEqual(f.name, 'FitModel 15') self.assertEqual(f.description, 'Description of the fit model') self.assertIsInstance(f.external_file, ihm.location.Location) self.assertEqual(f.external_file._id, '3') self.assertIsInstance(f.citation, ihm.Citation) self.assertEqual(f.citation._id, '8') def test_flr_ref_measurement_handler(self): """Test FLRRefMeasurementHandler""" fh = StringIO(""" loop_ _flr_reference_measurement.id _flr_reference_measurement.reference_sample_probe_id _flr_reference_measurement.num_species _flr_reference_measurement.details 4 9 2 Details1 """) s, = ihm.reader.read(fh) flr, = s.flr_data r = flr._collection_flr_ref_measurement['4'] self.assertIsInstance(r.ref_sample_probe, ihm.flr.SampleProbeDetails) self.assertEqual(r.ref_sample_probe._id, '9') self.assertEqual(r.details, 'Details1') # num_species is set automatically when adding lifetimes to the object self.assertEqual(r.num_species, 0) r.add_lifetime('1') r.add_lifetime('2') self.assertEqual(r.num_species, 2) def test_flr_ref_measurement_group_handler(self): """Test FLRRefMeasurementGroupHandler""" fh = StringIO(""" loop_ _flr_reference_measurement_group.id _flr_reference_measurement_group.num_measurements _flr_reference_measurement_group.details 5 3 Details """) s, = ihm.reader.read(fh) flr, = s.flr_data r = flr._collection_flr_ref_measurement_group['5'] self.assertEqual(r.details, 'Details') # num_measurements is set automatically when adding measurements # to the object self.assertEqual(r.num_measurements, 0) r.add_ref_measurement('1') self.assertEqual(r.num_measurements, 1) r.add_ref_measurement('2') self.assertEqual(r.num_measurements, 2) def test_flr_ref_measurement_group_link_handler(self): """Test FLRRefMeasurementGroupLinkHandler""" fh = StringIO(""" loop_ _flr_reference_measurement_group_link.group_id _flr_reference_measurement_group_link.reference_measurement_id 3 12 3 25 5 19 """) s, = ihm.reader.read(fh) flr, = s.flr_data g1 = flr._collection_flr_ref_measurement_group['3'] self.assertEqual(g1.num_measurements, 2) self.assertIsInstance(g1.ref_measurement_list[0], ihm.flr.RefMeasurement) self.assertEqual(g1.ref_measurement_list[0]._id, '12') self.assertIsInstance(g1.ref_measurement_list[1], ihm.flr.RefMeasurement) self.assertEqual(g1.ref_measurement_list[1]._id, '25') g2 = flr._collection_flr_ref_measurement_group['5'] self.assertEqual(g2.num_measurements, 1) self.assertIsInstance(g2.ref_measurement_list[0], ihm.flr.RefMeasurement) self.assertEqual(g2.ref_measurement_list[0]._id, '19') def test_flr_ref_measurement_lifetime_handler(self): """Test FLRRefMeasurementLifetimeHandler""" fh = StringIO(""" loop_ _flr_reference_measurement_lifetime.ordinal_id _flr_reference_measurement_lifetime.reference_measurement_id _flr_reference_measurement_lifetime.species_name _flr_reference_measurement_lifetime.species_fraction _flr_reference_measurement_lifetime.lifetime 1 15 species1 0.300 4.100 2 15 species2 0.700 2.100 3 12 species1 1.000 3.800 """) s, = ihm.reader.read(fh) flr, = s.flr_data # Check the lifetime objects themselves f1 = flr._collection_flr_ref_measurement_lifetime['1'] self.assertEqual(f1.species_name, 'species1') 
self.assertAlmostEqual(f1.species_fraction, 0.3, delta=0.1) self.assertAlmostEqual(f1.lifetime, 4.1, delta=0.1) f2 = flr._collection_flr_ref_measurement_lifetime['2'] self.assertEqual(f2.species_name, 'species2') self.assertAlmostEqual(f2.species_fraction, 0.7, delta=0.1) self.assertAlmostEqual(f2.lifetime, 2.1, delta=0.1) f3 = flr._collection_flr_ref_measurement_lifetime['3'] self.assertEqual(f3.species_name, 'species1') self.assertAlmostEqual(f3.species_fraction, 1.0, delta=0.1) self.assertAlmostEqual(f3.lifetime, 3.8, delta=0.1) # And check the respective reference measurement objects r1 = flr._collection_flr_ref_measurement['15'] self.assertIsInstance(r1.list_of_lifetimes[0], ihm.flr.RefMeasurementLifetime) self.assertEqual(r1.list_of_lifetimes[0].species_name, 'species1') self.assertAlmostEqual(r1.list_of_lifetimes[0].species_fraction, 0.3, delta=0.1) self.assertAlmostEqual(r1.list_of_lifetimes[0].lifetime, 4.1, delta=0.1) self.assertIsInstance(r1.list_of_lifetimes[1], ihm.flr.RefMeasurementLifetime) self.assertEqual(r1.list_of_lifetimes[1].species_name, 'species2') self.assertAlmostEqual(r1.list_of_lifetimes[1].species_fraction, 0.7, delta=0.1) self.assertAlmostEqual(r1.list_of_lifetimes[1].lifetime, 2.1, delta=0.1) r2 = flr._collection_flr_ref_measurement['12'] self.assertIsInstance(r2.list_of_lifetimes[0], ihm.flr.RefMeasurementLifetime) self.assertEqual(r2.list_of_lifetimes[0].species_name, 'species1') self.assertAlmostEqual(r2.list_of_lifetimes[0].species_fraction, 1.0, delta=0.1) self.assertAlmostEqual(r2.list_of_lifetimes[0].lifetime, 3.8, delta=0.1) def test_flr_peak_assignment_handler(self): """Test FLRPeakAssignmentHandler""" fh = StringIO(""" loop_ _flr_peak_assignment.id _flr_peak_assignment.method_name _flr_peak_assignment.details 1 Population 'Test details' """) s, = ihm.reader.read(fh) flr, = s.flr_data a = flr._collection_flr_peak_assignment['1'] self.assertEqual(a.method_name, 'Population') self.assertEqual(a.details, 'Test details') def test_flr_fret_distance_restraint_handler(self): """Test FLRFretDistanceRestraintHandler""" fh = StringIO(""" loop_ _flr_fret_distance_restraint.ordinal_id _flr_fret_distance_restraint.id _flr_fret_distance_restraint.group_id _flr_fret_distance_restraint.sample_probe_id_1 _flr_fret_distance_restraint.sample_probe_id_2 _flr_fret_distance_restraint.state_id _flr_fret_distance_restraint.analysis_id _flr_fret_distance_restraint.distance _flr_fret_distance_restraint.distance_error_plus _flr_fret_distance_restraint.distance_error_minus _flr_fret_distance_restraint.distance_type _flr_fret_distance_restraint.population_fraction _flr_fret_distance_restraint.peak_assignment_id 1 1 1 1 2 9 19 53.500 2.500 2.300 _E 0.800 42 2 2 1 3 4 8 18 49.000 2.000 2.100 _E 0.800 42 """) s, = ihm.reader.read(fh) flr, = s.flr_data self.assertEqual( sorted(flr._collection_flr_fret_distance_restraint_group.keys()), ['1']) self.assertEqual( sorted(flr._collection_flr_fret_distance_restraint.keys()), ['1', '2']) rg1, = flr.distance_restraint_groups r1, r2 = rg1.get_info() self.assertIsInstance(r1.sample_probe_1, ihm.flr.SampleProbeDetails) self.assertEqual(r1.sample_probe_1._id, '1') self.assertIsInstance(r1.sample_probe_2, ihm.flr.SampleProbeDetails) self.assertEqual(r1.sample_probe_2._id, '2') self.assertIsInstance(r1.state, ihm.model.State) self.assertEqual(r1.state._id, '9') self.assertIsInstance(r1.analysis, ihm.flr.FRETAnalysis) self.assertEqual(r1.analysis._id, '19') self.assertAlmostEqual(r1.distance, 53.500, delta=0.1) 
self.assertAlmostEqual(r1.distance_error_plus, 2.500, delta=0.1) self.assertAlmostEqual(r1.distance_error_minus, 2.300, delta=0.1) self.assertEqual(r1.distance_type, "_E") self.assertAlmostEqual(r1.population_fraction, 0.800, delta=0.1) self.assertIsInstance(r1.peak_assignment, ihm.flr.PeakAssignment) self.assertEqual(r1.peak_assignment._id, '42') self.assertEqual(rg1.distance_restraint_list, [r1, r2]) def test_flr_fret_model_quality_handler(self): """Test FLRFretModelQualityHandler""" fh = StringIO(""" loop_ _flr_fret_model_quality.model_id _flr_fret_model_quality.chi_square_reduced _flr_fret_model_quality.dataset_group_id _flr_fret_model_quality.method _flr_fret_model_quality.details 1 1.300 42 foo bar """) s, = ihm.reader.read(fh) flr, = s.flr_data self.assertEqual(sorted(flr._collection_flr_fret_model_quality.keys()), ['1']) q1, = flr.fret_model_qualities self.assertIsInstance(q1.model, ihm.model.Model) self.assertEqual(q1.model._id, '1') self.assertAlmostEqual(q1.chi_square_reduced, 1.300, delta=0.1) self.assertIsInstance(q1.dataset_group, ihm.dataset.DatasetGroup) self.assertEqual(q1.dataset_group._id, '42') self.assertEqual(q1.method, 'foo') self.assertEqual(q1.details, 'bar') def test_flr_fret_model_distance_handler(self): """Test FLRFretModelDistanceHandler""" fh = StringIO(""" loop_ _flr_fret_model_distance.id _flr_fret_model_distance.restraint_id _flr_fret_model_distance.model_id _flr_fret_model_distance.distance _flr_fret_model_distance.distance_deviation 1 42 34 52.000 1.500 """) s, = ihm.reader.read(fh) flr, = s.flr_data self.assertEqual( sorted(flr._collection_flr_fret_model_distance.keys()), ['1']) d1, = flr.fret_model_distances self.assertIsInstance(d1.restraint, ihm.flr.FRETDistanceRestraint) self.assertEqual(d1.restraint._id, '42') self.assertIsInstance(d1.model, ihm.model.Model) self.assertEqual(d1.model._id, '34') self.assertAlmostEqual(d1.distance, 52.000, delta=0.1) self.assertAlmostEqual(d1.distance_deviation, 1.500, delta=0.1) def test_flr_fps_global_parameter_handler(self): """Test FLRFPSGlobalParameterHandler""" fh = StringIO(""" loop_ _flr_FPS_global_parameter.id _flr_FPS_global_parameter.forster_radius_value _flr_FPS_global_parameter.conversion_function_polynom_order _flr_FPS_global_parameter.repetition _flr_FPS_global_parameter.AV_grid_rel _flr_FPS_global_parameter.AV_min_grid_A _flr_FPS_global_parameter.AV_allowed_sphere _flr_FPS_global_parameter.AV_search_nodes _flr_FPS_global_parameter.AV_E_samples_k _flr_FPS_global_parameter.sim_viscosity_adjustment _flr_FPS_global_parameter.sim_dt_adjustment _flr_FPS_global_parameter.sim_max_iter_k _flr_FPS_global_parameter.sim_max_force _flr_FPS_global_parameter.sim_clash_tolerance_A _flr_FPS_global_parameter.sim_reciprocal_kT _flr_FPS_global_parameter.sim_clash_potential _flr_FPS_global_parameter.convergence_E _flr_FPS_global_parameter.convergence_K _flr_FPS_global_parameter.convergence_F _flr_FPS_global_parameter.convergence_T 1 52 3 1000 0.200 0.400 0.500 3 200 1 1 200 400 1 10 ^2 100 0.001 0.001 0.002 """) s, = ihm.reader.read(fh) flr, = s.flr_data self.assertEqual( sorted(flr._collection_flr_fps_global_parameters.keys()), ['1']) p1 = flr._collection_flr_fps_global_parameters['1'] self.assertAlmostEqual(p1.forster_radius, 52.000, delta=0.1) self.assertEqual(p1.conversion_function_polynom_order, 3) self.assertEqual(p1.repetition, 1000) self.assertAlmostEqual(p1.av_grid_rel, 0.200, delta=0.1) self.assertAlmostEqual(p1.av_min_grid_a, 0.400, delta=0.1) self.assertAlmostEqual(p1.av_allowed_sphere, 0.500, delta=0.1) 
self.assertEqual(p1.av_search_nodes, 3) self.assertAlmostEqual(p1.av_e_samples_k, 200, delta=0.1) self.assertAlmostEqual(p1.sim_viscosity_adjustment, 1, delta=0.1) self.assertAlmostEqual(p1.sim_dt_adjustment, 1, delta=0.1) self.assertEqual(p1.sim_max_iter_k, 200) self.assertAlmostEqual(p1.sim_max_force, 400, delta=0.1) self.assertAlmostEqual(p1.sim_clash_tolerance_a, 1, delta=0.1) self.assertAlmostEqual(p1.sim_reciprocal_kt, 10, delta=0.1) self.assertEqual(p1.sim_clash_potential, "^2") self.assertAlmostEqual(p1.convergence_e, 100, delta=0.1) self.assertAlmostEqual(p1.convergence_k, 0.001, delta=0.001) self.assertAlmostEqual(p1.convergence_f, 0.001, delta=0.001) self.assertAlmostEqual(p1.convergence_t, 0.002, delta=0.001) def test_flr_fps_modeling_handler(self): """Test FLRFPSModelingHandler""" fh = StringIO(""" loop_ _flr_FPS_modeling.id _flr_FPS_modeling.ihm_modeling_protocol_ordinal_id _flr_FPS_modeling.restraint_group_id _flr_FPS_modeling.global_parameter_id _flr_FPS_modeling.probe_modeling_method _flr_FPS_modeling.details 1 8 9 10 AV3 "test details" """) s, = ihm.reader.read(fh) flr, = s.flr_data self.assertEqual(sorted(flr._collection_flr_fps_modeling.keys()), ['1']) m1 = flr._collection_flr_fps_modeling['1'] self.assertIsInstance(m1.protocol, ihm.protocol.Protocol) self.assertEqual(m1.protocol._id, '8') self.assertIsInstance(m1.restraint_group, ihm.flr.FRETDistanceRestraintGroup) self.assertEqual(m1.restraint_group._id, '9') self.assertIsInstance(m1.global_parameter, ihm.flr.FPSGlobalParameters) self.assertEqual(m1.global_parameter._id, '10') self.assertEqual(m1.probe_modeling_method, 'AV3') self.assertEqual(m1.details, 'test details') def test_flr_fps_av_parameter_handler(self): """Test FLRFPSAVParameterHandler""" fh = StringIO(""" loop_ _flr_FPS_AV_parameter.id _flr_FPS_AV_parameter.num_linker_atoms _flr_FPS_AV_parameter.linker_length _flr_FPS_AV_parameter.linker_width _flr_FPS_AV_parameter.probe_radius_1 _flr_FPS_AV_parameter.probe_radius_2 _flr_FPS_AV_parameter.probe_radius_3 1 15 20.000 3.500 10.000 5.000 4.000 """) s, = ihm.reader.read(fh) flr, = s.flr_data self.assertEqual(sorted(flr._collection_flr_fps_av_parameter.keys()), ['1']) p = flr._collection_flr_fps_av_parameter['1'] self.assertEqual(p.num_linker_atoms, 15) self.assertAlmostEqual(p.linker_length, 20.000, delta=0.1) self.assertAlmostEqual(p.linker_width, 3.500, delta=0.1) self.assertAlmostEqual(p.probe_radius_1, 10.000, delta=0.1) self.assertAlmostEqual(p.probe_radius_2, 5.000, delta=0.1) self.assertAlmostEqual(p.probe_radius_3, 4.000, delta=0.1) def test_flr_fps_av_modeling_handler(self): """Test FLRFPSAVModelingHandler""" fh = StringIO(""" loop_ _flr_FPS_modeling.id _flr_FPS_modeling.ihm_modeling_protocol_ordinal_id _flr_FPS_modeling.restraint_group_id _flr_FPS_modeling.global_parameter_id _flr_FPS_modeling.probe_modeling_method _flr_FPS_modeling.details 3 4 5 6 AV3 . 
# loop_ _flr_FPS_AV_modeling.id _flr_FPS_AV_modeling.sample_probe_id _flr_FPS_AV_modeling.FPS_modeling_id _flr_FPS_AV_modeling.parameter_id 1 2 3 4 """) s, = ihm.reader.read(fh) flr, = s.flr_data self.assertEqual(sorted(flr._collection_flr_fps_av_modeling.keys()), ['1']) m = flr._collection_flr_fps_av_modeling['1'] self.assertIsInstance(m.sample_probe, ihm.flr.SampleProbeDetails) self.assertEqual(m.sample_probe._id, '2') self.assertIsInstance(m.fps_modeling, ihm.flr.FPSModeling) self.assertEqual(m.fps_modeling._id, '3') self.assertIsInstance(m.parameter, ihm.flr.FPSAVParameter) self.assertEqual(m.parameter._id, '4') def test_flr_fps_mean_probe_position_handler(self): """Test FLRFPSMPPHandler""" fh = StringIO(""" loop_ _flr_FPS_mean_probe_position.id _flr_FPS_mean_probe_position.sample_probe_id _flr_FPS_mean_probe_position.mpp_xcoord _flr_FPS_mean_probe_position.mpp_ycoord _flr_FPS_mean_probe_position.mpp_zcoord 1 2 1.000 2.000 3.000 """) s, = ihm.reader.read(fh) flr, = s.flr_data self.assertEqual(sorted( flr._collection_flr_fps_mean_probe_position.keys()), ['1']) p = flr._collection_flr_fps_mean_probe_position['1'] self.assertIsInstance(p.sample_probe, ihm.flr.SampleProbeDetails) self.assertEqual(p.sample_probe._id, '2') self.assertAlmostEqual(p.x, 1.0, delta=0.1) self.assertAlmostEqual(p.y, 2.0, delta=0.1) self.assertAlmostEqual(p.z, 3.0, delta=0.1) def test_flr_fps_mpp_atom_position_handler(self): """Test FLRFPSMPPAtomPositionHandler""" fh = StringIO(""" loop_ _flr_FPS_MPP_atom_position.id _flr_FPS_MPP_atom_position.entity_id _flr_FPS_MPP_atom_position.seq_id _flr_FPS_MPP_atom_position.comp_id _flr_FPS_MPP_atom_position.atom_id _flr_FPS_MPP_atom_position.asym_id _flr_FPS_MPP_atom_position.xcoord _flr_FPS_MPP_atom_position.ycoord _flr_FPS_MPP_atom_position.zcoord _flr_FPS_MPP_atom_position.group_id 1 1 4 ALA CA A 1.000 2.000 3.000 1 """) s, = ihm.reader.read(fh) flr, = s.flr_data self.assertEqual( sorted(flr._collection_flr_fps_mpp_atom_position.keys()), ['1']) p = flr._collection_flr_fps_mpp_atom_position['1'] self.assertIsInstance(p.atom, ihm.Atom) self.assertEqual(p.atom.id, 'CA') self.assertEqual(p.atom.seq_id, 4) self.assertEqual(p.atom.asym._id, 'A') self.assertAlmostEqual(p.x, 1.0, delta=0.1) self.assertAlmostEqual(p.y, 2.0, delta=0.1) self.assertAlmostEqual(p.z, 3.0, delta=0.1) def test_flr_fps_mpp_modeling_handler(self): """Test FLRFPSMPPModelingHandler""" fh = StringIO(""" loop_ _flr_FPS_modeling.id _flr_FPS_modeling.ihm_modeling_protocol_ordinal_id _flr_FPS_modeling.restraint_group_id _flr_FPS_modeling.global_parameter_id _flr_FPS_modeling.probe_modeling_method _flr_FPS_modeling.details 3 4 5 6 AV3 . 
# loop_ _flr_FPS_MPP_modeling.ordinal_id _flr_FPS_MPP_modeling.FPS_modeling_id _flr_FPS_MPP_modeling.mpp_id _flr_FPS_MPP_modeling.mpp_atom_position_group_id 1 3 4 5 """) s, = ihm.reader.read(fh) flr, = s.flr_data self.assertEqual(sorted(flr._collection_flr_fps_mpp_modeling.keys()), ['1']) m = flr._collection_flr_fps_mpp_modeling['1'] self.assertIsInstance(m.fps_modeling, ihm.flr.FPSModeling) self.assertEqual(m.fps_modeling._id, '3') self.assertIsInstance(m.mpp, ihm.flr.FPSMeanProbePosition) self.assertEqual(m.mpp._id, '4') self.assertIsInstance(m.mpp_atom_position_group, ihm.flr.FPSMPPAtomPositionGroup) self.assertEqual(m.mpp_atom_position_group._id, '5') def test_flr_kinetic_rate_fret_analysis_connection_handler(self): """Test FLRKineticRateFretAnalysisConnectionHandler""" mss_cif = """ loop_ _ihm_multi_state_scheme.id _ihm_multi_state_scheme.name _ihm_multi_state_scheme.details 1 'scheme1' 'details1' 2 'scheme2' 'details2' # # loop_ _ihm_multi_state_scheme_connectivity.id _ihm_multi_state_scheme_connectivity.scheme_id _ihm_multi_state_scheme_connectivity.begin_state_id _ihm_multi_state_scheme_connectivity.end_state_id _ihm_multi_state_scheme_connectivity.dataset_group_id _ihm_multi_state_scheme_connectivity.details 1 1 1 2 10 'connectivity1' 2 1 2 1 11 'connectivity2' 3 2 1 2 10 'connectivity1' # """ rate_cif = """ # loop_ _ihm_kinetic_rate.id _ihm_kinetic_rate.transition_rate_constant _ihm_kinetic_rate.equilibrium_constant _ihm_kinetic_rate.equilibrium_constant_determination_method _ihm_kinetic_rate.equilibrium_constant_unit _ihm_kinetic_rate.details _ihm_kinetic_rate.scheme_connectivity_id _ihm_kinetic_rate.dataset_group_id _ihm_kinetic_rate.external_file_id 51 3.0 . . . 'rate1' 1 4 5 52 . 6.5 'equilibrium constant is determined from population' . 'rate2' 2 . . 53 7.0 . . . 
'rate3' 3 8 9 # """ flr_cif = """ # loop_ _flr_fret_analysis.id _flr_fret_analysis.experiment_id _flr_fret_analysis.type _flr_fret_analysis.sample_probe_id_1 _flr_fret_analysis.sample_probe_id_2 _flr_fret_analysis.forster_radius_id _flr_fret_analysis.dataset_list_id _flr_fret_analysis.external_file_id _flr_fret_analysis.software_id 1 108 intensity-based 9 2 11 18 42 99 5 113 lifetime-based 24 5 19 32 81 98 # # loop_ _flr_kinetic_rate_analysis.id _flr_kinetic_rate_analysis.fret_analysis_id _flr_kinetic_rate_analysis.kinetic_rate_id _flr_kinetic_rate_analysis.details 1 1 51 details1 2 1 52 details2 3 5 51 details3 4 8 54 details4 """ # Order of categories should not matter for cif in ( mss_cif + rate_cif + flr_cif, mss_cif + flr_cif + rate_cif, rate_cif + mss_cif + flr_cif, rate_cif + flr_cif + mss_cif, flr_cif + mss_cif + rate_cif, flr_cif + rate_cif + mss_cif): for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) flr, = s.flr_data con = flr.kinetic_rate_fret_analysis_connections self.assertEqual(len(con), 4) self.assertEqual(con[0].fret_analysis._id, '1') self.assertEqual(con[0].kinetic_rate._id, '51') self.assertEqual(con[0].details, 'details1') self.assertEqual(con[1].fret_analysis._id, '1') self.assertEqual(con[1].kinetic_rate._id, '52') self.assertEqual(con[1].details, 'details2') self.assertEqual(con[2].fret_analysis._id, '5') self.assertEqual(con[2].kinetic_rate._id, '51') self.assertEqual(con[2].details, 'details3') # The last one is auto generated self.assertEqual(con[3].fret_analysis._id, '8') self.assertEqual(con[3].kinetic_rate._id, '54') self.assertEqual(con[3].details, 'details4') # Test the _all_relaxation_times() function self.assertEqual([x._id for x in list(s._all_kinetic_rates())], ['51', '52', '53', '54']) def test_flr_relaxation_time_fret_analysis_connection_handler(self): """Test FLRRelaxationTimeFretAnalysisConnectionHandler""" mss_cif = """ loop_ _ihm_multi_state_scheme.id _ihm_multi_state_scheme.name _ihm_multi_state_scheme.details 1 'scheme1' 'details1' 2 'scheme2' 'details2' # # loop_ _ihm_multi_state_scheme_connectivity.id _ihm_multi_state_scheme_connectivity.scheme_id _ihm_multi_state_scheme_connectivity.begin_state_id _ihm_multi_state_scheme_connectivity.end_state_id _ihm_multi_state_scheme_connectivity.dataset_group_id _ihm_multi_state_scheme_connectivity.details 1 1 1 2 10 'connectivity1' 2 1 2 1 11 'connectivity2' 3 2 1 2 10 'connectivity1' # """ relaxation_time_cif = """ # loop_ _ihm_relaxation_time.id _ihm_relaxation_time.value _ihm_relaxation_time.unit _ihm_relaxation_time.amplitude _ihm_relaxation_time.dataset_group_id _ihm_relaxation_time.external_file_id _ihm_relaxation_time.details 101 10.0 seconds 0.5 20 21 'relaxation_time1' 102 11.5 milliseconds . 22 23 'relaxation_time2' 103 12.0 seconds 0.4 24 25 'relaxation_time3' 104 16.0 seconds 0.6 25 25 'relaxation_time4' # # loop_ _ihm_relaxation_time_multi_state_scheme.id _ihm_relaxation_time_multi_state_scheme.relaxation_time_id _ihm_relaxation_time_multi_state_scheme.scheme_id _ihm_relaxation_time_multi_state_scheme.scheme_connectivity_id _ihm_relaxation_time_multi_state_scheme.details 1 101 1 1 . 2 102 1 2 . 3 103 1 . . 4 104 1 . . 
# """ flr_cif = """ # loop_ _flr_fret_analysis.id _flr_fret_analysis.experiment_id _flr_fret_analysis.type _flr_fret_analysis.sample_probe_id_1 _flr_fret_analysis.sample_probe_id_2 _flr_fret_analysis.forster_radius_id _flr_fret_analysis.dataset_list_id _flr_fret_analysis.external_file_id _flr_fret_analysis.software_id 1 108 intensity-based 9 2 11 18 42 99 5 113 lifetime-based 24 5 19 32 81 98 # # loop_ _flr_relaxation_time_analysis.id _flr_relaxation_time_analysis.fret_analysis_id _flr_relaxation_time_analysis.relaxation_time_id _flr_relaxation_time_analysis.details 1 1 101 details1 2 1 102 details2 3 5 101 details3 4 8 103 details4 """ # Order of categories should not matter for cif in ( mss_cif + relaxation_time_cif + flr_cif, mss_cif + flr_cif + relaxation_time_cif, relaxation_time_cif + mss_cif + flr_cif, relaxation_time_cif + flr_cif + mss_cif, flr_cif + mss_cif + relaxation_time_cif, flr_cif + relaxation_time_cif + mss_cif): for fh in cif_file_handles(cif): s, = ihm.reader.read(fh) flr, = s.flr_data con = flr.relaxation_time_fret_analysis_connections self.assertEqual(len(con), 4) self.assertEqual(con[0].fret_analysis._id, '1') self.assertEqual(con[0].relaxation_time._id, '101') self.assertEqual(con[0].details, 'details1') self.assertEqual(con[1].fret_analysis._id, '1') self.assertEqual(con[1].relaxation_time._id, '102') self.assertEqual(con[1].details, 'details2') self.assertEqual(con[2].fret_analysis._id, '5') self.assertEqual(con[2].relaxation_time._id, '101') self.assertEqual(con[2].details, 'details3') # The last one is auto generated self.assertEqual(con[3].fret_analysis._id, '8') self.assertEqual(con[3].relaxation_time._id, '103') self.assertEqual(con[3].details, 'details4') # Test the _all_relaxation_times() function self.assertEqual([x._id for x in list(s._all_relaxation_times())], ['103', '104', '101', '102']) def test_variant_base(self): """Test Variant base class""" v = ihm.reader.Variant() self.assertIsNone(v.get_handlers(None)) self.assertIsNone(v.get_audit_conform_handler(None)) def test_write_variant(self): """Test write() function with Variant object""" cif = "data_model\n_struct.entry_id testid\n" for fh in cif_file_handles(cif): s, = ihm.reader.read(fh, variant=ihm.reader.IHMVariant()) self.assertEqual(s.id, 'testid') def test_branch_scheme_handler(self): """Test BranchSchemeHandler""" fh = StringIO(""" loop_ _chem_comp.id _chem_comp.type _chem_comp.name _chem_comp.formula _chem_comp.formula_weight BGC 'D-saccharide, beta linking' beta-D-glucopyranose 'C6 H12 O6' 180.156 # loop_ _pdbx_entity_branch_list.entity_id _pdbx_entity_branch_list.num _pdbx_entity_branch_list.comp_id _pdbx_entity_branch_list.hetero 1 1 BGC . 1 2 BGC . 1 3 BGC . 1 4 BGC . # loop_ _struct_asym.id _struct_asym.entity_id _struct_asym.details A 1 foo B 1 bar C 1 baz # loop_ _pdbx_branch_scheme.asym_id _pdbx_branch_scheme.entity_id _pdbx_branch_scheme.mon_id _pdbx_branch_scheme.num _pdbx_branch_scheme.pdb_seq_num _pdbx_branch_scheme.auth_seq_num _pdbx_branch_scheme.auth_mon_id _pdbx_branch_scheme.pdb_asym_id A 1 BGC 1 5 5 BGC 0 A 1 BGC 2 6 6 BGC 0 A 1 BGC 3 7 7 BGC 0 A 1 BGC 4 8 8 BGC 0 B 1 BGC 1 1 11 BGC . B 1 BGC 2 2 12 BGC . B 1 BGC 3 3 13 BGC . B 1 BGC 4 4 14 BGC . C 1 BGC 1 2 . BGC . C 1 BGC 2 4 . BGC . C 1 BGC 3 6 . BGC . C 1 BGC 4 8 . BGC . 
""") s, = ihm.reader.read(fh) asym_a, asym_b, asym_c = s.asym_units self.assertEqual(asym_a.auth_seq_id_map, {1: (5, None), 2: (6, None), 3: (7, None), 4: (8, None)}) self.assertEqual(asym_a._strand_id, '0') self.assertEqual(asym_a.residue(1).auth_seq_id, 5) self.assertIsNone(asym_a.orig_auth_seq_id_map) self.assertIsNone(asym_a.num_map) self.assertEqual(asym_b.auth_seq_id_map, {1: (1, None), 2: (2, None), 3: (3, None), 4: (4, None)}) self.assertIsNone(asym_b._strand_id) self.assertEqual(asym_b.residue(1).auth_seq_id, 1) self.assertEqual(asym_b.orig_auth_seq_id_map, {1: 11, 2: 12, 3: 13, 4: 14}) self.assertIsNone(asym_b.num_map) self.assertEqual(asym_c.auth_seq_id_map, {1: (2, None), 2: (4, None), 3: (6, None), 4: (8, None)}) self.assertIsNone(asym_c._strand_id) self.assertEqual(asym_c.residue(1).auth_seq_id, 2) self.assertEqual(asym_c.orig_auth_seq_id_map, {1: None, 2: None, 3: None, 4: None}) self.assertIsNone(asym_c.num_map) def test_entity_branch_list_handler(self): """Test EntityBranchListHandler""" fh = StringIO(""" loop_ _pdbx_entity_branch_list.entity_id _pdbx_entity_branch_list.num _pdbx_entity_branch_list.comp_id _pdbx_entity_branch_list.hetero 1 1 BGC . 1 2 BGC . 1 3 BGC . 1 4 BGC . """) s, = ihm.reader.read(fh) e1, = s.entities c1, c2, c3, c4 = e1.sequence self.assertEqual([c.id for c in e1.sequence], ['BGC'] * 4) def test_entity_branch_descriptor_handler(self): """Test EntityBranchDescriptorHandler""" fh = StringIO(""" loop_ _pdbx_entity_branch_descriptor.ordinal _pdbx_entity_branch_descriptor.entity_id _pdbx_entity_branch_descriptor.descriptor _pdbx_entity_branch_descriptor.type _pdbx_entity_branch_descriptor.program _pdbx_entity_branch_descriptor.program_version 1 1 foo typ1 prog 1.0 2 1 bar typ2 . . """) s, = ihm.reader.read(fh) e1, = s.entities bd1, bd2 = e1.branch_descriptors self.assertEqual(bd1.text, 'foo') self.assertEqual(bd1.type, 'typ1') self.assertEqual(bd1.program, 'prog') self.assertEqual(bd1.program_version, '1.0') self.assertEqual(bd2.text, 'bar') self.assertEqual(bd2.type, 'typ2') self.assertIsNone(bd2.program) self.assertIsNone(bd2.program_version) def test_entity_branch_link_handler(self): """Test EntityBranchLinkHandler""" fh = StringIO(""" loop_ _pdbx_entity_branch_link.link_id _pdbx_entity_branch_link.entity_id _pdbx_entity_branch_link.entity_branch_list_num_1 _pdbx_entity_branch_link.comp_id_1 _pdbx_entity_branch_link.atom_id_1 _pdbx_entity_branch_link.leaving_atom_id_1 _pdbx_entity_branch_link.entity_branch_list_num_2 _pdbx_entity_branch_link.comp_id_2 _pdbx_entity_branch_link.atom_id_2 _pdbx_entity_branch_link.leaving_atom_id_2 _pdbx_entity_branch_link.value_order _pdbx_entity_branch_link.details 1 1 1 NAG CA H1 2 BMC N H2 sing foo 2 1 2 BMC CA H1 3 FUC N H2 . . 
""") s, = ihm.reader.read(fh) e1, = s.entities lnk1, lnk2 = e1.branch_links self.assertEqual(lnk1.num1, 1) self.assertEqual(lnk1.atom_id1, 'CA') self.assertEqual(lnk1.leaving_atom_id1, 'H1') self.assertEqual(lnk1.num2, 2) self.assertEqual(lnk1.atom_id2, 'N') self.assertEqual(lnk1.leaving_atom_id2, 'H2') self.assertEqual(lnk1.order, 'sing') self.assertEqual(lnk1.details, 'foo') self.assertEqual(lnk2.num1, 2) self.assertEqual(lnk2.atom_id1, 'CA') self.assertEqual(lnk2.leaving_atom_id1, 'H1') self.assertEqual(lnk2.num2, 3) self.assertEqual(lnk2.atom_id2, 'N') self.assertEqual(lnk2.leaving_atom_id2, 'H2') self.assertIsNone(lnk2.order) self.assertIsNone(lnk2.details) def test_database_handler(self): """Test DatabaseHandler""" fh = StringIO(""" loop_ _database_2.database_id _database_2.database_code _database_2.pdbx_database_accession _database_2.pdbx_DOI foo bar . ? baz 1abc 1abcxyz 1.2.3.4 """) s, = ihm.reader.read(fh) d1, d2 = s.databases self.assertEqual(d1.id, 'foo') self.assertEqual(d1.code, 'bar') self.assertIsNone(d1.accession) self.assertIs(d1.doi, ihm.unknown) self.assertEqual(d2.id, 'baz') self.assertEqual(d2.code, '1abc') self.assertEqual(d2.accession, '1abcxyz') self.assertEqual(d2.doi, '1.2.3.4') def test_database_status_handler(self): """Test DatabaseStatusHandler""" fh = StringIO(""" _pdbx_database_status.status_code REL _pdbx_database_status.entry_id 5FD1 _pdbx_database_status.recvd_initial_deposition_date 1993-06-29 _pdbx_database_status.deposit_site ? _pdbx_database_status.process_site BNL _pdbx_database_status.SG_entry . """) s, = ihm.reader.read(fh) # Should pass through to a dict self.assertEqual(s._database_status, {'status_code': 'REL', 'entry_id': '5FD1', 'recvd_initial_deposition_date': '1993-06-29', 'deposit_site': ihm.unknown, 'process_site': 'BNL', 'sg_entry': None}) # Also test public interface for selected data items self.assertEqual(s.database_status.status_code, 'REL') self.assertIs(s.database_status.deposit_site, ihm.unknown) self.assertEqual(s.database_status.process_site, 'BNL') self.assertEqual(s.database_status.recvd_initial_deposition_date, datetime.date(1993, 6, 29)) def test_add_to_system(self): """Test adding new mmCIF input to existing System""" s = ihm.System() e = ihm.Entity('AHC') e._id = '42' s.entities.append(e) fh = StringIO(""" loop_ _struct_asym.id _struct_asym.entity_id _struct_asym.details A 42 foo B 99 bar """) s2, = ihm.reader.read(fh, add_to_system=s) self.assertIs(s2, s) self.assertEqual(len(s.asym_units), 2) # asym A should point to existing entity self.assertEqual(s.asym_units[0].id, 'A') self.assertIs(s.asym_units[0].entity, e) if __name__ == '__main__': unittest.main() python-ihm-2.7/test/test_reference.py000066400000000000000000000117011503573337200200140ustar00rootroot00000000000000import utils import os import unittest import urllib.request TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) utils.set_search_paths(TOPDIR) import ihm.reference class Tests(unittest.TestCase): def test_reference(self): """Test Reference base class""" _ = ihm.reference.Reference() # noop def test_sequence(self): """Test Sequence class""" s = ihm.reference.Sequence( db_name='testdb', db_code='testcode', accession='testacc', sequence='CCCG', details='foo') s.alignments.append(ihm.reference.Alignment( db_begin=10, db_end=30, entity_begin=20, entity_end=40)) self.assertEqual(s.db_name, 'testdb') self.assertEqual(s.db_code, 'testcode') self.assertEqual(s.accession, 'testacc') self.assertEqual(s.sequence, 'CCCG') 
self.assertEqual(s.details, 'foo') a, = s._get_alignments() self.assertEqual(a.db_begin, 10) self.assertEqual(a.db_end, 30) self.assertEqual(a.entity_begin, 20) self.assertEqual(a.entity_end, 40) self.assertEqual(a.seq_dif, []) def test_sequence_default_alignment(self): """Test Sequence class with default Alignment""" s = ihm.reference.Sequence( db_name='testdb', db_code='testcode', accession='testacc', sequence='CCCG', details='foo') self.assertEqual(s.db_name, 'testdb') self.assertEqual(s.db_code, 'testcode') self.assertEqual(s.accession, 'testacc') self.assertEqual(s.sequence, 'CCCG') self.assertEqual(s.details, 'foo') a1, = s._get_alignments() a1a, = s._get_alignments() # should get same default alignment each time (get cache 2nd time) self.assertEqual(id(a1), id(a1a)) self.assertEqual(a1.db_begin, 1) self.assertIsNone(a1.db_end) self.assertEqual(a1.entity_begin, 1) self.assertIsNone(a1.entity_end) self.assertEqual(a1.seq_dif, []) def test_uniprot_sequence(self): """Test UniProtSequence class""" lpep = ihm.LPeptideAlphabet() sd = ihm.reference.SeqDif(seq_id=1, db_monomer=lpep['C'], monomer=lpep['W'], details='Test mutation') s = ihm.reference.UniProtSequence( db_code='testcode', accession='testacc', sequence='CCCG') s.alignments.append(ihm.reference.Alignment(seq_dif=[sd])) self.assertEqual(s.db_name, 'UNP') self.assertEqual(s.db_code, 'testcode') self.assertEqual(s.accession, 'testacc') self.assertEqual(s.sequence, 'CCCG') self.assertIsNone(s.details) a, = s.alignments self.assertEqual(len(a.seq_dif), 1) self.assertEqual(a.seq_dif[0].seq_id, 1) self.assertEqual(a.seq_dif[0].db_monomer.id, 'CYS') self.assertEqual(a.seq_dif[0].monomer.id, 'TRP') self.assertEqual(a.seq_dif[0].details, 'Test mutation') def _get_from_uniprot_accession(self, fasta_fname): def mock_urlopen(url): self.assertTrue(url.endswith('/testacc.fasta')) fname = utils.get_input_file_name(TOPDIR, fasta_fname) return open(fname, 'rb') # Need to mock out urllib.request so we don't hit the network # (expensive) every time we test try: orig_urlopen = urllib.request.urlopen urllib.request.urlopen = mock_urlopen return ihm.reference.UniProtSequence.from_accession('testacc') finally: urllib.request.urlopen = orig_urlopen def test_uniprot_sequence_from_accession(self): """Test UniProtSequence.from_accession()""" r = self._get_from_uniprot_accession('P52891.fasta') self.assertIsInstance(r, ihm.reference.UniProtSequence) self.assertEqual(r.db_code, 'NUP84_YEAST') self.assertEqual(r.accession, 'testacc') self.assertEqual(len(r.sequence), 726) self.assertEqual(r.sequence[:20], 'MELSPTYQTERFTKFSDTLK') self.assertEqual( r.details, 'Nucleoporin NUP84 OS=Saccharomyces cerevisiae (strain ATCC ' '204508 / S288c) OX=559292 GN=NUP84 PE=1 SV=1') def test_uniprot_sequence_from_accession_bad_header(self): """Test UniProtSequence.from_accession() with bad header""" self.assertRaises(ValueError, self._get_from_uniprot_accession, 'uniprot_bad_header.fasta') def test_uniprot_sequence_from_accession_no_details(self): """Test UniProtSequence.from_accession() with no details""" r = self._get_from_uniprot_accession('uniprot_no_details.fasta') self.assertIsInstance(r, ihm.reference.UniProtSequence) self.assertEqual(r.db_code, 'NUP84_YEAST') self.assertEqual(r.accession, 'testacc') self.assertEqual(len(r.sequence), 726) self.assertEqual(r.sequence[:20], 'MELSPTYQTERFTKFSDTLK') self.assertIsNone(r.details) if __name__ == '__main__': unittest.main() 
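# --- Illustrative usage sketch (not part of the original python-ihm test
# suite). The test_reference.py tests above exercise
# ihm.reference.UniProtSequence and ihm.reference.Alignment; a minimal,
# hedged example of attaching such a reference to an entity is shown below.
# The db_code/accession/sequence values are placeholders mirroring the tests;
# UniProtSequence.from_accession() could instead fetch the same data from
# UniProt, at the cost of network access.
import ihm
import ihm.reference

entity = ihm.Entity('CCCG')
ref = ihm.reference.UniProtSequence(db_code='testcode', accession='testacc',
                                    sequence='CCCG')
# Align database residues to entity residues starting at position 1;
# db_end/entity_end are left at their defaults, as in the default-alignment
# test above.
ref.alignments.append(ihm.reference.Alignment(db_begin=1, entity_begin=1))
entity.references.append(ref)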
python-ihm-2.7/test/test_report.py000066400000000000000000000170201503573337200173710ustar00rootroot00000000000000import unittest import utils import os from io import StringIO TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) utils.set_search_paths(TOPDIR) import ihm import ihm.report import ihm.reference import ihm.location import ihm.representation import ihm.protocol import ihm.analysis import ihm.restraint import ihm.geometry import ihm.model class Tests(unittest.TestCase): def test_report(self): """Test System.report()""" sio = StringIO() s = ihm.System(title='test system') s.report(sio) def test_entities(self): """Test report_entities""" sio = StringIO() s = ihm.System(title='test system') e = ihm.Entity("ACG") a = ihm.AsymUnit(e, "my asym") a.id = 'A' s.asym_units.append(a) s.entities.append(e) r = ihm.report.Reporter(s, sio) # Should warn about missing references self.assertWarns(ihm.report.MissingDataWarning, r.report_entities) uniprot = ihm.reference.UniProtSequence( db_code='testcode', accession='testacc', sequence='CCCG') e.references.append(uniprot) r.report_entities() def test_asyms(self): """Test report_asyms""" sio = StringIO() s = ihm.System(title='test system') e = ihm.Entity("ACG") s.entities.append(e) a = ihm.AsymUnit(e, "my asym") s.asym_units.append(a) r = ihm.report.Reporter(s, sio) r.report_asyms() def test_representations(self): """Test report_representations""" sio = StringIO() s = ihm.System(title='test system') e = ihm.Entity("ACGT") s.entities.append(e) a = ihm.AsymUnit(e, "my asym") s.asym_units.append(a) s1 = ihm.representation.ResidueSegment( a(1, 2), starting_model=None, rigid=False, primitive='sphere') s2 = ihm.representation.FeatureSegment( a(3, 4), starting_model=None, rigid=True, primitive='other', count=3) r1 = ihm.representation.Representation((s1, s2), name='foo') s.orphan_representations.append(r1) r = ihm.report.Reporter(s, sio) # Test report of representation without ID r.report_representations() # Test report of representation with ID r1._id = 42 r.report_representations() def test_citations(self): """Test report_citations""" sio = StringIO() s = ihm.System(title='test system') c = ihm.Citation(pmid="foo", title="bar", journal="j", volume=1, page_range=(10, 20), year=2023, authors=["foo", "bar"], doi="test") s.citations.append(c) r = ihm.report.Reporter(s, sio) r.report_citations() def test_software(self): """Test report_software""" sio = StringIO() s = ihm.System(title='test system') soft = ihm.Software(name='foo', version='1.0', classification='1', description='2', location='3') s.software.append(soft) r = ihm.report.Reporter(s, sio) # Should warn about missing citation self.assertWarns(ihm.report.MissingDataWarning, r.report_software) c = ihm.Citation(pmid="foo", title="bar", journal="j", volume=1, page_range=(10, 20), year=2023, authors=["foo", "bar"], doi="test") soft.citation = c soft.version = None r.report_software() def test_databases(self): """Test report_databases""" sio = StringIO() s = ihm.System(title='test system') s.locations.append( ihm.location.BMRBLocation('27600', version='foo', details='bar')) s.locations.append( ihm.location.FileLocation(repo='mydoi', path='a')) r = ihm.report.Reporter(s, sio) r.report_databases() def test_files(self): """Test report_files""" sio = StringIO() s = ihm.System(title='test system') repo = ihm.location.Repository(doi='1.2.3.4') s.locations.append( ihm.location.BMRBLocation('27600', version='foo', details='bar')) s.locations.append( ihm.location.FileLocation(repo=repo, 
path='a')) r = ihm.report.Reporter(s, sio) r.report_files() def test_files_local(self): """Test report_files with local files""" sio = StringIO() s = ihm.System(title='test system') s.locations.append( ihm.location.BMRBLocation('27600', version='foo', details='bar')) s.locations.append( ihm.location.FileLocation(repo=None, path='.')) r = ihm.report.Reporter(s, sio) # Should warn about local files self.assertWarns(ihm.report.LocalFilesWarning, r.report_files) def test_protocols(self): """Test report_protocols""" sio = StringIO() s = ihm.System(title='test system') prot = ihm.protocol.Protocol(name='foo') prot.steps.append(ihm.protocol.Step( assembly='foo', dataset_group='bar', method='baz', num_models_begin=0, num_models_end=42)) analysis = ihm.analysis.Analysis() analysis.steps.append(ihm.analysis.ClusterStep( feature='RMSD', num_models_begin=42, num_models_end=5)) prot.analyses.append(analysis) s.orphan_protocols.append(prot) r = ihm.report.Reporter(s, sio) r.report_protocols() def test_restraints(self): """Test report_restraints""" sio = StringIO() s = ihm.System(title='test system') dist = ihm.restraint.UpperBoundDistanceRestraint(42.0) geom = ihm.geometry.XAxis(name='foo', description='bar') rsr = ihm.restraint.GeometricRestraint( dataset='foo', geometric_object=geom, feature='feat', distance=dist) s.restraints.append(rsr) r = ihm.report.Reporter(s, sio) r.report_restraints() def test_models(self): """Test report_models""" sio = StringIO() s = ihm.System(title='test system') sg = ihm.model.StateGroup() state = ihm.model.State(name='foo') mg = ihm.model.ModelGroup(name='bar') m = ihm.model.Model(assembly='foo', protocol='bar', representation='baz') mg.append(m) state.append(mg) sg.append(state) s.state_groups.append(sg) r = ihm.report.Reporter(s, sio) r.report_models() def test_ensembles(self): """Test report_ensembles""" sio = StringIO() s = ihm.System(title='test system') e = ihm.Entity("ACG") a = ihm.AsymUnit(e, "my asym") a.id = 'A' s.asym_units.append(a) s.entities.append(e) e1 = ihm.model.Ensemble(model_group=None, num_models=10) e2 = ihm.model.Ensemble(model_group=None, num_models=5, precision=1.0, name='test ensemble') mg = ihm.model.ModelGroup(name='bar') e3 = ihm.model.Ensemble(model_group=mg, num_models=1, file='file') e3.densities.append(ihm.model.LocalizationDensity( file='foo', asym_unit=a(1, 2))) s.ensembles.extend((e1, e2, e3)) r = ihm.report.Reporter(s, sio) r.report_ensembles() # Should warn about extra models but no external file e4 = ihm.model.Ensemble(model_group=mg, num_models=1) s.ensembles.append(e4) self.assertWarns(ihm.report.MissingFileWarning, r.report_ensembles) if __name__ == '__main__': unittest.main() python-ihm-2.7/test/test_representation.py000066400000000000000000000066201503573337200211240ustar00rootroot00000000000000import utils import os import unittest TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) utils.set_search_paths(TOPDIR) import ihm.representation class Tests(unittest.TestCase): def test_segment(self): """Test Segment base class""" seg = ihm.representation.Segment() # does nothing _ = seg._get_report() def test_atomic_segment(self): """Test AtomicSegment class""" asym = ihm.AsymUnit(ihm.Entity('A' * 30), "testdetail") s = ihm.representation.AtomicSegment( asym_unit=asym(1, 10), rigid=True, starting_model=None) self.assertEqual(s.asym_unit.seq_id_range, (1, 10)) self.assertEqual(s.primitive, 'atomistic') self.assertEqual(s.granularity, 'by-atom') self.assertIsNone(s.count) self.assertEqual(s.rigid, True) 
self.assertEqual(s._get_report(), "testdetail 1-10 as rigid atoms") def test_residue_segment(self): """Test ResidueSegment class""" asym = ihm.AsymUnit(ihm.Entity('AAAA'), "testdetail") s = ihm.representation.ResidueSegment( asym_unit=asym, rigid=True, primitive='sphere') self.assertEqual(s.asym_unit.seq_id_range, (1, 4)) self.assertEqual(s.primitive, 'sphere') self.assertEqual(s.granularity, 'by-residue') self.assertIsNone(s.count) self.assertEqual(s.rigid, True) self.assertEqual(s._get_report(), "testdetail 1-4 as rigid residues") def test_residue_segment_starting_model(self): """Test ResidueSegment class with starting model""" class MockObject: pass asym = ihm.AsymUnit(ihm.Entity('AA'), "testdetail") sm = MockObject() s = ihm.representation.ResidueSegment( asym_unit=asym, rigid=False, primitive='sphere', starting_model=sm) self.assertEqual( s._get_report(), "testdetail 1-2 as flexible residues (from starting model)") sm._id = '42' self.assertEqual( s._get_report(), "testdetail 1-2 as flexible residues (from starting model 42)") def test_multi_residue_segment(self): """Test MultiResidueSegment class""" asym = ihm.AsymUnit(ihm.Entity('AAAA')) s = ihm.representation.MultiResidueSegment( asym_unit=asym, rigid=True, primitive='sphere') self.assertEqual(s.primitive, 'sphere') self.assertEqual(s.granularity, 'multi-residue') self.assertIsNone(s.count) self.assertEqual(s.rigid, True) def test_feature_segment(self): """Test FeatureSegment class""" asym = ihm.AsymUnit(ihm.Entity('AAAA'), "testdetail") s = ihm.representation.FeatureSegment( asym_unit=asym, rigid=True, primitive='sphere', count=2) self.assertEqual(s.primitive, 'sphere') self.assertEqual(s.granularity, 'by-feature') self.assertEqual(s.count, 2) self.assertEqual(s.rigid, True) self.assertEqual(s._get_report(), 'testdetail 1-4 as 2 rigid features (sphere)') def test_representation(self): """Test Representation class""" asym = ihm.AsymUnit(ihm.Entity('AAAA')) s = ihm.representation.AtomicSegment(asym_unit=asym, rigid=True) r = ihm.representation.Representation() r.append(s) self.assertEqual(len(r), 1) if __name__ == '__main__': unittest.main() python-ihm-2.7/test/test_restraint.py000066400000000000000000000253541503573337200201020ustar00rootroot00000000000000import utils import os import unittest TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) utils.set_search_paths(TOPDIR) import ihm.restraint import ihm.geometry class Tests(unittest.TestCase): def test_restraint(self): """Test Restraint base class""" r = ihm.restraint.Restraint() # does nothing _ = r._get_report() def test_em3d_restraint_fit(self): """Test EM3DRestraintFit class""" f = ihm.restraint.EM3DRestraintFit(0.4) self.assertAlmostEqual(f.cross_correlation_coefficient, 0.4, delta=0.1) def test_em3d_restraint(self): """Test EM3DRestraint class""" f = ihm.restraint.EM3DRestraint(dataset='foo', assembly='bar') self.assertEqual(f.dataset, 'foo') self.assertEqual(f.assembly, 'bar') self.assertEqual(f.fits, {}) self.assertEqual(f._get_report(), "Fit to 3D electron microscopy density map") f = ihm.restraint.EM3DRestraint( dataset='foo', assembly='bar', fitting_method="Gaussian mixture models") self.assertEqual(f._get_report(), "Fit to 3D electron microscopy density map " "using Gaussian mixture models") def test_sas_restraint_fit(self): """Test SASRestraintFit class""" f = ihm.restraint.SASRestraintFit(0.4) self.assertAlmostEqual(f.chi_value, 0.4, delta=0.1) def test_sas_restraint(self): """Test SASRestraint class""" f = 
ihm.restraint.SASRestraint(dataset='foo', assembly='bar') self.assertEqual(f.dataset, 'foo') self.assertEqual(f.assembly, 'bar') self.assertEqual(f.fits, {}) self.assertEqual(f._get_report(), "SAS restraint") f = ihm.restraint.SASRestraint(dataset='foo', assembly='bar', multi_state=False) self.assertEqual(f._get_report(), "Single-state SAS restraint") f = ihm.restraint.SASRestraint(dataset='foo', assembly='bar', multi_state=True, fitting_atom_type='residues') self.assertEqual(f._get_report(), "Multi-state SAS restraint on residues") def test_em2d_restraint_fit(self): """Test EM2DRestraintFit class""" f = ihm.restraint.EM2DRestraintFit(0.4) self.assertAlmostEqual(f.cross_correlation_coefficient, 0.4, delta=0.1) self.assertIsNone(f.rot_matrix) self.assertIsNone(f.tr_vector) def test_em2d_restraint(self): """Test EM2DRestraint class""" f = ihm.restraint.EM2DRestraint(dataset='foo', assembly='bar') self.assertEqual(f.dataset, 'foo') self.assertEqual(f.assembly, 'bar') self.assertEqual(f.fits, {}) self.assertEqual(f._get_report(), "Fit to 2D electron microscopy class average") def test_distance_restraint(self): """Test DistanceRestraint class""" _ = ihm.restraint.DistanceRestraint() # does nothing def test_harmonic_distance_restraint(self): """Test HarmonicDistanceRestraint class""" r = ihm.restraint.HarmonicDistanceRestraint(42.0) self.assertAlmostEqual(r.distance, 42.0, delta=0.1) self.assertAlmostEqual(r.distance_lower_limit, 42.0, delta=0.1) self.assertAlmostEqual(r.distance_upper_limit, 42.0, delta=0.1) self.assertEqual(r.restraint_type, "harmonic") def test_upper_bound_distance_restraint(self): """Test UpperBoundDistanceRestraint class""" r = ihm.restraint.UpperBoundDistanceRestraint(42.0) self.assertAlmostEqual(r.distance, 42.0, delta=0.1) self.assertIsNone(r.distance_lower_limit) self.assertAlmostEqual(r.distance_upper_limit, 42.0, delta=0.1) self.assertEqual(r.restraint_type, "upper bound") def test_lower_bound_distance_restraint(self): """Test LowerBoundDistanceRestraint class""" r = ihm.restraint.LowerBoundDistanceRestraint(42.0) self.assertAlmostEqual(r.distance, 42.0, delta=0.1) self.assertAlmostEqual(r.distance_lower_limit, 42.0, delta=0.1) self.assertIsNone(r.distance_upper_limit) self.assertEqual(r.restraint_type, "lower bound") def test_lower_upper_bound_distance_restraint(self): """Test LowerUpperBoundDistanceRestraint class""" r = ihm.restraint.LowerUpperBoundDistanceRestraint(20.0, 30.0) self.assertAlmostEqual(r.distance_lower_limit, 20.0, delta=0.1) self.assertAlmostEqual(r.distance_upper_limit, 30.0, delta=0.1) self.assertEqual(r.restraint_type, "lower and upper bound") def test_cross_link_restraint(self): """Test CrossLinkRestraint class""" dss = ihm.ChemDescriptor('DSS') f = ihm.restraint.CrossLinkRestraint(dataset='foo', linker=dss) self.assertEqual(f.dataset, 'foo') self.assertEqual(f.linker, dss) self.assertEqual(f.experimental_cross_links, []) self.assertEqual( f._get_report(), "0 DSS cross-links from 0 experimental identifications") def test_experimental_cross_link(self): """Test ExperimentalCrossLink class""" f = ihm.restraint.ExperimentalCrossLink('res1', 'res2') self.assertEqual(f.residue1, 'res1') self.assertEqual(f.residue2, 'res2') def test_cross_link(self): """Test CrossLink class""" _ = ihm.restraint.CrossLink() # does nothing def test_residue_cross_link(self): """Test ResidueCrossLink class""" e = ihm.Entity('AHCDAH') asym1 = ihm.AsymUnit(e) asym2 = ihm.AsymUnit(e) ex_xl = ihm.restraint.ExperimentalCrossLink(e.residue(1), e.residue(2)) f = 
ihm.restraint.ResidueCrossLink( experimental_cross_link=ex_xl, asym1=asym1, asym2=asym2, distance='dist') self.assertEqual(f.granularity, 'by-residue') self.assertIsNone(f.atom1) self.assertIsNone(f.atom2) self.assertEqual(f.asym1, asym1) self.assertEqual(f.asym2, asym2) self.assertEqual(f.residue1.seq_id, 1) self.assertEqual(f.residue1.asym, asym1) self.assertEqual(f.residue2.seq_id, 2) self.assertEqual(f.residue2.asym, asym2) def test_feature_cross_link(self): """Test FeatureCrossLink class""" f = ihm.restraint.FeatureCrossLink( experimental_cross_link='ex', asym1='asym1', asym2='asym2', distance='dist') self.assertEqual(f.granularity, 'by-feature') self.assertIsNone(f.atom1) self.assertIsNone(f.atom2) self.assertEqual(f.asym1, 'asym1') self.assertEqual(f.asym2, 'asym2') def test_atom_cross_link(self): """Test AtomCrossLink class""" f = ihm.restraint.AtomCrossLink( experimental_cross_link='ex', asym1='asym1', asym2='asym2', atom1='C', atom2='N', distance='dist') self.assertEqual(f.granularity, 'by-atom') self.assertEqual(f.atom1, 'C') self.assertEqual(f.atom2, 'N') self.assertEqual(f.asym1, 'asym1') self.assertEqual(f.asym2, 'asym2') def test_feature(self): """Test Feature base class""" f = ihm.restraint.Feature() # does nothing self.assertEqual(f._all_entities_or_asyms(), []) self.assertIs(f.type, ihm.unknown) self.assertIs(f._get_entity_type(), ihm.unknown) def test_residue_feature(self): """Test ResidueFeature class""" e = ihm.Entity('AHCDAH') a = ihm.AsymUnit(e) f = ihm.restraint.ResidueFeature(ranges=[]) self.assertIsNone(f._get_entity_type()) # No ranges - type is 'residue' f = ihm.restraint.ResidueFeature(ranges=[]) self.assertEqual(f.type, 'residue') # All ranges are individual residues f = ihm.restraint.ResidueFeature(ranges=[a(1, 1), a(2, 2)]) self.assertEqual(f.type, 'residue') # Should work with actual Residue objects too f = ihm.restraint.ResidueFeature(ranges=[a.residue(3)]) self.assertEqual(f.type, 'residue') # At least one range is a true range f = ihm.restraint.ResidueFeature(ranges=[a(3, 4)]) self.assertEqual(f.type, 'residue range') def test_geometric_restraint(self): """Test GeometricRestraint class""" dist = ihm.restraint.UpperBoundDistanceRestraint(42.0) geom = ihm.geometry.XAxis(name='foo', description='bar') f = ihm.restraint.GeometricRestraint( dataset='foo', geometric_object=geom, feature='feat', distance=dist) self.assertEqual(f.dataset, 'foo') self.assertEqual(f.object_characteristic, 'other') self.assertIsNone(f.assembly) self.assertEqual(f._get_report(), "Distance (upper bound) to axis") def test_center_geometric_restraint(self): """Test CenterGeometricRestraint class""" f = ihm.restraint.CenterGeometricRestraint( dataset='foo', geometric_object='geom', feature='feat', distance='dist') self.assertEqual(f.dataset, 'foo') self.assertEqual(f.object_characteristic, 'center') self.assertIsNone(f.assembly) def test_inner_surface_geometric_restraint(self): """Test InnerSurfaceGeometricRestraint class""" f = ihm.restraint.InnerSurfaceGeometricRestraint( dataset='foo', geometric_object='geom', feature='feat', distance='dist') self.assertEqual(f.dataset, 'foo') self.assertEqual(f.object_characteristic, 'inner surface') self.assertIsNone(f.assembly) def test_outer_surface_geometric_restraint(self): """Test OuterSurfaceGeometricRestraint class""" f = ihm.restraint.OuterSurfaceGeometricRestraint( dataset='foo', geometric_object='geom', feature='feat', distance='dist') self.assertEqual(f.dataset, 'foo') self.assertEqual(f.object_characteristic, 'outer surface') 
self.assertIsNone(f.assembly) self.assertEqual(f.feature, 'feat') self.assertEqual(f._all_features, ('feat',)) def test_derived_distance_restraint(self): """Test DerivedDistanceRestraint class""" f = ihm.restraint.DerivedDistanceRestraint( dataset='foo', feature1='feat1', feature2='feat2', distance='dist') self.assertEqual(f.dataset, 'foo') self.assertIsNone(f.assembly) self.assertEqual(f.feature1, 'feat1') self.assertEqual(f.feature2, 'feat2') self.assertEqual(f._all_features, ('feat1', 'feat2')) def test_pseudo_site_signature(self): """Test signature of PseudoSite""" p = ihm.restraint.PseudoSite(1.0, 2.0, 3.0) self.assertEqual(p._signature(), ('1.000', '2.000', '3.000', None)) if __name__ == '__main__': unittest.main() python-ihm-2.7/test/test_startmodel.py000066400000000000000000000051001503573337200202300ustar00rootroot00000000000000import utils import os import unittest TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) utils.set_search_paths(TOPDIR) import ihm.startmodel class Tests(unittest.TestCase): def test_template(self): """Test Template class""" class MockObject: pass dataset = MockObject() p = ihm.startmodel.Template( dataset=dataset, asym_id='G', seq_id_range=(1, 90), template_seq_id_range=(30, 90), sequence_identity=42.) self.assertEqual(p.asym_id, 'G') def test_pdb_helix(self): """Test PDBHelix class""" p = ihm.startmodel.PDBHelix( "HELIX 10 10 ASP A 607 GLU A 624 1" " 18 ") self.assertEqual(p.helix_id, '10') self.assertEqual(p.start_asym, 'A') self.assertEqual(p.start_resnum, 607) self.assertEqual(p.end_asym, 'A') self.assertEqual(p.end_resnum, 624) self.assertEqual(p.helix_class, 1) self.assertEqual(p.length, 18) def test_starting_model(self): """Test StartingModel class, no templates""" e1 = ihm.Entity('AAAA') asym = ihm.AsymUnit(e1) s = ihm.startmodel.StartingModel(asym, 'mock_dataset', 'A', offset=10) self.assertEqual(s.get_atoms(), []) self.assertEqual(s.get_seq_dif(), []) def test_seq_dif(self): """Test SeqDif class""" sd = ihm.startmodel.SeqDif(db_seq_id=10, seq_id=20, db_comp_id='PHE') self.assertEqual(sd.db_seq_id, 10) def test_mse_seq_dif(self): """Test MSESeqDif class""" sd = ihm.startmodel.MSESeqDif(db_seq_id=10, seq_id=20) self.assertEqual(sd.db_comp_id, 'MSE') self.assertEqual(sd.details, 'Conversion of modified residue MSE to MET') def test_add_atom(self): """Test StartingModel.add_atom()""" atoms = ['atom1', 'atom2'] e1 = ihm.Entity('AAAA') asym = ihm.AsymUnit(e1) s = ihm.startmodel.StartingModel(asym, 'mock_dataset', 'A', offset=10) s.add_atom(atoms[0]) s.add_atom(atoms[1]) self.assertEqual(s._atoms, atoms) def test_add_seq_dif(self): """Test StartingModel.add_seq_dif()""" seq_difs = ['sd1', 'sd2'] e1 = ihm.Entity('AAAA') asym = ihm.AsymUnit(e1) s = ihm.startmodel.StartingModel(asym, 'mock_dataset', 'A', offset=10) s.add_seq_dif(seq_difs[0]) s.add_seq_dif(seq_difs[1]) self.assertEqual(s._seq_difs, seq_difs) if __name__ == '__main__': unittest.main() python-ihm-2.7/test/test_test.py000066400000000000000000000010111503573337200170260ustar00rootroot00000000000000import utils import os import sys import unittest import subprocess TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) utils.set_search_paths(TOPDIR) import ihm.test # noqa: F401 # Test should also be importable class Tests(unittest.TestCase): def test_simple(self): """Exercise the ihm.test basic install test""" subprocess.check_call([sys.executable, os.path.join(TOPDIR, 'ihm', 'test.py')]) if __name__ == '__main__': unittest.main() 
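The StartingModel tests above exercise add_atom(), add_seq_dif() and the default (empty) get_atoms()/get_seq_dif(). For context, here is a minimal sketch of the usual customization pattern, assuming subclasses override get_atoms() to yield ihm.model.Atom objects; the class name and coordinates are invented for illustration:

import ihm.model
import ihm.startmodel


class MyStartingModel(ihm.startmodel.StartingModel):
    # Supply template coordinates directly instead of relying on add_atom()
    def get_atoms(self):
        yield ihm.model.Atom(asym_unit=self.asym_unit, seq_id=1,
                             atom_id='CA', type_symbol='C',
                             x=0., y=0., z=0.)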
python-ihm-2.7/test/test_util.py000066400000000000000000000121521503573337200170340ustar00rootroot00000000000000import utils import os import unittest TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) utils.set_search_paths(TOPDIR) import ihm.util class Tests(unittest.TestCase): def test_asym_ids(self): """Test _AsymIDs class""" c = ihm.util._AsymIDs() self.assertEqual([c[i] for i in range(0, 4)], ['A', 'B', 'C', 'D']) self.assertEqual([c[i] for i in range(24, 28)], ['Y', 'Z', 'AA', 'AB']) self.assertEqual([c[i] for i in range(50, 54)], ['AY', 'AZ', 'BA', 'BB']) self.assertEqual([c[i] for i in range(700, 704)], ['ZY', 'ZZ', 'AAA', 'AAB']) def test_remove_id(self): """Test _remove_id utility function""" class DummyObj: pass o = DummyObj() ihm.util._remove_id(o) o._id = 42 ihm.util._remove_id(o) self.assertFalse(hasattr(o, '_id')) def test_assign_id(self): """Test _assign_id utility function""" class DummyObj: def __init__(self, hashval): self.hashval = hashval def __eq__(self, other): return self.hashval == other.hashval def __hash__(self): return self.hashval seen_objs = {} obj_by_id = [] obj1a = DummyObj(42) # obj1a and 1b are identical obj1b = DummyObj(42) obj2 = DummyObj(34) obj3 = DummyObj(23) # obj3 already has an id obj3._id = 'foo' for obj in (obj1a, obj1b, obj2, obj3): ihm.util._assign_id(obj, seen_objs, obj_by_id) self.assertEqual(obj1a._id, 1) self.assertEqual(obj1b._id, 1) self.assertEqual(obj2._id, 2) self.assertEqual(obj3._id, 'foo') self.assertEqual(obj_by_id, [obj1a, obj2]) def test_get_relative_path(self): """Test get_relative_path()""" if os.path.sep == '/': self.assertEqual(ihm.util._get_relative_path('/foo/bar', '/foo/bar'), '/foo/bar') self.assertEqual(ihm.util._get_relative_path('/foo/bar', 'baz'), '/foo/baz') def test_invert_ranges(self): """Test _invert_ranges function""" inrng = [(2, 3)] self.assertEqual(list(ihm.util._invert_ranges(inrng, 4)), [(1, 1), (4, 4)]) inrng = [(1, 1), (4, 7)] self.assertEqual(list(ihm.util._invert_ranges(inrng, 8)), [(2, 3), (8, 8)]) inrng = [(2, 2), (4, 7)] self.assertEqual(list(ihm.util._invert_ranges(inrng, 7)), [(1, 1), (3, 3)]) # Test with non-default start value inrng = [(5, 8)] self.assertEqual(list(ihm.util._invert_ranges(inrng, 10, start=3)), [(3, 4), (9, 10)]) self.assertEqual(list(ihm.util._invert_ranges(inrng, 20, start=10)), [(9, 20)]) self.assertEqual(list(ihm.util._invert_ranges(inrng, 4, start=1)), [(1, 4)]) def test_pred_ranges(self): """Test _pred_ranges function""" inrng = [(2, 3)] self.assertEqual(list(ihm.util._pred_ranges(inrng, 4)), [(1, 1, False), (2, 3, True), (4, 4, False)]) inrng = [(1, 1), (4, 7)] self.assertEqual(list(ihm.util._pred_ranges(inrng, 8)), [(1, 1, True), (2, 3, False), (4, 7, True), (8, 8, False)]) inrng = [(2, 2), (4, 7)] self.assertEqual(list(ihm.util._pred_ranges(inrng, 7)), [(1, 1, False), (2, 2, True), (3, 3, False), (4, 7, True)]) def test_combine_ranges(self): """Test _combine_ranges function""" inrng = [(8, 10), (1, 2), (3, 4)] self.assertEqual(list(ihm.util._combine_ranges(inrng)), [(1, 4), (8, 10)]) inrng = [(1, 10), (3, 4)] self.assertEqual(list(ihm.util._combine_ranges(inrng)), [(1, 10)]) inrng = [(1, 2), (1, 4)] self.assertEqual(list(ihm.util._combine_ranges(inrng)), [(1, 4)]) inrng = [(1, 2), (4, 4)] self.assertEqual(list(ihm.util._combine_ranges(inrng)), [(1, 2), (4, 4)]) self.assertEqual(list(ihm.util._combine_ranges([])), []) def test_make_range_from_list(self): """Test _make_range_from_list function""" rr = [] 
self.assertEqual(list(ihm.util._make_range_from_list(rr)), []) rr = [1, 2, 5, 6, 10, 11, 20] self.assertEqual(list(ihm.util._make_range_from_list(rr)), [[1, 2], [5, 6], [10, 11], [20, 20]]) def test_get_codes(self): """Test _get_codes function""" self.assertEqual(tuple(ihm.util._get_codes(None)), ()) self.assertEqual(tuple(ihm.util._get_codes(ihm.unknown)), ()) self.assertEqual(tuple(ihm.util._get_codes("TWC\nAA(FOO)T")), ('T', 'W', 'C', 'A', 'A', 'FOO', 'T')) if __name__ == '__main__': unittest.main() python-ihm-2.7/test/utils.py000066400000000000000000000025151503573337200161620ustar00rootroot00000000000000import os import sys import tempfile import contextlib import shutil def set_search_paths(topdir): """Set search paths so that we can import Python modules""" os.environ['PYTHONPATH'] = topdir + os.pathsep \ + os.environ.get('PYTHONPATH', '') sys.path.insert(0, topdir) def get_input_file_name(topdir, fname): """Return full path to a test input file""" return os.path.join(topdir, 'test', 'input', fname) @contextlib.contextmanager def temporary_directory(dir=None): _tmpdir = tempfile.mkdtemp(dir=dir) yield _tmpdir shutil.rmtree(_tmpdir, ignore_errors=True) if 'coverage' in sys.modules: import atexit # Collect coverage information from subprocesses __site_tmpdir = tempfile.mkdtemp() with open(os.path.join(__site_tmpdir, 'sitecustomize.py'), 'w') as fh: fh.write(""" import coverage import atexit import os _cov = coverage.coverage(branch=True, data_suffix=True, auto_data=True, data_file=os.path.join('%s', '.coverage')) _cov.start() def _coverage_cleanup(c): c.stop() atexit.register(_coverage_cleanup, _cov) """ % os.getcwd()) os.environ['PYTHONPATH'] = __site_tmpdir + os.pathsep \ + os.environ.get('PYTHONPATH', '') def __cleanup(d): shutil.rmtree(d, ignore_errors=True) atexit.register(__cleanup, __site_tmpdir) python-ihm-2.7/util/000077500000000000000000000000001503573337200144435ustar00rootroot00000000000000python-ihm-2.7/util/check-db-entries.py000066400000000000000000000027031503573337200201260ustar00rootroot00000000000000import unittest import ihm.reader import ihm.dumper import urllib.request import os class Tests(unittest.TestCase): def _read_cif(self, pdb_id): url = 'https://pdb-ihm.org/cif/%s.cif' % pdb_id with urllib.request.urlopen(url) as fh: s, = ihm.reader.read(fh) return s def _write_cif(self, s, check=True): with open('test.cif', 'w') as fh: ihm.dumper.write(fh, [s], check=check) os.unlink('test.cif') def test_9a0e(self): """Test IMP structure without errors (9a0e)""" s = self._read_cif('9a0e') # This used to have an incorrect reference sequence, now fixed. self._write_cif(s) def test_8zzd(self): """Test docking structure without errors (8zzd)""" s = self._read_cif('8zzd') # This used to have an incorrect assembly, now fixed. self._write_cif(s) def test_9a82(self): """Test HADDOCK structure without errors (9a82)""" s = self._read_cif('9a82') self._write_cif(s) def test_9a13(self): """Test HADDOCK structure without errors (9a13)""" s = self._read_cif('9a13') # This used to fail because feature #71 was null, but this # is now fixed. 
self._write_cif(s) def test_9a0t(self): """Test IMP structure without errors (9a0t)""" s = self._read_cif('9a0t') self._write_cif(s) if __name__ == '__main__': unittest.main() python-ihm-2.7/util/debian/000077500000000000000000000000001503573337200156655ustar00rootroot00000000000000python-ihm-2.7/util/debian/changelog000066400000000000000000000047211503573337200175430ustar00rootroot00000000000000python-ihm (2.7-1~@CODENAME@) @CODENAME@; urgency=low * python-ihm 2.7 release -- Ben Webb Mon, 07 Jul 2025 18:13:05 -0700 python-ihm (2.6-1~@CODENAME@) @CODENAME@; urgency=low * python-ihm 2.6 release -- Ben Webb Mon, 09 Jun 2025 15:40:14 -0700 python-ihm (2.5-1~@CODENAME@) @CODENAME@; urgency=low * python-ihm 2.5 release -- Ben Webb Fri, 25 Apr 2025 20:41:22 -0700 python-ihm (2.4-1~@CODENAME@) @CODENAME@; urgency=low * python-ihm 2.4 release -- Ben Webb Tue, 25 Mar 2025 20:31:19 -0700 python-ihm (2.3-1~@CODENAME@) @CODENAME@; urgency=low * python-ihm 2.3 release -- Ben Webb Thu, 13 Mar 2025 19:57:57 -0700 python-ihm (2.2-1~@CODENAME@) @CODENAME@; urgency=low * python-ihm 2.2 release -- Ben Webb Thu, 13 Feb 2025 20:24:55 -0800 python-ihm (2.1-1~@CODENAME@) @CODENAME@; urgency=low * python-ihm 2.1 release -- Ben Webb Wed, 12 Feb 2025 22:11:51 -0800 python-ihm (2.0-1~@CODENAME@) @CODENAME@; urgency=low * python-ihm 2.0 release -- Ben Webb Tue, 11 Feb 2025 16:49:02 -0800 python-ihm (1.8-1~@CODENAME@) @CODENAME@; urgency=low * python-ihm 1.8 release -- Ben Webb Tue, 26 Nov 2024 22:00:07 -0800 python-ihm (1.7-1~@CODENAME@) @CODENAME@; urgency=low * python-ihm 1.7 release -- Ben Webb Tue, 22 Oct 2024 23:02:03 -0700 python-ihm (1.6-1~@CODENAME@) @CODENAME@; urgency=low * python-ihm 1.6 release -- Ben Webb Fri, 27 Sep 2024 10:55:45 -0700 python-ihm (1.5-1~@CODENAME@) @CODENAME@; urgency=low * python-ihm 1.5 release -- Ben Webb Fri, 06 Sep 2024 14:15:12 -0700 python-ihm (1.4-1~@CODENAME@) @CODENAME@; urgency=low * python-ihm 1.4 release -- Ben Webb Fri, 30 Aug 2024 15:27:15 -0700 python-ihm (1.3-1~@CODENAME@) @CODENAME@; urgency=low * python-ihm 1.3 release -- Ben Webb Tue, 16 Jul 2024 10:37:13 -0700 python-ihm (1.2-1~@CODENAME@) @CODENAME@; urgency=low * python-ihm 1.2 release -- Ben Webb Wed, 12 Jun 2024 11:35:25 -0700 python-ihm (1.1-1~@CODENAME@) @CODENAME@; urgency=low * python-ihm 1.1 release -- Ben Webb Thu, 09 May 2024 12:44:51 -0700 python-ihm (1.0-1~@CODENAME@) @CODENAME@; urgency=low * Initial .deb release -- Ben Webb Thu, 07 Mar 2024 00:19:35 +0000 python-ihm-2.7/util/debian/control000066400000000000000000000011301503573337200172630ustar00rootroot00000000000000Source: python-ihm Priority: optional Maintainer: Ben Webb Build-Depends: debhelper-compat (= 13), dh-python, swig, python3-dev Standards-Version: 4.6.2 Section: libs Homepage: https://github.com/ihmwg/python-ihm Vcs-Browser: https://github.com/ihmwg/python-ihm Package: python3-ihm Section: libs Architecture: any Depends: ${shlibs:Depends}, ${misc:Depends}, python3-msgpack Description: Package for handling IHM mmCIF and BinaryCIF files This is a Python package to assist in handling mmCIF and BinaryCIF files compliant with the integrative/hybrid modeling (IHM) extension. 
python-ihm-2.7/util/debian/copyright000066400000000000000000000023271503573337200176240ustar00rootroot00000000000000Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ Upstream-Name: python-ihm Source: https://github.com/ihmwg/python-ihm Copyright: 2018-2025 IHM Working Group License: MIT Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: . The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. . THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. python-ihm-2.7/util/debian/make-package.sh000077500000000000000000000010741503573337200205340ustar00rootroot00000000000000#!/bin/sh # Build a Debian package from source set -e VERSION=$(grep __version__ ../../ihm/__init__.py |cut -d\' -f2) CODENAME=`lsb_release -c -s` # Make sure we can find the rest of our input files TOOL_DIR=`dirname "$0"` # Get absolute path to top dir TOP_DIR=`cd "${TOOL_DIR}/../.." && pwd` cd ${TOP_DIR} rm -rf debian cp -r util/debian/ . rm debian/make-package.sh sed -i -e "s/\@CODENAME\@/$CODENAME/g" debian/changelog if [ "${CODENAME}" = "focal" ]; then sed -i -e "s/debhelper-compat (= 13)/debhelper-compat (= 12)/" debian/control fi dpkg-buildpackage -S python-ihm-2.7/util/debian/patches/000077500000000000000000000000001503573337200173145ustar00rootroot00000000000000python-ihm-2.7/util/debian/patches/ihm-force-c-ext.patch000066400000000000000000000006201503573337200232220ustar00rootroot00000000000000diff --git a/setup.py b/setup.py index 5172433..d428f1d 100755 --- a/setup.py +++ b/setup.py @@ -36,7 +36,7 @@ if build_ext: extra_compile_args=cargs, swig_opts=['-keyword', '-nodefaultctor', '-nodefaultdtor', '-noproxy'], - optional=True)] + optional=False)] else: mod = [] python-ihm-2.7/util/debian/patches/series000066400000000000000000000000261503573337200205270ustar00rootroot00000000000000ihm-force-c-ext.patch python-ihm-2.7/util/debian/rules000077500000000000000000000001571503573337200167500ustar00rootroot00000000000000#!/usr/bin/make -f #export DH_VERBOSE=1 export PYBUILD_NAME=ihm %: dh $@ --with python3 --buildsystem=pybuild python-ihm-2.7/util/debian/source/000077500000000000000000000000001503573337200171655ustar00rootroot00000000000000python-ihm-2.7/util/debian/source/format000066400000000000000000000000141503573337200203730ustar00rootroot000000000000003.0 (quilt) python-ihm-2.7/util/get_ccd_atoms.py000077500000000000000000000017301503573337200176140ustar00rootroot00000000000000#!/usr/bin/python3 """Get the set of canonical atom names for one or more residue types, by querying CCD via RCSB. Used to populate the KNOWN_ATOM_NAMES variable in make_mmcif.py. 
""" import collections import ihm.format import urllib.request import sys class ChemCompAtomHandler: not_in_file = omitted = unknown = None def __init__(self): super().__init__() self.atoms = collections.defaultdict(set) def __call__(self, comp_id, atom_id): self.atoms[comp_id].add(atom_id) cca = ChemCompAtomHandler() for resname in sys.argv[1:]: with urllib.request.urlopen( 'https://files.rcsb.org/pub/pdb/refdata/chem_comp/%s/%s/%s.cif' % (resname[-1], resname, resname)) as fh: c = ihm.format.CifReader(fh, category_handler={'_chem_comp_atom': cca}) c.read_file() print("KNOWN_ATOM_NAMES = {") for comp_id in sorted(cca.atoms.keys()): print(' %r: %r,' % (comp_id, cca.atoms[comp_id])) print("}") python-ihm-2.7/util/python-ihm.spec000066400000000000000000000067311503573337200174220ustar00rootroot00000000000000Name: python3-ihm License: MIT Group: Applications/Engineering Version: 2.7 Release: 1%{?dist} Summary: Package for handling IHM mmCIF and BinaryCIF files Packager: Ben Webb URL: https://pypi.python.org/pypi/ihm Source: ihm-%{version}.tar.gz Patch0: ihm-force-c-ext.patch BuildRequires: python3-devel, python3-setuptools, gcc %if 0%{?rhel} != 7 BuildRequires: python3-msgpack Requires: python3-msgpack %else BuildRequires: sed %endif %if 0%{?fedora} >= 42 BuildRequires: python3-pytest %endif %description This is a Python package to assist in handling mmCIF and BinaryCIF files compliant with the integrative/hybrid modeling (IHM) extension. It works with Python 3.6 or later. %prep %setup -n ihm-%{version} %patch -P 0 -p1 %if 0%{?rhel} == 7 sed -i -e "s/install_requires=\['msgpack'\]/#/" setup.py %endif %build %{__python3} setup.py install --root=${RPM_BUILD_ROOT} --record=INSTALLED_FILES %check %if 0%{?fedora} >= 42 %pytest ihm/test.py %else %{__python3} setup.py test %endif %files -f INSTALLED_FILES %defattr(-,root,root) %changelog * Mon Jul 07 2025 Ben Webb 2.7-1 - Update to latest upstream. * Mon Jun 09 2025 Ben Webb 2.6-1 - Update to latest upstream. * Fri Apr 25 2025 Ben Webb 2.5-1 - Update to latest upstream. * Tue Mar 25 2025 Ben Webb 2.4-1 - Update to latest upstream. * Thu Mar 13 2025 Ben Webb 2.3-1 - Update to latest upstream. * Thu Feb 13 2025 Ben Webb 2.2-1 - Update to latest upstream. * Wed Feb 12 2025 Ben Webb 2.1-1 - Update to latest upstream. * Tue Feb 11 2025 Ben Webb 2.0-1 - Update to latest upstream. * Tue Nov 26 2024 Ben Webb 1.8-1 - Update to latest upstream. * Tue Oct 22 2024 Ben Webb 1.7-1 - Update to latest upstream. * Fri Sep 27 2024 Ben Webb 1.6-1 - Update to latest upstream. * Fri Sep 06 2024 Ben Webb 1.5-1 - Update to latest upstream. * Fri Aug 30 2024 Ben Webb 1.4-1 - Update to latest upstream. * Tue Jul 16 2024 Ben Webb 1.3-1 - Update to latest upstream. * Wed Jun 12 2024 Ben Webb 1.2-1 - Update to latest upstream. * Thu May 09 2024 Ben Webb 1.1-1 - Update to latest upstream. * Tue Feb 13 2024 Ben Webb 1.0-1 - Update to latest upstream. * Fri Dec 08 2023 Ben Webb 0.43-1 - Update to latest upstream. * Thu Nov 30 2023 Ben Webb 0.42-1 - Update to latest upstream. * Mon Oct 02 2023 Ben Webb 0.41-1 - Update to latest upstream. * Mon Sep 25 2023 Ben Webb 0.40-1 - Update to latest upstream. * Fri Aug 04 2023 Ben Webb 0.39-1 - Update to latest upstream. * Mon Jul 31 2023 Ben Webb 0.38-1 - Update to latest upstream. * Thu Apr 14 2022 Ben Webb 0.31-1 - Update to latest upstream. * Mon Mar 21 2022 Ben Webb 0.28-1 - Update to latest upstream. * Thu Jan 27 2022 Ben Webb 0.27-1 - Update to latest upstream. * Tue Jan 25 2022 Ben Webb 0.26-1 - Update to latest upstream. 
* Fri Dec 03 2021 Ben Webb 0.25-1 - Update to latest upstream. * Fri Jul 09 2021 Ben Webb 0.20-1 - Initial package. python-ihm-2.7/util/validate-outputs.py000077500000000000000000000017201503573337200203320ustar00rootroot00000000000000#!/usr/bin/python3 """Check the output of each example for validity against the PDBx and IHM dictionaries. This should be periodically rechecked in case the PDBx and IHM dictionaries are updated. """ import sys import os import subprocess import ihm.dictionary import urllib.request with urllib.request.urlopen( 'http://mmcif.wwpdb.org/dictionaries/ascii/mmcif_pdbx_v50.dic') as fh: d_pdbx = ihm.dictionary.read(fh) with urllib.request.urlopen( 'http://mmcif.wwpdb.org/dictionaries/ascii/mmcif_ihm.dic') as fh: d_ihm = ihm.dictionary.read(fh) pdbx_ihm = d_pdbx + d_ihm for script in ('simple-docking.py', 'ligands_water.py', 'non_standard_residues.py'): print(script) subprocess.check_call([sys.executable, '../examples/' + script]) with open('output.cif') as fh: try: pdbx_ihm.validate(fh) except ihm.dictionary.ValidatorError as exc: print(exc) os.unlink('output.cif')
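The validate-outputs.py utility above fetches the PDBx and IHM dictionaries, combines them, and validates each example's output in turn. The same pattern works for any single mmCIF file; a minimal sketch (assuming network access and a hypothetical local file my_structure.cif) is:

import urllib.request
import ihm.dictionary

# Fetch and combine the two dictionaries, as validate-outputs.py does
with urllib.request.urlopen(
        'http://mmcif.wwpdb.org/dictionaries/ascii/mmcif_pdbx_v50.dic') as fh:
    d_pdbx = ihm.dictionary.read(fh)
with urllib.request.urlopen(
        'http://mmcif.wwpdb.org/dictionaries/ascii/mmcif_ihm.dic') as fh:
    d_ihm = ihm.dictionary.read(fh)
pdbx_ihm = d_pdbx + d_ihm

with open('my_structure.cif') as fh:
    try:
        pdbx_ihm.validate(fh)
    except ihm.dictionary.ValidatorError as exc:
        print(exc)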