From bea62e14d13df34f739f67f0cff101627e3c4b8e Mon Sep 17 00:00:00 2001 From: Leszek Szymczak <leszeks@e-science.pl> Date: Mon, 27 Jul 2020 12:53:14 +0000 Subject: [PATCH] wndb reader now fully functional It's now possible to successfully import data from plWordNet database. Since it takes a very long time to read data from the database, a sqlite storage dump of plWordNet version 2.3 was added. This file is provided only for convenience, and should not be relied upon to be compatible with future versions of the storage module. --- .editorconfig | 16 + .gitignore | 12 + COPYING | 674 +++++++ COPYING.LESSER | 165 ++ LICENSE-PWN.txt | 26 + LICENSE-plWN.txt | 29 + MANIFEST.in | 5 +- PKG-INFO | 10 - PLWN_API.egg-info/PKG-INFO | 10 - PLWN_API.egg-info/SOURCES.txt | 28 - PLWN_API.egg-info/dependency_links.txt | 1 - PLWN_API.egg-info/requires.txt | 2 - PLWN_API.egg-info/top_level.txt | 1 - README.md | 80 +- doc/Makefile | 193 ++ doc/asciio/skip-artificial-docstring.asciio | Bin 0 -> 26108 bytes doc/source/conf.py | 301 +++ doc/source/enums.rst | 6 + doc/source/exceptions.rst | 4 + doc/source/index.rst | 22 + doc/source/interface.rst | 4 + doc/source/introduction.rst | 258 +++ doc/source/locale/pl/LC_MESSAGES/enums.po | 110 ++ .../locale/pl/LC_MESSAGES/exceptions.po | 106 + doc/source/locale/pl/LC_MESSAGES/index.po | 38 + doc/source/locale/pl/LC_MESSAGES/interface.po | 1265 ++++++++++++ .../locale/pl/LC_MESSAGES/introduction.po | 407 ++++ plwn/.bases.py.swp | Bin 1024 -> 0 bytes plwn/__init__.py | 23 +- plwn/_loading.py | 65 +- plwn/bases.py | 1244 +++++++----- plwn/default/__init__.py | 20 + plwn/default/_default.py | 46 + plwn/enums.py | 189 +- plwn/exceptions.py | 62 +- plwn/readers/comments.py | 35 +- plwn/readers/nodes.py | 125 +- plwn/readers/ubylmf.py | 45 +- plwn/readers/wndb.py | 472 +++-- plwn/readers/wnschema.py | 541 ++++++ plwn/readers/wnxml.py | 348 ++-- plwn/relation_aliases.tsv | 5 - plwn/relresolver.py | 122 -- plwn/storages/objects.py | 520 ----- 
plwn/storages/sqlite.py | 1396 ++++++++++---- plwn/utils/artifilter.py | 112 ++ plwn/utils/graphmlout.py | 129 +- plwn/utils/relinfotuple.py | 64 + plwn/utils/sorting.py | 17 + plwn/utils/tupwrap.py | 53 - scripts/clean_wndb.sql | 32 + scripts/patch_old_wndb.sql | 21 + scripts/verify_uby_lmf_file.py | 87 + setup.py | 45 +- tests/__init__.py | 0 tests/abstract_cases/__init__.py | 3 + tests/abstract_cases/_make_abstract.py | 29 + .../asciio/graphml-edges.asciio | Bin 0 -> 4083 bytes .../asciio/graphml-mixed.asciio | Bin 0 -> 26171 bytes ...relation-edges-with-artificial-loop.asciio | Bin 0 -> 25927 bytes .../relation-edges-with-artificial.asciio | Bin 0 -> 4272 bytes .../asciio/relation-edges.asciio | Bin 0 -> 4052 bytes ...synset-related-with-artificial-loop.asciio | Bin 0 -> 4352 bytes .../synset-related-with-artificial.asciio | Bin 0 -> 26320 bytes tests/abstract_cases/test_graphml.py | 1712 +++++++++++++++++ tests/abstract_cases/test_plwordnet.py | 1163 +++++++++++ tests/abstract_cases/test_unit_and_synset.py | 981 ++++++++++ tests/cases/__init__.py | 0 tests/cases/test_graphmlout.py | 202 ++ tests/cases/test_sqlite_storage.py | 9 + tests/cases/test_ubylmf_reader.py | 563 ++++++ tests/cases/test_wndb_reader.py | 667 +++++++ tests/setuptools_loader.py | 16 + tox.ini | 5 +- 74 files changed, 12682 insertions(+), 2259 deletions(-) create mode 100644 .editorconfig create mode 100644 .gitignore create mode 100644 COPYING create mode 100644 COPYING.LESSER create mode 100644 LICENSE-PWN.txt create mode 100644 LICENSE-plWN.txt delete mode 100644 PKG-INFO delete mode 100644 PLWN_API.egg-info/PKG-INFO delete mode 100644 PLWN_API.egg-info/SOURCES.txt delete mode 100644 PLWN_API.egg-info/dependency_links.txt delete mode 100644 PLWN_API.egg-info/requires.txt delete mode 100644 PLWN_API.egg-info/top_level.txt create mode 100644 doc/Makefile create mode 100644 doc/asciio/skip-artificial-docstring.asciio create mode 100644 doc/source/conf.py create mode 100644 
doc/source/enums.rst create mode 100644 doc/source/exceptions.rst create mode 100644 doc/source/index.rst create mode 100644 doc/source/interface.rst create mode 100644 doc/source/introduction.rst create mode 100644 doc/source/locale/pl/LC_MESSAGES/enums.po create mode 100644 doc/source/locale/pl/LC_MESSAGES/exceptions.po create mode 100644 doc/source/locale/pl/LC_MESSAGES/index.po create mode 100644 doc/source/locale/pl/LC_MESSAGES/interface.po create mode 100644 doc/source/locale/pl/LC_MESSAGES/introduction.po delete mode 100644 plwn/.bases.py.swp create mode 100644 plwn/default/__init__.py create mode 100644 plwn/default/_default.py create mode 100644 plwn/readers/wnschema.py delete mode 100644 plwn/relation_aliases.tsv delete mode 100644 plwn/relresolver.py delete mode 100644 plwn/storages/objects.py create mode 100644 plwn/utils/artifilter.py create mode 100644 plwn/utils/relinfotuple.py delete mode 100644 plwn/utils/tupwrap.py create mode 100644 scripts/clean_wndb.sql create mode 100644 scripts/patch_old_wndb.sql create mode 100755 scripts/verify_uby_lmf_file.py create mode 100644 tests/__init__.py create mode 100644 tests/abstract_cases/__init__.py create mode 100644 tests/abstract_cases/_make_abstract.py create mode 100644 tests/abstract_cases/asciio/graphml-edges.asciio create mode 100644 tests/abstract_cases/asciio/graphml-mixed.asciio create mode 100644 tests/abstract_cases/asciio/relation-edges-with-artificial-loop.asciio create mode 100644 tests/abstract_cases/asciio/relation-edges-with-artificial.asciio create mode 100644 tests/abstract_cases/asciio/relation-edges.asciio create mode 100644 tests/abstract_cases/asciio/synset-related-with-artificial-loop.asciio create mode 100644 tests/abstract_cases/asciio/synset-related-with-artificial.asciio create mode 100644 tests/abstract_cases/test_graphml.py create mode 100644 tests/abstract_cases/test_plwordnet.py create mode 100644 tests/abstract_cases/test_unit_and_synset.py create mode 100644 
tests/cases/__init__.py create mode 100644 tests/cases/test_graphmlout.py create mode 100644 tests/cases/test_sqlite_storage.py create mode 100644 tests/cases/test_ubylmf_reader.py create mode 100644 tests/cases/test_wndb_reader.py create mode 100644 tests/setuptools_loader.py diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..fa46a48 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,16 @@ +root = true + +[*] +indent_style = space +indent_size = 4 + +max_line_length = 79 + +end_of_line = lf +charset = utf-8 + +trim_trailing_whitespace = true +insert_final_newline = true + +[*.rst] +indent_size = 2 diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..a2ed4fe --- /dev/null +++ b/.gitignore @@ -0,0 +1,12 @@ +*.swp + +# Sometimes backup files are used +*.bak + +*.pyc +__pycache__/ +build/ +dist/ +*.egg-info + +*.mo diff --git a/COPYING b/COPYING new file mode 100644 index 0000000..9cecc1d --- /dev/null +++ b/COPYING @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/> + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. 
Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. 
If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. 
Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. 
+ + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. 
Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. 
+ + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. 
+ + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. 
If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. 
+ + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. 
+ + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the 
material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. 
+ + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. 
If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. 
+ + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. 
The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. 
THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. 
+ + {one line to give the program's name and a brief idea of what it does.} + Copyright (C) {year} {name of author} + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see <http://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + {project} Copyright (C) {year} {fullname} + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +<http://www.gnu.org/licenses/>. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. 
If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +<http://www.gnu.org/philosophy/why-not-lgpl.html>. diff --git a/COPYING.LESSER b/COPYING.LESSER new file mode 100644 index 0000000..65c5ca8 --- /dev/null +++ b/COPYING.LESSER @@ -0,0 +1,165 @@ + GNU LESSER GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/> + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + + This version of the GNU Lesser General Public License incorporates +the terms and conditions of version 3 of the GNU General Public +License, supplemented by the additional permissions listed below. + + 0. Additional Definitions. + + As used herein, "this License" refers to version 3 of the GNU Lesser +General Public License, and the "GNU GPL" refers to version 3 of the GNU +General Public License. + + "The Library" refers to a covered work governed by this License, +other than an Application or a Combined Work as defined below. + + An "Application" is any work that makes use of an interface provided +by the Library, but which is not otherwise based on the Library. +Defining a subclass of a class defined by the Library is deemed a mode +of using an interface provided by the Library. + + A "Combined Work" is a work produced by combining or linking an +Application with the Library. The particular version of the Library +with which the Combined Work was made is also called the "Linked +Version". + + The "Minimal Corresponding Source" for a Combined Work means the +Corresponding Source for the Combined Work, excluding any source code +for portions of the Combined Work that, considered in isolation, are +based on the Application, and not on the Linked Version. 
+ + The "Corresponding Application Code" for a Combined Work means the +object code and/or source code for the Application, including any data +and utility programs needed for reproducing the Combined Work from the +Application, but excluding the System Libraries of the Combined Work. + + 1. Exception to Section 3 of the GNU GPL. + + You may convey a covered work under sections 3 and 4 of this License +without being bound by section 3 of the GNU GPL. + + 2. Conveying Modified Versions. + + If you modify a copy of the Library, and, in your modifications, a +facility refers to a function or data to be supplied by an Application +that uses the facility (other than as an argument passed when the +facility is invoked), then you may convey a copy of the modified +version: + + a) under this License, provided that you make a good faith effort to + ensure that, in the event an Application does not supply the + function or data, the facility still operates, and performs + whatever part of its purpose remains meaningful, or + + b) under the GNU GPL, with none of the additional permissions of + this License applicable to that copy. + + 3. Object Code Incorporating Material from Library Header Files. + + The object code form of an Application may incorporate material from +a header file that is part of the Library. You may convey such object +code under terms of your choice, provided that, if the incorporated +material is not limited to numerical parameters, data structure +layouts and accessors, or small macros, inline functions and templates +(ten or fewer lines in length), you do both of the following: + + a) Give prominent notice with each copy of the object code that the + Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the object code with a copy of the GNU GPL and this license + document. + + 4. Combined Works. 
+ + You may convey a Combined Work under terms of your choice that, +taken together, effectively do not restrict modification of the +portions of the Library contained in the Combined Work and reverse +engineering for debugging such modifications, if you also do each of +the following: + + a) Give prominent notice with each copy of the Combined Work that + the Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the Combined Work with a copy of the GNU GPL and this license + document. + + c) For a Combined Work that displays copyright notices during + execution, include the copyright notice for the Library among + these notices, as well as a reference directing the user to the + copies of the GNU GPL and this license document. + + d) Do one of the following: + + 0) Convey the Minimal Corresponding Source under the terms of this + License, and the Corresponding Application Code in a form + suitable for, and under terms that permit, the user to + recombine or relink the Application with a modified version of + the Linked Version to produce a modified Combined Work, in the + manner specified by section 6 of the GNU GPL for conveying + Corresponding Source. + + 1) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (a) uses at run time + a copy of the Library already present on the user's computer + system, and (b) will operate properly with a modified version + of the Library that is interface-compatible with the Linked + Version. + + e) Provide Installation Information, but only if you would otherwise + be required to provide such information under section 6 of the + GNU GPL, and only to the extent that such information is + necessary to install and execute a modified version of the + Combined Work produced by recombining or relinking the + Application with a modified version of the Linked Version. 
(If + you use option 4d0, the Installation Information must accompany + the Minimal Corresponding Source and Corresponding Application + Code. If you use option 4d1, you must provide the Installation + Information in the manner specified by section 6 of the GNU GPL + for conveying Corresponding Source.) + + 5. Combined Libraries. + + You may place library facilities that are a work based on the +Library side by side in a single library together with other library +facilities that are not Applications and are not covered by this +License, and convey such a combined library under terms of your +choice, if you do both of the following: + + a) Accompany the combined library with a copy of the same work based + on the Library, uncombined with any other library facilities, + conveyed under the terms of this License. + + b) Give prominent notice with the combined library that part of it + is a work based on the Library, and explaining where to find the + accompanying uncombined form of the same work. + + 6. Revised Versions of the GNU Lesser General Public License. + + The Free Software Foundation may publish revised and/or new versions +of the GNU Lesser General Public License from time to time. Such new +versions will be similar in spirit to the present version, but may +differ in detail to address new problems or concerns. + + Each version is given a distinguishing version number. If the +Library as you received it specifies that a certain numbered version +of the GNU Lesser General Public License "or any later version" +applies to it, you have the option of following the terms and +conditions either of that published version or of any later version +published by the Free Software Foundation. If the Library as you +received it does not specify a version number of the GNU Lesser +General Public License, you may choose any version of the GNU Lesser +General Public License ever published by the Free Software Foundation. 
+ + If the Library as you received it specifies that a proxy can decide +whether future versions of the GNU Lesser General Public License shall +apply, that proxy's public statement of acceptance of any version is +permanent authorization for you to choose that version for the +Library. diff --git a/LICENSE-PWN.txt b/LICENSE-PWN.txt new file mode 100644 index 0000000..ece943d --- /dev/null +++ b/LICENSE-PWN.txt @@ -0,0 +1,26 @@ +This software and database is being provided to you, the LICENSEE, +by Princeton University under the following license. By obtaining, using +and/or copying this software and database, you agree that you have read, +understood, and will comply with these terms and conditions.: + +Permission to use, copy, modify and distribute this software and database and +its documentation for any purpose and without fee or royalty is hereby granted, +provided that you agree to comply with the following copyright notice and +statements, including the disclaimer, and that the same appear on ALL copies +of the software, database and documentation, including modifications that you +make for internal use or for distribution. + +WordNet 3.0 Copyright 2006 by Princeton University. All rights reserved. + +THIS SOFTWARE AND DATABASE IS PROVIDED "AS IS" AND PRINCETON UNIVERSITY MAKES +NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, +BUT NOT LIMITATION, PRINCETON UNIVERSITY MAKES NO REPRESENTATIONS OR WARRANTIES +OF MERCHANT- ABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF +THE LICENSED SOFTWARE, DATABASE OR DOCUMENTATION WILL NOT INFRINGE ANY +THIRD PARTY PATENTS, COPYRIGHTS, TRADEMARKS OR OTHER RIGHTS. + +The name of Princeton University or Princeton may not be used in advertising or +publicity pertaining to distribution of the software and/or database. +Title to copyright in this software, database and any associated documentation +shall at all times remain with Princeton University and LICENSEE agrees +to preserve same. 
diff --git a/LICENSE-plWN.txt b/LICENSE-plWN.txt
new file mode 100644
index 0000000..d43c374
--- /dev/null
+++ b/LICENSE-plWN.txt
@@ -0,0 +1,29 @@
+This software and database are being provided to you, the LICENSEE,
+by Wrocław University of Technology under the following license. By obtaining,
+using and/or copying this software and database, you agree that you have read,
+understood, and will comply with these terms and conditions:
+
+Permission to use, copy, modify, distribute, and public expose this software
+and database (including data getting) and its documentation for any purpose and
+without fee or royalty is hereby granted, provided that you agree to comply
+with the following copyright notice and statements, including the disclaimer,
+and that the same appear on ALL copies of the software, database and
+documentation, including modifications which you make for internal use or for
+wider distribution.
+
+plWordNet 3.0 © 2016 by Wrocław University of Technology. All rights reserved.
+
+THIS SOFTWARE AND DATABASE IS PROVIDED "AS IS" AND WROCŁAW UNIVERSITY OF
+TECHNOLOGY MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY
+OF EXAMPLE, BUT NOT LIMITATION, WROCŁAW UNIVERSITY OF TECHNOLOGY MAKES NO
+REPRESENTATIONS OR WARRANTIES OF MERCHANT-ABILITY OR MERCHANT-FITNESS FOR ANY
+PARTICULAR PURPOSE; NOR DOES WROCŁAW UNIVERSITY OF TECHNOLOGY MAKE ANY
+REPRESENTATIONS OR WARRANTIES THAT THE USE OF THE LICENSED SOFTWARE, DATABASE
+OR DOCUMENTATION WILL NOT INFRINGE ANY THIRD PARTY PATENTS, COPYRIGHTS,
+TRADEMARKS OR OTHER RIGHTS.
+
+The name of Wrocław University of Technology may not be used in advertising or
+publicity pertaining to distribution of the software, the database or both.
+Title to copyright in this software, database and any associated documentation
+shall at all times remain with Wrocław University of Technology, and LICENSEE
+agrees to preserve this copyright.
diff --git a/MANIFEST.in b/MANIFEST.in index f696a4d..4ca8246 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1 +1,4 @@ -include README-pl-beta.txt +include COPYING +include COPYING.LESSER +include LICENSE-plWN.txt +include LICENSE-PWN.txt diff --git a/PKG-INFO b/PKG-INFO deleted file mode 100644 index 0598421..0000000 --- a/PKG-INFO +++ /dev/null @@ -1,10 +0,0 @@ -Metadata-Version: 1.0 -Name: PLWN_API -Version: 0.9 -Summary: Python API to access plWordNet lexicon -Home-page: UNKNOWN -Author: MichaÅ‚ KaliÅ„ski -Author-email: michal.kalinski@pwr.edu.pl -License: UNKNOWN -Description: UNKNOWN -Platform: UNKNOWN diff --git a/PLWN_API.egg-info/PKG-INFO b/PLWN_API.egg-info/PKG-INFO deleted file mode 100644 index c0d5f64..0000000 --- a/PLWN_API.egg-info/PKG-INFO +++ /dev/null @@ -1,10 +0,0 @@ -Metadata-Version: 1.0 -Name: PLWN-API -Version: 0.9 -Summary: Python API to access plWordNet lexicon -Home-page: UNKNOWN -Author: MichaÅ‚ KaliÅ„ski -Author-email: michal.kalinski@pwr.edu.pl -License: UNKNOWN -Description: UNKNOWN -Platform: UNKNOWN diff --git a/PLWN_API.egg-info/SOURCES.txt b/PLWN_API.egg-info/SOURCES.txt deleted file mode 100644 index 9d68e4c..0000000 --- a/PLWN_API.egg-info/SOURCES.txt +++ /dev/null @@ -1,28 +0,0 @@ -MANIFEST.in -README-pl-beta.txt -setup.py -PLWN_API.egg-info/PKG-INFO -PLWN_API.egg-info/SOURCES.txt -PLWN_API.egg-info/dependency_links.txt -PLWN_API.egg-info/requires.txt -PLWN_API.egg-info/top_level.txt -plwn/__init__.py -plwn/_loading.py -plwn/bases.py -plwn/enums.py -plwn/exceptions.py -plwn/relation_aliases.tsv -plwn/relresolver.py -plwn/readers/__init__.py -plwn/readers/comments.py -plwn/readers/nodes.py -plwn/readers/ubylmf.py -plwn/readers/wndb.py -plwn/readers/wnxml.py -plwn/storages/__init__.py -plwn/storages/objects.py -plwn/storages/sqlite.py -plwn/utils/__init__.py -plwn/utils/graphmlout.py -plwn/utils/sorting.py -plwn/utils/tupwrap.py \ No newline at end of file diff --git a/PLWN_API.egg-info/dependency_links.txt 
b/PLWN_API.egg-info/dependency_links.txt
deleted file mode 100644
index 8b13789..0000000
--- a/PLWN_API.egg-info/dependency_links.txt
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/PLWN_API.egg-info/requires.txt b/PLWN_API.egg-info/requires.txt
deleted file mode 100644
index 0cc144e..0000000
--- a/PLWN_API.egg-info/requires.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-six>=1.10
-enum34>=1.1.2
diff --git a/PLWN_API.egg-info/top_level.txt b/PLWN_API.egg-info/top_level.txt
deleted file mode 100644
index c72d30d..0000000
--- a/PLWN_API.egg-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-plwn
diff --git a/README.md b/README.md
index 298cac6..fed3cba 100644
--- a/README.md
+++ b/README.md
@@ -1,2 +1,80 @@
-# PLWN_API
+========
+PLWN API
+========
+PLWN API is a library for accessing the plWordNet lexicon in a Python program.
+
+
+Usage
+=====
+
+Access is provided using a PLWordNet object, with data loaded from the database
+dump.
+
+    >>> import plwn
+    >>> wn = plwn.load_default()
+
+Using that object, it's possible to obtain synset and lexical unit data.
+
+    >>> lex = wn.lexical_unit('pies', plwn.PoS.noun_pl, 2)
+    >>> print(lex)
+    pies.2(21:zw)
+    >>> print(lex.definition)
+    pies domowy - popularne zwierzę domowe, przyjaciel człowieka.
+
+
+Full documentation
+==================
+
+For description of loading plWordNet data:
+
+    $ pydoc plwn._loading
+
+For description of the PLWordNet class and others:
+
+    $ pydoc plwn.bases
+
+
+Creating API dumps from wordnet sql
+===================================
+
+Latest wordnet database dump can be obtained from
+http://ws.clarin-pl.eu/public/wordnet-work.LATEST.sql.gz
+
+It can be loaded using shell command:
+
+    $ mysql -e 'CREATE SCHEMA wordnet_new'  # For maintaining multiple versions.
+    $ mysql -D wordnet_new < wordnet-work.LATEST.sql.gz
+
+It is then recommended to run `clean_wndb.sql` script to remove any mistakes
+in an unlikely case that the dump contains some, such as invalid enum values
+or invalid foreign keys.
+ + $ mysql -D wordnet_new < clean_wndb.sql + +Then, edit connection string in storage-dumps if necessary according to sqlalchemy format. +Default values are all set to "wordnet", in the example DATABASE will be "wordnet_new". + + mysql+mysqldb://wordnet:wordnet@localhost/wordnet_new?charset=utf8 + +After that, the database can be read and saved into the API format. Only works in Python 2! + + >>> import sys; print(sys.version) + 2.7.12 + >>> import plwn + >>> api = plwn.read("connection.txt", "database", "plwn-new.db", "sqlite3") + +To load this version at a later date, use `plwn.load(path)` instead of `plwn.load_default()` + + >>> api = plwn.load("storage-dumps/plwn-new.db") + + +Licenses +======== + +The python software is provided on terms of the LGPL 3.0 license (see COPYING +and COPYING.LESSER). + +Lexicon data is provided on terms of the WordNet license (see LICENSE-PWN.txt) +for the original Princeton WordNet synsets and relations, and the plWordNet +license (see LICENSE-plWN.txt) for other entities. diff --git a/doc/Makefile b/doc/Makefile new file mode 100644 index 0000000..a5fcdfd --- /dev/null +++ b/doc/Makefile @@ -0,0 +1,193 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +# SPHINXOPTS = -D language=pl +SPHINXOPTS = +SPHINXBUILD = sphinx-build +PAPER = a4 +BUILDDIR = build + +# User-friendly check for sphinx-build +ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) +$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) +endif + +# Internal variables. 
+PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source +# the i18n builder cannot share the environment and doctrees with the others +I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source + +.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext + +help: + @echo "Please use \`make <target>' where <target> is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " applehelp to make an Apple Help Book" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" + @echo " text to make text files" + @echo " man to make manual pages" + @echo " texinfo to make Texinfo files" + @echo " info to make Texinfo files and run them through makeinfo" + @echo " gettext to make PO message catalogs" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " xml to make Docutils-native XML files" + @echo " pseudoxml to make pseudoxml-XML files for display purposes" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + @echo " coverage to run coverage check of the documentation (if enabled)" + +clean: + rm -rf $(BUILDDIR)/* + +html: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) 
$(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +singlehtml: + $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." + +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." + +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/PLWN_API.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/PLWN_API.qhc" + +applehelp: + $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp + @echo + @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." + @echo "N.B. You won't be able to view it unless you put it in" \ + "~/Library/Documentation/Help or install it in your application" \ + "bundle." + +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." + @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/PLWN_API" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/PLWN_API" + @echo "# devhelp" + +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. 
The epub file is in $(BUILDDIR)/epub." + +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." + +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +latexpdfja: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through platex and dvipdfmx..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." + +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." 
+ +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." + +coverage: + $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage + @echo "Testing of coverage in the sources finished, look at the " \ + "results in $(BUILDDIR)/coverage/python.txt." + +xml: + $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml + @echo + @echo "Build finished. The XML files are in $(BUILDDIR)/xml." + +pseudoxml: + $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml + @echo + @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." diff --git a/doc/asciio/skip-artificial-docstring.asciio b/doc/asciio/skip-artificial-docstring.asciio new file mode 100644 index 0000000000000000000000000000000000000000..086e71c75abf3b23eb1d54398bf49e3661838416 GIT binary patch literal 26108 zcmcG#1yEc~_cu7Wy9Ws}NFcbo6Fk^Ja0v|V4k5UP5Zo;|gS!mwF2S7ycb7m&cF6O* z@ArNGt=ihEtvz#ZpVNKXZtLmay>kwLmm`wWv*P2F)Me1rRT~(X2E6e0kMLh+0<7Ns z`P2F5ACdr=r2f9=0`Q>wm309CI6(libDR(WC4lL)WFSHUJAm*I;Y%&xFJb+4$Enc0 z0f7B&F;sxSKA;Qbn|I*U1Hy<JG;78V79qD?_moO<bd;}lekhj^bS)ALQ5w&;>2_nn zZn3~h(~${g+O5`&4{VHQIA-G2DgnHihQ--1S+v#j<sTTbnwfHgy9W5+cs=e9<JXw8 z9E|4D^4MINPp=N^Au~%KAG;bJZgFjuQ+=siI?Vc<aXL{OFR#gEZ(k~Q`jX~gx|OgC zpYjD_L%v+YX_Mjc6u^!5Yp$pedo_;4<5}66mFSC?b}MS|QGnZbSb&qje9PGtM<azp ziC6%cbt(LHuL>ewt^@<M3eVKkY~S?PSKHUtZ<kBc19)bXO;sY{O819o<MB|lLlBs( z5AEb<PzM6w0aKWAVX47dPl*w<TIqHPBPb>RG#Jc9&MB2)h=@=3@hL9NXZZYr3jjc% z{;Q3|NBfGFz>r=XSu_prLxA>n%El|2fnH=z9FWNU>d>;3l$l8i5fKZm=rcTAfhj|J zI!C%sx~Q~rFoU#&{Tt@uDKTTI6&lYs1*A;jsnkg9m(&j^x33C|k4#K$O1~zXr}>o= z$zj%@CF^PanZaUE{8qeLM5C=}r@}F;VPN`+RPmh#$W$}_bGswSgtx1%62C9eE(j#p 
zPEICZCtP9iG$wdfhyI{X@xH93Ni;(&vwH52nD8l>A*jIL841<Bsouvoymy`;i>Gzq znwf)<!JvwebTF+{2l4lhF8fq``|#XI_`q%u4M}!Qi49fv3Dw^2qp@q*_VG{ABC39^ zLJ%!PC;7Z*0gBSJ+?|&cErwaGSTdHpTuv`*5k{n+V~aAS6)!hpeph{5>-6D#u#;b~ zoq{9X(D(epGo#V8m~s0jzpI_aee=f@xo<S$D3~HP3%r)YcePJZlKfs&M&-W;#fz~a z89X<FoqW1(widd7pc&Zq?%G-vyot4Snqj013}@Y;{C46=7L1H?V_bfU$A{Ai7F<<P zt(pmJ1LG7d%==X*=1aZEIlI>3mO6Cp=dRAY0`K9r-8@VSg;%P7D}HO#rpVy3cpv}A zX8=BX$5aL%*C^KnmmG8#M?6a!B|06{1H|5Ae0&h>{9cgyF0Kw!ex|M^Au-$)e4<Ux zaa;B_sNH^zLewt1m=4KZN_<@jULHh4D{k-aU&z{6(YRvYxLlUqgZL)ZcYRm@v5--Y zBd(E>3iE_a^){;V#=1DZI^W&8_Z6A;SncS_IOA_e@h5%$-!|BaIJx#E)Hb<z(MTR% zz3guHou7VHd0ZWSm`)Mb2WP90#jWSd_rA_(j7?Qx`WCfTWLzU}s_<Jjd8nH@8ZU?= z2>epfd^93D78N(-<^4-<G?c4|24<AQhGIGflq)~~L(~9x4jBLdpWx6qFT6mjUjj%h znSG;`gDr;h_H1%<`(DMet9bg?`_4hlBsV7J8$4mC&PJZ%=hpSgo0wh)Lo)5dE#vH6 z!_#PQk2U`Z-#my9RMm8%j9@0k-LPR$@z!L2oS)@2aLhrKbsQ6<tFCz7cw!${1Esw* z94}f$hw$TTX{^qG-Vy1Nlh?A<XfBR;CO9+8BHYey;ee!v(<qPp)NJpCXpP;PX7EtH z+TBR4miE-sJM+UI^2O{HRoUBFj~46dm7b(d!%1k;(aNs~nznrMth6*Fcx>%nKa;U{ zXF49Q#8)E1A0XFFv2m!*URzf+T!xOXb$OVa6uP=Y*0wEQv3irea5<6F6HI5zQyJH6 zZ}P-ao}qQ+b~{pKaVDJTheR(gG=*&M?j9_bxHIl~9JQdE-nusUNn6r@>6(zx*2>qM z4jsyD`q0{-DDGMiU!4|rPr)?$xwXEL_T`oaR*^w1uH=>|^_GU#MshnDk&$Klo#oZK z=2Bb2?`DkzApHzz8N<x7qOJPWmDI}e?3GD~EkD&U*x|%s_w@91?4ZR}ftV=FT30x4 zSqW!JSQNVG6??prY?+VAtvx4UrexKxxIFvSTtK+ayTx0uj%SY(bedk2-StUXh6wt7 zxjg)i;$GnGC?(b0rMuEBE#pq#_;QY}BYuiU-kz<=S3XEak-6$A*s)6lbz~x~N?B>3 zU6bF_C_}YfQG0E7pxw)roet?;v?aZbD~87%;+i&jVVc0@jYe+>nCV+Xd!17gbKjgc zCBKrx`88?v5yhgYj;LdWn#O8T?rX~pw9O4l%j%R2`@6x09anA)Eq!Zr@Uh?Jo2l>( z*?xDArahxi9dmkT93$JHN-&uL32{rRg$wfnH)f>{k!@O-4sC@i$H-=L!&om%oHk77 zYUuf`!gq|T<*yd0|2a|Avo@(;Z`U+wsLWuDtC@`zmS`JP8y{jpRu-zzB5MWl*U=Vw zTq-@t(D5Y9c<Vr=?e<wC<hXoVU}f1I!-FtdW|gSSbbP%vJ!;A%N;A#7gMu^trcRhe zm{XTmm0=uAqD!Ld?G3zT<To&YF1iXkr#A9UC$F8J2+03PbBF)IyM#f1(BcO9rm(ht zpr9zoSE5na<W;G+#&PAr#>adIo=uuMS9Md6)^!fFAG6KS<)kO^q;E~HEQ?r*Tmrcl zg1YKF^=`-uOPaNISuoAj2BND#0=%;z*{7{5iU*4F+R1Kl>BEoGAAhQYE16zJsam*U 
z1F)2_haUtFV+Wn=3Phu*??{44%r4YRLX!-ms)E(uolHD^cdSAqEBYma#F>=J#eZ~( z_yz|Dj`12>B+=y2SMrte?D0?mHRW#xoKh^|T){4B8P9L^vZO#-$$l;WJPuKNtBq6t zqm-8yc~x`29sV3V{t3Ux`ZEfSP&5x#`X&DJcjx5AjOF$3I@4B?vd5S9^p+NMq5`8$ zI_%J2ohe4Dr+<oQX(?9){BI*7!abcHe$djaC3*Jt$=;#s14*yc$c?FzPAG6pU%$~M z!s4lR%J(M|MM5mO0hz1s(>V3rzE8B={jr83h%Mo;0uZ1!n?c?D6o{IqrW_n3{=M^l zlzUKzXpcA4J=|*#p9AerU($jv=O`yMs$}vZWub4vuWatG@+6WJ@lL_ei<_$Bs8Am7 z_}*9a_nH6P+5Io)i`DFS_`Qr~L*2b>@8_$8`i`<78l-@jIK~_X_ZBMvkn7NWVGij@ z)yCvVdS|NG4GQHbCpCfelwtBEFs)|3U)&;K{lmx3F3z8}E7ChgW#SzGo4P(9_fB8N z-5mQWie`4S)!mA%h|>wY^xp}8>9G0*(y)1dy(OxdqCL=|h+dBW?)yO>&ZdrH(r=@2 zUq-6VD*hS*P#jy;;4)x)>FCC3#m&V?huRRjEfU@3qyX%B9O0l2&JM7!keL+_tBGLB z9uw-dXU3Hzwxc&9ZpeDUp@N+k*=#t!SA-}WnW8%>hQmcY*lrTO&|w(+F1Q-y+l~Sf zE>3-9z<0;)%!~YY;}J1B!Y(nQFTPZ&rZP2XF;DoktuQ2Tw;ODmvBvtPTUm{<(Pnq* z6!$Jk(lS;xcJO;`Y&lYhHdMvV^W7XRmo1>`kJ_FYiBgE1HnD;#7_*XHe%p_%#kLH5 z+{*$T^Vuzprmz{bxSZdAG`56(FL&#dTdV$9@l>1QXldc+9rZ9S97yUe5`3$qV7@S1 zVwF1L&C2emtn-xr*5b0<G?J}xa`kwkPdR{%psuGy%Dt>e@qmU05xv%eCkVMD9~n$} z*H;Q(CO=QlfRAy!?<89K!%!X9>M{$yA!`b+#$dmwTStX<2RBXwGdK)c;gnH7T+i2~ z!|X~-IOz>#MK9^opQ&TgP)5Fj+#jF0c4JZ$ef(chl26P=9J2yJrJT%BeX3Gk(Q<=i z1_JsgOZAGn&V`;D)KZH1IU@~K83j3!KuG9)(tS1Ug+0$!(h0<;K}X~O4~_Y7PH$Gb zp<J$l_T}qJzg!OwM-YJNh%0VO!lpP*SGSM`Q#q0uqnyxc^=qI2t6sJu!cTQ+gk)(T zdZZ(}$ez6l-A+u3_g^yOMTh6b*7dlXq~v#?G}>1UU!Bc*TXA}UHOCESxuV+4tUL^i zA8T$K4oHg~_u<w0@If2%ZAA{`Dk4Kf7FD<Xoz{S21PT37Rq^29(s4-_mL8T}iGb7h zy{kSM7|D%m+)p=O^h^%+`|x*J%w_=Alx13zXW~qG;Uxqa<oGgDkZqE1!7PM@Yt+hl zGc0!wYptN>?Rs!mxsBI{_cOM7D<4Ee#J}IdhX3iE+~*fOR@3?aWki4JB~E$PSFL!3 z%ss*7`G%T@4wj9J`7z&xvGwj_s=<pSv6~F`%Cg^;oj#XV;^O|T+b`kbafSUeJBu?; zuV=pBe<Zm!iDwE7!W6HZq&rtBM!4%~X46lZU}U#jO3Vo!{El=<^i_12sWP2?1zLH< zlu?|VhY;T_(jZZt6J+9tNA_?L<BOxfw>I--FyTu^y1Lv86G&HA{o9cH5K~k?2o|9R zlU7hcGZC?pf;LhQZTEXTJ(^ekdwXjBybt{k#0!$v)*s!^UK3)5o?@8wwz1*tOVe0O zCe}A7DZTlXsMeU<H7&UYl|}9^o1q6_;sHB30C~;7yp0_uY}$<*Hpcg=_Qc<Y`~jbh z-aK$jUOv^Cmi+c-_~uquo73dEckkCh0)_r|#=|{5{UaX{;pzDy?|IF1@9lf@xhKs6 
z`qf9ScPyDMk|J1z4VzGC$YK7MVI7B&XYuGl4(JGG9o!vTtEPSDsf?1v4>G5~d#HcH zzt8HV9PP11Kf=D~0h$opD;WA655B&q=;&E)CQ~Z)*{!auKBIJ|17t6;RNlUyS>eNa z%6VHnb8zzDCzhj<T`{~VVQ}SX$UP}?^?aE~9t$gwLgE`Ii8+Qp9UNhjSAT3&EPoYh z(u)QPBP=I+r-|MjhF;EG-D}PDd3ZoHhLdaKw#M80XWJy_E|tj@!WfFDr|Y*#-Y={l zxN{PYs@gN(tlVwIq~=m!7~z!VH+riY)Z1?7Lb~Z=DDMNWSqc?@e{6>>j4$*5T^Pgs z9^F+1*s=7feU$@T6d07Y)dLYJI1$Ka8KiAjTYu0@=PA4oM9Xa=5uuiDPIJ$^spWK} zmvc~dk97z9Bn<Ql@B=ENVCXMREc5Q#$vCvExcQ_Oj<C?9w{US<Gjsc};bapFaHGe) zedC`4oq;WS;^<yFJ!cqCpsI)|+pVGoBaYBE_l~*mou2v?qaa&_J&VlF^)=!hj7-Zz z?>VDh@OWm6%g$&DJOA86r%u_*KTF;R&)X5h_N=6{F`c%VX1s9CA#wBiWR(x;VUf|r z@!xLmWc>}S4>~~6ob>Zc4{K?QAtY#e)s<>lHV5&o@+xrf<!LI^fe@zdv~a(RH$=J} zHM;U&ap5BI_)_2yy)-W$?#obla1#vvd^t`8^<$ijYuW2Hg|P!5P%>3z1F4}AOr97p z@I*5hBUIynL;tHaZG}RlmNu;m!8n+wYE%^2rPq)J5(keDB7eH6@M%@J1SY9<$bLx0 z^J+|g?{dJ(Q>3dwiw~MnoX=~hFyon^x%lq1oLigK53Ye*Oa+#(RjcRAVxpx_m;T5V z|4hgs##~ot^ueKb@Grsnvo%7ocql}jGG1zGxQls0r^xfb@ROefx;4NGz+{X5U1h|v zFxzQJ{!ojzv*mvCCc`8yW_mQ!dm<T(sEe~J&$|avB2u_A$%-TtYzkpv+GnBzmwK!J zUAfv}E0<7$33+KmvBt8?Yl)Dc6_t@X3v4h8L6(mS*Qm$IO5x{0W;V>4((e~BM|$%s zYcTrwcfs=f-^f4sKJrP5I$oh=!nL8o_Wjm&`)|FaFFrprzD+-pY>ytqKT-OdUGuz5 z8C?45C)6?V2YUF}aW;8yJ-7W4LAUK`suCl4>nhV1I9BkBD(3^vY(!2JP@Db2>Zh2# zEosZ8+TzRlsUK9ke_T4F9@!HyyTtrO+rgrDWAasx)oa7^e^3hAIx$cbA5UuiBNrZn z5Z7@-qZwz6t~IwH2%Gn`9@YT@(znU}@-H_UJ3P5}MVfYpAJ~uUPY1h25;Ra2@ccs# z#U+D>+@b?XC9m%<2{MgogAcDtkOE`d*yBIsl5(`zDoOQs%KWTwCT8PmXxK#k140fh zM8@z{gOa$Yd%Ujk)hYZy-Jn|{{?$jA=vUq<b%7o+>!;e~Z^zUbOdJI0yVbZkYOika z@MsXRAE`~vBY#9glBGsu`y?}>IjK!7Ku+iytKXk)xt&7@=mdgj_3`D9aHExgUFH7( zEcgn*I7u=hE7USId>KRqfH|@(F(MK$bXp_y6Ch|nfi~bd9$b=cya`#D$+P^IsQ+Pz z|FC~uR2YW?0N??L)G+o;hzbcX7k=45c_@tw*cT7%=H}v3kN{9WNBx_an)Er|-*f-~ zD)K)vHw&`_B9<O7-WmuTN()hl&;1(?0Fauap#jhU0L<r704D#2{>vn&|KooyJpcfg zmck{cER%+pK>r`b|DEGfO8@}W|Caxcn&#hF9M1||R5+LxrZ1C#Hv!;2O8{I1UT#`2 zm{x%a<_&<|&w&i4e}%se4Isc2FwBhyKn4SNtpEV9`8Va<f7JnhIc6*YFdkpI>)*it z!KnZG;Q~xdo|8+!C2&92DJ>0+0dZ_d0ah<~06z(X{P+NA9A+tIr(;w69H@L8$VaLq 
z@i!$5AuE%>H}3*q4s-q+@c)^?1D~^iwfbNB+ff+v;CeRG)ZFI~Sf$`nVLag313-jz zP=ExWD*z4^8Sc5S|0TG8IXvQX+dj7^n?)l0E)xLbZ!RzDVGa4e5rS@{1k6hfE0#Ki zYxv)oi2z(!X|T3UG!FnMV0OXe{}AF>!gCk>L$+KaB%C^&67bwy68}^T!5cGvEREr9 z=^Yn<>^`T4EfqH;BrYx_2cFtEtou=bk}<Uvoe!t0_RG0jX3@vkT=n~xn_Z4HzxVRg zyEgsbi_hBkUA%OzIcomio7d9tg-Br<WF~jA@k{(i1y|kA+(iEbC(7O{@kKROIK*Gn zL7yI}slPM<M9)6m-jD6=e46jng<ekFOe&9!SwaP?gScp$qsS!?Rq~3aRcLdM9xwQa zMNW<N&fYEBkLEM4=^#xkK<e1;<V2tjo$9AirH3bi;p~Q?-%byqwlPV^-P1`m__aOC z1KU4Vr}{xjgjE<0^Gh%+hn>Dm`!Ev*@DModuFt2BHlt^?kPGoiw`*N;Wo@r2#!0nI z9tTNKAiK}f&3@k=+gOPL3ZuaTfuyWXoGIjM&=_X>zSNdM-Gm2DTsQtt2CR@w7O8~< z(y{1fsBn6EdqYhzTlV@Y#23sXp&pvIXpPtCieOh4IsZDSFMyb8VPdX1L!j9x!Ud5A z5lMzdQUX!}pht%bIc!8!D>DtyH^x4avu^zh3{kXAW@Px*K%vkEXkDx7>Di?1+M0gH zZF`ry#IdLT((;_cEq7vyk(^v6R6RKc{HY|tRqM=tt;_0vf<*It=b4$SYba)4XZKKm zRM=kUfWSSDz{!L&PnPN;DwT(-A-nC@eIA=%g&Q&|DOKj|-v#ogn4ChA-rC%;Rqk;E zTcOQqI#3<H!mScgO0^Nay*dZs_Kq75iY9oPUF{i1w)vf`ex-NC7(_K^kk+v~zcZWR z`rG*QSvSsL7;rR)!-wihl+xlWb|SET*N`;<+tHrEBg1pE$QnIvTH=H=4C~iiCG%^) zcAltQQOl62;RG3E<OV#O9DqqqGNd^oDDr-LVy+p2AEhlTbq~*jG3Of8NaL3|HHzmN z)?HUc*H#wDQPwkWEw*>ZW-|MAT5ZyO;Vrpr#>VZ&#=*Y9!Py&FCvx6jFdCK`*iZz| zopK6k>Ta$%y(2zV_Kln6B<uGTu9dX>B&s!6ffOCtCVOAO%p=i9UCMcasNGkuY{rZ{ zZey|44-kTr`nU8i%^b%KmDkoh?1g^*KSELJ@0_ChUuZWqIuKj%o6hsCSgMpeEEsy^ z)wAlTMYhq#1@QuzwkjM(4ICIT?R}$y5zGs|ohmV-VCgA%BogX~;Uew}@KPx-#;N1U z;UH-8zStdfC|$QD=d;A$L{!$2Xjw^|mDIk<q}bfHR2g1Ws%X3`wuS71iAx&45q&@i zeuFf&`FI?_1n?4&q}U9fBf`<1bMU~j8yib2h^~}<#-+NQ{|jdAI6vc9|6p7gpI~s+ zKTsC$f*<oj+rKE6lfi$%tQLd6z}R1mE9l2FF!ndYk+TH;beE`U(AD?0HY%g*+57P_ zXz8o^N2klWKAS{F3vguANMYkC@%AgyFEZvSbBnHN3!3UiXCZl9#qI0&<o%!1CW^CC z+cSy@!kRG3KUJQ|-T4A%<=11HTg(O>tl55~lMY0zqLSI~p3a2Cyh>4Hqce=HHOfd) zOln+#wttIHbg@?}RC08c9xk}<pSSCeASd!IvZtFrSKgC)Et;<$c&cohp%25kd?V%z z)@}^mfEjBcM~Y>E;M0afekFA$sH+^ivfaD+=%QJYxkYyp!xUC2QS{!soV>FRq9C_* z*-%dFu{%-T1i$f?d@{+o@~M2A<eri734uV(MWW@{ZiLLu>Fz8=sRmq+1?hj0FMXnw zMLA;O((^sQG3(8D+ACGB2<-QcZr~VfxR?6#Rs3nmYp;bD2mL&+;!r0vwNDy_Z_-aT 
z2K~t$Gp)<D?+1Q~&`{qBQx2vtX%GV&=W6dG?)m&LXST0su=q@k&v}@rzoIhv$A7t0 z&iie_iW|>Kw_MKA`E@SkRq_}m;eKbo>A8#3XnGQ8csO~SCqKcYXY^EoA?IlU)1;@q z*Zs;`urtGBj3J|XRI0P)DQYynD3f0+%U@sh>5tLJpTg(P3e)-#{{3c{;<gDMXWeyI z7G2A_I<DaC28~XQ;!k}EsK3F-1g}`OE5oB|IO;4de2IFREP|POTk;-Xo;a-Cv~~L2 zQ>ji=@ha{X-*9CLNBEYed}13g{+4r@Im5nwW&NA0j8sMR&<*5X#gC$Z7ml8QMu$XL zs2E+{lcs8X+RI#q{jxNHwvCc#^>Hn2d*{(#eDY;`#mBQz4@hVDy~Ft*!pjR8yER)! zJEqk!-J~QluG=P8J`WROZDoeq7%RwFe~mW}Hwzt=(XsL_#fap5A0>YsCoeLX*r<C= zEwN;A-h30vc6dCdq+RE<tB~}CSM9x-wc7p5r#}qp_cB4NL$Z4xOR(SeU!4mMKO$e* zL#mo82VT}+UI=FC_%1_Ek+}FhwLum}!NOS`cdm7HfVwIzPj<%KRqHd^4d2AaNSP?I zEQ;-K?G=!TTLvf7{oP}1Cw(HnFYB8W1bAHnqT~mu8Nu8bE&HN}A%$`Sb&N<$o;qMN zy?X9{>f>{>8YH|aIDyvg_VO$ip{vCpZf4M!In=66oKUujZKwa82gB!3l1hOEO6%+Z z)o2$k<bJXR{yykvE$xaY5o?2)&0HMuN^>c$Jn-riRYM^vA_DmJ(M&95YwepaW8c;d zg+2*1RWZfsV(>$81R-l<IAeXhlG7-I@-S;JT45w8jMP%hy|UF9MQ9_3qQkN6tm2y| z>q^$0ps!FWE`~h-!#+TQzIF-#d&$%!A_P71J=1L%UvC*6f_@sUA1O_GgOsO$7~~}Y zoN2NXh^jrr<JCTV=S*ZVOD))~o_bW}WG-`1rsHsDm~9H_jw2|7jP3!pL>Mz$BS)Yk zL!Z|c$D_#bw2nM#>D(L!?9>o4dotEqgI%r|nWG4FQ`8)c%y*oy(VH_$wakhZKk<W{ z<`naZ`1*8P0Zx8}uE*i|nE_qodXQo%g+=RC`|pa3rN0MO-A*#RPM3NgXL6vOoj)42 zM@GiIC1!1*(2>@!mJX0J-Zr<18J(zTD+t?-Hpu?;uz7?u*<#n$>*Re>1ODb%PuY+# z1V6Ew@x`2ZI$^nkkz}bY_VUs)s9<|Ux3W&LZ+LPtr+i>Mr&0Xv>2E@wC%%PGL+D8j z6~qlC*3R}{x#zM6*aEFSGUs*53jWIaoinoz=l8a@%E-~Ny<HRey)D;SgS&p0;A0;) z+zY>!jP@(9gRG(|BGT0}6grOaGb4{9*8t2jpG&?jik;7np8P%!c!)vXy_5JR7^keA z^C&14;%U$I?EDZ68|a0B0`q0SslxXzmsm>gJmx39RWLH>WZx#K+&fX-7nNrZo~Yn7 z?+Ny1lU+N9=snyhLyi3hj~`^lbUp}o22w_0&fJZrwGJZOqOKv8YKJDV5*I4yYSb2# zmuhHhmFgM`Sq%GL%(q^d4pMn#y9<5s^UI@?+i<oU3_!-xDe;jr-w^u(j1UPzviOI} zlAb9!fPQ3D0)Tkex8Ubq`MBQfT={bkA{M?di_l&<@u-)tc~tc}V{hRru6euyFo1-G z6#$27P!2PYF*MZ+=HN;--r{jHBq*e56ls-e%pbwR05)1<rJBrI>EGVXydG0|t@3(+ z4sagaMHdjW;oDwPQk1Lx;V1F{$kMZkgha)3kR0ttUEWAM$b;0B9;5|9?U(wB1b7E0 zyENa}knPIC6gKQQN^fAp*5KGs<vmuOM~?YI9zcx59)=o?N|VS57xB6aL1Vt*#S{R} zA{Ta!Ot{T{-23>!O%yl74DN1WnH2u2eSPYu;alyCNf9N%HmkcuxKe8X;5Gf8BBXq5 
zHj)dd5GSU@r6dZEDuqfNgi6C1`t8wBUjMikS+`GUBQ9yCwe?9zC_EV5rBv|!u}mDI zWDxZwYA`Yu2z0|%{~tU5|KzAr|LX(*|F8IbYO&G#KjZ!X?}IBVb^A8uTU>ERa{u$t zpYA)Xcbv(tj(vTp;Lm*B_iq7o9(@<QTUoXpqN`f4dI#v$OTZ)2m#KvSsP$lv_djp_ zLqphMQm@{(xRYDxSN;YjSZIX%YJc~AWxIMi)h3>B!+TG5!O8xQ$o2U*F$RTfv2M+z zOrd5Bv}TAgZ+z<Zl6<$pn$Z!D=;fg1G9Eb#6)KM=%iM*YO|uf}U_Fh{#uYVKnSYC2 zy<6|%^(?3d1iX}DoX)(MFc0yxWO<q>#?Ih8R@YwfW$%CL`*K?A)A{DuLtJ_2anN>0 ztH1ar1*EPi_ea&tS5Bc+$761FRw{LEtbNMV5J9!^&aEtvbi%=4cEADAkg$&OR3vvE zl~ZW5{_86pof?RFL6a6e-}P%>6%c*cF38Rd56gT_SuzQX*6}oqczya4yAnq3yobGI z{IQ6c99^biq^6poRUgUuP8XqRVLMCABg%DN8ww*Dx;#mijfo^Or>U<;i97YzmUR>K zBQ?MHws(4w>PXB~K-Ypd<#B*=)9WXP9W90!(s4u<?F(65=h`KKnt7OPv&4z?EG)v5 zn)LLoUtcwSeZOo|ipa4#QV}ghQ=U;8Aqq`B4syI>#7S~DB2SKLHjapD_Imifo0%9j zn%we;|D}Zs0OxX~Bxy4Rpk{@QJXSqc5D0{fp85qclYKTdnN*CfwY}}<aYYMlCkM@h zbNl#F7FTy!1<Smkp2kY86x2qL36`Og5qo{qiT&itJ>hwH<5|AY(fiQ0mVOd;`(;u! zqkv^$+E|tF&8agTvzgea2>`ipJ#PvCf24gR0aszOW(Ba-R;aMm!&=}VhJ6JQg<(%f zJ1xyyq0d<5&wtdZbZ9oNbWi=8?hKD)bz;79Ot;hzZ~q=_a-WvGVj!KVjEMX8lWnj) zY;(*1J6RBHc=o=x!$gWJpEdru^%Iz;C7+{-2TYt!ZW~5RamD3*vC+}t$JQwm9hE4S zvP=urT{~<#QzVDqLAiD1&>}eSqvLr|!C_nOUKO@@rkf0_))at(X*mFkY33u?d^G`q zZH1nN5Rg_9x=|(kz;aZ_)-W_=?AFs6cPD|Cxi)rT)RLmNZH~14o8;4eXVYqeKmst1 z;of=RK3;jagF?-y!J!_2pf}e#1psXMu0eO51*Qx#KZp=d!7uA8S-Vz>`rO{`t!!?r z)UjDso7+uPvA4;J+?++-D!1fF#?=XRP^6Ou+;msi$~9EHnED{_|9+VeZS%far~<G1 zH4GMcZMC0eKwr`w(z3Jrtx++ZC|&G|D(#GRZD$8gxKO|S7yx)(55Qmrz>iA6Tma05 z5PjgyeJ(8flzb!n`>fdqV&aeQbG{tXvbEWEhzUf$1^*K8x#(MHYKBF^Qc;-se3w5s ziH#>vJXN*{ZCD>v^=Wkrh4^V)=hK9Z_6pe)B#=bqRrR<!M}8OO7X6O)b-G9Lx7y>= z;|&RtV&J{)iSZZXofcuX+QTyzjQPepv(hy>=!?rc9omBU^V8kab(>W^DzL+Jp`)XR z`^<EMD;wW=>yS_)J#!fsuX18#K5M==gQaT{c|+t}(VB)Aa}$GJM*05oH&;rmNlV6^ z(U5U(!%>o0q<+2my5;$_1_z&0MsRz2vW;PuT(cj2?QQnRG~`?6-m8g>V*O-W#@4K3 zkZ_g2-TQ`?^-5U<`kjH(iVfGy-rjb<Ss$v+wKKKmriRrsOGD1Q$JGUX%Jme(;MuRQ zK)P`d^Fntrd-7p!;Q+PI%YBD44y7VQ`U#@E;M%WtUsjH;8k{Y4vj{>(QqH;>kJ@9h z>qMVD)0c?HGH;fEc{FzL`{`38tnj<~?PS77MxxGsMmL32s^)J77qX5$>KYT>1>+97 
z@PP3qRNs`C5K##hgswVu7Ak)-UsE{_X+18U`O0*E!V#f@?bY$9o97gJsK^RHd^FNI zkFCA-qy85gQA*nw`e$$2H?rFt?gpvG&-7<+r(j!E?G!a4&Fgylcon6Nrj06{$tt4{ zUTy0;wPzbX?+?K;o8ucfbiKQGTr7t^kk1wyC}KvNxTuKmaDV{$zpe!JzuvzBYx0sk z?8^QH!{BbuN;m4pzIe301U-;Gq{VRn?wWdm*D_xWt*Qdes8*gw+yA25EteJkY6Rca z#!<V*F>N?*Hn0SP1#F5puB64HBBJ8cEv;UhvRr$kbBE7|$RNxg6*uk09fa~5RcET6 zv^nqT2gtrd1O>>9xBc|BEkaDaIld*02=H0IKB-K_f8TTaHV%d$!j)P*TMGcBn+}Uc z+Ze#}9Dogd$I0UC-E~ph3E|y`qe%N~Z|9Aan4<Nzqc+dU7dH}rW5Fg2thL|)_X>z9 z2WK%l!5h+dbVd1f?ffe^YvJqE3uGc)QbL1_ZA=tW{#L#ruZLbE*9;kElM;<|h9xnl zmZTRw9{$qn&qzz-W6pP`X@X*(%&Jy2SB!z;cUNb@D8#jLBg=hv%zMo9r@Nyb%5yBA z;-W4&l@pshEx}rS1<K4TP5f-*E1&b7-APpDNgxOfUZHRHhNZe3gdT*1_=gtqPIcM> z*hrB(9?v>{zF5<-*RzZ>REOXfY7!BEl8hs(CT}o(?x5|b<s?pKwLtsRUZ^`knFEU} zk$FoaW0P>+*O>RS!YobtNq(LQ4WHUqmmG$VNp&3QfHv*Xn)Ga^$Hyb7iIJiD9_bC* zHmmmJf&Zz9pL#WW=(t|w@aG}%xM63KEN2!i2gz!e4{)2i%5s_boZZc$*PuB${E+TJ z<lH;j$u~kad6;P&roJm5v+zAc)ib}S{>XClXzFdxI`^1ld*W%6MZ}ea84?YY*^!ja z({anKx=gBBB~$sYRFh4*x0wsI${~=ltqNBh5`(4Liv{vSN<*)@(r;di0IKFQ$f?m9 z^u*7|FWDDP0x?#?=V_mZZg5bH^e3VDhu3-X37klq(+llaqC@58N6+7USaPwD5k*H0 zIF*}gb2<91-|^GdB*0ip3sE!~2qYDd7DsKN4oo>n;QYb<@y(>c7dWl~RbvqwZ}5|s z_mQC;E6jM2L}W%ggA;!1sJR+7Agwo3T1ph1mX-k2=CU+IkvwJ>+d6A!S?Dx0zky|* zBh1$JCG)`8h^)*TAeue5GY_ug=1mPic`@HqVYA}9zkX-G-UWlaVN+T``?N#oB>+|j z+4c1$pc6X8m7B>4OM&3>DfVh-#F+U_7lXO{OHr@4WsA0x1})sGzVCjvFn|$ZuQZa< zwNT+cQRom0lt^H{`|;5K=}=`3_{eH|T(g?2D)R2~o%6zMkl{X76E~5LcLKL>g5G>G zGo&B1Za%iYJ>w9M1_G?O#VW=t6yUqTD<aAS{IL<jbs|(0yU;hdH*3^M2IYN=y(&_^ z%92c|FB2MxVx8gD!O^Ti0~_tOXL*I_gKv?vQyErIv?Hq_4ZG$J$LNtm1g$@2hZXPm zJS>zI*O~-Pc4QX$Ajf6ts>kyyC8&KnqSk~KC3<~FIJ!DK<sku1$ct;GO%9(I96SB8 zwPix2KxEPZn;3cXwtB3#2DdU=Qtmc$MX<H~a)yihMcc0(!_(9zzB9g?{&|XgPqhI? 
zGyJjk@%NTSG{lPxx3V9zu_h8#`%~Pf`G|Ckz;8TFbi9Z4;XEs0v#qXWX(@@5#p5tp zUQnG63j$@{O@Q(?9>Q}9Dmm}HZ!e7_(4B`GMj;cz`@eg_{zzcB{?M|=Z)|KhuD@sS zbHm_b_o~us%NPw|nCQ0uL<70_L%=`FoUI<Ut@gUWT;qy48R%WC5_j@**CHf=wBvOQ z2u-?!G$yJf6ycZ<b1B^y6kFN@tq^mG0~Fz`5CN=!M0ho<Gv6a_?uwhKe*@_O0f+^- z?hA&tdh_?N^9wZ~2wPz5v#s6+Z0f)+H0pSXd;TA-47R&98!5b3yj&KiHBeUr&y|l& z?Pm*O0RZ8~xxYVMdnOc-s|8(dp^27#_y9scE)swVP68dE1|Q%$DGVC7Lwok1*@wda zqkyY8vw_Zc{Syc6*+zq<;BP<C(O-lmhCMEved7#wjQ5`2d{aUI0?cM>!UvnyK0hxo zaA1Xin+C2cUXz;%8;KhaAJ8^wS&2D*bP)M@G-EVXalJK<U_zU@?_C*@83fXrsXR84 z@1i`&{CQX)Ee1p$@664c-jc>t4RBDmR(ut(?0_HgH9)v7i7F=Aa2k)#ufp8Usv(`0 zVa!`kfnNG^AvbrX0yj62iz)-ytWs4VEft|}OE(oF!xOI%*tMbpOaSoBC;$fqU}6eX zXsbrFsctF5)Yq1ejnVn|c)W&cAYMTSh1|X%-n21Yf#I+8J`=0{n}V%9--<)PCInIt zuj~pdh$~=I;Z<6q-~=#nY=mMD?W)m5*tUh-VX`sKxxVSW{Sv-++<^kHD!}<UBLNf& z=RxxIyY>lgSc|C&Y{43f;XZ+z$cPag0)e#2f{YmPKwvXknsZ+xyK93I#q}`{o8Ky5 z-~te1rOXpHvRkhYz0;H@8kT)LCyl6KnLHDmebYQNJpnfTl@ov&uqCXT2@sEpD8K?f zJ*IyBECQggO(-qxK0%*Bq?W2DhneaV6?_*Ob$|p;Hq_^O(P!eb$NM9Gp-Cdxq2a%z z1=13=ZD|pT2b&LbDNKq4px6j}YTFUM%e;3LNhqb2r^ZF_UF_%NR@@c7Fv5rRwf7?s z6&NoG#FeBDVtS%FGu?i5*jX6wa7o#dLw>Z7{R0>s%fSucLIzM96OiC`{UE4^i!C7- zQV+YZJ@Ml1p(^~LOJeLhTI_jDr1KUrKQUWt7xus;sg*{y<>Xqmb?){;zUhad+YKqW zbjYe?;Wd)7$@3N`409c{QSHN;%w`~?ti@C=NYc){g=teoIjenFC^7ies>*(#zd&j% zE<6C9e;@j)7D|m`>O=xy1E99<v>v4|TBtepSJkL?Y_>ic847oHpqCW~sgJy6c6DQL z<>JO>PD%e!ZBL(+QBSoxH(%i{b*VS{Wyl6y@P*rQ=EMXFp*|>OS((=!1s;R#j$XMm z5|r*~W-<cNNzqLT(Vw3uZgh|t(@iaQXK4|n#8fdmO0{Q6Z>kf#iy9o%Yrkl}tMu7w zw;jwll7Ri$01h^1=qUxWj_0Khjf6|Op=;^QJ}EJG`v3rL0Dy{jd=^~!<1p@UY&jC; z#j0L`PhB5ulEo`f0{{Z<8(D^~d)fC}6L$w&s)A?Qo;EA9ZLWRsl5`<KsKDR}jN@-> zoSKW?`yUNQW0p@zOOHja#VYp%P|Bb4_&c27$3D4mqxkYDp1AGV01ZFS@F*_Ngm@Oj zOZ-*C99^rwI7i5T{LhxszXU%Fdw={aV*+TxU<eXSGH8^Cxv;ALL%1O{FzDeQhkn!) 
zRq8qTdGNph9ROy(pwo$?Hrw1Z<aoWeA)lDFEWf)e2kdHTwuNo9rqhcck&6_7V8=lU z7XT;LVwl}OK+&<Lh3$2Hjus(5A+%?y8M{kvU-#a$+GWd4f?_*@GAb*H{>MZQ$WY#j z6i^MY4?}>xJI?1{(r}nNrq_BS-y?{ONlF)l`LVfLXQ=Y?k`7LtYIN{wbhjt-_wAWH zR6KmPgH~kvIYX^@L(qbvR8Vj3&UfFA1+<%mxrN@FI(?m%B&mJUq!(p!c2cv~^Sef^ z<7G1}ERNiEw!TI9?#=Y5qr<_zx(@CRyjrJqayF42N`>*eT7Bhh!ZjQg4x*ZG`HFfx z9qEz6Tqx&9Qlr8!`s1YD^^B(_DQ(AecR3+-!Epd680&bKJVtLNuzR}R!I{7w1VeRR zUvGC*<gAy-D^p2ulpqP*SY^poV2B>6%4_s#9W@kkq;h(yPn02bJu<Q~w(?^AASBBH zP5LTpJe5CdxJlG-0FpItH2_#({>f`gKUNy4>-9WpSA7aJ$4jeIYyMV=X3ef8!l~P< zmgAO4hBP`YHE45X|4Z$4+i-(n;^8aQGb7$XQQL~EE&Z)D-`YDXYKcI2ef!2)++l&O zPZJl0cUe~|<R?eI>0>(17OBp<f*^9d8W?=qg{?JV?QCQFN`9&D)mS+juZqPhGJy!p zV0h{v*hr`$!&LyrD)%W%(u;!&gei~8oqaCeyT3T0afZQ{>_)HcbQwbuW{MaXV4v&! z9Xn860%3xfMBP>S)#kLJEU&zE5l7d+wRZh7cPo_q+4yh6@oQ@4KbR31@zlr!GWOc5 z@~6Ld)H~w5)eB>puA7F5*x3z!E-0k}Q31mP`WB7DY1WLmtiT^g1(_0EWY3{}I^Q~M zV7m%nnBk;$CCzGuwE%$QT3<Z%*i`wl*1bMHEBvJ0>5fs>P56cp?*m-sTh=y(v;Zz= zBDggB`alLp-^N3*okYg^&-sQ)0zq6x0=@aU^@1N@6U<Cm2{0^AOA7Z2X>Lv?Y?N+6 zO|dT?u14s$iZk5Yf<y;vekyk}SxuI_Z)P*4IY#p1$-+RC(i{O2TqSca^5dPrxX23q znk?=<s7cY<^lL_P5(TsoNIOXuY=`G4Q5Py%88@#_4AYa&@7-WqJK!Av9YP4(++jbY zA6J@h>il0>+8oc83Q}@?2I;uQ2qb9g%_o6CF3(%3Gtf=}w&!Q=3E^fSDQJP{Uui`l z5rH%kA%%P^_H~(i@6B>yMuDZ^L>HE=-h^vbh~0`cFm(^0wQIh;GuhWzp)IA!O|N2p z$6axial~%Ke5gFuul4Ve0An>j1+F4|bAWpt=b}CfLJOU854)VqKU;|mRUqNe1~9s} zd$lLu7CYO@h-#CgCyyw!Y=)pgSkkDDm-O07%{%$<j5=+J8E&<JnfHY{hXr_7=9ny1 zm2;L^5;TFNcWT|UqH6((k}GwPTlZ^zXF}8Lgbmml?WHmlX?<wqO?h8^Oc?-o4ghrK z(Ctl3JYAjPZHO2B{$o|%+!yNU#@w0#!tu1dB(MFfKXOd9=fHJ&(pGTfapx+%(%Q}` zteF?oaa9i*Sa7-)5Xo@&jFtbOOS6YGue9egJiKeTnKbe1=<7KI9FrwgKxD8-gyqRY zfz((#3L=bz=w}T{H$#UrG$C{+XdNX#aWj{Lsp2!5FlzQA_*S%G2jXSe_cb5+=Ud$C zRtqZ2dtA{evRV}nkN%E(*yvBA*aO7i%Oh7Z=aE1?8Yc{j$=JZgF7vvhJScE)X_={J zdUd?jW~C#>OJM>iICzut7y}NWdm@W~Qa?@zDw!FaU;`!i#FDv(A_)lY64b37IbJ=T zYqVAsOv0NH-b-P<U$O1X7HR@27Q|`fLFCmD0)kNBP=YhJK6Nw?rFXt@!TtK8i9$4q ze}6TEC6fX{GO0PHw_{+J(-@26hNzBYe|3Q612wFlak0Lo>s#x@EPj>39d1Z0)KR68 
zY8IRb5|9KaMRR9hwD_Fj4oC;$ITOV~J!8JRl5KA8=TZY+j?-m?;EId%deh%@w7zXR zlr&FwPr7O5$W7|soj(B`aOyOqLDNe$xMDS>9a$Wi4l2?j!&|{+u6E=lfvI&I6B?{C z-ATGF1l3?u!=^ZIuk3wj>6Gg$kXq9SQcX+Yk9r%P*Gu~+5%ccg)>26NcP4AuLWTwF zpzoi^c5u!Q#77=V!{ar}t45tn2grv88=xQ`WzY%OVHctmT|hXsGCA>>!B5FAsz|8g z11{sXBXAep+>C3zbDQ|~T(bUZ<RUjm<;qZ7+gr~Rvtr4yLM@i$pAwBMu5VrTx9v(x zhV=@9dG)#K>PSqD7jJZ@aYH7K`h|izDYe}s@KInC1jjUVI%xRPv-Yrjw*7N@MuymU z3JBarIQIhWm5eVV))FVUM}byubqP8ZAce*OugOQu{^E23G%v%ars}ld;!;`X^dcDd ztx4aD@+ya@Fz%X<S=IWhSwvjIalXQSop2u#fHJQ^XDEl1e3nawY{?TB+n@O?U0NSZ zLWe6}?aMP6p<68&ebF>}qccy{dWE@R>8c0hh6u04^rQwUoCi2%zacX>)LKXh^&M@( ze<G^L9=~w|O}sQv+K-x-xai?X80PYZ?Y5lGoC%=)eIFsHGrPO8^y1XD8D(=yzWVnd zd0JsSXNv7KK8Jen;i>F(>v%@Eo&7R-oZoA0tAv?1Xp8glD)dnMTnT_c)2G7^b>9Ud z&eA>>WTWS3BtHIump=a`|2jgKkB0Hq^{H7w{OfjHvDP2zkGGQH=ww=1V+@ZgEXY3) zW$n>`@h=<9sek#KrWT^tXmaPbT>LRUdit|)aot%HdHqqp2(xMa$$F#y&4U5?FF*I9 zmmV>!Np%AY7GAC~?rgL-9Z@N(YL>2+S?*io<B_X;l$%w|bVQ|hbt2pM7WR#c1Dr8? zcO>Wh{%0=G;1Xl|s8=U$lwOR!o;tc~n*d6(Of41M9|oB@Mo_R-?!FWMX*>blXLa2G zuo`4s=f0EN#aeT|Dz6}tOD*nxM*)+oL`>{Y8MNR2gP4J1$4W)T?M6)4sSC~-$Z+Me zB*j=}#TF1lewgWx)G+*z`0X$Acva`wr#S|bT?+4#_;!SA-4v>x_S*-u#SfqJKP-W0 z3#z5#`Xpw*vSRlBk@e0&GQh@odug_`^;_7Qlc(sNnUcLXhw{txh_{Ry!o}iWSd8W_ zr*@L{NQu^V&*$7Tj!%0-NXnP=!Zx*|y{fhBwt3YRl}FN0eT#L!-XjN@S~T|kNv}B7 z1|6D-zT__&D4$U=?~8@UJ<X0SJ-qSg8HgfO1W`b}KYaKdeWw!N*%N21AA^jlkPyzl z#RMg~nL$1{nwEahsrm+aJTjMvtk3fR@aHq(uT?3#X8)`;Y>hKKjnR06b=LWzvfpzC zd8sXI9ohuxcxpo?pUS02LyT7dI5bP>9m#x^C^TV_K`u*s(*lRpRl7Bbn5FMSr2(`! 
z!oQ>oBk%JS6|@zcklXfa32xvOe)1X+M^=5b7kc1-FY+sHGA}O)>qm55noL|~yZ6=F zT5kp~(+>y1v!DojJSy+u-_u^Qn~u~Ja9AT;_+IbnWJnRIZ#+CtPtWjW&Es@|_zHxd znR(n(-Y^EL56#fg%cGR2OC{F*>J)F<g1sr39-kpzcToJf=@CiWX~8v#5Zlb&G$~QK zN=-{DbH;q1i{=^GhqCj~96q&cMtX7ann8N+5o@b;f2QI+B+7w<v@b?^YQ9p}{Y?;o zrY5hPC4|{B*JNCheE)_Saw^Z5)9wzE1c9^`l?y7z#-tEpq@W~FK5HBfuBQiyxxLK( z&joCJbv(+;^MSUiX6q)A>L#zj5qKzeGpVV&5qOCGo1_bEjtlll<<cOO!C#Yb5+>U+ z@*SFnc$<{wv8BMnv&@KWFMP-G9_G?{DMImA^V+~*zTM)$b)mD@C8Mv)L1SSDM<4}| zs=OI$_6tE}wBOuMzn|9D!=ww~0pThntEuqY0n(`=ALo2EPAg3XKfnwQ82D!8)d>4Q zkNk=gKD)PiAMwLJ#zHfSeB{Q}Sm=s{_m<+p(jnS645>{%&czaWb}Z%4+1wC#bjpXf zA1ik>3=K=9C)5Z%hdw4$p$>gcmi9-7lzV%DbV?V76v#oBEQzuvJEh6kUc70TbS=S< zY~m%Vr;11CPtGapkIUtK-AZ<qgnP)ozK76@ALr|~q(2I_J~B5AVikG)IPYo<N(8sH zDB`vi93V-f@4rw_D@p;H1?R!@n6tw3rJ>3uN|a+YN_OD}H>p^7dc;z5hz8$ezxHlt zmrj{KzBvqE5jtcqVI^|ub}C%d!Ke=JaqsGYvnmT;7J5{L=Stv8qKugpY71yaNQpym zuvQ$&y6{TN6N3Mm9(?q3gx|ImU*uP7G8sZlg;f_Owlq7Z>gZsJ5sG9`=qg*{r}W^7 z64%&tlM63;EwPtPmjj&f;`DLX?;ibL?D1o-JzSlC2O0b(p+!Zctp{QU)P8N^<D|d& zt(!G6v-91)ae(ryTDc_olX*hqO@2VaMV^A?9NGN~F}@THTVq~mllB24@rqXhHy*9+ zUb2cj>`(RA-v^^>MzOk_-%*%Lq1WICS1#jF9H7j#%G}laas3>68!wzSYTrJHUCgL} z*J+K{8x<GHf^Irse^AbG{>t4RUNRJyiAENcqDEA)f25!lYSIITYF>p_c(D<GzkV0z zsEMmMRE1=X$L{&ze(VBR9-=Hw7wO8v5t1-2R=>M_iF-H_RZeS=RT2#m!l1yDJ*3ec zHgodK6U4Uw3XgoH!Cl2{5wK92sY$)Y_5Hx@q?kyIb9n-RC{be~OvoSsJZDnQep43c zk&nM>op7Obnn|+J{cYEsmM#H3dc7w&bLH@nT%Q3we$?KjaSR=2{Acwa*?tJbH>V)9 zhWM-5+ZH6$EDJ0&cyuWyGmlD)frGs-QHNU}sdeT^#09rnr8QN6=oHM*TsGf48^FRh z1&UI-u=M>&&CCS7REfpNhcsFTKXRD|dDUx=6MC$fGfjLi$7}}6#2??)MPmgwYbp3P z+6=4m^5~@XcdifSfBg6c3VAb1yyAc(E8bN+uHtn>&YE21StpPH`XNNcqfh=$b*^yI zSN#XOSG(hHo`9@IswV9mTO5a&T7sJ!7e$%Ew?`kVjG^ys`Cx`6Uzy^UeApZ7HY|?v z5{^V23jLqEYd*~7KOyGv%#gc+r7$!?e(T6j&f%}kWJ{HZAuBe^UyZ?Plmi8yx9P?9 z$se8ZqDNV^^jTSnWJ~ggP4?lGZQtu`L8@dXTWv}$rn1%)(BVCXhnPknMTIhD5|^dF zW^v-v@m`rk{(`54lc(<wAA&PUrl+Ab<mE`?cIF};Y9vBHcCP(e9a8d2@`(s%(|A@( zydvjSK%SJDybgldxFCnAp6P@<_wO{mKSrJNH8gbz_0su)*2ND|!-mSjYr~-`i=Kkb 
zszdK^J*Z|r28w4L`x8XUu)l;GvH4KUiWJ}`p&SDT9vlFJ&Bgx^z1|Va*|~i(wp{r2 zyRB~YWYU;11Sr-Y{FHPFdL0l_+q}Lz$mW%VaJpjm*{#j=8xrRxATWiIl`M=<PcNgg zOha%OFUK<}>v2pf(Mv-fm<-kv@h+*@Sbb?k={rrO67eNtwio@UC`XGAziCU|AL4f) zP4n&(7whpms9>ShgRo$&U_QKA5WEXEGwREg`-eH5GdQviG4(%85fP!l*uGeaJ;4cn zeAp*nbeVestFZG{3&npqc^EAGsq+F^aGWe5?Q`R53_n)mii5Nh?AY+VSpk?7_}k`b z<6L=cG4KinIz)jJu<64Ph_lo-wQ4+O?rLS@yW4fKxGvHLoVu$c*$^QAeL!RJ#SLrs z>is6kc6VQ33N-;PGgZziq#CK35l%8SKr1hl8cyABVZjx;MM$}bXKIO?8~DaG@cMlz z*VWIX*R@Kc%-igt9TXSQW~NLB>wk*-oByvaz5}X>;9H+2O*(>fDUnD34MmD{qy^~^ zIzk8uAe~4F9qEEpK_oQkgramXAiX!Kigf8n@7;&L_dn<V-kbC7+}(3_&zYUGJ9Foo z@7^8P)|uHkY}x$u{NT+1FvbA;AvTuIe4V86M&c5_j9L6PH+aiwyPvtFSR5FI#Cq<` z);z^8@kA|b0vbQk^R7t;;Wyvj<<ieGF^&yTpg)<{5he7R{%aW`A|`zQ9diircLO{g zVaP)V0|vjp+rv;krF;2&>R={<5M4nuAnE!yhAa~g++#q+8C<TQGl%`cOSFEwe$vPn zpWE448D4_f80$ivF7wKBmuMw?UykqoDe;n>uc8$5+^g$=Gv4;*p;fQ3z2)O?9w+Z! zZCc{_%Zcbe%NRK8mpZy#))4slX4=-)JbgOw8;Dwt)pp50`TG0kl(CnZ)@jz5=kCj0 zgETLy(`b0>{+Zk1MMVr8HOAY?r>6oT=UiBo3Ulv8+~=_&M+fZ|3DIZa*kYUVR>k*{ zoKB~_ygCAs2!FBzVd7m;222v^{>+c|g4%Y3*}Qu~vL6MPRGDTk8$eWof*6t~J+avA zA}+-+-sQwiY-|+Rq#m1aG(73ER&KHvuORb=qg6et7x!etxvk`C^M;(Z^=iubdWaP$ z_TY_dNPT+xPjEW#nbs6<yoX*Mrt<jgS}4Y<65XWnR95WAMVeTgyNhy(O|t{<(1Y*& zE45iAXAY(gma6cEamlZz{JB+of8Do_EF#m>-)0=11YEkYNF`MzJ#HZY`%{WRs+r`q zN$E%#<(>q7kc&o|@gla<x9fZ{AE<$D;p_50xAtE<dcm2dC5P-C7^epEB@pki<-{rO z(xNL(Ri4Ir$>M3RQI42LP>Zq-bNXUol$`d~HWF8rZ`jt)MOHa20k_G2RIX8pfcAz{ z@OU_uiG6gmqT{GFFmB)Ye0gaGXX+B6AnOHa(x<jopk~HTnIJiIbd+DyQ2A!!WDUI9 zIZjo)S8%36_CZN;hV=acjQ!X8(ibifoo+17i(gAbG!kt+O24?aGxplGg6^pq$h(0F zf`wuMi7!YMB+5%{N&x~!6e|f)7SIB+!Kz(!28PA8(462NLmD<dMw9{1fr@ZlT;Ee; z;5*%d(!oRB!tRC-wz7hK8%{7oM7zpU=d{$630UQTnM{_{tKtii-<K@*x=?PM!sl7; zbYDm|I_~`$C1qUpjq*L8Larql8QH$SvGd|FA6{lYHnhm^aJw(gKW@xwoPBHzz7}S8 z{w6fnaq`Znn&yeFeoQ#zngE`!=)|d)_86y4VDY-OuOH^d6pcY_3Q(5nz>%x~QI9EB zri>8iHqqV5LS|~RKUz1CKcB_baT)hY?ijS?K$RtZ0QS!$0Rvw`xk%I`IDZJl_v>dC z6}f<c^)$iC<e#!*?{L5lt}Fv922VY%{NyPAdTEt5Yu^O6r)+0E+5Jl$_#}cu+IxSg 
zJMRA)n<V-N$>-~FxXV11<7$=K$Y0X|fn!lZbR40rmovQ~Yc5~v>%nb)1GMnpLH7ch zsR##1neaV*>aPC=-f9=LnjhGD*?Kw2Js8GSbgS0kLgIwhhEuKNgCq~vhO=b-cK%bJ z9~R2`o!8%<lduimG>IaS+rqlq65j3g`=n>;9HR)2ACET|(Ejul@-O8Iacw8aS9m0E zd9$V3DTujE9zk`OyzgSz`9@f8xh^{;o1T0Wnu(hP__5;0Y(E8Du(x;5{=hNLG>h_P zSOd-^9H2d1lG`h!TZ(TnDt&I?!NvYaJqEgCR0eW*7R5!cXn^W!STg^m@fLPm>v&%4 zy0Qg}VKH1Ynd)t~ylcN}sL3DlWME9A^zikcxQGB253kk7uW5gilUk`*geDXPw01ZW z><fM2h6t9g&SOzOeiljBYv?}tcH+BRc8a~bn;H}{K+HwMLlfx-CC#X;h45b*1E7%^ zUpcy)c6}S24v-JvO}(;EFGz%MnDJkSi&@L&OB-$tYa0H00K1PwI+A1$yPS&Cam`#~ z>kmn8_bK1pd)@Pa0ITlI&Pj2(m%x$82^(Uv>%+?jf>z8V-D=INWO^k*#!9)r`{z$P zE52XM%tWoOkC9%J-u%E+$Bdg|&oD&muVv!BPrj8_VA6BXX^-k&dN}VAZZQQ8jKYVf zTrgcqfzj0B9b1`N+K?A~n=nheRpNbJ-7ok%WTjT(7O||32Z?=pNqvNr@M_X_3OIWh zd~7~Tbh4&QYZqk;pYy798j`)dPoGm#5Etz?4FIBc0v@kP)>JG|=YFnnkZ;Cv_t5;I zW-W#Nmb@FxdBO^jZqDltwtH5g0<LEIBk*V!!;;coof9?kyg5!NMTd@?kUFM!`+jVE zdT7`}5a#IesrNhh7D#qz;Lz=hf7+wVXa7Q^>up3pw|2%_WKadY8e||pE<zPzN0JJq z0{We_Rz1!Vgc=td;Ne;V-LK}pb|%PYgEH6M#Kd&asuO%VV$j#;qi?IDv_(`GuDe^q zDTD!djcy|i^MM$zAI`p?{|P+)VMFo1Nk3Q8XQIHmzWGAmgC&W>u8)&$Wz_d;eh;gf z!{TVI-@K=x7nvev+(NA39$#7PWM<Cm|Gds<l$5w=y2(-<Ug`dzvLh|NJKr2|eR=x7 z%msnGl`vr`^+$48R42q<92P<>d+qIUa?$kd+usLX&E2!RC*B(cY4-mgU+9fFq{zNY z@PKI?%zBjgV+>LXgJlNib#t1;&zF91uSv#c=b)}n_EJ$^wSyHu+c_@nzrI~9-=C5% zij0hZq9lJS&QDZmP%MoY9@#$j@bqcCDNCI%C<CAS<8J(~o33RV>n+<OsMv0^7W^~d z|J463oG~7b6STCnw=U1^ANfBa4;dlWsnJ33D3cM>JaW~`<8n#!>;AOmXUUVrT;d@# zys}YHlQo$rhpe7vyvZsw6bkn?Dh^;g(5Pb-^9|8?kB+^(^QGLc0nW4-`YXR^N)tOC z4KwzcmLm}_Ue!rTs0*tZ0U-0DM^*N^6dj^<63BODqmsvwCbBPNrY4q(i^;wTDk||0 zc?f@v=?AJVslAXH2D+<ss+m4~G+q7|cdJqRfc)U=hH*?D>^y@rjirlgN_Zk=NuGzf z#`Yi!w!vK$wAhahlqN}$iBdD)!2_;^BqP1zs$t57PqmyOEbjB~H!rVGS{%BwJRCgE z4iDF2RRt>h{~s!*6NK**jR)B-em^xb?J6&wor^zVa)&(I;C4PM7<|UA@8J^t+PUl( zEXOLhGAGJ<w2chUT1|{>Kmlr=84wV_j7Tv-q^KLAM>bq1Ugb@G`FqzX+kQeu$T=C@ zGn&54JJQ~#7$|equ;yR$W25Q_IMtbT>qnpMUP(%4k;r)!e)cN;%*$leP67T8?hw&B z5`X2VNd^wj9KN@ApVicSZ~DTdimtDu_2WVN3NowMOh>FD%b?lqsJkYv++|RtNxGL* 
zjg^611d;vat!-qRw@V8hL44jFF11fg#`eI<(uv4K&IxAA_bd1!!cdfHV$4&}&<wL< zoth4>j^1=db-j)QOQ)CXG*>Bs2>HE7x6t{d81~O7Ft*qk<ZI1Q^RMLhdps4>wHGv~ zCR{c79}N)-2rQ5(^;^AK{6h)GC;Qh9cy_hEfKu7FmiT*NoP6X;JvB=Pna3t2v;YOH z(D0+z18rrLuhhZg2vhw!IYFMJj&=_=@K}-{KjWNJri<9E#xdX1xx9(JtBbYPxukcs zm@b)=eP~r%Re1{BMXv{NOx13}kuBR@&ZJ|vT)5<*l)rprcP-YhW(RwFHV*@qN~lZN zx6ERMkbrMace;qVLXN6k<~NcghdgC6$0&uZ)~>ZqmZcv|CWw33Wio%7x3LZffiu%g z+q_P{Nwjm+{km~t=kRyxqq&gz>|*AR{t0JNbuTQ7JpcKvo~j#(Y0{9(`Kf1G+ZK!$ zsswpSGC)R!PYe$o2Mwq`Mh)A0&Wuj^LiogTYdGHLLP%MtMidAMsIocVWLsrg^{?p! zyG1&NO22#+QO4_^#eu6RgPV5I*<XD`xn{YZJzaFsCX0qYF*zyNb(mYcWPiD|KvHZ_ zICIkUV#z<ID%^tpcXwPGoJm7;<Ds<X__?rMp{kcxdtG<)QjE{JtXw#WR>m{;jT)!o z+VMMCpLyJdiX^Clwu&WV;a}+YrIi!0z8vmqvFzO+^2>)PV|XaoV&mPvCEpGC)%T#) z9K*^+e#C6wf-<;yOnasGd^vONLzy1^vM*f|_2|(MLHS~DEV_gdasO?QDCy(WSxyGc zXHC*Gk8<*2Y5UotOZq>sRkx?`p8SL?IPA|&nZUy1Gb`Wc*0{s^8PB8IQB@(GQ^#2z z#fraDfZDOAREh|nsyAJXl{dB{A;XMUS<kfr%jq){1>TuCi^9dSnm!-2UE<^nGfr<u zEzQp@+uSTF9gb6bfo8zkR&lS2KL;s&#l#7r?W><{UtEMXOekV|KoxrVK|F6A9^DJa zdl}-s@I>a`ntVQ;JUOQ@-P>J)omhG#GX6U;1L-qleU)Il5cb;>lEen7%@dh8H0wh} z83^P`nI!OayuxwVxV_Xe`g#_GWsGp=ZoDhia>6HZSZk%N4}+<YW!*m}7Q=6$KfaS9 zgVjCl$&MNzA!;${7|$J2zKz&7Oy{pem>OnkZ=r0@`fbT)3(O@grQvKc+6`g)<nB=s z2R4BYo%N9OTuj15xCGmjra-cjVrjBkv3v_B@d4hlB!jRKMGRd|bx#MHJBIJJ1H#{l zX<%wa!TtP<{x4o?@M@_N&2ll!B{B=K{bI;7WVV}6PAN<2vBUDH;uCTPy|DscT+0&R z6Uh5CS-K44B8Kl)cBi;hX^K+uE&q4g95nj%!EO1GV0cv^eXoM{v6hm*T}0N%L2nc& z65<y2$9Re?jHf)fNTp~#NoodWxLT5eR@`RPam;a+BrH7NE{f@d8svcDgSp6l^9B>1 zwy?3W6STI}t8vTqRfO%+(lQ6NSQ1?;JO|Kk3*LVQL2XImQwuwCc7HENNau%v=pj(i zV#-%j5wyhA#qC*wUTaLfLuffOFE$sX`7gKz78_@!t@G4K`V-)(=(D#}sNj!vaX{f+ zk@&;ORksi(bds%?8IqxJVtRGQul&Iium0X{p|K`a%a;;M3PBx3-E3}duvm~#H~SJ; zl%+j3NTSlu&-e7m<JPsi^@Z&J@u;_D2$jAjyObMz*2JXp{@FMEr*BX7Gz<j@El#?& z-0ReP#&A)7^R_D=ihX>aIq&Q+1%3U1-t?1U+@Zt>p4Zw+AI3LJ%bWiY!OX^Y(Cpd8 zI0M}yXVo;-Uw1arhug58PlLer4{D%#f~LvppS~6_@JtpPV2h1m;S6D*V!eDQ^lbrX z5e$XGjG?fQJIPRWY_W+xSu(h?_yxI<!SYQ6g|HSI?H@JuWGJ6N803Z1dr)T>NY6AS 
z8Ki#u^b77HY*D`6`{H4b8pAoKWK(Uk4lnS&ha+27wx|DnO!aLlNK~xhUy_a1?GwSh zz}uioi^X#`o}UqWs=Er;k8a*L1`u|h_zT_?PVV_~)JXz6&?SpoM(<J>mFo>bZM_Uv z6d#qYt@D!Ai`gHTY=+SB(8<psX(AG!{-0I^4t_V?#noT3HjV(-`NkG@xLRqXzt8N^ zV3JDXvwU0I5YTg3!8*h3rS^^WogbZ<v>(4+xZqfE+;QTe+PT!<w|ue!CT|W`c8l$g z$*~%2(o;LA-Mt^taE97x2bCey-sMpIk*XsGGrY8}R_{jSGkw?80r@?mPa4NQ!=EJv zhgt#?PKB3$?rDo}mXHCJ5|1U2aRwVf<3;2!{UQ=wX{tkVuEbLfMg7m8hvvWgM*-5~ z64onKbG>@yXjcIK)rr@Gnc9p-O~=v?8uu=@k7XeAC4bN%rKe`$aS?5E90~MVr!A}F zN6yL8KmD}rRs+@CML=5LbR9kBIt!{V8cwazb^hBlIox8Gkf1m>5qx6<LHSK~ZDh{* zRSQuboe=UV1*@B%_~19u`B<)uFTV%UPk(Ao3n4`)Sa;Db7_@$XM7U>rv~UUJYpSv? zl+Ll>n6PN^)7x5C6a=v6z3m`2rYdHhp5#C`C1%x*dHpIiXZ)mKKyyHN7KhQ`N3a+U z04b`c+Nnh}ByW~u2QVcOyiBp4rpvp|i{g+v*)5TNsy<}T<i(`NyX5mGiudwu+}U9H zq#=rrofK>>QHh{XZHFqDfqg5#7ZKcW0cH+TK{=DgYBJj-fmD?FbkZSA&T8hAK%|fz zQ)-tW`%3ytKq!n{R5QaA&C@WA$vyR~{Na|8KEAb!po3}qg|7L3W~tqrdnqjXtG2{> za=ZGS%M@$%7{6;@qd}BTFyt}`>#Qr)su03eNw^>plEtxAZnpTsyQ5UFob1Vnz(C{u zM+x$kUUb?bOT0=aW<y%t`ow8a?-jlMp&5J5(uR7;yUGPgOfoQDvD(HXE-7Fn`pd2l z?#gv=$aQN!kjOT<dBFqCZefcaF=yZGtMn3m4vGvR&?PHKHsK2&Ca9Z5=)Ik4XU(LB zmx#^7$pGs)Sa%?myCUpsan<01P6@}I2o%;^RlkV?Sc7}NJ;r$Y+ZyVjX)E@JDcMv* zvPa#JK^=QBY{rE53ChbmBG0zHSsrNFUC3(sBN%?m($$p73y>0I<%fE4bj9w}Uc&|I z+%Hp|vz)98_z{LLof&}C;v+(B132ql6Z4r#?$Y(;-joJeRx`0vK;-h;Lq0`yn@h^% z-iE5?uTvorQ1!}&yu}pL+P}7mixb)^?O9@Rs9an5j2MYE=K<+^%SmyZ6_`5muCc;` zU+czjsF8iTi@WhmX`^q_yQzjscSwpGj<Yw9IxTrIhDZyR+h%!Rvh|$HFQGvG<0nE= zN%Q3wjCZw0kGM>5RsAfXiWQ_bvulq(p@<b$@%y;L<m(si$-5J#Ms9>~EpVf#+>(Ig z7^{)0*I#f0Wi~&aRzmPP)xrssGJ#An9UsKJvqZ-$avN;c+lgAB#C)w*`Gn*U8w2<~ z0I?DsK8?ib1~y+_DwEq$DrR(3Qq<(3_g8WIxy_!%swAotCM#A&Amc9-zfj^W-wA*A z+R2UAqbzj@BZgzKCnVDUUfPZl5Op3cA&ZKc1k~53#CDW|(hS})d^e1;Sg)^f>a`HR zm3Y>kVtH(AnQi(V-mCQVrr0dzgAJJtPM2$6=_-4DHWI=K(SpU?H0eoUdK+#+P5}6Q z8c?sBacXUz1}vnhT`N3;xpudcjP2*R8`FwXsHrep|8fEfERW)=dc5M!#xyG3P&SUL zRku#D=||Q$2^TzkGlh6FQG;aGpvY}2Hnd9RhBD~qY6xe<8hjOo-Ya*A4(p7`oHbit zz~K0{L|5u8si`7A`u~;AyWrydXd%wtHo_QfDDbkjI<p7H>ucX~Je)q4^KFFMAV@}m 
z3ih)+#BIpq0po7R$l>pK6$^dL_%l)MNyrtwnKVc9UE8DQM}wJT0TD5wtEt8Q+WwX; zmk$^rzq4Hvo(26O99-M1OKZGryyE}&Z92TPh8v&UA&rq<ptd=tIc~M&rVjBSoiI{Z zzEd%y7>Cz#Ek{3+aA+^vWxT#CD@;$kC6%dNf8BG$P9vrtq@Z8?`@juH;5K@Ygtrew zP4_-WR?xFmC7|7hCO1_duJqeernT|g_^%Sz))9?iXrDHj$*S_k@n{W{yu>|83sCYk zwpTuxK9l@by|~HCE_VmWGZAtq%`r2r-?Z!#cV9@(BeZTWF!VOOD3Bz_cX>tWLlOzq zDfwn!wf?1qJ73}WaW1KMrfMg%%#+p>5FaMR4ZzFs_HDPg_ycI|w8e0M;AQIO4umpH zyM8rdGX=2xD7jfh`@XO0zN&ABrnuv*MfI^=09E+MZKvAvM45xX`~Zy?(L6hPQVDaL zmt##A1I3_2zd2PKirB?fX(En?_e4P@Z{jTlBQNml*W+}p^ZZy04l_wmyW(D+!n#18 zzVB$`8v9pLsoq$KO!pQ{^vb$VAna|iu62$T)Ll)2PupDlb=@BZz!9h+Mr-XUFNvKV z@hhr|u{&mHIP25?6gZ@BkugiJm@9&OK;yBNi?!U_oH7}g$t%r_y(sPY)z<QuFWZ5d z)=MF|$kYH^62$Yf+`!v+?KXo0tnHWkhm@o|%e@$85V*wZ-ch@m^)@@~lm(P+P07kA zVPx(`D}w#iT+@V~)|Ahz#o7@djBl>Z1I9tNd;|TwS`KNCfGkEk)k-tyj^<27d}dtS zMEQ>dVRDnq3;sld8>Dr^MLPoT-dCYQPMbl6{l-hBnAIzR;gC6GYO=e(FK>0dnoXS` zq8b5%lhX8Yh1K`v^|kHjbE+k48obiIGZ{u4OxZykS^p?Il$jH0L(xJOqjmB7nz+U% zY3aAdMKi{w1`shRQ?A`=!jsggH!&>bEj#KV?9Bg$(<`ZSCXOM1dK|+O9tRs-U0t0c z6A#T4$ouGxd)u3$3&2fOF<IXp>h0b9#sSo32WT-5aZ~_x#EWQ`!F(gnE3?zgBd)#L z7b^6gJB^vZ5E4b_g1)goKHg@H^qK32!1#Cibnzaj*AU8)-YpG~Hb-E9v-ED5yWWyO z0d)5xb2(#>w9&)6;{bliM-KmX6<@_J4k#L2#(i$yKiJ#($EmoW8n#3eu}}OWeH4+N zpie#!reb56L}lRfO0^hPR+h@qiCoaV948FA5TNZKz}A)G4l=px_|~A3aopXbq+%QK zN!xxmvKhWevz|e9EQw=^L`|r5B$!a`Yiw>#bXu!p4NVZtx{&a2vAQR=qHQkcbt2r; z2lLJ^Jci3*O|p3M21J=n_gxn%x?k}K>eJA*DSX!L3};5*VTzMOimdVy-4A~7^_#f6 zm;Jc^o`cxyTddhD0ruWeB`5%@zS<_BhUm!KN`wH3j`>xxlnsY@CU?+6;PT;KUNUL+ zo=fu>S`Wd@x~?cg#Y9HL>grj7$zoO91Iuu!KeyCLiFF2(mez}*Ir_u^LK#*GIO7Dm z;w%q1C><k{QI<uYh(h+iKokPTvho|eO2vFU#+X`MdHGmepymlQ1}_Oi^OyfLzRCq7 zJMMnxqA81Y(Qn}Kd01uLjZ8i}m~P(PNMDoLW$>0ZU2)FTa(1Elevpi?U2MLP+g=}j z?B`Z4v6v~bF|i`fo-XcqAU%_xVgh(4ib#3q3jKgEdxQV=A0hZDdtV@5VLUt6>RsqK zenBX`d^TAuX)xZ!jVb<CVeHp)bct#3ztPbXUjxseBO_uOt4Wk9L^DB75Kl&EhcogJ zc(Yh15Y0$RfOpOLmeJve>%oDa4SlnBQV*v>BPRl)(V|p}H+Q9%kU}`=TyP)a>XLgT zKh~<X7CbpjNO7b0_=tRC&bo1Cea;?`#KH>!9;24yGHE7Oz%z{tj;(JLrp13P+Vo@Y 
zz?l`DI!Z&e6hsSUcoi4;F`}6g3qs=hck}A4&S*e7>byOG=fsj<GVn?ZQm~g9tb>3= zxKc0RM)E*)__@*h=@03YY1zGA+XeNa=92=%kc2NL^*KYtU8<B_dA%J;)m&ur!7^$& z&W;g{o6Z!GPb*@j>Qt>amuyUU8g*Bnj=jg`0uykyN(Bs2m?yvYGp<h##P;e;>ri}^ z#nrFTl57oTGq0r}FUW4EY!AbwpoK<A4-}vAofga^TGz`BCm$az%-Nt^-+=H8<|44s zCv}~3bfNcRbx?JKjH+odxs4^4dm>k>kbHc%fvt6&IIGsmT9k*IQRi6ch2LUUcYCu; zS_Wr_l7t&9QaApKZb9gm0kuahG|Ebyl4Xv4wYs{!zdW~vgra)eZKDOai%E=KbMLMT z2!rn6U2_Cdw~v$?I~@D}yr?o)c`lr7PK>@yVBCM3pp)%1SRsC1&zZn45v8O%Ae=(_ zDMYMjL?k-Ys(lv7M|Od6oa07qI6a^at6IJq;2H2{C<=-2yT~}8r>77@tvIXgNWIT_ zxQO%IEO9zLlVlNR&m{M{zu{E7v9#!<p~U;vkPZUa@=L^hH7jvoJhuPwZ59KnXN4t0 zZAxDX#lRWQ#EhD%mClJTr<)nuiEQ!iDJx|&uo|aDFya}L-Y1r>91|XnD^>3PWf`*h zUB8}UBYK(y^kaehFxc+_#T_t#-P1eFqRB(kb2oXJgiXOCvzsq;mlvmk!K<o`KSv7a zQPg()7+GUD;}`JWoZ!1;>V-CK^dGrjQyf$$EG2p`h%_1>=%35DBlcg@W#s;F+F&)V zk&Qjg?fKajnn@<tad<WyzWr6~ms6Ef!uly;>$N3sVn8TmCP~BC$8a!W(;rebQrJaB zoz!*0??6nwI{7IV0`{|@7LCb05pI>ZSAIA-J1AM_8<Uk^06&|iC!60ZaRZjw!~ghY zj(J5N*UFL%f8IQPU@%#et0@i8nuIVW%;p#KwC?+4(HwdW9H;c(yozu6+&Q`uzPMXl z!jkeUrMl%+LFp^AS9Up3!matD#nKa!$X|0}``|_KWl!hWT@PHrYAP%&)!NLf_6Xb` z4;QRVHa1^O?=&|^O{6-pchCfnVV}9yoQa4g3ab@ORXlf&py~Cmc~4@+zQ8If&#vC{ zDW_#rjklVI^Bt%u+ys)(3*~=KfQazbCbXwZp&yMT&~{<1H_~^sd+hObXaky(oNW5; zPCv6AXaTW)nf%++yOFxHw@~rhL)Slu!j66_g$Z!4EtRV3`pIQVkxTTe0NXB~I6NA3 z=l2bXv<TLdS0(y9-B|(HA_373?w_B<l51VNgcjPB-x-f;G&9NlxOv|xdWTz%`2a#x zq4#&jAO^3Bkc>bDBES$dejzXF`j=|#FPofz(V{!n09SHc=XSG3A&Hgy*>xfJsbYi) z#`FW(&6{BXEhMkkFR5kQ)Sa|;QPEN#oq)plK@tqXP>4z%FQC;vMGRreiEN{21A#<0 z(-1e|sfc{ogfUtwgB~(vk<6A3e2Q?bxtE898H=@zRS8NGtI`EYhRCC7Wagujwu{fM z3+GS$&#q9F*tKDFT}*)Tb4jg%u_OUHz4m+j1Qb@Z^M6V3<E(!6EJvZ;_7Pu^so+9N z4QyUATYL|VJEjteMAFV0y%kO*@!QK#Mx6lz7z|sl)LHDAZCWEb|DNFz`PxF?go}wq F{uiIN@MZu2 literal 0 HcmV?d00001 diff --git a/doc/source/conf.py b/doc/source/conf.py new file mode 100644 index 0000000..4e05b4d --- /dev/null +++ b/doc/source/conf.py @@ -0,0 +1,301 @@ +# -*- coding: utf-8 -*- +# +# PLWN_API documentation build configuration file, created by +# sphinx-quickstart on Fri Jun 2 14:19:03 
2017. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +#sys.path.insert(0, os.path.abspath('.')) + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The encoding of source files. +source_encoding = 'utf-8' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'PLWN_API' +copyright = u'2017, MichaÅ‚ KaliÅ„ski' +author = u'MichaÅ‚ KaliÅ„ski' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = '0.22' +# The full version, including alpha/beta/rc tags. +release = '0.22' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. 
+# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +# language = 'en' + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +#keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = False + +locale_dirs = ['locale'] + +nitpicky = True + + +# -- Autodoc -------------------------------------------------------------- + +autoclass_content = 'both' +autodoc_default_flags = ['members', 'show-inheritance'] + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = 'alabaster' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. 
For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# "<project> v<release> documentation". +#html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +#html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +#html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_domain_indices = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. 
+#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a <link> tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +#html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +#html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +#html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = 'PLWN_APIdoc' + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { +# The paper size ('letterpaper' or 'a4paper'). +'papersize': 'a4paper', + +# The font size ('10pt', '11pt' or '12pt'). +#'pointsize': '10pt', + +# Additional stuff for the LaTeX preamble. +#'preamble': '', + +# Latex figure (float) alignment +#'figure_align': 'htbp', + +'extraclassoptions': ',openany,oneside', +'babel': r'\usepackage[polish]{babel}', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). 
+latex_documents = [ + (master_doc, 'PLWN_API.tex', u'PLWN\\_API Documentation', + u'MichaÅ‚ KaliÅ„ski', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + +latex_engine = 'lualatex' + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'plwn_api', u'PLWN_API Documentation', + [author], 1) +] + +# If true, show URL addresses after external links. +#man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'PLWN_API', u'PLWN_API Documentation', + author, 'PLWN_API', 'One line description of project.', + 'Miscellaneous'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. +#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +#texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. 
+#texinfo_no_detailmenu = False diff --git a/doc/source/enums.rst b/doc/source/enums.rst new file mode 100644 index 0000000..7c9fc0c --- /dev/null +++ b/doc/source/enums.rst @@ -0,0 +1,6 @@ +Public enums +============ + +.. automodule:: plwn.enums + :undoc-members: + :member-order: groupwise diff --git a/doc/source/exceptions.rst b/doc/source/exceptions.rst new file mode 100644 index 0000000..6964d0f --- /dev/null +++ b/doc/source/exceptions.rst @@ -0,0 +1,4 @@ +Public exceptions +================= + +.. automodule:: plwn.exceptions diff --git a/doc/source/index.rst b/doc/source/index.rst new file mode 100644 index 0000000..47ae963 --- /dev/null +++ b/doc/source/index.rst @@ -0,0 +1,22 @@ +.. PLWN_API documentation master file, created by + sphinx-quickstart on Fri Jun 2 14:19:03 2017. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to PLWN_API's documentation! +==================================== + +.. toctree:: + :maxdepth: 2 + + introduction + interface + exceptions + enums + + +Indices and tables +================== +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/doc/source/interface.rst b/doc/source/interface.rst new file mode 100644 index 0000000..f83ae7a --- /dev/null +++ b/doc/source/interface.rst @@ -0,0 +1,4 @@ +Public interface +================ + +.. automodule:: plwn.bases diff --git a/doc/source/introduction.rst b/doc/source/introduction.rst new file mode 100644 index 0000000..03510e4 --- /dev/null +++ b/doc/source/introduction.rst @@ -0,0 +1,258 @@ +Introduction +============ + +Loading +------- + +Access to plWordNet is provided via a single +:class:`~plwn.bases.PLWordNetBase` object, which requires a source from which +to load the lexicon data. 
In normal distributions, the storage file is bundled +with the python package, so the only thing required to get an instance is:: + + import plwn + wn = plwn.load_default() + + +Getting synsets and lexical units +--------------------------------- + +The basic building blocks of plWordNet are synsets and lexical units, +represented by :class:`~plwn.bases.SynsetBase` and +:class:`~plwn.bases.LexicalUnitBase` objects. Every single synset and lexical +unit can be identified either by an unique ID number, or by a combination of +three properties: lemma, :abbr:`pos (part of speech)` and variant. + +There are three primary methods on +:class:`~plwn.bases.PLWordNetBase` for each of these two types of entities that +allow selecting them from the lexicon: + +* Many entities by matching one or more of the three identifying properties: + + * :meth:`~plwn.bases.PLWordNetBase.synsets` + * :meth:`~plwn.bases.PLWordNetBase.lexical_units` + +* A single entity by matching all three identifying properties: + + * :meth:`~plwn.bases.PLWordNetBase.synset` + * :meth:`~plwn.bases.PLWordNetBase.lexical_unit` + +* A single entity by matching the unique numeric ID: + + * :meth:`~plwn.bases.PLWordNetBase.synset_by_id` + * :meth:`~plwn.bases.PLWordNetBase.lexical_unit_by_id` + + +Selecting by ID +^^^^^^^^^^^^^^^ + +Using the ``*_by_id`` methods is the fastest and most straightforward way of +getting :class:`~plwn.bases.SynsetBase` and +:class:`~plwn.bases.LexicalUnitBase` objects, provided that ID values of +synsets and / or units for the correct version of plWordNet have been obtained +from an outside source or by storing the ``id`` property:: + + >>> s = wn.synset_by_id(34160) + >>> print(s) + {pies.2(21:zw), pies_domowy.1(21:zw)} + >>> print(s.id) + 34160 + + +Selecting by all three identifying properties +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The "singular" methods require all three properties. 
Lemma is the basic form of +a word, variant is an ordinal number differentiating between different meanings +of the same word, and :abbr:`pos (part of speech)` is an enumerated value. + +There are eight :abbr:`pos (part of speech)` constants, four for Polish synsets +and units, and four for English. The enum class is provided as a member of the +base module of the library: + +* ``plwn.PoS.verb``, ``plwn.PoS.noun``, ``plwn.PoS.adv``, ``plwn.PoS.adj`` +* ``plwn.PoS.verb_en``, ``plwn.PoS.noun_en``, ``plwn.PoS.adv_en``, + ``plwn.PoS.adj_en`` + +There are few cases, where all three properties would be known, but not the ID. +Still, selecting like this is possible:: + + >>> lx = wn.lexical_unit(lemma='pies', pos=plwn.PoS.noun, variant=2) + >>> print(lx) + pies.2(21:zw) + >>> print(lx == wn.lexical_unit_by_id(lx.id)) + True + +It's not legal to omit one any of the three properties:: + + >>> lx = wn.lexical_unit(lemma='pies', pos=plwn.PoS.noun) + TypeError: lexical_unit() missing 1 required positional argument: 'variant' + +If there's no synset / unit that fits the query, an +:exc:`~plwn.exceptions.NotFound` subclass is raised:: + + >>> lx = wn.lexical_unit(lemma='pies', pos=plwn.PoS.noun, variant=99) + LexicalUnitNotFound: lemma='pies', pos=<PoS.noun: 'noun'>, variant=99 + + >>> lx = wn.synset(lemma='pies', pos=plwn.PoS.noun, variant=99) + SynsetNotFound: lemma='pies', pos=<PoS.noun: 'noun'>, variant=99 + + +Selecting by some of the identifying properties +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The "plural" methods always return an iterable of synsets / lexical units. +Unlike the "singular" methods, they allows omitting one or more of the +arguments, which could match more than one entity. + +It's safer to wrap the invocation in ``tuple`` constructor, since the interface +only guarantees that the return value is iterable. 
+ +:: + + >>> lxs = tuple(wn.lexical_units(lemma='pies', pos=plwn.PoS.noun)) + >>> print(lxs) + (<LexicalUnit id=5563 lemma='pies' pos=<PoS.noun: 'noun'> variant=1>, + <LexicalUnit id=52245 lemma='pies' pos=<PoS.noun: 'noun'> variant=2>, + <LexicalUnit id=69245 lemma='pies' pos=<PoS.noun: 'noun'> variant=3>, + <LexicalUnit id=626100 lemma='pies' pos=<PoS.noun: 'noun'> variant=4>, + <LexicalUnit id=626108 lemma='pies' pos=<PoS.noun: 'noun'> variant=5>, + <LexicalUnit id=626115 lemma='pies' pos=<PoS.noun: 'noun'> variant=6>, + <LexicalUnit id=710443 lemma='pies' pos=<PoS.noun: 'noun'> variant=7>) + +It's also possible that a query matches zero entities. Unlike the "singular" +methods, this will not raise an exception, but simply return an empty iterable. + +:: + + >>> lxs = tuple(wn.lexical_units(lemma='pies', pos=plwn.PoS.verb)) + >>> print(lxs) + () + + +Synset and lexical unit properties +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Data associated with plWordNet synsets and lexical units is provided as public +properties of synset and lexical unit objects. There are described in +documentation of the respective classes: :class:`~plwn.bases.SynsetBase` and +:class:`~plwn.bases.LexicalUnitBase`. + + +Getting relations +----------------- + +The other elementary kind of entities in plWordNet, aside from synsets and +lexical units, are relations. + +Relation instances can connect two synsets or two lexical units. These +instances are selected using identifiers of their types. + +A detailed explanation on how relation types can be referred to is in +:class:`~plwn.bases.RelationInfoBase`; the short version is: + +* Full name, for example: ``hiperonimia`` for relations that have no parent + type; ``meronimia/część`` for relations that do. +* Short alias, for example: ``hiper``. +* Parent name, for example: ``meronimia``; this refers to all the children of + the relation. + +To see names and aliases for all relations, in alphabetical order, do +``sorted(wn.relations_info())``. 
+ + +Related synset / units +^^^^^^^^^^^^^^^^^^^^^^ + +Having a :class:`~plwn.bases.SynsetBase` or a +:class:`~plwn.bases.LexicalUnitBase` objects, it's possible to select all +objects related to it using the ``related`` method, which accepts one of the +relation type identifiers described above. The ``relations`` property can be +used to check what relation types have outbound instances from the synset / +unit:: + + >>> lx = wn.lexical_unit_by_id(132) + >>> print(lx.relations) + [<RelationInfo name='antonimia komplementarna' parent='antonimia' + kind=<RelationKind.lexical: 'lexical'> aliases=('ant_kom',)>, + <RelationInfo name='derywacyjność' parent=None + kind=<RelationKind.lexical: 'lexical'> aliases=('der',)>] + >>> print(tuple(lx.related('der'))) + (<LexicalUnit id=133 lemma='apetyt' pos=<PoS.noun: 'noun'> variant=2>,) + +If a relation of the right kind (synset or lexical) is passed to the method, +but it has no instances for the particular entity, an empty iterable is +returned:: + + >>> print(tuple(lx.related('rol:ag'))) + () + +In contrast, if a relation is of the wrong kind or does not exist, this raises +an error:: + + >>> lx.related('hiper') + InvalidRelationTypeException: (<RelationKind.lexical: 'lexical'>, 'hiper') + +When passing a parent relation type to ``related``, distinction between actual, +children relation types is lost. A second method ``related_pairs`` can be used +to annotate related entities with the relation instance connecting to it:: + + >>> s = wn.synset_by_id(7337) + >>> print(tuple(s.related_pairs('meronimia'))) + ((<RelationInfo name='część' parent='meronimia' + kind=<RelationKind.synset: 'synset'> aliases=('mero:cz',)>, + <Synset id=22085 lemma='pociÄ…g drogowy' pos=<PoS.noun: 'noun'> + variant=1>),) + +Synset's :meth:`~plwn.bases.SynsetBase.related` and +:meth:`~plwn.bases.SynsetBase.related_pairs` also have an additional boolean +``skip_artificial`` argument. 
See the methods' documentation for more details; +the default value should be correct for most uses. + + +Relation edges +^^^^^^^^^^^^^^ + +Relation instances can also be selected using +:meth:`~plwn.bases.PLWordNetBase.synset_relation_edges` and +:meth:`~plwn.bases.PLWordNetBase.lexical_relation_edges` methods. Unlike the +``related`` methods, these two are not anchored to a starting point and select +all relation instances of given types in plWordNet; they return iterables of +:class:`~plwn.bases.RelationEdge` instances, each having a ``source``, +``relation`` and ``target`` properties. + +Without arguments, all synset or lexical relation instances are yielded. +Filtering can be done using an ``include`` or ``exclude`` argument. Both expect +the values to be sets of relation type identifiers (the same as those accepted +by the ``related`` methods). When ``include`` is not ``None``, only instances +of relations mentioned in the set are yielded. For example, to select all +hyponymy instances:: + + >>> sr = tuple(wn.synset_relation_edges(include=('hiper',))) + >>> print(sr) + (RelationEdge(source=<Synset id=10 lemma='samoistny' + pos=<PoS.adjective: 'adjective'> + variant=2 [+ 1 unit(s)]>, + relation=<RelationInfo name='hiperonimia' + parent=None kind=<RelationKind.synset: 'synset'> + aliases=('hiper',)>, + target=<Synset id=9139 lemma='bezwiedny' + pos=<PoS.adjective: 'adjective'> + variant=1 [+ 7 unit(s)]>), + RelationEdge(source=<Synset id=10 lemma='samoistny' + pos=<PoS.adjective: 'adjective'> + variant=2 [+ 1 unit(s)]>, + relation=<RelationInfo name='hiperonimia' + parent=None kind=<RelationKind.synset: 'synset'> + aliases=('hiper',)>, + target=<Synset id=104191 lemma='automatyczny' + pos=<PoS.adjective: 'adjective'> variant=4>), + ...) + +When ``exclude`` is not ``None``, instances of mentioned relation types are +removed from the iterable; either from the set of all relations or those in +``include``. 
+ +Method :meth:`~plwn.bases.PLWordNetBase.synset_relation_edges` also takes a +boolean ``skip_artificial`` argument that's ``True`` as default. Like with +:meth:`~plwn.bases.SynsetBase.related`, see the method's documentation for +details. diff --git a/doc/source/locale/pl/LC_MESSAGES/enums.po b/doc/source/locale/pl/LC_MESSAGES/enums.po new file mode 100644 index 0000000..df1dabf --- /dev/null +++ b/doc/source/locale/pl/LC_MESSAGES/enums.po @@ -0,0 +1,110 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) 2017, MichaÅ‚ KaliÅ„ski +# This file is distributed under the same license as the PLWN_API package. +# FIRST AUTHOR <EMAIL@ADDRESS>, 2017. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: PLWN_API 0.21\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2017-08-18 14:42+0200\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=utf-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: Babel 2.1.1\n" + +#: ../../source/enums.rst:2 +msgid "Public enums" +msgstr "Publiczne obiekty wyliczeniowe" + +#: plwn.enums:1 +msgid "All enumerated values used in plWordNet." +msgstr "Wszystkie wyliczeniowe wartoÅ›ci wykorzystywane w SÅ‚owosieci." + +#: plwn.enums.RelationKind:1 plwn.enums.PoS:1 plwn.enums.VerbAspect:1 +#: plwn.enums.EmotionMarkedness:1 plwn.enums.EmotionName:1 +#: plwn.enums.EmotionValuation:1 plwn.enums.Domain:1 +msgid "Bases: :class:`enum.Enum`" +msgstr "Klasy bazowe: :class:`enum.Enum`" + +#: plwn.enums.RelationKind:1 +msgid "Whether a relation connects synsets or lexical units." +msgstr "OkreÅ›la czy relacja łączy synsety czy jednostki leksykalne." + +#: plwn.enums.RelationKind.by_db_number:1 plwn.enums.PoS.by_db_number:1 +#: plwn.enums.VerbAspect.by_db_number:1 plwn.enums.Domain.by_db_number:1 +msgid "" +"Return the enum value associated with ``number`` value stored in the " +"plWordNet database." 
+msgstr "" +"Zwróć wartość wyliczeniowÄ… skojarzonÄ… z wartoÅ›ciÄ… numerycznÄ… ``number`` " +"przechowywanÄ… w bazie danych SÅ‚owosieci." + +#: plwn.enums.RelationKind.by_db_number:4 plwn.enums.PoS.by_db_number:4 +#: plwn.enums.VerbAspect.by_db_number:4 plwn.enums.Domain.by_db_number:4 +msgid "" +"Raises ``KeyError`` if ``number`` is not in the range valid for the " +"database field, unless ``optional`` is ``True``; then, ``None`` is " +"returned instead of an enum value." +msgstr "" +"Rzuca ``KeyError`` jeÅ›li ``number`` nie jest w zakresie wÅ‚aÅ›ciwym dla " +"danego pola bazy danych, chyba że ``optional`` ma prawdziwÄ… wartość; " +"wtedy zamiast wartoÅ›ci wyliczeniowej zwracane jest ``None``." + +#: plwn.enums.RelationKind.db_number:1 plwn.enums.PoS.db_number:1 +#: plwn.enums.VerbAspect.db_number:1 plwn.enums.Domain.db_number:1 +msgid "The number associated with the enum value in plWordNet database." +msgstr "" +"Wartość numeryczna skojarzone w bazie danych SÅ‚owosieci z wartoÅ›ciÄ… " +"wyliczeniowÄ…," + +#: plwn.enums.PoS:1 +msgid "Defines part of speech values used in plWordNet." +msgstr "Definiuje wartoÅ›ci odpowiadajÄ…ce częściom mowy w SÅ‚owosieci." + +#: plwn.enums.VerbAspect:1 +msgid "Defines verb aspect values used in plWordNet." +msgstr "Definiuje wartoÅ›ci odpowiadajÄ…ce aspektom czasowników w SÅ‚owosieci." + +#: plwn.enums.EmotionMarkedness:1 +msgid "Defines markedness of emotions associated with some lexical units." +msgstr "" +"Definiuje oznaczenia sentymentu zwiÄ…zanego z niektórymi jednostkami " +"leksykalnymi." + +#: plwn.enums.EmotionMarkedness.normalized:1 +msgid "" +"Return an instance of this enum corresponding to ``strvalue`` after " +"normalizing it with regards to whitespace." +msgstr "" +"Zwróć instancjÄ™ typu wyliczeniowego odpowiadajÄ…cÄ… wartoÅ›ci tekstowej, po " +"jej znormalizowaniu pod wzglÄ™dem spacji." + +#: plwn.enums.EmotionName:1 +msgid "Defines names of emotions that may be associated with lexical units." 
+msgstr "" +"Definiuje nazwy sentymentu, które mogÄ… być powiÄ…zane z jednostkami " +"leksykalnymi." + +#: plwn.enums.EmotionValuation:1 +msgid "Defines valuations of emotions that may be associated with lexical units." +msgstr "" +"Definiuje wartoÅ›ciowania sentymentu, które mogÄ… być powiÄ…zane " +"z jednostkami leksykalnymi." + +#: plwn.enums.Domain:1 +msgid "Defines domains of lexical units occurring in plWordNet." +msgstr "Definiuje domeny jednostek leksykalnych SÅ‚owosieci." + +#: plwn.enums.make_values_tuple:1 +msgid "" +"Auxiliary function that converts a sequence of enums to a tuple of enum " +"string values." +msgstr "" +"Pomocnicza funkcja konwertujÄ…ca sekwencjÄ™ obiektów wyliczeniowych do " +"krotki ich wartoÅ›ci tekstowych." + diff --git a/doc/source/locale/pl/LC_MESSAGES/exceptions.po b/doc/source/locale/pl/LC_MESSAGES/exceptions.po new file mode 100644 index 0000000..0898b0b --- /dev/null +++ b/doc/source/locale/pl/LC_MESSAGES/exceptions.po @@ -0,0 +1,106 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) 2017, MichaÅ‚ KaliÅ„ski +# This file is distributed under the same license as the PLWN_API package. +# FIRST AUTHOR <EMAIL@ADDRESS>, 2017. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: PLWN_API 0.21\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2017-08-18 14:42+0200\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=utf-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: Babel 2.1.1\n" + +#: ../../source/exceptions.rst:2 +msgid "Public exceptions" +msgstr "Publiczne wyjÄ…tki" + +#: plwn.exceptions:1 +msgid "Custom exceptions raised by PLWN API." +msgstr "WyjÄ…tki rzucane przez PLWN API." 
+ +#: plwn.exceptions.PLWNAPIException:1 +msgid "Bases: :class:`Exception`" +msgstr "Klasy bazowe: :class:`Exception`" + +#: plwn.exceptions.PLWNAPIException:1 +msgid "Base for all exceptions in the module." +msgstr "Baza dla wszystkich wyjÄ…tków w tym module." + +#: plwn.exceptions.NotFound:1 plwn.exceptions.ReaderException:1 +#: plwn.exceptions.LoadException:1 +#: plwn.exceptions.InvalidRelationTypeException:1 +msgid "Bases: :class:`plwn.exceptions.PLWNAPIException`" +msgstr "Klasy bazowe: :class:`plwn.exceptions.PLWNAPIException`" + +#: plwn.exceptions.NotFound:1 +msgid "Base for exceptions raised when an entity is not found." +msgstr "Baza dla wyjÄ…tków rzucanych przy nie znalezieniu szukanego obiektu." + +#: plwn.exceptions.LexicalUnitNotFound:1 plwn.exceptions.SynsetNotFound:1 +msgid "Bases: :class:`plwn.exceptions.NotFound`" +msgstr "Klasy bazowe: :class:`plwn.exceptions.NotFound`" + +#: plwn.exceptions.LexicalUnitNotFound:1 +msgid "Raised when a lexical unit is not found during lookup." +msgstr "Rzucany kiedy nie znaleziono szukanej jednostki leksykalnej." + +#: plwn.exceptions.SynsetNotFound:1 +msgid "Raised when a synset is not found during lookup." +msgstr "Rzucany kiedy nie znaleziono szukanego synsetu." + +#: plwn.exceptions.ReaderException:1 +msgid "Raised when there's an error in the format expected by a reader." +msgstr "Rzucany kiedy wystÄ…pi błąd w czytaniu formatu wejÅ›ciowego SÅ‚owosieci." + +#: plwn.exceptions.MalformedIdentifierException:1 +msgid "Bases: :class:`plwn.exceptions.ReaderException`" +msgstr "Klasy bazowe: :class:`plwn.exceptions.ReaderException`" + +#: plwn.exceptions.MalformedIdentifierException:1 +msgid "Raised during UBY-LMF parsing, when a malformed identifier is encountered." +msgstr "" +"Rzucany jeÅ›li podczas wczytywania UBY-LMF napotkany jest identyfikator " +"o zÅ‚ym formacie." + +#: plwn.exceptions.LoadException:1 +msgid "Raised when a storage can't be loaded from file." 
+msgstr "Rzucany jeÅ›li wystÄ…pi błąd podczas wczytywania danych ze zrzutu." + +#: plwn.exceptions.DumpVersionException:1 +msgid "Bases: :class:`plwn.exceptions.LoadException`" +msgstr "Klasy bazowe: :class:`plwn.exceptions.LoadException`" + +#: plwn.exceptions.DumpVersionException:1 +msgid "" +"Raised when a dumped storage has wrong version (suggesting incompatible " +"format)." +msgstr "" +"Rzucany kiedy zrzut danych ma wersjÄ™ formatu niezgodnÄ… z tÄ… obsÅ‚ugiwanÄ… " +"przez aktualnÄ… bibliotekÄ™." + +#: plwn.exceptions.InvalidRelationTypeException:1 +msgid "" +"Raised when a relation identifier does not refer to any existing relation" +" (or the relation exists for the other relation kind)." +msgstr "" +"Rzucany kiedy identyfikator relacji nie odpowiada żadnej istniejÄ…cej " +"relacji (albo relacji innego rodzaju)." + +#: plwn.exceptions.AmbiguousRelationTypeException:1 +msgid "Bases: :class:`plwn.exceptions.InvalidRelationTypeException`" +msgstr "Klasy bazowe: :class:`plwn.exceptions.InvalidRelationTypeException`" + +#: plwn.exceptions.AmbiguousRelationTypeException:1 +msgid "" +"Raised when a relation type identifier could refer to more than one " +"relation, but only one is permitted in the context." +msgstr "" +"Rzucany kiedy identyfikator typu relacji może odnosić siÄ™ do wiÄ™cej niż " +"jednej relacji, ale w danym kontekÅ›cie dozwolona jest tylko pojedyncza." diff --git a/doc/source/locale/pl/LC_MESSAGES/index.po b/doc/source/locale/pl/LC_MESSAGES/index.po new file mode 100644 index 0000000..36e20e2 --- /dev/null +++ b/doc/source/locale/pl/LC_MESSAGES/index.po @@ -0,0 +1,38 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) 2017, MichaÅ‚ KaliÅ„ski +# This file is distributed under the same license as the PLWN_API package. +# FIRST AUTHOR <EMAIL@ADDRESS>, 2017. 
+# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: PLWN_API 0.21\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2017-06-12 16:51+0200\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=utf-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: Babel 2.1.1\n" + +#: ../../source/index.rst:7 +msgid "Welcome to PLWN_API's documentation!" +msgstr "Dokumentacja PLWN_API" + +#: ../../source/index.rst:19 +msgid "Indices and tables" +msgstr "Indeksy i tabele" + +#: ../../source/index.rst:20 +msgid ":ref:`genindex`" +msgstr "" + +#: ../../source/index.rst:21 +msgid ":ref:`modindex`" +msgstr "" + +#: ../../source/index.rst:22 +msgid ":ref:`search`" +msgstr "" diff --git a/doc/source/locale/pl/LC_MESSAGES/interface.po b/doc/source/locale/pl/LC_MESSAGES/interface.po new file mode 100644 index 0000000..230c22b --- /dev/null +++ b/doc/source/locale/pl/LC_MESSAGES/interface.po @@ -0,0 +1,1265 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) 2017, MichaÅ‚ KaliÅ„ski +# This file is distributed under the same license as the PLWN_API package. +# FIRST AUTHOR <EMAIL@ADDRESS>, 2017. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: PLWN_API 0.21\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2017-08-18 14:42+0200\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=utf-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: Babel 2.1.1\n" + +#: ../../source/interface.rst:2 +msgid "Public interface" +msgstr "Publiczny interfejs" + +#: plwn.bases:1 +msgid "" +"Base, abstract classes for plWordNet objects, implementing common " +"functionality independent of structures holding the data itself." 
+msgstr "" +"Bazowe, abstrakcyjne klasy obiektów SÅ‚owosieci, implementujÄ…ce " +"funkcjonalność niezależnÄ… od struktur przechowujÄ…cych same dane." + +#: plwn.bases.PLWordNetBase:1 plwn.bases.SynsetBase:1 +#: plwn.bases.LexicalUnitBase:1 plwn.bases.RelationInfoBase:1 +msgid "Bases: :class:`object`" +msgstr "Klasy bazowe: :class:`object`" + +#: plwn.bases.PLWordNetBase:1 +msgid "The primary object providing data from plWordNet." +msgstr "Podstawowy obiekt udostÄ™pniajÄ…cy dane ze SÅ‚owosieci." + +#: plwn.bases.PLWordNetBase:3 +msgid "Allows retrieving synsets, lexical units, and other informative objects." +msgstr "" +"Pozwala na wydobywanie synsetów, jednostek leksykalnych, oraz innych " +"obiektów informacyjnych." + +#: plwn.bases.PLWordNetBase.close:1 +msgid "Perform cleanup operations after using the :class:`PLWordNetBase` object." +msgstr "Wykonaj operacje porzÄ…dkujÄ…ce po używaniu obiektu :class:`PLWordNetBase`." + +#: plwn.bases.PLWordNetBase.close:4 +msgid "" +"By default, this method does nothing and should be overridden by a " +"subclass if necessary. It should still always be called, since any " +":class:`PLWordNetBase` subclass may create any kind of temporary " +"resources." +msgstr "" +"DomyÅ›lnie, ta metoda nie robi nic i powinna być zaimplementowana przez " +"podklasÄ™, jeÅ›li jest to wymagane. Mimo tego powinna zawsze być " +"wywoÅ‚ywana, ponieważ każda podklasa :class:`PLWordNetBase` może utworzyć " +"dowolny rodzaj tymczasowych zasobów." + +#: plwn.bases.PLWordNetBase.close:9 +msgid "" +"After calling this method, this instance and any ones linked with it " +"(:class:`SynsetBase`, :class:`LexicalUnitBase`, etc.) may become invalid " +"and should not be used." +msgstr "" +"Po wywoÅ‚aniu tej metody, ta instancja oraz wszystkie powiÄ…zane z niÄ…. " +"(:class:`SynsetBase`, :class:`LexicalUnitBase`, itd.) mogÄ… przestać " +"funkcjonować i nie powinny być używane." 
+ +#: plwn.bases.PLWordNetBase.from_dump:1 +msgid "Create new instance from a dump of cached internal representation." +msgstr "Stwórz nowÄ… instancjÄ™ ze zrzutu wewnÄ™trznej reprezentacji." + +#: plwn.bases.PLWordNetBase.from_dump:3 +msgid "" +"The dump file must have been created by :meth:`.from_reader` of the same " +":class:`PLWordNetBase` subclass and schema version." +msgstr "" +"Plik zrzutu musi być wczeÅ›niej utworzony przez :meth:`.from_reader` tej " +"samej podklasy :class:`PLWordNetBase` i o tej samej wersji " +"schematu." + +#: plwn.bases.PLWordNetBase.from_reader:1 +msgid "" +"Create new instance from a source reader, optionally saving it in an " +"internal representation format in another file." +msgstr "" +"Stwórz nowÄ… instancjÄ™ z czytnika formatu źródÅ‚owego, opcjonalnie " +"zapisujÄ…c zrzut wewnÄ™trznej reprezentacji w innym pliku." + +#: plwn.bases.PLWordNetBase.from_reader:4 +msgid "" +"``reader`` is any iterable that yields node instances: " +":class:`~plwn.readers.nodes.SynsetNode`, " +":class:`~plwn.readers.nodes.LexicalUnitNode` and " +":class:`~plwn.readers.nodes.RelationTypeNode`." +msgstr "" +"``reader`` jest dowolnÄ… sekwencjÄ…, która zawiera obiekty typów: " +":class:`~plwn.readers.nodes.SynsetNode`, " +":class:`~plwn.readers.nodes.LexicalUnitNode` " +"i :class:`~plwn.readers.nodes.RelationTypeNode`." + +#: plwn.bases.PLWordNetBase.from_reader:9 +msgid "" +"``dump_to`` is a path to a (non-existing) file where data form ``reader``" +" will be stored to be to be loaded later. If not passed, then the data " +"won't be cached in any file, requiring to be read again using " +":meth:`.from_reader`." +msgstr "" +"``dump_to`` jest Å›cieżkÄ… do (nieistniejÄ…cego) pliku, gdzie dane " +"z ``reader`` zostanÄ… zrzucone, umożliwiajÄ…c późniejsze zaÅ‚adowanie. JeÅ›li" +" argument nie jest podany, dane nie zostanÄ… nigdzie zrzucone, wymagajÄ…c " +"ponownego użycia :meth:`.from_reader`." 
+ +#: plwn.bases.PLWordNetBase.lexical_relation_edges:1 +msgid "" +"Get an iterable of lexical unit relation instances from plWordNet, as " +"represented by :class:`RelationEdge`." +msgstr "" +"WydobÄ…dź sekwencjÄ™ instancji relacji leksykalnych ze SÅ‚owosieci, " +"reprezentowanych przez :class:`RelationEdge`." + +#: plwn.bases.PLWordNetBase.lexical_relation_edges:4 +msgid "" +"This method works like :meth:`.synset_relation_edges`, but for lexical " +"units and relation types. There is no ``skip_artificial``, since there " +"are no artificial lexical units." +msgstr "" +"Ta metoda dziaÅ‚a jak :meth:`.synset_relation_edges`, ale dla leksykalnych" +" jednostek i typów relacji. Nie ma argumentu ``skip_artificial``, " +"ponieważ nie istniejÄ… sztuczne jednostki leksykalne." + +#: plwn.bases.PLWordNetBase.lexical_unit:1 +msgid "" +"Like :meth:`.lexical_units` but either return a single lexical unit or " +"raise :exc:`~plwn.exceptions.LexicalUnitNotFound`." +msgstr "" +"Ta metoda dziaÅ‚a jak :meth:`.lexical_units`, ale zawsze albo zwraca " +"pojedynczÄ… jednostkÄ™ leksykalnÄ…, albo rzuca " +":exc:`~plwn.exceptions.LexicalUnitNotFound`." + +#: plwn.bases.PLWordNetBase.lexical_unit:4 +msgid "" +"All parameters are required, to ensure that the query could only match a " +"single lexical unit." +msgstr "" +"Wszystkie parametry sÄ… wymagane, żeby zapytanie na pewno mogÅ‚o pasować " +"tylko do pojedynczej jednostki leksykalnej." + +#: plwn.bases.PLWordNetBase.lexical_unit_by_id:1 +msgid "Select a lexical unit using its internal, numeric ID." +msgstr "Wybierz jednostkÄ™ leksykalnÄ… używajÄ…c jej wewnÄ™trznego, numerycznego ID." + +#: plwn.bases.PLWordNetBase.lexical_unit_by_id:3 +msgid "" +"If there is no lexical unit with the given ID, raise " +":exc:`~plwn.exceptions.LexicalUnitNotFound`." +msgstr "" +"JeÅ›li nie ma jednostki leksykalnej o takim ID, rzuć " +":exc:`~plwn.exceptions.LexicalUnitNotFound`." 
+ +#: plwn.bases.PLWordNetBase.lexical_unit_by_id:6 +msgid "" +"This is the fastest method to get a particular :class:`LexicalUnitBase` " +"object." +msgstr "To najszybszy sposób na wydobycie danego obiektu :class:`LexicalUnitBase`." + +#: plwn.bases.PLWordNetBase.lexical_units:1 +msgid "Select lexical units from plWordNet based on combination of criteria." +msgstr "" +"WydobÄ…dź jednostki leksykalne ze SÅ‚owosieci na podstawie kombinacji " +"kryteriów." + +#: plwn.bases.PLWordNetBase.lexical_units:3 +msgid "" +"It's possible to specify the lemma, part of speech and variant of the " +"units this method should yield. If a parameter value is omitted, any " +"value matches. Conversely, a call of ``lexical_units()`` will return an " +"iterable of all lexical units in plWordNet. If no lexical unit matches " +"the query, returns an empty iterable." +msgstr "" +"Można podać lemat, część mowy albo wariant jednostek, które ta metoda " +"powinna zwrócić. JeÅ›li wartość któregoÅ› argumentu jest pominiÄ™ta, każda " +"wartość pasuje w to miejsce. W ten sposób, wywoÅ‚anie ``lexical_units()`` " +"zwróci wszystkie jednostki leksykalne ze SÅ‚owosieci. JeÅ›li żadna " +"jednostka leksykalna nie pasuje do zapytania, zwracana jest pusta sekwencja." + +#: plwn.bases.PLWordNetBase.lexical_units:9 +msgid "" +"The parameter ``lemma`` is an unicode string, ``variant`` is an integer, " +"and ``pos`` is an enumerated value of :class:`~plwn.enums.PoS`." +msgstr "" +"Parametr ``lemma`` jest unicode'owym stringiem, ``variant`` jest liczbÄ…, " +"a ``pos`` jest wartoÅ›ciÄ… wyliczeniowÄ… :class:`~plwn.enums.PoS`." + +#: plwn.bases.PLWordNetBase.relations_info:1 +msgid "" +"Get an iterable of :class:`RelationInfoBase` instances, matching the " +"query defined by parameters." +msgstr "" +"WydobÄ…dź sekwencjÄ™ instancji :class:`RelationInfoBase`, pasujÄ…cych do " +"zapytania definiowanego przez parametry." 
+ +#: plwn.bases.PLWordNetBase.relations_info:4 +msgid "" +"``name`` is a string naming a relation (see :class:`RelationInfoBase`). " +"If it names a \"parent\", all its children are selected." +msgstr "" +"``name`` jest nazwÄ… relacji (zob. :class:`RelationInfoBase`). JeÅ›li typ " +"relacji posiada \"dzieci\", wszystkie sÄ… wybierane." + +#: plwn.bases.PLWordNetBase.relations_info:8 +msgid "``kind`` is an enumerated value of :class:`~plwn.enums.RelationKind`." +msgstr "``kind`` jest wartoÅ›ciÄ… wyliczeniowÄ… :class:`~plwn.enums.RelationKind`." + +#: plwn.bases.PLWordNetBase.relations_info:11 +msgid "" +"Any parameter that's not passed matches any relation type. As such, a " +"call of ``relations_info()`` will select all relation types in plWordNet." +msgstr "" +"JeÅ›li któryÅ› parametr nie zostaÅ‚ podany, pasuje do niego każdy typ " +"relacji. Dlatego, wywoÅ‚anie ``relations_info()`` wybiera wszystkie typy " +"relacji ze SÅ‚owosieci." + +#: plwn.bases.PLWordNetBase.synset:1 +msgid "" +"Like :meth:`.synsets`, but either return a single synset or raise " +":exc:`~plwn.exceptions.SynsetNotFound`." +msgstr "" +"Ta metoda dziaÅ‚a jak :meth:`.synsets`, ale albo zwraca pojedynczy synset," +" albo rzuca :exc:`~plwn.exceptions.SynsetNotFound`." + +#: plwn.bases.PLWordNetBase.synset:4 +msgid "" +"All parameters are required, to ensure that the query could only match a " +"single synset." +msgstr "" +"Wszystkie parametry sÄ… wymagane, żeby zapytanie na pewno mogÅ‚o pasować " +"tylko do pojedynczego synsetu." + +#: plwn.bases.PLWordNetBase.synset_by_id:1 +msgid "Select a synset using its internal, numeric ID." +msgstr "Wybierz synset używajÄ…c wewnÄ™trznego, numerycznego ID." + +#: plwn.bases.PLWordNetBase.synset_by_id:3 +msgid "" +"If there is no synset with the given ID, raise " +":exc:`~plwn.exceptions.SynsetNotFound`." +msgstr "" +"JeÅ›li nie ma synsetu o takim ID, rzuć " +":exc:`~plwn.exceptions.SynsetNotFound`." 
+ +#: plwn.bases.PLWordNetBase.synset_by_id:6 +msgid "This is the fastest method to get a particular :class:`SynsetBase` object." +msgstr "To najszybsza metoda wydobycia danego obiektu :class:`SynsetBase`." + +#: plwn.bases.PLWordNetBase.synset_relation_edges:1 +msgid "" +"Get an iterable of synset relation instances from plWordNet, as " +"represented by :class:`RelationEdge`." +msgstr "" +"WydobÄ…dź sekwencjÄ™ instancji relacji synsetów ze SÅ‚owosieci, " +"reprezentowanych przez :class:`RelationEdge`." + +#: plwn.bases.PLWordNetBase.synset_relation_edges:4 +msgid "" +"``include`` and ``exclude`` are containers of relation type identifiers " +"(see :class:`RelationInfoBase`). If ``include`` is not ``None``, then " +"only instances of relations in it are included in the result. If " +"``exclude`` is not ``None``, then all relations in it are omitted from " +"the result. If both are ``None``, all relations are selected." +msgstr "" +"``include`` i ``exclude`` sÄ… kolekcjami identyfikatorów typów relacji " +"(zob. :class:`RelationInfoBase`). JeÅ›li ``include`` nie jest ``None``, " +"tylko typy relacji zawarte w tej kolekcji sÄ… brane pod uwagÄ™ przy " +"wybieraniu instancji. JeÅ›li ``exclude`` nie jest ``None`` instancje typów" +" zawartych w tej kolekcji sÄ… pomijane w wynikowej sekwencji. JeÅ›li oba " +"parametry sÄ… ``None``, wszystkie relacje sÄ… wybierane." + +#: plwn.bases.PLWordNetBase.synset_relation_edges:11 +msgid "" +"If ``skip_artificial`` is ``True`` (the default), then artificial synsets" +" (see :attr:`SynsetBase.is_artificial`) are \"skipped over\": new " +"relation edges are created to replace ones ending or staring in an " +"artificial synset, and connecting neighbouring synsets if they have " +"relations directed like this::" +msgstr "" +"JeÅ›li ``skip_artificial`` jest ``True`` (domyÅ›lnie), sztuczne synsety " +"(zob. 
:attr:`SynsetBase.is_artificial`) sÄ… \"przeskakiwane\": nowe " +"krawÄ™dzie relacji sÄ… tworzone by zastÄ…pić te koÅ„czÄ…ce albo zaczynajÄ…ce " +"siÄ™ sztucznym synsetem i połączyć sÄ…siadujÄ…ce ze sobÄ… synsety jeÅ›li majÄ… " +"relacje skierowane w taki sposób::" + +#: plwn.bases.PLWordNetBase.synset_relation_edges:42 +msgid "" +"``Syn C`` is dropped, since there's no instance of ``Rel 1`` directed " +"outwards from the skipped artificial ``Syn B``." +msgstr "" +"``Syn C`` jest porzucany, ponieważ nie ma instancji ``Rel 1`` skierowanej " +"od przeskoczonego sztucznego ``Syn B``." + +#: plwn.bases.PLWordNetBase.synsets:1 +msgid "Select synsets from plWordNet based on combination of criteria." +msgstr "Wybierz synsety ze SÅ‚owosieci w oparciu o kombinacjÄ™ kryteriów." + +#: plwn.bases.PLWordNetBase.synsets:3 +msgid "" +"This method works just like :meth:`.lexical_units`, but returns an " +"iterable of distinct synsets that own the lexical units selected by the " +"query." +msgstr "" +"Ta metoda dziaÅ‚a jak :meth:`.lexical_units`, ale zwraca sekwencjÄ™ " +"unikalnych synsetów zawierajÄ…cych jednostki wybrane przez zapytanie." + +#: plwn.bases.PLWordNetBase.to_graphml:1 +msgid "" +"Export plWordNet as graph in `GraphML " +"<http://graphml.graphdrawing.org/>`_ format." +msgstr "" +"Eksportuj SÅ‚owosieć jako graf w formacie `GraphML " +"<http://graphml.graphdrawing.org/>`_." + +#: plwn.bases.PLWordNetBase.to_graphml:4 +msgid "" +"Nodes of the graph are synsets and / or lexical units, and edges are " +"relation instances." +msgstr "" +"WÄ™zÅ‚ami grafu sÄ… synsety i / lub jednostki leksykalne, a krawÄ™dziami sÄ… " +"instancje relacji." + +#: plwn.bases.PLWordNetBase.to_graphml:7 +msgid "For nodes, their numeric plWordNet IDs are set as their XML element IDs." +msgstr "" +"Numeryczne ID synsetów i jednostek w SÅ‚owosieci sÄ… ustawiane jako ID " +"elementów XML." 
+ +#: plwn.bases.PLWordNetBase.to_graphml:10 +msgid "" +"**NOTE:** Nodes that have no inbound or outbound edges are dropped from " +"the graph." +msgstr "" +"**UWAGA:** WÄ™zÅ‚y które nie majÄ… żadnych krawÄ™dzi wychodzÄ…cych z nich ani " +"prowadzÄ…cych do nich sÄ… usuwane z grafu." + +#: plwn.bases.PLWordNetBase.to_graphml:13 +msgid "" +"Nodes and edges have attributes, as GraphML defines them. For nodes, " +"attributes are public properties of :class:`SynsetBase` or " +":class:`LexicalUnitBase` (aside from ``relations``, which would be " +"useless in a graph, and ``id``, which becomes the XML ID of a node). " +"Edges have two attributes:" +msgstr "" +"WÄ™zÅ‚y i krawÄ™dzie posiadajÄ… atrybuty, w takim sensie w jakim definiuje je" +" GraphML. Atrybutami wÄ™złów sÄ… publiczne atrybuty klas " +":class:`SynsetBase` lub :class:`LexicalUnitBase` (poza ``relations``, " +"który byÅ‚by nieprzydatny w strukturze grafu, oraz ``id`` który zamiast " +"atrybutem jest ID elementu XML wÄ™zÅ‚a). KrawÄ™dzie majÄ… dwa atrybuty:" + +#: plwn.bases.PLWordNetBase.to_graphml:19 +msgid "" +"**type**: Either ``relation``, for edges that represent plWordNet " +"relation instances, or ``unit_and_synset`` for edges between synset nodes" +" and nodes of lexical units that belong to the synset. The latter appear " +"only in *mixed* graph." +msgstr "" +"**type**: Albo wartość ``relation``, dla krawÄ™dzi, które reprezentujÄ… " +"instancje relacji SÅ‚owosieci, albo ``unit_and_synset`` dla krawÄ™dzi " +"pomiÄ™dzy wÄ™zÅ‚ami synsetów i należących do nich wÄ™złów jednostek " +"leksykalnych. Te drugie pojawiajÄ… siÄ™ jedynie w grafie typu *mixed*." + +#: plwn.bases.PLWordNetBase.to_graphml:23 +msgid "" +"**name**: If **type** is ``relation``, then this is the full name of the " +"relation (see :class:`RelationInfoBase`). 
If **type** is " +"``unit_and_synset``, it is one of constant values: ``has_unit`` if the " +"edge is directed from synset to unit, or ``in_synset``, for edges " +"directed from unit to synset." +msgstr "" +"**name**: JeÅ›li **type** to ``relation``, wtedy jest to peÅ‚na nazwa " +"relacji (zob. :class:`RelationInfoBase`). JeÅ›li **type** to " +"``unit_and_synset``, wtedy jest jednÄ… ze staÅ‚ych wartoÅ›ci: ``has_unit`` " +"jeÅ›li krawÄ™dź jest skierowana od synsetu do jednostki, albo " +"``in_synset``, jeÅ›li krawÄ™dź jest skierowana od jednostki do synsetu." + +#: plwn.bases.PLWordNetBase.to_graphml:29 +msgid "" +"``out_file`` is a writable file-like object to which the GraphML output " +"will be written." +msgstr "" +"``out_file`` to obiekt plikowy z możliwoÅ›ciÄ… zapisu, do którego zostanie " +"zrzucone wyjÅ›cie w formacie GraphML." + +#: plwn.bases.PLWordNetBase.to_graphml:32 +msgid "" +"``graph_type`` is one of three constant string values: ``synset``, " +"``lexical_unit`` or ``mixed``. Synset graph contains only synset nodes " +"and relations, lexical unit graph contains only lexical unit nodes and " +"relations, and mixed graph contains all of the former, as well as " +"additional edges that map lexical units to synsets they belong to." +msgstr "" +"``graph_type`` jest jednÄ… ze staÅ‚ych wartoÅ›ci: ``synset``, " +"``lexical_unit`` albo ``mixed``. Graf ``synset`` zawiera jedynie wÄ™zÅ‚y " +"synsetów i relacje miÄ™dzy synsetami, graf ``lexical_unit`` zawiera " +"jedynie wÄ™zÅ‚y i relacje jednostek leksykalnych, a ``mixed`` zawiera " +"wszystkie powyższe, oraz dodatkowe krawÄ™dzie łączÄ…ce synsety z należącymi" +" do niech jednostkami leksykalnymi." + +#: plwn.bases.PLWordNetBase.to_graphml:39 +msgid "" +"If ``include_attributes`` is ``True``, then all synset and / or lexical " +"unit attributes will be included. By default, attributes are not included" +" to shrink the written file. 
Note, that if any of " +"``(included/excluded)_(synset/lexical_unit)_attributes`` parameters is " +"passed, inclusion of attributes will be controlled by them and the value " +"of ``include_attributes`` is ignored." +msgstr "" +"JeÅ›li ``include_attributes`` ma prawdziwÄ… wartość, wtedy wszystkie " +"atrybuty synsetów i / lub jednostek leksykalnych bÄ™dÄ… włączone do grafu. " +"DomyÅ›lnie, żadne wartoÅ›ci atrybutów nie sÄ… przenoszone do pliku GraphML " +"by zmniejszyć jego rozmiar. Uwaga: jeÅ›li zostaÅ‚ podany jakikolwiek " +"z parametrów ``(included/excluded)_(synset/lexical_unit)_attributes``, " +"atrybuty w pliku wyjÅ›ciowym sÄ… kontrolowane przez te parametry, " +"a ``include_attributes`` bÄ™dzie zignorowany." + +#: plwn.bases.PLWordNetBase.to_graphml:46 +msgid "" +"If ``prefix_ids`` is ``True``, then ID of each node will be prefixed with" +" the type: ``synset-`` or ``lexical_unit-``. By default, it's not done, " +"unless ``graph_type`` is ``mixed``, in which case this parameter is " +"ignored and ID prefixes are enforced." +msgstr "" +"JeÅ›li ``prefix_ids`` ma prawdziwÄ… wartość, wtedy przed ID każdego wÄ™zÅ‚a " +"bÄ™dzie dopisany jego typ: ``synset-`` lub ``lexical_unit-``. DomyÅ›lnie " +"typ wÄ™zÅ‚a jest pomijany, chyba że ``graph_type`` to ``mixed``, wtedy ten " +"parametr jest ignorowany a typ wÄ™złów zawsze jest dopisywany." + +#: plwn.bases.PLWordNetBase.to_graphml:51 +msgid "" +"``included_synset_attributes`` and ``excluded_synset_attributes`` are " +"containers of synset attribute names, selecting the values which should " +"or should not be included with synset nodes." +msgstr "" +"``included_synset_attributes`` i ``excluded_synset_attributes`` sÄ… " +"zbiorami nazw atrybutów synsetów, wyznaczajÄ…cymi te które powinny bÄ…dź " +"nie powinny być włączone do wÄ™złów synsetów." + +#: plwn.bases.PLWordNetBase.to_graphml:55 +msgid "" +"``included_lexical_unit_attributes`` and " +"``excluded_lexical_unit_attributes`` are the same way as the above, but " +"for attributes of lexical units." 
+msgstr "" +"``included_lexical_unit_attributes`` " +"i ``excluded_lexical_unit_attributes`` dziaÅ‚ajÄ… w taki sam sposób, ale " +"dla atrybutów jednostek leksykalnych." + +#: plwn.bases.PLWordNetBase.to_graphml:59 +msgid "" +"``included_synset_relations`` and ``excluded_synset_relations`` are " +"containers of synset relation type identifiers (see " +":class:`RelationInfoBase`), selecting synset relation types whose " +"instances should or should not be included in the graph. By default, all " +"relation types are included." +msgstr "" +"``included_synset_relations`` i ``excluded_synset_relations`` sÄ… zbiorami" +" identyfikatorów typów relacji (zob. :class:`RelationInfoBase`), " +"wyznaczajÄ…c relacje synsetów których krawÄ™dzie powinny bÄ…dź nie powinny " +"znaleźć siÄ™ w grafie. DomyÅ›lnie graf zawiera krawÄ™dzie wszystkich " +"relacji." + +#: plwn.bases.PLWordNetBase.to_graphml:65 +msgid "" +"``included_lexical_unit_relations`` and " +"``excluded_lexical_unit_relations`` are the same was as the above, but " +"for lexical relation types." +msgstr "" +"``included_lexical_unit_relations`` i ``excluded_lexical_unit_relations``" +" dziaÅ‚ajÄ… w taki sam sposób, ale dla typów relacji leksykalnych." + +#: plwn.bases.PLWordNetBase.to_graphml:69 +msgid "" +"``included_synset_nodes`` and ``excluded_synset_nodes`` are containers " +"for IDs of synset that should or should not be included as nodes in the " +"graph. If a node is not included, all edges that start or end in it are " +"also excluded. By default, all non-artificial synsets are included." +msgstr "" +"``included_synset_nodes`` i ``excluded_synset_nodes`` sÄ… zbiorami ID " +"synsetów, które powinny bÄ…dź nie powinny znaleźć siÄ™ w grafie jako wÄ™zÅ‚y." +" JeÅ›li jakiÅ› wÄ™zeÅ‚ jest wyłączony z grafu, wszystkie krawÄ™dzie które " +"zaczynajÄ… siÄ™ bÄ…dź koÅ„czÄ… w nim sÄ… również pomijane." 
+ +#: plwn.bases.PLWordNetBase.to_graphml:74 +msgid "" +"``included_lexical_unit_nodes`` and ``excluded_lexical_unit_nodes`` are " +"the same way as the above, but for lexical units." +msgstr "" +"``included_lexical_unit_nodes`` i ``excluded_lexical_unit_nodes`` " +"dziaÅ‚ajÄ… jak powyżej, ale dla wÄ™złów jednostek leksykalnych." + +#: plwn.bases.PLWordNetBase.to_graphml:77 +msgid "" +"If ``skip_artificial_synsets`` is ``True`` (the default), then artificial" +" synsets are excluded from the graph, and edges connecting to them are " +"reconnected to \"skip over\" them, as described for " +":meth:`.synset_relation_edges`." +msgstr "" +"JeÅ›li ``skip_artificial_synsets`` ma prawdziwÄ… wartość (domyÅ›lnie), " +"sztuczne synsety sÄ… wykluczane z grafu, a krawÄ™dzie sÄ… modyfikowane tak, " +"by \"przeskakiwać\" je tak jak opisano przy " +":meth:`.synset_relation_edges`." + +#: plwn.bases.PLWordNetBase.to_graphml:82 +msgid "" +"**Note:** while this method accepts all of the above parameters at all " +"times, parameters relating to synsets are ignored if ``graph_type`` is " +"``lexical_unit``, and parameters relating to lexical units are ignored if" +" ``graph_type`` is ``synset``." +msgstr "" +"**Uwaga:** mimo że ta metoda przyjmuje wszystkie powyższe parametry " +"naraz, te odnoszÄ…ce siÄ™ do synsetów sÄ… ignorowane jeÅ›li ``graph_type`` to" +" ``lexical_unit``, a parametry odnoszÄ…ce siÄ™ do jednostek leksykalnych sÄ…" +" ignorowane jeÅ›li ``graph_type`` to ``synset``." + +#: plwn.bases.SynsetBase:1 +msgid "Encapsulates data associated with a plWordNet synset." +msgstr "Przechowuje dane zwiÄ…zane z synsetem SÅ‚owosieci." + +#: plwn.bases.SynsetBase:3 +msgid "" +"Synset contains lexical units that have the same meaning (ie. synonyms). " +"Most of plWordNet relations are between meanings, hence the need to group" +" lexical units into synsets." +msgstr "" +"Synsety zawierajÄ… jednostki leksykalne o takim samym znaczeniu " +"(synonimy). 
WiÄ™kszość relacji w SÅ‚owosieci jest pomiÄ™dzy znaczeniami, " +"dlatego potrzebne jest grupowanie jednostek leksykalnych w synsety." + +#: plwn.bases.SynsetBase:7 +msgid "" +"For purposes of ordering, a :class:`SynsetBase` object is uniquely " +"identified by its \"head\": the first of the lexical units it contains." +msgstr "" +"W kwestii porzÄ…dkowania, obiekt :class:`SynsetBase` jest identyfikowany " +"przez swojÄ… \"gÅ‚owÄ™\": pierwszÄ… jednostkÄ™ leksykalnÄ…, którÄ… zawiera." + +#: plwn.bases.SynsetBase.definition:1 +msgid "Textual description of the synset's meaning." +msgstr "Tekstowy opis znaczenia synsetu." + +#: plwn.bases.SynsetBase.definition:3 plwn.bases.LexicalUnitBase.definition:3 +msgid "May be ``None``." +msgstr "Może być ``None``." + +#: plwn.bases.SynsetBase.definition:5 +msgid "" +"In plWordNet, most definitions are stored as " +":attr:`LexicalUnitBase.definition`. Synset definitions are present mostly" +" for English synsets." +msgstr "" +"W SÅ‚owosieci, wiÄ™kszość definicji jest trzymana jako " +":attr:`LexicalUnitBase.definition`. Definicje synsetów sÄ… obecne głównie " +"dla anglojÄ™zycznych synsetów." + +#: plwn.bases.SynsetBase.id:1 +msgid "" +"The internal, numeric identifier of the synset in plWordNet. It is unique" +" among all synsets." +msgstr "WewnÄ™trzny, unikalny, numeryczny identyfikator synsetu w SÅ‚owosieci." + +#: plwn.bases.SynsetBase.id:4 +msgid "" +"If this identifier is passed to :meth:`PLWordNetBase.synset_by_id`, it " +"would return this :class:`SynsetBase` object." +msgstr "" +"Przekazanie tego identyfikatora do :meth:`PLWordNetBase.synset_by_id` " +"zwróciÅ‚oby ten obiekt :class:`SynsetBase`." + +#: plwn.bases.SynsetBase.is_artificial:1 +msgid "Boolean value informing if the synset is an artificial one." +msgstr "Wartość boolowska informujÄ…ca czy synset jest sztuczny." 
+ +#: plwn.bases.SynsetBase.is_artificial:3 +msgid "" +"Artificial synsets carrying no linguistic meaning, but introduced as a " +"method of grouping synsets within the structure of plWordNet." +msgstr "" +"Sztuczne synsety nie majÄ… lingwistycznego znaczenia, ale sÄ… sposobem na " +"grupowanie synsetów w strukturze SÅ‚owosieci." + +#: plwn.bases.SynsetBase.is_artificial:7 +msgid "For most uses, artificial synsets should be ignored." +msgstr "" +"W wiÄ™kszoÅ›ci przypadków użycia SÅ‚owosieci sztuczne synsety powinny być " +"ignorowane." + +#: plwn.bases.SynsetBase.lexical_units:1 +msgid "" +"Tuple of :class:`LexicalUnitBase` objects, representing lexical units " +"contained in the synset. Ordering of units within the tuple is arbitrary," +" but constant." +msgstr "" +"Krotka obiektów :class:`LexicalUnitBase`, reprezentujÄ…cych jednostki " +"leksykalne znajdujÄ…ce siÄ™ w synsecie." + +#: plwn.bases.SynsetBase.lexical_units:5 +msgid "" +"At least one lexical unit is always present in every synset, so " +"``lexical_units[0]`` is always valid and selects the synset's \"head\"." +msgstr "" +"Co najmniej jedna jednostka leksykalna jest zawarta w każdym synsecie, " +"wiÄ™c ``lexical_units[0]`` jest zawsze poprawnym wyrażeniem, zwracajÄ…cym " +"\"gÅ‚owÄ™\" synsetu." + +#: plwn.bases.SynsetBase.related:1 +msgid "" +"Get an iterable of :class:`SynsetBase` instances that are connected to " +"this synset by outbound edges of synset relation type identified by " +"``relation_id``." +msgstr "" +"WydobÄ…dź sekwencjÄ™ instancji :class:`SynsetBase` do których prowadzÄ… " +"krawÄ™dzie relacji typu ``relation_id`` wychodzÄ…ce z tego synsetu." + +#: plwn.bases.SynsetBase.related:5 +msgid "" +"``relation_id`` can be any synset relation type identifier (see " +":class:`RelationInfoBase`), a collection of relation types identifiers, " +"or ``None``, in which case synsets related to this one by any relation " +"are selected." 
+msgstr "" +"``relation_id`` może być identyfikatorem typu dowolnej relacji synsetowej " +"(zob. :class:`RelationInfoBase`), zbiorem identyfikatorów typów relacji " +"synsetowych, albo ``None``; w ostatnim przypadku wszystkie synsety bÄ™dÄ…ce " +"w jakiejkolwiek relacji z danym synsetem sÄ… zwracane." + +#: plwn.bases.SynsetBase.related:10 plwn.bases.LexicalUnitBase.related:10 +msgid "" +"Note, that distinction between any relations that fit the ``relation_id``" +" query is lost. Use :meth:`.related_pairs` if it's needed." +msgstr "" +"Rozróżnienie pomiÄ™dzy instancjami różnych relacji pasujÄ…cymi do zapytania " +"``relation_id`` jest tracone; należy użyć :meth:`.related_pairs` gdy jest " +"ono potrzebne." + +#: plwn.bases.SynsetBase.related:14 +msgid "" +"Raises :exc:`~plwn.exceptions.InvalidRelationTypeException` if (any of) " +"``relation_id`` does not refer to an existing synset relation type." +msgstr "" +"WyjÄ…tek :exc:`~plwn.exceptions.InvalidRelationTypeException` jest rzucany " +"gdy (którykolwiek z) ``relation_id`` nie jest identyfikatorem istniejÄ…cej " +"relacji synsetów." + +#: plwn.bases.SynsetBase.related:18 +msgid "" +"If ``skip_artificial`` is ``True`` (the default) artificial synsets " +"related to this one are \"skipped over\", as described for " +":meth:`PLWordNetBase.synset_relation_edges`." +msgstr "" +"JeÅ›li ``skip_artificial`` ma prawdziwÄ… wartość (domyÅ›lnie), sztuczne " +"synsety w relacji z tym synsetem sÄ… \"przeskakiwane\", zgodnie z opisem " +"dla :meth:`PLWordNetBase.synset_relation_edges`." + +#: plwn.bases.SynsetBase.related_pairs:1 +msgid "" +"Like :meth:`.related`, but return an iterable of pairs ``(<relation " +"info>, <relation target synset>)``." +msgstr "" +"DziaÅ‚a jak :meth:`.related`, ale zwraca sekwencjÄ™ par ``(<info " +"o relacji>, <synset koÅ„czÄ…cy relacjÄ™>)``." 
+ +#: plwn.bases.SynsetBase.relations:1 +msgid "" +"Tuple of :class:`RelationInfoBase` instances, containing types of " +"distinct relations that have outbound edges from this synset." +msgstr "" +"Krotka instancji :class:`RelationInfoBase` zawierajÄ…ca typy relacji które" +" majÄ… krawÄ™dzie wychodzÄ…ce z tego synsetu." + +#: plwn.bases.SynsetBase.relations:4 plwn.bases.LexicalUnitBase.relations:4 +msgid "Relations are returned in an arbitrary order." +msgstr "Relacje sÄ… zwracane w dowolnej kolejnoÅ›ci." + +#: plwn.bases.SynsetBase.relations:6 plwn.bases.LexicalUnitBase.relations:6 +msgid "" +"The tuple is special: methods for checking membership accept all possible" +" representations of a relation type (see :meth:`RelationInfoBase.eqv`)." +msgstr "" +"Ta krotka jest specjalna: metody sprawdzajÄ…ce jej zawartość akceptujÄ… " +"wszystkie możliwe reprezentacje typu relacji (zob. " +":meth:`RelationInfoBase.eqv`)." + +#: plwn.bases.SynsetBase.short_str:1 +msgid "" +"Shorter version of synset's string form (``__str__``) that displays only " +"the first lexical unit." +msgstr "" +"Krótsza wersja tekstowej formy synsetu (``__str__``) która wyÅ›wietla " +"tylko pierwszÄ… jednostkÄ™ leksykalnÄ…." + +#: plwn.bases.SynsetBase.to_dict:1 +msgid "" +"Create a JSON-compatible dictionary with all public properties of the " +"synset." +msgstr "" +"Stwórz obiekt ``dict`` kompatybilny z formatem JSON, zawierajÄ…cy wartoÅ›ci" +" wszystkich publicznych atrybutów synsetu." + +#: plwn.bases.SynsetBase.to_dict:4 plwn.bases.LexicalUnitBase.to_dict:4 +msgid "" +"Enums are converted to their values and all collections are converted to " +"tuples." +msgstr "" +"WartoÅ›ci wyliczeniowe sÄ… konwertowane do swoich (tekstowych) wartoÅ›ci, " +"a wszystkie kolekcje sÄ… konwertowane do krotek." + +#: plwn.bases.SynsetBase.to_dict:7 +msgid "" +"Property :attr:`.relations` is omitted, as it would be redundant when all" +" related synsets can be enumerated when ``include_related`` is ``True``. 
" +"Some additional members are also present in the dictionary:" +msgstr "" +"Atrybut :attr:`.relations` jest pomijany, ponieważ byÅ‚by zbÄ™dny kiedy " +"wszystkie synsety bÄ™dÄ…ce w relacji mogÄ… być wypisane kiedy " +"``include_related`` ma prawdziwÄ… wartość. Kilka dodatkowych atrybutów " +"jest dodanych do sÅ‚ownika:" + +#: plwn.bases.SynsetBase.to_dict:11 +msgid "" +"``str``: The string representation of the synset (defined by ``__str__`` " +"override on :class:`SynsetBase`)." +msgstr "" +"``str``: Tekstowa reprezentacja synsetu (okreÅ›lana przez metodÄ™ " +"``__str__`` na :class:`SynsetBase`)." + +#: plwn.bases.SynsetBase.to_dict:13 +msgid "" +"``units``: Listing (as a tuple) of units belonging to the synset (in the " +"same ordering as :attr:`.lexical_units`), as pairs of ``(<unit id>, <unit" +" string form>)``." +msgstr "" +"``units``: Listowanie (jako krotka) jednostek należących do synsetu " +"(w takiej samej kolejnoÅ›ci jak :attr:`.lexical_units`), jako pary ``(<id " +"jednostki>, <forma tekstowa jednostki>)``." + +#: plwn.bases.SynsetBase.to_dict:17 +msgid "" +"If ``include_related`` is ``True`` (the default), the dictionary will " +"contain an additional ``related`` member, representing synsets related to" +" this one, in the following format::" +msgstr "" +"JeÅ›li ``include_related`` ma prawdziwÄ… wartość (domyÅ›lnie), mapowanie " +"bÄ™dzie zawieraÅ‚a dodatkowy atrybut ``related``, reprezentujÄ…cy synsety " +"bÄ™dÄ…ce w relacji z obecnym synsetem, w nastÄ™pujÄ…cym formacie::" + +#: plwn.bases.SynsetBase.to_dict:29 +msgid "" +"If ``include_units_data`` is ``True`` (the default), the ``units`` member" +" will contain results of invocation of :meth:`LexicalUnitBase.to_dict` " +"for the synset's units, instead of pairs described above. In this case, " +"the value of ``include_related`` parameter is passed on to " +":meth:`LexicalUnitBase.to_dict`." 
+msgstr "" +"JeÅ›li ``include_units_data`` ma prawdziwÄ… wartość (domyÅ›lnie), atrybut " +"``units`` bÄ™dzie zawieraÅ‚ wyniki wywoÅ‚ania " +":meth:`LexicalUnitBase.to_dict` dla jednostek synsetu, zamiast par " +"opisanych powyżej. W takim wypadku wartość ``include_related`` jest " +"przekazany do :meth:`LexicalUnitBase.to_dict`." + +#: plwn.bases.LexicalUnitBase:1 +msgid "Encapsulates data associated with a plWordNet lexical unit." +msgstr "Przechowuje dane zwiÄ…zane z jednostkÄ… leksykalnÄ… SÅ‚owosieci." + +#: plwn.bases.LexicalUnitBase:3 +msgid "" +"Lexical units represent terms in the language. Each lexical unit is " +"uniquely identified by its lemma (base written form), part of speech " +"(verb, noun, adjective or adverb) and variant (a number differentiating " +"between homonyms)." +msgstr "" +"Jednostki leksykalne reprezentujÄ… terminy jÄ™zyka. Każda jednostka " +"leksykalna jest identyfikowana przez swój lemat (bazowÄ… formÄ™ tekstowÄ…), " +"część mowy (czasownik, rzeczownik, przymiotnik lub przysłówek) i wariant " +"(numer rozróżniajÄ…cy homonimy)." + +#: plwn.bases.LexicalUnitBase.definition:1 +msgid "Textual description of the lexical unit's meaning." +msgstr "Tekstowy opis znaczenia jednostki leksykalnej." + +#: plwn.bases.LexicalUnitBase.domain:1 +msgid "" +"plWordNet domain the lexical unit belongs to; one of enumerated constants" +" of :class:`~plwn.enums.Domain`." +msgstr "" +"Domena SÅ‚owosieci do której jednostka leksykalna należy; jeden z obiektów" +" wyliczeniowych :class:`~plwn.enums.Domain`." + +#: plwn.bases.LexicalUnitBase.emotion_example:1 +msgid "An example of an emotionally charged sentence using the lexical unit." +msgstr "" +"PrzykÅ‚ad emocjonalnie nacechowanego zdania zawierajÄ…cego jednostkÄ™ " +"leksykalnÄ…." + +#: plwn.bases.LexicalUnitBase.emotion_example_secondary:1 +msgid "" +"This property is not ``None`` only if :attr:`.emotion_markedness` is " +":attr:`~plwn.enums.EmotionMarkedness.amb`. 
In such case, " +":attr:`.emotion_example` will be an example of a positively charged " +"sentence, and this one will be a negatively charged sentence." +msgstr "" +"Ten atrybut nie jest ``None`` tylko gdy :attr:`.emotion_markedness` ma " +"wartość :attr:`~plwn.enums.EmotionMarkedness.amb`. W tym wypadku, " +":attr:`.emotion_example` bÄ™dzie przykÅ‚adem pozytywnie nacechowanego " +"zdania, a ten atrybut bÄ™dzie przykÅ‚adem negatywnie nacechowanego zdania." + +#: plwn.bases.LexicalUnitBase.emotion_markedness:1 +msgid "" +"Markedness of emotions associated with the lexical unit. May be ``None`` " +"if the unit has no emotional markedness." +msgstr "" +"Wartość nacechowania emocjonalnego jednostki leksykalnej. Może być " +"``None`` jeÅ›li jednostka nie ma emocjonalnego nacechowania." + +#: plwn.bases.LexicalUnitBase.emotion_markedness:4 +msgid "" +"If this property is ``None`` then all other ``emotion_*`` properties will" +" be ``None`` or empty collections." +msgstr "" +"JeÅ›li ten atrybut ma wartość ``None``, wszystkie pozostaÅ‚e atrybuty " +"``emotion_*`` bÄ™dÄ… ``None`` albo pustymi kolekcjami." + +#: plwn.bases.LexicalUnitBase.emotion_names:1 +msgid "Tuple of names of emotions associated with this lexical unit." +msgstr "Krotka nazw emocji skojarzonych z tÄ… jednostkÄ… leksykalnÄ…." + +#: plwn.bases.LexicalUnitBase.emotion_valuations:1 +msgid "Tuple of valuations of emotions associated with this lexical unit." +msgstr "Krotka wartoÅ›ciowaÅ„ emocji skojarzonych z tÄ… jednostkÄ… leksykalnÄ…." + +#: plwn.bases.LexicalUnitBase.external_links:1 +msgid "URLs linking to web pages describing the meaning of the lexical unit." +msgstr "" +"OdnoÅ›niki URL do stron internetowych opisujÄ…cych znaczenie jednostki " +"leksykalnej." + +#: plwn.bases.LexicalUnitBase.external_links:3 +#: plwn.bases.LexicalUnitBase.usage_notes:7 +msgid "May be an empty collection." +msgstr "Może być pustÄ… kolekcjÄ…." 
+ +#: plwn.bases.LexicalUnitBase.id:1 +msgid "" +"The internal, numeric identifier of the lexical units in plWordNet. It is" +" unique among all lexical units." +msgstr "" +"WewnÄ™trzny, unikalny, numeryczny identyfikator jednostki leksykalnej " +"w SÅ‚owosieci." + +#: plwn.bases.LexicalUnitBase.id:4 +msgid "" +"If this identifier is passed to :meth:`PLWordNetBase.lexical_unit_by_id`," +" it would return this :class:`LexicalUnitBase` object." +msgstr "" +"Przekazanie tego identyfikatora do " +":meth:`PLWordNetBase.lexical_unit_by_id` zwróciÅ‚oby ten obiekt " +":class:`LexicalUnitBase`." + +#: plwn.bases.LexicalUnitBase.lemma:1 +msgid "Lemma of the unit; its basic text form." +msgstr "Lemat jednostki; jej bazowa forma tekstowa." + +#: plwn.bases.LexicalUnitBase.pos:1 +msgid "" +"Part of speech of the unit; one of enumerated constants of " +":class:`~plwn.enums.PoS`." +msgstr "" +"Część mowy jednostki; jeden z obiektów wyliczeniowych " +":class:`~plwn.enums.PoS`." + +#: plwn.bases.LexicalUnitBase.related:1 +msgid "" +"Get an iterable of :class:`LexicalUnitBase` instances that are connected " +"to this lexical unit by outbound edges of lexical relation type " +"identified by ``relation_id``." +msgstr "" +"WydobÄ…dź sekwencjÄ™ instancji :class:`LexicalUnitBase` do których prowadzÄ…" +" krawÄ™dzie typu ``relation_id`` wychodzÄ…ce z tej jednostki leksykalnej." + +#: plwn.bases.LexicalUnitBase.related:5 +msgid "" +"``relation_id`` can be any lexical relation type identifier (see " +":class:`RelationInfoBase`), a collection of relation types identifiers, " +"or ``None``, in which case lexical units related to this one by any " +"relation are selected." +msgstr "" +"``relation_id`` może być identyfikatorem typu dowolnej relacji leksykalnej " +"(zob. :class:`RelationInfoBase`), zbiorem identyfikatorów typów relacji " +"leksykalnych, bÄ…dź ``None``; w ostatnim przypadku wszystkie jednostki " +"leksykalne bÄ™dÄ…ce w jakiejkolwiek relacji z danÄ… jednostkÄ… sÄ… zwracane." 
+ +#: plwn.bases.LexicalUnitBase.related:14 +msgid "" +"Raises :exc:`~plwn.exceptions.InvalidRelationTypeException` if " +"``relation_id`` does not refer to an existing lexical relation type." +msgstr "" +"WyjÄ…tek :exc:`~plwn.exceptions.InvalidRelationTypeException` jeÅ›li " +"``relation_id`` nie jest identyfikatorem istniejÄ…cej relacji leksykalnej." + +#: plwn.bases.LexicalUnitBase.related_pairs:1 +msgid "" +"Like :meth:`.related`, but return an iterable of pairs ``(<relation " +"info>, <relation target unit>)``." +msgstr "" +"DziaÅ‚a jak :meth:`.related`, ale zwraca sekwencjÄ™ par ``(<relation info>," +" <relation target unit>)``." + +#: plwn.bases.LexicalUnitBase.relations:1 +msgid "" +"Tuple of :class:`RelationInfoBase` instances, containing types of " +"distinct relations that have outbound edges from this lexical unit." +msgstr "" +"Krotka instancji :class:`RelationInfoBase`, zawierajÄ…ca typy relacji " +"które majÄ… krawÄ™dzie wychodzÄ…ce z tej jednostki leksykalnej." + +#: plwn.bases.LexicalUnitBase.sense_examples:1 +msgid "Text fragments that show how the lexical unit is used in the language." +msgstr "" +"Fragmenty tekstu pokazujÄ…ce jak jednostka leksykalna jest używana " +"w jÄ™zyku." + +#: plwn.bases.LexicalUnitBase.sense_examples:4 +msgid "May be an empty tuple." +msgstr "Może być pustÄ… krotkÄ…." + +#: plwn.bases.LexicalUnitBase.sense_examples_sources:1 +msgid "" +"Symbolic representations of sources from which the sense examples were " +"taken." +msgstr "" +"Symboliczna reprezentacja źródeÅ‚, z których przykÅ‚ady użycia zostaÅ‚y " +"wziÄ™te." + +#: plwn.bases.LexicalUnitBase.sense_examples_sources:4 +msgid "The symbols are short strings, defined by plWordNet." +msgstr "Symbole sÄ… krótkimi stringami, zdefiniowanymi przez SÅ‚owosieć." 
+ +#: plwn.bases.LexicalUnitBase.sense_examples_sources:6 +msgid "" +"This tuples has the same length as :attr:`.sense_examples`, and is " +"aligned by index (for example, the source of ``sense_examples[3]`` is at " +"``sense_examples_sources[3]``)." +msgstr "" +"Ta krotka ma takÄ… samÄ… dÅ‚ugość jak :attr:`.sense_examples` i jest " +"uporzÄ…dkowana tak, by źródÅ‚a odpowiadaÅ‚y przykÅ‚adom o tym samym " +"indeksie.(na przykÅ‚ad, źródÅ‚o ``sense_examples[3]`` jest pod " +"``sense_examples_sources[3]``)" + +#: plwn.bases.LexicalUnitBase.sense_examples_sources:10 +msgid "" +"To get pairs of examples with their sources, use ``zip(sense_examples, " +"sense_examples_sources)``" +msgstr "" +"Aby otrzymać pary przykÅ‚adów i ich źródeÅ‚, należy użyć " +"``zip(sense_examples, sense_examples_sources)``" + +#: plwn.bases.LexicalUnitBase.synset:1 +msgid "" +"An instance of :class:`SynsetBase` representing the synset this unit " +"belongs to." +msgstr "" +"Instancja :class:`SynsetBase` reprezentujÄ…ca synset, do którego ta " +"jednostka należy." + +#: plwn.bases.LexicalUnitBase.to_dict:1 +msgid "" +"Create a JSON-compatible dictionary with all the public properties of the" +" lexical unit." +msgstr "" +"Stwórz obiekt ``dict`` kompatybilny z formatem JSON, zawierajÄ…cy " +"wszystkie publiczne atrybuty jednostki leksykalnej." + +#: plwn.bases.LexicalUnitBase.to_dict:7 +msgid "" +"Property :attr:`.relations` is omitted, as it would be redundant when all" +" related lexical units can be enumerated when ``include_related`` is " +"``True``." +msgstr "" +"Atrybut :attr:`.relations` jest pomijany, ponieważ byÅ‚by zbÄ™dny kiedy " +"wszystkie jednostki bÄ™dÄ…ce w relacji mogÄ… być wypisane kiedy " +"``include_related`` ma prawdziwÄ… wartość." + +#: plwn.bases.LexicalUnitBase.to_dict:11 +msgid "" +"An additional ``str`` member is present in the dictionary; its value is " +"the string representation of the lexical unit." 
+msgstr "" +"Dodatkowy atrybut ``str`` zawiera tekstowÄ… reprezentacjÄ™ jednostki " +"leksykalnej." + +#: plwn.bases.LexicalUnitBase.to_dict:14 +msgid "" +"If ``include_related`` is ``True`` (the default), the dictionary will " +"contain an additional ``related`` member, representing lexical units " +"related to this one, in the following format::" +msgstr "" +"JeÅ›li ``include_related`` ma prawdziwÄ… wartość (domyÅ›lnie), mapowanie " +"bÄ™dzie zawieraÅ‚a dodatkowy atrybut ``related``, reprezentujÄ…cy jednostki " +"leksykalne bÄ™dÄ…ce w relacji z obecnym synsetem, w nastÄ™pujÄ…cym formacie::" + +#: plwn.bases.LexicalUnitBase.usage_notes:1 +msgid "" +"Symbols denoting certain properties of how the lexical unit is used in " +"the language." +msgstr "" +"Fragmenty tekstu pokazujÄ…ce jak jednostka leksykalna jest używana " +"w jÄ™zyku." + +#: plwn.bases.LexicalUnitBase.usage_notes:4 +msgid "" +"The symbols are short strings, defined by plWordNet. For example, " +"``daw.`` means that the word is considered dated." +msgstr "" +"Te symbole sÄ… krótkimi wartoÅ›ciami tekstowymi, zdefiniowanymi przez " +"SÅ‚owosieć. Na przykÅ‚ad, ``daw.`` oznacza że sÅ‚owo jest uznawane za dawne." + +#: plwn.bases.LexicalUnitBase.variant:1 +msgid "Ordinal number to differentiate between meanings of homonyms." +msgstr "Numer porzÄ…dkowy rozróżniajÄ…cy znaczenia homonimów" + +#: plwn.bases.LexicalUnitBase.variant:3 +msgid "Numbering starts at 1." +msgstr "Numerowanie zaczyna siÄ™ od 1." + +#: plwn.bases.LexicalUnitBase.verb_aspect:1 +msgid "" +"Aspect of a verb; of the enumerated values of " +":class:`~plwn.enums.VerbAspect`." +msgstr "" +"Aspekt czasownika; jedna z wartoÅ›ci wyliczeniowych " +"z :class:`~plwn.enums.VerbAspect`." + +#: plwn.bases.LexicalUnitBase.verb_aspect:4 +msgid "May be ``None`` if the unit is not a verb, or had no aspect assigned." +msgstr "" +"Może być ``None``, jeÅ›li jednostka nie jest czasownikiem, albo aspekt nie" +" zostaÅ‚ jej przypisany." 
+ +#: plwn.bases.RelationInfoBase:1 +msgid "Encapsulates information associated with a relation type." +msgstr "Zawiera informacje zwiÄ…zane z typem relacji." + +#: plwn.bases.RelationInfoBase:3 +msgid "" +"The primary purpose of this class is to serve as a single object " +"consolidating all possible ways a relation type can be referred to." +msgstr "" +"Głównym rolÄ… instancji tej klasy jest sÅ‚użenie jako pojedynczy obiekt " +"centralizujÄ…cy wszystkie sposoby na jakie może być identyfikowany typ " +"relacji." + +#: plwn.bases.RelationInfoBase:6 +msgid "" +"In general, plWordNet uses *parent* and *child* relation names. Child " +"relations are those that have actual instances between synsets and " +"lexical units. Parent relations only exist to group child relations " +"together; child relation names need to be only unique within the group of" +" their parent relation, while parent relations must be globally unique." +msgstr "" +"Ogólnie, SÅ‚owosieć dzieli nazwy relacji na *rodziców* i *dzieci*. " +"Relacje-dzieci majÄ… wÅ‚aÅ›ciwe instancje w SÅ‚owosieci, pomiÄ™dzy synsetami " +"oraz jednostkami. Relacje-rodzice istniejÄ… jedynie jako grupowania " +"dzieci;Nazwy relacji-dzieci muszÄ… być unikalne jedynie w ramach grupy " +"wyznaczanej przez rodzica, a relacje-rodzice muszÄ… być unikalne " +"globalnie." + +#: plwn.bases.RelationInfoBase:12 +msgid "" +"For example, there are two relations named \"część\" (\"part\"); one " +"being a child of \"meronimia\" (\"meronymy\"), and another a child of " +"\"holonimia\" (\"holonymy\")." +msgstr "" +"Na przykÅ‚ad, istniejÄ… dwie relacje nazwane \"część\"; jedna jest " +"dzieckiem relacji \"meronimia\", a druga dzieckiem relacji \"holonimia\"." + +#: plwn.bases.RelationInfoBase:16 +msgid "" +"Some relation types have no parent; they behave like child relations, but" +" their names need to be unique on par with parent relations." 
+msgstr "" +"Niektóre relacje nie majÄ… rodzica; zachowujÄ… siÄ™ jak relacje-dzieci, " +"aleich nazwy muszÄ… być unikalne na takim samym poziomie jak relacje-" +"rodzice." + +#: plwn.bases.RelationInfoBase:19 +msgid "" +"plWordNet also stores shorter aliases for most of the relation types, for" +" example \"hipo\" for \"hiponimia\" (\"hyponymy\")." +msgstr "" +"SÅ‚owosieć również przechowuje krótsze aliasy dla wiÄ™kszoÅ›ci typów " +"relacji, na przykÅ‚ad \"hipo\" dla hiponimii." + +#: plwn.bases.RelationInfoBase:22 +msgid "" +"There are four ways to refer to relations wherever a relation identifier " +"is accepted (usually the argument is named ``relation_id``):" +msgstr "" +"IstniejÄ… cztery sposoby na identyfikacjÄ™ typu relacji, tam gdzie " +"przyjmowany jest identyfikator relacji (zazwyczaj argument nazywa siÄ™ " +"``relation_id``):" + +#: plwn.bases.RelationInfoBase:25 +msgid "" +"Full name, in format ``<parent name>/<child name>`` (or just ``<child " +"name>`` if the relation has no parent)." +msgstr "" +"PeÅ‚na nazwa, w formacie ``<parent name>/<child name>`` (albo tylko " +"``<child name>`` jeÅ›li relacja nie ma rodzica)." + +#: plwn.bases.RelationInfoBase:27 +msgid "" +"One of the shorter aliases mentioned above. This is checked before " +"attempting to resolve relation names. Aliases must be globally unique." +msgstr "" +"Jeden z krótszych aliasów, wspomnianych powyżej. Aliasy sÄ… sprawdzane " +"przed wÅ‚aÅ›ciwymi nazwami relacji; muszÄ… być globalnie unikalne." + +#: plwn.bases.RelationInfoBase:29 +msgid "" +"A parent name on its own. This resolves to all children of the parent " +"relation. Note, that it's not always valid to pass a name that resolves " +"to multiple relations; " +":exc:`~plwn.exceptions.AmbiguousRelationTypeException` is raised in such " +"cases." +msgstr "" +"Sama nazwa rodzica. Jest ona interpretowana w taki sposób, jakby " +"odpowiadaÅ‚a identyfikatorom wszystkich swoich dzieci naraz. 
Jednak nie " +"wszÄ™dzie dozwolone jest podanie wiÄ™cej niż jednej relacji. W takich " +"przypadkach jest rzucany " +":exc:`~plwn.exceptions.AmbiguousRelationTypeException`." + +#: plwn.bases.RelationInfoBase:34 +msgid "" +"Finally, a :class:`RelationInfoBase` instance may be used instead of a " +"string, standing for the child relation it represents." +msgstr "" +"W koÅ„cu, instancja :class:`RelationInfoBase` może być użyta zamiast " +"reprezentacji tekstowej, oznaczajÄ…c relacjÄ™-dziecko którÄ… reprezentuje." + +#: plwn.bases.RelationInfoBase:37 +msgid "" +"Note, that parent relations don't have corresponding " +":class:`RelationInfoBase` instance." +msgstr "" +"Relacje-rodzice nie majÄ… odpowiadajÄ…cych sobie instancji " +":class:`RelationInfoBase`." + +#: plwn.bases.RelationInfoBase.SEP:1 +msgid "" +"Character that separates parent from child name in full name " +"representation. It must not appear in any relation names or aliases." +msgstr "" +"Znak rozdzielajÄ…cy część rodzica od części dziecka w peÅ‚nej nazwie. Ten " +"znak nie może siÄ™ pojawić w żadnej nazwie relacji ani aliasie." + +#: plwn.bases.RelationInfoBase.aliases:1 +msgid "Tuple of all aliases the relation can be referred to by." +msgstr "Krotka wszystkich aliasów które odnoszÄ… siÄ™ do tej relacji." + +#: plwn.bases.RelationInfoBase.eqv:1 +msgid "" +"Check if ``other`` is an equivalent representation; either an equal " +":class:`RelationInfoBase` object or a relation identifier that refers to " +"this object." +msgstr "" +"Sprawdź, czy ``other`` jest ekwiwalentnÄ… reprezentacjÄ…; albo równym " +"obiektem :class:`RelationInfoBase`, albo identyfikatorem relacji który " +"reprezentuje ten obiekt." + +#: plwn.bases.RelationInfoBase.eqv:5 +msgid "" +"This is less strict than the equality operator, which only checks for " +"equal :class:`RelationInfoBase` instances." +msgstr "" +"To mniej Å›cisÅ‚a wersja operatora równoÅ›ci, który sprawdza jedynie równość" +" instancji :class:`RelationInfoBase`." 
+ +#: plwn.bases.RelationInfoBase.format_name:1 +msgid "Format and return a full name out of parent and child name strings." +msgstr "Sformatuj i zwróć peÅ‚nÄ… nazwÄ™ na podstawie nazw rodzica i dziecka." + +#: plwn.bases.RelationInfoBase.format_name:3 +msgid "" +"``parent_name`` may be ``None``, which will just return ``child_name``, " +"as relations without parents are fully represented just by their name." +msgstr "" +"``parent_name`` może być ``None``, co po prostu zwróci ``child_name``, " +"jako że relacje bez rodzica sÄ… po prostu reprezentowane przez swojÄ… " +"nazwÄ™." + +#: plwn.bases.RelationInfoBase.kind:1 +msgid "" +"One of enumerated constants of :class:`~plwn.enums.RelationKind`; denotes" +" it's a synset or lexical relation." +msgstr "" +"Jedna z wyliczeniowych wartoÅ›ci z :class:`~plwn.enums.RelationKind`; " +"rozróżnia czy relacja jest miÄ™dzy synsetami, czy jednostkami " +"leksykalnymi." + +#: plwn.bases.RelationInfoBase.name:1 +msgid "String name of the relation." +msgstr "Tekstowa nazwa relacji." + +#: plwn.bases.RelationInfoBase.parent:1 +msgid "" +"String name of the parent relation to this one. May be ``None`` if the " +"relation has no parent." +msgstr "" +"Tekstowa nazwa relacji-rodzica tej relacji. Może być ``None`` jeÅ›li " +"relacja nie ma rodzica." + +#: plwn.bases.RelationInfoBase.split_name:1 +msgid "Split a full name into a ``(<parent name>, <child name>)`` pair." +msgstr "Rozdziel peÅ‚nÄ… nazwÄ™ na parÄ™ ``(<parent name>, <child name>)``." + +#: plwn.bases.RelationInfoBase.split_name:3 +msgid "" +"``parent_name`` may be ``None`` if :attr:`.SEP` doesn't appear in the " +"full name." +msgstr "" +"``parent_name`` może być ``None``, jeÅ›li :attr:`.SEP` nie wystÄ™puje " +"w peÅ‚nej nazwie." + +#: plwn.bases.RelationInfoBase.split_name:6 +msgid "" +"However, if :attr:`.SEP` appears more than once in ``full_name``, a " +"``ValueError`` will be raised." 
+msgstr "" +"Jednak jeÅ›li :attr:`.SEP` pojawia siÄ™ w ``full_name`` wiÄ™cej niż raz, " +"zostanie rzucony ``ValueError``." + +#: plwn.bases.RelationEdge:1 +msgid "Bases: :class:`tuple`" +msgstr "Klasy bazowe: :class:`tuple`" + +#: plwn.bases.RelationEdge:1 +msgid "" +"Tuple type representing a relation instance between two synsets or " +"lexical units." +msgstr "" +"Typ krotki reprezentujÄ…cej instancjÄ™ relacji pomiÄ™dzy dwoma synsetami " +"albo jednostkami leksykalnymi." + +#: plwn.bases.RelationEdge.relation:1 +msgid "Alias for field number 1" +msgstr "Alias pola numer 1" + +#: plwn.bases.RelationEdge.source:1 +msgid "Alias for field number 0" +msgstr "Alias pola numer 0" + +#: plwn.bases.RelationEdge.target:1 +msgid "Alias for field number 2" +msgstr "Alias pola numer 2" diff --git a/doc/source/locale/pl/LC_MESSAGES/introduction.po b/doc/source/locale/pl/LC_MESSAGES/introduction.po new file mode 100644 index 0000000..ae52cf4 --- /dev/null +++ b/doc/source/locale/pl/LC_MESSAGES/introduction.po @@ -0,0 +1,407 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) 2017, MichaÅ‚ KaliÅ„ski +# This file is distributed under the same license as the PLWN_API package. +# FIRST AUTHOR <EMAIL@ADDRESS>, 2017. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: PLWN_API 0.21\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2017-06-10 15:46+0200\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" +"Language-Team: LANGUAGE <LL@li.org>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=utf-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: Babel 2.1.1\n" + +#: ../../source/introduction.rst:2 +msgid "Introduction" +msgstr "WstÄ™p" + +#: ../../source/introduction.rst:5 +msgid "Loading" +msgstr "Åadowanie" + +#: ../../source/introduction.rst:7 +msgid "" +"Access to plWordNet is provided via a single " +":class:`~plwn.bases.PLWordNetBase` object, which requires a source from " +"which to load the lexicon data. 
In normal distributions, the storage file" +" is bundled with the python package, so the only thing required to get an" +" instance is::" +msgstr "" +"Punktem dostÄ™pu do SÅ‚owosieci jest pojedynczy obiekt " +":class:`~plwn.bases.PLWordNetBase`, który wymaga źródÅ‚a z którego muszÄ… " +"zostać wczytane dane leksykonu. W normalnej dystrybucji, plik z danymi " +"jest zawarty w paczce pythona; jedyna rzecz potrzebna by skonstruować " +"instancjÄ™ to::" + +#: ../../source/introduction.rst:17 +msgid "Getting synsets and lexical units" +msgstr "Wydobycie synsetów i jednostek leksykalnych" + +#: ../../source/introduction.rst:19 +msgid "" +"The basic building blocks of plWordNet are synsets and lexical units, " +"represented by :class:`~plwn.bases.SynsetBase` and " +":class:`~plwn.bases.LexicalUnitBase` objects. Every single synset and " +"lexical unit can be identified either by an unique ID number, or by a " +"combination of three properties: lemma, :abbr:`pos (part of speech)` and " +"variant." +msgstr "" +"Podstawowymi elementami skÅ‚adowymi SÅ‚owosieci sÄ… synsety i jednostki " +"leksykalne, reprezentowane przez obiekty :class:`~plwn.bases.SynsetBase` " +"i :class:`~plwn.bases.LexicalUnitBase`. Każdy synset i jednostka mogÄ… być " +"zidentyfikowane na podstawie albo unikalnego numeru ID, albo kombinacji " +"trzech atrybutów: lematu, :abbr:`pos (part of speech - część mowy)` oraz " +"wariantu." 
+ +#: ../../source/introduction.rst:25 +msgid "" +"There are three primary methods on :class:`~plwn.bases.PLWordNetBase` for" +" each of these two types of entities that allow selecting them from the " +"lexicon:" +msgstr "" +"SÄ… trzy podstawowe metody :class:`~plwn.bases.PLWordNetBase` dla każdego " +"z dwóch typów obiektów, które pozwalajÄ… wybierać je z leksykonu:" + +#: ../../source/introduction.rst:29 +msgid "Many entities by matching one or more of the three identifying properties:" +msgstr "Wiele obiektów na podstawie zero lub wiÄ™cej z trzech identyfikacyjnych " +"atrybutów:" + +#: ../../source/introduction.rst:31 +msgid ":meth:`~plwn.bases.PLWordNetBase.synsets`" +msgstr "" + +#: ../../source/introduction.rst:32 +msgid ":meth:`~plwn.bases.PLWordNetBase.lexical_units`" +msgstr "" + +#: ../../source/introduction.rst:34 +msgid "A single entity by matching all three identifying properties:" +msgstr "Pojedynczy obiekt na podstawie wszystkich trzech identyfikacyjnych " +"atrybutów:" + +#: ../../source/introduction.rst:36 +msgid ":meth:`~plwn.bases.PLWordNetBase.synset`" +msgstr "" + +#: ../../source/introduction.rst:37 +msgid ":meth:`~plwn.bases.PLWordNetBase.lexical_unit`" +msgstr "" + +#: ../../source/introduction.rst:39 +msgid "A single entity by matching the unique numeric ID:" +msgstr "Pojedynczy obiekt na podstawie unikalnego numerycznego ID:" + +#: ../../source/introduction.rst:41 +msgid ":meth:`~plwn.bases.PLWordNetBase.synset_by_id`" +msgstr "" + +#: ../../source/introduction.rst:42 +msgid ":meth:`~plwn.bases.PLWordNetBase.lexical_unit_by_id`" +msgstr "" + +#: ../../source/introduction.rst:46 +msgid "Selecting by ID" +msgstr "Wybieranie przez ID" + +#: ../../source/introduction.rst:48 +msgid "" +"Using the ``*_by_id`` methods is the fastest and most straightforward way" +" of getting :class:`~plwn.bases.SynsetBase` and " +":class:`~plwn.bases.LexicalUnitBase` objects, provided that ID values of " +"synsets and / or units for the correct version of 
plWordNet have been obtained" +" from an outside source or by storing the ``id`` property::" +msgstr "" +"Metody ``*_by_id`` to najszybszy i najprostszy sposób na otrzymanie obiektów " +":class:`~plwn.bases.SynsetBase` i :class:`~plwn.bases.LexicalUnitBase`, w " +"przypadku gdy wartoÅ›ci ID synsetów i / lub jednostek dla wÅ‚aÅ›ciwej wersji " +"SÅ‚owosieci sÄ… znane z zewnÄ™trznego źródÅ‚a, albo przez zapamiÄ™tanie atrybutu " +"``id``::" + +#: ../../source/introduction.rst:62 +msgid "Selecting by all three identifying properties" +msgstr "Wybieranie przez wszystkie trzy identyfikacyjne atrybuty." + +#: ../../source/introduction.rst:64 +msgid "" +"The \"singular\" methods require all three properties. Lemma is the basic" +" form of a word, variant is an ordinal number differentiating between " +"different meanings of the same word, and :abbr:`pos (part of speech)` is " +"an enumerated value." +msgstr "" +"Metody \"pojedyncze\" wymagajÄ… wartoÅ›ci wszystkich trzech atrybutów. Lemat " +"jest podstawowÄ… formÄ… sÅ‚owa, wariant jest porzÄ…dkowym numerem rozróżniajÄ…cym " +"miÄ™dzy różnymi znaczeniami tego samego sÅ‚owa, a :abbr:`pos (part of speech " +"- część mowy)` jest wartoÅ›ciÄ… wyliczeniowÄ…." + +#: ../../source/introduction.rst:68 +msgid "" +"There are eight :abbr:`pos (part of speech)` constants, four for Polish " +"synsets and units, and four for English. The enum class is provided as a " +"member of the base module of the library:" +msgstr "" +"Jest osiem staÅ‚ych wartoÅ›ci :abbr:`pos (part of speech - część mowy)`, " +"cztery dla jÄ™zyka polskiego i cztery dla angielskiego. 
Klasa wyliczeniowa " +"jest częściÄ… bazowego moduÅ‚u biblioteki:" + +#: ../../source/introduction.rst:72 +msgid "``plwn.PoS.verb``, ``plwn.PoS.noun``, ``plwn.PoS.adv``, ``plwn.PoS.adj``" +msgstr "" + +#: ../../source/introduction.rst:73 +msgid "" +"``plwn.PoS.verb_en``, ``plwn.PoS.noun_en``, ``plwn.PoS.adv_en``, " +"``plwn.PoS.adj_en``" +msgstr "" + +#: ../../source/introduction.rst:76 +msgid "" +"There are few cases, where all three properties would be known, but not " +"the ID. Still, selecting like this is possible::" +msgstr "Rzadko zdarza siÄ™, by znany byÅ‚y wszystkie trzy atrybuty, ale nie ID. " +"Wciąż, wybieranie w ten sposób jest możliwe::" + +#: ../../source/introduction.rst:85 +msgid "It's not legal to omit one any of the three properties::" +msgstr "Nie można pominąć żadnego z trzech atrybutów::" + +#: ../../source/introduction.rst:90 +msgid "" +"If there's no synset / unit that fits the query, an " +":exc:`~plwn.exceptions.NotFound` subclass is raised::" +msgstr "" +"JeÅ›li nie ma synsetu / jednostki, która pasowaÅ‚aby do zapytania, rzucany " +"jest :exc:`~plwn.exceptions.NotFound`::" + +#: ../../source/introduction.rst:101 +msgid "Selecting by some of the identifying properties" +msgstr "Wybieranie przez niektóre identyfikacyjne atrybuty" + +#: ../../source/introduction.rst:103 +msgid "" +"The \"plural\" methods always return an iterable of synsets / lexical " +"units. Unlike the \"singular\" methods, they allows omitting one or more " +"of the arguments, which could match more than one entity." +msgstr "" +"Metody \"mnogie\" Zawsze zwracajÄ… sekwencjÄ™ synsetów / jednostek " +"leksykalnych. Inaczej niż \"pojedyncze\" metody, pozwalajÄ… na pominiÄ™cie " +"jednego lub wiÄ™cej argumentów, przez co dopasowany może być wiÄ™cej niż " +"jeden obiekt." + +#: ../../source/introduction.rst:107 +msgid "" +"It's safer to wrap the invocation in ``tuple`` constructor, since the " +"interface only guarantees that the return value is iterable." 
+msgstr "" +"Bezpieczniej jest otoczyć wywoÅ‚anie konstruktorem ``tuple``, ponieważ " +"interfejs gwarantuje jedynie że zwracana wartość jest sekwencjÄ…." + +#: ../../source/introduction.rst:122 +msgid "" +"It's also possible that a query matches zero entities. Unlike the " +"\"singular\" methods, this will not raise an exception, but simply return" +" an empty iterable." +msgstr "" +"Jest także możliwe, że zapytanie zwróci zerowÄ… liczbÄ™ obiektów. Inaczej niż " +"przy \"pojedynczych\" metodach, wyjÄ…tek nie jest rzucany, ale zwracana jest " +"pusta sekwencja." + +#: ../../source/introduction.rst:133 +msgid "Synset and lexical unit properties" +msgstr "Atrybuty synsetów i jednostek leksykalnych" + +#: ../../source/introduction.rst:135 +msgid "" +"Data associated with plWordNet synsets and lexical units is provided as " +"public properties of synset and lexical unit objects. There are described" +" in documentation of the respective classes: " +":class:`~plwn.bases.SynsetBase` and :class:`~plwn.bases.LexicalUnitBase`." +msgstr "" +"Dane skojarzone z jednostkami i synsetami SÅ‚owosieci sÄ… udostÄ™pniane jako " +"publiczne atrybuty obiektów. SÄ… opisane w dokumentacji osobnych klas: " +":class:`~plwn.bases.SynsetBase` i :class:`~plwn.bases.LexicalUnitBase`." + +#: ../../source/introduction.rst:142 +msgid "Getting relations" +msgstr "Wydobycie relacji" + +#: ../../source/introduction.rst:144 +msgid "" +"The other elementary kind of entities in plWordNet, aside from synsets " +"and lexical units, are relations." +msgstr "" +"Kolejny podstawowy rodzaj obiektów SÅ‚owosieci, poza synsetami i jednostkami " +"leksykalnymi, to relacje." + +#: ../../source/introduction.rst:147 +msgid "" +"Relation instances can connect two synsets or two lexical units. These " +"instances are selected using identifiers of their types." +msgstr "" +"Instancje relacji mogÄ… łączyć dwa synsety, albo dwie jednostki leksykalne. " +"Te instancje sÄ… wybierane za pomocÄ… identyfikatorów ich typów." 
+ +#: ../../source/introduction.rst:150 +msgid "" +"A detailed explanation on how relation types can be referred to is in " +":class:`~plwn.bases.RelationInfoBase`; the short version is:" +msgstr "" +"DokÅ‚adne wytÅ‚umaczenia jak można identyfikować typy relacji znajduje siÄ™ " +"w :class:`~plwn.bases.RelationInfoBase`; w skrócie:" + +#: ../../source/introduction.rst:153 +msgid "" +"Full name, for example: ``hiperonimia`` for relations that have no " +"parent type; ``meronimia/część`` for relations that do." +msgstr "" +"PeÅ‚na nazwa, na przykÅ‚ad: ``hiperonimia`` dla relacji które nie majÄ… " +"rodzica; ``meronimia/część`` dla relacji które majÄ…." + +#: ../../source/introduction.rst:155 +msgid "Short alias, for example: ``hiper``." +msgstr "Krótki alias, na przykÅ‚ad: ``hiper``." + +#: ../../source/introduction.rst:156 +msgid "" +"Parent name, for example: ``meronimia``; this refers to all the " +"children of the relation." +msgstr "" +"Nazwa rodzica, na przykÅ‚ad: ``meronimia``; odnosi siÄ™ ono do wszystkich " +"dzieci relacji." + +#: ../../source/introduction.rst:159 +msgid "" +"To see names and aliases for all relations, in alphabetical order, do " +"``sorted(wn.relations_info())``." +msgstr "" +"Aby zobaczyć wszystkie nazwy i aliasy dla wszystkich relacji, w kolejnoÅ›ci " +"alfabetycznej, można użyć ``sorted(wn.relations_info())``." + +#: ../../source/introduction.rst:164 +msgid "Related synset / units" +msgstr "Synsety / jednostki w relacji" + +#: ../../source/introduction.rst:166 +msgid "" +"Having a :class:`~plwn.bases.SynsetBase` or a " +":class:`~plwn.bases.LexicalUnitBase` objects, it's possible to select all" +" objects related to it using the ``related`` method, which accepts one of" +" the relation type identifiers described above. 
The ``relations`` " +"property can be used to check what relation types have outbound instances" +" from the synset / unit::" +msgstr "" +"MajÄ…c obiekt :class:`~plwn.bases.SynsetBase` albo " +":class:`~plwn.bases.LexicalUnitBase`, możliwe jest wydobycie wszystkich " +"obiektów bÄ™dÄ…cych w relacji z nim, używajÄ…c metody ``related``, która " +"akceptuje jeden z typów identyfikatorów relacji opisanych powyżej. " +"Atrybut ``relations`` może być użyty, by zobaczyć jakie typy relacji " +"majÄ… instancje wychodzÄ…ce z synsetu / jednostki::" + +#: ../../source/introduction.rst:182 +msgid "" +"If a relation of the right kind (synset or lexical) is passed to the " +"method, but it has no instances for the particular entity, an empty " +"iterable is returned::" +msgstr "" +"JeÅ›li relacja wÅ‚aÅ›ciwego (synsetowa albo leksykalna) jest przekazana do " +"tej metody, ale obiekt nie ma jej wychodzÄ…cych instancji, zwracana jest " +"pusta sekwencja::" + +#: ../../source/introduction.rst:189 +msgid "" +"In contrast, if a relation is of the wrong kind or does not exist, this " +"raises an error::" +msgstr "" +"Natomiast, jeÅ›li relacja jest zÅ‚ego rodzaju albo nie istnieje, rzucany jest " +"wyjÄ…tek::" + +#: ../../source/introduction.rst:195 +msgid "" +"When passing a parent relation type to ``related``, distinction between " +"actual, children relation types is lost. A second method " +"``related_pairs`` can be used to annotate related entities with the " +"relation instance connecting to it::" +msgstr "" +"Kiedy do ``related`` jest przekazany typ relacji-rodzica, rozróżnienie " +"pomiÄ™dzy typami relacji-dzieci jest tracone. Druga metoda ``related_pairs`` " +"może być użyta by adnotować obiekty bÄ™dÄ…ce w relacji instancjami relacji " +"które do nich prowadzÄ…::" + +#: ../../source/introduction.rst:206 +msgid "" +"Synset's :meth:`~plwn.bases.SynsetBase.related` and " +":meth:`~plwn.bases.SynsetBase.related_pairs` also have an additional " +"boolean ``skip_artificial`` argument. 
See the methods' documentation for " +"more details; the default value should be correct for most uses." +msgstr "" +"Metody synsetów :meth:`~plwn.bases.SynsetBase.related` oraz " +":meth:`~plwn.bases.SynsetBase.related_pairs` majÄ… dodatkowy argument " +"boolowski ``skip_artificial``. Zob. dokumentacjÄ™ metod dla dalszych " +"szczegółów; domyÅ›lna wartość powinna być poprawna dla zdecydowanej " +"wiÄ™kszoÅ›ci zastosowaÅ„." + +#: ../../source/introduction.rst:213 +msgid "Relation edges" +msgstr "KrawÄ™dzie relacji" + +#: ../../source/introduction.rst:215 +msgid "" +"Relation instances can also be selected using " +":meth:`~plwn.bases.PLWordNetBase.synset_relation_edges` and " +":meth:`~plwn.bases.PLWordNetBase.lexical_relation_edges` methods. Unlike " +"the ``related`` methods, these two are not anchored to a starting point " +"and select all relation instances of given types in plWordNet; they " +"return iterables of :class:`~plwn.bases.RelationEdge` instances, each " +"having a ``source``, ``relation`` and ``target`` properties." +msgstr "" +"Instancje relacje mogÄ… być również wybrane przy użyciu metod " +":meth:`~plwn.bases.PLWordNetBase.synset_relation_edges` i " +":meth:`~plwn.bases.PLWordNetBase.lexical_relation_edges`. W przeciwieÅ„stwie " +"do metod ``related``, te dwie nie sÄ… zakotwiczone w konkretnym punkcie " +"poczÄ…tkowym i wybierajÄ… wszystkie instancje relacji danych typów ze " +"SÅ‚owosieci; zwracajÄ… sekwencje instancji :class:`~plwn.bases.RelationEdge`, " +"z których każda ma atrybuty ``source``, ``relation``, ``target``." + +#: ../../source/introduction.rst:223 +msgid "" +"Without arguments, all synset or lexical relation instances are yielded. " +"Filtering can be done using an ``include`` or ``exclude`` argument. Both " +"expect the values to be sets of relation type identifiers (the same as " +"those accepted by the ``related`` methods). When ``include`` is not " +"``None``, only instances of relations mentioned in the set are yielded. 
" +"For example, to select all hyponymy instances::" +msgstr "" +"Bez argumentów, wszystkie instancje synsetowych albo leksykalnych relacji " +"sÄ… zwracane. Filtrowanie jest możliwe przez argument ``include`` bÄ…dź " +"``exclude``. Oba przyjmujÄ… wartoÅ›ci bÄ™dÄ…ce zbiorami identyfikatorów typów " +"relacji (takich samych jak te które przyjmujÄ… metody ``related``). JeÅ›li " +"``include`` nie jest ``None``, tylko instancje relacji zawartych w tym " +"zbiorze sÄ… zwracane::" + +#: ../../source/introduction.rst:251 +msgid "" +"When ``exclude`` is not ``None``, instances of mentioned relation types " +"are removed from the iterable; either from the set of all relations or " +"those in ``include``." +msgstr "" +"JeÅ›li ``exclude`` nie jest ``None``, instancje zawartych w nim typów relacji " +"sÄ… usuwane ze zwracanej sekwencji; albo ze zbioru wszystkich relacji, albo " +"tych zawartych w ``include``." + +#: ../../source/introduction.rst:255 +msgid "" +"Method :meth:`~plwn.bases.PLWordNetBase.synset_relation_edges` also takes" +" a boolean ``skip_artificial`` argument that's ``True`` as default. Like " +"with :meth:`~plwn.bases.SynsetBase.related`, see the method's " +"documentation for details." +msgstr "" +"Metoda :meth:`~plwn.bases.PLWordNetBase.synset_relation_edges` przyjmuje " +"również argument boolowski ``skip_artificial``, majÄ…cy domyÅ›lnie wartość " +"prawdziwÄ…. DziaÅ‚a jak :meth:`~plwn.bases.SynsetBase.related`, zob. " +"dokumentacjÄ™ tej metody dla szerszego opisu." 
diff --git a/plwn/.bases.py.swp b/plwn/.bases.py.swp deleted file mode 100644 index db9c729a546145f142a12fbd6693aaea5bc691d3..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1024 zcmYc?$V<%2S1{5u)iY*50yk?J7)tVUQ?tvF1aUA+lS=bSN)63$NW+EoGk^x_Lrm2- yE=nm#EGntg5AX^1i+2q0)HTqv)Gx>>&(lvzEKV)fE2xBP9A%7#z-S22Bm@8`#Tx?v diff --git a/plwn/__init__.py b/plwn/__init__.py index fefc9a1..fe9b1f0 100644 --- a/plwn/__init__.py +++ b/plwn/__init__.py @@ -1,8 +1,27 @@ +# coding: utf8 + +# Copyright (C) 2017 MichaÅ‚ KaliÅ„ski +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. 
+ +from ._loading import load_default from ._loading import read from ._loading import load from ._loading import show_source_formats from ._loading import show_storage_formats -from .enums import PoS +# Import the enums that are needed for selecting and filtering +from .enums import PoS, RelationKind # Setup logging for the package (not) import logging as _logging @@ -14,4 +33,6 @@ __all__ = [ "load", "show_storage_formats", "show_source_formats", + "load_default", + "RelationKind", ] diff --git a/plwn/_loading.py b/plwn/_loading.py index 6571f47..bdc51a8 100644 --- a/plwn/_loading.py +++ b/plwn/_loading.py @@ -1,7 +1,24 @@ +# coding: utf8 + +# Copyright (C) 2017 MichaÅ‚ KaliÅ„ski +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. + """Defines user-facing functions. -That allow simple construction of :class:`PLWordnetBase` instances, -with selected storages and readers. +That allow simple construction of +:class:`PLWordnetBase` instances, with selected storages and readers. """ from __future__ import absolute_import, division, print_function @@ -13,8 +30,20 @@ import textwrap as tw import six +from . 
import exceptions as exc +try: + from .default import get_default_load_args +except ImportError: + get_default_load_args = None + -__all__ = 'read', 'load', 'show_source_formats', 'show_storage_formats' +__all__ = ( + 'read', + 'load', + 'load_default', + 'show_source_formats', + 'show_storage_formats', +) _Info = namedtuple('_Info', ('desc', 'modname')) @@ -36,16 +65,10 @@ _STORAGES = { 'PLWN database).', 'sqlite', ), - 'objects': _Info( - 'Stores data in plain python objects, dumping them in pickle format. ' - 'Quick to construct, but querying and memory efficiency is not ' - 'guaranteed.', - 'objects', - ), } # Defaults for this version -_READERS[None] = _READERS['xml'] +_READERS[None] = _READERS['database'] _STORAGES[None] = _STORAGES['sqlite3'] @@ -70,6 +93,7 @@ def read(source_file, Return the right :class:`PLWordnetBase` subclass instance for the selected parameters. + Where defaults are mentioned, those values may change with each minor version of PLWN API. If you depend on some particular format for a long running program, state it explicitly. @@ -117,6 +141,27 @@ def load(storage_file, storage_format=None): return stor_cls.from_dump(storage_file) +def load_default(): + """Load and return the default, bundled version of plWordNet data. + + This function will fail if the bundled version is not present in the + package; this may occur in some builds where specialized plWordNet versions + are needed and disk space is a concern. + + :rtype: PLWordnetBase + + :raises PLWNAPIException: If no default data is bundled. + """ + try: + args = get_default_load_args() + except TypeError: + raise exc.PLWNAPIException( + 'No default storage bundled with this PLWN API package', + ) + + return load(*args) + + def show_source_formats(): """Print names and short descriptions. 
diff --git a/plwn/bases.py b/plwn/bases.py index b104c5d..43f9712 100644 --- a/plwn/bases.py +++ b/plwn/bases.py @@ -1,274 +1,260 @@ +# coding: utf8 + +# Copyright (C) 2017 MichaÅ‚ KaliÅ„ski +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. + """Base, abstract classes for plWordNet objects. -Implementing common functionality independent of structures holding -wordnet data. +Implementing common functionality independent of structures +holding the data itself. """ from __future__ import absolute_import, division -import abc -import collections as coll -import functools +from abc import ABCMeta, abstractmethod, abstractproperty +from collections import namedtuple import locale +import operator as op import six from .utils import graphmlout as go from .enums import make_values_tuple -from .relresolver import get_default_relation_resolver - -__all__ = 'SynsetBase', 'LexicalUnitBase', 'PLWordNetBase', 'RelationEdge' - -#: Named tuple type yielded by -#: :meth:`PLWordNetBase.synset_relation_edges` and -#: :meth:`PLWordNetBase.lexical_relation_edges`. -RelationEdge = coll.namedtuple( +__all__ = ( + 'PLWordNetBase', + 'SynsetBase', + 'LexicalUnitBase', + 'RelationInfoBase', 'RelationEdge', - ('source', 'relation', 'target'), ) +#: Tuple type representing a relation instance between two synsets or lexical +#: units. 
+RelationEdge = namedtuple('RelationEdge', ('source', 'relation', 'target')) + + +@six.add_metaclass(ABCMeta) class PLWordNetBase(object): - """The primary entry point for retrieving data from plWordNet. + """The primary object providing data from plWordNet. - Allows querying the plWordNet for synsets and lexical units. + Allows retrieving synsets, lexical units, and other informative objects. """ - __metaclass__ = abc.ABCMeta - _STORAGE_NAME = '?' @classmethod def from_reader(cls, reader, dump_to=None): - """Create a new instance from a source reader. - - Optionally saving it in an internal representation format - in another file. + """Create new instance from a source reader. - :param reader: Generator that yields :class:`SynsetNone` and - :class:`LexicalUnitNode` from a source representation. + Optionally saving it in an internal representation format in + another file. - :param str dump_to: Path to a file where the data read from the source - will be dumped in an internal representation. It will be possible - to later load it quicker by :meth:`.from_dump`. If ``None``, then - no cached file will be created. + ``reader`` is any iterable that yields node instances: + :class:`~plwn.readers.nodes.SynsetNode`, + :class:`~plwn.readers.nodes.LexicalUnitNode` and + :class:`~plwn.readers.nodes.RelationTypeNode`. - :returns: New instance of PLWN API entry point. - :rtype: PLWordNetBase + ``dump_to`` is a path to a (non-existing) file where data + from ``reader`` will be stored to be loaded later. + If not passed, then the data won't be cached in any file, requiring + to be read again using :meth:`.from_reader`. """ raise NotImplementedError() @classmethod def from_dump(cls, dump): - """Create a new instance from a dump of cached internal representation. 
- The dump file must have been created by the same ``PLWordNetBase`` - subclass, and preferably by the same version of PLWN API (backwards - compatibility of dump formats is not guaranteed). - - :param str dump: Path to a file with cached internal representation. - - :returns: New instance of PLWN API entry point. - :rtype: PLWordNetBase + The dump file must have been created by :meth:`.from_reader` of the + same :class:`PLWordNetBase` subclass and schema version. """ return NotImplementedError() - def __init__(self): - """Initialize PLWordNetBase.""" - self._rel_resolver = get_default_relation_resolver() - - @abc.abstractmethod + @abstractmethod def synsets(self, lemma=None, pos=None, variant=None): - """Iterate over synsets form plWordNet. - - Filtered by lemma, part ofspeech and variant. - - If a parameter is omitted, then any value is accepted (so ``synsets()`` - iterates over all synsets). - - The lemma, pos and variant are properties of lexical units, this method - yields synsets that contain those lexical units. - - :param str lemma: Only synsets containing a lexical unit with this - lemma will be yielded. - :param pos: Only synsets containing a lexical unit with this part - of speech will be yielded. - :type pos: Union[PoS, str] - :param int variant: Only synsets containing a lexical unit with this - variant will be yielded. + """Select synsets from plWordNet based on combination of criteria. - :returns: Iterable of synsets fitting the parameters' criteria. - :rtype: Iterable[SynsetBase] - - :raises InvalidPoSException: If a query is made for a PoS that is not - one of the valid constants. + This method works just like :meth:`.lexical_units`, but returns an + iterable of distinct synsets that own the lexical units selected by + the query. """ pass - @abc.abstractmethod + @abstractmethod def synset(self, lemma, pos, variant): - """Get the synset. - - Containing the unit with the lemma, part of speech and variant. 
- - Unlike :meth:`.synsets`, all parameters of this method are mandatory. - It either returns a single synset, or raises and exception if no - such synset can be found. - - :param str lemma: The lemma of a lexical unit contained by the - requested synset. - :param pos: The part of speech of a lexical unit contained by the - requested synset. - :type pos: Union[PoS, str] - :param int variant: The variant of a lexical unit contained by the - requested synset. + """Like :meth:`.synsets`. - :returns: Synset satisfying the criteria specified by the parameters. - :rtype: SynsetBase + But either return a single synset or raise + :exc:`~plwn.exceptions.SynsetNotFound`. - :raises SynsetNotFound: If no synset with the given properties - could be found. - :raises InvalidPoSException: If a query is made for a PoS that is not - one of the valid constants. + All parameters are required, to ensure that the query could only match + a single synset. """ pass - @abc.abstractmethod + @abstractmethod def synset_by_id(self, id_): - """Get the synset, knowing its internal, numerical ID. + """Select a synset using its internal, numeric ID. - This method is not intended to be used by itself, but with tools which - identify PLWN synsets by their IDs. + If there is no synset with the given ID, raise + :exc:`~plwn.exceptions.SynsetNotFound`. - :param int id_: The internal plWordnet identifier of the synset. - - :returns: The synset having the ID. - :rtype: SynsetBase - - :raises InvalidSynsetIdentifierException: If there's no synset with - the ID in plWordnet. + This is the fastest method to get a particular :class:`SynsetBase` + object. """ pass - @abc.abstractmethod + @abstractmethod def lexical_units(self, lemma=None, pos=None, variant=None): - """Iterate over lexical units form plWordNet. - - Filtered by lemma, part of speech and variant. + """Select lexical units from plWordNet based on combination of criteria. 
- If a parameter is omitted, then any value is accepted (so - ``lexical_units()`` iterates over all units). + It's possible to specify the lemma, part of speech and variant of the + units this method should yield. If a parameter value is omitted, any + value matches. Conversely, a call of ``lexical_units()`` will return + an iterable of all lexical units in plWordNet. If no lexical unit + matches the query, returns an empty iterable. - :param str lemma: Only lexical units with this lemma will be yielded. - :param pos: Only lexical units with this part of speech will be - yielded. - :type pos: Union[PoS, str] - :param int variant: Only lexical units with this variant will be - yielded. - - :returns: Iterable of lexical units fitting the parameters' criteria. - :rtype: Iterable[LexicalUnitBase] - - :raises InvalidPoSException: If a query is made for a PoS that is not - one of the valid constants. + The parameter ``lemma`` is a unicode string, ``variant`` is an + integer, and ``pos`` is an enumerated value of + :class:`~plwn.enums.PoS`. """ pass - @abc.abstractmethod + @abstractmethod def lexical_unit(self, lemma, pos, variant): - """Get the lexical unit with the lemma, part of speech and variant. - - Unlike :meth:`.lexical_units`, all parameters of this method are - mandatory. It either returns a single unit, or raises and exception - if no such unit can be found. + """Like :meth:`.lexical_units`. - :param str lemma: The lemma of the requested lexical unit. - :param pos: The part of speech of the requested lexical unit. - :type pos: Union[PoS, str] - :param int variant: The variant of the requested lexical unit. + But either return a single lexical unit or + raise :exc:`~plwn.exceptions.LexicalUnitNotFound`. - :returns: Lexical unit satisfying the criteria specified by the - parameters. - :rtype: LexicalUnitBase - - :raises LexicalUnitNotFound: If no unit with the given properties - could be found. 
- :raises InvalidPoSException: If a query is made for a PoS that is not - one of the valid constants. + All parameters are required, to ensure that the query could only match + a single lexical unit. """ pass - @abc.abstractmethod + @abstractmethod def lexical_unit_by_id(self, id_): - """Get the lexical unit, knowing its internal, numerical ID. - - See :meth:`.synset_by_id` for remarks. - - :param int id_: The internal plWordnet identifier of the lexical unit. + """Select a lexical unit using its internal, numeric ID. - :returns: The lexical unit having the ID. - :rtype: LexicalUnitBase + If there is no lexical unit with the given ID, raise + :exc:`~plwn.exceptions.LexicalUnitNotFound`. - :raises InvalidLexicalUnitIdentifierException: If there's no lexical - unit with the ID in plWordnet. + This is the fastest method to get a particular :class:`LexicalUnitBase` + object. """ pass - @abc.abstractmethod - def synset_relation_edges(self, include=None, exclude=None): - """Iterate over all synset relation instances in plWordnet. - - Yielding them as tuples. - - Named tuples in format ``(source, relation, target)`` - (:data:`RelationEdge`) are yielded by this method. - - One of the intended uses of this method is to create a graph "live", by - feeding the results directly to a graph-building library. - - **Note:** if both ``include`` and ``exclude`` are passed, the result - will be a logical intersection. In both collections, invalid relation - names are silently ignored. + @abstractmethod + def synset_relation_edges(self, + include=None, + exclude=None, + skip_artificial=True): + """Get an iterable of synset relation instances from plWordNet. + + As represented by :class:`RelationEdge`. + + ``include`` and ``exclude`` are containers of relation type + identifiers (see :class:`RelationInfoBase`). If ``include`` is not + ``None``, then only instances of relations in it are included in the + result. 
If ``exclude`` is not ``None``, then all relations in it are + omitted from the result. If both are ``None``, all relations are + selected. + + If ``skip_artificial`` is ``True`` (the default), then artificial + synsets (see :attr:`SynsetBase.is_artificial`) are "skipped over": new + relation edges are created to replace ones ending or starting in an + artificial synset, and connecting neighbouring synsets if they have + relations directed like this:: + + .-------. Rel 1 + | Syn D |-----------------. + '-------' | + v + .--------------. + .-------. Rel 1 | Syn B | Rel 1 .-------. + | Syn A |-------->| [artificial] |-------->| Syn E | + '-------' '--------------' '-------' + ^ + | + .-------. Rel 2 | + | Syn C |-----------------' + '-------' + + + .-------. Rel 1 + | Syn D |-----------------. + '-------' v + .-------. + | Syn E | + '-------' + .-------. Rel 1 ^ + | Syn A |-----------------' + '-------' + + ``Syn C`` is dropped, since there's no instance of ``Rel 1`` directed + outwards from the skipped artificial ``Syn B``. + """ + pass - :param Iterable[str] include: Names of relations which should be - included in the output. Instances of all other relations will be - ignored. By default all relations are included. + @abstractmethod + def lexical_relation_edges(self, include=None, exclude=None): - :param Iterable[str] exclude: Names of relations which should not be - included in the output. By default, no relations are excluded. + """Get an iterable of lexical unit relation instances from plWordNet. - :returns: Generator of tuples representing synset relation edges. - :rtype: Iterable[Tuple[SynsetBase,str,SynsetBase]] + As represented by :class:`RelationEdge`. + + This method works like :meth:`.synset_relation_edges`, but for lexical + units and relation types. There is no ``skip_artificial``, since there + are no artificial lexical units. 
""" pass - @abc.abstractmethod - def lexical_relation_edges(self, include=None, exclude=None): - """Iterate over all lexical relation instances in plWordnet. + @abstractmethod + def relations_info(self, name=None, kind=None): + """Get an iterable of :class:`RelationInfoBase` instances. + + Matching the query defined by parameters. - Yielding them as tuples. + ``name`` is a string naming a relation (see + :class:`RelationInfoBase`). If it names a "parent", all its children + are selected. - This method behaves very closely to :meth:`.synset_relation_edges`, but - for lexical relations. + ``kind`` is an enumerated value of + :class:`~plwn.enums.RelationKind`. - :rtype: Iterable[Tuple[LexicalUnitBase,str,LexicalUnitBase]] + Any parameter that's not passed matches any relation type. + As such, a call of ``relations_info()`` will select all relation types + in plWordNet. """ pass def close(self): - """Perform necessary cleanup operations, close this PLWordNet instance. + """Perform cleanup operations. - Often, temporary files are created when reading and parsing plWordNet, - and non-temporary files may be opened. Call this method to properly - close / remove those files. + After using the :class:`PLWordNetBase` object. - It's best to use :func:`contextlib.closing` to ensure that this method - gets eventually called. + By default, this method does nothing and should be overridden by a + subclass if necessary. It should still always be called, since any + :class:`PLWordNetBase` subclass may create any kind of temporary + resources. - It's legal to call this method several times. It's not legal to call - any other methods after :meth:`.close` has been called. + After calling this method, this instance and any ones linked with it + (:class:`SynsetBase`, :class:`LexicalUnitBase`, etc.) may become + invalid and should not be used. 
""" pass @@ -288,132 +274,94 @@ class PLWordNetBase(object): included_synset_nodes=None, excluded_synset_nodes=None, included_lexical_unit_nodes=None, - excluded_lexical_unit_nodes=None): - """Export the wordnet as graph. + excluded_lexical_unit_nodes=None, + skip_artificial_synsets=True): + """Export plWordNet as graph. In `GraphML <http://graphml.graphdrawing.org/>`_ format. - Normally, nodes of the graph are synsets, and edges are relations - between synsets. It's possible to make the graph made of lexical units - and relations, or both synsets and units. - - IDs of nodes are internal plWordNet IDs (the same as returned by ``id`` - property of synset / lexical_unit). They may be prefixed with - ``synset-`` or ``lexical_unit-`` depending on type of the node and - ``prefix_ids`` parameter value. - - Edges have no IDs. - - Nodes and edges can have certain attributes assigned to them in - GraphML. For edges, there are two attributes: - - * **type:** Either ``relation`` or ``unit_and_synset``, depending on - whether the edge represents a relation or a link between a synset and - a unit that belongs to it. The latter are only present in mixed graph - type. - * **name:** If **type** is ``relation``, then it's the name of the - relation. If **type** is ``unit_and_synset``, then it's either - ``has_unit``, for an edge directed from a synset node to a - lexical_unit node, or ``in_synset`` for an edge in the - opposite direction. - - Nodes only have attributes if ``include_attributes`` parameter is - ``True``. The attributes have names and values corresponding to - properties of :class:`SynsetBase` or :class:`LexicalUnitBase` objects. - Composite values (like tuples) are stored as JSON strings (since - GraphML only allows simple types for attributes). Attributes can be - excluded or included using the method's parameters. 
- - Possible names of synset attributes: - * definition - * relations - - Possible names of lexical unit attributes: - * lemma - * pos - * variant - * definition - * sense_examples - * sense_examples_sources - * external_links - * usage_notes - * domain - * relations - - **NOTE:** If both corresponding ``include_*`` and ``exclude_*`` - parameters are passed, an item will be included only if it appears in - the ``include_*`` set and does not appear in ``exclude_*`` set. - - :param Union[str,BytesIO] out_file: Stream or name of the file to which - the GraphML XML will be written. **NOTE:** Because of a peculiarity in - the XML module used (ElementTree), if a stream is passed here, it - should be opened in binary mode. - - :param str graph_type: Type of the graph. There are three possible - values: - * ``synset``: Nodes are synsets and edges are synset relations. - * ``lexical_unit``: Nodes are lexical units and edges are lexical unit - relations. - * ``mixed``: There are both synset and lexical unit nodes, - distinguished by prefixes in their IDs. Synsets are connected with - synset relations and lexical units are connected with lexical - relations. Synsets and units are connected with ``unit_and_synset`` - type of edge (see description above). - - :param bool include_attributes: If ``True``, then node attributes will - be included in the output XML file. Note, that if - ``included_*_attributes`` or ``excluded_*_attributes`` is passed, then - this parameter is ignored and the designated attributes are included. - - :param bool prefix_ids: If ``True``, then IDs of nodes will be prefixed - with ``synset-`` or ``lexical_unit-``. Note, that if ``graph_type`` is - ``mixed``, nodes are always prefixed and this parameter is ignored. - - :param FrozenSet[str] included_synset_attributes: Set of names of - synset attributes which should be included in GraphML nodes. All other - attributes are excluded. 
- - :param FrozenSet[str] excluded_synset_attributes: Set of names of - synset attributes which should not be included in GraphML nodes. All - other attributes are included. - - :param FrozenSet[str] included_lexical_unit_attributes: Like - ``included_synset_attributes``, but for lexical unit nodes. - - :param FrozenSet[str] excluded_lexical_unit_attributes: Like - ``excluded_synset_attributes``, but for lexical unit nodes. - - :param FrozenSet[str] included_synset_relations: Set of names of synset - relations which should be included as edges in the graph. All other - relation edges are excluded. - - :param FrozenSet[str] excluded_synset_relations: Set of names of synset - relations which should not be included as edges in the graph. All other - relation edges are included. - - :param FrozenSet[str] included_lexical_unit_relations: Like - ``included_synset_relations``, but for lexical unit relations. - - :param FrozenSet[str] excluded_lexical_unit_relations: Like - ``excluded_synset_relations``, but for lexical unit relations. - - :param FrozenSet[int] included_synset_nodes: Set of IDs of synsets that - should be included as nodes in the graph. All other synsets are - excluded. Any edge that has one of its endpoints not included will also - not be included. Also, if the graph type is mixed, lexical units - belonging to a synset which is not included will also be excluded. - - :param FrozenSet[int] excluded_synset_nodes: Set of IDs of synsets - which should not be included as nodes in the graph. All other synsets - are included. Also see remarks for ``included_synset_nodes``. - - :param FrozenSet[int] included_lexical_unit_nodes: Like - ``included_synset_nodes``, but for lexical units. - - :param FrozenSet[int] excluded_lexical_unit_nodes: Like - ``excluded_synset_nodes``, but for lexical units. - - :raises ValueError: If ``graph_type`` is not one of the allowed values. + Nodes of the graph are synsets and / or lexical units, and edges are + relation instances. 
+ + For nodes, their numeric plWordNet IDs are set as their XML element + IDs. + + **NOTE:** Nodes that have no inbound or outbound edges are dropped from + the graph. + + Nodes and edges have attributes, as GraphML defines them. For nodes, + attributes are public properties of :class:`SynsetBase` or + :class:`LexicalUnitBase` (aside from ``relations``, which would be + useless in a graph, and ``id``, which becomes the XML ID of a node). + Edges have two attributes: + + * **type**: Either ``relation``, for edges that represent plWordNet + relation instances, or ``unit_and_synset`` for edges between synset + nodes and nodes of lexical units that belong to the synset. The + latter appear only in *mixed* graph. + * **name**: If **type** is ``relation``, then this is the full name + of the relation (see :class:`RelationInfoBase`). If **type** is + ``unit_and_synset``, it is one of constant values: ``has_unit`` if + the edge is directed from synset to unit, or ``in_synset``, for edges + directed from unit to synset. + + ``out_file`` is a writable file-like object to which the GraphML output + will be written. + + ``graph_type`` is one of three constant string values: ``synset``, + ``lexical_unit`` or ``mixed``. Synset graph contains only synset + nodes and relations, lexical unit graph contains only lexical unit + nodes and relations, and mixed graph contains all of the former, as + well as additional edges that map lexical units to synsets they belong + to. + + If ``include_attributes`` is ``True``, then all synset and / or lexical + unit attributes will be included. By default, attributes are not + included to shrink the written file. Note, that if any of + ``(included/excluded)_(synset/lexical_unit)_attributes`` parameters is + passed, inclusion of attributes will be controlled by them and the + value of ``include_attributes`` is ignored. + + If ``prefix_ids`` is ``True``, then ID of each node will be prefixed + with the type: ``synset-`` or ``lexical_unit-``. 
By default, it's + not done, unless ``graph_type`` is ``mixed``, in which case this + parameter is ignored and ID prefixes are enforced. + + ``included_synset_attributes`` and ``excluded_synset_attributes`` are + containers of synset attribute names, selecting the values which should + or should not be included with synset nodes. + + ``included_lexical_unit_attributes`` and + ``excluded_lexical_unit_attributes`` are the same way as the above, + but for attributes of lexical units. + + ``included_synset_relations`` and ``excluded_synset_relations`` are + containers of synset relation type identifiers (see + :class:`RelationInfoBase`), selecting synset relation types whose + instances should or should not be included in the graph. By default, + all relation types are included. + + ``included_lexical_unit_relations`` and + ``excluded_lexical_unit_relations`` are the same was as the above, but + for lexical relation types. + + ``included_synset_nodes`` and ``excluded_synset_nodes`` are containers + for IDs of synset that should or should not be included as nodes in the + graph. If a node is not included, all edges that start or end in it are + also excluded. By default, all non-artificial synsets are included. + + ``included_lexical_unit_nodes`` and ``excluded_lexical_unit_nodes`` are + the same way as the above, but for lexical units. + + If ``skip_artificial_synsets`` is ``True`` (the default), then + artificial synsets are excluded from the graph, and edges connecting to + them are reconnected to "skip over" them, as described for + :meth:`.synset_relation_edges`. + + **Note:** while this method accepts all of the above parameters at + all times, parameters relating to synsets are ignored if ``graph_type`` + is ``lexical_unit``, and parameters relating to lexical units are + ignored if ``graph_type`` is ``synset``. 
""" gwn = go.GraphMLWordNet() gb = go.GraphMLBuilder(self, gwn) @@ -428,6 +376,7 @@ class PLWordNetBase(object): excluded_nodes=excluded_synset_nodes, included_relations=included_synset_relations, excluded_relations=excluded_synset_relations, + skip_artificial_synsets=skip_artificial_synsets, ) elif graph_type == go.GRAPH_TYPE_UNIT: gb.lexical_unit_graph( @@ -459,9 +408,10 @@ class PLWordNetBase(object): excluded_synset_nodes=excluded_synset_nodes, included_lexical_unit_nodes=included_lexical_unit_nodes, excluded_lexical_unit_nodes=excluded_lexical_unit_nodes, + skip_artificial_synsets=skip_artificial_synsets, ) else: - raise ValueError('graph_type={!r}'.format(graph_type)) + raise ValueError('Invalid graph type: {!r}'.format(graph_type)) gwn.write(out_file) @@ -472,8 +422,8 @@ class PLWordNetBase(object): ) -@functools.total_ordering @six.python_2_unicode_compatible +@six.add_metaclass(ABCMeta) class SynsetBase(object): """Encapsulates data associated with a plWordNet synset. @@ -481,95 +431,166 @@ class SynsetBase(object): Most of plWordNet relations are between meanings, hence the need to group lexical units into synsets. - For purposes of ordering, a :class:`SynsetBase` is uniquely identified by - its head: the first of the lexical units it contains. + For purposes of ordering, a :class:`SynsetBase` object is uniquely + identified by its "head": the first of the lexical units it contains. """ - __metaclass__ = abc.ABCMeta - - @abc.abstractproperty + @abstractproperty def id(self): - """``int``. + """The internal, numeric identifier of the synset in plWordNet. - The internal identifier of the synset in plWordnet. It is unique among - all synsets. + It is unique among all synsets. + + If this identifier is passed to :meth:`PLWordNetBase.synset_by_id`, it + would return this :class:`SynsetBase` object. """ pass - @abc.abstractproperty + @abstractproperty def lexical_units(self): - """``Tuple[LexicalUnitBase]``. + """Tuple of :class:`LexicalUnitBase` objects. 
- Lexical units contained in the synsets. Ordering of units within the - tuple is arbitrary, but constant. The first unit is the synset's head, - used to represent it. + Representing lexical units contained in the synset. + Ordering of units within the tuple is arbitrary, but constant. - At least one lexical unit is always present in every synset. + At least one lexical unit is always present in every synset, so + ``lexical_units[0]`` is always valid and selects the synset's "head". """ pass - @abc.abstractproperty + @abstractproperty def definition(self): - """``str``. + """Textual description of the synset's meaning. + + May be ``None``. + + In plWordNet, most definitions are stored as + :attr:`LexicalUnitBase.definition`. Synset definitions are present + mostly for English synsets. + """ + pass - Textual description of the synset's meaning. + @abstractproperty + def is_artificial(self): + """Boolean value informing if the synset is an artificial one. - Will be an empty string if the definition is not present in plWordNet. + Artificial synsets carrying no linguistic + meaning, but introduced as a method of grouping synsets within the + structure of plWordNet. + + For most uses, artificial synsets should be ignored. """ pass - @abc.abstractproperty + @abstractproperty def relations(self): - """``Tuple[str]``. + """Tuple of :class:`RelationInfoBase` instances. + + Containing types of distinct relations that have outbound + edges from this synset. - Tuple of all outward relations that lead from this synset. + Relations are returned in an arbitrary order. + + The tuple is special: methods for checking membership accept all + possible representations of a relation type (see + :meth:`RelationInfoBase.eqv`). """ pass - @abc.abstractmethod - def related(self, relation_name): - """Iterate over synsets to whom this synset has a certain relation. 
+ @abstractproperty + def is_polish(self): + """Check whether all units are Polish.""" + pass - :param str relation_name: The name of the relation to follow. + @abstractproperty + def is_english(self): + """Check whether all units are English.""" + pass - :returns: Iterable of related synsets. - :rtype: Iterable[SynsetBase] + @abstractproperty + def pos(self): + """Returns PoS of the synset units. - :raises InvalidRelationNameException: If ``relation_name`` is not a - valid name of a synset relation in plWordNet. + Raises :exc:`ValueError` if units have many different PoS. + """ + pass + + @abstractmethod + def related(self, relation_id=None, skip_artificial=True): + """Get an iterable of :class:`SynsetBase` instances. + + That are connected to this synset by outbound edges of + synset relation type identified by ``relation_id``. + + ``relation_id`` can be any synset relation type identifier (see + :class:`RelationInfoBase`), a collection of relation types identifiers, + or ``None``, in which case synsets related to this one by any relation + are selected. + + Note, that distinction between any relations that fit the + ``relation_id`` query is lost. Use :meth:`.related_pairs` if it's + needed. + + Raises :exc:`~plwn.exceptions.InvalidRelationTypeException` if + (any of) ``relation_id`` does not refer to an existing synset relation + type. + + If ``skip_artificial`` is ``True`` (the default) artificial synsets + related to this one are "skipped over", as described for + :meth:`PLWordNetBase.synset_relation_edges`. + """ + pass + + @abstractmethod + def related_pairs(self, relation_id=None, skip_artificial=True): + """Like :meth:`.related`. + + But return an iterable of pairs + ``(<relation info>, <relation target synset>)``. """ pass def to_dict(self, include_related=True, include_units_data=True): """Create a JSON-compatible dictionary. - With all the public properties of the synset. + With all public properties of the synset. 
Enums are converted to their values and all collections are converted to tuples. - :param bool include_related: If ``True``, the dictionary will contain a - "related" member, whose value is a dictionary in format:: + Property :attr:`.relations` is omitted, as it would be redundant when + all related synsets can be enumerated when ``include_related`` is + ``True``. Some additional members are also present in the dictionary: - { - "<synset relation name>": ( - (<relation target id>, <relation target string form>), - ... - ), - ... - } + * ``str``: The string representation of the synset (defined by + ``__str__`` override on :class:`SynsetBase`). + * ``units``: Listing (as a tuple) of units belonging to the synset (in + the same ordering as :attr:`.lexical_units`), as pairs of + ``(<unit id>, <unit string form>)``. + + If ``include_related`` is ``True`` (the default), the dictionary will + contain an additional ``related`` member, representing synsets related + to this one, in the following format:: - :param bool include_units_data: If ``True``, then the "units" member of - the dictionary will be a tuple of results of - :meth:`LexicalUnitBase.to_dict`. Otherwise, it will contain only - tuples of ``(<unit id>, <unit string form>)``. + { + <synset relation full name>: ( + (<relation target id>, <relation target string form>), + ... + ), + ... + } - :returns: Dictionary contain data of the synset. - :rtype: Mapping[str, Any] + If ``include_units_data`` is ``True`` (the default), the ``units`` + member will contain results of invocation of + :meth:`LexicalUnitBase.to_dict` for the synset's units, + instead of pairs described above. In this case, the value of + ``include_related`` parameter is passed on to + :meth:`LexicalUnitBase.to_dict`. 
""" syn_dict = { u'id': self.id, u'definition': self.definition, + u'is_artificial': self.is_artificial, u'units': tuple( (lu.to_dict(include_related) for lu in self.lexical_units) if include_units_data @@ -580,17 +601,20 @@ class SynsetBase(object): if include_related: syn_dict[u'related'] = { - relname: tuple( + six.text_type(rel): tuple( (target.id, target.short_str()) - for target in self.related(relname) + for target in self.related(rel) ) - for relname in self.relations + for rel in self.relations } return syn_dict def short_str(self): - """Shorter version of ``str`` cast that displays only the first unit.""" + """Shorter version of synset's string form (``__str__``). + + That displays only the first lexical unit. + """ sstr = [u'{', six.text_type(self.lexical_units[0])] if len(self.lexical_units) > 1: sstr.append( @@ -599,6 +623,11 @@ class SynsetBase(object): sstr.append(u'}') return ''.join(sstr) + def __inner_cmp(self, cmp_op, other): + if not isinstance(other, SynsetBase): + return NotImplemented + return cmp_op(self.lexical_units[0], self.lexical_units[0]) + def __repr__(self): head = self.lexical_units[0] rstr = '<Synset id={!r} lemma={!r} pos={!r} variant={!r}'.format( @@ -623,221 +652,261 @@ class SynsetBase(object): def __hash__(self): # Even if comparing is done by the synset's head, it's probably better # to hash by all lexical units, to boost the hash's uniqueness - return hash(self.lexical_units) + return hash((SynsetBase, self.lexical_units)) def __eq__(self, other): - if not isinstance(other, SynsetBase): - return NotImplemented - return self.lexical_units[0] == other.lexical_units[0] + return self.__inner_cmp(op.eq, other) def __ne__(self, other): - return not self == other + return self.__inner_cmp(op.ne, other) def __lt__(self, other): - if not isinstance(other, SynsetBase): - return NotImplemented - return self.lexical_units[0] < other.lexical_units[0] + return self.__inner_cmp(op.lt, other) + + def __le__(self, other): + return 
self.__inner_cmp(op.le, other) + + def __gt__(self, other): + return self.__inner_cmp(op.gt, other) + + def __ge__(self, other): + return self.__inner_cmp(op.ge, other) @six.python_2_unicode_compatible +@six.add_metaclass(ABCMeta) class LexicalUnitBase(object): """Encapsulates data associated with a plWordNet lexical unit. Lexical units represent terms in the language. Each lexical unit is uniquely identified by its lemma (base written form), part of speech - (verb, noun, adjective or adverb) and variant (a number: sometimes the same - form can have multiple meanings). + (verb, noun, adjective or adverb) and variant (a number differentiating + between homonyms). """ - __metaclass__ = abc.ABCMeta - - @abc.abstractproperty + @abstractproperty def id(self): - """``int``. + """The internal, numeric identifier of the lexical units in plWordNet. + + It is unique among all lexical units. - The internal identifier of the lexical unit in plWordnet. It is unique - among all units. + If this identifier is passed to + :meth:`PLWordNetBase.lexical_unit_by_id`, it would return this + :class:`LexicalUnitBase` object. """ pass - @abc.abstractproperty + @abstractproperty def lemma(self): - """``str``. - - Lemma of the unit, basic form of the word(s) the unit represents. - """ + """Lemma of the unit; its basic text form.""" pass - @abc.abstractproperty + @abstractproperty def pos(self): - """``PoS``. + """Part of speech of the unit. - Part of speech of the unit. This will be one of enumeration constants - from :class:`PoS`. To get the textual value, use ``pos.value``. + One of enumerated constants of :class:`~plwn.enums.PoS`. """ pass - @abc.abstractproperty + @abstractproperty def variant(self): - """``int``. + """Ordinal number to differentiate between meanings of homonyms. - If the same lemma has different meanings as the same part of speech, - this number will be used to tell them apart. The first meaning has the - number 1. + Numbering starts at 1. 
""" pass - @abc.abstractproperty + @abstractproperty def definition(self): - """``str``. + """Textual description of the lexical unit's meaning. - Textual description of the lexical unit's meaning. - - Will be an empty string if the definition is not present in plWordNet. + May be ``None``. """ pass - @abc.abstractproperty + @abstractproperty def sense_examples(self): - """``Tuple[str]``. + """Text fragments. - Fragments of text that show how the lexical unit is used in the - language. + That show how the lexical unit is used in the language. - May be an empty collection, if no examples are present. + May be an empty tuple. """ pass - @abc.abstractproperty + @abstractproperty def sense_examples_sources(self): - """``Tuple[str]``. + """Symbolic representations of sources. + + From which the sense examples were taken. - Symbolic representations of sources from which the sense examples were - taken. + The symbols are short strings, defined by plWordNet. - This tuples has the same length as ``sense_examples``, and is aligned - by index (for example, the source of ``sense_examples[3]`` is at - ``sense_examples_sources[3]``). + This tuples has the same length as :attr:`.sense_examples`, and is + aligned by index (for example, the source of ``sense_examples[3]`` is + at ``sense_examples_sources[3]``). - To get pairs of of examples with their sources, use + To get pairs of examples with their sources, use ``zip(sense_examples, sense_examples_sources)`` """ # TODO List of source symbols, link to? pass - @abc.abstractproperty + @abstractproperty def external_links(self): - """``Tuple[str]``. - - URLs to webpages describing the meaning of the lexical unit. + """URLs linking to web pages describing the meaning of the lexical unit. - May be an empty collection, if no examples are present. + May be an empty collection. """ pass - @abc.abstractproperty + @abstractproperty def usage_notes(self): - """``Tuple[str]``. + """Symbols. 
- Symbols denoting certain properties of how the lexical unit is used. + Denoting certain properties of how the lexical unit is used in + the language. - For example, "daw." means that the word is considered dated. + The symbols are short strings, defined by plWordNet. For example, + ``daw.`` means that the word is considered dated. May be an empty collection. """ pass - @abc.abstractproperty + @abstractproperty def domain(self): - """``Domain``. + """plWordNet domain the lexical unit belongs to. - Wordnet domain the lexical unit belongs to. + One of enumerated constants of :class:`~plwn.enums.Domain`. """ pass - @abc.abstractproperty + @abstractproperty def verb_aspect(self): - """``Optional[VerbAspect]``. + """Aspect of a verb. + + Of the enumerated values of :class:`~plwn.enums.VerbAspect`. - Aspect of a verb. This will be one of the constants from - :class:`VerbAspect`, or ``None``, if the lexical unit is not a verb. + May be ``None`` if the unit is not a verb, or had no aspect assigned. """ pass - @abc.abstractproperty - def emotion_markedness(self): - """``Optional[EmotionMarkedness]``. + @abstractproperty + def is_emotional(self): + """Boolean value informing if the lexical unit has emotional affinity. - Markedness of emotional connotations of the lexical unit. May be - ``None``, if the unit has no emotional markedness. + If it is ``True``, then the lexical unit describes a term that has an + emotional load, and ``emotion_*`` properties will have meaningful + values, describing the affinity. - If this property is ``None``, then all other ``emotion_*`` properties - will be ``None`` or empty. + If it is ``False``, then the unit is emotionally neutral. All + ``emotion_*`` properties will be ``None`` or empty collections. + + This property can also be ``None``, which means that the unit has not + (yet) been evaluated with regards to emotional affinity. All + ``emotion_*`` properties are the same as when it's ``False``. 
""" pass - @abc.abstractproperty - def emotion_names(self): - """``Tuple[str, ...]``. + @abstractproperty + def emotion_markedness(self): + """Markedness of emotions associated with the lexical unit. - Names of emotions associated with this lexical unit. + May be ``None`` if the unit has no emotional markedness. + + If this property is ``None`` then all other ``emotion_*`` properties + will be ``None`` or empty collections. """ pass - @abc.abstractproperty - def emotion_valuations(self): - """``Tuple[str, ...]``. + @abstractproperty + def emotion_names(self): + """Tuple of names of emotions associated with this lexical unit.""" + pass - Valuations of emotions associated with this lexical unit. - """ + @abstractproperty + def emotion_valuations(self): + """Tuple of valuations of emotions associated with this lexical unit.""" pass - @abc.abstractproperty + @abstractproperty def emotion_example(self): - """``Optional[str]``. - - An example of an emotionally loaded sentence using the lexical unit. - """ + """Example of an emotionally charged sentence using the lexical unit.""" pass - @abc.abstractproperty + @abstractproperty def emotion_example_secondary(self): """``Optional[str]``. - This property is not ``None`` only if ``emotion_markedness`` is - ``amb``. In such case, :attr:`.emotion_example` will be an - example of a positive sentence, and this one will be a negative - sentence. + This property is not ``None`` only if :attr:`.emotion_markedness` is + :attr:`~plwn.enums.EmotionMarkedness.amb`. In such case, + :attr:`.emotion_example` will be an example of a positively charged + sentence, and this one will be a negatively charged sentence. """ pass - @abc.abstractproperty + @abstractproperty def synset(self): - """``SynsetBase``. + """An instance of :class:`SynsetBase`. - The synset the unit belongs to. + Representing the synset this unit belongs to. 
""" pass - @abc.abstractmethod - def related(self, relation_name): - """Iterate over lexical units to whom this unit has a certain relation. + @abstractproperty + def relations(self): + """Tuple of :class:`RelationInfoBase` instances. - :param str relation_name: The name of the relation to follow. + Containing types of distinct relations that have + outbound edges from this lexical unit. - :returns: Iterable of related units. - :rtype: Iterable[LexicalUnitBase] + Relations are returned in an arbitrary order. - :raises InvalidRelationNameException: If ``relation_name`` is not a - valid name of a lexical relation in plWordNet. + The tuple is special: methods for checking membership accept all + possible representations of a relation type (see + :meth:`RelationInfoBase.eqv`). """ pass - @abc.abstractproperty - def relations(self): - """``Tuple[str]``. + @abstractproperty + def is_polish(self): + """Check whether unit is Polish by its PoS.""" + pass - Tuple of all outward relations that lead from this lexical unit. + @abstractproperty + def is_english(self): + """Check whether unit is English by its PoS.""" + pass + + @abstractmethod + def related(self, relation_id=None): + """Get an iterable of :class:`LexicalUnitBase` instances. + + That are connected to this lexical unit by outbound edges + of lexical relation type identified by ``relation_id``. + + ``relation_id`` can be any lexical relation type identifier (see + :class:`RelationInfoBase`), a collection of relation types identifiers, + or ``None``, in which case lexical units related to this one by any + relation are selected. + + Note, that distinction between any relations that fit the + ``relation_id`` query is lost. Use :meth:`.related_pairs` if it's + needed. + + Raises :exc:`~plwn.exceptions.InvalidRelationTypeException` if + ``relation_id`` does not refer to an existing lexical relation type. + """ + pass + + @abstractmethod + def related_pairs(self, relation_id): + """Like :meth:`.related`. 
+ + But return an iterable of pairs + ``(<relation info>, <relation target unit>)``. """ pass @@ -849,19 +918,24 @@ class LexicalUnitBase(object): Enums are converted to their values and all collections are converted to tuples. - :param bool include_related: If ``True``, the dictionary will contain a - "related" member, whose value is a dictionary in format:: + Property :attr:`.relations` is omitted, as it would be redundant when + all related lexical units can be enumerated when ``include_related`` + is ``True``. - { - "<lexical relation name>": ( - (<relation target id>, <relation target string form>), - ... - ), - ... - } + An additional ``str`` member is present in the dictionary; its value is + the string representation of the lexical unit. - :returns: Dictionary contain data of the lexical unit. - :rtype: Mapping[str, Any] + If ``include_related`` is ``True`` (the default), the dictionary will + contain an additional ``related`` member, representing lexical units + related to this one, in the following format:: + + { + <lexical relation full name>: ( + (<relation target id>, <relation target string form>), + ... + ), + ... + } """ lu_dict = { u'id': self.id, @@ -875,6 +949,9 @@ class LexicalUnitBase(object): u'usage_notes': tuple(self.usage_notes), u'domain': self.domain.value, u'synset': self.synset.id, + u'verb_aspect': None + if self.verb_aspect is None + else self.verb_aspect.value, u'emotion_markedness': None if self.emotion_markedness is None else self.emotion_markedness.value, @@ -887,15 +964,41 @@ class LexicalUnitBase(object): if include_related: lu_dict[u'related'] = { - relname: tuple( + six.text_type(rel): tuple( (target.id, six.text_type(target)) - for target in self.related(relname) + for target in self.related(rel) ) - for relname in self.relations + for rel in self.relations } return lu_dict + def __lt_lempos(self, other): + # Common code for __lt__ and __le__ methods. + # Compares first two elements. 
+ colled = locale.strcoll(self.lemma, other.lemma) + if colled < 0: + return True + if colled > 0: + return False + if self.pos is other.pos: + # Defer comparison + return None + return self.pos.value < other.pos.value + + def __inner_eq(self, other): + return (locale.strcoll(self.lemma, other.lemma) == 0 and + self.pos == other.pos and + self.variant == other.variant) + + def __inner_cmp(self, cmp_op, other): + if not isinstance(other, LexicalUnitBase): + return NotImplemented + cmp_val = self.__lt_lempos(other) + return (cmp_val + if cmp_val is not None + else cmp_op(self.variant, other.variant)) + def __repr__(self): return '<LexicalUnit id={!r} lemma={!r} pos={!r} variant={!r}>'.format( self.id, @@ -913,50 +1016,187 @@ class LexicalUnitBase(object): ) def __hash__(self): - return hash((self.lemma, self.pos, self.variant)) + return hash((LexicalUnitBase, self.lemma, self.pos, self.variant)) def __eq__(self, other): if not isinstance(other, LexicalUnitBase): return NotImplemented - - return (locale.strcoll(self.lemma, other.lemma) == 0 and - self.pos == other.pos and - self.variant == other.variant) + return self.__inner_eq(other) def __ne__(self, other): - return not self == other - - # Total ordering done by hand, to minimize strcoll calls + if not isinstance(other, LexicalUnitBase): + return NotImplemented + return not self.__inner_eq(other) def __lt__(self, other): - cmp_ = self.__lt_lempos(other) - return cmp_ if cmp_ is not None else self.variant < other.variant + return self.__inner_cmp(op.lt, other) def __le__(self, other): - cmp_ = self.__lt_lempos(other) - return cmp_ if cmp_ is not None else self.variant <= other.variant + return self.__inner_cmp(op.le, other) def __gt__(self, other): - return not self <= other + return self.__inner_cmp(op.gt, other) def __ge__(self, other): - return not self < other + return self.__inner_cmp(op.ge, other) - def __lt_lempos(self, other): - # Common code for __lt__ and __le__ methods. - # Compares first two elements. 
- if not isinstance(other, LexicalUnitBase): + +@six.python_2_unicode_compatible +@six.add_metaclass(ABCMeta) +class RelationInfoBase(object): + """Encapsulates information associated with a relation type. + + The primary purpose of this class is to serve as a single object + consolidating all possible ways a relation type can be referred to. + + In general, plWordNet uses *parent* and *child* relation names. Child + relations are those that have actual instances between synsets and lexical + units. Parent relations only exist to group child relations together; child + relation names need to be only unique within the group of their parent + relation, while parent relations must be globally unique. + + For example, there are two relations named "część" ("part"); one being a + child of "meronimia" ("meronymy"), and another a child of "holonimia" + ("holonymy"). + + Some relation types have no parent; they behave like child relations, but + their names need to be unique on par with parent relations. + + plWordNet also stores shorter aliases for most of the relation types, + for example "hipo" for "hiponimia" ("hyponymy"). + + There are four ways to refer to relations wherever a relation identifier + is accepted (usually the argument is named ``relation_id``): + + * Full name, in format ``<parent name>/<child name>`` (or just + ``<child name>`` if the relation has no parent). + * One of the shorter aliases mentioned above. This is checked before + attempting to resolve relation names. Aliases must be globally unique. + * A parent name on its own. This resolves to all children of the parent + relation. Note, that it's not always valid to pass a name that resolves + to multiple relations; + :exc:`~plwn.exceptions.AmbiguousRelationTypeException` is raised in such + cases. + * Finally, a :class:`RelationInfoBase` instance may be used instead of + a string, standing for the child relation it represents. 
+ + Note, that parent relations don't have corresponding + :class:`RelationInfoBase` instance. + """ + + #: Character that separates parent from child name in full name + #: representation. It must not appear in any relation names or aliases. + SEP = u'/' + + @classmethod + def format_name(cls, parent_name, child_name): + """Format and return a full name out of parent and child name strings. + + ``parent_name`` may be ``None``, which will just return ``child_name``, + as relations without parents are fully represented just by their name. + """ + parform = u'' if parent_name is None else parent_name + cls.SEP + return parform + child_name + + @classmethod + def split_name(cls, full_name): + """Split a full name into a ``(<parent name>, <child name>)`` pair. + + ``parent_name`` may be ``None`` if :attr:`.SEP` doesn't appear in the + full name. + + However, if :attr:`.SEP` appears more than once in ``full_name``, a + ``ValueError`` will be raised. + """ + items = full_name.split(cls.SEP) + itlen = len(items) + + if itlen > 2: + raise ValueError(full_name) + + return (None, items[0]) if itlen < 2 else tuple(items) + + @abstractproperty + def kind(self): + """One of enumerated constants of :class:`~plwn.enums.RelationKind`. + + Denotes it's a synset or lexical relation. + """ + pass + + @abstractproperty + def parent(self): + """String name of the parent relation to this one. + + May be ``None`` if the relation has no parent. + """ + pass + + @abstractproperty + def name(self): + """String name of the relation.""" + + @abstractproperty + def aliases(self): + """Tuple of all aliases the relation can be referred to by.""" + pass + + def eqv(self, other): + """Check if ``other`` is an equivalent representation. + + Either an equal :class:`RelationInfoBase` object or + a relation identifier that refers to this object. + + This is less strict than the equality operator, which only checks for + equal :class:`RelationInfoBase` instances. 
+ """ + sother = six.text_type(other) + return sother == six.text_type(self) or sother in self.aliases + + def __inner_eq(self, other): + return (self.parent == other.parent and + self.name == other.name) + + def __inner_cmp(self, cmp_op, other): + if not isinstance(other, RelationInfoBase): return NotImplemented + return cmp_op(six.text_type(self), six.text_type(other)) - colled = locale.strcoll(self.lemma, other.lemma) + def __repr__(self): + return ( + '<RelationInfo name={!r} parent={!r} kind={!r} aliases={!r}>' + .format( + self.name, + self.parent, + self.kind, + self.aliases, + ) + ) - if colled < 0: - return True - if colled > 0: - return False + def __str__(self): + return self.format_name(self.parent, self.name) - if self.pos is other.pos: - # Defer comparison - return None + def __hash__(self): + return hash((RelationInfoBase, self.parent, self.name)) - return self.pos.value < other.pos.value + def __eq__(self, other): + if not isinstance(other, RelationInfoBase): + return NotImplemented + return self.__inner_eq(other) + + def __ne__(self, other): + if not isinstance(other, RelationInfoBase): + return NotImplemented + return not self.__inner_eq(other) + + def __lt__(self, other): + return self.__inner_cmp(op.lt, other) + + def __le__(self, other): + return self.__inner_cmp(op.le, other) + + def __gt__(self, other): + return self.__inner_cmp(op.gt, other) + + def __ge__(self, other): + return self.__inner_cmp(op.ge, other) diff --git a/plwn/default/__init__.py b/plwn/default/__init__.py new file mode 100644 index 0000000..c2f5501 --- /dev/null +++ b/plwn/default/__init__.py @@ -0,0 +1,20 @@ +# coding: utf8 + +# Copyright (C) 2017 MichaÅ‚ KaliÅ„ski +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. + +from ._default import get_default_load_args + +__all__ = 'get_default_load_args', diff --git a/plwn/default/_default.py b/plwn/default/_default.py new file mode 100644 index 0000000..6c1bdf5 --- /dev/null +++ b/plwn/default/_default.py @@ -0,0 +1,46 @@ +# coding: utf8 + +# Copyright (C) 2017 MichaÅ‚ KaliÅ„ski +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. + +from __future__ import absolute_import, division + + +import pkg_resources as pkgr + + +__all__ = 'get_default_load_args', + + +_DEFAULT_LOC = 'plwn.default', 'plwn-3.0-v5.db' +_DEFAULT_FORMAT = 'sqlite3' + + +def get_default_load_args(): + """Get a tuple with arguments to :func:`~plwn._loading.load`. + + Required to load the default storage. + + This is a helper function meant to be internally used by + :func:`~plwn._loading.load_default`. + + **Advanced note:** This function refers to the bundled file using + ``pkg_resources.resource_filename()``. 
Normally, the ``plwn`` package will + not be installed as a zipfile, but if you override this, then if you use + this function you will have to call ``pkg_resources.cleanup_resources()`` + before the process exits. + """ + storage_filename = pkgr.resource_filename(*_DEFAULT_LOC) + return storage_filename, _DEFAULT_FORMAT diff --git a/plwn/enums.py b/plwn/enums.py index e14f67a..ebca1d5 100644 --- a/plwn/enums.py +++ b/plwn/enums.py @@ -1,5 +1,21 @@ # coding: utf8 -"""Enumerated values used in plWordNet.""" + +# Copyright (C) 2017 MichaÅ‚ KaliÅ„ski +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. + +"""All enumerated values used in plWordNet.""" from __future__ import absolute_import, division @@ -11,6 +27,7 @@ import six __all__ = ( + 'RelationKind', 'PoS', 'VerbAspect', 'EmotionMarkedness', @@ -21,67 +38,139 @@ __all__ = ( ) -# Helper function for making dictionaries translating enum instances into -# numbers used to denote them in plWN database. -def _fill_numtrans(enumclass, num2enum, enum2num): - for num, enuminst in enumerate(enumclass, 1): - num2enum[num] = enuminst - enum2num[enuminst] = num +def _numtrans(n2e_dict, e2n_dict, start_value=1): + """Helper decorator for making dictionaries. + + Translating enum instances into numbers denoting them in plWN database. + """ + + def decorator(cls): + def by_db_number(number, optional=False): + """Return the enum value. 
+ + Associated with ``number`` value stored in the plWordNet database. + + Raises ``KeyError`` if ``number`` is not in the range valid for + the database field, unless ``optional`` is ``True``; then, + ``None`` is returned instead of an enum value. + """ + try: + return n2e_dict[number] + except KeyError: + if optional: + return None + raise + + def db_number(self): + """Number associated with the enum value in plWordNet database.""" + return e2n_dict[self] + + cls.by_db_number = staticmethod(by_db_number) + cls.db_number = property(db_number) + + for num, enval in enumerate(cls, start_value): + n2e_dict[num] = enval + e2n_dict[enval] = num + return cls + return decorator -def _get_from_numtrans(numtrans, num, optional): - try: - return numtrans[num] - except KeyError: - if optional: - return None - raise + +_RK_NUM2ENUM = {} +_RK_ENUM2NUM = {} + + +@_numtrans(_RK_NUM2ENUM, _RK_ENUM2NUM, 0) +class RelationKind(Enum): + """Whether a relation connects synsets or lexical units.""" + + # Explicit ordering is needed only in python 2. + if six.PY2: + __order__ = 'lexical synset' + + lexical = u'lexical' + synset = u'synset' -# Explicit ordering is needed only in python 2. 
-_POS_ORDER = 'verb noun adverb adjective' _POS_NUM2ENUM = {} _POS_ENUM2NUM = {} +@_numtrans(_POS_NUM2ENUM, _POS_ENUM2NUM) class PoS(Enum): - """Defines **Part of Speech** values used by plWN.""" + """Defines part of speech values used in plWordNet.""" if six.PY2: - __order__ = _POS_ORDER + __order__ = 'verb noun adverb adjective ' \ + 'verb_en noun_en adverb_en adjective_en' verb = u'verb' noun = u'noun' adverb = u'adverb' adjective = u'adjective' + # English (PWN) PoSes + verb_en = u'verb_en' + noun_en = u'noun_en' + adverb_en = u'adverb_en' + adjective_en = u'adjective_en' + v = verb n = noun adv = adverb adj = adjective - @staticmethod - def by_db_number(number, optional=False): - return _get_from_numtrans(_POS_NUM2ENUM, number, optional) + v_en = verb_en + n_en = noun_en + adv_en = adverb_en + adj_en = adjective_en + + # Polish aliases for parity + verb_pl = verb + noun_pl = noun + adverb_pl = adverb + adjective_pl = adjective + + v_pl = verb + n_pl = noun + adv_pl = adverb + adj_pl = adjective @property - def db_number(self): - return _POS_ENUM2NUM[self] + def short_value(self): + """Returns shortened value. + With "adjective" shortened to "adj" and "adverb" to "adv" + for compatibility with other conventions. 
+ """ + if not hasattr(self, "_short_value"): + value = self.value + value = value.replace("adjective", "adj") + value = value.replace("adverb", "adv") + self._short_value = value + return self._short_value -_fill_numtrans(PoS, _POS_NUM2ENUM, _POS_ENUM2NUM) + @property + def is_polish(self): + return not self.is_english + + @property + def is_english(self): + if not hasattr(self, "_is_english"): + self._is_english = self.value.endswith("_en") + return self._is_english -_VA_ORDER = 'perfective imperfective predicative two_aspect' _VA_NUM2ENUM = {} _VA_ENUM2NUM = {} +@_numtrans(_VA_NUM2ENUM, _VA_ENUM2NUM) class VerbAspect(Enum): - """Defines aspect values used by verbs in plWN.""" + """Defines verb aspect values used in plWordNet.""" if six.PY2: - __order__ = _VA_ORDER + __order__ = 'perfective imperfective predicative two_aspect' perfective = u'perf' imperfective = u'imperf' @@ -97,17 +186,6 @@ class VerbAspect(Enum): dk = perfective ndk = imperfective - @staticmethod - def by_db_number(number, optional=False): - return _get_from_numtrans(_VA_NUM2ENUM, number, optional) - - @property - def db_number(self): - return _VA_ENUM2NUM[self] - - -_fill_numtrans(VerbAspect, _VA_NUM2ENUM, _VA_ENUM2NUM) - class EmotionMarkedness(Enum): """Defines markedness of emotions associated with some lexical units.""" @@ -128,7 +206,8 @@ class EmotionMarkedness(Enum): def normalized(cls, strvalue): """Return an instance of this enum. - With string value normalized with regards to whitespace. + Corresponding to ``strvalue`` after normalizing it with regards + to whitespace. 
""" strvalue = strvalue.strip() @@ -147,7 +226,7 @@ class EmotionMarkedness(Enum): class EmotionName(Enum): - """Possible names of emotions associated with some lexical units.""" + """Defines names of emotions that may be associated with lexical units.""" joy = u'radość' trust = u'zaufanie' @@ -169,7 +248,10 @@ class EmotionName(Enum): class EmotionValuation(Enum): - """Possible valuations of emotions associated with some lexical units.""" + """Defines valuations of emotions. + + That may be associated with lexical units. + """ usefulness = u'użyteczność' good = u'dobro' @@ -198,19 +280,19 @@ class EmotionValuation(Enum): nieszczescie = unhappiness -_DOM_ORDER = 'bhp czy wytw cech czc umy por zdarz czuj jedz grp msc cel rz ' \ - 'os zj rsl pos prc il zw ksz st sbst czas zwz hig zmn cumy cpor wal ' \ - 'cjedz dtk cwytw cczuj ruch pst cpos sp cst pog jak rel odcz grad sys ' \ - 'adj adv cdystr caku cper cdel' _DOM_NUM2ENUM = {} _DOM_ENUM2NUM = {} +@_numtrans(_DOM_NUM2ENUM, _DOM_ENUM2NUM) class Domain(Enum): - """Wordnet domains of lexical units.""" + """Defines domains of lexical units occurring in plWordNet.""" if six.PY2: - __order__ = _DOM_ORDER + __order__ = 'bhp czy wytw cech czc umy por zdarz czuj jedz grp msc ' \ + 'cel rz os zj rsl pos prc il zw ksz st sbst czas zwz hig zmn ' \ + 'cumy cpor wal cjedz dtk cwytw cczuj ruch pst cpos sp cst pog ' \ + 'jak rel odcz grad sys adj adv cdystr caku cper cdel' bhp = u'najwyższe w hierarchii' czy = u'czynnoÅ›ci (nazwy)' @@ -273,21 +355,10 @@ class Domain(Enum): cper = u'czasowniki perduratywne' cdel = u'czasowniki delimitatywne' - @staticmethod - def by_db_number(number, optional=False): - return _get_from_numtrans(_DOM_NUM2ENUM, number, optional) - - @property - def db_number(self): - return _DOM_ENUM2NUM[self] - - -_fill_numtrans(Domain, _DOM_NUM2ENUM, _DOM_ENUM2NUM) - def make_values_tuple(enum_seq): """Auxiliary function. - That converts a sequence of enums to a tuple of enumvalues. 
+ That converts a sequence of enums to a tuple of enum string values. """ return tuple(en.value for en in enum_seq) diff --git a/plwn/exceptions.py b/plwn/exceptions.py index ccb2384..3c7fbef 100644 --- a/plwn/exceptions.py +++ b/plwn/exceptions.py @@ -1,3 +1,20 @@ +# coding: utf8 + +# Copyright (C) 2017 MichaÅ‚ KaliÅ„ski +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. + """Custom exceptions raised by PLWN API.""" from __future__ import absolute_import, division @@ -12,10 +29,8 @@ __all__ = ( 'MalformedIdentifierException', 'LoadException', 'DumpVersionException', - 'InvalidSynsetIdentifierException', - 'InvalidLexicalUnitIdentifierException', - 'InvalidRelationNameException', - 'InvalidPoSException', + 'InvalidRelationTypeException', + 'AmbiguousRelationTypeException', ) @@ -26,17 +41,9 @@ class PLWNAPIException(Exception): class NotFound(PLWNAPIException): - """Base for exceptions raised when an object is not found.""" - - def __init__(self, lemma, pos, variant, *args): - """Initialize NotFound.""" - super(NotFound, self).__init__(*args) + """Base for exceptions raised when an entity is not found.""" - self.args = ('lemma={!r} pos={!r} variant={!r}'.format( - lemma, - pos, - variant, - ),) + self.args + pass class LexicalUnitNotFound(NotFound): @@ -64,7 +71,6 @@ class MalformedIdentifierException(ReaderException): """ def __init__(self, id_): - """Initialize 
MalformedIdentifierException.""" super(MalformedIdentifierException, self).__init__( "Malformed identifier, expected digits at the end of the original" " id instead got {!r}" @@ -85,7 +91,6 @@ class DumpVersionException(LoadException): """ def __init__(self, version_is, version_required): - """Initialize DumpVersionException.""" super(DumpVersionException, self).__init__(version_is, version_required) self.version_is = version_is @@ -98,31 +103,20 @@ class DumpVersionException(LoadException): ) -class InvalidSynsetIdentifierException(PLWNAPIException): - """Raised when a query for a nonexistent synset ID is made.""" - - pass - - -class InvalidLexicalUnitIdentifierException(PLWNAPIException): - """Raised when a query for a nonexistent lexical unit ID is made.""" - - pass - - -class InvalidRelationNameException(PLWNAPIException): - """Raised when attempting to select synsets or units. +class InvalidRelationTypeException(PLWNAPIException): + """Raised when relation identifier does not refer to any existing relation. - Related by a relation that does not exist. + Or the relation exists for the other relation kind. """ pass -class InvalidPoSException(PLWNAPIException): - """Raised when a query for PoS is made. +class AmbiguousRelationTypeException(InvalidRelationTypeException): + """. - Which is not one of the valid constants. + Raised when a relation type identifier could refer to more than + one relation, but only one is permitted in the context. """ pass diff --git a/plwn/readers/comments.py b/plwn/readers/comments.py index 4a0f6ef..473d37d 100644 --- a/plwn/readers/comments.py +++ b/plwn/readers/comments.py @@ -1,3 +1,20 @@ +# coding: utf8 + +# Copyright (C) 2017 MichaÅ‚ KaliÅ„ski +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. + """Parsing strings in wordnet comment format. For readers that need to deal with them. @@ -21,6 +38,7 @@ __all__ = ( 'NON_EXAMPLE_TAG_NAMES', 'CommentData', 'parse_comment_string', + 'make_empty_comment_data', ) @@ -79,7 +97,22 @@ def parse_comment_string(cmt_str): return CommentData( tuple(examples), tuple(examples_src), - cmt.get_first(u'D'), + cmt.get_first(u'D', None), tuple(cmt[u'K']), tuple(cmt[u'L']), ) + + +def make_empty_comment_data(): + """Create an empty ``CommentData`` instance. + + For cases where there's no comment to parse but an instance + with null-data is needed. + """ + return CommentData( + examples=(), + examples_sources=(), + definition=None, + usage=(), + links=(), + ) diff --git a/plwn/readers/nodes.py b/plwn/readers/nodes.py index 31790ec..b1bf8d3 100644 --- a/plwn/readers/nodes.py +++ b/plwn/readers/nodes.py @@ -1,16 +1,133 @@ +# coding: utf8 + +# Copyright (C) 2017 MichaÅ‚ KaliÅ„ski +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. 
"""Those tuples are returned by readers and absorbed by storages.""" from collections import namedtuple -__all__ = 'SynsetNode', 'LexicalUnitNode' +__all__ = ( + 'SynsetNode', + 'LexicalUnitNode', + 'RelationTypeNode', + 'make_synset_node', + 'make_lexical_unit_node', + 'make_relation_type_node', +) -SynsetNode = namedtuple("SynsetNode", ["id", "definition", "related"]) +SynsetNode = namedtuple( + "SynsetNode", + ["id", "definition", "related", "is_artificial"], +) LexicalUnitNode = namedtuple( "LexicalUnitNode", ["id", "lemma", "pos", "variant", "synset", "unit_index", "definition", "usage_notes", "external_links", "examples", "examples_sources", - "domain", "related", "verb_aspect", "emotion_markedness", "emotion_names", - "emotion_valuations", "emotion_example_1", "emotion_example_2"] + "domain", "related", "verb_aspect", "is_emotional", "emotion_markedness", + "emotion_names", "emotion_valuations", "emotion_example_1", + "emotion_example_2"] ) +RelationTypeNode = namedtuple( + "RelationTypeNode", + ["kind", "name", "parent", "aliases"], +) + + +def make_synset_node(**props): + """Create a :class:`SynsetNode` instance. + + Inserting appropriate "empty" values into optional properties + where a value was not passed. + + Takes only keyword arguments, and passes them to :class:`SynsetNode` + constructor. + """ + syn = SynsetNode( + id=props.pop('id'), + definition=props.pop('definition', None), + related=props.pop('related', ()), + is_artificial=props.pop('is_artificial', False), + ) + + if props: + raise KeyError('Not known synset properties: ' + repr(tuple(props))) + + return syn + + +def make_lexical_unit_node(**props): + """Create a :class:`LexicalUnitNode` instance. + + Inserting appropriate "empty" values into optional properties + where a value was not passed. + + Takes only keyword arguments, and passes them to :class:`LexicalUnitNode` + constructor. 
+ """ + lex = LexicalUnitNode( + id=props.pop('id'), + lemma=props.pop('lemma'), + pos=props.pop('pos'), + variant=props.pop('variant'), + synset=props.pop('synset'), + unit_index=props.pop('unit_index'), + definition=props.pop('definition', None), + usage_notes=props.pop('usage_notes', ()), + external_links=props.pop('external_links', ()), + examples=props.pop('examples', ()), + examples_sources=props.pop('examples_sources', ()), + domain=props.pop('domain'), + related=props.pop('related', ()), + verb_aspect=props.pop('verb_aspect', None), + is_emotional=props.pop('is_emotional', None), + emotion_markedness=props.pop('emotion_markedness', None), + emotion_names=props.pop('emotion_names', ()), + emotion_valuations=props.pop('emotion_valuations', ()), + emotion_example_1=props.pop('emotion_example_1', None), + emotion_example_2=props.pop('emotion_example_2', None), + ) + + if props: + raise KeyError( + 'Not known lexical unit properties: ' + repr(tuple(props)), + ) + + return lex + + +def make_relation_type_node(**props): + """Create a :class:`RelationNode` instance. + + Inserting appropriate "empty" values into optional properties + where a value was not passed. + + Takes only keyword arguments, and passes them to :class:`RelationNode` + constructor. 
+ """ + rel = RelationTypeNode( + name=props.pop('name'), + kind=props.pop('kind'), + parent=props.pop('parent', None), + aliases=props.pop('aliases', frozenset()), + ) + + if props: + raise KeyError( + 'Not known relation properties: ' + repr(tuple(props)), + ) + + return rel diff --git a/plwn/readers/ubylmf.py b/plwn/readers/ubylmf.py index d28b361..a3859ef 100644 --- a/plwn/readers/ubylmf.py +++ b/plwn/readers/ubylmf.py @@ -1,13 +1,28 @@ +# coding: utf8 + +# Copyright (C) 2017 MichaÅ‚ KaliÅ„ski +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. + # FIXME Some assert statements should be converted to regular raises (asserts # should not be used for anything other than checking for errors in the code # itself). -"""Implementation of ubylmf reader.""" - from xml.etree import ElementTree import re import logging -from .nodes import SynsetNode, LexicalUnitNode +from .nodes import make_synset_node, make_lexical_unit_node from .. import exceptions as exc from ..enums import PoS, Domain @@ -119,7 +134,7 @@ def _make_lexicalunit(xml_lexicalentry, xml_sense): lu_unit_index = int(_extract_id( xml_sense.find("MonolingualExternalRef").get("externalReference")) ) - return LexicalUnitNode( + return make_lexical_unit_node( id=lu_id, lemma=lu_lemma, pos=PoS(lu_pos), @@ -135,14 +150,7 @@ def _make_lexicalunit(xml_lexicalentry, xml_sense): # the only one we care about. 
domain=Domain[lu_domain.rsplit('_', 1)[-1]], related=tuple(lu_related), - # The below properties are never stored in uby files (at present at - # least). - verb_aspect=None, - emotion_markedness=None, - emotion_names=(), - emotion_valuations=(), - emotion_example_1=None, - emotion_example_2=None, + # Other properties are not stored in UBY files. ) @@ -159,7 +167,7 @@ def _extract_definitions(xml_sense): """ # Get definition - can be empty! At most 2 xml_definitions = xml_sense.findall("Definition") - lu_definition = "" + lu_definition = None lu_usage_notes = [] lu_external_links = [] assert len(xml_definitions) <= 2, \ @@ -219,7 +227,7 @@ def _make_synset(xml_synset): s_id = _extract_id(xml_synset.get("id")) xml_def = xml_synset.find("Definition") s_def = xml_def.find("TextRepresentation").get("writtenText") \ - if xml_def is not None else "" + if xml_def is not None else None s_related = [] for xsr in xml_synset.findall("SynsetRelation"): try: @@ -232,20 +240,21 @@ def _make_synset(xml_synset): ElementTree.tostring(xsr, ENCODING), ElementTree.tostring(xml_synset, ENCODING) ) - return SynsetNode( + return make_synset_node( id=s_id, definition=s_def, - related=tuple(s_related) + related=tuple(s_related), + # There are no artificial synsets in UBY dumps ) def _extract_id(full_id): - """Extract only numerical identifier from the end of a full id. + """Extract only numeric identifier from the end of a full id. :param full_id: a full identifier that has a prefix before the real id. :type full_id: str|unicode - :return: a real, numerical id. + :return: a real, numeric id. 
:rtype: int :raises MalformedIdentifierException: if the original id doesn't end with diff --git a/plwn/readers/wndb.py b/plwn/readers/wndb.py index 47a1f54..e1b3093 100644 --- a/plwn/readers/wndb.py +++ b/plwn/readers/wndb.py @@ -1,286 +1,262 @@ # coding: utf8 + +# Copyright (C) 2017 MichaÅ‚ KaliÅ„ski +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. """Implementation of wndb reader.""" from __future__ import absolute_import, division -import collections as coll -import contextlib as ctxl + +from contextlib import closing +import io import logging import sqlalchemy as sa -from .nodes import SynsetNode, LexicalUnitNode -from .comments import parse_comment_string -from ..enums import ( - PoS, - VerbAspect, - EmotionMarkedness, - EmotionName, - EmotionValuation, - Domain, -) -from ..utils.sorting import text_key +from .. import enums as en +from .wnschema import WNSchemaProcessor -__all__ = 'wndb_reader', +__all__ = 'WNDBReader', -_log = logging.getLogger(__name__) +_LOG = logging.getLogger(__name__) -_EmotionData = coll.namedtuple( - '_EmotionData', - ('mark', 'names', 'valuations', 'example_1', 'example_2'), -) +# I'm not sure what role the relationtype of type 2 (synonymy) fulfills, but it +# seems completely unused by the relation tables. As such, it will be easiest +# to just omit it. 
+_SYNO_REL_OBJTYPE = 2 -def wndb_reader(wordnet_db_url): - """Generate UBY-LMF format compatible records. +class WNDBReader(object): + """Generate UBY-LMF format. - Directly from plWordNet database. + Compatible records directly from plWordNet database. - sqlalchemy is required for this method to work. + SQLAlchemy is required for this method to work. + """ - :param str wordnet_db_url: URL in sqlalchemy format, pointing to a - plWordNet database. + def __init__(self, wordnet_db_url_file): + with io.open(wordnet_db_url_file) as db_url_f: + self._db_url = db_url_f.readline().strip() + + self._db_eng = sa.create_engine(self._db_url) + self._db_meta = sa.MetaData(self._db_eng) + # Define required tables + self._dbt_synset = self.__mktable(u'synset') + self._dbt_synrel = self.__mktable(u'synsetrelation') + self._dbt_reltype = self.__mktable(u'relationtype') + self._dbt_lexunit = self.__mktable(u'lexicalunit') + self._dbt_lexrel = self.__mktable(u'lexicalrelation') + self._dbt_uns = self.__mktable(u'unitandsynset') + self._dbt_emo = self.__mktable(u'emotion') + + self._schema = WNSchemaProcessor() + + def __iter__(self): + # First, get relation type data, since it depends on nothing. And the + # other two kinds of nodes will need the full relation names. + # Then, get lexical units, since they will show which synsets are + # needed. + # Finally, get the synsets. 
+ self._extract_relation_types() + self._extract_emotions() + self._extract_units() + self._extract_unit_rels() + self._extract_uns() + self._extract_syns() + self._extract_syn_rels() + + for node in self._schema.finalize(): + yield node + + def _extract_relation_types(self): + reltype_q = sa.select(( + self._dbt_reltype.c.ID, + self._dbt_reltype.c.PARENT_ID, + self._dbt_reltype.c.objecttype, + self._dbt_reltype.c.name, + self._dbt_reltype.c.shortcut, + )) + with closing(self._db_eng.execute(reltype_q)) as result: + for row in result: + parent_id = row[self._dbt_reltype.c.PARENT_ID] + object_type = row[self._dbt_reltype.c.objecttype] + if object_type != _SYNO_REL_OBJTYPE: + self._schema.take_relation_type( + row[self._dbt_reltype.c.ID], + # Ignore the kind information of relation types that + # have parents. It will be inherited. + en.RelationKind.by_db_number(object_type) + if parent_id is None + else None, + row[self._dbt_reltype.c.name], + row[self._dbt_reltype.c.shortcut], + row[self._dbt_reltype.c.PARENT_ID], + ) - :return: a generator over PLwordnet entities. 
- :rtype: generator - """ - db_eng = sa.create_engine(wordnet_db_url) - db_meta = sa.MetaData(db_eng) - visited_synsets = set() - nonexistent_synsets = set() - - # Define required tables - dbt_synset = sa.Table(u'synset', db_meta, autoload=True) - dbt_synrel = sa.Table(u'synsetrelation', db_meta, autoload=True) - dbt_reltype = sa.Table(u'relationtype', db_meta, autoload=True) - dbt_lexunit = sa.Table(u'lexicalunit', db_meta, autoload=True) - dbt_lexrel = sa.Table(u'lexicalrelation', db_meta, autoload=True) - dbt_uns = sa.Table(u'unitandsynset', db_meta, autoload=True) - dbt_emo = sa.Table(u'emotion', db_meta, autoload=True) - - q = sa.select(( - dbt_lexunit.c.ID, - dbt_lexunit.c.lemma, - dbt_lexunit.c.pos, - dbt_lexunit.c.variant, - dbt_uns.c.SYN_ID, - dbt_uns.c.unitindex, - dbt_lexunit.c.domain, - dbt_lexunit.c.comment, - dbt_lexunit.c.verb_aspect, - )).select_from( - dbt_lexunit.join( - dbt_uns, - dbt_uns.c.LEX_ID == dbt_lexunit.c.ID, - ) - ).where(dbt_lexunit.c.pos.between(1, 4)) - - with ctxl.closing(db_eng.execute(q)) as result: - for lexid, lemma, pos, variant, synid, uidx, domain, comment,\ - verb_aspect in result: - - if synid in nonexistent_synsets: - continue - - # Select all relations children of the unit - q = sa.select( - (dbt_lexrel.c.CHILD_ID, dbt_reltype.c.name) - ).select_from( - dbt_lexrel.join( - dbt_reltype, - dbt_reltype.c.ID == dbt_lexrel.c.REL_ID, + def _extract_emotions(self): + emo_q = sa.select(( + self._dbt_emo.c.lexicalunit_id, + self._dbt_emo.c.emotions, + self._dbt_emo.c.valuations, + self._dbt_emo.c.markedness, + self._dbt_emo.c.unitStatus, + self._dbt_emo.c.example1, + self._dbt_emo.c.example2, + self._dbt_emo.c.super_anotation, + )) + with closing(self._db_eng.execute(emo_q)) as result: + for row in result: + ustatus = bool(row[self._dbt_emo.c.unitStatus]) + superann = bool(row[self._dbt_emo.c.super_anotation]) + markstr = row[self._dbt_emo.c.markedness] + + if markstr is not None: + try: + mark = 
en.EmotionMarkedness.normalized(markstr) + except (ValueError, TypeError): + _LOG.error( + 'Value %r is not valid as emotion markedness; ' + 'skipping record %r', + markstr, + row, + ) + continue + else: + mark = None + + self._schema.take_emotion( + row[self._dbt_emo.c.lexicalunit_id], + mark, + _make_emo_tuple( + en.EmotionName, + row[self._dbt_emo.c.emotions], + ), + _make_emo_tuple( + en.EmotionValuation, + row[self._dbt_emo.c.valuations], + ), + row[self._dbt_emo.c.example1], + row[self._dbt_emo.c.example2], + ustatus, + superann, ) - ).where(dbt_lexrel.c.PARENT_ID == lexid) - - with ctxl.closing(db_eng.execute(q)) as lex_rel_result: - # Ensure relations targets exist - lex_related = [] - for lex_child_id, lex_rel_name in lex_rel_result: - q = sa.select(( - sa.exists().select_from( - # This join to ensure the unit belongs to - # some synset. - dbt_lexunit.join( - dbt_uns, - dbt_uns.c.LEX_ID == dbt_lexunit.c.ID, - ) - ).where(sa.and_( - dbt_lexunit.c.ID == lex_child_id, - dbt_lexunit.c.pos.between(1, 4), - )), - )) - - if db_eng.execute(q).scalar(): - lex_related.append((lex_rel_name, lex_child_id)) - - # Now, select the unit's synset, but only once - if synid not in visited_synsets: - visited_synsets.add(synid) - - q = sa.select( - (dbt_synset.c.ID, dbt_synset.c.definition) - ).where(dbt_synset.c.ID == synid) - - synrow = db_eng.execute(q).first() - - if synrow is None: - nonexistent_synsets.add(synid) - continue - - # Select all relation children of the synset - q = sa.select( - (dbt_synrel.c.CHILD_ID, dbt_reltype.c.name) - ).select_from( - dbt_synrel.join( - dbt_reltype, - dbt_reltype.c.ID == dbt_synrel.c.REL_ID, - ) - ).where(dbt_synrel.c.PARENT_ID == synid) - - with ctxl.closing(db_eng.execute(q)) as syn_rel_result: - syn_related = [] - for syn_child_id, syn_rel_name in syn_rel_result: - # Ensure the child exists - q = sa.select(( - sa.exists().select_from( - dbt_synset.join( - dbt_uns, - dbt_uns.c.SYN_ID == dbt_synset.c.ID, - ).join( - dbt_lexunit, - 
dbt_lexunit.c.ID == dbt_uns.c.LEX_ID, - ) - ).where(sa.and_( - dbt_synset.c.ID == syn_child_id, - dbt_lexunit.c.pos.between(1, 4), - )), - )) - - if db_eng.execute(q).scalar(): - syn_related.append((syn_rel_name, syn_child_id)) - - yield SynsetNode( - synid, - synrow[1] if synrow[1] is not None else u'', - tuple(syn_related), + + def _extract_units(self): + lexunit_q = sa.select(( + self._dbt_lexunit.c.ID, + self._dbt_lexunit.c.lemma, + self._dbt_lexunit.c.pos, + self._dbt_lexunit.c.variant, + self._dbt_lexunit.c.domain, + self._dbt_lexunit.c.comment, + self._dbt_lexunit.c.verb_aspect, + )) + with closing(self._db_eng.execute(lexunit_q)) as result: + for row in result: + self._schema.take_lexical_unit( + row[self._dbt_lexunit.c.ID], + row[self._dbt_lexunit.c.lemma], + en.PoS.by_db_number(row[self._dbt_lexunit.c.pos]), + row[self._dbt_lexunit.c.variant], + en.Domain.by_db_number(row[self._dbt_lexunit.c.domain]), + row[self._dbt_lexunit.c.comment], + en.VerbAspect.by_db_number( + row[self._dbt_lexunit.c.verb_aspect], + True, + ), ) - # Try getting emotion annotations for the unit - emo_data = _extract_emotion_data(db_eng, dbt_emo, lexid) - - # Now, parse the comment string to get some last pieces of data - cmt_data = parse_comment_string(comment - if comment is not None - else u'') - - yield LexicalUnitNode( - id=lexid, - lemma=lemma, - pos=PoS.by_db_number(pos), - variant=variant, - synset=synid, - unit_index=uidx, - definition=cmt_data.definition, - usage_notes=cmt_data.usage, - external_links=cmt_data.links, - examples=cmt_data.examples, - examples_sources=cmt_data.examples_sources, - # XXX Since domains are defined as strings, the int is cast - # to unicode. It's possible, in the future to add a - # translation dict to textual representations. 
- domain=Domain.by_db_number(domain), - related=tuple(lex_related), - verb_aspect=VerbAspect.by_db_number(verb_aspect, True), - emotion_markedness=EmotionMarkedness.normalized(emo_data.mark) - if emo_data.mark is not None else None, - emotion_names=_make_enum_tuple( - EmotionName, - sorted(emo_data.names, key=text_key), - ), - emotion_valuations=_make_enum_tuple( - EmotionValuation, - sorted(emo_data.valuations, key=text_key), - ), - emotion_example_1=emo_data.example_1, - emotion_example_2=emo_data.example_2, - ) - - -def _extract_emotion_data(db_eng, db_t_emo, unit_id): - q_emo = sa.select(( - db_t_emo.c.markedness, # XXX Typo in schema - db_t_emo.c.emotions, - db_t_emo.c.valuations, - db_t_emo.c.example1, - db_t_emo.c.example2, - db_t_emo.c.unitStatus, - )).where(db_t_emo.c.lexicalunit_id == unit_id).order_by( - # "super_anotation" is a boolean 0 or 1, so descending sort will put - # the super annotation first. - db_t_emo.c.super_anotation.desc() # XXX Typo in schema - ) - - mark = None - names = set() - valuations = set() - example_1 = None - example_2 = None - - with ctxl.closing(db_eng.execute(q_emo)) as result: - for row in result: - if not row[db_t_emo.c.unitStatus]: - return _EmotionData( - mark=None, - names=(), - valuations=(), - example_1=None, - example_2=None, + def _extract_uns(self): + uns_q = sa.select(( + self._dbt_uns.c.SYN_ID, + self._dbt_uns.c.LEX_ID, + self._dbt_uns.c.unitindex, + )) + with closing(self._db_eng.execute(uns_q)) as result: + for row in result: + self._schema.take_unit_to_synset( + row[self._dbt_uns.c.LEX_ID], + row[self._dbt_uns.c.SYN_ID], + row[self._dbt_uns.c.unitindex], ) - if mark is None: - mark = row[db_t_emo.c.markedness] - if example_1 is None: - example_1 = row[db_t_emo.c.example1] - if example_2 is None: - example_2 = row[db_t_emo.c.example2] - - row_names = row[db_t_emo.c.emotions] - if row_names is not None: - names.update( - word.strip() - for word in row_names.split(u';') + def _extract_unit_rels(self): + lexrel_q 
= sa.select(( + self._dbt_lexrel.c.PARENT_ID, + self._dbt_lexrel.c.CHILD_ID, + self._dbt_lexrel.c.REL_ID, + )) + with closing(self._db_eng.execute(lexrel_q)) as result: + for row in result: + self._schema.take_lexical_relation( + row[self._dbt_lexrel.c.PARENT_ID], + row[self._dbt_lexrel.c.CHILD_ID], + row[self._dbt_lexrel.c.REL_ID], ) - row_valuations = row[db_t_emo.c.valuations] - if row_valuations is not None: - valuations.update( - word.strip() - for word in row_valuations.split(u';') + def _extract_syns(self): + synset_q = sa.select(( + self._dbt_synset.c.ID, + self._dbt_synset.c.isabstract, + self._dbt_synset.c.definition, + )) + with closing(self._db_eng.execute(synset_q)) as result: + for row in result: + self._schema.take_synset( + row[self._dbt_synset.c.ID], + row[self._dbt_synset.c.definition], + bool(row[self._dbt_synset.c.isabstract]), ) - return _EmotionData( - mark=mark, - names=names, - valuations=valuations, - example_1=example_1, - example_2=example_2, - ) + def _extract_syn_rels(self): + synrel_q = sa.select(( + self._dbt_synrel.c.PARENT_ID, + self._dbt_synrel.c.CHILD_ID, + self._dbt_synrel.c.REL_ID, + )) + with closing(self._db_eng.execute(synrel_q)) as result: + for row in result: + self._schema.take_synset_relation( + row[self._dbt_synrel.c.PARENT_ID], + row[self._dbt_synrel.c.CHILD_ID], + row[self._dbt_synrel.c.REL_ID], + ) + def __mktable(self, table_name): + return sa.Table(table_name, self._db_meta, autoload=True) -def _make_enum_tuple(enumtype, source): - result = [] - for item in source: +def _make_enums_from_values(enclass, valiter): + for val in valiter: try: - val = enumtype(item) + en = enclass(val) except ValueError: - _log.warning('Omitting bad value %r of enum %r', item, enumtype) + _LOG.error('Value %r is not valid for %r', val, enclass) else: - result.append(val) + yield en + - return tuple(result) +def _make_emo_tuple(enclass, emoval): + return () if emoval is None else tuple(frozenset(_make_enums_from_values( + enclass, + # 
Skip empty elements in the values sequence (some people just append a + # ";"). + (item for item in emoval.split(u';') if item), + ))) -_this_reader_ = wndb_reader +_this_reader_ = WNDBReader diff --git a/plwn/readers/wnschema.py b/plwn/readers/wnschema.py new file mode 100644 index 0000000..cb93ae8 --- /dev/null +++ b/plwn/readers/wnschema.py @@ -0,0 +1,541 @@ +# coding: utf8 + +# Copyright (C) 2017 MichaÅ‚ KaliÅ„ski +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. + +from __future__ import absolute_import, division + + +import collections as coll +import itertools as itt +import logging +import operator as op + +import six +import plwn_comments as plwnc +import plwn_comments.exceptions as plwnce +import plwn_comments.utils.usage_tags as plwncu + +from ..bases import RelationInfoBase +from ..utils.sorting import text_key +from . 
import nodes as nd + + +__all__ = 'WNSchemaProcessor', + + +_LOG = logging.getLogger(__name__) + +_BASIC_RELINST_ERROR_TMPL = \ + 'Relation %s between units / synset %s -> %s dropped: ' + +_SynData = coll.namedtuple('_SynData', ('definition', 'isart')) +_LexData = coll.namedtuple( + '_LexData', + ('lemma', 'pos', 'variant', 'domain', 'comment', 'verb_aspect'), +) +_UnSData = coll.namedtuple('_UnSData', ('synset_id', 'unit_index')) +_RelInstData = coll.namedtuple('_RelInstData', ('child', 'relation')) +_RelTypeData = coll.namedtuple( + '_RelTypeData', + ('kind', 'name', 'short', 'parent'), +) +_EmoData = coll.namedtuple( + '_EmoData', + ('mark', 'names', 'valuations', 'example1', 'example2', 'status', 'super'), +) + +_CmtDataT = coll.namedtuple( + '_CmtData', + ('examples', 'examples_sources', 'definition', 'usage', 'links'), +) + + +class WNSchemaProcessor(object): + """Helper class. + + Externalizing some operations common to reading from any + source that follows the "standard" plWordNet schema. + + In practice, objects of this class are intended for composition, being fed + data from a schema-abiding source, perform some consistency cleanups, then + providing well-formatted nodes that can be passed to a storage. + + The checks performed by this processor are as such: + + * Synsets that don't have units. + * Units not assigned to a synset. + * Units assigned to not-existing synsets. + * Relations to or from non-existent units / synsets. + * Relation types that don't have instances or are parents. + * Relation instances that don't have types (illegal in the schema). 
+ """ + + def __init__(self): + # These dicts should be indexed by IDs of the respective data records + self._syn_acc = {} + self._lex_acc = {} + self._lex_to_syn_acc = {} + self._reltype_acc = {} + # Relation instances are indexed like: + # parent id => list of _RelInstData + self._synrel_acc = coll.defaultdict(list) + self._lexrel_acc = coll.defaultdict(list) + # Emotion records are indexed like: lexical id => list of _EmoData + self._emo_acc = coll.defaultdict(list) + # This is aux sets for IDs that will be used for filtering + self._reltypes_being_parents = set() + self._relinstance_count = coll.Counter() + self._syn_to_units_check = coll.defaultdict(list) + + # The following are filled during finalization: + # Nodes need full relation names, this will provide translation from + # IDs. + self._relid2relname = None + # Some aliases may repeat in the plWN database, but it's not allowed + # here. + self._bad_rel_aliases = None + # All the units that were rejected for any reason - used by filtering + # relation. 
+ self._bad_units = None + self._bad_synsets = None + + def take_relation_type(self, id_, kind, name, short_name, parent_id): + data = _RelTypeData(kind, name, short_name, parent_id) + if _insert_if_uniq(self._reltype_acc, id_, data): + if parent_id is not None: + self._reltypes_being_parents.add(parent_id) + + def take_synset(self, id_, definition, is_artificial): + _insert_if_uniq( + self._syn_acc, + id_, + _SynData(definition, is_artificial), + ) + + def take_lexical_unit(self, + id_, + lemma, + pos, + variant, + domain, + comment, + verb_aspect): + _insert_if_uniq( + self._lex_acc, + id_, + _LexData( + lemma, + pos, + variant, + domain, + comment, + verb_aspect, + ), + ) + + def take_unit_to_synset(self, unit_id, synset_id, unit_index): + data = _UnSData(synset_id, unit_index) + if _insert_if_uniq(self._lex_to_syn_acc, unit_id, data): + self._syn_to_units_check[synset_id].append(unit_id) + + def take_synset_relation(self, parent_id, child_id, relation_id): + self.__take_relation( + self._synrel_acc, + parent_id, + child_id, + relation_id, + ) + + def take_lexical_relation(self, parent_id, child_id, relation_id): + self.__take_relation( + self._lexrel_acc, + parent_id, + child_id, + relation_id, + ) + + def take_emotion(self, + lexical_id, + markedness, + names, + valuations, + example1, + example2, + unit_status, + super_annotation): + self._emo_acc[lexical_id].append(_EmoData( + markedness, + names, + valuations, + example1, + example2, + unit_status, + super_annotation, + )) + + def finalize(self): + """After putting in data using the ``take_*`` methods. + + Perform all checks and yield all created nodes. 
+ """ + # Reset filtered sets, then fill them + self._bad_units = set() + self._filter_bad_units() + self._bad_synsets = set() + self._filter_bad_synsets() + self._bad_rel_aliases = set() + self._filter_bad_rel_aliases() + self._filter_bad_rel_instances() + + for node in itt.chain(self._fin_reltypes(), + self._fin_units(), + self._fin_syns()): + yield node + + def _fin_reltypes(self): + self._relid2relname = {} + + for rel_id, rel_data in six.iteritems(self._reltype_acc): + if rel_id in self._reltypes_being_parents: + continue + + if self._relinstance_count[rel_id] <= 0: + _LOG.warning( + 'Relation %s = %r omitted: no instances', + rel_id, + rel_data, + ) + continue + + # Inherit the kind data from the parent reltype, if the parent is + # not None. + if rel_data.parent is not None: + try: + par_data = self._reltype_acc[rel_data.parent] + except KeyError: + _LOG.error( + 'Relation %s has non-existent parent %s', + rel_id, + rel_data.parent, + ) + continue + rel_parname = par_data.name + rel_kind = par_data.kind + else: + rel_parname = None + rel_kind = rel_data.kind + + self._relid2relname[rel_id] = RelationInfoBase.format_name( + rel_parname, + rel_data.name, + ) + + yield nd.RelationTypeNode( + kind=rel_kind, + name=rel_data.name, + parent=rel_parname, + aliases=(rel_data.short,) + if rel_data.short is not None and + rel_data.short not in self._bad_rel_aliases + else (), + ) + + def _fin_units(self): + for lu_id, lu_data in six.iteritems(self._lex_acc): + if lu_id in self._bad_units: + continue + + final_emo = self._coalesce_emo(lu_id) + cmt_data = ( + _CmtData.make_empty() + if lu_data.comment is None + else _CmtData.extract_from_string(lu_data.comment) + ) + final_related = self._make_related_for_unit(lu_id) + try: + uns = self._lex_to_syn_acc[lu_id] + except KeyError: + # This shouldn't happen, but possibly can, so just skip the + # unit. 
+ continue + + yield nd.LexicalUnitNode( + id=lu_id, + lemma=lu_data.lemma, + pos=lu_data.pos, + variant=lu_data.variant, + synset=uns.synset_id, + unit_index=uns.unit_index, + definition=cmt_data.definition, + usage_notes=cmt_data.usage, + external_links=cmt_data.links, + examples=cmt_data.examples, + examples_sources=cmt_data.examples_sources, + domain=lu_data.domain, + related=final_related, + verb_aspect=lu_data.verb_aspect, + is_emotional=final_emo.status, + emotion_markedness=final_emo.mark, + emotion_names=final_emo.names, + emotion_valuations=final_emo.valuations, + emotion_example_1=final_emo.example1, + emotion_example_2=final_emo.example2, + ) + + def _fin_syns(self): + for syn_id, syn_data in six.iteritems(self._syn_acc): + if syn_id in self._bad_synsets: + continue + + final_related = self._make_related_for_synset(syn_id) + + yield nd.SynsetNode( + id=syn_id, + definition=syn_data.definition, + related=final_related, + is_artificial=syn_data.isart, + ) + + def _filter_bad_units(self): + for lex_id in self._lex_acc: + if lex_id not in self._lex_to_syn_acc: + _LOG.error('Unit %s belongs to no synset', lex_id) + self._bad_units.add(lex_id) + continue + + syn_of_lex = self._lex_to_syn_acc[lex_id].synset_id + if syn_of_lex not in self._syn_acc: + _LOG.error( + 'Unit %s belongs to non-existent synset %s', + lex_id, + syn_of_lex, + ) + self._bad_units.add(lex_id) + + def _filter_bad_synsets(self): + for syn_id in self._syn_acc: + # Do those synsets have units and those units are real? + syn_units = self._syn_to_units_check.get(syn_id, ()) + any_unit_valid = False + + # This check doesn't necessarily remove the synset, but + # notification will be given. At least one valid unit for synset + # must remain. 
+ for unit_id in syn_units: + if unit_id in self._lex_acc: + any_unit_valid = True + else: + _LOG.error( + 'Unit %s of synset %s is non-existent', + unit_id, + syn_id, + ) + + if not any_unit_valid: + _LOG.error('Synset %s has no (valid) units', syn_id) + self._bad_synsets.add(syn_id) + + def _filter_bad_rel_aliases(self): + # If an alias repeats multiple times, remember it to remove both + # instances later (so don't decide which is the "right" one). + all_aliases = set() + for rel_data in six.itervalues(self._reltype_acc): + alias = rel_data.short + if alias in all_aliases: + _LOG.error( + 'Relation shortcut %r is not unique; dropping both', + alias, + ) + self._bad_rel_aliases.add(alias) + else: + all_aliases.add(alias) + + def _filter_bad_rel_instances(self): + # Assuming that all bad synsets and units have been filtered, drop all + # instances of relations that refer to them. + # It removes instances in-place from related dicts, and decreases + # counts of instances for relation types. + self.__recount_rels(self._synrel_acc, self._syn_acc, self._bad_synsets) + self.__recount_rels(self._lexrel_acc, self._lex_acc, self._bad_units) + + def _make_related_for_unit(self, lex_id): + return self.__make_related(self._lexrel_acc, lex_id) + + def _make_related_for_synset(self, syn_id): + return self.__make_related(self._synrel_acc, syn_id) + + def _coalesce_emo(self, lex_id): + # The algorithm is like this: + # Start with super-annotation, iterate all annotations, fill what is + # possible. + # Do not overwrite status, markedness or examples, but sum names and + # values. If the super-annotation is marked as not-an-emotion, just + # return empty data. + # When returning the final emo value, don't remember its + # super annotation - it doesn't matter at this point; set to None. + # TODO Ensure that this algorithm makes sense, there seem to be more + # sensible ways of handling things. Move on for now. 
+ final_status = None + final_mark = None + final_ex1 = None + final_ex2 = None + names_acc = [] + values_acc = [] + + for emo_data in sorted(self._emo_acc.get(lex_id, ()), + key=op.attrgetter('super'), + reverse=True): + if final_status is None: + final_status = emo_data.status + if final_mark is None: + final_mark = emo_data.mark + if final_ex1 is None: + final_ex1 = emo_data.example1 + if final_ex2 is None: + final_ex2 = emo_data.example2 + + names_acc.extend(emo_data.names) + values_acc.extend(emo_data.valuations) + + return _EmoData( + mark=final_mark, + names=_emo_uniq_sorted_tuple(names_acc), + valuations=_emo_uniq_sorted_tuple(values_acc), + example1=final_ex1, + example2=final_ex2, + status=final_status, + super=None, + ) + + def __take_relation(self, relinst_acc, parent_id, child_id, relation_id): + relinst_acc[parent_id].append(_RelInstData(child_id, relation_id)) + self._relinstance_count[relation_id] += 1 + + def __recount_rels(self, relinst_acc, item_acc, bad_acc): + for parent_id, children in six.iteritems(relinst_acc): + # Do not filter parents; this will be done at yielding, outside + fil_children = [] + for relinst in children: + if relinst.relation not in self._reltype_acc: + _LOG.error( + _BASIC_RELINST_ERROR_TMPL + + 'non-existent relation', + relinst.relation, + parent_id, + relinst.child, + ) + elif relinst.child not in item_acc or relinst.child in bad_acc: + _LOG.error( + _BASIC_RELINST_ERROR_TMPL + + 'the child is non-existent or invalid', + relinst.relation, + parent_id, + relinst.child, + ) + self._relinstance_count[relinst.relation] -= 1 + else: + fil_children.append(relinst) + + relinst_acc[parent_id] = fil_children + + def __make_related(self, relinst_acc, parent_id): + return tuple( + (self._relid2relname[relinst.relation], relinst.child) + for relinst in relinst_acc.get(parent_id, ()) + ) + + +class _CmtData(_CmtDataT): + + __slots__ = () + + # :class:`plwn_comments.TagBank` structure that defines all kinds of + # comment tags which 
are needed by PLWN API. + _WN_TAGS = plwnc.TagBank() + # Usage notes + _WN_TAGS.define(u'K') + # External links + _WN_TAGS.define(u'L', u'{') + # Definition + _WN_TAGS.define(u'D') + # The distinction for these tags is useful, since all examples go to one + # place. + _NON_EXAMPLE_TAG_NAMES = frozenset((u'K', u'L', u'D')) + # And define those example tags + _WN_TAGS.define_from( + plwncu.iter_usage_tags(), + plwncu.DEFAULT_USAGE_TAG_SURROUND, + ) + + @classmethod + def extract_from_string(cls, cmt_str): + try: + cmt = plwnc.Comment.parse(cmt_str, cls._WN_TAGS) + except plwnce.PLWNCommentsException: + # For now just make an empty comment which will make all fields + # unset. + cmt = plwnc.Comment(cls._WN_TAGS) + + # Get all examples + examples = [] + examples_src = [] + + for tagname, tagcontents in cmt.items(): + if tagname not in cls._NON_EXAMPLE_TAG_NAMES: + examples.extend(tagcontents) + examples_src.extend(itt.repeat(tagname, len(tagcontents))) + + return cls( + examples=tuple(examples), + examples_sources=tuple(examples_src), + definition=cmt.get_first(u'D', None), + usage=tuple(cmt[u'K']), + links=tuple(cmt[u'L']), + ) + + @classmethod + def make_empty(cls): + return cls( + examples=(), + examples_sources=(), + definition=None, + usage=(), + links=(), + ) + + +def _insert_if_uniq(data_acc, id_val, data_obj): + obj_in = data_acc.setdefault(id_val, data_obj) + + if obj_in is not data_obj: + _LOG.error( + 'Cannot add record %r with ID %s: already associated with ' + 'record %r', + data_obj, + id_val, + obj_in, + ) + return False + return True + + +def _emo_enums_sortkey(item): + return text_key(item.value) + + +def _emo_uniq_sorted_tuple(emo_acc): + # Sort the names and valuations for predictable behaviour + return tuple(sorted(frozenset(emo_acc), key=_emo_enums_sortkey)) diff --git a/plwn/readers/wnxml.py b/plwn/readers/wnxml.py index 18b40bf..51f637d 100644 --- a/plwn/readers/wnxml.py +++ b/plwn/readers/wnxml.py @@ -1,211 +1,163 @@ # coding: utf8 
-"""Implementation of wnxml readwer.""" + +# Copyright (C) 2017 MichaÅ‚ KaliÅ„ski +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. from __future__ import absolute_import, division -from collections import defaultdict -import itertools as itt -import logging import xml.etree.ElementTree as et -import six - -from .comments import parse_comment_string -from .nodes import SynsetNode, LexicalUnitNode -from ..enums import PoS, Domain - +from .wnschema import WNSchemaProcessor +from .. import enums as en -__all__ = 'wnxml_reader', +__all__ = 'WNXMLReader', -_log = logging.getLogger(__name__) _POSES = { - u'rzeczownik': PoS.n, - u'czasownik': PoS.v, - u'przymiotnik': PoS.adj, - u'przysłówek': PoS.adv, + u'rzeczownik': en.PoS.n, + u'czasownik': en.PoS.v, + u'przymiotnik': en.PoS.adj, + u'przysłówek': en.PoS.adv, + u'rzeczownik pwn': en.PoS.n, + u'czasownik pwn': en.PoS.v, + u'przymiotnik pwn': en.PoS.adj, + u'przysłówek pwn': en.PoS.adv, } - - -# Since etree may return either unicode or byte strings, all strings returned -# by its interfaces are wrapped with six.text_type - - -def wnxml_reader(wnxml_file): - """Generate plWordNet records from the official XML file. - - :param str wnxml_file: Path to the plWordNet XML file to read from. - - :return: a generator over PLwordnet entities. 
- :rtype: generator - """ - # The regrettably huge global storage for yielding - synsets = {} - lexunits = {} - synid_n_lexids = [] - reltypes_syn = {} - reltypes_lex = {} - # These need defaults to add instances to parent syn / lex - synrels = defaultdict(list) - lexrels = defaultdict(list) - - # Now, parse everything - for _, elem in et.iterparse(wnxml_file): - if elem.tag == u'lexical-unit': - _make_lexunit(elem, lexunits) - elif elem.tag == u'synset': - _make_synset(elem, synsets, synid_n_lexids) - elif elem.tag == u'relationtypes': - _make_reltype(elem, reltypes_syn, reltypes_lex) - elif elem.tag == u'synsetrelations': - _make_rel(elem, synrels) - elif elem.tag == u'lexicalrelations': - _make_rel(elem, lexrels) - - # Finalize units to synsets mapping - _make_units2synsets(lexunits, synid_n_lexids) - - # Now complete synsets and lexunits with relations and yield - for node in itt.chain( - _make_gen(synsets, synrels, reltypes_syn), - _filter_nosynset(_make_gen(lexunits, lexrels, reltypes_lex)), - ): - yield node - - -_this_reader_ = wnxml_reader - - -def _make_lexunit(lu_node, lu_dict): - # Only words will pl poses will be remembered - xmlpos = six.text_type(lu_node.get(u'pos')) - - if xmlpos not in _POSES: - return - - lu_id = int(lu_node.get(u'id')) - cmt_data = parse_comment_string(six.text_type(lu_node.get(u'desc'))) - # Create a temporal object which will be filled later - lu_dict[lu_id] = LexicalUnitNode( - id=lu_id, - lemma=six.text_type(lu_node.get(u'name')), - pos=_POSES[xmlpos], - variant=int(lu_node.get(u'variant')), - synset=None, - unit_index=None, - definition=cmt_data.definition, - usage_notes=cmt_data.usage, - external_links=cmt_data.links, - examples=cmt_data.examples, - examples_sources=cmt_data.examples_sources, - domain=Domain[lu_node.get(u'domain')], - related=None, - # The below properties are not stored in wnxml (at least in present) - verb_aspect=None, - emotion_markedness=None, - emotion_names=(), - emotion_valuations=(), - 
emotion_example_1=None, - emotion_example_2=None, - ) - - -def _make_synset(syn_node, syn_dict, snu_list): - # Only take non-abstract synsets - if six.text_type(syn_node.get(u'abstract')) != u'false': - return - - synid = int(syn_node.get(u'id')) - # Assign lexical units to synsets they belong to. - snu_list.append((synid, [int(uid_node.text) - for uid_node in syn_node.iter(u'unit-id')])) - # As with lexunits, related field is not yet filled - syn_dict[synid] = SynsetNode( - synid, - six.text_type(syn_node.get(u'definition')), - None, - ) - - -def _make_units2synsets(lu_dict, snu_list): - for synid, lexids in snu_list: - for uidx, uid in enumerate(lexids): - try: - lu = lu_dict[uid] - except KeyError: - _log.warning( - 'Unit %d from synset %d does not exist', - uid, - synid, - ) - else: - lu_dict[uid] = lu._replace(synset=synid, unit_index=uidx) - - -# Relation types are spelled in descriptive names -_RELTYPE_SYN = u'relacja pomiÄ™dzy synsetami' -_RELTYPE_LEX = u'relacja leksykalna' - - -def _make_reltype(reltype_node, synreltype_dict, lureltype_dict): - relid = int(reltype_node.get(u'id')) - typestr = reltype_node.get(u'type') - - if typestr == _RELTYPE_SYN: - the_dict = synreltype_dict - elif typestr == _RELTYPE_LEX: - the_dict = lureltype_dict - else: - # There is one more relation type, synonymy, but it's artificial - return - - # Remember the name so that will be inserted into the reltype storages - the_dict[relid] = six.text_type(reltype_node.get(u'name')) - - -# Relations are put into dicts indexed by parent IDs, to be later put into -# nodes. One function can handle both types. -def _make_rel(node, reldict): - # Get reltype - drop if unknown - reldict[int(node.get(u'parent'))].append(( - int(node.get(u'child')), - # Reltypes should be returned by names, not IDs - int(node.get(u'relation')), - )) - - -# As with relation, yielding is general for syn / lexes. -# Related IDs need to be added, and those not known purged. 
-def _make_gen(node_dict, rels_dict, reltype_dict): - for node in six.itervalues(node_dict): - related = [] - for child_id, rel_id in rels_dict.get(node.id, ()): - try: - relname = reltype_dict[rel_id] - except KeyError: - _log.warning( - 'Unknown relation %d (of %s), from %d to %d', - rel_id, - node.__class__.__name__, - node.id, - child_id, - ) - continue - - # Only remember from the related dict the items whose IDs are in - # the node dict. - if child_id in node_dict: - related.append((child_id, relname)) - related.append((relname, child_id)) - yield node._replace(related=related) - - -# Addendum to _make_gen for lexical units to filter synsetless ones -def _filter_nosynset(lu_node_gen): - for lu_node in lu_node_gen: - if lu_node.synset is None: - _log.warning('Unit %d belongs to no synset', lu_node.id) - else: - yield lu_node +_RELKINDS = { + u'relacja pomiÄ™dzy synsetami': en.RelationKind.synset, + u'relacja leksykalna': en.RelationKind.lexical, +} +_BOOLVALUES = {u'true': True, u'false': False} + + +# Examples of nodes that this reader is supposed to parse: +# <lexical-unit id="478387" name=".22" pos="rzeczownik pwn" +# tagcount="0" domain="wytw" desc="" workstate="Nie przetworzone" +# source="użytkownika" variant="1"/> +# <lexicalrelations parent="107360" child="61999" relation="104" +# valid="true" owner=""/> +# <relationtypes id="242" type="relacja leksykalna" +# name="rola: materiaÅ‚" +# description="Relacja roli: materiaÅ‚u jest wyjÄ…tkowÄ… relacjÄ… roli, +# łączÄ…cÄ… przymiotniki materiaÅ‚owe z ich podstawami rzeczownikowymi nazwami +# substancji i materiałów." +# posstr="rzeczownik,przymiotnik" +# display="<x#> jest zrobione z <y#>" shortcut="mat" +# autoreverse="false" pwn=""> +# Child relation types have the additional "parent" attribute. 
+# <relationtypes id="35" type="relacja leksykalna" parent="32" +# name="pacjens|obiekt" description="(dziedziczone)" +# posstr="(dziedziczone)" +# display="<x#> jest pacjensem dla czynnoÅ›ci wyrażanej przez <y#>" +# shortcut="rol:pacj" autoreverse="false" pwn="p_rp"> +# <synset id="12" workstate="Nie przetworzone" split="1" owner="" +# definition="" desc="" abstract="false"> +# <unit-id>12</unit-id> +# <unit-id>10191</unit-id> +# </synset> +# <synsetrelations parent="1366" child="551" relation="10" +# valid="true" owner=""/> + +class WNXMLReader(object): + + def __init__(self, wordnet_xml_file): + self._wnxml_file = wordnet_xml_file + self._schema = WNSchemaProcessor() + + self._dispatch = { + u'lexical-unit': self._proc_lexunit, + u'synset': self._proc_synset, + u'relationtypes': self._proc_reltype, + u'synsetrelations': _make_proc_relinst( + self._schema.take_synset_relation, + ), + u'lexicalrelations': _make_proc_relinst( + self._schema.take_lexical_relation, + ), + } + + def __iter__(self): + for _, elem in et.iterparse(self._wnxml_file): + elem_proc = self._dispatch.get(elem.tag) + if elem_proc is not None: + elem_proc(elem) + + for node in self._schema.finalize(): + yield node + + def _proc_reltype(self, elem): + id_ = int(elem.get('id')) + kind = _RELKINDS[elem.get('type')] + parent = elem.get('parent') + if parent is not None: + parent = int(parent) + + self._schema.take_relation_type( + id_, + kind, + elem.get('name'), + elem.get('shortcut'), + parent, + ) + + def _proc_lexunit(self, elem): + id_ = int(elem.get('id')) + var = int(elem.get('variant')) + pos = _POSES[elem.get('pos')] + dom = en.Domain(elem.get('domain')) + + self._schema.take_lexical_unit( + id_, + elem.get('name'), + pos, + var, + dom, + elem.get('desc'), + None, # No verb aspect at present + ) + + def _proc_synset(self, elem): + id_ = int(elem.get('id')) + isart = _BOOLVALUES[elem.get('abstract')] + + self._proc_synset_units( + id_, + (uelem for uelem in elem if uelem.tag == u'unit-id'), 
+ ) + self._schema.take_synset(id_, elem.get('definition'), isart) + + def _proc_synset_units(self, synid, unit_elems): + for uidx, uelem in enumerate(unit_elems, 1): + self._schema.take_unit_to_synset( + int(uelem.text), + synid, + uidx, + ) + + +def _make_proc_relinst(taker): + def elem_proc(elem): + parent = int(elem.get('parent')) + child = int(elem.get('child')) + relid = int(elem.get('relation')) + + taker(parent, child, relid) + + return elem_proc + + +_this_reader_ = WNXMLReader diff --git a/plwn/relation_aliases.tsv b/plwn/relation_aliases.tsv deleted file mode 100644 index b7f87a6..0000000 --- a/plwn/relation_aliases.tsv +++ /dev/null @@ -1,5 +0,0 @@ -hiperonimia hiper -hiponimia hipo -deminutywność dem -holonimia holo -meronimia mero diff --git a/plwn/relresolver.py b/plwn/relresolver.py deleted file mode 100644 index 940a529..0000000 --- a/plwn/relresolver.py +++ /dev/null @@ -1,122 +0,0 @@ -"""Implementation of Relation Resolver.""" -from __future__ import absolute_import, division - - -from contextlib import closing -import logging - -import pkg_resources as pkgr -import six - - -__all__ = 'RelationResolver', 'get_default_relation_resolver' - - -_DEFAULT_RESOLVER_LOC = 'plwn', 'relation_aliases.tsv' -_default_resolver_obj = None - -_log = logging.getLogger(__name__) - - -class RelationResolver(object): - """Stores dictionary of relation name aliases to full names.""" - - @classmethod - def from_tsv(cls, tsv_stream): - """Creates an instance from a TSV file. - - The first item of each line should be the full name, and every other - should be an alias (similar to ``from_reverse_dict``). - - :param tsv_stream: The stream from which TSV lines are read. 
- :type tsv_stream: TextIO - - :rtype: RelationResolver - """ - adict = {} - - for line in tsv_stream: - items = line.strip().split(u'\t') - fullname = items[0].strip() - for alias in items[1:]: - adict[alias.strip()] = fullname - - return cls(adict) - - @classmethod - def from_reverse_dict(cls, rdict): - """Creates an instance from a dictionary. - - Mapping full names to lists of aliases that should resolve to them. - - :type rdict: Mapping[str, List[str]] - - :rtype: RelationResolver - """ - adict = {} - - for full, aliases in six.iteritems(rdict): - for alias in aliases: - adict[alias] = full - - return cls(adict) - - def __init__(self, aliases): - """. - - :param aliases: Dictionary (or pairs sequence) mapping relation aliases - to full names. - :type aliases: Mapping[str, str] - """ - self._aliases = dict(aliases) - - def add_alias(self, alias, fullname): - """Add a new alias to the dictionary. - - :param str alias: The alias. - - :param str fullname: The name the alias will resolve to. - """ - self._aliases[alias] = fullname - - def resolve_name(self, relname): - """Resolve a possible alias to a full name. - - If ``relname`` is not a known alias, it's returned unchanged. - - :param str relname: The relation name that may be an alias that needs - to be resolved. - - :return: ``relname`` or, if it's an alias, the full name it resolves - to. - :rtype: str - """ - return self._aliases.get(relname, relname) - - -def get_default_relation_resolver(): - """Create an instance of ``RelationResolver``. - - That loads a file with all default relation name aliases. - - The default aliases TSV file is located in ``plwn`` package root, as - ``relation_aliases.tsv``. - - :return: The default ``RelationResolver`` instance, initialized once on the - first call. 
- :rtype: RelationResolver - """ - global _default_resolver_obj - - if _default_resolver_obj is None: - try: - with closing(pkgr.resource_stream(*_DEFAULT_RESOLVER_LOC)) \ - as tsv_in: - _default_resolver_obj = RelationResolver.from_tsv( - line.decode('utf8') for line in tsv_in - ) - except IOError: - _log.exception('Failed to load default aliases file') - _default_resolver_obj = RelationResolver({}) - - return _default_resolver_obj diff --git a/plwn/storages/objects.py b/plwn/storages/objects.py deleted file mode 100644 index 9618a75..0000000 --- a/plwn/storages/objects.py +++ /dev/null @@ -1,520 +0,0 @@ -"""Implementation which stores data in plain python objects. - -Should be fairly fast to construct, but querying and memory -efficiencies may not be too great. -""" - -from __future__ import absolute_import, absolute_import - - -import collections as coll -import logging -import operator as op - -import six -from six.moves import cPickle - -from ..readers import nodes as nd -from ..enums import PoS -from ..relresolver import get_default_relation_resolver -from ..utils.tupwrap import tup_wrapped, TupWrapper -from ..utils.sorting import text_key -from .. 
import bases, exceptions as exc - - -__all__ = 'PLWordNet', 'Synset', 'LexicalUnit' - - -_log = logging.getLogger(__name__) - - -class PLWordNet(bases.PLWordNetBase): - - _STORAGE_NAME = 'objects' - _SCHEMA_VERSION = 2 - - @classmethod - def from_reader(cls, reader, dump_to=None): - obj = cls() - obj.__read_data(reader) - - if dump_to is not None: - with open(dump_to, 'wb') as dump_ofs: - cPickle.dump(obj, dump_ofs, cPickle.HIGHEST_PROTOCOL) - - return obj - - @classmethod - def from_dump(cls, dump): - with open(dump, 'rb') as dump_ifs: - obj = cPickle.load(dump_ifs) - - if not isinstance(obj, cls): - raise exc.LoadException( - 'Unpickled object is not an instance of ' + repr(cls) - ) - - if not hasattr(obj, '_version') or obj._version != cls._SCHEMA_VERSION: - raise exc.DumpVersionException( - getattr(obj, '_version', None), - cls._SCHEMA_VERSION, - ) - - return obj - - @staticmethod - def __fill_id_reldict(src_node, id_rel_dict, id_set): - rels = coll.defaultdict(list) - for relname, reltarget in src_node.related: - if reltarget not in id_set: - _log.warning( - 'Target %d of relation %s from %d does not exist', - reltarget, - relname, - src_node.id, - ) - else: - rels[relname].append(reltarget) - - id_rel_dict[src_node.id] = coll.OrderedDict( - (relname, tuple(rels[relname])) - for relname in sorted(rels, key=text_key) - ) - - @staticmethod - def __gen_item_reldict(id_rel_dict, item_rel_dict, item_dict): - for src_id, rel_dict in six.iteritems(id_rel_dict): - irel_dict = coll.OrderedDict() - for relname, trg_ids in six.iteritems(rel_dict): - trg_items = [] - for trg_id in rel_dict[relname]: - try: - trg_item = item_dict[trg_id] - except KeyError: - _log.warning( - 'Target %d of relation %s from %d does not exist', - trg_id, - relname, - src_id, - ) - else: - trg_items.append(trg_item) - - if trg_items: - irel_dict[relname] = tuple(trg_items) - - if irel_dict: - item_rel_dict[src_id] = irel_dict - - def __init__(self): - """**NOTE:** This constructor should not be 
invoked directly. - - Use one of the standard methods: ``from_dump`` or ``from_reader``. - """ - super(PLWordNet, self).__init__() - - # Remember the version for unpickling check - self._version = self._SCHEMA_VERSION - - # Master indexes - self._synsets = coll.OrderedDict() - self._units = coll.OrderedDict() - - # Secondary indexes for lookup of units by lemma, pos and var - self._i_lem_pos_var = {} - self._i_lem_pos = coll.defaultdict(list) - self._i_lem_var = coll.defaultdict(list) - self._i_lem = coll.defaultdict(list) - self._i_pos = coll.defaultdict(list) - # No index for lookup by var! That's the slow way. - - # Relations: indexed by id and then relation names; the second one - # should be ordered. - self._synrels = {} - self._lexrels = {} - - def lexical_unit_by_id(self, id_): - try: - return self._units[id_] - except KeyError: - raise exc.InvalidLexicalUnitIdentifierException(id_) - - @tup_wrapped - def lexical_units(self, lemma=None, pos=None, variant=None): - if lemma is not None and pos is not None and variant is not None: - # Yield only one unit since it must be it if it exists - try: - yield self._i_lem_pos_var[lemma, PoS(pos), variant] - except KeyError: - pass - finally: - return - - if lemma is not None and pos is not None: - retlist = self._i_lem_pos.get((lemma, PoS(pos)), ()) - elif lemma is not None and variant is not None: - retlist = self._i_lem_var.get((lemma, variant), ()) - elif lemma is not None: - retlist = self._i_lem.get(lemma, ()) - elif pos is not None: - retlist = self._i_pos.get(PoS(pos), ()) - else: - # Hoo boy, it's bad - retlist = self._select_lexunits(lemma, PoS(pos), variant) - - for lu in retlist: - yield lu - - def lexical_unit(self, lemma, pos, variant): - try: - return self._i_lem_pos_var[lemma, PoS(pos), variant] - except KeyError: - raise exc.LexicalUnitNotFound(lemma, pos, variant) - - def synset_by_id(self, id_): - try: - return self._synsets[id_] - except KeyError: - raise exc.InvalidSynsetIdentifierException(id_) - - 
@tup_wrapped - def synsets(self, lemma=None, pos=None, variant=None): - for lu in self.lexical_units(lemma, pos, variant): - yield lu.synset - - def synset(self, lemma, pos, variant): - try: - return self._i_lem_pos_var[lemma, PoS(pos), variant].synset - except KeyError: - raise exc.SynsetNotFound(lemma, pos, variant) - - def synset_relation_edges(self, include=None, exclude=None): - return TupWrapper(self._iter_reledges(self._synrels, include, exclude)) - - def lexical_relation_edges(self, include=None, exclude=None): - return TupWrapper(self._iter_reledges(self._lexrels, include, exclude)) - - def _select_lexunits(self, lemma, pos, variant): - # The "slow way" (indexless) of selecting lexical units - for lu in six.itervalues(self._units): - if ((lemma is None or lemma == lu._lemma) and - (pos is None or pos is lu._pos) and - (variant is None or variant == lu._var)): - yield lu - - def _iter_reledges(self, reledges, include, exclude): - # Ensure those are sets - include = frozenset( - self._rel_resolver.resolve_name(rel) for rel in include - ) if include is not None else None - exclude = frozenset( - self._rel_resolver.resolve_name(rel) for rel in exclude - ) if exclude is not None else None - - for src, reldict in six.iteritems(reledges): - for relname, targets in six.iteritems(reldict): - if ((include is None or relname in include) and - (exclude is None or relname not in exclude)): - for trg in targets: - yield bases.RelationEdge( - source=src, - relation=relname, - target=trg, - ) - - def __read_data(self, reader): - # Nodes need to be separated and sorted before being pushed to indexes. 
- syn_nodes = {} - ordered_synids = [] - lex_nodes = {} - # Ordered AND filtered - ordered_lex_nodes = [] - # The association will remember unit indices - s2u = coll.defaultdict(list) - # Temporary id relation dicts - id_lex_rels = {} - id_syn_rels = {} - - for node in reader: - if isinstance(node, nd.SynsetNode): - syn_nodes[node.id] = node - else: - lex_nodes[node.id] = node - - # First iterate over lex nodes to establish the unit-synset - # relationships and sort out synsets and lexunits that don't exist. - for lex_node in six.itervalues(lex_nodes): - if lex_node.synset not in syn_nodes: - _log.warning( - 'Synset %d from unit %d does not exist', - lex_node.id, - lex_node.synset, - ) - else: - s2u[lex_node.synset].append((lex_node.unit_index, lex_node.id)) - ordered_synids.append(lex_node.synset) - ordered_lex_nodes.append(lex_node) - - # Sort by lemma! - ordered_lex_nodes.sort(key=lambda node: text_key(node.lemma)) - - # Insert lexical unit objects into ordered dict - for lex_node in ordered_lex_nodes: - self._units[lex_node.id] = LexicalUnit( - self, - lex_node.id, - lex_node.lemma, - lex_node.pos, - lex_node.variant, - lex_node.synset, - lex_node.definition, - tuple(lex_node.usage_notes), - tuple(lex_node.external_links), - tuple(lex_node.examples), - tuple(lex_node.examples_sources), - lex_node.domain, - lex_node.verb_aspect, - lex_node.emotion_markedness, - tuple(lex_node.emotion_names), - tuple(lex_node.emotion_valuations), - lex_node.emotion_example_1, - lex_node.emotion_example_2, - ) - - self.__fill_id_reldict(lex_node, id_lex_rels, lex_nodes) - - # Now, insert synsets in the right order - for synid in ordered_synids: - if synid in self._synsets: - continue - - syn_node = syn_nodes[synid] - # Sort units by index first - synunits = s2u[synid] - synunits.sort(key=op.itemgetter(0)) - - self._synsets[synid] = Synset( - self, - synid, - (it[1] for it in synunits), - syn_node.definition, - ) - - # Relations are done similarly to lex ones - 
self.__fill_id_reldict(syn_node, id_syn_rels, syn_nodes) - - # But what if there are synsets that have no units? - for synid in syn_nodes: - if synid not in self._synsets: - _log.warning('Synset %d has no units', synid) - - # We can convert id rel dicts now - self.__gen_item_reldict(id_lex_rels, self._lexrels, self._units) - self.__gen_item_reldict(id_syn_rels, self._synrels, self._synsets) - - # We can build indexes now - for lu in six.itervalues(self._units): - self._i_lem_pos_var[lu._lemma, lu._pos, lu._var] = lu - self._i_lem_pos[lu._lemma, lu._pos].append(lu) - self._i_lem_var[lu._lemma, lu._var].append(lu) - self._i_lem[lu._lemma].append(lu) - self._i_pos[lu._pos].append(lu) - - -class LexicalUnit(bases.LexicalUnitBase): - - __slots__ = ( - '_relr', - '_wn', - '_id', - '_lemma', - '_pos', - '_var', - '_synid', - '_def', - '_usn', - '_extl', - '_exms', - '_exms_srcs', - '_dom', - '_va', - '_emo_mark', - '_emo_names', - '_emo_valuations' - '_emo_ex1', - '_emo_ex2', - ) - - def __init__(self, - wn, - lexid, - lemma, - pos, - variant, - synid, - def_, - usn, - extl, - exms, - exms_srcs, - dom, - va, - emo_mark, - emo_names, - emo_valuations, - emo_ex1, - emo_ex2): - """**NOTE:** This constructor should not be called directly. - - Use :class:`PLWordNet` methods to obtain lexical units. 
- """ - self._relr = get_default_relation_resolver() - - self._wn = wn - self._id = lexid - self._lemma = lemma - self._pos = pos - self._var = variant - self._synid = synid - self._def = def_ - self._usn = usn - self._extl = extl - self._exms = exms - self._exms_srcs = exms_srcs - self._dom = dom - self._va = va - self._emo_mark = emo_mark - self._emo_names = emo_names - self._emo_valuations = emo_valuations - self._emo_ex1 = emo_ex1 - self._emo_ex2 = emo_ex2 - - @property - def id(self): - return self._id - - @property - def lemma(self): - return self._lemma - - @property - def pos(self): - return self._pos - - @property - def variant(self): - return self._var - - @property - def synset(self): - return self._wn._synsets[self._synid] - - @property - def definition(self): - return self._def - - @property - def sense_examples(self): - return self._exms - - @property - def sense_examples_sources(self): - return self._exms_srcs - - @property - def external_links(self): - return self._extl - - @property - def usage_notes(self): - return self._usn - - @property - def domain(self): - return self._dom - - @property - def verb_aspect(self): - return self._va - - @property - def emotion_markedness(self): - return self._emo_mark - - @property - def emotion_names(self): - return self._emo_names - - @property - def emotion_valuations(self): - return self._emo_valuations - - @property - def emotion_example(self): - return self._emo_ex1 - - @property - def emotion_example_secondary(self): - return self._emo_ex2 - - @property - def relations(self): - # Not caching, since this is an informational method that will probably - # not be called very often. - # The rel dicts should be an ordered dict with relation names as keys. 
- return tuple(self._wn._lexrels[self._id]) - - def related(self, relation_name): - relname = self._rel_resolver.resolve_name(relation_name) - reldict = self._wn._lexrels[self._id] - try: - return TupWrapper(iter(reldict[relname])) - except KeyError: - raise exc.InvalidRelationNameException(relation_name) - - -class Synset(bases.SynsetBase): - - __slots__ = '_relr', '_wn', '_id', '_units', '_def' - - def __init__(self, wn, synid, unit_ids, def_): - """**NOTE:** This constructor should not be called directly. - - Use :class:`PLWordNet` methods to obtain synsets. - """ - self._relr = get_default_relation_resolver() - - self._wn = wn - self._id = synid - self._units = tuple(wn._units[uid] for uid in unit_ids) - self._def = def_ - - @property - def id(self): - return self._id - - @property - def lexical_units(self): - return self._units - - @property - def definition(self): - return self._def - - @property - def relations(self): - # Not caching, since this is an informational method that will probably - # not be called very often. - # The rel dicts should be an ordered dict with relation names as keys. - return tuple(self._wn._synrels[self._id]) - - def related(self, relation_name): - relname = self._rel_resolver.resolve_name(relation_name) - reldict = self._wn._synrels[self._id] - try: - return TupWrapper(iter(reldict[relname])) - except KeyError: - raise exc.InvalidRelationNameException(relation_name) - - -_this_storage_ = PLWordNet diff --git a/plwn/storages/sqlite.py b/plwn/storages/sqlite.py index 4c01856..49235d3 100644 --- a/plwn/storages/sqlite.py +++ b/plwn/storages/sqlite.py @@ -1,3 +1,20 @@ +# coding: utf8 + +# Copyright (C) 2017 MichaÅ‚ KaliÅ„ski +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. + """Implementation that stores data from plWordNet in a sqlite databse file. With an impromptu schema. @@ -10,7 +27,7 @@ except ImportError: pass import sqlite3 -from collections import defaultdict +import collections as coll from contextlib import closing import errno import itertools as itt @@ -24,75 +41,74 @@ import weakref import six from ..readers import nodes as nd -from ..enums import ( - PoS, - VerbAspect, - EmotionMarkedness, - EmotionName, - EmotionValuation, - Domain, +from ..utils.artifilter import ( + filter_artificial_related_synsets, + filter_artificial_synset_edges, ) -from ..relresolver import get_default_relation_resolver -from ..utils.tupwrap import tup_wrapped, TupWrapper -from .. import bases, exceptions as exc +from ..utils.relinfotuple import RelationInfoTuple +from .. import bases as bs, exceptions as exc, enums as en + +__all__ = 'PLWordNet', 'Synset', 'LexicalUnit', 'RelationInfo' -__all__ = 'PLWordNet', 'Synset', 'LexicalUnit' +_LOG = logging.getLogger(__name__) -_log = logging.getLogger(__name__) +# Marker object for data that has not been fetched from the database +_UNFETCHED = object() -# SQL script used to initialize the database. +# SQL script used to initialize the database. {{{ # "locale" collation must be defined on the connection before this is executed. _DB_SCHEMA_SCRIPT = u""" PRAGMA foreign_keys = ON; --- Metadata table. Used for version number, currently. -CREATE TABLE IF NOT EXISTS plwn_meta ( +-- Metadata table. 
Used for version number, currently +CREATE TABLE plwn_meta ( name TEXT UNIQUE NOT NULL, value BLOB ); -- Tables for constant values -CREATE TABLE IF NOT EXISTS pos ( +CREATE TABLE pos ( id INTEGER PRIMARY KEY, value TEXT UNIQUE NOT NULL ); -CREATE TABLE IF NOT EXISTS verbaspect ( +CREATE TABLE verbaspect ( id INTEGER PRIMARY KEY, value TEXT UNIQUE NOT NULL ); -CREATE TABLE IF NOT EXISTS emotionmark ( +CREATE TABLE emotionmark ( id INTEGER PRIMARY KEY, value TEXT UNIQUE NOT NULL ); -CREATE TABLE IF NOT EXISTS emotionname ( +CREATE TABLE emotionname ( id INTEGER PRIMARY KEY, value TEXT UNIQUE NOT NULL COLLATE locale ); -CREATE TABLE IF NOT EXISTS emotionvaluation ( +CREATE TABLE emotionvaluation ( id INTEGER PRIMARY KEY, value TEXT UNIQUE NOT NULL COLLATE locale ); -CREATE TABLE IF NOT EXISTS domain ( +CREATE TABLE domain ( id INTEGER PRIMARY KEY, value TEXT UNIQUE NOT NULL COLLATE locale ); -- Synset only gets one simple table -CREATE TABLE IF NOT EXISTS synset ( +CREATE TABLE synset ( id INTEGER PRIMARY KEY, - definition TEXT NOT NULL COLLATE locale + definition TEXT COLLATE locale, + isartificial INTEGER NOT NULL DEFAULT 0 ); -- Lexical units have several tables, since they have several list-like -- properties. They also need indexes for lookup. 
-CREATE TABLE IF NOT EXISTS lexicalunit ( +CREATE TABLE lexicalunit ( id INTEGER PRIMARY KEY, lemma TEXT NOT NULL COLLATE locale, pos INTEGER NOT NULL @@ -101,11 +117,12 @@ CREATE TABLE IF NOT EXISTS lexicalunit ( synset INTEGER NOT NULL REFERENCES synset (id), unitindex INTEGER NOT NULL, - definition TEXT NOT NULL COLLATE locale, + definition TEXT COLLATE locale, domain INTEGER NOT NULL REFERENCES domain (id), verbaspect INTEGER REFERENCES verbaspect (id), + isemotional INTEGER, emotionmark INTEGER REFERENCES emotionmark (id), emotionexample1 TEXT COLLATE locale, @@ -119,34 +136,34 @@ CREATE TABLE IF NOT EXISTS lexicalunit ( -- lem-pos-var and synset-unitindex indexes (and partial ones) are -- automatically made because of UNIQUE constraint, but additional indexes -- need to be created. -CREATE INDEX IF NOT EXISTS lex_i_lem_var ON lexicalunit (lemma, variant); -CREATE INDEX IF NOT EXISTS lex_i_pos ON lexicalunit (pos); +CREATE INDEX lex_i_lem_var ON lexicalunit (lemma, variant); +CREATE INDEX lex_i_pos ON lexicalunit (pos); -- No index for variant itself - it's not an useful use case -- Tables dependant on lexicalunit -CREATE TABLE IF NOT EXISTS senseexample ( +CREATE TABLE senseexample ( unitid INTEGER NOT NULL REFERENCES lexicalunit (id), example TEXT NOT NULL COLLATE locale, source TEXT NOT NULL COLLATE locale ); -CREATE INDEX IF NOT EXISTS sen_i ON senseexample (unitid); +CREATE INDEX sen_i ON senseexample (unitid); -CREATE TABLE IF NOT EXISTS externallink ( +CREATE TABLE externallink ( unitid INTEGER NOT NULL REFERENCES lexicalunit (id), link TEXT NOT NULL COLLATE locale ); -CREATE INDEX IF NOT EXISTS link_i ON externallink (unitid); +CREATE INDEX link_i ON externallink (unitid); -CREATE TABLE IF NOT EXISTS usagenote ( +CREATE TABLE usagenote ( unitid INTEGER NOT NULL REFERENCES lexicalunit (id), note TEXT NOT NULL COLLATE locale ); -CREATE INDEX IF NOT EXISTS note_i ON usagenote (unitid); +CREATE INDEX note_i ON usagenote (unitid); -CREATE TABLE IF NOT 
EXISTS unitemotionname ( +CREATE TABLE unitemotionname ( unitid INTEGER NOT NULL REFERENCES lexicalunit (id), nameid INTEGER NOT NULL @@ -155,7 +172,7 @@ CREATE TABLE IF NOT EXISTS unitemotionname ( PRIMARY KEY (unitid, nameid) ); -CREATE TABLE IF NOT EXISTS unitemotionvaluation ( +CREATE TABLE unitemotionvaluation ( unitid INTEGER NOT NULL REFERENCES lexicalunit (id), valuationid INTEGER NOT NULL @@ -164,41 +181,114 @@ CREATE TABLE IF NOT EXISTS unitemotionvaluation ( PRIMARY KEY (unitid, valuationid) ); --- Relation tables -CREATE TABLE IF NOT EXISTS synsetrelationtype ( +-- Relation tables -- + +-- The for below are used to gather combinations of parent / child relation +-- names. +CREATE TABLE synsetrelationparentpart ( + id INTEGER PRIMARY KEY, + name TEXT UNIQUE NOT NULL COLLATE locale +); +CREATE TABLE synsetrelationchildpart ( id INTEGER PRIMARY KEY, name TEXT UNIQUE NOT NULL COLLATE locale ); -CREATE TABLE IF NOT EXISTS lexicalrelationtype ( +CREATE TABLE lexicalrelationparentpart ( id INTEGER PRIMARY KEY, name TEXT UNIQUE NOT NULL COLLATE locale ); +CREATE TABLE lexicalrelationchildpart ( + id INTEGER PRIMARY KEY, + name TEXT UNIQUE NOT NULL COLLATE locale +); + +-- Next, gather these parts into relation types themselves. +-- Parent can't be NULL - the no-parent case will be handled by a special empty +-- string parent. This is so that UNIQUE works correctly. 
+CREATE TABLE synsetrelationtype ( + id INTEGER PRIMARY KEY, + parentpart INTEGER NOT NULL + REFERENCES synsetrelationparentpart (id), + childpart INTEGER NOT NULL + REFERENCES synsetrelationchildpart (id), + + UNIQUE (parentpart, childpart) +); +CREATE TABLE lexicalrelationtype ( + id INTEGER PRIMARY KEY, + parentpart INTEGER NOT NULL + REFERENCES lexicalrelationparentpart (id), + childpart INTEGER NOT NULL + REFERENCES lexicalrelationchildpart (id), + + UNIQUE (parentpart, childpart) +); -CREATE TABLE IF NOT EXISTS synsetrelation ( - parentid INTEGER NOT NULL +-- The below tables are simply maps of relation aliases to their main IDs. +-- Reverse indexes are needed, too. +CREATE TABLE synsetrelationalias ( + name TEXT PRIMARY KEY NOT NULL COLLATE locale, + relationid INTEGER NOT NULL + REFERENCES synsetrelationtype (id) +); +CREATE INDEX synsetrelationalias_irev ON synsetrelationalias (relationid); +CREATE TABLE lexicalrelationalias ( + name TEXT PRIMARY KEY NOT NULL COLLATE locale, + relationid INTEGER NOT NULL + REFERENCES lexicalrelationtype (id) +); +CREATE INDEX lexicalrelationalias_irev ON lexicalrelationalias (relationid); + +-- Next are finally the relation instances +CREATE TABLE synsetrelation ( + source INTEGER NOT NULL REFERENCES synset (id), - relid INTEGER NOT NULL + relationtype INTEGER NOT NULL REFERENCES synsetrelationtype (id), - childid INTEGER NOT NULL + target INTEGER NOT NULL REFERENCES synset (id), - PRIMARY KEY (parentid, relid, childid) + PRIMARY KEY (source, relationtype, target) ); - -CREATE TABLE IF NOT EXISTS lexicalrelation ( - parentid INTEGER NOT NULL +CREATE TABLE lexicalrelation ( + source INTEGER NOT NULL REFERENCES lexicalunit (id), - relid INTEGER NOT NULL + relationtype INTEGER NOT NULL REFERENCES lexicalrelationtype (id), - childid INTEGER NOT NULL + target INTEGER NOT NULL REFERENCES lexicalunit (id), - PRIMARY KEY (parentid, relid, childid) + PRIMARY KEY (source, relationtype, target) ); -""" - -class 
PLWordNet(bases.PLWordNetBase): +-- Insert the special empty values for the parent part tables +INSERT INTO synsetrelationparentpart (name) VALUES (''); +INSERT INTO lexicalrelationparentpart (name) VALUES (''); +""" # }}} + +_RELTYPE_TABLES = { + en.RelationKind.synset: u'synsetrelationtype', + en.RelationKind.lexical: u'lexicalrelationtype', +} +_RELALIAS_TABLES = { + en.RelationKind.synset: u'synsetrelationalias', + en.RelationKind.lexical: u'lexicalrelationalias', +} +_RELPARENTPART_TABLES = { + en.RelationKind.synset: u'synsetrelationparentpart', + en.RelationKind.lexical: u'lexicalrelationparentpart', +} +_RELCHILDPART_TABLES = { + en.RelationKind.synset: u'synsetrelationchildpart', + en.RelationKind.lexical: u'lexicalrelationchildpart', +} +_RELINST_TABLES = { + en.RelationKind.synset: u'synsetrelation', + en.RelationKind.lexical: u'lexicalrelation', +} + + +class PLWordNet(bs.PLWordNetBase): _STORAGE_NAME = 'sqlite3' _SCHEMA_VERSION = 4 @@ -228,44 +318,11 @@ class PLWordNet(bases.PLWordNetBase): return plwn - @staticmethod - def _make_include_exclude(include, exclude): - """. - - Creates ``WHERE`` clause and the parameter tuple for simple ``IN`` - and ``NOT IN`` case. - """ - if include is not None: - whereclause = u"WHERE name IN ({})".format( - u','.join(itt.repeat(u'?', len(include))) - ) - includetuple = tuple(include) - else: - whereclause = u'' - includetuple = () - - if exclude is not None: - if not whereclause: - whereclause = u"WHERE name NOT IN ({})" - else: - whereclause += u" AND name NOT IN ({})" - - whereclause = whereclause.format( - u','.join(itt.repeat(u'?', len(exclude))) - ) - excludetuple = tuple(exclude) - else: - excludetuple = () - - return whereclause, includetuple + excludetuple - def __init__(self, db_file=None): """**NOTE:** This constructor should not be invoked directly. Use one of the standard methods: ``from_dump`` or ``from_reader``. 
""" - super(PLWordNet, self).__init__() - if db_file is None: self._tmp_dir = tempfile.mkdtemp(prefix='plwn_api-') # Close the file immediately, we just need the @@ -288,19 +345,22 @@ class PLWordNet(bases.PLWordNetBase): self.__drop_tmpdir() raise + self._relcache = _RelCache(self._db) + def close(self): self._db.close() self.__drop_tmpdir() def lexical_units(self, lemma=None, pos=None, variant=None): - return TupWrapper( - LexicalUnit(self._db, *row) + return tuple( + LexicalUnit(self._db, self._relcache, *row) for row in self._select_lexical_units(lemma, pos, variant, True) ) def lexical_unit(self, lemma, pos, variant): return LexicalUnit( self._db, + self._relcache, *self._get_one_lexical_unit( lemma, pos, @@ -321,32 +381,31 @@ class PLWordNet(bases.PLWordNetBase): ) row = cur.fetchone() if row is None: - raise exc.InvalidLexicalUnitIdentifierException(id_) - return LexicalUnit(self._db, id_, *row) + raise exc.LexicalUnitNotFound('id=' + repr(id_)) + return LexicalUnit(self._db, self._relcache, id_, *row) - @tup_wrapped def lexical_relation_edges(self, include=None, exclude=None): - parsed_include = frozenset( - self._rel_resolver.resolve_name(rel) for rel in include - ) if include is not None else None - - parsed_exclude = frozenset( - self._rel_resolver.resolve_name(rel) for rel in exclude - ) if exclude is not None else None - - whereclause, paramtuple = self._make_include_exclude( + parsed_include = frozenset(itt.chain.from_iterable( + self._relcache.get_ids(rel, en.RelationKind.lexical) + for rel in include + )) if include is not None else None + parsed_exclude = frozenset(itt.chain.from_iterable( + self._relcache.get_ids(rel, en.RelationKind.lexical) + for rel in exclude + )) if exclude is not None else None + where_clause, param_tuple = _make_include_exclude( parsed_include, parsed_exclude, + u'relationtype', ) with closing(self._db.cursor()) as cur: cur.execute( u""" - SELECT parentid, childid, name + SELECT source, target, relationtype FROM 
lexicalrelation - JOIN lexicalrelationtype ON relid = id - """ + whereclause, - paramtuple, + """ + where_clause, + param_tuple, ) lu_q = u""" @@ -356,32 +415,49 @@ class PLWordNet(bases.PLWordNetBase): WHERE lexicalunit.id = ? """ - for parent_id, child_id, rel_name in cur: + edges = [] + + for parent_id, child_id, rel_id in cur: with closing(self._db.cursor()) as cur2: cur2.execute(lu_q, (parent_id,)) par_lu = LexicalUnit( self._db, + self._relcache, parent_id, *cur2.fetchone() ) cur2.execute(lu_q, (child_id,)) chl_lu = LexicalUnit( self._db, + self._relcache, child_id, *cur2.fetchone() ) - yield bases.RelationEdge(par_lu, rel_name, chl_lu) + edges.append(bs.RelationEdge( + par_lu, + self._relcache.get_info_by_id( + rel_id, + en.RelationKind.lexical, + ), + chl_lu, + )) + + return tuple(edges) def synsets(self, lemma=None, pos=None, variant=None): synids = frozenset( row[-1] for row in self._select_lexical_units(lemma, pos, variant, True) ) - return TupWrapper(Synset(self._db, synid) for synid in synids) + return tuple( + Synset(self._db, self._relcache, synid) + for synid in synids + ) def synset(self, lemma, pos, variant): return Synset( self._db, + self._relcache, self._get_one_lexical_unit( lemma, pos, @@ -397,38 +473,80 @@ class PLWordNet(bases.PLWordNetBase): (id_,), ) if not cur.fetchone()[0]: - raise exc.InvalidSynsetIdentifierException(id_) - return Synset(self._db, id_) - - @tup_wrapped - def synset_relation_edges(self, include=None, exclude=None): - parsed_include = frozenset( - self._rel_resolver.resolve_name(rel) for rel in include - ) if include is not None else None - - parsed_exclude = frozenset( - self._rel_resolver.resolve_name(rel) for rel in exclude - ) if exclude is not None else None - - whereclause, paramtuple = self._make_include_exclude( + raise exc.SynsetNotFound('id=' + repr(id_)) + return Synset(self._db, self._relcache, id_) + + def synset_relation_edges(self, + include=None, + exclude=None, + skip_artificial=True): + parsed_include = 
frozenset(itt.chain.from_iterable( + self._relcache.get_ids(rel, en.RelationKind.synset) + for rel in include + )) if include is not None else None + parsed_exclude = frozenset(itt.chain.from_iterable( + self._relcache.get_ids(rel, en.RelationKind.synset) + for rel in exclude + )) if exclude is not None else None + where_clause, param_tuple = _make_include_exclude( parsed_include, parsed_exclude, + u'relationtype', ) + select_clause = u"SELECT source, target, relationtype" + from_clause = u"FROM synsetrelation" + + # Pre-fetch artificial status if skipping is necessary + if skip_artificial: + select_clause += u", parentsyn.isartificial, childsyn.isartificial" + from_clause += ( + u" JOIN synset AS parentsyn ON parentsyn.id = source" + u" JOIN synset AS childsyn ON childsyn.id = target" + ) + yield_edges = self.__syn_edges_withskip + else: + yield_edges = self.__syn_edges_noskip + with closing(self._db.cursor()) as cur: cur.execute( - u""" - SELECT parentid, childid, name - FROM synsetrelation JOIN synsetrelationtype ON relid = id - """ + whereclause, - paramtuple, + u'\n'.join((select_clause, from_clause, where_clause)), + param_tuple, + ) + return tuple(yield_edges(cur)) + + def relations_info(self, name=None, kind=None): + if name is None: + return ( + itt.chain( + self._relcache.get_all_of_kind(en.RelationKind.synset), + self._relcache.get_all_of_kind(en.RelationKind.lexical), + ) + if kind is None + else self._relcache.get_all_of_kind( + en.RelationKind(kind), + ) ) - for parent_id, child_id, rel_name in cur: - yield bases.RelationEdge( - Synset(self._db, parent_id), - rel_name, - Synset(self._db, child_id), + else: + return ( + itt.chain( + self._relcache.get_infos_by_name( + name, + en.RelationKind.synset, + allow_nonexistent=True, + ), + self._relcache.get_infos_by_name( + name, + en.RelationKind.lexical, + allow_nonexistent=True, + ), ) + if kind is None + else self._relcache.get_infos_by_name( + name, + en.RelationKind(kind), + ) + ) def 
_select_lexical_units(self, lemma, pos, variant, defval): with closing(self._db.cursor()) as cur: @@ -443,22 +561,27 @@ class PLWordNet(bases.PLWordNetBase): """, { u'lem': lemma, - u'pos': PoS(pos).value if pos else None, + u'pos': en.PoS(pos).value if pos else None, u'var': variant, u'defval': defval, }, ) - for row in cur: - yield row + return cur.fetchall() def _get_one_lexical_unit(self, lemma, pos, variant, exc_class): # False by default will force-return nothing if any is None - lu_rows = iter(self._select_lexical_units(lemma, pos, variant, False)) + lu_rows = self._select_lexical_units(lemma, pos, variant, False) try: - lu_row = next(lu_rows) - except StopIteration: - raise exc_class(lemma, pos, variant) - assert next(lu_rows, None) is None + lu_row = lu_rows[0] + except IndexError: + raise exc_class( + 'lemma={!r}, pos={!r}, variant={!r}'.format( + lemma, + pos, + variant, + ), + ) + assert len(lu_rows) == 1 return lu_row def __init_db(self): @@ -467,27 +590,27 @@ class PLWordNet(bases.PLWordNetBase): with self._db: self._db.executemany( u"INSERT OR IGNORE INTO pos (value) VALUES (?)", - ((p.value,) for p in PoS), + ((p.value,) for p in en.PoS), ).close() self._db.executemany( u"INSERT OR IGNORE INTO verbaspect (value) VALUES (?)", - ((va.value,) for va in VerbAspect), + ((va.value,) for va in en.VerbAspect), ).close() self._db.executemany( u"INSERT OR IGNORE INTO emotionmark (value) VALUES (?)", - ((em.value,) for em in EmotionMarkedness), + ((em.value,) for em in en.EmotionMarkedness), ).close() self._db.executemany( u"INSERT OR IGNORE INTO emotionname (value) VALUES (?)", - ((en.value,) for en in EmotionName), + ((en.value,) for en in en.EmotionName), ).close() self._db.executemany( u"INSERT OR IGNORE INTO emotionvaluation (value) VALUES (?)", - ((ev.value,) for ev in EmotionValuation), + ((ev.value,) for ev in en.EmotionValuation), ).close() self._db.executemany( u"INSERT OR IGNORE INTO domain (value) VALUES (?)", - ((dm.value,) for dm in Domain), + 
((dm.value,) for dm in en.Domain), ).close() # Insert version if the database is new @@ -527,62 +650,54 @@ class PLWordNet(bases.PLWordNetBase): if e.errno != errno.ENOENT: raise + def __syn_edges_noskip(self, rowiter): + for parent_id, child_id, rel_id in rowiter: + yield bs.RelationEdge( + Synset(self._db, self._relcache, parent_id), + self._relcache.get_info_by_id(rel_id, en.RelationKind.synset), + Synset(self._db, self._relcache, child_id), + ) + + def __syn_edges_withskip(self, rowiter): + return filter_artificial_synset_edges( + bs.RelationEdge( + Synset(self._db, self._relcache, parent_id, bool(parent_art)), + self._relcache.get_info_by_id(rel_id, en.RelationKind.synset), + Synset(self._db, self._relcache, child_id, bool(child_art)), + ) + for parent_id, child_id, rel_id, parent_art, child_art in rowiter + ) -class LexicalUnit(bases.LexicalUnitBase): - - __slots__ = ( - '_relr', - '_db', - '_id', - '_lemma', - '_pos', - '_var', - '_synid', - '_syn', - '_def', - '_usn', - '_extl', - '_exms', - '_exms_srcs', - '_dom', - '_va', - '_emo_mark', - '_emo_names', - '_emo_valuations' - '_emo_ex1', - '_emo_ex2', - ) - # Since ``None`` is a valid value for verb_aspect, this is a sentinel value - _NO_VAL = object() +class LexicalUnit(bs.LexicalUnitBase): - def __init__(self, conn, id_, lemma, pos, variant, synid): + def __init__(self, conn, relcache, id_, lemma, pos, variant, synid): """**NOTE:** This constructor should not be called directly. Use :class:`PLWordNet` methods to obtain lexical units. 
""" - self._relr = get_default_relation_resolver() - self._db = conn + self._relcache = relcache self._id = id_ self._lemma = lemma - self._pos = PoS(pos) + self._pos = en.PoS(pos) self._var = variant self._synid = synid # Rest is unitialized - self._syn = self._NO_VAL - self._def = self._NO_VAL - self._usn = self._NO_VAL - self._extl = self._NO_VAL - self._exms = self._NO_VAL - self._exms_srcs = self._NO_VAL - self._dom = self._NO_VAL - self._va = self._NO_VAL - self._emo_mark = self._NO_VAL - self._emo_names = self._NO_VAL - self._emo_valuations = self._NO_VAL - self._emo_ex1 = self._NO_VAL - self._emo_ex2 = self._NO_VAL + self._syn = _UNFETCHED + self._def = _UNFETCHED + self._usn = _UNFETCHED + self._extl = _UNFETCHED + self._exms = _UNFETCHED + self._exms_srcs = _UNFETCHED + self._dom = _UNFETCHED + self._va = _UNFETCHED + self._is_emo = _UNFETCHED + self._emo_mark = _UNFETCHED + self._emo_names = _UNFETCHED + self._emo_valuations = _UNFETCHED + self._emo_ex1 = _UNFETCHED + self._emo_ex2 = _UNFETCHED @property def id(self): @@ -600,10 +715,18 @@ class LexicalUnit(bases.LexicalUnitBase): def variant(self): return self._var + @property + def is_polish(self): + return self._pos.is_polish + + @property + def is_english(self): + return self._pos.is_english + @property def synset(self): - if self._syn is self._NO_VAL or self._syn() is None: - syn = Synset(self._db, self._synid) + if self._syn is _UNFETCHED or self._syn() is None: + syn = Synset(self._db, self._relcache, self._synid) # Use weakref to avoid circular refrence to synset self._syn = weakref.ref(syn) return syn @@ -611,20 +734,18 @@ class LexicalUnit(bases.LexicalUnitBase): @property def definition(self): - if self._def is self._NO_VAL: + if self._def is _UNFETCHED: with closing(self._db.cursor()) as cur: cur.execute( u"SELECT definition FROM lexicalunit WHERE id = ?", (self._id,), ) - row = cur.fetchone() - assert row is not None - self._def = row[0] if row[0] is not None else '' + self._def = 
cur.fetchone()[0] return self._def @property def sense_examples(self): - if self._exms is self._NO_VAL: + if self._exms is _UNFETCHED: with closing(self._db.cursor()) as cur: cur.execute( u"SELECT example FROM senseexample WHERE unitid = ?", @@ -635,7 +756,7 @@ class LexicalUnit(bases.LexicalUnitBase): @property def sense_examples_sources(self): - if self._exms_srcs is self._NO_VAL: + if self._exms_srcs is _UNFETCHED: with closing(self._db.cursor()) as cur: cur.execute( u"SELECT source FROM senseexample WHERE unitid = ?", @@ -646,7 +767,7 @@ class LexicalUnit(bases.LexicalUnitBase): @property def external_links(self): - if self._extl is self._NO_VAL: + if self._extl is _UNFETCHED: with closing(self._db.cursor()) as cur: cur.execute( u"SELECT link FROM externallink WHERE unitid = ?", @@ -657,7 +778,7 @@ class LexicalUnit(bases.LexicalUnitBase): @property def usage_notes(self): - if self._usn is self._NO_VAL: + if self._usn is _UNFETCHED: with closing(self._db.cursor()) as cur: cur.execute( u"SELECT note FROM usagenote WHERE unitid = ?", @@ -668,7 +789,7 @@ class LexicalUnit(bases.LexicalUnitBase): @property def domain(self): - if self._dom is self._NO_VAL: + if self._dom is _UNFETCHED: with closing(self._db.cursor()) as cur: cur.execute( u""" @@ -679,14 +800,12 @@ class LexicalUnit(bases.LexicalUnitBase): """, (self._id,), ) - row = cur.fetchone() - assert row is not None - self._dom = Domain(row[0]) + self._dom = en.Domain(cur.fetchone()[0]) return self._dom @property def verb_aspect(self): - if self._va is self._NO_VAL: + if self._va is _UNFETCHED: with closing(self._db.cursor()) as cur: cur.execute( u""" @@ -698,12 +817,24 @@ class LexicalUnit(bases.LexicalUnitBase): (self._id,), ) row = cur.fetchone() - self._va = None if row is None else VerbAspect(row[0]) + self._va = None if row is None else en.VerbAspect(row[0]) return self._va + @property + def is_emotional(self): + if self._is_emo is _UNFETCHED: + with closing(self._db.cursor()) as cur: + cur.execute( + 
u"SELECT isemotional FROM lexicalunit WHERE id = ?", + (self._id,), + ) + rowval = cur.fetchone()[0] + self._is_emo = None if rowval is None else bool(rowval) + return self._is_emo + @property def emotion_markedness(self): - if self._emo_mark is self._NO_VAL: + if self._emo_mark is _UNFETCHED: with closing(self._db.cursor()) as cur: cur.execute( u""" @@ -715,12 +846,16 @@ class LexicalUnit(bases.LexicalUnitBase): (self._id,), ) row = cur.fetchone() - self._emo_mark = None if row is None else EmotionMarkedness(row[0]) + self._emo_mark = ( + None + if row is None + else en.EmotionMarkedness(row[0]) + ) return self._emo_mark @property def emotion_names(self): - if self._emo_names is self._NO_VAL: + if self._emo_names is _UNFETCHED: with closing(self._db.cursor()) as cur: cur.execute( u""" @@ -732,12 +867,12 @@ class LexicalUnit(bases.LexicalUnitBase): """, (self._id,), ) - self._emo_names = tuple(EmotionName(row[0]) for row in cur) + self._emo_names = tuple(en.EmotionName(row[0]) for row in cur) return self._emo_names @property def emotion_valuations(self): - if self._emo_valuations is self._NO_VAL: + if self._emo_valuations is _UNFETCHED: with closing(self._db.cursor()) as cur: cur.execute( u""" @@ -751,14 +886,14 @@ class LexicalUnit(bases.LexicalUnitBase): (self._id,), ) self._emo_valuations = tuple( - EmotionValuation(row[0]) + en.EmotionValuation(row[0]) for row in cur ) return self._emo_valuations @property def emotion_example(self): - if self._emo_ex1 is self._NO_VAL: + if self._emo_ex1 is _UNFETCHED: with closing(self._db.cursor()) as cur: cur.execute( u"SELECT emotionexample1 FROM lexicalunit WHERE id = ?", @@ -769,7 +904,7 @@ class LexicalUnit(bases.LexicalUnitBase): @property def emotion_example_secondary(self): - if self._emo_ex2 is self._NO_VAL: + if self._emo_ex2 is _UNFETCHED: with closing(self._db.cursor()) as cur: cur.execute( u"SELECT emotionexample2 FROM lexicalunit WHERE id = ?", @@ -780,72 +915,126 @@ class LexicalUnit(bases.LexicalUnitBase): 
@property def relations(self): - # Not caching, since this is an informational method that will probably - # not be called very often + # Not caching, since this is an informative method that will probably + # not be called very often. with closing(self._db.cursor()) as cur: cur.execute( u""" - SELECT DISTINCT name - FROM lexicalrelation JOIN lexicalrelationtype ON id = relid - WHERE parentid = ? - ORDER BY name + SELECT DISTINCT relationtype + FROM lexicalrelation + WHERE source = ? """, (self._id,), ) - return tuple(row[0] for row in cur) - - def related(self, relation_name): - relname = self._relr.resolve_name(relation_name) + return RelationInfoTuple(sorted( + self._relcache.get_info_by_id(row[0], en.RelationKind.lexical) + for row in cur + )) + + def related(self, relation_id=None): + relinfos = _parse_related_relid( + relation_id, + self._relcache, + en.RelationKind.lexical, + ) with closing(self._db.cursor()) as cur: cur.execute( - u"SELECT id FROM lexicalrelationtype WHERE name = ?", - (relname,), + u""" + SELECT lexicalunit.id, lemma, pos.value, variant, synset + FROM lexicalrelation + JOIN lexicalunit ON lexicalunit.id = target + JOIN pos ON lexicalunit.pos = pos.id + WHERE source = ? 
{} + """.format(_make_relationtype_where(relinfos)), + tuple(itt.chain( + (self._id,), + (ri._id for ri in (relinfos or ())), + )), + ) + return tuple( + LexicalUnit(self._db, self._relcache, *row) + for row in cur ) - row = cur.fetchone() - if row is None: - raise exc.InvalidRelationNameException(relation_name) - return TupWrapper(self.__related_gen(row[0])) - def __related_gen(self, relid): + def related_pairs(self, relation_id=None): + relinfos = _parse_related_relid( + relation_id, + self._relcache, + en.RelationKind.lexical, + ) with closing(self._db.cursor()) as cur: cur.execute( u""" - SELECT lexicalunit.id, lemma, pos.value, variant, synset + SELECT relationtype, + lexicalunit.id, lemma, pos.value, variant, synset FROM lexicalrelation - JOIN lexicalunit ON lexicalunit.id = childid + JOIN lexicalunit ON lexicalunit.id = target JOIN pos ON lexicalunit.pos = pos.id - WHERE parentid = ? AND relid = ? - """, - (self._id, relid), + WHERE source = ? {} + """.format(_make_relationtype_where(relinfos)), + tuple(itt.chain( + (self._id,), + (ri._id for ri in (relinfos or ())), + )), + ) + return tuple( + ( + self._relcache.get_info_by_id( + row[0], + en.RelationKind.lexical, + ), + LexicalUnit(self._db, self._relcache, *row[1:]), + ) + for row in cur ) - for row in cur: - yield LexicalUnit(self._db, *row) - -class Synset(bases.SynsetBase): - __slots__ = '_relr', '_db', '_id', '_units', '_def' +class Synset(bs.SynsetBase): - def __init__(self, conn, syn_id): + def __init__(self, conn, relcache, syn_id, syn_art=_UNFETCHED): """**NOTE:** This constructor should not be called directly. Use :class:`PLWordNet` methods to obtain synsets. 
""" - self._relr = get_default_relation_resolver() - self._db = conn + self._relcache = relcache self._id = syn_id + self._isart = syn_art - self._units = None - self._def = None + self._units = _UNFETCHED + self._def = _UNFETCHED + + self._pos = _UNFETCHED + self._is_polish = _UNFETCHED + self._is_english = _UNFETCHED @property def id(self): return self._id + @property + def pos(self): + if self._pos == _UNFETCHED: + (self._pos,) = {unit.pos for unit in self.lexical_units} + return self._pos + + @property + def is_polish(self): + if self._is_polish is _UNFETCHED: + self._is_polish = any(unit.is_polish + for unit in self.lexical_units) + return self._is_polish + + @property + def is_english(self): + if self._is_english is _UNFETCHED: + self._is_english = any(unit.is_english + for unit in self.lexical_units) + return self._is_english + @property def lexical_units(self): - if self._units is None: + if self._units is _UNFETCHED: with closing(self._db.cursor()) as cur: cur.execute( u""" @@ -859,6 +1048,7 @@ class Synset(bases.SynsetBase): self._units = tuple( LexicalUnit( self._db, + self._relcache, row[0], row[1], row[2], @@ -872,7 +1062,7 @@ class Synset(bases.SynsetBase): @property def definition(self): - if self._def is None: + if self._def is _UNFETCHED: with closing(self._db.cursor()) as cur: cur.execute( u"SELECT definition FROM synset WHERE id = ?", @@ -880,61 +1070,254 @@ class Synset(bases.SynsetBase): ) row = cur.fetchone() assert row is not None - self._def = row[0] if row[0] is not None else '' + self._def = row[0] if row[0] is not None else None return self._def + @property + def is_artificial(self): + if self._isart is _UNFETCHED: + with closing(self._db.cursor()) as cur: + cur.execute( + u"SELECT isartificial FROM synset WHERE id = ?", + (self._id,), + ) + row = cur.fetchone() + assert row is not None + self._isart = bool(row[0]) + return self._isart + @property def relations(self): # Not caching, since this is an informational method that will probably 
- # not be called very often + # not be called very often. with closing(self._db.cursor()) as cur: cur.execute( u""" - SELECT DISTINCT name - FROM synsetrelation JOIN synsetrelationtype ON id = relid - WHERE parentid = ? - ORDER BY name + SELECT DISTINCT relationtype + FROM synsetrelation + WHERE source = ? """, (self._id,), ) - return tuple(row[0] for row in cur) + return RelationInfoTuple(sorted( + self._relcache.get_info_by_id(row[0], en.RelationKind.synset) + for row in cur + )) + + def related(self, + relation_id=None, + skip_artificial=True, + _forbidden=None): + + _forbidden = _forbidden or set() + relinfos = _parse_related_relid( + relation_id, + self._relcache, + en.RelationKind.synset, + ) + select_clause = u"SELECT target" + from_clause = u"FROM synsetrelation" + + if skip_artificial: + select_clause += u", synset.isartificial, relationtype" + from_clause += u" JOIN synset ON target = synset.id" + yield_related = self.__related_withskip + else: + yield_related = self.__related_noskip - def related(self, relation_name): - relname = self._relr.resolve_name(relation_name) with closing(self._db.cursor()) as cur: cur.execute( - u"SELECT id FROM synsetrelationtype WHERE name = ?", - (relname,), + u'\n'.join(( + select_clause, + from_clause, + u"WHERE source = ? 
{}".format( + _make_relationtype_where(relinfos), + ), + )), + tuple(itt.chain( + (self._id,), + (ri._id for ri in (relinfos or ())), + )), ) - row = cur.fetchone() - if row is None: - raise exc.InvalidRelationNameException(relation_name) - return TupWrapper(self.__related_gen(row[0])) + return frozenset(yield_related(cur, _forbidden)) + + def related_pairs(self, + relation_id=None, + skip_artificial=True, + _forbidden=None): + + _forbidden = _forbidden or set() + relinfos = _parse_related_relid( + relation_id, + self._relcache, + en.RelationKind.synset, + ) + select_clause = u"SELECT relationtype, target" + from_clause = u"FROM synsetrelation" + + if skip_artificial: + select_clause += u", synset.isartificial" + from_clause += u" JOIN synset ON target = synset.id" + yield_related = self.__related_withskip_pairs + else: + yield_related = self.__related_noskip_pairs - def __related_gen(self, relid): with closing(self._db.cursor()) as cur: cur.execute( - u""" - SELECT childid - FROM synsetrelation - WHERE parentid = ? AND relid = ? - """, - (self._id, relid), + u'\n'.join(( + select_clause, + from_clause, + u"WHERE source = ? 
{}".format( + _make_relationtype_where(relinfos), + ), + )), + tuple(itt.chain( + (self._id,), + (ri._id for ri in (relinfos or ())), + )), + ) + return frozenset(yield_related(cur, _forbidden)) + + def __related_noskip(self, rowiter, forbidden): + return (Synset(self._db, self._relcache, synid) for synid, in rowiter) + + def __related_noskip_pairs(self, rowiter, forbidden): + return ( + ( + self._relcache.get_info_by_id(relid, en.RelationKind.synset), + Synset(self._db, self._relcache, synid), + ) + for relid, synid in rowiter + ) + + def __related_withskip(self, rowiter, forbidden): + return ( + fil_pair[0] + for fil_pair in self.__inner_related_withskip(rowiter, forbidden) + ) + + def __related_withskip_pairs(self, rowiter, forbidden): + re_rowiter = ( + (synid, isart, relid) + for relid, synid, isart in rowiter + ) + return ( + (relinfo, fil_syn) + for fil_syn, relinfo in self.__inner_related_withskip( + re_rowiter, + forbidden, ) - for row in cur: - yield Synset(self._db, row[0]) + ) + + def __inner_related_withskip(self, rowiter, forbidden): + return filter_artificial_related_synsets( + ( + ( + Synset(self._db, self._relcache, synid, isart), + self._relcache.get_info_by_id( + relid, + en.RelationKind.synset, + ), + ) + for synid, isart, relid in rowiter + ), + forbidden, + ) + + +class RelationInfo(bs.RelationInfoBase): + + def __init__(self, db, id_, kind): + """**NOTE:** This constructor should not be called directly. + + Use :class:`PLWordNet` methods to obtain relation info. + """ + self._db = db + # The ID is internal only, and can be used only with ``kind`` + self._id = id_ + self._kind = kind + + self._par = _UNFETCHED + self._name = _UNFETCHED + self._aliases = _UNFETCHED + + @property + def kind(self): + return self._kind + + @property + def parent(self): + if self._par is _UNFETCHED: + with closing(self._db.cursor()) as cur: + cur.execute( + u""" + SELECT name + FROM {parpart} JOIN {reltype} ON {parpart}.id = parentpart + WHERE {reltype}.id = ? 
+ """.format( + parpart=_RELPARENTPART_TABLES[self._kind], + reltype=_RELTYPE_TABLES[self._kind], + ), + (self._id,), + ) + row = cur.fetchone() + assert row is not None + # Convert the bogus '' value back to proper None + self._par = row[0] or None + return self._par + + @property + def name(self): + if self._name is _UNFETCHED: + with closing(self._db.cursor()) as cur: + cur.execute( + u""" + SELECT name + FROM {chlpart} JOIN {reltype} ON {chlpart}.id = childpart + WHERE {reltype}.id = ? + """.format( + chlpart=_RELCHILDPART_TABLES[self._kind], + reltype=_RELTYPE_TABLES[self._kind], + ), + (self._id,), + ) + row = cur.fetchone() + assert row is not None + self._name = row[0] + return self._name + + @property + def aliases(self): + if self._aliases is _UNFETCHED: + with closing(self._db.cursor()) as cur: + cur.execute( + u""" + SELECT name FROM {} + WHERE relationid = ? + ORDER BY name + """.format(_RELALIAS_TABLES[self._kind]), + (self._id,), + ) + self._aliases = tuple(row[0] for row in cur) + return self._aliases class _DBBuilder(object): def __init__(self, db): self._db = db - # Relations need to be added later to weed out nonexistent ones targets - # and avoid foreign key failures (which are a bit obtuse in sqlite3. - self._synrels = {} - self._lexrels = {} + self._node_handlers = { + nd.SynsetNode: self._insert_synset, + nd.LexicalUnitNode: self._insert_unit, + nd.RelationTypeNode: self._insert_relation_type, + } + # Ad-hoc relations (for cases where we don't have relation type nodes) + # need to be added later to weed out nonexistent ones targets and + # avoid foreign key failures (which are a bit obtuse in sqlite3). + self._adhoc_synrels = {} + self._adhoc_lexrels = {} # Synset to lexical units relations also need to be deferred. 
- self._synid2lexids = defaultdict(list) + self._synid2lexids = coll.defaultdict(list) # Cache IDs of constant values with closing(db.execute(u"SELECT value, id FROM pos")) as cur: self._posids = dict(cur) @@ -953,10 +1336,7 @@ class _DBBuilder(object): def __call__(self, reader): with self._db: for node in reader: - if isinstance(node, nd.SynsetNode): - self._insert_synset(node) - else: - self._insert_unit(node) + self._node_handlers[type(node)](node) with self._db: self._finalize_units() @@ -965,45 +1345,60 @@ class _DBBuilder(object): self._prune_empty_synsets() with self._db: - self._finalize_rels(u'synsetrelation', self._synrels) - self._finalize_rels(u'lexicalrelation', self._lexrels) + self._finalize_related( + self._adhoc_synrels, + en.RelationKind.synset, + ) + self._finalize_related( + self._adhoc_lexrels, + en.RelationKind.lexical, + ) def _insert_synset(self, syn_node): self._db.execute( - u"INSERT INTO synset (id, definition) VALUES (?, ?)", - (syn_node.id, syn_node.definition), + u""" + INSERT INTO synset (id, definition, isartificial) + VALUES (?, ?, ?) + """, + (syn_node.id, syn_node.definition, syn_node.is_artificial), ).close() # Related go into temp storage - self._synrels[syn_node.id] = [ - ( - self._ensure_enum_row_id( - u'synsetrelationtype', - u'id', - u'name', - relname, - ), - targetid, - ) - for relname, targetid in syn_node.related - ] + self._adhoc_synrels[syn_node.id] = syn_node.related def _insert_unit(self, lu_node): # Unfortunately, we can't insert into DB until we have all synsets. So # save nodes in temp dict. 
self._synid2lexids[lu_node.synset].append(lu_node) # But deal with relations - self._lexrels[lu_node.id] = [ - ( - self._ensure_enum_row_id( - u'lexicalrelationtype', - u'id', - u'name', - relname, - ), - targetid, + self._adhoc_lexrels[lu_node.id] = lu_node.related + + def _insert_relation_type(self, rel_node): + type_tbl = _RELTYPE_TABLES[rel_node.kind] + parent_tbl = _RELPARENTPART_TABLES[rel_node.kind] + child_tbl = _RELCHILDPART_TABLES[rel_node.kind] + + with closing(self._db.cursor()) as cur: + # Ensure the name is there + parname_id = self._ensure_rel_part_name( + parent_tbl, + rel_node.parent or u'', + ) + childname_id = self._ensure_rel_part_name(child_tbl, rel_node.name) + # And now the relation itself + cur.execute( + u"INSERT INTO {} (parentpart, childpart) VALUES (?, ?)" + .format(type_tbl), + (parname_id, childname_id), ) - for relname, targetid in lu_node.related - ] + # Do aliases if present + if rel_node.aliases: + rel_id = cur.lastrowid + alias_tbl = _RELALIAS_TABLES[rel_node.kind] + cur.executemany( + u"INSERT INTO {} (name, relationid) VALUES (?, ?)" + .format(alias_tbl), + ((nam, rel_id) for nam in rel_node.aliases), + ) def _finalize_units(self): # All synsets are in, can add units now. 
@@ -1017,13 +1412,15 @@ class _DBBuilder(object): id, lemma, pos, variant, synset, unitindex, definition, domain, verbaspect, - emotionmark, emotionexample1, emotionexample2 + isemotional, emotionmark, + emotionexample1, emotionexample2 ) VALUES ( :id, :lemma, :pos, :var, :syn, :uidx, :def, :dom, :va, - :emo_m, :emo_ex1, :emo_ex2 + :emo_is, :emo_m, + :emo_ex1, :emo_ex2 ) """, { @@ -1038,6 +1435,7 @@ class _DBBuilder(object): u'va': None if lu_node.verb_aspect is None else self._vaids[lu_node.verb_aspect.value], + u'emo_is': lu_node.is_emotional, u'emo_m': None if lu_node.emotion_markedness is None else self._emids[ @@ -1047,14 +1445,13 @@ class _DBBuilder(object): u'emo_ex2': lu_node.emotion_example_2, }, ) - except sqlite3.IntegrityError as e: - _log.warning( + except sqlite3.IntegrityError: + _LOG.exception( 'Pair (synset=%d, unitindex=%d) of unit %d ' - 'violates: %r', + 'causes integrity error', lu_node.synset, lu_node.unit_index, lu_node.id, - e.args, ) # Drop relations for this unit, if any self._lexrels.pop(lu_node.id, None) @@ -1071,7 +1468,6 @@ class _DBBuilder(object): lu_node.examples_sources) ), ) - cur.executemany( u""" INSERT INTO usagenote (unitid, note) @@ -1079,7 +1475,6 @@ class _DBBuilder(object): """, ((lu_node.id, note) for note in lu_node.usage_notes), ) - cur.executemany( u""" INSERT INTO externallink (unitid, link) @@ -1088,7 +1483,6 @@ class _DBBuilder(object): ((lu_node.id, link) for link in lu_node.external_links), ) - cur.executemany( u""" INSERT INTO unitemotionname (unitid, nameid) @@ -1099,7 +1493,6 @@ class _DBBuilder(object): for emo_name in lu_node.emotion_names ), ) - cur.executemany( u""" INSERT INTO unitemotionvaluation (unitid, valuationid) @@ -1111,46 +1504,129 @@ class _DBBuilder(object): ), ) - def _ensure_enum_row_id(self, table, id_field, value_field, value): - select_query = u"SELECT {id} FROM {table} WHERE {value} = ?".format( - id=id_field, - table=table, - value=value_field, - ) + def _finalize_related(self, related, 
kind): + # Insert all relation names from the related dict as global-level + # relations, if they have no SEP in them. If such relations are not + # defined, define them. If relation names do have SEP in them, don't + # try defining them, just assume the types are known and try getting ad + # their IDs. with closing(self._db.cursor()) as cur: - cur.execute(select_query, (value,)) - id_row = cur.fetchone() + for source_id, related_pairs in six.iteritems(related): + for relation_name, target_id in related_pairs: + relname_parent, relname_child = RelationInfo.split_name( + relation_name, + ) + try: + rel_id = ( + self._get_child_relation(relname_child, kind) + if relname_parent is None + else self._get_full_relation( + relname_parent, + relname_child, + kind, + ) + ) + except exc.InvalidRelationTypeException: + _LOG.exception( + 'Relation "%s" (between %d --> %d) unknown, ' + 'dropped', + relation_name, + source_id, + target_id, + ) + continue + + try: + cur.execute( + u""" + INSERT INTO {} (source, relationtype, target) + VALUES (?, ?, ?) + """.format(_RELINST_TABLES[kind]), + (source_id, rel_id, target_id), + ) + except sqlite3.IntegrityError: + _LOG.exception( + 'Relation "%s" between %d --> %d causes error, ' + 'dropped', + relation_name, + source_id, + target_id, + ) - if id_row is not None: - return id_row[0] + def _get_child_relation(self, relation_name, kind): + type_tbl = _RELTYPE_TABLES[kind] + parent_tbl = _RELPARENTPART_TABLES[kind] + child_tbl = _RELCHILDPART_TABLES[kind] - insert_query = u"INSERT INTO {table} ({value}) VALUES (?)".format( - table=table, - value=value_field, - ) with closing(self._db.cursor()) as cur: - cur.execute(insert_query, (value,)) + # Get the special empty string parent, since it will be used + # several times. + empty_parent_id = self._ensure_rel_part_name(parent_tbl, u'') + child_id = self._ensure_rel_part_name(child_tbl, relation_name) + # Now, try selecting the relation with empty parent. Otherwise, + # just add it. 
+ cur.execute( + u"SELECT id FROM {} WHERE parentpart = ? AND childpart = ?" + .format(type_tbl), + (empty_parent_id, child_id), + ) + row = cur.fetchone() + + if row is not None: + return row[0] + + cur.execute( + u"INSERT INTO {} (parentpart, childpart) VALUES (?, ?)" + .format(type_tbl), + (empty_parent_id, child_id), + ) return cur.lastrowid - def _finalize_rels(self, tablename, rels_dict): - ins_query = ( - u"INSERT INTO {} (parentid, relid, childid) VALUES (?, ?, ?)" - .format(tablename) - ) + def _get_full_relation(self, parent_name, child_name, kind): + # For full relation names, only try selecting them, not adding the + # types, to reduce complexity. + with closing(self._db.cursor()) as cur: + cur.execute( + u""" + SELECT {reltype}.id + FROM {reltype} + JOIN {parpart} ON parentpart = {parpart}.id + JOIN {chlpart} ON childpart = {chlpart}.id + WHERE {parpart}.name = ? AND {chlpart}.name = ? + """.format( + reltype=_RELTYPE_TABLES[kind], + parpart=_RELPARENTPART_TABLES[kind], + chlpart=_RELCHILDPART_TABLES[kind], + ), + (parent_name, child_name), + ) + row = cur.fetchone() + if row is None: + raise exc.InvalidRelationTypeException( + kind, + (parent_name, child_name), + ) + return row[0] + def _ensure_rel_part_name(self, tbl_name, rel_name): with closing(self._db.cursor()) as cur: - for par_id, chls in six.iteritems(rels_dict): - for rel_id, chl_id in chls: - try: - cur.execute(ins_query, (par_id, rel_id, chl_id)) - except sqlite3.IntegrityError: - _log.warning( - 'Relation typed %s between %d --> %d causes ' - 'IntegrityError, dropped', - tablename, - par_id, - chl_id, - ) + # Is the name in already? 
+ cur.execute( + u"SELECT id FROM {} WHERE name = ?".format(tbl_name), + (rel_name,), + ) + row = cur.fetchone() + + if row is not None: + return row[0] + + # Insert it then + cur.execute( + u"INSERT INTO {} (name) VALUES (?)".format(tbl_name), + (rel_name,), + ) + + return cur.lastrowid def _prune_empty_synsets(self): with closing(self._db.cursor()) as cur: @@ -1169,7 +1645,7 @@ class _DBBuilder(object): return for synid in empties: - _log.warning('Synset %d is empty', synid) + _LOG.warning('Synset %d is empty', synid) self._db.execute( u"DELETE FROM synset WHERE id IN ({})".format( @@ -1179,4 +1655,226 @@ class _DBBuilder(object): ).close() +class _RelCache(object): + + def __init__(self, db): + self._db = db + self._ids = { + en.RelationKind.synset: {}, + en.RelationKind.lexical: {}, + } + self._infos = { + en.RelationKind.synset: _RelCacheInfoDict( + db, + en.RelationKind.synset, + ), + en.RelationKind.lexical: _RelCacheInfoDict( + db, + en.RelationKind.lexical, + ), + } + + def get_ids(self, relname, kind, allow_nonexistent=False): + idcache = self._ids[kind] + + try: + found = idcache[relname] + except KeyError: + found = None + else: + return found + + # If this is a full name (with parent and child), get that. + # Otherwise, check alias, childname and parentname - in that order. + # For bare parentname, return not one ID, but a set of all children + # IDs. + # Finally, if that fails, just raise an exception. 
+ + try: + parent, name = RelationInfo.split_name(relname) + except ValueError: + raise exc.InvalidRelationTypeException(kind, relname) + + if parent is not None: + found = self._find_by_fullname(parent, name, kind) + else: + found = self._find_by_alias(name, kind) + if found is None: + found = self._find_by_childname(name, kind) + if found is None: + found = self._find_by_parentname(name, kind) + + if found is None: + if allow_nonexistent: + return () + else: + raise exc.InvalidRelationTypeException(kind, relname) + + idcache[relname] = found + return found + + def get_infos_by_name(self, relname, kind, allow_nonexistent=False): + infocache = self._infos[kind] + ids = self.get_ids(relname, kind, allow_nonexistent) + return tuple(infocache[id_] for id_ in ids) + + def get_info_by_id(self, id_, kind): + return self._infos[kind][id_] + + def get_all_of_kind(self, kind): + with closing(self._db.cursor()) as cur: + cur.execute(u"SELECT id FROM {}".format(_RELTYPE_TABLES[kind])) + return tuple(self._infos[kind][row[0]] for row in cur) + + def ensure_infos(self, item, kind): + if isinstance(item, RelationInfo): + return item, + if isinstance(item, six.integer_types): + return self.get_info_by_id(item, kind), + if isinstance(item, six.string_types): + return self.get_infos_by_name(item, kind) + raise TypeError( + repr(item) + ' is not an integer, string or RelationInfo', + ) + + def _find_by_fullname(self, parent, child, kind): + with closing(self._db.cursor()) as cur: + cur.execute( + u""" + SELECT {reltype}.id + FROM {reltype} + JOIN {parpart} ON parentpart = {parpart}.id + JOIN {chlpart} ON childpart = {chlpart}.id + WHERE {parpart}.name = ? AND {chlpart}.name = ? 
+ """.format( + reltype=_RELTYPE_TABLES[kind], + parpart=_RELPARENTPART_TABLES[kind], + chlpart=_RELCHILDPART_TABLES[kind], + ), + (parent or u'', child), + ) + row = cur.fetchone() + return None if row is None else tuple(row) + + def _find_by_alias(self, name, kind): + with closing(self._db.cursor()) as cur: + cur.execute( + u"SELECT relationid FROM {} WHERE name = ?".format( + _RELALIAS_TABLES[kind], + ), + (name,), + ) + row = cur.fetchone() + return None if row is None else tuple(row) + + def _find_by_childname(self, name, kind): + with closing(self._db.cursor()) as cur: + cur.execute( + u""" + SELECT {reltype}.id + FROM {reltype} JOIN {chlpart} ON childpart = {chlpart}.id + WHERE name = ? + """.format( + reltype=_RELTYPE_TABLES[kind], + chlpart=_RELCHILDPART_TABLES[kind], + ), + (name,), + ) + rows = cur.fetchall() + if len(rows) > 1: + raise exc.AmbiguousRelationTypeException(name) + return None if not rows else tuple(rows[0]) + + def _find_by_parentname(self, name, kind): + # This one can by design return a set of values: all children of a + # relation. + with closing(self._db.cursor()) as cur: + cur.execute( + u""" + SELECT {reltype}.id + FROM {reltype} JOIN {parpart} ON parentpart = {parpart}.id + WHERE name = ? + """.format( + reltype=_RELTYPE_TABLES[kind], + parpart=_RELPARENTPART_TABLES[kind], + ), + (name,), + ) + return tuple(row[0] for row in cur) or None + + +class _RelCacheInfoDict(dict): + + def __init__(self, db, kind): + super(_RelCacheInfoDict, self).__init__() + self.__db = db + self.__kind = kind + + def __missing__(self, id_): + ri = RelationInfo(self.__db, id_, self.__kind) + self[id_] = ri + return ri + + +def _make_include_exclude(include, exclude, fieldname): + """Creates ``WHERE`` clause and the parameter tuple. + + For simple ``IN`` and ``NOT IN`` case. 
+ """ + if include is not None: + whereclause = u"WHERE {} IN ({})".format( + fieldname, + _qmarks(len(include)), + ) + includetuple = tuple(include) + else: + whereclause = u'' + includetuple = () + + if exclude is not None: + if not whereclause: + whereclause = u"WHERE {} NOT IN ({})" + else: + whereclause += u" AND {} NOT IN ({})" + + whereclause = whereclause.format( + fieldname, + _qmarks(len(exclude)), + ) + excludetuple = tuple(exclude) + else: + excludetuple = () + + return whereclause, includetuple + excludetuple + + +def _parse_related_relid(relid, relcache, relkind): + if relid is None: + return None + + if (isinstance(relid, coll.Iterable) and + not isinstance(relid, six.string_types)): + return frozenset(itt.chain.from_iterable( + relcache.ensure_infos(r, relkind) + for r in relid + )) + + return relcache.ensure_infos(relid, relkind) + + +def _make_relationtype_where(relinfos): + """Create a ``WHERE`` clause appendix. + + For limiting ``related`` queries to sets of relations + """ + return u'' if not relinfos else ( + u'AND relationtype IN ({})'.format(_qmarks(len(relinfos))) + ) + + +def _qmarks(length): + """Create a sequence of question marks for prepared sqlite query.""" + return u','.join(itt.repeat(u'?', length)) + + _this_storage_ = PLWordNet diff --git a/plwn/utils/artifilter.py b/plwn/utils/artifilter.py new file mode 100644 index 0000000..47d9afd --- /dev/null +++ b/plwn/utils/artifilter.py @@ -0,0 +1,112 @@ +# coding: utf8 + +# Copyright (C) 2017 MichaÅ‚ KaliÅ„ski +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. + +"""Wrappers around synset-yielding generators. + +These handle exclusion of artificial synsets. + +The algorithm used here is pretty simple. When a relation edge reaches an +artificial synset, a matching relation is searched in edges originating from +the artificial synset. If found, the node it leads to is treated as target of +the relation from the source node. + +If there is no matching relation, the edge to the artificial synset is treated +as nonexistent. +""" + +# XXX Both functions maintain a set of items that they have already +# yielded, to ensure that the wrappers will not yield the same item more than +# once. +# This makes them slower and uses up more memory, but in practice plWordNet +# structure is very unreliable with regards to not having loops and multiple +# paths. +# Look into removing these set objects only if speed / memory somehow becomes +# a concern. +# XXX At the same time, an assumption about plWordNet structure is made: that +# no lexical unit belonging to an artificial synset is connected to any other +# lexical unit by lexical relations. Surely, that should be easy to maintain? + +from __future__ import absolute_import, division + + +__all__ = ( + 'filter_artificial_related_synsets', + 'filter_artificial_synset_edges', +) + + +def filter_artificial_related_synsets(syn_and_relation_iter, forbidden=None): + """Filter a related synsets iterable. + + Skipping over artificial synsets using the relation paired with each + synset. + + :param syn_and_relation_iter: Iterable of pairs of + ``(target_synset, relation)``. The relation is needed to generate + edges skipping over the artificial synset.
+ :type syn_and_relation_iter: Iterable[Tuple[SynsetBase, RelationInfoBase]] + + :return: The related synsets iterable with artificial synsets dealt with + according to the algorithm. + :rtype: Generator[SynsetBase] + """ + forbidden = forbidden or set() + for target_syn, relation in syn_and_relation_iter: + for filtered_syn in _inner_filter(target_syn, relation, forbidden): + yield filtered_syn, relation + + +def filter_artificial_synset_edges(syn_rel_edges_iter): + """Filter an iterable of synset relation edges. + + Replacing edges to artificial synsets with edges to "next" nodes, + using the relation of the edge. + + Edges ending in artificial nodes are simply discarded. + + :param syn_rel_edges_iter: Iterable of relationship edges between synsets. + :type syn_rel_edges_iter: Iterable[RelationEdge] + + :return: The synset edges iterable with artificial synsets dealt with + according to the algorithm. + :rtype: Generator[RelationEdge] + """ + for edge in syn_rel_edges_iter: + # Drop all edges starting in artificial synsets + if edge.source.is_artificial: + continue + + forbidden = set() + + for filtered_syn in _inner_filter(edge.target, + edge.relation, + forbidden): + yield edge._replace(target=filtered_syn) + + +def _inner_filter(target_syn, relation, forbidden): + if target_syn.id in forbidden: + return + forbidden.add(target_syn.id) + + if target_syn.is_artificial: + # Apply the filter recursively for any artificial synset found in the + # target set. This should not cause recursion more than two-three + # levels deep.
+ for rec_target_syn in target_syn.related(relation, True, forbidden): + yield rec_target_syn + else: + yield target_syn diff --git a/plwn/utils/graphmlout.py b/plwn/utils/graphmlout.py index 910b545..fecf2ff 100644 --- a/plwn/utils/graphmlout.py +++ b/plwn/utils/graphmlout.py @@ -1,19 +1,32 @@ -"""Implementation that stores data from plWordNet as a GraphML tree.""" +# coding: utf8 + +# Copyright (C) 2017 MichaÅ‚ KaliÅ„ski +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. from __future__ import absolute_import, division -try: - str = unicode -except NameError: - pass + import collections as coll import functools as funct import json import xml.etree.cElementTree as et -from six import iteritems +import six + +from .. import enums as en -from ..enums import make_values_tuple __all__ = ( 'GraphMLWordNet', @@ -25,6 +38,7 @@ __all__ = ( 'UNS_IN_SYN', ) + # Constants for graphml exporting (library user should just use the string # values). # They also double as prefixes for IDs. @@ -48,26 +62,32 @@ class GraphMLWordNet(object): #: to string which will be the content of a ``data`` tag. 
_DataType = coll.namedtuple('_DataType', ('typename', 'convert')) - DATA_TYPE_INT = _DataType(u'long', lambda val: str(int(val))) - DATA_TYPE_STR = _DataType(u'string', str) + DATA_TYPE_INT = _DataType(u'long', lambda val: six.text_type(int(val))) + DATA_TYPE_STR = _DataType(u'string', six.text_type) + DATA_TYPE_OPTSTR = _DataType( + u'string', + lambda val: u'' if val is None else six.text_type(val), + ) DATA_TYPE_BOOL = _DataType( u'boolean', - (lambda val: u'true' if val else u'false'), + lambda val: u'true' if val else u'false', ) DATA_TYPE_JSON = _DataType(u'string', json.dumps) - DATA_TYPE_ENUMVAL = _DataType(u'string', lambda val: str(val.value)) + DATA_TYPE_ENUMVAL = _DataType( + u'string', + lambda val: six.text_type(val.value), + ) # Data type for enum that can also be None. DATA_TYPE_OPTENUMVAL = _DataType( u'string', - lambda val: '' if val is None else str(val.value), + lambda val: u'' if val is None else six.text_type(val.value), ) DATA_TYPE_ENUMSEQ = _DataType( u'string', - lambda val: json.dumps(make_values_tuple(val)), + lambda val: json.dumps(en.make_values_tuple(val)), ) def __init__(self): - """Initialize GraphMLWordNet.""" self._root = et.Element( u'graphml', # The commented out xmlns declaration is correct, but inserting @@ -85,9 +105,7 @@ class GraphMLWordNet(object): self._attr_types = {} def add_attribute_type(self, id_, name, type_, for_=u'node'): - """Adds an attribute. - - Which can be then assigned to node or edge instances. + """Adds attribute which can be then assigned to node or edge instances. :param str id_: Unique (in the whole XML) identifier of the attribute type. 
@@ -175,7 +193,7 @@ class GraphMLWordNet(object): self._tree.write(file_, 'utf-8') def _add_attributes_to(self, element, attributes): - for attr_id, attr_val in iteritems(attributes): + for attr_id, attr_val in six.iteritems(attributes): attr_type = self._attr_types[attr_id] attr = et.SubElement( element, @@ -189,8 +207,8 @@ class GraphMLBuilder(object): """Class that bridges. :class:`plwn.bases.PLWordNetBase` and :class:`GraphMLWordNet`, - extracting data from the former and putting it into the latter - in the appropriate format. + extracting data from the former and putting it + into the latter in the appropriate format. This is an auxiliary class which usually shouldn't be constructed directly. Use an appropriate method from :class:`plwn.bases.PLWordNet`. @@ -236,7 +254,8 @@ class GraphMLBuilder(object): included_nodes, excluded_nodes, included_relations, - excluded_relations): + excluded_relations, + skip_artificial_synsets=True): """See :meth:`plwn.bases.PLWordNetBase.to_graphml` for description.""" added_attributes = ( self._add_synset_attrs(included_attributes, excluded_attributes) @@ -247,10 +266,9 @@ class GraphMLBuilder(object): ) visited_nodes = set() - for edge in self._plwn.synset_relation_edges( - included_relations, - excluded_relations, - ): + for edge in self._plwn.synset_relation_edges(included_relations, + excluded_relations, + skip_artificial_synsets): prefixed_source = self._prefix_synset_id( edge.source.id, prefix_ids, @@ -264,12 +282,10 @@ class GraphMLBuilder(object): # added along edges, but it's not a problem if a valid node is not # included, because it will eventually be included by another edge, # if it's not completely secluded (and if it is, we don't want it). 
- if self._check_include_exclude_2( - edge.source.id, - edge.target.id, - included_nodes, - excluded_nodes, - ): + if self._check_include_exclude_2(edge.source.id, + edge.target.id, + included_nodes, + excluded_nodes): if edge.source.id not in visited_nodes: visited_nodes.add(edge.source.id) self._graphout.add_node( @@ -310,7 +326,6 @@ class GraphMLBuilder(object): excluded_nodes, included_relations, excluded_relations): - added_attributes = ( self._add_lexunit_attrs(included_attributes, excluded_attributes) if (include_attributes or @@ -331,12 +346,10 @@ class GraphMLBuilder(object): prefix_ids, ) - if self._check_include_exclude_2( - edge.source.id, - edge.target.id, - included_nodes, - excluded_nodes, - ): + if self._check_include_exclude_2(edge.source.id, + edge.target.id, + included_nodes, + excluded_nodes): if edge.source.id not in visited_nodes: visited_nodes.add(edge.source.id) self._graphout.add_node( @@ -380,7 +393,8 @@ class GraphMLBuilder(object): included_synset_nodes, excluded_synset_nodes, included_lexical_unit_nodes, - excluded_lexical_unit_nodes): + excluded_lexical_unit_nodes, + skip_artificial_synsets=True): synset_attributes = ( self._add_synset_attrs( @@ -414,14 +428,13 @@ class GraphMLBuilder(object): for syn_edge in self._plwn.synset_relation_edges( included_synset_relations, excluded_synset_relations, + skip_artificial_synsets, ): - if self._check_include_exclude_2( - syn_edge.source.id, - syn_edge.target.id, - included_synset_nodes, - excluded_synset_nodes, - ): + if self._check_include_exclude_2(syn_edge.source.id, + syn_edge.target.id, + included_synset_nodes, + excluded_synset_nodes): self._add_mixed_synset_edge( syn_edge, synset_attributes, @@ -437,12 +450,10 @@ class GraphMLBuilder(object): excluded_lexical_unit_relations, ): - if self._check_include_exclude_2( - lex_edge.source.id, - lex_edge.target.id, - included_lexical_unit_nodes, - excluded_lexical_unit_nodes, - ): + if self._check_include_exclude_2(lex_edge.source.id, + 
lex_edge.target.id, + included_lexical_unit_nodes, + excluded_lexical_unit_nodes): self._add_mixed_lexunit_edge( lex_edge, synset_attributes, @@ -684,8 +695,8 @@ class GraphMLBuilder(object): ), ) - includer(u'relations', GraphMLWordNet.DATA_TYPE_JSON) - includer(u'definition', GraphMLWordNet.DATA_TYPE_STR) + includer(u'definition', GraphMLWordNet.DATA_TYPE_OPTSTR) + includer(u'is_artificial', GraphMLWordNet.DATA_TYPE_BOOL) return includer.included_attrs @@ -703,14 +714,14 @@ class GraphMLBuilder(object): includer(u'lemma', GraphMLWordNet.DATA_TYPE_STR) includer(u'pos', GraphMLWordNet.DATA_TYPE_ENUMVAL) includer(u'variant', GraphMLWordNet.DATA_TYPE_INT) - includer(u'definition', GraphMLWordNet.DATA_TYPE_STR) + includer(u'definition', GraphMLWordNet.DATA_TYPE_OPTSTR) includer(u'sense_examples', GraphMLWordNet.DATA_TYPE_JSON) includer(u'sense_examples_sources', GraphMLWordNet.DATA_TYPE_JSON) includer(u'external_links', GraphMLWordNet.DATA_TYPE_JSON) includer(u'usage_notes', GraphMLWordNet.DATA_TYPE_JSON) includer(u'domain', GraphMLWordNet.DATA_TYPE_ENUMVAL) - includer(u'relations', GraphMLWordNet.DATA_TYPE_JSON) includer(u'verb_aspect', GraphMLWordNet.DATA_TYPE_OPTENUMVAL) + includer(u'is_emotional', GraphMLWordNet.DATA_TYPE_BOOL) includer(u'emotion_markedness', GraphMLWordNet.DATA_TYPE_OPTENUMVAL) includer(u'emotion_names', GraphMLWordNet.DATA_TYPE_ENUMSEQ) includer(u'emotion_valuations', GraphMLWordNet.DATA_TYPE_ENUMSEQ) @@ -731,13 +742,13 @@ class GraphMLBuilder(object): def _prefix_synset_id(cls, id_, do_prefix): return (u'{}-{}'.format(GRAPH_TYPE_SYNSET, id_) if do_prefix - else str(id_)) + else six.text_type(id_)) @classmethod def _prefix_lexunit_id(cls, id_, do_prefix): return (u'{}-{}'.format(GRAPH_TYPE_UNIT, id_) if do_prefix - else str(id_)) + else six.text_type(id_)) @staticmethod def _check_include_exclude(item, include_set, exclude_set): @@ -775,9 +786,7 @@ class _AttrIncluder(object): """ def __init__(self, graphout, type_prefix, checkfunc): - """. 
- - :param GraphMLWordNet graphout: The output graph instance. + """:param GraphMLWordNet graphout: The output graph instance. :param str type_prefix: Unique names of attributes will be prefixed with this. diff --git a/plwn/utils/relinfotuple.py b/plwn/utils/relinfotuple.py new file mode 100644 index 0000000..f1290cc --- /dev/null +++ b/plwn/utils/relinfotuple.py @@ -0,0 +1,64 @@ +# coding: utf8 + +# Copyright (C) 2017 MichaÅ‚ KaliÅ„ski +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. + +from __future__ import absolute_import, division + + +from six.moves import range + + +__all__ = 'RelationInfoTuple', + + +class RelationInfoTuple(tuple): + """Tuple subclass for :class:`~plwn.bases.RelationInfoBase` instances. + + Meant as return value for ``relations`` properties. + + Overrides search methods to use :meth:`~plwn.bases.RelationInfoBase.eqv` + for membership testing, to make checking if a synset / unit has some + relation easier. 
+ """ + + __slots__ = () + + def __repr__(self): + return ( + self.__class__.__name__ + + super(RelationInfoTuple, self).__repr__() + ) + + def __contains__(self, item): + return any(rel.eqv(item) for rel in self) + + def index(self, x, i=None, j=None): + rend = min(j, len(self)) if j is not None else len(self) + + for ind in range(i or 0, rend): + if self[ind].eqv(x): + return ind + + raise ValueError(repr(x) + ' not in tuple') + + def count(self, x): + cnt = 0 + + for rel in self: + if rel.eqv(x): + cnt += 1 + + return cnt diff --git a/plwn/utils/sorting.py b/plwn/utils/sorting.py index bd37a82..69cbc58 100644 --- a/plwn/utils/sorting.py +++ b/plwn/utils/sorting.py @@ -1,3 +1,20 @@ +# coding: utf8 + +# Copyright (C) 2017 MichaÅ‚ KaliÅ„ski +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. + """Sorting keys that provide locale-dependant alphabetical sorting.""" from __future__ import absolute_import, division diff --git a/plwn/utils/tupwrap.py b/plwn/utils/tupwrap.py deleted file mode 100644 index 7e94abc..0000000 --- a/plwn/utils/tupwrap.py +++ /dev/null @@ -1,53 +0,0 @@ -"""Wrapper for all functions that return generators. - -Calling the wrapped generator will wrap the contents in a tuple -(as a faster, chaining way or ``tuple(generator)``). 
-""" - -from __future__ import absolute_import, unicode_literals, division - - -from functools import wraps - - -__all__ = 'TupWrapper', 'tup_wrapped' - - -class TupWrapper(object): - """Wrapper class for generator objects. - - Adds a ``__call__`` method which will convert the wrapped generator to - a tuple. - """ - - __slots__ = '_gen', - - def __init__(self, generator): - """Initialize TupWrapper.""" - self._gen = generator - - def __iter__(self): - return self._gen - - def __call__(self): - return tuple(self._gen) - - def __repr__(self): - return '{}({!r})'.format(self.__class__.__name__, self._gen) - - -def tup_wrapped(fn): - """Decorator for functions that return generators. - - The return value of the wrapped function will be wrapped by - :class:`TupWrapper`. - - This decorator is the only way to wrap around the output of generator - functions. - """ - - @wraps(fn) - def decorated(*args, **kwargs): - return TupWrapper(fn(*args, **kwargs)) - - return decorated diff --git a/scripts/clean_wndb.sql b/scripts/clean_wndb.sql new file mode 100644 index 0000000..631b051 --- /dev/null +++ b/scripts/clean_wndb.sql @@ -0,0 +1,32 @@ +-- Clean a plWN database of things that cause the API reading to break. +-- This is ad hoc, for cases when we need one-off dumps for internal usage. +-- Let's not involve SWORD, please. + +-- Remove relations with types that don't exist or one of the endpoints that +-- don't exist. 
+DELETE synsetrelation +FROM synsetrelation + LEFT JOIN relationtype ON REL_ID = relationtype.ID + LEFT JOIN synset AS par_syn ON synsetrelation.PARENT_ID = par_syn.ID + LEFT JOIN synset AS chl_syn ON CHILD_ID = chl_syn.ID +WHERE relationtype.ID IS NULL OR par_syn.ID IS NULL OR chl_syn.ID IS NULL; + +DELETE lexicalrelation +FROM lexicalrelation + LEFT JOIN relationtype ON REL_ID = relationtype.ID + LEFT JOIN lexicalunit AS par_lu ON lexicalrelation.PARENT_ID = par_lu.ID + LEFT JOIN lexicalunit AS chl_lu ON CHILD_ID = chl_lu.ID +WHERE relationtype.ID IS NULL OR par_lu.ID IS NULL OR chl_lu.ID IS NULL; + +-- Remove relations that are instances of parent relations (if a relation +-- has children, only they are legal values.) +DELETE FROM synsetrelation WHERE REL_ID IN ( + SELECT rel_outer.ID + FROM relationtype AS rel_outer + WHERE rel_outer.PARENT_ID IS NULL + AND EXISTS ( + SELECT 1 + FROM relationtype AS rel_inner + WHERE rel_inner.PARENT_ID = rel_outer.ID + ) +); diff --git a/scripts/patch_old_wndb.sql b/scripts/patch_old_wndb.sql new file mode 100644 index 0000000..ea32233 --- /dev/null +++ b/scripts/patch_old_wndb.sql @@ -0,0 +1,21 @@ +-- Used to add the emotion table required for schema 2, though it will remain +-- empty. 
+CREATE TABLE IF NOT EXISTS `emotion` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `lexicalunit_id` bigint(20) NOT NULL, + `emotions` varchar(255) COLLATE utf8_polish_ci DEFAULT NULL, + `valuations` varchar(255) COLLATE utf8_polish_ci DEFAULT NULL, + `markedness` varchar(5) CHARACTER SET utf8 DEFAULT NULL, + `unitStatus` int(1) DEFAULT '0', + `example1` varchar(255) COLLATE utf8_polish_ci DEFAULT NULL, + `example2` varchar(255) COLLATE utf8_polish_ci DEFAULT NULL, + `owner` varchar(255) COLLATE utf8_polish_ci NOT NULL, + `creation_date` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP + ON UPDATE CURRENT_TIMESTAMP, + `super_anotation` int(1) DEFAULT '0', + + PRIMARY KEY (`id`), + KEY `idx` (`lexicalunit_id`) +) ENGINE=MyISAM AUTO_INCREMENT=4300 CHARSET=utf8 COLLATE=utf8_polish_ci; + +ALTER TABLE lexicalunit ADD verb_aspect int(11) DEFAULT '0'; diff --git a/scripts/verify_uby_lmf_file.py b/scripts/verify_uby_lmf_file.py new file mode 100755 index 0000000..9026e1a --- /dev/null +++ b/scripts/verify_uby_lmf_file.py @@ -0,0 +1,87 @@ +#!/usr/bin/python +"""Script that checks an UBY-LMF file containing plWordNet data for consistency. + +Specifically, it checks for the following errors: + * Empty synsets. + * Lexical units belonging to nonexistent synsets. + * Synsets and lexical relations to nonexistent synsets / units + +UBY-LMF module also prints out encountered errors, so be sure to capture the +standard output. +""" + +import argparse as argp +import collections as coll +# import itertools as itt +import logging as log +import sys + +from six import iteritems + +import plwn.ubylmf_reader as ubyr + + +def main(): + ap = argp.ArgumentParser(description=__doc__) + ap.add_argument('uby_lmf_file', help='The file to check.') + + av = ap.parse_args() + # Log every error and warning from the plwn module along with this script's + # output + plwn_log = log.getLogger('plwn') + plwn_log_h = log.StreamHandler(sys.stdout) + plwn_log_h.setFormatter(log.Formatter('!! 
Log from %(name)s: %(message)s')) + plwn_log.addHandler(plwn_log_h) + plwn_log.setLevel(log.WARNING) + + _verify(ubyr.iterread(av.uby_lmf_file)) + + +def _verify(uby_lmf_reader): + synset_defs = set() + unit_defs = set() + synsets2units = coll.defaultdict(list) + synrels = {} + lexrels = {} + + for record in uby_lmf_reader: + if isinstance(record, ubyr.SynsetNode): + synset_defs.add(record.id) + synrels[record.id] = frozenset(item[0] for item in record.related) + else: + unit_defs.add(record.id) + lexrels[record.id] = frozenset(item[0] for item in record.related) + synsets2units[record.synset].append(record.id) + + # Empty synsets + print('!! Empty synsets:') + empties = synset_defs.difference(synsets2units) + if empties: + for synid in empties: + print('S' + str(synid)) + + # Lexical units belonging to nonexistent synsets + print('!! Units belonging to nonexistent synsets:') + syn_nonexistent = unit_defs.difference(synsets2units) + if syn_nonexistent: + for synid in syn_nonexistent: + for lexid in synsets2units[synid]: + print('L' + str(lexid)) + + # Synrelations to nonexistent targers + print('!! Synset relations to nonexistent targets:') + for parent, targets in iteritems(synrels): + bad_targets = targets - synset_defs + for bad in bad_targets: + print('S{} -> S{}'.format(parent, bad)) + + # Lexrelations to nonexistent synsets + print('!! Lexical relations to nonexistent targets:') + for parent, targets in iteritems(lexrels): + bad_targets = targets - unit_defs + for bad in bad_targets: + print('L{} -> L{}'.format(parent, bad)) + + +if __name__ == '__main__': + main() diff --git a/setup.py b/setup.py index 8bd5719..2277dc8 100644 --- a/setup.py +++ b/setup.py @@ -1,28 +1,33 @@ # coding: utf8 -from setuptools import setup, find_packages -import sys +from setuptools import setup +import os +# Name of the enviromental variable that excludes the default storage file from +# the distribution (to save space). 
+# The *.egg-info directory must not exist for this hack to work. +ENVNAME_DIST_NODEFAULT = 'PLWN_API_DIST_NO_DEFAULT_STORAGE' -def install_requires(): - req = ['six>=1.10'] - # Only require enum backport in python2 (python3 has better stdlib) - if sys.version_info.major < 3: - req.append('enum34>=1.1.2') - return req +setup_args = dict( + name='PLWN_API', + version='0.23', + license='LGPL-3.0+', + description='Python API to access plWordNet lexicon', + author='MichaÅ‚ KaliÅ„ski', + author_email='michal.kalinski@pwr.edu.pl', -if __name__ == '__main__': - setup( - name='plwn_api', - version='0.9', - description='Python API to access plWordNet lexicon', + packages=['plwn', 'plwn.readers', 'plwn.storages', 'plwn.utils'], + package_data={'plwn.default': ['*.db']}, + + test_suite='tests.setuptools_loader.setuptools_load_tests', + install_requires=['six>=1.10', 'enum34>=1.1.2;python_version<"3.4"'], + zip_safe=False, +) - author='MichaÅ‚ KaliÅ„ski', - author_email='michal.kalinski@pwr.edu.pl', - packages=find_packages(exclude=['tests', 'tests.*']), - package_data={'plwn': ['relation_aliases.tsv']}, - test_suite='tests.setuptools_loader.setuptools_load_tests', +if __name__ == '__main__': + # Include the "default storage" subpackage by default + if not int(os.environ.get(ENVNAME_DIST_NODEFAULT, 0)): + setup_args['packages'].append('plwn.default') - install_requires=install_requires(), - ) + setup(**setup_args) diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/abstract_cases/__init__.py b/tests/abstract_cases/__init__.py new file mode 100644 index 0000000..88772d5 --- /dev/null +++ b/tests/abstract_cases/__init__.py @@ -0,0 +1,3 @@ +from ._make_abstract import load_tests_from_abstract + +__all__ = 'load_tests_from_abstract', diff --git a/tests/abstract_cases/_make_abstract.py b/tests/abstract_cases/_make_abstract.py new file mode 100644 index 0000000..4a6f9df --- /dev/null +++ 
b/tests/abstract_cases/_make_abstract.py @@ -0,0 +1,29 @@ +from __future__ import absolute_import, division + + +from . import test_graphml, test_plwordnet, test_unit_and_synset + +import unittest as ut + + +__all__ = 'load_tests_from_abstract', + +_ALL_MODS = test_graphml, test_plwordnet, test_unit_and_synset + + +def load_tests_from_abstract(loader, name_suffix, plwn_class): + retsuite = ut.TestSuite() + for mod in _ALL_MODS: + for tcase in _iter_mod_cases(mod, name_suffix, plwn_class): + retsuite.addTests(loader.loadTestsFromTestCase(tcase)) + return retsuite + + +def _iter_mod_cases(mod, name_suffix, plwn_class): + moddict = vars(mod) + for name in mod.__all__: + obj = moddict[name] + if issubclass(obj, ut.TestCase): + # Yield a subclass with suffixed name nad the _PLWNClass variable + # set to a real value. + yield type(name + name_suffix, (obj,), {'_PLWNClass': plwn_class}) diff --git a/tests/abstract_cases/asciio/graphml-edges.asciio b/tests/abstract_cases/asciio/graphml-edges.asciio new file mode 100644 index 0000000000000000000000000000000000000000..012d9493a0eaf3e8f7a3ec8142817d84c65bc617 GIT binary patch literal 4083 zcmV<P4-D||004E&LRx4xF+o`-Q(1ueQ~Cf0n*WG@_$UC}_y7Mt@Bjb*@8ADG0ALn> z25is(2j5!c(dCe;dfv5~)j$JFP+Ikrl7d%3i&<<|l|Tg)0&EirO-i1gsp&SRgF{UN zMuzo3&>8>$0004?07_*v2trdmQxhg213&=K4FRT20MKX~PzMH$8U}+vWW)wWhCl#m zra%A#1OOs`s;U4002%-Q0BN8A000002$2*cOoY=$DsL)&spLnRQ`GcrP|Z(B9+T5h zG#;QHnN6wcJwT|U384Wo0V75dY3OPVG&BG-8fe7P>Hq=VB!4<pSGK-$EG#QD{$)>t z{1|Gq`WP5ms%SFIlP#DktxOqUOtR5W`!LZmR+T<AFsZ3DVU;5ZN-2u9Fsv$aSsA+H z!Pxjt-WF2L-YV`Ys+``9Oq;b%mDy`YY;mRPF&$K(!`R7O)T!0OC47&1q;FU<j?xq& zUYm6e<#BYc37Jb;J#bvjdUVEQNrfs2(@;bvD}?7Eoj(qoF)?wm@+wRy5)=u*;?zPK zl~jtUTB?=m!Z1?3*n3%El~I(eB}HY16<=TLuzX^qk}4pebOvfDi-%2*q1=};wJ<n| z6!=W!Izh8m8aY09#M#4BFPXdaoZ85Q`CG8?t7i(&i{xeWDFoTvczI{&aq@3EDr(W{ zaQn=%=F?SI`Z!_3qgGU{8f7f3)ZK<O<k{|FO`4+#H*nF0PkOpJv@&X@Rqs^YheJk& z%F4J`;$-Q^TxXM;2JHVDu<UYkID1?-noZcicgHtl4JA6ZVfWa>MQW@$G-S%hCI8io 
z1w9po6%|r>l8Q`^z(Q%tYaq-gl!T;*V}~~EMOh)17?UmC88&ocYT7GS9!k;cSBq)Q zyTy~i$iZC*J81~COJqxAgYDP;`P(heSs@YV)&pY4C5fh7os8<uRO-;I;ni4Z?p73R ztsAnDm1@f@y<A7<eN7+tGc1D0J?&xY7NjkUByyc=<-Q+x?&a-!(dgysYSG-#%#Rfs zBMh=^)<J~PMv5zDjK0#4!jtf+X=L{zVO3(nmQ_#sb{3%VMfVhSVZ)%%bhI7c=OlzD z@T!E8NeKcF001Ek=Io+KM50sk$dn*TjY%Yuy`KeKCK>y}!Vy8gNvX?U%D<u+)+)0t zwxh<OgSnS}+!8h%Vl+h<lInx@v~g^r?i&4dj8IpuocvV#CFY0VKKd!j-yUfioM$jw znVq(qZ+GDI<fqkJ?o5LGEyI_sS=pO+#$<(^4P=Q51v~s^MOL72^5Tk;;M=0}g7L}r za_p|zD4S&B0W!pn*-!(Nrl0fu>ldo;aM}>li>wmB=sHAU2z00zVT1~Vq{br4WQGv` zXet>73;`^Jq3;SFk?RM(HaY>Jq3;hsz*ivegJ8p;8wbLIOpOgt(9kp}Xiz(&kwSsO zBd8xhu%u^<m}E3;lTYKO*M<e_JW9s?lhmv$3i0XCod`tfm(fQ0P?B94E?!2}1{&Q6 z5!{-KoDH0tnK4$SSmJy_kLx58yVK;e<$%Z^auo>|n6?w)2bhrg5S$a`Nn578@R%h` z)b^|ysZ%;-GMo_dNMM+dSWJ@q#P%mR^8nRvC>R(0j|aIQ(+1%bAkZ*u9nrC&-34?C z!892dkO1lwM}68zK@yQhsDTMUuzU+5$g(XcP(=uT>LNx)LW+WgNhD-#Oe8T@6%?^c zY>F{abBG&(BC#ScfJJ~vgCN{)1x7_7F>_>Qq6}Qp28u#d1)?An1wt6LP{7zgXreSU zKyD4euz=G=%84YBNhFMnk^lez0000INhC!J5P}90;uN5-;O<YZG!y9z{vK}!#Lf&k z7x)=v_}&i3+K`p}560lsM|A0o*x2~Xc1^{)Ct%6sm$|%pjL`hN0+4J~Ss}Y;<b6uf zm7l1ohxR%ri|T(=gv<m${bz-b<vP-4z~ZSR2E1l0w9WP23CUrC$uUK%t$wHXa=U2N zIjA<VyYy*wrj;<@!@!hii#qpvx7PJ^N^a28r?XrOD7(h!))BTC5LTpC61yik-6UNQ zLP$koH8-m`QPmV1Af&2LsMS!^G(&m|c9*2J7|nBi)zaC^FGox3B`QO0I*6nKP-5O> zBMBxW3S2`LBYfLT+0RXHd})m<wNuT@XA_)~^J`9A;@HkK)-)m^0Z1^CVj>TX>Q{rn z%U*Un7{@B@;UF_Wl&e>?(Hn5T3)a+fOGTTt_(uW&h~jPy@!?2W9uXequg=x=o}`GH zHDO<sy9JP|A#?fIWAm@n%*9%tDzPx9+jM2gGS!2c$2C7$N$+xQ%Y`bdlano4G*QW` zUtnte6_zkzTQJ#A7Ky{}wrav?*@jvQ{gVyVGTEx6=kz{ZQ|G3Xo6bxyL77q*eaGBr z)w5ULZ=+d-Q)5<-1yZ<F>gq{^ee-4sl$GvRGwmxA3pI6Dg11)8uw@u_G%x6C)$n)y z&!vvz!PU{?vR5@fVU*q2iS*W8?|Zb%^;7o_X_g<lRaC>;rzKYvsk$eF@i{Pzq>L0& zl`1N!gsM`N*yOHnvf<C;*Cx_sL#xF>a7UaKmEiDU74E>(W)HqusyWe+*79k)U7t=S z8Xblk9c1dVPVdIYOClzfwf8H8R-K*NMmqgGoWq|IJF0BVoU<HBMk3n=6)D`d=EA5@ z19pv0y&!C#LV?>*meeYU!g}*HnhYCnQQjdAkc%oLP^u(yw^t+?2t~U{UWnu`W_8U` z--~4pSy9_fD<Zr?EFz8}u>(#wnqWlQ%$H?N6b%ZKk&>d(3RUx_a79Q<BwU2D5aAI= 
zOKK}oiCV-Q(Xj}+0@WHb3z!$Z@LrlJP>>{&N|NI8HB|*^Q$D3&l&rz4J8_AmZeFap zi|=$;Xu}~J3`aOFA~;A8KROG{i1Prk!U&!{ecE2}+fc)re$agE!wu9jsXDNmZEUO? zNdhD?fS`8>BERFO?|gccECL_66!xsZx)d2)N=qgcnDu|7#^(SE+oyy5rp7QyEMif! zU!&>D3--`oK}R{K_^2lL_A;ns$)Ck%XKuQt*s7#XqBAT@WWvK*dC|LHJ>!#-V(duM z6O<&%+>g?ExNPr|9?w1rBBN>vs?kf_aaD+$ObKcfF$_}}@+Vq2BwTpn8hS!b!gp^C z5l5d<&bdaumm2k*w#8k<Tsg@LWvOQNy&?``3HESlGsLQJ8|Naa*xlpEi|V^gMxEpq zsYS?(u`##Wg7!Czo5(JEI=JVA=iB@*TM~98o{hQ3QPD{rEu@E@AsYHlE-#B+q=&*n zJA4V@QPbDU#B;)J-LP1kVuxxOO#O7C>k?$ZO+Aw6aTZv_B<9ymhOR<1PXl~ytO<zO zWs=K<V;?j5A2YMY`a2|%<~evI^XU`LIBp|R8VA4J$HT{0Waeq`%#OyD)5+ZC#M&&H zBt5Thr@Ph5{Q_97{XJMq*WZ(O4l(ViggReW9c1m`M|;yAG0o7?vO4ret!yn(rmUzM zbV25Nc-zT{l?;+0RuU326W}<_N^kNin+@Z2P{f=N(7U+ZcIqtR5Nt!(6;V|d$h90X zNgTZ#d0{~}A{ICiOXER!aym#Ul|q#b>OqJxm{3Oho4_K|!$-aEU_n64rOLSKWd^?? zJ)#p)1Y9c(kRb_mM35s63k6C<z@heqcVe0Y3c^E#LE_^uiOOz>o>6{gD#!>yfQN)2 zicqg|F!<8Mv@SN%SX`#Shz&+JT1ciqNQ@6M3DjTeDj!XsA%LPnVp_+0n<kD-7*>op zLS*#UC``6<i8(d3G<_vxt_(W5CKWbe(Zh85RgqJgb#&an&G+=^`;OKXoj!Mahu&WL zWS)2dlVEwxXNb~hGCuKA&J)Mw^ZCCwF!^7o4u}*`0H{^ik}5~bn^n@PGXW4p{8}=h zFe#SXo@`ZCofwj(%jP$=aG)DZD2GA7QA+o`@VR-mVX>?1GQz5wrVl43jvvtK;VVPz zI=EzTj5l&>p2m(8{*(C_q2D{+AI4u%^7#|5Mo*p@`Rn9-aLMy1K?p}%3V{Z5*V1k5 z7>%M7KoeD}c5K68M(Fttq0V|T`W6uNtJbAYtl5{nvkJeiuY$=2=htPk<=v`q!|cb( zb`z6^3?}JPqi(opgQArr*_Y*!{ZH?HFUr1+B!-94L-ncr*OSo~W~Tx9z3XokB-|YH zA(F*|t*134B|Zb(@@UafRU*BF$I)z!{cE}Hegnj5H%}?#D5IR(PPsc=s3$!SsXayy zaSHUw8ndw>-gd;x2%Z}@d|I$bYMVOh`p}N~^LrN97?@%;NPUG!Zslbyu|fukC$QCk z<PLyD=M+pSc+e_pVEYL)Y~@+7$+GF#XIMtV9mKqnIU0ow8buDn>#oK7^zF@duJmJ9 zzY32W#(<SUjej&F=NhNrsF>e(e|si|vywz;=^akga7b(1uuu8rgsGJHD_7KRB<r^p z*$Gw4zH_~enjM~xL0*r#@!_fpaM~ndH`CoQ{d=4`$qn5de!j0;Df;yx;6!=8>)d4` zIwzOJ@;!&)bA4&{-OphW;}cRz%f$LtN5PmzsS2GoJCfM0*B!FqiOr;pyERS{81BCW zd{B&^2fZ{;YGEd4oRzeMuXk#3(IP*0Qz;KOiMw|Vr`g$ha!DQqdUZ-nQdDk{9HLe< zy|ZH}&Zg<C-Eftey;B7+Zp(&#XX`M8dXnnsfA_KS9~YWK(tEuZqC$$A(aBFdJ|NgU zhe4=N?1>RWS<lKmC$4mSUe4r)oNTWv6U>h~8p#ve6U8B9{N2!#9={Sp)_f5}o1;|p 
zonMj?Wi~!Xa!BFHDshB^2_o08XEWB+=miU{A;}J!9{@vydt*ZA8#RKox@2&5H8_s1 z2%ENvb__VxGV$W?Sh5od>_n<as+FS%sFN=BSF40dB)+F>(d2eMG?2m1$=vlkG?6Du zc|LVWjm|<%ThEgi@A0^2)}HjXO_|qiwAnUFl`_K&dBQ31+|e~o$)<@B>TFbzPR{eA zZPgkiIix#N)R4+UQbS(Ogp-rOA=sU%>F+g@lVi#sBe%4dwKb%txg=r+IXlx=lSq}X ziKO$AgD8bjN{<ntJ7R|!xSLf@|4|>IOZKS<kh~H~qAE~VSO)ut;o5n)KCD;lxY}xz zbg6I5u*@F2gSa@Phn7?{AY&ldlap6RMQGVq3UqbA=sHYIsL9D%Emc+$vZHZW6;zaI zAi;r_tSd<nm`N;H%c5$_?=)eSio>F?@OC+-iPc?I9Gj;K%He7$NmRnHYlWv)S~&); z#sj!65xzWMawE0x_m^IsxTMc{ORBZ2*jB4ms-n8FR}^Q1%L<^t!^)<=Tx7I_ffBUV l(H!%naUUl|cHvcQAJ9<1z?N80QObYucO+AV2?!6RKcLFAvd;hj literal 0 HcmV?d00001 diff --git a/tests/abstract_cases/asciio/graphml-mixed.asciio b/tests/abstract_cases/asciio/graphml-mixed.asciio new file mode 100644 index 0000000000000000000000000000000000000000..7d39908abc799dabab6345211fa2f13518dcdbbf GIT binary patch literal 26171 zcmcG#cU%<B(=R#%35rBfP?X@3Ge{1SVTlSX86@XjGDt>22?COHQt~c2!;%yw2PMOj zl$@5F(_NnD_nz~fbI%|5^SSqaVR~w+t7ofcx~sdYdv*bAJ5Lz{TR~oFeI_k^yl#6k z;0bSFSl}cJFmrNq({}SOqyW=2$y0L>fIqqe0DvR`aIuPf_XDs~%z0*l;~N|TbRRz6 z&jQZqFu54y+$Q&D-m-kjd1c#bP5_9sAKq_&eVDEH^e)Z|yc6TnmUaLjk+vWFBh^;` zjVg_mV&RetMk=S)j86`)<p?hg8}i}A2+57#9R*NePU8=FIj?}v28ZIo`H&8S;b*EJ zgWZUOBmn%}(78%DHr;*H+)vxbEI#f}Nu*eSXUJ0X-$x4R_O0&<)zj8O&h+jx$c4p) zv_feOl+^C<N^5?Vmt&zkJP1Y70c`Se<TxYpub_!JqFBHEgQe~Q&pswf#lRD28JpJF zq+)V_*o}9G-?ij$u?WMMq5wczHHu<5-9Gg^oVNJI5g%4nXi>KSKdC%O=0#RaE@4ob zb-e1RDVBsnDpe3s(2ox7tArSGxyJ>=c%elvBxLD;UjXpf4)csgj#%CivPm6w+cK#E zwp3(6YFIf(01gEOwvv(>p?L8*Q|ft4iU6Ntt-!N_G~WlG$=3O@auvg}(iAjUL$F@U zkdmeWQnm^qKKF48V@9;_<fb*0e6qe2Fc+sPx`13iS!g~Wk`oC633qn$;C%YUQ%hVq zZ4lsz_Yj^&Z4FKy{ibc&hi#RVaMSoPp3zQgseoQb*-?$B9|3mx$YI8628Mi4R~9@H z&z3uM(^nDl8C!usL|O(SGbnIgguR>*@lkbo2IkfM(Vw}Zx?dglrswe0H2WjPQ+hto znct9C-X(4F#ZB+c(A8!2m~GEM+#iw-Ssd>R^NE=FP-Wc3G`sLHiYDqyZ(6Nk|Idx5 z0bMhfW#z?FM-mFp61DRo3~-%a2$SaLV>dW>5jxH%Ro~chc`6|xMI{yA5&NuX@}Ss! 
zKHn5x3IUwa#iI|TW!f%ztR8M!JYRTw9!lE0lUe#s605als`l<(>m5Zw8TlUytIfej zY`C+3eI!J#)C1oT!_!|JHnu&~cXzPTzflyVnKk6gw=2m{Ov~WuY!vb@VMSV+F%v_1 zdUkbn6(ZAF69ew9s;f4ozi=)=d~KWVss4EX<@gBkV40#<%fOG(Kau#czo$MX?7}+E zDDxdlL<B8+yBKdMBkxE=e2<7ImazO9Pj{^yr}~8jyb;In4MdFAICFOkoKX7yT`k2l zN=&TEH3U6{gnaCmh!~kG*YZ*%(PGZ;{$^|jilzP9_H!Fvhq5oN>mJeulRuxCCX|p5 zTly-H%<SnIS!f+m);pp>X{N9-@==qoG}S7(Jnd!ea^npXy6r>;7IKE_0);Kf>06Pz zXKp&NT)iyZoA#X-l_(1+7?8M69lMaL)WMNnCu8;^SDD>2HNc8NFaD)95j|ZbJzYDH zgo0MoFNB9f%p)M$H<f=F^}6X(FT+!lxKBLd)Pyt4HO>OxTz?}46bOO<?B}~+<_qrg z11#_qlNpnlP}FStN4?dpLdF4x4vPc>zYiG+KO^k(UO_g3ygbXB&%=W9pyNkgC7y;> zqm-~Rk2AE@+*(&MbmFq*x3&+`u}`$*S*P)s=fqR?5#C%uj*^P_@EPIy7rYSXg2W}k zZD{+{EW>WPVCgIKjX!Ff0}=4{V$^f>DEc|Lw}|@gOoBW~{K57{<CdQ;cV6aX_}Tu> z-;|++Q^umRiuvx;lr<+^qf)rYJ(-D*I%LU0E{(+ql%W{4`zAX?q9K#~L5OK26W)x~ z$9`0c<z~_6rA@Myx$x-?tzoDu1ARnXr`3w*eq{^1H=-Mf?C|m|JMlmhjPW)k(Uj=y zFpGd4dU_`NYDMh%Ul@Zw38){>&eW(0<MRty7`PcmP(-CeL~6^O7<g7zwj4*Bc#qMu zUQA|(tFGrCGZIsBVP-f;{ifyLO{EBOC$1kbxOVN_?ATO)2XPr1tc%ovKFqR!Q>*Bw zX*Je1)fqafHup|Lm#1eMX0&S~Tq_L;;iUt!V;@JRmBx#H9mt@jM-QyA`Ps-IiH+$< zWRv>OBs&%}Js0J_Lh?09io;%yW(LwmL)G~f7P^xuq1qpb1i#vrmMZJ0q|6GXCx^aB zv*CrHe`;x1Qn+d641J{3s{0vLTkzX6q-VQF$aZ^4MRhi_G(~C}4$C0*a^ttNN-R(* zs0&9njo6J3v%?N-OqwJ6dcNr})0i2gCL(=<W_9F;`-S(9v^gT*r4*Vu_`0}Ovwef; zd2%JqF1gE1dPGh}BP5(kU&&Xc#Z-2omH<J2(Z#Cdbm+<N>HR9zQmue;<5zuAzfXqg zeV0aC3?YqE5yM~}JJ#MMNSQUY(W8frDORp*6Z}MFx|9y7A9Wc@-FSKz8fyDH*kg4t zd2U8Wy`?_=e2xF)-uZc3DB?CZ@u$IxSz<TBsGqNaixX4m-y4=)QnxzD%ly{9@~|jx zdEtvUHj=_Nf=W)zjFQL6w+<FaG_+qiVI=#sk8Q(5t80}p*d!qEn*uwwzj?lbOr+c} zINwHf{Ba@E-)j1CwQ&;@s_32>EqNU~X1DtmQ!}p&y^%G`EI(9=AMuwhaBxG1G2PzF z3rWb3dmsLw7>ai20B@-bOf^m+1_TqhNw5qao>Iw{(X1Jt;yvFGuIc$e!qMSw-^!l4 z9-j{x|Bi@t@iI7)AO1C6>>>~=@`AUBy;j}4>5KOKpO;3oQaVP$rzF45Uoy7&6*nN# z2nLvx*A^DY-bgWse^KxWvp|AA8>B|nF8rKqhEVcVA*>M#D@A(SnZcO|rP4%a*|^YJ zPV=E67=eD|B5^~|V<`c~8`KShD$|;uAWz)qK+o(+;UiKPd?V10B)JO2LrBuh&3DkZ zvxbhcVpP)Qr2z7@WjML*d}|v%aWX?WecVb*n|!BlE<5v;&8zdk$ZAI|*sER1o6c4A 
z#Qsa}*_)TwH&-{b=(sm1Xk8ou_i-HxOxLCbZB%}GuCA4_*IsA9i+Fs7AA%dczjN?1 zcDjX7MtrS3qAny<Y?YEty{fvI|9zhtQFT_UapjnUY=f9^KZtIa?L~+weMa8!7kb*o zq09BA?LX63N8Zg@t_c^b^1R%8xjolp_l%?c2Uq6LSY6PN^7zxSm3{ACt^LdgKU%cC zU+R5WJu*{K%^uo6yOwPG+_rddd3fgL<b#j0Qwn$jt;1-Gg0@?1Ajw~NDS-t$8iCDn zI-(~Pe)?<`1(d-=j5<`B6%aF3c04M!3V6r;yj~$@s-uP<eSyP2mseK~#Jf_wgZoK% zo?e`Yo;0}N;uD^HduH)dsD!?K|ETc|^`w}#Id`aM;&{!@9B*#;oE?k)#S4<N!nyUE zp~bF58}yZ6k`khYJiZUjDRg|WXm{M^G_)6q6xF+64+)7=b~;tVCas$h`aRX~)T*d6 z$FF)a=mJ|yL4WwlwU70MhTyET>P)}*zKLCY0%k08DA$KjXkCi`=3-X%Vfu0}OvUeM zkqECp*{hh-RdVC;$V4-Wo$s&bCAW_)7QuorE3c=U^XtnBUkkVyqsx`8in$EYrKIMc z`~sfccn$s}GZ7O?qU8wC_p3g$J8Ze`dY+w~EEFg{rHIOCY76voD$@v_U1U>kl(4&5 zdTX_ozAe#ORXV3n)9AYMFHQp9tX|Q+=?h;;cCxXYm@iCZU0yqUvsf7zQD3AG!1sk) zUZ|ililKKPpB;gsI#s{9(Fn|X(R}cmRnpMkii}166%}Q#mN%11WSRf6>xW&b$KEHc z!Z`bHr9pw*gOyPkfkhDV;7eaiT8-poK^p4MIfEU0`Q<@-d3oR22bmh0et9i+zHJoD zBG3j&70p{BLE!DO2Rzz4YiO^7m-Xv6ibrB(e390_Luf{|KH3XDeLi_fur}wJ<HvOT z^=+-PQS+(+W&bi>vY5-&jQ4|r%;PqzLO!TUmecr28{#MTZ%}5}6~%STa|+Ir?Fp(s zCN$i2;~6WXZN!gS`sPkDPMt*hjGdF8_u%LLCSIen7*giiy!Z8LR^st!=d2;vd2DK8 z_&MjXi1+zY>d_hK9;aMqq(k=nl!MK8me%v#zMYectLq#8yBTjY&-eFD`W!k6WxKWo z(aB1+uvWf~CB-bb#YFF1c%gDJC@##OQM*8JE$ru4bU-$9TBbi4w<NnvCF@|fPw%d= zhORZO?%|vH{WHI7FMi@D8D^846Xn%YB7<%jt0F_mcLS25(Odh;UOg8+s-X)r!I?Fz zt9}UiJ4P(z;MW?Jre{BlMFcbtYAX)zXi3y|WlQ?4`#mhTjT{*YDdc3{?c=E0y4Cj5 z$#?FgW#>a*GM7=K>+=748X7_;o1){uI<=*)m;xg^XKmF7i=>mZCPHK(9o+DAs=1WF zg8EbNUd!p1`jcjz*Uk;K0WU6VE1{5M%bunBlw!*q{cQ2=@j2u1en)rlkMZ(X=WK=d zUlR|su<@xzrEYK!Fw<6cUw);{Wu}}c9l?3gPTEzvcPEzCCFG2O|ImWwIC$;}Y`tq~ zX)*RN!ll6FYc+YG*yUOx<mM{{5f3>>?1M+7+S;`sBW(RUcfmY}pc|5PnZTKs6||$& z#8^S?fl9tI?*;L5sPFhn7ha52@PsrF;qg!rz5MnOD}hm1#%=XG1zqRc$*>^o<ayGE z&lD*ybdKCJkA#;w{nO6aP}7K^?Bs!keq?fV(My(V?oHaxgTMT=9TzV@UA*kLoChWE z38KJ^H(Jn|xHq0NtE{qZ)V*%mec02RL$#M(j0*|R8=o#gDnKI4Uu3l58T{{?ItF@$ zOKd&yrd2_7-~XG{><Zj0JS-a;N<TsjEFB~ij=O7-UqA4E`Ip|I-lZIEJmU*-_<QZs zJf|U|9N_(1Ri*bc^CoGtliu@-Kqu7kRcm(r_C>(drP!01cgNG3ftV@hKQ_~q>a6TX 
z)YD-@n|LbApC>-Scw4j&l}lJZn063CK0YX+)YH2A?c?1Nwu%lrzNnr~2^cTm{OM$K z^W~gP$^i+F_q!FGwtyXvEQe=Ne*D>)hq}{)&o?hzy3#y};k=JJ>U*2UGcP({xQGof z-g{~N?t&R(F;RPqz11gyCc{0HUSBo6ZR)G5KiG_&?=JJN6_JBMMK_ke=<iA3s2V}l zdrU8FM0&ouJ0{se`;mD~{TX=>@TLjRuiUL|viW-vkANh?0ug%O-%>Hh-$6)DDz@*; zLxhSVm-K?fCedQ+_wTZ@Fml6iYlj#!q~CLHANyRHTs)9>K-V{2Z%IsEatFpXb|C|} zu12hAx%jpMT0~BJc72<zz`D$(FQ=Y1dJP<B%@gYEa6}t5dY$}(Gyj{<d#C=wS$3Ys zo@#zg-kMn^pXIO8A04y%gxD;yksoULI8Y69luT<jwN!pC*|{2-UykAyKUCI8nx~di zChfe#*E|P~`@0V{QIjd5YZk1v%lRWFO@?1hJuN?1_17fq3m-0+9LhyND$aW5?<dP4 zk4yCGI*4wLKBn#Ecgp~`!m~DTSgHcs+IM1m)(!#Mw;n7_gGBf09=zL4pAP>!#_D>I zX{)PSTzg|&FIeEm86cR^Q_JOkX>zi6z<3O1cDe4k{&j2VM)2p-cc30MtAhC_b0>|y za|c-$>aHVY_!bdQ-86JFdSsgF;{G72%OfM}gOjs!;w7V%`VR4*Wpn?YukTyNSfEz@ zl#{bXqLZ1;y>!=Pf?8OMYsa)ARJ*I(16h#CmKUc?f0(G5f?|07@KfXQu{yKKX|m_7 zEt~$VZ*s4AW2<B1>g4c3B5NMn-|=yk%2V`cbL~+5){+gnEq~8;Q1tTjRzXf)wJ?)> zYs43`)%v+#`hYH7P0{?@T*XqN%3y6<s-dy>!Rla|X>Ii?i6nVOO*Vp~s;_!%<0$aw z_ACbI|Bf4#Q-+tk{A6TzYfBxH^sSPzxvMWEXUrc}Uh(dP-!Na9T^$BaYg`SXDBhH> z9$devD~!kEE`P{nqHc&PZMLp0_$k|M_d+88F2ow<30>Fgl5P(Y4x$El@KZ&SqbYh{ zO&Prksx!X_tOvZkcDk-0srG5l723JF%JO*GDZr9_!M{4s+%?OtwSCD4nz%F^M1)rd zO5bblVUiv|td0*wcug^cdUSkW8?IU~viRU!IsfHp3h8BrY1g~DJetZ8f?rnfZ@;>W zXNgw`E;M*Gi2WZmNd916U6ZqPB=+8I+mtQ{hUpPue+<xo$!fDWIDV9^3Ayp10KSt4 zOYjg!uFUYq7zHmq3p#lh2@5&KFPy@;D6%dg|A-|^w?<8bFBKi8tPlr#@Zq|K;(Qd$ zmiO#I*!MgQ2Ft2nRk6;o)D0cD3iOGH_jfgC{#*<4yM$2i*pSMpn1i{%<f`$Eoz_Ni zjJHuL45;HQJNTLCs=yKQ6><iQd9@5|`g90u=b!B?dMs>YF3^9aAbhIWU_#J87r2-d z`yUDbr2cdM=Y$QAVv}NWZUz1Y7LheL2mr8P%0}U-;r!%s-EoAek*QJu-G2lyr~i=u z6r%gLC^neUN(p#}P_T?w?PpSoQ<CeC%lS_UDQql40Gl)jpt@yX1^tKp?+NR_^!i7M zTUFvYdZ_7BL6+fhuK?2jCQI<&)ufZb<VgL`T4AnkIkzyq&A>K``R8IR2a5;5lmvhv z07n26&ZL_2KMLUj{oPWxS^wf4feaR(H3I<Pl>z`!{{Ww54kiPHsjbw%0-*l>pns|V zbBYH@#Q^rdfFK+`)$f>!^7B#iV_RGDS!2Yd@Rn2KnbN9yQsdd^llLw}I3Wi8_A-K0 zh=@8&eC;2Eh}K}GG6BFpFW^7C|92;B&~16T+eGR%3BuGpZv!kKA14ETaLd5M9DJCA z4s&2M#sL6&0zfKA3ZTcv{pYFu!~P%C?OlSf2(SQvBThd7yN4kNs8Y%SECE2k7Qnuh 
z{STyYG%@^tUYHavc@JX`%bhrR0LwC#1)wv<@QKo1#{pO=U^)za3p}aYL^b}F{s*{e zadKZQV|6fUl3R8I|270C1_iLSWl!o=_92~&>g5h9`2`(yO4lZsNi9HfMdRBpIvpe| zVksI%vn`hYJu@UGR5W^6+SX#pf!NYAa6@a?EcI_-_`5xJ%>ryL@G)s8Hfc({=gtl7 zLMdPMDaS|JxmElx*Cb~>WHD0L(;?w6J?JTpUtI>~p7^WnoH&SAUS!-L{r2ahP_=Cm zJ8}$$<ZAlF2@In8!eTTF4GXKnc!9-=QkaM2#a-%E;#k*L#5SvYC$k)4ZH$ie_M8oH znWa=j)bv*!O&W)KK~P5S<*J>LrK#*3pub+koC8D<Bi4NH`$$7)K`*6dPJ?>wC5X0B zdJ=ToQOCGjdpovJA!Y|6o>BM~rp)kx`Jj}{Z3w!D7%{Yf=?}i$-64PTeoVWHc|l_h zeZ?AmMW7&A_2fJ}HR}G~33mHvnP0VkGMp8RW;<63%At;1RwF1BTh@$v_+8h>A3K!s zW~}ir;i5p1K|dO7gEOyvPi|w8CdR$ET<prbv5rMf^Iw7qVz@UK&&>n8^M%#Q<m7lY z^}74|&l>BIVn`&?u*u6@$Sc6t1idkPQj3WvJ+Gd1QXfZnRcGWi?YnyNZL1qZ4m{E{ zAJA&4_oC1F-y=#T&?N==a)%jRAMy2vru3;i<|&U=34?m}I*;;~sRM*I+UN@ztyxeh zQs{A5qdN>?|0CGj^SQ@s?c;LSjja{4P+cz&ViqCzsY{bb^h-i*R=V5l_6ee@&y~r< z3M#n24KsqV8T=H{EB)*b8Mi_1`06~-uzPNhD@*l)v!zt?z#4~N-#58%woq*DxVjND z#tnONR2%Qsdf`5$j%PQQ_yc8(fyhHAn96L;-6@cKSp00Wn;&*d=ExI`iL-b!LmY8_ z0jI!|)GV1HeV4I6yj*P4#Y9w4x!;OowZDK9`W36P!G7N^iPX1l-~u;FlP$=LV`-6( zWrU@mwEkThnxj*N#x~yHo5q5lVh8;Il7hFAQlHAO#+lqUtJKV2ay|L+38`hXG@e5D zU(qUuCAZ)fiii~s6~~fCS^8q2s4e#@GM97i0BWI?GE5L_y=HTnp>9XeW3#)zIfK&N zrIWHa)v&(d{+OjPN5^Mwh)j1kb%f```ua!geCZXNPoNExHTC>r433W&iZ!3gt8%39 ztm{YSKW`I1oY0?Xt?nwO7fW%|zrURKGzx6_UD*^|p#<IN>C4m~99QY>O<W8olyF0S zdrR&JF=^K&#hnQv!eWA1+BLBjr)Y1vW#GK`t+W=@+=RAlK|S=Dbc*YSDNTv*(>zos zY0YvCBKy`&D)NmRZf{ZwpV{iXVym71uPE04*I}_c(uhQG)fZd%**-TnesZ(z`F#X3 zm$2r-b-((TzyspMG@@|`XT57ZBUywfGm&IjpGc;=y~FG-8{X?3uU4|Rl?@`J2L(=Q zBk{3@F`G=A$lon<DG_l(xn+)(7Ik)9e_!xMQ8bXgM-9vtvMMkbD>Y_%VVU_pG-lI5 z!O$mWAZ@c+nMBT(0h0U7l|uRjj!ILA*<d?G!*+Fd7PD=6b^Hb!{n`Rm7O#e{@PT<q z|6$KM#G?YfWMn+v^4h&tHW4L2;Y(3%Vemd(iv&@%0c+B?*LG(onF~m7J=MXf^cEyb zSIt_DGHYF#D$2;&7`N4`Rk5<NUZv^IouZtk52K-3`28(Y>4Uws;DqcW^F3|t-rmx* z?X9B3=(URsoZzbZiCF9d3khslexJGJGJYcdw;3VM!nhRQh#nK4KUWDN8X$cUmoX2P z;Tvw!aSx4UmVdggH)@Udq@o)gKltMlgE!r&V4$zf;6a$I2k42A*_$J~_rV(tuPiR6 z0)2KYZ}31r>L#c3RJrFi`V#uAiKBK!xwrNFXl7Dj`4CrEt7<itrE7)N1H@gPiq6sw 
zCjGw>3(hySR*_UXDXwp{z+;zDD4sM}2j|^uYZ^8yq?NprbF6Ffl(NnOt0^Q>Gfsx; zyBP}$&x0ogM)#A(j0e(u5|uAEvbf1rpP!^xsOFVt8j~=Zn^_O`9wI)#`tV+ddVX3D zbo1Q#vUa8N*XL=biGAXL#zL<%ntr>$L={OL@Z*MefbU8}Fng7)GGt_~P+r&Iz}Y<} z#krU#jz{;3l1DRNA;nCM`Q>#*FFa}{`$r5oNTHMAg~Ufoz50orlfZAR{#P{_#TxZ< zwN;n0CQRyx7U*_(Y-&mweB;ORHf+Uign`Ckj4NpdcDE$6FM%y<pt)s=nU9|p-;n>w zqr9Ht_f}S0of`=enVHTOcV@&84SWW*>9HxMCEoDhCrpVX?{0Gcut`cWJOSZno}1$A z`CS44ZH1HIH2lKfIP!Dl@?Xmh<lOFEI2u=Jc2*yf=sPGqvwech6t$F%^2wj@VJLjw z#C780MGStxFDlZZd|Zd%`SzDX-<03v_#?TJY3*GU{&CO`v7k=wSthl93iihJ=p$u$ z^}z<-_voi%Gt`_0)Q!|1?{=lN_-ute-CbD4YH_DMd>(}V&b2|&BD#DT`6yTSCqFdV zYCzBPzWfLN2^xp!njuqW1rcf-^T^Hg86+Bwi-N6ug6av_FR0a_ZNtOJ$c7q%BIVRn z`+vyLwleqLka$&+w~&1vqpEZA;-jb9f4aBII&=3SN;ZBePQ|&8NwtTwgD^iF@{!6$ zvUWn9*+z7}?A&+0WHWPJGzq<Gto|(88W$TSg^e<Eto!~DWsB9OTQ6}il;M==;D_Aw z=|QIV)%<zQmV2it>>VkCurLt>GID>wNHmcQ0{cX4UG`KNCxcQXy6FT@H8Y~tbH`)G zjRA2>BSHtUx@f{yx$>5cmn{d;TPp)Zsc;XZK7W&tji4>ope54@tbn7rBk|R{lD^_k z+&s(9#CIEo<h9kFn8Uc>%W$@@wkQf0cVOXyOLvMy2~aY<AH**xC7oh#-i`M+a7`f< zy4ETRB{UdGAM)WZyLmRAn6G03b*SVQM5Fi1ij1IRCic2Di*|F5C>kka_2I_|Zk|!p z!^G_shqv1n?Di@tOb`=QSg}(6JCSh&(W`t5*M0L(`cE9xhfBl0YYup%-=hc{8XH>c zxV(9Jta0^b=(42rx}?#L?as(@l2#u_NmU=UMX$jS?8*LzfzU?#;_?aA#kQdq;EmS0 zZn;qX+*~e?jRW6Kt%m_VKo&cL?4PTT&VUNMg)?}Gwd1Zxmu?Qu!knFLd?(`TM85Ss zo<qY<nrVt?UWt%0h%AWFSuUcka-4le&f<)|@s-ariNQ5lP1QZ%9S>ObW|~vJjw?)$ z)F8}x5?5RD6)uIJw}y=U@C<bSb=3}F^LI$*QoWj(C`xwNIlY#oi*smwI^V8g9pW<Z z(yt0u*?ko&ByA7*Vx=Nqtf`}2Qq0b#otVg`qxCZ1Z>Q8_qa#|vMZ(_0e?5Ki-G|EN ziqM{TxE33=w1d3ii(jFtYft%0{z0v4)h&MEHtO2~K&tO*rq|q2N0ir)B<v#ljgH}J znZLtOfJ>{<=Dg9xyw9ejA5tL*xYOGw1;{*N=fwCeqoZ~LQuf;M4XN?qy#8V>Htk{u zkD_7wAOSXO?czi>H9HQwBx|N5rX&lpupg%6!gQV&v)gKF0u0rwJ?Yet%(*FGs_Cwg zQ$?1FC`U1)t}V}ZWq2_}omh;XN7d4-WhY5rxmE)_DB$;EZ1%A6qiz>R(`Gc65X$jn zCv(9|f5I`Z7a?q&ipZD-Mr%tIY5+jr78?vo3Nee;vr1Sq-(elJ`1JIJS2~uV*@xXP zk~RphE*cd{lNbJuz-UbnP`A)|)__5kYC?o2dl!M(lpG&v2t5Imu#L#|`-q=wu&~dd ztDn1aithc3iO*WiS!$}@^L``6=*o(U{11~A`J52Q85ibv|4$&0`JZke_J7D*XT#p$ 
z|0MnYCl3oz>^}U<U&D8Q{?EbawMKXU*Qoy0p&HL)`)a#mU+RRrc-Iuo{=!9=$^*5w zLAV4eI$tmjRV}7v{O?EoOcTcOtF=8(J345+a@p2sm0J<*4L;a-&i~Y7BXjK(gdU6f z^00h^q+&e5A8nM+731DO#qvQKmtKa%=U_F_cWFf{BlVn858>i@l9P#|<W}*Lz-vjy zp_ImbJe<H;hq9_cNZmn50e4D=O1E%yeN?0fuo_R}cl!F{RQu0&gU?bvM7rQpiBO7l zT|43g3UqxIr%jMX{825I70ezthNwOMbmbksjjcMY>p3=6oM7X*=`6SXE26Y?=7T2F z3~c%D7-&-eRkTG*`+l$<6KFHVPFddAWbmsQV^jWfCz-CAF-`r4%B2}HRwY6T*6faQ zzVp@5&#y_Qu2w%|@L}xIZEso|H_3a&Cd+%(!%y3Nd1@&BAWWp~G4yr1#tGjB+;()} zx_M|?LVt@?b&%=AGsz6G@Qb`q^29e_H(XbSb$k}xPaL!xJ{^UgL+sdz`8-uy5~@3B zcy@P`NwL&jRpqGvMLt7a{m7Cks9rk{x~6xW2A3xK#Hce!&o}-gm;V`(f0%*$WYF<! zs`(SEOz|7<w4S%4?A=uf?CTEMsjtx#WVaxF#8p%CuoS??9?Jo+$G(n1AbO=6^Yy55 zldQ|L<EQ?S!3q7ASD`KG)d4%9_AS(eS}&QwTU7T+ZG+_=(>F#eCYrvm1c&p}{YcHO z-wA&*uMuEs;<fL}T6py3YDY-Tv%~lGp|~ekO!*sm|BT}7GR8a9*;TK}NP+t*E%G_Q zo$=D~ARL1+Osg3$0}GToNFz(pW0-*R*Y_xSf|^$Y4mxpXn-eCpSKPTm58iy^J=K`A zzDS)ww~>-{SDR319pUz0sck);E8X-#*6VD{z6myL`cE_ltM;+m@mvr95PP@4(l>TI zFoA401v8j!Kj#a<2_iY+zm_|wa>-VIt~N0Bi&y7w?9#Svyx{Yek+Je5{;B*t?|oZ3 zZ3PTWC-gQ=1}f+E{|SXlVSSG(0*0f6Hmudw$@#V$LP}l%X{pBN2W>@fj2XwP_;hD^ z%9pMt0Njk({zGmvu2!FzDU!b)B#%a4*3Z!k$Ae;-PF*@r<5YWEX<nJs!fJqj9{{lE zJBwU#5&mVA5%9>W2B=FKo;KS29B}Qox;oi9%NRfP{zazA8}HP@td>J=JC5v(Q#0cD zO+P+cYn)ZQ+S144H4f6p3;)ke#U<;}YSy*)!FqjPTjY*UQg7waOS{zOmJ@fM2hc}r zD-tRjeeL~<a{YaB7w!O-RSw300uK#t(>qB)SVzAxAteC7pY}akY<r_oSX$PqcKqjP z{_*+ZHgq!^=I`t4RXb7KIwNuNV`5?wBT9x#$~ky3vUbqjjZbz6Kjqr?s-EmZ54I;R zQw=lgGV$?P_eKw{GqS`tTw53|c#3y5|9r1}sZZZ5U)ErK<iQf3uC9f;@6fXSC_<+; z-@(4CWE(x8)$|%Ex__d}6e%YjH8YDEFpPL1-alKE|JliXvoP1b$E_aY#|W@0!Ap8Z zRT(`*pz?p-dx>Zv^)d~hW6bXpTD~%~^=#Vy9V;FeJM&UOB1<8cJUheMcNq4P^i?>j z;H|>Fmm!TV%iqu&ubG7hbql%3Omu5ZSPjM#R~_2$t6MYDP3C><GlxGrZuZR@rSX#P zuX^e1G!7C)%g#-BD9WODiBDRLuvg|L+*(A}qn^@?GrzaS)o5v{u1=lwnEK4fN(o67 z4G$?R<Sc6DQD1n#>{%&VgMw?^(fK18(NI!spt!@$b~Mo0AWAIr_D$P87;<XM;~QFh zW`YZu6?TY)?(fIpLqThxpP>1DQjJMF3hDuKWtV3IXG24oPF^dg<N4=Vn`5nq=k*hN z)qB4gqJggr40LjGa{kU(3{rGbS!WZa7p+%kJ-WqR0sh7ZUFEtH<wl7<8D40Y1uP2Y 
zMt<Js(gg3KbYTI$bIn?%=j3m2KN;>|s(`t;45@9=NF$KJG!9%KB#fNNoB=loi@emS zMdAdRDj;eY8gMZ}D;FCq6yTRllG+%rmEYiX<VSP#D>as%!CEc^8w-dbz{R9NQgo#M zE0HvH)D8d|*%Dq78alp5UBz9cN9aAtaJnJj+3~4abolcL1NH^I(4YSrG)yb|A9z+9 zLzs%`^e}78T?r%FnXLL}>ISxe`#Y{E&MK4uck7kc_GZg`VYfETX9Ehp=cN-Cb>-Dd zY{aZIPsnIi{dRze3IYTWp5E-`yC{}txB65fL&ec&@w0^ZW<Zji|6udTTsr{ZX#N7Q zF*BcgJAQqR6EwhWA0Y?}J8v*P^pc6(+j(y9@2J|nEj!n#_Zpqqm9bau4ggjEDZ>eH zWaHBKPL#VOE}RTs^=xS$?wDVgT~uBCATdvT*G=0=+wuqBc^=LFP6$q5HsS5>+9R1X z?wZJ`<Rh*bpLy}dSFhTQ|9(Vq_)MJOiPdaVJObw6Mrmd}t6-|-KrmbtfpUk{!q9fG zzxOp8t9VV+8O%6mNAhwRS;9-3>PjI$4Kwp~OP|$btXqlo^o&DlA|$uT*~n%)>g($> zDKB=>o!Yo`Qj(YjUTr7SuI1Bf+PB#~*J^Cq)_G(88*Mmmk~gW2cD#IN^|)4;s+3T( z$(|ePrH!TikPqpm$NlJtI6Mj2e9ui|4@d^Wcy@qu#@$&N=`E~)nEIj0I6Et<%%>Ta zXy}<%t4j&<{JLbUHsTR;q~uu+_mtnKMrb+@E_E$Z84dgm7cS(o_{vj`B~EkI)E~XZ zFw?2(yEMK%vn7jXVf3@#*LtxdE25a)Kuf}V0$)<+(7SQ4>$9=Nh)-=xCd{L9L=1U4 zy#0Xmb|x`+BIX;pqv#gjayl>K%AZLy812=CfNO4PCOxP?K7_+h#v4PujHX6+%7jKm zo6v)WV9cvN>B`iNJKbCTgg!W|UdmXC_nn!^IzRZnL&BY$t$paEzp=4gCxE?WyE=ZU zp~+4yvKTGA82%%oQ{IB1GM$~8GK>-ekqH95zXO51$|0nCmk7~j!B(<d_1~`B{8_x5 z;jwnKO0+L;!A#;`D^?vSCU#(ieu#n=(Y>d{JqpTXE{xs9R-#jJtY`<l#|GnKfk+=( z4ys<JemW%?u*^8yl5HoK#=fW~$vK{4o=rr&1tgv&{u#Hf<`+l_!b|)qCHl9%1?5$7 z+eL&X9=&-=5`>Y59UuRtbG{3=j0*)u6BPT7Rfdu@&*c1oI09M;Z?cVJm7S8D)Hj5m zeS7$M=m4n~CBr#VM0$T4Du2oV=3S5a^;*F#o%14#q7$&fw?+!$db<^?<;A$jUr!uk zz@TAiww!;6I6_dG5~DT_^dAl^j!+Z);=OthSSgh*AiHS`;|5>h1qrrE&zE!8&jl!# zceuJeZoa&WoDHvo&OtZROCpPq#cwl1s=1v^p1uu1xl}xvOo_;Fx_`&xGe5JJrFKS) zm(es6dCLH%N$G5laquQG{V+f@=-7!hl-6rL0QVYxCR+0ytj{P{K~H><ilJE@iF99= zO!4R{(=3pp0Kn5UF-k<tNv|<}AQ4XjAdGD!>^vVpqMImKr*5H}$+qLkG*$0iUk0n< z`-;wwRR58m>YLV?iP6QJ;V|OL@6jYLwZPM+H9<;U6Kf5OM#m4kjW;vozU$Ad55JD9 zhZUQV{oi#Glh`H)jeC?$ScfRbl3ghN9qNuipNtLhdmJ6jxIFDcFCWxYopl{rsvcr% zd?y@M4NnqWvBs=VU~u0-?!G;$Zn_TT7@v8z+JbiSKgk3L`7txk(jDK;j0`xuG-FOG z0s9r9l*nop#Ld6G2K&`LDMp|M8+aI`jKzX?vC}4!nz1uPXMn|WCx;YBz^tFu5r}S< zVOBl;Fr9@IkjAHR>8vevdQU)y`45tl!`gNF=|SqSQaPXGuww6W2VIG@Zdm<`F%bc0 
z#{kxHUFq##)%OIPXyMh2)QnYeS1in)?pN4B4}hFI01K9s@M6@Mo8dbQbr*}XB1h_9 zu0ZPre5bVimJ7Th#F%a<hA%YlKjzW8fGK=i;fK@KE<@s*2IKqAm_;%C$BVMs<E0qE zIuz6NcrB$hxTe1!BIV3rZ5hPPKed^{baV_FTu$$KZ$BsZMeL7xiF`z*4jUX%jFBsj zC%}z(Z}}bP9YankPtXFmkdWa^5RR3sQjsd&h-*SV2iB~J*81AdSqi4b>2dYJPx@ai zXEYM?;<SaQ<>DFuFauc1859fpGKibdpGEv47S!Jd2Bpy4ufgnm#DTtuQ+12NkiLaR z#~RS}b;0k)<Bl_#U4`H}=E-3&+rlDdzlW;e2pH7<59Ri*E)C2HEkHqk(NIFP<G|4_ zHQ|g{?QwE(p&~!xKnWs)a^9%OtZB45s^NGzR7}OwbGDtgKl?$P@+tlkySzAyAi?=3 zn7ufI{Fq0jF1%<kUeC0zw!(%~ON2lmo)K`Q+E;jyYTv?fOIE~2xbj@;I^xvvEfD~E zkA)@SwJIr^xuV6N>7FIFH-vHT{)Hj*z*r4D8qd!Jj#E3C_g~p}0>HS~G?WGCEjkX2 zmB0m%f?!bm9#hSaGs~6)zak&UxA$&u2}t^?zn%2XNL%abpL5HE|92F@i@eJU_T51J z-9b|Kkg2Ju!<M<A;cBtHH`ZNJ?bu2%wI1=KjguB*hs-0y=F0gNU=4&2kTT#B5KAho z^ff9%F7?cJ7-w=LE@lbh_@vtVfo>`+=D7_Yc_muu=Ah9=H5El!4%Z)$LD%L&HSOY3 zsDXO@7NxER*6W0<*8`&md~VMvlhZcw#yCo54b`(&n!da>=u4#tCYDaFmR9R<;5&}D zpMSuj+whvD2jcsr%K9!`H(5V1*2z%+KD0Yg2+wXN{Wrt43y+i(9zas<^#_)`&XYie z2bf}s^Z0Sv_DR4v+%rn@MeXS)#+FBr=(~u*prT$4mX$Si{@Wjal(td<WlT)3c{YV8 zHQ4y@$AK%5tcZ1K%h5n+sv^I`!Sw?CvGfBOP1vh)=dZ8h;KxFi{gm3}T-EG!hU4S1 z29EEo7Y4+>B14(K1nV{R*CnRa=dF&83t4;QuQYCc7AU)V<AW80d5sM83Y|7S4va_K z3LD{{T23HK6PiV{hjMb5Pz?ZhmBDh`gf?5w=L%bWcLrg(trB~igAz_!(Z0vGZ2%|K z#Aj?mvhg5V!YN>Oy-U)5b4X+}T72#bc}$?nKo{9FQR>zvZsM`AH;?RUIp0BSmY#2W zPawJicAnL5HeY`QOM3p>m;n9cR;Afzdqp2M4sGm}kWVV?>7kC6#K#Pn`RIQ{0FD}h zTN0z$zrakW{tH-^HUBQ`;sK-`01$%Va~Xm#DS^~~L@<#ROqAx|gnYRI6STkp7c<qp zg%S=gO`~T$;*tFZTa%O3^zTm($3~r<nTOrf2L=^PW_3rEez9Pg0w|)}w@9|hURdoG zn~Z|T4k=m$JtHRPk8YeAT@v|-a-5uWq-mHXRVRC1t*FeiXJf@ymiK}P&;tZJu-*yB zsT*vL*B2T)m2;g8v961$!r`fGjimnE3TC?8YNGF-ySm+((&fG{`VMe*g>s=he5$OZ z??I&xH%4r?>%jXOUWMhbd<l;ukA2CSk&>BShBfVkRmfUPszF&Dx0maz_1G3=Vts0z zK+9x8)<PW9Mt>(vv&Aq9B3AM{EpPr_{)f28#N=2clWA#J(a1oqnOOaYJFzjD<4M*= zl_U?<Wj+X%$cYu(4!@VC;{l4WhLKwrf4ZLW6H3Qe;z>~h)Vdtm92HTXD4IiW_3DDL zf=`A&1<Tc~$lTeK-Qb5N{iV8wkC2UyI?#KQ?Cehbjt;(+tfDh_NV?H>Db467Hi(8X zt)>BLxY}-$o<2*DzV~hanehoLOjr1B{<E2dBRHOGQ-z;PFZ2`h^D4MClXE;jvH85* 
zRr05PK}5c2nvCyTWBrzcx%w`?GpqeyQtz;FfnYxCS2U{(>*NkPGb(8#iPCxC8GRQn z2V#hN|5m?L*eVeD)y-@mS6?Yg_yd3G@!zu+w|TMA9pi0gf~rx#+sQKw6Y{oJ?U!TT z8V$8+@rZUZ;eZty=W!ivM*ICeT7A0HDx69C$@R073IZX8g>fq8=6WRJqZ4Ml$Ukv` zdqnrtI?bg&b8gHUBBUf~K}kufNgJqQJ~kYt->;|z)Az<V|NNP16BX*{%9N#Cn%SMt zeDbGnusj26DP3O5^49OL?5~NpeHaJD9#+dSCA#HKv;S?`P%E$odYF!<p#3$_BorvQ zdHeagKWc(|(xWC$j2q>%jDAWvmAOmvX_9E#3p%aZj}>oQMAQeZ>x0G%wV*us1?kjw z{5B0%!!yDtm}y*kk^kPZMjOuwb3;K?rtR2kDQ8KX1dYO3k@7pfcPByd@g`#BcVLtw zUczuma1B2ZT!cQw>h&l;0WJ<()N8m1Q>PhVnGWGA+7rwmPbgwiAtH8`igtDe?zt|c zvfxMTJ;4M*^rw*#>>J{un1LPO0PaBU-VXB*0PH>Q+}9(4tTy&&mRxFy_m<y4akg$5 zSRYZKI4~#yAOwL~+%kwjn6Z1%Et5d)eIA@GE&(o&9RV*^hG?ck>!t?XD%#(*1JEk4 zDP|)FD4a1!_z4s<%4J|_fjVib_c#%dxLDA4fzV<rgEH5H>9V}r!5`Dz|HVB?s%iCO z)aa?Y=qApPz+CO-?>2Fo72Sbf@U`h;U!*Pt;qrJt3_56HT%V8$n?u|Bdg)d&+iu_S zSr}dX{YBM{b8JpkdgQJgOp{XdYeMS#YB$i<H@=L;V(s?`VM>Q;b)D(Hk8oSI{6;bT z<0YPUsk_8j?KIw}i$jH&FnMRymgm#p3P}KXbg*=}qkeU;!#^+Xv~whoEF4pY{-CQc zrZPL-A5Wb5f6`(tr+jbKo^o))Rg|XdeiO1M48MRtPT{Xip|)l`j}FW-GlfYj!-Fjb zVk2DSP%YhU=)m>3IG;?rUMw!u#Vcf5jLKJ})GIV+K-^5@N!r9?{_5MA6*dZrd6umw z<K;rdo2}mSMu*5nU9Twd&CH!LpVfBEt`~kjm9Q{uwv{Lf^z7V>Rl@2$Q0sC@Ltg~W zdy1a4<eGi99X5khH<21wwjFkS{C2Ua2iRoa1|n0K-B^x1gCkQq?2*h5bwvh=!?1Ck ziFNF_d<E<6Q4xyNY&HeaR)#Kbl0@r4%6YMxU*Sp+hT;c`_#$0G{%&#hR}wukAsaKU zhq(cnJu;2o+FwW;Mm$X4nCWC+(jw93<PXT~H?RLLg|X*YkCcW{$}LaKKM=~^FUf;= z^4_KVHs~(3L;ySz!57qZJ(~FvjT6i7CJ=}8ie7f3advipMGnvke*8hwixwi%HQX$$ z-zs=UrC!xqk0+zNa^T<zKW4B$t`>5Gu)WeG_=W04^+rXACKUTt*w+jGFjkHHnHuAo zAi!h_ALtR|QAO14J8@IYK5}<guwsKhJd=6yB%KgV!{u0_xFy1Q?7!VPV?C}@Jv|mF zG;lCX>5*}q$tbZIQq1T1fm#1e^g?&%lb)_xB*d3Yz*j%L5uV7xifap9T^5=_MRz{S zXM5v$UuR`9tVv>(O_+uk>FbNnDywOIw-gPYdT(W1AMgD(YlrLSQ?gX$B3;h?5J7e~ zi$)J>(bvV_YpF=>YZ5;fP3s;<Bcdo+D4O=3O25g4bcO@(zJ2?)5TxwtTRrg_eb|JE z;I>hFBJ^y43;ws644xCKOy^51KO;Az2WBJ{9QZjGf=xk2jx9H-*FrffqN~q0LzmiJ z_NTXL;2R}oXXI3P3cU9?^MSgjQ<tNxZ>T)ESiFztMF94xX?v<3qU+n~F&9Y{k~k@I z@w*@w@v_G;6ZW5A3W{|;P-@E3k4EqHUH>~>2^XUII)kA(p^pn&rnYi18!rDa@I8<3 
zBEB>Bcz?It+jFT+&A|oL<vlYWn!but_NUFD%G<j-9X0^_$*OFW+5uT#^ASb5nr!fM z+sOV7^IU3qoEytaN~d8Bv&rBw|9fb?61_BUHg4DG**W{AQE1Y<S~~itf}A5}lU1A+ zKjsHX8E!oY*ktEiR(~_4LH6^XS}U5_aiGt=O8!4$WnYs9#((Z02VXqnQDgra_?Nb{ zh|2j(B6uof-q*@>mRv5Z^egE7%R3>@(P}vfSV6ix%d4xHt?@j*U?Cx6D8nZW`woPT z{<8~^ub;qV|4&Z^gXOwZvocUpy0KCgcT{HOH{Lq3x`{epD9FeW3u^|KNs4$DT=XUV zT~^a(LO!eU79gG<u<=VIt0JJ$Qt8UX36?xHG_mc2r1P;x?=lm3S<jPfEsuB+s6Wm0 zM(Nu4nMHNDe$cUR>p3xuSI)}!w~qt!y^1-OdY2kXr`qnZ)At(Wd8(O^i5m<4ZB%#k zGC|2TBfVP<rB*FwsQNQ$=4E&9uXp#)Z&u>KFoDnO?nU>d#t3=;dWmo#6*tQ3AAU}m z7D}yxgQZQ?70F0Pb81pUZ|)vGJ2E1^r*ZIiYVq=T*xbHT&s+uS&7(^DK91rGv%}iC zN6H!gR4}xz=dtTWfa4p70+X~&>23FS5o^*PTm5;hjg4~R$&ioCX-l`AuSedSS=Ifx z5ip$9*G(te_9uaLmW-%bj@7WWt`nY|w?ZHce6dWeCbSy48;eyH6F2PYH-t|g{2Eh{ zJo6lWYkPCg=Gil`MsBy$i_GQ0Gk(C6jzoC&F-@VQ*m|vx&J$wK%yV#FTDW_3CZpec z!C4nqSoG(i+~y`vOE(!QLPDMxrbku-IL{{I37?a)M6z^>n|i+%AvN;h300UI=_TYl ze7~>c+~G~XalAk7mhr7gcB4PIRzO@rxKnc6;+x&X^laq8Ax@yUCa<wMV_z?WA@}F{ zK=D<Pa?b7!WcrXqUd*~umRHBMyV$F(joeCNS4KpD180i%SGcdl^%iDdb7|E>)SbGo zC`<hiSrRwlw|=khXFw|w0(7WYRbJ+<owtgP*Ot=E@<LaZ%5!mQLK4r}p@-=>s+Ywe zZ<Y|!5SzT%g7yzYabA%#>$0sG%9^3xGIdfhA=j(hdG_~`^qt*;h}~qcSfVB6BjfZ3 zX&*Od)GH+iXp+MgMx4X5U-m>=h|~=fE?$AKrGTk)-xFS)f&3HS)i<mMmnzNslrLAC z2cN-5d#qFz&aB8(*sK!zt4dXygYKo`>4t&M8?qmfM3OsiJYL@RSRTnEvTvl9%sy`A zkS+`f(U<IE{Sg{LT&1KPjQH*vll3-^>2ihv-qKsbd~UKLU{7sUU8g8pq7GAcdzYH_ zo~s4DE{0Skq>nxLvzQlXa?^KtHt5!2`8R@ovqSRJmvqOtp$Lt(^R98YkYLtL1^_UP zgdd4n_g33%CowU9+x4+zUL)7=;&HJIZ96o6H<CvpVom3=ds%c8)BeKUVIiiL8K(BG z=Y3HzNx<{7%8VIv{>T-XHd!1JpEj}CqNE>vY?~;mPJ!d&PTrG?X2R$8t?wJbcfa*% zanS5K@KGsZzGHZcvh+Q!-|Xb<eB-cfUuLeE1aEllY5JUN^ts~XcoehGmx$pa`Dc04 zwGcWyv*-7NyBywAPtO~GnQ~l(JG^MDQEU_;AhQY{S>DRKkM3H4Qxnzh7LQCsRcEZQ zf;ASd>mysT<|W&n?R+NetEykfZ}%13OkZ{6Z+&YcVq>_HpK|OqbSjx$X0lyZ?@31d zxst@D`(x<mkl@0^AD=$$;Cn8qlY@m~0%`YnR{Jsy#V@`290j(Pqp-8?x2`aQ#V)X> z*6o7a@Tr1k30E0tex;f2zs=W{r1H$1!+IhMa)^wA+BEC1i*h%@P{=t0`*(4;2psYU zhpFaGZWdVnZ<6_&YR)d9b(0<6jtCRUf7?1<OgKPa-od>TgUv(g(b&IZ15Dect89VM 
zbZBit$$9CwxZzS=KQT{qFSIyKAd0EZ;LdY=ByaFWYYna79>v`6{l0Y>zx}7P<EWWG zqf6`9&}rO1KfyedA9xl8567wACN$L(YE~57?H~|$^gkVk@ltoXu|X&erMBM=zU{~P zHA_2;We!eZusD^_BKqz7y9CQDi&0Mx$@5c1=O=s}dVyM*k84Rci?jy~w3&YBe3qo8 z94-;jdA$6zP?M?8v#zB|$)LzBtg)r8+vi+JL%DG3V9x62x46LStxn#s-dM#w4xX3E z*)7*?M}Z9Zoq_Q>1?U_@A;SvY-8e#~5KtIsRbzPOqOMMrk}+v|8JhBdHs7RvImVF8 zG1gphJS_jSdFNdv_~SM`yG)lriEKfabp8X<fLBgu^9s(P%rpjO0)!%?LaP=DK@(#P z>JSF!@h1N9sX@G2o;bmn>;Oxq)zN8MO^WZb^`=Ea8gc#j(Pulbo%rTqP<E&IhKg~M z#|Clg;on%EOxM=IOc)m@qqdx%3$$+GnDt`xY;<Dfu*JC9@8;9NHF0#ojeTI`<>zd% zEc6+FcpLNj$&GNABoF@Pv&a4%5^17eUJB`g@|3l&N!rG?C#MH6v-BwVaPC#P0%u?Z zUW$%3l`?xY=OsU^B?2LtsQQHw@&<71AYi;dZEikeTE%9_0H4$=_O`gozR@p*wvBgZ zl>DxCl@jtpHP3(zQl%_$55GVz-l!^NXe`oV(`myM)F-Tnh#S9i!MPijOjR`lsiqD- z?!Ix42`r=lQtlF7-H|d?u+=>`<5RFSwISk+zoVl%qx#+Oo{kKU%@dCBiU@}6ojBTG zLzh(Ln|)VmR0d6Tl~cdt)m1Tj?cB)uZ!)r1Zld$zUMy`BxVwL-*4Sca^x)a`Qe}f- zY;?#*Nkk^amd8Td_K;#Q^6p?;@1AR5{$enT_=c>F>HD4)R#uirm!L4Nr_Z9Jv)-Vx zM@B{pxMJ|Y9ufX;+YMxvqxr|J!S$IRIrzIIG8{xlXK|_=C#_BG7UL5L`2k^^k}q>Z zzUu{)gY~K5*49Ez0I>@j3O6+s6t9Lzt2H^0u*t>Yt>iKLOcAsgwks-}v-ZmMozQhj z1-zgqcyehZeQ9eYF?Z9`&C<xMH;GBeh2byB&h>mS@<xe_HhDbo1oN%m$~uTDj1*sF z(4~>H4rPh?Wadc{X1=2+GAM+(c8qq5!nl#B>zyn5Xv_k6e<z0ZPsYb(OklOoH4e;< zbD4Pca*y0RH~TnrHODg0EOwoZ;A%3FOL3nGs@b*6^M7^q-O+G$ZTk|57F~#_!5Gn^ z2BRhjhCz&8qL&bTFbEPQ7~PDTQ4%4#sH25MH+mZ}O7vc%_xwE1_x|3szHhJV?DN;z z`>eCpIrrZ8eO=cf8YI^Hvo(KlLVO@2{kL-}Mfav^(xA{ob?7*)DfZ0e_EPCJ<*snA zc$I)NL@T}P*#nU_=7XD+jg;rJ_cTP7bHV-l3XU-AciLYDVAed6Lne8pGc)+kXI7uD zX9|eyRLOgo$T(pNgR3*wM0DTvic;g^XwBB{oM&%85U=4DMQz=TrOY}Oa^s;;Xqu-O zHp(jumhh0pkAeL}ftmw;1cdM4+szh{Ic)c8PYBq1nR8uP{#slrJdTWgZ0$z<oH>;s z3kyK~-ZD3R*Vgu-)L!t7$@u#|h`YK781SRL7EokQQ|P`!?eS@=0`8YghQHawzB*Yn zpLt_~gPr2aE9tp4+$QM90+c9p)B9XRka|=?N=TqxktxBA_z{rYMRI2;%p-P|Ls^!} z7O629OsH2k6PnsVaI+bcUqy|bm1=6jq!5c8@Y>qJ)y&e^<OnaG(81+KcriRzd`DaY z$dq}7J2+K`CQnre5Zki?umGv>#ESZOX3zHiDn4t!=evq8vx15x-Zq#nZQ=z);?^Q# zI<NPY;KWwb0=iO}tt$3GV9?5K1DIXe)pkTv_#lYgFrO+m#x0OMxBqEjSK0eXZvl>b 
zrY_HTdR<k#Z_hNBT%Mf?E!tgfgawgZ-Y>jBS6;<V=)WtphzfHLIyU`5Oo2jE)w6R< zXJkwWX8=#N@W2FbXfC?)$S<HG#<J4y+{h`w<=fV4yj-PZ1w-k*qe|!bb9}&s<Jn)< zH=1vjw&32Bu1&aM{K>oh#(-2uQf!aj9;0-=Cf<v_vH0{G!*6l?7t!>5it_7=z;865 zJQ`5DmS3u%u8pu~C?Nqv^r@FKyi)`N%NQ9KtV&mRAFwW}V>0gk^Gk3WeN%rZ{66{x zZHre-Z1Lnq!hx&0$MR=|U5MW`rH+rmc5cy5c4%I8ew0AEzNb|v$&9H18iIKpbvb3d zEPP$^uJOq0W_{)A$i^^}Pv;h8yCXI>cEkwIp{1p0_y|ijm9(lGU3JByf<$6;BYZN@ zQ3JlCnjc-u=n*b|PM>gTPD|?BJ0c-FHR-k+W?!wjLV7`2TEJDP29JeXIg<;E%iv&b zoLFOpnP@K;Z9_4HE^Dx2MHEQO_o&@NDywWKJk)c}13)VYn#!slfK_@KvPmo`X-D|H z$~dj%&#U=hoJzrnYjCnQ)er9#aXz|Y!0GPC*B@4o<P^OKK*TR&RNo=@0>2$8^vw@n zX&(!NcEul@__5>MP8H7mubRQ&%ePBeS;?G%D>U=b>GuRs>@1NAu~f?0s|wEC>rys! z&!baKUgCZXxk%Q{L<+jh#_9L-%AaB7K)iHAtsv_a#a=*X`tzRf{UF_CQqO-c_z1z( zQ#7e8eZb!xDwsqh+*nhPAq>E5^2PRCxOvyDWPRPJk9u9PHMKlh!X4g@?A1k9kw_K} zFo~f~X!Nf}IPP<*Qtvyqyv+>uANrS<yFA~D{r^tgX#D;a`u(>{D4_HGBi!b+A>1bW zFPs(`yNcJL`cxjmLA9cm=PpO74~QtAoJw&d2HcdsriPw&lK-soIe81zlHHW?YfSTb z>LXiv{bLHZmN<2E<ej)~&c=3!1jzQE#_0Q>X4=-))~Xs25n;@Bk3H_9UD+Q|Mc&mX zcdI__(E+7ofo{Y`4jZ-qW!#FTh{EXRPA8|pt$q7BwpQ}~dmD?5VQh;eqi=owx&KFu z*0%|sMWaJQ$#2=bhls2@u>K|!9+yF6S=+FLv5p;jM@G88KVgT_5qjngJhI4q5D_@Y zpqh?^sisizPi`F5tGqDgd9vTwvlK$`Y!xBukb#a6vLUw|sg)iW&|*3@)%LG+G<z5r zq8v%*F5GcZ?FU{KBD@XJUIGH2)f@uii1%gUo+ES<`c-=4XhuK0=^EM!Y#6Jui4zg< zXqIYZJX*93h$kgYRtqp7$*imad|S6n)aUf^lDkoOAD@}7*|Y1mVCDTAbK6>4OWwiG zURe~MEAmj0*hrWhYTs}9<0fs+6$Vy^?#liiOWS~2E3zEu@IIXZuK2y5!e`R#D)(LT z5be#G;Bo{IDJ65&lFvW3J|w`ZcBKB;j<)%H2&$*+AO1>n^5xvzbwte8r05iVXU$8_ zlj_N@(G)RQI@PplPtB^q4FTkHxW?L%>6&l1_Wt}@0Oi+-ieVIKf;%hw5IjP3d4M30 zXwKfp>T5t{^5eriUOq@DVvTea-MDm{J>O_fJdYP#wFc>y>k74>UfinikmhR;4`5!I z2-`W6UxkBeDi)|pe@)cnXpdTJsGqPymhG~YT0WX&6RRIyWoOp*11a86I6;`+rVF1m zfT+6<<D}~9CRQszeU1aR?8(r55!p6)k_fe==3jGmI>dRtf`!VI+EP$s{rU0sO-v0g ztc1B>CRM_xK$t0Ev-F=6DAxC#ePY7)KH2YI^WZn(zG@ql${RfUe=8Mo<|^J7#Aw)c zWEIW~50frrr-x7?!Q$tRFHAWe&(Ts@9Z}AA5kDfbB-h$rY-0(O{R{d2FY(}pdk`lz ziTx+riUiw}6Ww|5`ZjNU=)_R!pfi2kqm)X>2AdUE*T$|Sj9}zt{-GwyQO<J^$tmma 
z<Ky4-*J5^~7jn%W|E~7rx@qR>`$fay`e?pZrnJ}pI{Czk623f$$V|IKDWgx0C4A}8 z(sJebi!H!!f7CUz(QA9_%&qxi_rLf-?aHygl(H1&OzsizvM*DrFJUrx=<^&c{m$%q zSJ8_BkF*Uk9tE5mX@q2`^Au~0$gZw8{jfP<5te$i1PRobhtf~Oe_mbB=C^eX?;Tx& z9OEt}2am316y8<!;#c-}H{;El{Eu#&4$Pb0X^(CmUMXmg{{K#)>)`T-#nM07p8tza zoC<oB%5k3d#GJk$P911r*eLA_0}DtX*R=oag8|s$Q@!-vl}%0^c8>J;i~^mJQ-2TE z>8QwsCrc&b%&};>&jsqTT^~Xw4CT!#V%%F@pkvNoan1;5ekxsu>6OFpJ+z(@e=rSz zn6X5E)+5B%7|E1XMzN6Z4rA%BRP0d8Rg(j*)BS3i$2FwX-yUL%Gt-?i>Q|4_4dI4h zF6(wd<sADgz4IPu((`l<9iA|hs4!n+=5IsGPa}LgJpq&vWb>>E{hE_jvDRGrqP0?K zQV6b&zQ{I_h75Kdzzmx{{oVGbz2xoD&=$nzcCT1U7F+Qwx!2y&_SfQ9-oO3>uPq!O zBSlN78{8`?DR&>Lsq{Qe=GFk1=asVN??ZgPQju%vAJ<r1BSai>-$zeJzyw+JI7JYW zxO8Jj;Uar^dGSBbmDikDwPC&pM^qE82?eJrA1}=ChN~nkg-uFIFft!~{S~EU<OeyS zSS@n<Ry$myz|WkScXqg}l)&ZD#lmuP{0r)sDk=Otg;$e>yhn4h{J_~6dr*_Bck5*E zfxxNKT)zUh%d?pf$}aQFEFo%@r>{uOJzi3ef@FY^zhIm`Q%cgSqau&Iq=}Vsx(B)Z zHF*5LbH;R$ZpPK*cH(t@tDk^B&Fz{sv@M;!ET8O`FgxtYboP)+UHw%VL9NmjC-u(@ z^e;Fb+Pl3DZ`j)88$r*VnufiREjWRLXJ&JscL%-Hk^+tqgq=knUUMwl%syD}JJ6=p zkofg;a;teaCfR!^OHPEASm~3OG+&In6cns;{NiC8-KfhOFs3JN->^ZG_^!qz-olUE zXoBU_`+{wd=tzaj3V!9lb!TR*tRMI5OMON7Ho9ovuS7Xdkr?iy4a?a|vmq^{m4NU} z%!C~>$#?PaH8OW=_dk1?ICi+NLAYW6lBp%0aE&p}S3Wd%N7?~-R;>~9Jks)FQgU|( zo3Sdx_QNVXT^JU066(n&v=oVMsMcl~+N%^%qG#0hNh!}_qly#mDm?lDQ&r72^$e5O zw!x3j8u&1F(#+_NwFgQF6)DQFesn!7zcOfTH^H<f<S9ySyd#9y+1Qkt!3*X8l5J1q zu`Fa%L_IBLNHQ^~$%zStVX;+B7Gh3yGJ`0o3-=fsYCMUb>;~a6d|qUkCn>Yjk^S@C zxKRK1JKRM^P6H@vPb!WAn2dTh7u_c~0Wl2;wSSxgjdLT319<~aN4$`Dr*N9%h`Jwh z;X&o=uS9+)cC-0!fAF(jcM*~cw@z4|8nqGMrW&yuAn$7*UYBRA#V(!FTz&hDEb{+t z#T5Y?-l@U2RD@ZDCXP@V&REvT#pGT^Elo_!8P?h5-|E->;~w7ukoP)nBIY0@^b0Mw z^`;6QXMwN4Tc;d8P_J<RoVFp$SfkwdE&rF4(s&ZlDR6w3`;EZ}OI3|v1zr2ErQdjC ztMG)ZoTM)`o>sjKXNhgFs!%Nckln6OaWaKRwd-vJhiF+zxl!x5MctXO-@<I>zW5<i zn^_%#TE8yD<CBtPS}Kj&YRzf)uof%>Z4o?y6pxL>SIu3kXj`{P`ig8Es;85thXaHc zL8~<Qkker8dv!a9tn<>|1aT`1X<pA5EBX&aUSamCr0qX3SV4=be>`ieEw$!+>`HAZ zvQoTA6I?-J%7l)r$YD?|-Q}_^P|f32paK6Ht06rb@r3X+Ib<((ibQoE>LGpX=7>wd 
zUFpGcnjh|4rtb&sbrqZlUP0XEj~4ph!YgW7m`tl!Tsj4Q(Uj(LR?ffr!Z(7HR4Axm zM+0y)iZ3*tQ10p?HdqvisSSYaHHlY34D&qes*+rd<nEx#kH&mcochRusFdd~D2oYS z<D;ulP`pm`jgn5!pGM{W&G))Z1r5}kVq&yQj+VFkrbLQLY>^~Mo<lwqVVMl6j%pKR z-mk<};wk8r9dpE(xk81ePOhQWWW;UrSL}_rUndUX4e;$Jrn~hVMDza>j>0c?zc<}A zJ?Qf-e2MV%6u3NQ?>M@`Y)o6Jq<!4k9asBp>ktT@MpUD+R%HYu?Z;hxKlB+t&!e+# z%e#=6GU{TzVJz*I*aRN735q>+-3SV%+4Xt7HUs<tmuoz7!Tcp4Ve?@B31E|oKYP@i zi(H-XRAlpDpte~v6wQKms|V*6ebfulVMz>NAy8v)<})7|VV!$2mCgA<eVF?$mbPSO zX0V{^yFlOAN7@%r8C{e_FUaNSoM@=W3BwAgcPLj09Mcz2QV6z(etm<Yu>R5h@9~8F z!-ERhn&K7dIWSG}5fzn>psydjBq#;|aYMEbS5wR-@_5yu=$nB2UpCI=2wVyoq*@>5 zs8qa~USf#p7BemxqxnXb_UTw%@e%1mTq@!4PH1HLR%>|BmuEp#!@j)=uI4!s5nZqG zE|@Wqi6M3eyo<nbuk{^fIwJO9+50LRicY#^LU-m7MAtrJmLm$W2Dv7JeBm2udhyUa zV|{3O8#(_yzL7ljqM~GoX;_QCepG8QIio;Pk-k2CvT(XCnLe+T`c_7EX&X5z;to7a zJz1Yu-~&R#C5f-&10PghLp_-rT2z#msHd+7EmYTAZI~=}ljtesDO{5Io@q+V^IJw< z13Sr#VGoTnxEAUU#c!JI1)b<+n%_FpqS;jXqq_0@g614saYDs|+pj&9kBA;w2Asw# zHnZjV;;qzb$vTX5=ugwd2oK4S`h1Zq=Tv<f4BXLLr9I!05~;?s$eU+u)(2o8&(X~V z;FF}aQ<;RhNVeCmzDPPxZ=~{trh4pcnjINz5<Qv7xBSA4#_nQaB`A`YPi3e3OaCI> zQroVNoKWqXcRB|b8zup*1;H77PAYbX<EC`P%BvtXplQ^rt@qo@mvqe#NxDP?qGH9> zP7{$e57exsidv>30}`;GHg}q#*?3njk1QjUkw;Q)bofB8w7LRKFk1_VYbDbJ5Sx1> zLh4{)nEAu+2!QmIhfNn~!{7ti3I2fiGNeUCMSaC>aajuOx;L8|h>mb(y7oB&>ls?l z=X<B7ZPUu5EqkCEx&D!SG!<ftWpILl#8%9vk@Q9aOnL{w_Mm?%Sm_rXdH}X{&MlG` zqE8&MI4!IF(2n7qMJ`eG2&))2xYgTagcg67-+%<2ta-~H3?cgbd{kB#RJtp8;Kt|H zMLjU#VXyBfsY`Mr4%AHg{mGj}T*P32(h-m)Bv@+S8RuLN0*-T6I=4hftl>l;y7Y30 zDmuS#(^{D02j?&9_^|mDw3O9~V}#^fA|B=KzSKrD&}&xxSkKWyQqVz^TG5^xtPVJ! 
zX@vum5{#)-fsexaOgKmUqQ(J9oJp+p>2$Y9#OIP5Lqv+*g)3D=TuI`zRE(8-5);++ zct|uMQ!p)|Hj_T4hVd%8;wJ|UQ+(y~65ft$i8E8^FMHc|Wh*8y-Kv%}@CUgyE%?NK zv1{q_GM8qt6K30WN^Sfqu~AdBmb@5i%dL2qXD(CM^UI{=sBI5w1rEHrGGqg7zaOaa zNC|x&X0uXD3-*1Y!x?P5FAClW#ESB?tT<q$)5J%Ld-+o1d={a~24>SNA96;)F>Ieu z%%<ipv~-nz#Aqtwe9erTkfhSJBEq0?l7_|r1wUDGe*P3mgKCbn)1kIhSJM<P!a^i` zlbbjBk_-kIr~mlR7-P;Sz&tPLbrYpvNaZ$fU5AefPVV^M53_N4NDU5*&7(0J_cf!I z(&)V6s!E!Kkpb5-ZUYqMYjp#9^>4XHRl%W+dL>#d+=GLGe=SD4AeFt;Si66ma*T~c zsY&DoxazlF?~c#r+NJDh*MzU{m+)C*^zByWq3#EPh&TqjsR#Z&D=QWBT5*u{U;KY2 z@l}&ZE>uQR9w{$sQ^Js<KZ|ucP0CxZnE#`eXs^L6dZ^A@cX|T0GQ92Oyhc|M!Lp5y zh|266_-b&5fM*5nNG`=7w`n?^_HmjLLuhIz&JQfbVF_c;_UIzuYne)r^2l2Lrj+fc z06)j}-b`%9PFu3MDK@6(K4az+UjxaElPR-ka;IX)j9pk%Kdz@HxFG7jXW5974&hHN z*4aB7q!Y>}JyE9BV~L|$K(iglYJg{L%DydTLXOKhjgger57cfCsBc-isDgIJLo*mH zt-%Ih5r?~qDPSNy9bY1^tsr-9^xe=OKj;|S6J*&Gon^^K)<NRN+<wxMjtd*z^HnG+ zB1TK*=R6FPFn$qa{~Hq<X)hNKpm%B709vAv>2+kw{S*2$SG(}oNqx)q&NsAXW*Vy2 z2K1VIDp70v%HQsonP8U6&{(AAc9T;i{vpO#baI)dQq(S;OWm?Wv~U;eJeCu+XI^IG z^AZtU6;+dCMb7u?2~aRM#(Ex5%-xZeR)Au(+;boOoUh{2_(#Kjzt6Z>VCtR$L_!n4 zS1suXZlp+u8*;S7at(f*QI%0l%S#u<mzqKf5tbNZMO=;jLkRho&O!rAON5@jR;+N3 z4+y%3)q~cTJ$b+&`u3??eT$^e%Pr38t;Jt6Q__WASLY+9m5O?;m2xV&S-L$3SoJeI zO`nv~*?iXQ&g*<*lAxL(JGu!&q?gUe%E4Q9hTZPh=f2G=uXNEP(&E}f;M1=d-zIa3 z!SVQhiMJHzY-(DLQK=&*xQqD0KejGmtgllE4kGe@EvA0GX8UGg`j1)N+u5Im?Pqaf z&nt=6F`*;?47DKNv?4rU)QmRRKzJpg@LZm!KJl<RTUWr#m;FAFV&-?mL=eddTe7s; zmR{mk=4yoVlKHg58!xTRwJWrm^EjtXil@*M<H;GdrSJ7F992dyoa3v1j*rhd_9M7V zaaF4zaeoRK(Nh1CX2&zNW8snLt=b(}AZgnOL^`lXgZKeexf3ibVUjt5dU8$@CoF2- zb{_Iswdu48yNgOzOGQz#QPSg*U*{i4-RSH3j<`tt*g_X#vNreZqN41VGn9dB%P(xu z==hIQE@itxf)|V%J4oi|x?7;nXUkv^8MoUo3OhG^p)<Enr#U5QXrGlyRX;Q;SBjYx zRc*GIu|*&}Yy+cIy?mvc$M%9k`P9G9Y7F`4I($Bnx6|4ZPP?J_LIB#ekF&x3>+xXg z+yExMgDf8Sr0ebk))q0B=SZB>QLJGrQ@*c<e5<`+C4QM`$G~A-WE6crF<8Hcku2m+ z;O(necA61$cpyUX*asj%;$!(ZY}w2)+D*X5zTUIX5oD%p63|ltuxhPDiJ`~iCku+T zbq^Ids(vcx4}i+w6br~$^c&U9t>M42=gw^v1dX|^W2SsJoo!>7{2%%uRb6od3l-C) 
z)5%}8P%5h`bT15gHVRgy)hrD^t?dP^D#g{fnI8qj^)+dIBRs+ajJ1jAifP$+*Mc^F z?HU5++deNwyI^GXbw+aV5ycGi>#)GY_vkMuv#*=c!u7I=;Y`*F%nwv3VJ@Xb>GH|^ zl%3E*vl0*%&HsSrtlUA8c4VRxC}J-XH3ftgni@Gl@E(y<K$wT1V2rg5gY~>l?;Ris z3~B|F6sY91O%M|$vb@vDb5r-DU6mRn{+1}eJL^^nh!~QKbE`MzLSdn}K^b59N^enD z{*XsLh^3u)?!dtTv{HDyasSH7$~53Nt)D0&6;*F6+5<@S$JmgYU~EW@{OsOP)h)e^ z#io6@m(|vIXI%)yK{Al6G0yshkl4G@L>OJ`y|gcDBu(cnVUVpfv*@~~_4Z3&Rxpz; zZGtz<3rsPLoe+z(pS-TqZWRq=xvl#V{r6$+eS!qSm~$~#K)f|S^Hf-;AZD7aBlJ5< z_s?#(m%i)Uvu6L8v8~zr+zGek`0z<g1Q&*#DEi<<N6$}ON(vs@LUIvxc9v9QOhiAc z-$*GwdMY1_w)8nVaONdoy7M$RTvT~X8reEc?jN(AMjy!&cxgq1WX#0e8Z=OAPc){w z1=-lZby%wx85r9=grX?OU0ltZ9m8f1lSu4o&7HJbq0PcA%B(FS6JWoSAP2*2xQ)2n z)HCNOn&`|Shq5Z#*3tKG?nGE|mwhYlV)pgLX27!mTeEWRfBHITW%9wKdWrh{Xp%yJ zU3=~p63lbwuvjIV+hFkd2Hc-9&Cp>P9HRZeE6`Piq#<QccTu^R6RK7vBq`Y*HF!hJ z=rtUsg`%)@cNi(3Mw6}U39xnECMT`4VZiy7Uxx9FWQ9^$ym4q>%D`7nfRXnoIIp0g z&&eU1F%fV8y?}O5+-|WrY{L=cT|0_f?FBi=6YC{cVn9t#)|rPM0@Hw^0In&=_03jj zQr_qCSWSd6FLUY>=Ua~?ZGp;g-Ajeq+k*b%`lV{6?(Nf$iL{t8{#25@c1eIy6=TO< zh+W3bj^OtCO)8KtnY^1dJ?#x8s5=`_Ms)GDd`@qJye}xbZ2q9OH}?8B`uF@lhMs44 zV_K$^>xJ^klt*-RGZq#@KdH%(-ORxGTGwyDLtL-mz5Aa9B-n@AsG)jEVG#8BNt3#I zD7iOdgp)qR{(GweQ%yd`p~K2~@5}1LlA%}bx>Y|nwFxN|IG-GnZMlh<yL!1v-0;4= zZq&#ZpuPAkTHLZQ-tXr$1oAAlVu66<7VkF4GVj;E42V@M5yzyL?!#OvOuyMdcfXa3 z@R!w$s^?xEJVw@tPamr3%gvLWdXn3uCDAveU1`$D$jg65O!>x(F?W}wyX_~T)g-n2 zb-wOpxv7tr=t|p@fjFK)3NQ-66RAiy(0;;+sIO$52BGhFsWcv}I;eP{MJr7mj1Wu9 zU5FWM22)rc(X$2C&!Wd@iI3ctS%vE0m!i%^&Y5fxT&etTGbu-)t)5cOGy45_@qyAk z8|s&BC@c>@P$1)CRoJBRRWfiKf|r;Ah2Wm_sh7>XV$~UJs`*k69s$#Zvv@7P$xN>= z!AJ3O<|1o58ZBEYF)(irL+ohD;ngG6+E)_Gt(s3k1D=VApH{vqp?8v)@3ZtN!rYhp z-`ML{M#X83);c0$w6vII(=Dw??;*Hkoo$^m1lWIVp1h-SW3ua-zmg7+2&B+Nh2Q!o z^Eo#43ij)h-q5;j8U3ykE`9Je>$3qR5RU0#M_s~KHav(!LB<802s*^XN3`M(et^RQ zh!G~w=I7;w?f@B`S9@k6ci_-m?)gTajbfLR(|RUJ)+|1_P|uXhXm96ani}c_KPdp| zBk$RGSZgpYxH0<eL!*SgyM$G;zHN!QNQ@1`DaNp=l5Umaq&A4$ei(R*K;~)4sx7yt z$}?L!VOiU2X7?H8np0Fq;QdCsu3)#9ZZ6y@$Pw<8uzeF=M?rDHPlo11R2teu>#qcx 
zh2~J*zU4B~=g2X?Iv-02C~;3h+%asrZIWzdFiRxbh|zZ^Hkqm;ESqrr9n9AEJEK!6 zm`@Gz-9qL>4r+JcMVt8r=d#XhRN$ubd2AX;`H09x#cfN<SJInwPRj*^O6#iMCXPN0 zqySuW$7b==jHZVR+pPy+tENX6iHQgHo1!MWg)RG)fKba*P*zw>cTl)d<Q%jyD`m&A zzj4C$vR{lnwn8}b>c|yvSSk>(o4zUOCy(j+elC6O2L6*i0IB|yC}RvTQ4JIXvk8Tc zoD!p39ZE3oKDCEg8cwf!wz;2-;bJvhG)yd?>KZ&1tP$6-MWj&h=7CoVdnLx#tGe65 zX1cd#Sg$ZU=#N(63|0u2-%~?vPt-(G@aiQ`ZVIDi3~KEOzG50Cj^UX)qaVK2G)6mP z@SJwTVSV${c%IFBBE&!6D)+vR07_U1cP~uF87bG$kpfOddo+1PD@+)Pg8NCC!a5(` zwrXl4G^}c*tet1cangv%s)OzwHwiHy3LooL?>-7GT&F3wb#`KFZI8Jdpgup|tBpuu z2`#ch4zT2_E+4-&`1008`{(H(ctK5DJVVZPM>XJ@u@fW_XnOG{#uD3Eih)G0j9>;B zJ7tYtR}>*gp9epZQ<V~qa3E6CBC(+3?Zl>uBj13zQ|*wN5>eHaW3R!HHfgu>xI4;W zQ=6zGC7G&frH;D_Eo#fbVczIqqWD7zX)bcE*IN@yGdP@yd+`V9MoSp0J@C+WsZ>zk zfVT2Wuo1mpk|U{#arn)TH%R)RfOHxioqWAfp$*4y69ZQL2y|nPqYQuL7gX3vNf|>< zQu2*M_=Jg%UH&8d%=<r5JRvR$GRJ|l4%-?+#}Bp|>W{C-KiORqn}V`k=Zgo<7aG^= zr&7=cgID-P<IWRV{>bhw?Olz;NR;vJp-a=OCb&ywqN8q-`40Nkv1^fjOn3R*>yP%n iC)MZ|5xx*1%LU7X$VV(J&F>a=)bhz_3Gft(kNzKy<^72O literal 0 HcmV?d00001 diff --git a/tests/abstract_cases/asciio/relation-edges-with-artificial-loop.asciio b/tests/abstract_cases/asciio/relation-edges-with-artificial-loop.asciio new file mode 100644 index 0000000000000000000000000000000000000000..bac81e9063d157181070286503f61836bb4f291a GIT binary patch literal 25927 zcmce+WmsIzvM@SGa0nWl-~@Mff`{OPdvJG$;1(oUVDO+}aF<~S!QCAOf;%L^f`vQe z-Fu&N?)iRvKkjpD)>KzlSFf(sQr%tMKLCgV0x|}+0^HL2j9U6EItc0j(t*dA_lvIp ztJjZ@UmyPgDS%nVBeE3WCEcDc0D$y81a%zYCjhD783i&bE)p0Z5;}BQ3-}A_Pnb0B z$1hs|QsMP#7~pwO5P(7ib$+56b@GB3IvsYmdejV;Q8%f)iyjpAcU)YdSEs}_Af#N@ z9%Xg3^Hir0ZBg>JM5P{&tpwTwoW6Xj?nBZH0VqsOuAnN{VAnuOE>fin*32hSCy}P} zeqIlyDOvJlw5+XhEGf4&7&cHJ$vEXZp(Qo&3f4H-aI7>&R+ddpZ&?pvC?Yqojgd;6 z^k0@)H+iCXDNlZu&PFIQ^**c451<C@&qpm%g=;_aD&)As=7T(EsFw!H0aWW_LdW8& zh@0(Q6e^Y-wWP?H4eX_$Tr<+hwz%jq$TM&G)U@963#8I8MM~O7*iw~77yl@o44EC3 zCj3~Mr5&6V@l70oT$%)chy_6IwTqa+*1FQ9*3`bp;+rHR<C{bYd8#vby{gR?A}YPY zy@<+0z$QZoBeOI|kq<|<<>N!7mjxjs_cE2o<u_{Cmfu>5l{fmAeHKq!;<5K*`k2Qe 
zu99WD9HOlrfref{Ace+f%SWF#MF!$=dP|0q<xhuK_E|76`H$zFMBqKE@_ggSserku z-D`cI<Y@W%*S4q;wdt<zIjz@hLp-psFW){PkFYa8(qS_yPnXS?5(zM7T4@@Ye;`rT z(*&Apr6fS9IP?5G^xp{j&+P(%!qBn!9BPD6n&|6DqO@no-YH=YzFl%=QYVGcl1*-G znOPXg550sm^J^Phs}lGlA<ApcGKIaHBMIF_Xeai-Z!}U1)HF?mA?jQ##k4Cl+ad~$ zeo3y^NhO!<og5P%CVOVVY2ad@@-i^}sOwRo`umjfIMtONgSItG1-gd5d8F=a**r6S z0(0tW4|u5Ud?U%**~72ZoUi@b{QIe(V~?uZ=uLK8Gtb?sq-c??Oi(q6?_9P0MC!5C zZx&?OcR&Az-@*?@B4Edv+1-16tEsAVo4eIT4==~z^wRoII`*zTWZST-+R~Hch#cX@ z9;R=`#$-H>rNCRF@f?tsfbE99uH1W2stp#|`-D=;T%*K2=veRUMm%cl)KXO1nRCxC zo%H(J?WI&!ii|%WNH7A3B8iAJ{fFcjE!8&ht?2df!~ykqNNq!p3krhw{8(&?OHp`b zHXckgqS=D>wM)@;1_5*Xu&JJ@)Y)yb>e}c8TE#c&GE;W6uYc6s1~@jaST$EQIZl{G z(5)`x3o;9{$=7e6+Sm?z+k%g)_<raRI#-f^Agm{4s~;cx_{dvyLn`#Xt2|k2Meva2 zFs`3ldhJ*APrmB>uq4toCc7H_RMgk<mOihhxkeaIQsfDg`3hU?GWIk{G~a8ESk*aJ zumjUdlw6<$_~1T5aQVZ}yUj|;9m~;*hbonMH=*AAuQy+Rp@krF$pQd)FHb;!vhP5k zR4*DHf|na$wN2CW8ilhAiuVXwH?SJhF4Nyi<;;VMVl~MxJiHcEGkz7)sKQ)y{Rga? zjiq%C_e}G4jZfqKykYM-L<_&<+762^n0r&gbe-I$p}b@~&6bt5sj;g<EqvTS*V6QD zffJ{$B^d3G)uqNhbqH7Kr`<lQRj~KqOp&4wI8RlpLy!SaMq)W<+e^EHKJl`%JnoO0 zYz8dqj)0dEQ%$>SSQt}gA(}#QB>T(i?A%FJZhU({fR)xt>=cdaYei-*fh{%xYuFK! 
zv25_8Uc%{x>VjroszWj!mi8wep3_5^dOY#!XitWB^|g<yJSd^D5@z4xCp`SdL{+ul zxO_wdRCuVO5B6z3-6^!|5(YuYX0le*a~zk;$EpJS@S+6;-&vPTCFQPEZx?ZviR#Ys zZCCb@d!SJ+k1cQ{%sJ_$`gzlNX_i!9omP)qlX&^KD?^tXY#Yxh)rSp6r*p@u=IU20 ztaf!~TsY=;`Zr4|wbJ7j-vXVTPb&rTrsbRDqqAPD&6j0!5i&hv(l-P_Aduo?ee0Ac zqPX&OH}b+|y+1>%rXB(gnv?@9I9Rc6-gAm(J@zb`%kwJ(TI6MdJU@0*Rd!jRi(tFs z*ij$e2F=s!ZeGIqx}xUBWsXUZcpKvhWvdq7uI-6sH!6{q=l5_3C|kK6V}@bIP!<NW zYdo?eYVm8RF_E|0{$4?l$W^5>41;z#SI{0<Q~3FpTUuiEiDDYAd`NH8;W@2z)USdJ z?#I?1k=9Y__^%rje6ZrDd6)Ec@cX)a>v75MO3;OON3~^@mQyiv-{RfIOBvH3LH=Do z=*>dOx_y;Trj<L(0xwoI<_Duuo;D%}P?RofT+?U|YqCyTrj@Ia!LE7wV8v5fHRM1C zx=U*GH#KbYDU;k&>hNZj2iNd6u-cl`i1-COlRJGTGqbujS(|n?%Zs?OORsk|P`&_^ zIoMRha)A4<hPp2Wo|Tg}<jX*Q$g6nMv~d-Q>Ji~wPkV<##%FXOzpp~H9wsfQ%HyC) ze$}@R(Gt-(K6QRe%QwfXGq~HQ%!L(v(}t;f&EB&wV5cX-%G~&ea-&DBn3#+82f*S~ z2y{`2h{Q>~<c;%BeFtxb-Q(5cxzlWU;>la0&n}@6A{*n8E4=eg=BXHzOHuoCen#UU z5umXnOB*q<SB<tYJ2p6jK%XG>HCb3EP&n?)+-PqzIC_#O14kls>LQ5!Zs%>E(WJZ` zkFKxqxEv`Z_a*5eP2%_Ik5|l@r<^DpeVbp()6<Q!YI5c<DI01%n%oe7an{n)3G0V5 zo!p`>Dn?;}Bg#dlcE343JT@8-$cnYuqIIguKVL;T!!2s1B4pr}$D)<TOEFz!3Ekl* zE<682(R!nyd-%xo{^9ZE&i&r|i%~|epO1StkEU~<YAzo2YnL!*ZcU9DY^>-wVoqj? z?j@ryj{0Eh7qYUiA<@tOOyS|4p1#zUl@+b8-uI2^+<0RVPBpHA7`-r%b%ldk`HAk! 
zhw!&tID~FdnVGRrB7H(6N3LePBpZ}9_mBHX)~yKNkX~deARD2{;fGmL;N1&63fubH zfA6rat*$+zu#iWPyh>@m;iYIGZw1Fc&USV(dK{aSSF%=$4p=I7F|{ineapPOHfE7Z z@>$yZaohRQv}fz*z=V4M8fKpJhL=sA-`>+x5WeP}!VRRy*Wv<`JQEWDpkOabu%+v1 zVSJ0iufSr~W2r4A3dbVS1DZ!oTal;6`>$G9eckNa+1ouA8_4tvAAQb634`@SGa%_9 zBY=guoRep@W|3YYwagX<X-J7+nmo^-uu0n)k?&N-sKnRX*ob*KSe?%LAl`nZDi$mZ zjmfHv^?IK?pk{L7nHaf$%`rCpQX>-woj%jFvIr4P*cwGg`z^}#Tns&K-l_!lei$jC zzC3;nrongu31hT|>?+5R#|$E6??~ldJ}L<J-S^tpXNd!B((_1rK&5B6&pTp6Hk=!T zx8ys<ViI*l-4liH%u|Lr3nTS?>?6X7bcIMh)IP6ZOCXKx)mqs({&}sg7<;f}D%x>& z-jYXkyD;fM?(cO1JcwXFXu7VUN~9cbVFy+*<$iMiQ#A}zZ|i@rH4eTa7jWDe$>1<* z^9{<MIS(?muA6=!_x^T!eKl+h%}u|uW<BORLpGoN^?Zq)A=Q-ebKyJY?M5gx+4L$w zyk)p9ioShC(xX8Xj|b4II)Sl+K3@%DEsmJPWf@AFMk6bXC(ZOV#V^LFqSH&uROCcK z8rk@Pfrr{kTbQ6BqRnb{8j=9s)67AvuF`OiQ7lK1?m?ozja}TB2N~12Wl#<mCC+a| z_#MWEfsgGBxWcpIT*u^liUVGTi3sKvGF7`5s_In{#pFc%AX|K=(ftT9`NsUNKYc|p z6rCmUJ<B{4Mt`rwpv~D^(-%to!LCeRx4Haq&gi^YmcF6_=%8URUq{v$6WD&<DUjHW zcASzfRhckvMb{rPr$*bX7jmqEuT8A?YO3uuqN@SFI3emqVH7Hs6dgS>BXe{aIITxZ z@zx6bsP+UZvi<e^pUJv*t`1;F)&9R-DT6=e<cDi|Nx1m53R;dn%l8sBE9a$8dBd}L z&WLFJnty*z<Ld4cmReJO{dr8bm>iluR8PBKIfj_wq$UFbJG-7WL#aY()}|10y}aF| zg#(>+c~GOXm}|q&Yfky!Sna81<mE9Ln#=BNj0K9Q^n?8~?%&5W3$gkv(jdj?X$Dro zCo3(=Efk;P7l{(op2r6`IT*TRWMupdf=^Zu=v0w>0vVs}e*(F`pfJ#t`&m0hDQ8c3 zdA_Oc_7CgE&-u|n;x_2PDb>Y$VXgB@P*9WUx!3*CP{IA53Gg=Q^9H_?KMjwnXJ3~j z8-Cn#Hls84Jb3%RtP{w2z8C3g)WVsgYI4)SEctF=10g>2H%QJ~<2@zJhbOl3QX9y_ z6$F%$dTF5UFNb*!bV@&^*mr^5Gzt2RrC=#Ke*;E@+y5!r!Hb}|f2REEGa#-KMe|!k zOg*(SGHtCHrYytr^V>+Z-!D5}E(?f;e*SzL-D<}A(O<gOd==+;W0dq~P4&q+Z0wh# zg`sttC>vT%Ef}A$O^(BwangTbR+-&Et0@8>4xy55>n)vM)9!;j9<sl>99?eYpFGT% zfB9p|IPKX`pAYfb`5m+-2!Z_3c(Ra>iZL-hK7oa_G_Z89o?eCAZjcq^v%h@L=qI{Z zitx7_ID{gDilVCag9T*S?cM?nfl-sZ{Qy^~wo<%r#E5vZ1t3)9;3`Sd!149Ooelm- zuV~*5d@l5R`s%MJ?>+?zC5;t1?DOVpUXGa?o3Dv%$|H!^>zZH<fE&!#mo9FDB=R21 zW)3cTM(lk*5{tnWCdAJFK3B+8TaC$a<<u#lNcAq}jamTV|BYchviNm=?w9v&RP5U> zY9`L<>0Wzjfx7Lze!GGORl#t0%WT6z970#q_Zv^|3FeVc34v)udh~TYJ-8B)p5>+^ 
zD>ar|13$-(W#=s4RBYr&!L$-Fdvpe=&Z#L<lu~X9&7|SurvDQC<MK5}YmBFDir#k_ zTlT&QG~820V;gbDc*Chh(O>T>!@r;o7Gw4z9n+c#8P?c{+Ae_V@Y?sBrndVyH1uBq zhmt=iEsEZrKF@n0#EThIR?{=Leq4}Dke?vtu;r8R2UwFi>9AGWWWp8MaoIVy@zuw$ zY6R$@v}cv`bq)4n2Q$ZJ`y3378p{@Te=>Z=>!ybfCMFN>N^S*C!uP&5Yn~h**F;%# zqU3&$7s->pV<@h7*q-H6u@d`~(K1K$;8s?Dqr=skVQ6#WZDWCJ#9-guCgW96s(e7h zhm28gB_V|L%U7B{cs-A%!QmpI@Rdp^zHIiJFl5X%`Rncp7F<gT?vEtA?Ba!ySG!<= zD#HQd5FL%U_J3@f!X_nM-Gzeud86#*xVBPpFPIfi7=5>aOm`<QT-dxieeTJ!B|Kux zLyhTg@s+=r%5=t~br`<oZu9(<rNvf`ht6_P@{e7!!Tmia;QB1>l$AdQr}rC!v{zUH z*()=Z`2;n7g!}!^q36Kdtn)o@s^BN^#k}!qTC^+JIEh%i%}Oo56kn@_5pcZ-wCl%6 z<x!$<;NavjW3=QoNdQvi;Z8}Y+`9<HHuNGfR$!0}X&`QQRKYejjxPVpWE~&JJV8QM z$$|XV0H);2XZ!2_-C{|FOx85QLO+|MSnQW@9PkZ4th6|P5~TRz%1)N|lH{zZ$_e`L z@NhlfROMhWfA~-EmwrMIruw2X<@Bk-;~wznyS@ERs;s4?uxr7(qASj!j3tI!kw1O= zwvwDlkgErguY%F*mk*EccP?)2FC@+8T_gipV`GC&Q-!ZD<`319NFRhHH6^)v%^t%Q zCN2#1`X=(ico?GB<uHN;?{zODF4WZVWpD1UE;E0da?^e-qFl{&`5EN-`+ZAnw9AL2 z4?bfef4))e<I<)c)f~qKx0mgb<X3!MUZ5)E<l}slNa@Tkq#~GuaLy&I+ifNVKJUb= z4B$w=pky*4Lq|Gy-Bb+;LM&_RC937YxjV=gYUL81j|vSB_PEm9#G<h7ABZczic7=B z_(;Akb;hw>cuZ<`i&_~N!f$|)C;ih8@4jG_Hi8TVJA8^KQ#F(lUyd=kU6c5M`T0dT z4Uqn<I29w}r*-#aeX=XCO<SGyZE{3`9xi>m<Q@KoHnl*DB3gV*`5Phx7I{|Xw7(7% z<P;_P6f`<|+A22S7j#-gxk7_CvgvHJ%i|=iv6|i6y`ap3zk*iUql3sjTz_c*K<Z!N z|101^1Rw)K0C4ngzypEcB;vA>5*?!=2wxF|VmUZE3_$q%2I?m%geUYL==w|XSXn6o zLMvp4vMi%4<3GmoX#ZP)20(+=O(F$=D}n|<2>CbrpHSvMQCxVL004@$(dg3)lp<38 zrzAWk^>0~3Wa@tuh6n$GC(aY#hCFE(g`4>|W<Lq|hrpErAY_4Pl`>;NQV37wzybcn zL!p1EzbFJZ6cK=;#0}Sj-xdI{OjY6ufhW))0iLSo2`q;)t(8LllK%@y{pBDb0YU*} zjR0yuDkJ>M5EvCL2k|YVR6^j2A+%XrTZ4L#N!6`>2MDtR4Mu0NqgV<5=7Xz3X#f}< z4FM4F(*EWAe}Iq`|NWAu_CkC@&%AiG^&|S8_)iUi4*#RUX&y5;f;Ue$00pinwb{Qb z6V3&|tK%=wJT(srymApo>(w#2r2zVIa6UqX)PE%4QA7aQ-@b<omqW%MZFuzz9%bt- z0EC9XQyJ~#r2xE7h5iT7d3mV+6S22YZy>UM1z=SA+Z6x+?BW>pg6QLGM0pvdm+yC9 zwb9T~D<(^}0>3(cyZfCR{iPj4F#5Z`!X0I_a9Jfn^zHf|J}=$tPPlg}|Jdc0QZ!%m zvRx9DLda8Bpl-8vA6!T0efDU2t^?A)e?juTZ(NP(e*FxI8Ch~pJ(=leF>Ube%kzhq 
zNd?ovLQsbb@?a5@cIU~-{%kwJ7&=<kn4%K2#7~L`mTqRMrl)iJ${A%GA`ZqP-1RFQ z1r=UKFT9~ZkWV}J&WhJ<Q9DbLt{uqgs1|~ATG8BkU;;`YHRRFJQ|djXFw7zWw`wW> zBKL0o?s%H)YtZCeJ_!EOCdjt3yjThOSiVB1nn1G#_7hg!pV9Hg$4CDiCMAWbLtpLe zt5?LUMuA5VqbWrlrua&k+7LCKtm)e<EQDhKgCrgUoE7!cA-zd~#@67+xDXwO^OlE} zZgaCY&%wz_x|%bSY@{&uYx5E$XI><0r?s!4%TozW%V#SYqW%YCQ=DN%&n(oN*xZGl z1v$>jt}hJVS_B&^yJj23#8?#9C1-%nJzTtfe0;oHJWqR_Pw%gwusO(?!zvzF!KQsJ zw@T;CMp(&fd;8dGIj4y<i=((Nt1Q6z^tDu$D~`2uf8%ZoArrX(*uz0@WiVd2t(^|; z1nLs<qd=M(cXLM-R!8kQ6Rk?K`n?XmkIG5UEOItSg6OyJOtn{4;A8JBCd|ocEkj2t z8bDE$#;Ykc-j&Ool!{%4QJVAqP7^*YYt<v{gCM{3#+8w#Hn-Mt2f`hTX7Nj1{k${i ziJp^qae{pUmf2+URnwcXOsp}hQ<Hq}oU?%C@jduADJ|BI`SjRtav6}OvRa65I5B(# z$~mo%o6h2h;PH+N<I`U0`ZRC%vSt$C2mX-n>P!rz$Qk)X=0B2HgC6Uq^VfgG7$;Fx zofz=3f#<wIi|XfuwF$+vT~41agy^mKM^*ERFb5^Z1~CA|`H!*k3GS&Q6M+wT2<KsR zhZDiY^6TPLyzANY^K>oPn{u%7hF=aUxK2PpRW#8yP}48rZ<iG4?T#{d#vIMxG&*TZ z4?0;gJvRhhw!D1it79SU2aQKNw9RJ%UPptY9XUi6Zv8~%Aj6hc>^Vzgww@$Tpv>SP zX`J_qmKI`Gp6uEqa;MhC#ui>tT1PV}fEe8ejKWL}{FvwLE{;bDu><61mg^|aLMPSM zO>9ES33d>7pv?>uWGi@RML;Ci5jCK}(Q$%xoolX)YtwMp9WP)dt1D8q>pN2FT3_bU zo2usPx~4fxkU{B+A}~aV&wc3I^~|_B-nT(q;cuK^dy^{g>JP{Xu=@O8-luPFPcAFk zCl}O?+mp-cf4Q^x7lK$8I{pt1Eu+6KtN-cF`Y*p#O+M|fAASM-Nd?Q0cCKu-7EV@i zBn_zp{em$o%@rS3cMX!FLf4+d;|9=zd?|DtvA~MzI$(qN19^C^nuzrUOjwwx4X552 z0wsqX#FW$di|mfP5VcM@rF%^>B^rvx-T_;7PqSc{{Qz2UeL-AjHC^J=e0t`MFDFl~ z&8)eyI{9NdfWRxux;G&Ux+sw5)q%Vx6Gb(v{XHPcE8D=bJ31har4n{&G@+80u4{-< zE!PKgm?M6pEo|JNAy_9W!2gB0iR%`^QF>JJsYF4h_F?Y;au#VON}Os=+BkITJ5PS< zlodOSQ)L|~s@<%k<LovXC!TCvND?}ZgH17BEYrkG?(q3ToiCWh%1UR<`$}7Rz%!@| za9nk{E!D9&A@y-w_~%4+2494PPhrQJjj`a7LIBBG^`BT|giN|r+y-yj#R<}0kda6- zgZ|rJvS=~ca)}#zmXWbyvcCo1_cd&v$3$JwFHRP|JT@zc`xurN7B6|K@n`$%u9k+o z4vE%zd;Jcn<F_f*xcASJf4IAjWTgX#hCYq)6{VVWmtH2>%Pu`!4#kt`WV)7RD-GRe zH=Fapq6~;0IQKFb-U>OFi~PYBz3w?QIdoAPGgP8azs)aiu))s%5S3vEwB8*Pn^S(k z$UXg~LiannR;9QmuKi`MUZ#FSJ(Q@s#VY(&Pg~)w<c%}zmtSz-53$@_6Z`Zrwe4Zo zQ`CBpZ~d{=UFz+)UB<Cr<0Bq0o-yxIgr$8UJG}Tc4iSkG$*U3;ZL284VfROCQlLi9 
zMZLB14^Lf>V{e<(jKll)6v6#XzaQ>Ss*bKD-OuQ*pK}s00oR(Zy6LU0*}%hf!U2YD zS)PN@m{_ItMSMWZQ=`w?S57;pL&C`*MUkb<MJVH26#`aHYeDT|?c<|x5{?a+@wb$@ z%GLOuXYa<eNtXD&1U|nvMVIg8;>PNd<LJgkX;|SyTa1a*asBm4NYx_x#m=VXA!MzU zh<_C=8PZ-~!E*i4M>ObkPZfl_Wm4YWc_BAG;LmZlYTT0|hc*5KN%^X|LUf4<MZs6l z($eD-?Yj`G_@Mk1fGg=oRMfI|mF!GLU5^>c!?d{ET-SHcRz)jXc?=qP-FdNylKA~E zmvBWl#&MAR_a(O!gnn@7q8+;}3~RH>bH-V(aExTt9+P<qx)`}ti_Ay>a!Ix=vEz8F zZICM=p#m#yg)2JP2CW26kStmYWLXdD*L4h*V6c+gAjk9EYwhgJwYRi4Wa2trRic7r zAJ3A6Rlln@FH_`eDzW#lfq3a#uoyov=#ZG_+*Nr^UpY;P?3@Sf1v#g-Ij^3zjV#S} zg&5ibkef#wX`Grv$gi&VOuQC&jn1oPt%_281T*Zq3xV>O8hD7*vaYTqWzLHp{H}-E za=>;r;jC7#S2(WT<0l5I#5~@V>@5M+TOcAm9?N5(k%TLb(6)lG7Zb}FE2EE(jexD? zm{!&DQ0^2&xIqfXgGLbI#cj1q0gk}7HdfL1i)IaR1rD{d6xyy3xOwTaYXAx47qdSU zprS5$94i#N%RgJg_=z|!J$_~ZwfU~&<Fe)MypuEUcQJMjx%<PaJYA>bWTgkn_3;VN z)#ea5tl?)}c56Jtj9rZztegt~X0+NneNaBV=I`*XOK)NrC19yxgm)j!uAC!a)GN)p zHa~8;%7mseJDpy0X6yE`$k<++or&Yu_m7gNl85i#|NLCG-#ls}rCn8J+f?=X&8dG* z<A@<;%!|VMlo2D)(btLdeZ<>*WvCyt^Xif`TvR_Jr~*7Sjw7$&e#Yy+CgRpOxA`S^ zJalL@JYXTWef9gU57bZ8%+<ZW1(FTj|FW4Vixwj3C`G&)@V*gM-0pse!wjT*mP$Ft zOjye#GG`t~Sm|t{qTxX2WVZ~gBT+tIn=T5bNW7(@vRv&jc<m~SX6IXq@{yuvBM?;R z@+tewY;^3ac%IMs=Z!Y&groID-Q3H<z&ozy<rjqPY~{91EXu>_=?qrNDfK0x+(R+j zRwor9=FoSFZD$KZx#U58r9p$4d-*vNS-IEM4F_4u!zuOuz})Z?j7Z_{SQP+dWfhoM z5-m7LhpV(U&QchCnr1rZ!LM$QcJCJSFE70l007MNPa%K^4jlskAhP_+w0+2IIg=YN zW2zEJvsAlWbN)!9cn)B%JzB2CqMbFZH^VmimhCNDKOJ7o5Qpq_P0;z8i(sB0ZNuIW zs4iU}c9Lqn&rGa>FwvUmt3d6M6X~6bMGpq219zer^c;27r~>c2kIzB+vI1|s^*Q*7 z4O*5LItlr7a*C(F6^9`BGQTGSWyNGQDDfrO0_+Xui{N^(H3ATC&L$)Rw?HS5ttfhh z;LcL3^NifntMJcDX)E~RY`uXRczC${?9_l~Dd#0X9cz2pWMw&Db1czxO)wZ-T9cvL z@q0O7Q-ZX4_WFcq<;3gv{vj1M1z4r-63~?fPETini`{3y3;RC-GR@Pb;vYcx7x}dN z;Aj6g_y4zWZZZ{x(Snw)jIs02iKJQ)Pn;lc-fhgu@tpMn)w=QZ==^T`O_krkOk60M z3@)Zl3_vQP$sRWZ8B-?&@W1}SCG#Nw<WZGg*|X})OSK?^%p;cPowC;Ns-frNKVr|I z9-^yI8%d&@9s`Y=oHk`+>H}9UeO@kWl_3zant#JydcgLsRu1TvNUy?8$5G_SLm{_n zY2RVHl?04NJ)id37T2H=0Dk}mt9LGDWRPhCfYkGYflXCELfjqc%N(TF!M8Yr_*|O- 
z3iE|QZbwHa>yjY4Gf`8P_~rNdUO+y3>qoIZLm4bNA|EH?ERej9a|^O63=O<owG^qz zxaz!<#hV>RDWjiwO{MOD+?(rx&8%2au{5hf-=baYw6AxS1%5+21Z;>=X^Pq(?Q;rq z`RXn$F4{RY2Z!-esGgVgFF<M(2^Lom7K?+QQ-Hvcab=JFVLAh&eus1M4l`Spa>IhM zr|fAa+i*j03~X6Hs?uJ2Z|%%w4Uc7CPGkkT6go+TC#nHmnG3R_z--XPKJ7R~4QoUT zRg>+lp6yZAWMvf<m+gMzr7XV|nPdkp#vQB&I&KoMo450FuQZ6$(mr~7*o=>D>njr} z-;K_{>HlmhKhF5s>P6daT#{=h+^q#LYM-8jS0ma#E%*Tl1X{FINCNBf&L$_)>prIQ zl1jdA0rA83m#h0_v?mg0zH^aLPoc!{y|!je1cvm2@GVpZxfyK^QCwWlYDI(Nqhs$U z|I`S+_3jDKSQ;!1KO2v`buWGI(_2u)d!<ZEOAX-XxO@=;fDcduOsD2KX{Y8DC$L+0 zg;T3=2%KzGH?;lyB964me(7vr;60~={;xN2oZmC=)z++C+H$LmdEMkL)XKIAcrWl{ zVjTQist!+Kh814HJ%;W7@|hqsk$GIZNC5z-VrwmLFMZkpZADxyeAX|r$Q`0+Db9H( zGJ@|rCG>Zpc&T#FORhlG^FIDnSBgV~>eoxDyeO6vD<`ZUeC>QZ7swYVf*Rj?^k2cp zp=*&nfUoH79WUdB{<^VSH-4QIqP>;R(fFUwL7o`~gF~jC-Ct9$jW&cKpna2iV+T5V zPNrW}oL}C%bjYHh!ngeEI6~|AN+7|WIL!Gf_yBQ$*8u=P;S$giIL<_dnVJNtUj`Vy zv2)_^8kop8HF^jddssU%axj@MO|2L?<5d$6*f;6cm&l}3n)@Y$CF_xl_dJJezR|&8 zstOg>^?z$C(COiRvE>X?(PJZ+^@m|imNN#w4;&Z`#YgWu)e<0kMu9ege$WT6YSA3q zE0xhkwH7JB3j@HjPaz0a1#k?|w*|a3CYyXJFJRBP<2Fc4gXx>4&`tdPdo4kexU-@u zd-@BuXsZ6_6JOvmNLrd+edl{4KhG0kp}j#Yoz*8k4T}SHA93i`iQP%IanAEu{8*GY z#=*qJHleZyRJ7?G7?if#NV7b{_d52*c?uh4zt1)w<u_(@eRTb#?P1U*u#d8!jfu(O z_Eb!ChG=_>3%@h_b6!DvX2TVj$ie<{z0t|G$WVf^CQAj67XiAls-cFjS?;*@={%E8 zwwKa(@G3)vmhXEtj)5b|_WR(zJE0$-8qqe3Ux3yMgZTi18UAv6XC`ZC)+c-8T=`bh zxchsT^HmZ1)59~#Q>yV>cc*}$1Gp064K<uz?zX+JM{;3?o!@sA==|@%B2;^V0dphS z0mkDON2Po|Ig3bZYg0qeW0B*5l(abTXs&FxuGXhn6A~s@3<8K=u7T4WQ&@#-zMPSc zc9mq)%l7w__d~5m;gjwJ-G74F+s~pi#&icSJ&Z+u1;$(<oLgHXfSj6_`S}Yt44#d) z`1AP9LvH$RCMI%Se0G0N5&V$oIJ0TWZ7eHK5p#Wphlhs<KXls@<_VDsmGG$X`E`G@ z$6!)Md2{JAL3I~&y`~IR6}=Jq0f<fA24yA}gv5}{dGGB(kNQN`KQsK2tGX^*TU%qE z|0A>3Qhr$wK@IN*QfO$QwAQrLiU2&8`Ma6*fVNbvl2)IyKJE)8R21j?8;O^K|FA9t zDQ-L&Ema0VnaFV8+DEh}93n-H`#%W85yv}M0D$0VyewbMOCn&RA<1W9r1#FjAbuiW z>_&8E$xFg(!Xv-D^ID?$fAn;ObA15l(~48`t|k1%nU#oe$LOaeXb1xI)X=Kg(7NPi z9eN+Ic6)Jy5uSV`V=e2|e(XJW6&NYqM;C5Emvd$qBBzH843Qn{xCz`7E4Dim?<D)Q 
zc%3MAxo3E3ow0p1CgBJ#@o7?quNVMd1xK65+`0hpN`S9}f{ZwBML)~?UMDw6G&5Ry ze?EK1_Uc;o=bo6u&Oh??lK{jKDNLrx@T9EkE!J<h#BE!P<p*p1-Va0)8K=|%gq1i_ z5BvANxIz)tlFKaJZ)8ZAsGTRUFm}W?*3=mFS(P=2PJ-Dj#ztXtVW;n~5~_v*oQn@v zvR3=_^jkDC375P1D!~JWx<UueyLQf-*qUwi+$O5D=IpSU;v$AuvA`C`B_CpE@A!Fl z{b5>dZe0xmzp35~SsMa`cAkx%r~|TJkn}St8DylhfVE2nXvNA+aMHjU)32JL66N^W ztSayW%2&>5?xVHS>-JioE9Wmc$H^zxC9w9?ofiJKxY`ECm-;XwHZ1>Wvuc>Lj@KGx zrSa+Rr&H0PByU#lJ8|0iD>nl403X$q__RU>9aw1<u+K2}OB=D4C(Ke@ld|GhZgSvI ziEnX(XzEKkOi;&BN|Wk#Urf(;y6{YE`R{ZTeWN}z`aoga(98jDEy|(R{k@C;jQO;Q zptIDqtpM?*1=O0Lt(AbLPRir6@Mb@sqc^?k(#W_DZ^V|iwr#S+a;aQ$_f+M*iprPf zf1Vzubr6<BvT;6RR@SOE9`UD41uck-TXiJK?t0kh0Z_-q4m&ailEh3i^6Mwcg-6$} z1)rUFUh-i2bxij;*0Zshwb6510bkWNHkPu})}>ZIUFOIcthChOVI}RZOID<YPox-t z6f`0rP$q;v3<U^OFTm(mZGkwoU)l%Qh~K}Y`ZWQUcE8!u8WB8;&O54#Y+kWPqmGo8 z;38=J?HUNrjK9Lhg-#ukA`MVNeg@)Am1MN|gLJOXBoTo71Noy==W)-zc(c-Vl>M62 z^Y9H5V`~RqUi}z=osl-uc*2=C?D%PzJaazU`uqb?k^1f0+NvxL0#GGI{X1Do%uVo+ z->+fQS4koG(nxzXwBja1L6>O+tQ+rk;WhvlKuxA&r%sK+=4Pa~`5D0d?!zgYsU@s` z-G5JX;R9v&Cimt*WVCpYBEwq9ie;D*V+uMD@I=smRHC1jW+@<d{>$<FsGOoyjPHr4 zEIWjB`Dc8J;^Cd}sA;9s;q9H;5b%06r@uyG$9O+ZzWJ)|ZMz%L{ZbVFlhvuiObu?f zGp8lcanz`FxeBElyZ}mt)U&86idH)jpquRGGLW)*AATYkPPJeMuCnswuIj9mW6<xg z4@7ysweX6rtjN-lkrF}*be&xlQow9A?Veyo4v7HsL?d(&%T>-d8XS$_W*{@^R2kTn zTi381$d%CSy~TjQVw;`ToQ|?9<G(=4-x1U?Tbu0Gm6^~8EXF`-ze->plf-8w!aT__ zDr$I{AoZ*rfU1v>1)<o8hPJ1{<_y#9rek><)1>QKIql<S=v@3NHLZopJ7Ae!sq)CU zdJHX{mhLKgdtxx|^m=ZB*X#0f)s1rGbm!;pdA0ABDSha6+&y=Ic4+xJ^;7c!0A%wq zWOFTv{7%hFOonT>qoB^a@epJX{FHqD8hSad3h!y6K6bB=3-Cj{7K~YTub#mv1B6%4 z3UC39i0bH4g3z<d(eaiAseduk5COQ&hE~<>NALs4b!1#=WB{)80EJtXgW>luG-~)0 z%Jl6k(X4b$)<Cd+4XC`AxV`@*Yb*c55X0eb0K!gl)zh#3o6gFF*yX&+ctf>6px_zc z6@nB7KpioJ5KusIK0fDRi1-viG&GkA{U^a8V7dAd;$-;G&k=y>RN*8DJ<&B>glGBt zt<O4a;8~lhT$rBDSKO+F+8uNa;ETfM6~`P0gIOSmiGY_|mwwa2+A@Tbcj+jT@$!m( zd?$M-%zjN4l)S<QjH%OM=>r12gg~Hl`R!OD<e>;uL^)|5bA+x`ReZVE0F>dVWc5t! 
z(j8`HJ*0N`vYDw5?Tl{+nksX1Uo^-LwdS)b(SLDkjAd{Iq{ya(d{?4XdYyyL3=*TT zqg4WZN>SnxBDX6}F+*9lqtD!XBCl23j`LP#rkx^BDwo<slT=e3)#{2<fZzI8;37_? zDN3P#sXZhoz7&I%x6LaBLM&kTp)bXj9lkPBD19sgjz=uL(kqXgnDZ7=@ut<>7PpfU z9PdxZCy<bLAPHvr^dC3^GfMOW0H@{@B~@N{U8xFf8O*mZ{(5`nKi^c%gaO}WcE|xu znDBtwfCU1WUs>UBTGc^b*4L<kkS_S>$3@x;$`i@6*5a-5&V%gL{Ohm^(b^im=bLzq zBn3A#Qt-DBJ_F!O1EQbVK_F<D0QxWl0LK`7wRo>o#Vw)FY4wtYDw_(i>q+=|-b@}B z(@2qV=3REKO&nZ-{{j{;)lplUNQ$-P5Ra0N2QROPN}6Ssj!iJ@@!~!S+?*i-{LWgS zhet+dRi?o$3uZ`M$nZr9GU`h@53AK8@E<mApOjOLzM#_>TtNIsE?xi+3P9<)J+CCG zoyiu7M%r5@KD)1`>H7qle@3h&)*I$&_t^1UbmfEZEIg^h!tFW6JG(5L{PWmgef{?4 zT=6k4nH}<&3IaDb#rd&F4s!jB#%!J6pJkxkvGU3li?CEKpk_z=Xl1C2p`Q5>KgJQy z2y=R_OsqZ?65u-;01Z}nRC;@CMk|6UMF~J%n+TY>y1m;^3d*{Z@w&!6I`Q*7{qvip zIuEgkS*$4gv(V_!XuP(GL)OF+W%JcFwBTyY*nUBq6I0*g23uN<>7B(?=UAk$miDzr zW2XIDqA(r~m5(w5DN94$e^*5x+pZoxJDl${?vx@g3ftm{XIaO7Ej2&3(xo-Yjw5^E zv}rwKH^dU&9}t@RZTuY=E;;27V?RJ=bU8IIoNu4*TG0Z~qyRXVW3xWH4`;z=T6^Nx zdx9G4<mb>Es9MhT4|rbq`VbEfhtuD3!NKs*B%bR?C3m?Q{-!`=Mm$`3%OjY_S1}W> zldZ2AN!iz(HBgteh1>w&vz;y2-I`^-$ls9*U+Ro3kmX*BT^nUz=NsRR%^sDnC%C(e z2YTn-{!0P?-$lUBZ{a7!|7KwRH$O+c!9VAtNB}hR5P%F^gqS@Aj*+SVB@K6$$lyl& zgV5a<WpG3OCd9<V0<7sWIOZ=cPK@Pr-yTB@xGk336LFAlRqdAA21i!Fx3Of;0Bitq z^N=nq0Ij%{=asWN&z+a1@cywY(zh<*E}OpgoM^5Ur)+ZU@C@=obe^74!3mytG5saw zigIiuzzYC&ZI>AefC%MQ<3m#bGf?UnD|9F}Ha7GdH@D35lKoMZor5xUS!Y!<2+Ph! znL0HN4i4I$fZ*jTTw~yz5<|NF&sRi<QAm0tC=jxWzMpJ1Y;-53xz@WH#4_~da|XQD zIu$EW*oarQZmo{an)TxRwS{Itm|}JXFHyGj(p>ADR{Iz!`0bB&J^fch`hMi~))=e) z31Z6v1Q1_!eN+aRoxF8PjBmX=bx$_^I7bI#{nY14Ayi0`8k@i8#nC}sC4@5C3{VTe zRGl|84zSAe+2Hr>XUN%!9j;;-_OjPh;fPdH^)mUelny?!xb|U>8~Ysa%*tukDZ#~? 
zfyprB^R4vi47Z<gUz$Ip3Mg8&Kc?vtIo&-ZuRJmc@3T5vvvN<`ykOsuwxj_080drC zd&TsHDbuyMc_ToX9xra0jyp`un~a%frwLZN568gWxtKc{BhrY-Xt)R!Ud~;6!o-TH z2F;sB^EqH_TGh>wSPwmSu(yr11+sz=vK2u#juY9itX(g2N<GElg7`U9^>S_6^#i*U z>Zfa4G<(a@UYX!AMel$Gz1_>r{zI>2ncdqXqsu+YvreI0tmh^i)l~ohmEX9MC=(@f z>#8za8x#FepCD;0Djqqn$1AuaeC4Eb2yt4KnwrOr2q87*3s%nC^R<PUnVulqH7ck* zNsau(+sh(f&&!H4>HsD@byA@b$L1wocKXd{#W_-qsWBu-f4OnDHCd^Q*AVC8)MS(^ zV_f>=v*2@RU`IepwQ22<9lf_{^X15@2y27rcOmrS(d5*r6qkVJLzS%gs`hLIM3uE` zr#d%!`j7qGC9s3!X+<S!(6}cA%7YFHdD1MqdBiElZNAa|%XZFNH;Td_iGwj;)<vCf z^f_5SVZN-cRoq&xig!%7qdZ@>qs-G4)JLgtvB3QDaCy=QgcLXJ;iAz^H>@5O%i69z zWVAh00HB2fJtf4q2CsD{xMZaC-pywmVp1Ko<@%lc4C3L|>I~o{&f=#O{PoGZdNYnY zb&*j8Ux_hVxQoQ89)8D0N$P2`gh#vf$Q^U`WC+a4S>lRISdyg*!C=jF?Nay@?vZos zsEyD(!~h?*$SL%n0N<Y`vqaZtDybyy6TQt<I?A%CX!BGh>GpN<o+)08))wHW0UiV{ zfMev*(dTpU?ceKjqP6n;{FnWIp!^v^>47ke>I85Gq{)pd&shG{Y23uPB`w!^?bJ5F zN4Jj2_T-uW_t~wL1e|wKFyrlBO0Hu09U3h#vr;CjAdl)O0rR*>x*|(Ms1Ag0$EmW_ zzBCMMnA(&d{2B`xz9gxI4}7=1tt5%O+#o};k-9~1oUUV3O31qwHOjlaHJeV~_4Pvs z4mTQ8r%CYsa(MtmKgE2eeJNqAZDgeRS8CM?Z0x!7;T8P}CDAR-#*X&}X!%s(b>>Ek zt{*qxqC1&9EmHMM38Pu%>X0axN}13hkQzMhroLbu{;_%9h+j*Yw%pl=0cZ0Jz0phM zp+s;_lF>s=NNaw#X*~YN1$?yyceBIKZvZaP%ZjnXfOWC6YJcOO6_EZsukq^zagXot z<s%5h(3L!)o9j_@3R_#XO54Mv@7$?u9f?B<BN+O^X}b+v({AF~^>`w5aGr<w)W1Te z3(qFyQr*=!JV7Ppj+*mUWvlkcp~OW-`%Y0gz2Uh8t8iwfuq@BZ5>9vO3)b3@@(BM( zox-9GRKh3*(pD-73{ZG3n-B#D4ar0tuDV|o1?LXQw0-O1lQxWdk$t$@&$_MkT$`OY zICs>drCSQ#ya>dK(<{nQa*Igx*VvWCKyPjgqHp7#Qrl<%VqsJPUH8k?&xuIMydL~1 z5TC>y4+>XTPj!62O9A1r)|ct4&Lq_>3u-OLrBo!U^_?v!GRnK>4&LA^TKlU8K@T8{ zx+dD>%J9l?LYzoic|fgwi$JfjN_?LWfs;Hl?=u}|rKK<{<CauE-@JWDQ6VogP`zas zcd=?LS`CL-K!@(?WlpeZ?0v(<-a+8!k{4;I#-*51Y&L1~ZcGVGsl&(`A9~B#S*2eq zVsCFP!@%%M_0Y<()j8!f!Z1F8^Ix62tKMvvGLf9na9~?AEk}|fEC7OP_gd2$qw4g( ziaz7$qr?L#f6-<C6(PXt@vhB_R79h^yNLwXzA-)N%d+luVnqVMD}wf)l+va}!2Vc3 z=(lg*zK1Bg2Q(~boL;n7#BtiZB^RU`;{b1V;DO`NX$t~y<W^-@^^_QJ1;+Z;A`l7i z@DXJf^`JyBVO@QmRcg>+^~P}f*f%0>Uc9B)Oz`m4e)&65!L4Q*Xis4*<q%ZVW90{N 
z4+`=KNzK~xSy5utCFAHfsy{a<*BsUj&rLK(H>#NTKdN^SPE|v%9EJ+EhJBVl>x@Si zM3a?48kXd(Y3aIUEfts&DKNqOXBd~aqVA@pO|1u=)o~5UM=5vM*A+}H6(Y?)=6`6W zmC<^h1>f+mo_ma>Tx);=M5y9lw=nF_QN+fCA_nk!h;4)u6Bh~c`7qw$G`t;Y7C6Bk z>g*xP!txqv7}+CVyY9b+t~enA5c)a-{c}%P9&EzOMoT2t+o<n4FH-cvhU)Po%UX`Q znNEEN?ToeVkggh)qA^Idb4MBPR#;Is(Bz!(WfUcaiHQCrsm2k$>Qk-G|8W;Qaq-7= zZ{v4nzV1fjw>-=sBF@%=N7nH7TPp_U=VN{K&A!XWH8^hLCA}ssQ<`^iSf`HXRa_H- z{OGQ-=rI5qskrz4Bw_EB4CZU%-%-BYPrA*1&mOrbiuZdlB%fFV7Uq4jwCdX$0`;ee zc-!U9q3QxLz9o-*J3tUMEn~T(t+UZ4Qz5YOU>elMSH53UWsYz8neJ@3n8Zpwpi{i# z#lqL!^`I-5lU=R=|3UbzEV1$1;nFChm43w+$tfBSqg6cR_g@#Bns1nx9x_|poqd5{ zMK_`#0gKm$Cw;Xy2}OZR+RSVr+io<BC9yAxM<thQ=+-K6Fx8Ix8hY+^CTGH)rD+b) zOsey}L`RTb<0PHg>q{yQWaiCTPVB6{&F#Mv+32QQb!2~Su7Dlb*az|vrXfjKgwWiG zSHHZcAi!jAX8LWvwqs@DsL*x}g^)G8<xkZyK&CkYisx~v&>+ZsL#4`dfuB10SRZ*m z8hf;~Z~2AcTW$>9G7wvIYJY3HwEcXqThRx+))~ZQoQ|DGxv)ZKU%nU+KFelUDJNRn z-=TM1ySpAf)(zrv${ut+JUImq_}Ea*%P7_>p-CSvcJZ%Pu{5%2+lB!>!aSvrG~{*P zqMh-sQX-<;=tayK_mKg7Ax(X-<$d)1THmgou?3(>;Y{X7As;fL0MDW_V>uQITvD4@ z%r_e+>l4D`4g;R%tGnkDc1R`YdjXFyttQoRuqf^xtwGCf{Zgbd%X6310UsaVlvhf$ zJc<!1%wz!DYu@Phy^P}mPV^c#nPw$yk1cz*!6NV*!4xoVr`_$Ee55)&(@3YzRNE|@ zqkAqE&yRRmv_3311<bA(6+gW#4m%d&%|?qRJ|oHy+&)^-sT<WV3kjj3L?CKV1a=6& z6J*<Zo>jxdxc}xWEnP6SU>MwPX+LWoXW9rNM3e$-<OHs8>x?tpo@M#e1)N)dJVh}b zP$YwDW!5!D_Rm|#W*bbh+l?BwA$oX=n&2i?#GVTU4l3Y$t~OaPF&3fkuh7t%y3F0- zz_C*QaLB*PB+@}71L(l90cwHMC<O%3(O&n<5K0PfvEoZ--!gCdP>GJ)j(b>CM+oGd zXs2s$V@bc>i4IqMjC!27<IUNN8tNl_y$}^x9pzNqu%~ows&_jlzelipYX(3VAOKx- z*kCk7AG+a{4g0A!X{{5!r0OOwKuui-p$7l-FN(+sloS!SBZ)04GDVL3ax&DYszM~E z81VDuT>S19UXBe9(QPJ4z}#=~<#1X7eH;pVaNXe+&}8BzSC}<HG0hibF8A3HR57w4 z5(N)bpcs3-_?_Y9yBAxWO;=62^d;;-*D|gNmmu9>F{4`{Fbn}!y8l{v)e5>&x`np< zMWf(+SCe#kU7vbd6^9EpK-xT<B1E4YPCboemu5Cht%I^c{T)queMx<^fD0L!I&k1O zPT7yk%bk<0ley>3UbCj^1}r;a8*gf>iOru!9EAoVPU$|+JUS6V3$r8y;BTw}V5@j! 
zRFoSt{hJJZ*S)9C1Yy#xY74h%PSumihkLWNgPAAu@n}-JyhqGy9T4eFugq7*$uOyQ zj9g`!+VZgQf;|%45Vz~!;0i!^FQN1*2F}{tEDQc=<h8+YMMViUt`437KB}I+`g)x< zZ{MlmVAOM$JSFyc(RD?T27Ah)K(N>H%DbyqJ%@(Aw^d)I*hz`^qY$xErBI5u64~ly z?lsl*6uXsZh7-m7UI_QNs_3AS(i2)>qb4ptmp#_~DvLU3CNALDr{-upzSF$66|I$8 zNEbYqNTJ@XL)a6)GNWhyUGs32LAg7V6^r^u>~W<L_)SGK7By~^+?Jpz2kFb#umkeh zUi^C9Am_NX6+7@@llH8BN?bS%t)gPoZR(<(yT79MZd@g+yDvt{mm@Y;X@$l}JBHZ4 z?||)8p4V;K^wi`?9ByMt*5}ek)V~8f%+g=6y)5enmy_V);-a<^B1lmY?%aP(vU?GA zng+8G#uKs|ZW2sNIfg(cGyB;fkZ(V-P-}SDkzepAIyhw#zN|B8*?m@P5J`WVqD<?Z z{EE8RB8uq(2ebd+Ori5|D`+#R6({fKZab2e_W@!wAaPRRZJ~WZ>jf=Yua;Xl`N(&2 z%y_=I9qp?ZU|Xti7XN3e=0ZP7O2ZH6UQd+#N_{=;?u8s&x2I`vGE<}JIZ;8#NI>ay z<$2`hX2;S^xAnoP1+`9XUu(Pct3vs~O3%b)fJZF93Ek%{xw@w6lIp56Y=s!;=H=I$ zX+#46`Lo^U!V1nvA?2q(_({;Z!hcas5PQGo9?Cf>a<J35U(;)PlxED-(I!!5O=Q3K zaE1<7NJOd(!u?1W*<9T;^f_ZAY0WgTAG=BM;z@JsljI6Hsk<1%ik}0FMeXoMER!RE zDbwYze(8xT%C}i@=a-QT%&t$);*We5Ge{vs{*ekoNC&=Ej@1c$P<|g*Gw~LYkPG)F z45JE=f$~zM8M~@~I;wj|;F+8~4Rx|yLHiIg52CE%OZ(5~#DC;oU+Z6xV6ht;>K*S7 zl`&$mstAW>On$wyA)>CPH1qKI)SrJ#l+sS(Bg#l5XudSo?WH598oRU*_UP;{S-0BV zP;=-@mS4G7Q0H$JrAlLczRSnQBl(AjZBJsN=>DtE%ym%yd)S(!Ywvx;`J7@3BL47J zAi2s#f5BejeMZR^&PV1X`ez<r(?wA5$b{whZnVcU#kR*EvL{?L;ai?fCANy*b6(5Q zcap?D(GtI-KUmQ1`PFuJ+O%M1G(oV(@(-;dfdVR21B^NAhu$8_Q<tK`^#b?_{iO=S zW{aVB!KU8eultef13zp|#yxWO-jvhC(jWk5l%a6jC_ENd?V<m~7Opc9WQY4d6Fm|} z;5*bmE!VFpcH?_M3C&9s&W3QOF!mGs(++)#4(cQSUH%G_@J95F+DOOopB9_h?=|~} z8)Nyuntgw4|8b5u-9OButlGG5VLJErdR$u`Vcr*MZ<nlQy5T#On&1-qR+kXm4jy#d zVWQJ27-Hd5PD7{*T3GOaK)#TpA=6-SAxKfbn(o=Z`=|g}t;5a8S>P6--&+Qke-3fk zmB<U15+;wsk&ou4W#YZMzgz!pF50JD?z_LiFdh<AcJ{h8C%h}o34STsykCXHB1T@u zi%~`fgIEeVTPV-*kiG*%z_=$Cg76h8adK{K38@fxZy+g&9bPQ@zTx!u>FG_Q5T&41 z#<U{>X^Il7K|#gdqpE+oGIa^FvPMW3Az7@$5Pkj{b1f(y;20_O|CI67F>!5Czc_{B z(&EL673d(vwNPLv3}s+&ibHW399p1wiw|1d1{mCB@G0)j;1qW&R-pIe-tXllFK=gM zuk1fga<Y@X&e`iHzn;)-LY`ehB7-C-fb1}CjgQOr1~br`37zh5$(8d-)BbN=uB0PZ z=X8!gn}FyXCA|%Q34DLtOP?xB1Os__ogvn6*|U%Y4dbq_YT6v*d5w>K&_}v`fbziM 
zpIJD(IYaUM^KZAd_K!Xb^JqOU_L%GJIiwf-1Z=w`xxyEVITZEh7Kxk@fw^eQ6(q99 z<%GDVj`J&4X}XEX73KPWEl}J)Q|65I&%`GYbYA~rh-?Qwlyi1owm+R67EqVWT!l3G zhm>s(881(=Y;p}uXT@~bEpTMM#MITIZ#seis|mSE_<{V0;sY3NIyG2k{SWEr7q_u# zd7PUk_u3`*hBXgI^N@fflY|hL01m^B=ftpd(&iU>$DEuZM4SZelnuZFM(q-g!#1fe ztrj#~7ngwPn?;j9b&gEaD+6}Z9a<ctFPc+^9A!1O%5BR(JM}?}p&sItK0h!2SxWm2 z<AKA=m@HQRrf~rn!ea_E<ZwQi@>m1YBS^4WFe51GOdjg~?2nMW7k;+_9d@v9+7rj6 zd%{dh!YSW6)Q>1zUhbadt4&jN8RCW*5%X?(+b3U*pv_dZS8OLn>Kt+*o{a>0bYaAO zLq`MFo?(q%UW6fpT$2UCR=qE|D%5JMAOm4UY#t8fB54j2)PFot+$M{$zrP&+c>JLn zVQXJx;QS7XLaJ*-bgPra#W_w-W%8h#)CUAVb2q~+z6eBVg+oE=JvLId@CBy|{+ErY zUro`XF3XTKI8dl%R!_{_9L-ZPyB_vP8Fm4<uC6*ZY}L6o%F3+J?-_qv&!`c2g{bQi zKfL>BW8Is<Jw=$OvyTY20ah_uW8(uCE1lmhuPFNvFP(#hZ>|LocY2zQ`O0{Pn?CiH zPiavsl!@qnx2fE1KxAipJ}HDJQG;I{5`Rgf+FDcZI*<we3*Yd$(<sP`b>EiN7}%3u zZvXSoPUX_?goC+}!J*cs81wDDws%4G-$*6#UceRV=T@0`HN0p1<gqTP#riF?cPSZp zD1ERJCp;D`@Qnd(HS<PFtlMbc@7VTKH0ZDaVn$D;UQW~h9#Av2%!*v?_SLOL?;hyJ zZx-tDz^N$^+*4>aAbNr$^s(hCnX?vbET|xn#gmuY-<ds27W9rMw@LX4>4(40DL4OU zD4(AHZI{i@KRuX9xe8dl_i;jBfBEO^=KRk4Y@;uhN$USg%pC9U1K@}US!qbGU4;<D z!!^ocY8MP4bp?F67Bv0CcbW1Wk47V>>zND}rt;DGnq&|*2LG7OKYeEpi~Lvi*AK=I znGbaj*(m`QF|nY(B$BZ*{wORQ%&}}4|J#clum9$;O!14Ikv<v!8?kNYtG#?7(fI|% z*y+!l@k_b`A$QLmHkP3o*4g&^{!cst0yb`2cErb+@A1>F1&_TNPf;{oDckO20V%J) z{0l*SQnc(Zg!<61;&Y}|&n*rSS<pIxG=gmgYNV-doPXa{N0|=XW;ah82~$$jby6?0 zjU_3A$tOdKdp>UYPQq)86UfD#7|trZ=R~5unO!tE{Gk_fy8`qQzk-$>Zmw5`)?Jr! 
zBz2`63QxU~T9(*|;bELvO>G`mPUT_-h<j2f<=%ypS3UjN+ZoB;fK2~VGG=0)^Lt%D zvHcEx(%!gI{JPzR%nS1y&XXasn_|VIq#B^fj%mCOtLLuo%5~e(S>r(bZ{!keOV>Ts zs|gZ&93ZND^mm@s<wQxys*-yxRfccpeXsvTX}SHc+s{{h&Fw>w@FBnaEXS<Ae(#6n z#krmG%vBMzj}3MTzi0)dHih15NN53;P>7TNYiRYVr+WN$X(@7hck0Rg?%%oex+wHB zL_0^91S6@Q_f>mYb$SNsCjN4<Qlil-=y#M5_12y<tefR8gH$#)!n;|zrZDlsG<H}V zvq@Xq9>cHOj)u6=B5X*ipeC{4JP@UKhz00^IAE}NojS@hE-#t2%2hF7{FM8abHYVf z^iEePHrRiWLK%ASd2~m-zIu(cWNyBy^EHaxQpFoJdO+h7W4@*$qvbCpy}@`OzxYic zG^a1BD!<?P4PSUcUgVqtDn*_gla(28$D#45)jed3#haPqLB^mhU+<Rs1^@7cYn6YU z=K1a;@H)C4H(dOfT_eWc(C+-e)WrTUEJ2);Nm%SA75<Yr?eG!;`R2kJ+=+juy#EiK zK3|+^FK2%9#00xH=dKxc4&7&}LPPd%K0jo3e)-dwqFCi#Gq^i>tu7;v8l~q}KVSbB zzjOcPX(a%~Fan<j2U2>5T$@W-50w}R)gS|fmV}61+O%JPs3%u_LXdkJ{M0M$`go{A z)XT;uymswYO6<bz`pavZ%ANC#p?ohU`s<U`jxP^)|4(|rWCqY;&f*&|V+7`cEm<kB zRJcA+;a<w<9?v1I#ZPv}0v?q%!jFDd|2Jl_?6JqpB=B*pv54(`o`k9v)Ju^cIo(E6 zAFuBA6@1Uw5SXR{1vIQsk~m$V8zgBgR^a=u@{oOyI2`5YIT&mhuI#}O!bZ~U%GIqI z^w$14I{J^@6luzG$IklV`qbi3=)IWBIx3vx(Jf#d^}qAuLS(N|XGHTqpo;iEJoxcE z{-4NUl~u$0Q2)V-?HvIg8J7#VfW;=me~kpgks(}#I-Tr&-ds^|xK{umB6yjMz2MgC zqd=5Ucia2Z#ssS-bnOCNNdB<bJYOhuYlx<gh+~FNY0y!p-;fYBQF{#a&9Wj8r}s8A zrckm|X(O5)A`~3(koN%p?o0jInrx15B!<%4w+vZ*mdi@LBavpTWChDV+kBZ#Ij{ta z3Q=U24|*;od?OHLjQ-groKT@sg>JjCpqUeBu|?H#pGq*i3h`8UBl;<?S|v5uny^d@ zw5h!<g8Qj%Np6V=ra2DsW)G^yx6o9h+bik3^btFkxJBN-GOX;&E|qc<w-g-Wt@$si z7vlw=<4j|W_2mvF^FghTe}|W$5`c3b)K{s{MG6R~@-wIC8Kj!98kMKECa*yPf((y6 z*fbM|8Zz`qSM7V0A7^X^kyzv}yUf_Z3{Vm!9LAK3-`%UjloAPX6v@6SJnr5W@BU;s zvH?E=)SURI?Q`1DVa~Wa$(J!__{rPWRI}iJnXdmvcWI-`+&goMg6nIkEjpt{EiJ2O z39}AAYY->a^jL>xq+{j9^m=1CCPf$F^|7t1`9f_{1H2nrhaesytDLHDHWA(4&Ycuk zaRs<6D!DXzwwyI(GZ6{QGqfg&TbM$E5U4>y)El1BC0d6@l@A;rz>C#&O&{#(dc0f~ zS<0}42&uRp<3sM-(0`Nsv(8W2g}}85=nkafNIlWuJp}wnnNX@~nV0;em;6Y~$#=`5 zeWjWD#@Z`<ofv>gKu=95WbybORuK(%SaM*hzyfXx*12e~n`I+8yF5Z*jCK&uTD8I+ ztQl6Quaj60b@`glMm+6;0;$!N8TvH%w5B7|TawT%rKR~z^C-`wnSt#pRc&T3aXs&m z&Hf2(F-^5-(x6<0Ng7gYEj=WCzs7`uXV%W#p0o0=V<(WZZu(`Wp5#I5BZd;qy9eM! 
z7`zRhHx=I6s6w_L`aXowS$2)wXCC!?%XZ-`&*XVqhkuU$MpNAFS63f$a^mkACjCO| zrq12NCE+L8@5~}+0x8`%o??*V3v%F2q?9VG8Vx_uB3*Rg{jM{w|B{=1wfKb#1p^jQ zA3-rBHO6QsYx1bMu;wZO?D+ObsgOH!ODbCnX5(QZWY;8FG&?G@#76T|<So-{i7@E^ zrgVXF3xi$?+k09!Kdr+Tc&)EB2+}QHE9|SU^tGW*J^VBRnF_(#7W0>{qc3YlY27ux zGs)OSnE|E>63;}nHv361vDU*F9EX@xkzC^5Tv!JcEh$EgAB^1sX+Bqpx2~x(<*$GD z|M)uS#ex4}kM?D2w=&7b(6W7Pu*DM;K3f4pWlNWfA1&!hg?e-hu3wme3&V^o=ac&c zh0Uxv*aVPm(|5A1Z`ZFjOyJNT>hJHGo>APYjhQ3w-n)l^u$?*FUVlf;uFae`j<ubb z%?H?uY->=-!fSsxN=l}&CD;WgdRC~3o39FrIeEHxBW0k6^*q$PpxBtJ^?tQirM%Ao z^(kjE4Wv?r=bx>!fCJ*qrD{!CdFz5DGU~o58aA=AI+@pxwMxCR^N7rrp1jtk>o1;$ zd6%g8w=?$|Kp<ui*QblP!&T|6sSZ1y(zl2$)nu~sHdmf!_&7%u$^@Hp9XfftcqNEo z5M%|XMNB>Ihk_i9HuK((U%l(8<|Gy@Ic(vdKlYWMuvr!>mvGCn0Ouy`D1R4hrfT@M z{=+X2rK6k#<g_+k^PSG{Sp;yF)MzRZWIsPA@@3lUsE@w;Tc%&>bS1J8{NB?dx~b>K zh9A8%tLjv!r&_7%)W8G3!(TAEyU`YvbkQq-n!clf0E)-~e~Ds`yc-E*&BdD;M+uT$ z#A|-&^DN@(^Ym-iyrU8$*WylqLr`ou#pQwkP&_kLVZ)*moiJL^X?`5Vi_pK|^8uuZ zGf=vz==!Qy{)F18PQD_la~>Sh8L||Hki#Q}9bz!duI{81v2X&<&+k{g0!WchYx<I% zYt@BGp3q^!f7rr*__&`xj=4lg2X`zkZ^(OCyv<(KgNg2<(|2<^b9-cmo6-fF<UVNA zdJ^Qfxd0(c*yO)v;by}iBEx4>x?RusRT`6i(y+Kee*&kqO)K@%IwfbiwvLIOlpU-N zJtHTl4eT_*c`#F8r##_%E{SY0Cs+8s^e%+v5A|?SVhu%(Ef0i%Oih{H1RsMCUw}ow zop*Q;cOd72f|4@+WvD`U1`i^(n6v7Qu3EQ>os*<0Nq%H*$3)88<Hc!`vR!PRK*Sr2 z1e%YkTr7Q(9#Qx}&8dzeQHDOyCF5uBjc7b<ihKswo}MtV;ywnisLpYF5($jAcr*sf z#?FsC%}4guV<G$B@kd~y^KNCEypW_aam2i>f%46d5pwdgmpEU?Q*Z#7y_5>Yp59fn z#EVXGs8(~VedC>y)62zH5r@WYbjHQ#+j|Uger3-3`A{4pg}^8$PAl@3xF3mUJ!=^N zg#bxAfbENN2$+xhy@H1tuX0|YJ_KBaBGREMRejXAltBdaR3Q+vM~kXpTpq+ABCiHc z$z!Hk$jfc41-lkAOK@3>m8U3#sH(mLzE`CN=OuCTy;n_o`KX{Ne5nc1R4wGz$~v$s zZ(Zc6%ZepmI`eQ~$j<QmY(FsXH2;NP-0)UbiPWat+3!~3=l%)luuH7Liv(Fa(v{dH z_1Grw_Uww)UH=pOl0o1Zlj@D{tL`Df?=EHfW~P<>18Hoh)a(cdTl$WyJg;6dpTrLw zmc0)q5kB9g^(v*8m!$E+b8)Y5i2v%kk~|684%-Q05#`jQJn|M(Lv#rJTJ&&Qk&@|v zB2Auohgsk4wKt9)_veqSQy!SzF2`yeEvzp1w_Wndm_dMn9D>Aw72-1H7+_YGr6*Lg z8tlv@(b++eDuR7qiI!Fq?n9R|hx;L`ESUiK9~;`{dD3&4UV}6e*L~?-Ma`90Vt5;h 
zem0iiLiuMQR|Equgi1B>CVd46j6X{q-S12$0Q+y5wj!=Y{b46eVC5O$>v{C%nU(QL z+S)@=_@Z;k{#OtG6M#q7qRYhTjUj4@5_H|UJ$)^zn&lwFSp}8#b`#=x`G@%>?l~6$ zCMw3GOY1}2rSRK`h{Y2QEa_e+!dmkNQi1J0`q{RRag0zF%`z7G%&F}6DZS}uKZP0? z^kG@Yevv3o-rn5cnAg%5&JxajWTW%0)(~4!ttSYcJoUtt91jNJM~s0al0A!*vD|v$ z)AkT7!O`51W0(f&6s<+oBcL^(E+Cp}h4d0h)fp5nPX*E1)`l@zr$X<+l7dbZ45{Yg zXnR;2GN`8mEX3|lh8g3+qf%myzxi7%6vL9Bgq}N{Eg3qbhxT!^TbjMAmW~+-)hxm( zUm}k-3<{;BX~a?HWaG`VQt2<`p^S)DTZDpMqYQ@WT4rl0OZoSf>aBDbI7Z8*BA1Rv zCT^~3=3J9ArVmf7Da<r{Rdznd(lzX_7!#Tf3lVTwj%xR2D2{i19pI{A#L=gILY4i^ zLwzvTHdr0Tqmi~mlF<roK8YsYpcp2O#xy<$zlr27VKy>+RI-$1GS$4QZ_LPyffEZO z&ISuNGMgU#^hEJlKGjWH;O79N%PXZRC~LgK(@CyNJNk&0c1XsaZ$_Or-jh^v>gun! z>*D*?lw~fkBq_=VakB_dmc9g_*L3d65SoT!7SOH(59`GPGxkg>2uN{#6bI#@W%EP! zs|$LWWJminrpQ-Hp>BL;qrG4oo~jV|>sSo}2*ZmGRnehVMcj@l0RX)u3^q<mm6ba? z8~3uWlSS$)v3mdWrU}#|!^H0Ak`JU;2nyZ2#2DgRYVV~ln$gqlaX<`>bQf%lT*B!i z(7o`u8lxugTf7T!_gTr-a13bOTwHuW^+Q6M49v>S$>HR<!E}8FTw*loGQ?*XP&DAO zU({T+Gm^isiyr5+AQ4}zCB+qIUl;IbsV*5?&Tt?n{K+D?Y|EburRYD!SNUYCv`-{I ziK}P5U)rMG$Widx*g77@Pb~p(a#+wa-qkGQXyG%&!v@{@Md-vV*wBU2%;-#suk28# zmnLOIwmGem2oUd8W2U7Ys*#<_iN|a9hxd%ScsyOF$HB52omdtBK{PUjjmZkyp^5sL zY`3Ce&&%()-PILF9$3nrT7>&Gi9J<Y4WU@bihh>Bn@qBDuVz6ZckE42bu9G0cyq!s zX>E78$gh+wY*bBh-Yn3OIm6JD7z=ZaS(!owsF3^fB9wzY2Ni}z$DYcX^{Yly5T$g) zQ%#dPV*dVn<flW`?vg^431i$(N!M!M<Vz@5ESF&MJ5Ngsex%i<)6Vp|{;-^}rPIPS zOVarA-nx)V*Y^@iU&}&?Z9{FZ*iEA&`kV_UXizIeo?7h|1rwZsr7r2W0-X7Q@LdWC zf#P2RBNXJH21ISzr<V&k4n?e%Dinh0lvCcc#CR48yce{pHE2lFqUds{2`#l77EGm8 z4h5HW0~8TFMM|kwk_;v8mI!X6x#v&6(-K!`U;-%Kc81VngN)vsp6y1atX>Ri<WUeL zO}Vb{k$<W;?>Vps9)^6ia$iA_Wt}~A_eTWDV8p(poYzh8w1B0L9Qqr!Uno!XHK%BA zpE1&Lt^sd38oZ^Y__AAG<T+f?|CRzP3->(ni1|ty$QT@FN(qo#XZ3izoO+camS=WW z_ty<(>;Jv-wJv#N-MvNcW3Nj`d~wm83|_5&GNW5^nP4SeN}9%f<F5f_+qIQR*4sUi z;by}-l+4noy4O6GR(0nzFNn1~&U`@kQzjn_r{c#;e32@)>|rxl4yve61HcRBj2@Q+ zM3MW1*t7{DUDYZZDnJ=!j#xI(aOuZn6XE-xel)i4t_-im+uhgqIT&Rg+vGFBS4^bV zjwLJS(jd!9rmtT_C{~*vIN7|~ZlwoKU`cO3sxe40yej3H(AYsG!n1`YXcYP{WVi;7 
zJMi?1;ZFMI>`~H#-}*#FU%Y7;AW?P%uIXaO6<e9!;B_-ykb7WZ={(R%86UmqVmu)i zUas}<wA~-S%Vc=o3uNENm5O>G*Mw_pbp~dHY>U0H^Xec-dZ{ZqelE#fJ8&&y1=^-j zV@0gxD9@P7xXd4bsv>cv;7_EL#g0G2kB@s**-yar?B~^Xlk;AHGwlRFjp=UN>z+hC zUI>+5o+-pFk`wpUWyS+kw(z_2M3i^Ef2&I><7uHg^_#FmYWCSujyJPY1Z4&6GFIBb zB-E*?Mj0!00l4oBd}Z-h98PeG`k|&%`Mi=Q6M9W+yJ+}}s<p%7037H2hDGlK2g@ir zAHj<VNzV!|>Ln{=Lf!D5u#>x)FvOt(5d$p%yz1XQ33#p$Q}6t7zn#<iV@$>ET?jYm zin5gKRD<)ZJm94ph(SVw$k4HR4H1*{Y*<>riK$2wO+(Nk{$|jrsrD(XhsHupdCPpa zYouYY8nz3rmZViJbgSCoa40iU-tF?@j$>DaBi=xW=JmR9gvh#=tiaaY$6CxPN$T2m z2r|Y`)LgVuk%GO}Tgi%Q33%3W!x%IuM|34Svb+mjEYooStA{-LZ^DbpLPOlV5eCgW z&iK<HxO$I^BFVWoGX7U(^B*lsmx%-M)}^Jzi1a)S>In3>+Y>!~^H;(l(VAe2HwuK_ zTlS>QyZ2)pqwr(r=F>e_$3JusFXx^P+mp91o?6zKi8>t&{PG64bL=~m%XlHXm)A0` zmD8j`Y}L5&bww3FjH`pX@}5pKBDOqC=(clvp|3>G)5cwdi+*et_$fXDqQs9ji1G{X zHj!QO!D~!T&LdM}LyuSftd012nrCDxP*(hCd`@!&^|+dL8B^0YP`G70jflt_tHM$x zZEnRykqeMYR<E9TH1DQ%%`-Gri6vk0=7Fy5p;fMi*-1pl`*r5sOp;4+BuyN2CP9E* zN1;_qqNUzdF9DI1tgocQ4H)m8-8$G#RbQ=cPLu=gC4Qi|Y>Oix6AQxpK;iq_81=P} zu{f$;P3}DbUYqhW8mmMsrvd<h90Ly)NS|NfWt7kC0c@0%^8LE9bUhh%+=(I!rpjAg z5pv65eyYsJCfQn6;istsh{L%w;}{1AHTpKe6xOvAThib49E{$*ndIpLKNY-y`m2<k zqWfZt!X(!tqjXr}f?9K@r_+HY2<6!ZXF(^PLd1pHb%OGEmJ(C@>{#T0c&$~5L!F(N z$k~960M5Y1`d360-iDU6^bBOQq!DA%?ZCOTbS&9z)Ni!<m6Ze3H*<h~f=>k_r^-dS zB0d(e8xawuWEvG_w);LB_nnd{9TT{$>|XSr=YM>KhO!y#W5jG?Bs_fRV@By4e3+cT zL%i&^`Mh;oaKTZ%;mH`!$9lKIpbX`e%SbibU7TET^YPIi2&7~nW5zzZ{e<6=<$JKV zY%opu?}xjZJCHCHR&{r&=}NZV;I2Z6p@{h>bOD*SrjU$w;Wdb2`nKG&Is=#0bwsQ> z*S9w+`e|mU-ym^RJBd$wxM@wL;rnLpDR`Z0awYOy>CBOkIJw|_g3%n+4ek8Vq{8kJ zFz}@vZ6v3ierP4(%oYIc)48OkCKp}(ODvrA#X147PbL^Z#{84nN+mupVvXOY;Itcs z?IF<eK8!R7s}NUGikgABZ|!?0Q_uN~2{hlQnw*?6Cp99rPlo(r@`Mgt8^j$iRXyf; zM#zxc&k%^)XrsXIAb7ZoUd(J}GaD^8s2@w>9H3GkiMs<8CAd<USpQw|9<#1UE3s@C zt<reTFGbT!FBQEg@e)v`hW>ILb#l@e-x%65gG`G~QqRvH=-(P#*~nc`)dMLLrz4t? 
z9BDIchvurOZr&++_0OGjKT(idtMHpI8Eqe7y&bBhC~eeL)`lK8)JuK?#c>je?G2(n zEj1b05n!tDpj2Tmaq(0vi(A!#Q(Jx?A7F@9@<P!ilSGONO;ta+`t`V`-n*SSp_(Bh zsFXM5X*z>E;*y|cI5~uigV~yfOHQ8ofCSL~6~&tKfwXUC<Hq-LaN)lF-oMsJNkK5j z2=8A>*^pEXif>KLTy*?LkPJ|~oN;R=iWm748_O7|BA4InK`p)Em>#k-xghi6sm;J0 z5!;#+;W&fjPR~jL07aZ}YnUJzv&92^9(J^dC~sU{+x)cT>CL@lQnBbb)z`g{`VEc+ z$c21lIVuo$hEcpNSTkK8;V|TBqBl!|Se89DN-dt<i0HOeX~a3VA<rBn;T|v5fc@K- z+bfQeB)#2heaQya`e#=WO?N}5pA87Tf-(jCOt%c~aXz}d4nra{kTGl{;lV(oqqdV> z;F{z-BjZXzJrjsAm6F8LZzP(|OcZd1)m-~Xy#<DcojEuZe^Fz<&I-R4Q4VekLrf<O zun3;|kNx@$dmSs!y&%ok#%BMlX!gV}OR1NN{S^4%EE{#>w3zP*Ulc(z?bI=m?%oQ+ zE)vwU@aw12!T?tL{sXm3QycjB0~>zS^$DhZ=d*ax>|JN>Qj<cY(?b+uR23H!$RALr zt29{!wZ78^4$6*ooptGF$ueF8hoascc&I_9FHF~ZUX7m5Om{RlsDYT`rg7Rk{vV5e zN-b~u4%s}+2L+k<6%G-jR<(5A@YVC#2F|GKO*c%p0JvbhbXuyg5v^p$@+q6hZ~|mJ zIwu2JFz!x7rK!jT2NuA*HLVddX@d|mXc&~*)!bY~5?xp>i~<$!b<Zs=_cRzf#0&0r zTe}>K#ID*Im~b(6ia5PfR~=Az7u2G{QOCy0LBZLT47LR(sg8H1nXOE9GGl~b1xW{y z<6NB~Lp?-#zwQ{89VhAA>O+brJfL1%jncG!+2*aFvFAb8&BD97KPwm=>Tc4^`X%7B zyz7JRU%YA;>f0KM4w<HA8P;+xpEeze(}lmfu`AW$%-47c`Jm@C(=ZV4zo3KeUd5y1 zwJZMtOj#jazvh!G!N|eC%$8y|)O`Jq9lLq5X-E()mO(H^<r&7(`fV#IGIs0RJwHUv zXovPevgsfDu*XJSGg9Q!!$?d7>d92C1<XqqV%uFowSN>e?2(#20ZXW>hv=ye#^9Pl zz?uc|!PvYGgLUy7+6ihAC0d{oxXHp4QC*otDJds3&sOqPO+GASyR70_USg8kE4+CF zPpcwk%gqn?Vovy}3V+%^I<MXU@2#KS2M{IcXV?6_I*T2oYHIPqOZ$4JDDi{DD1`mw z-lywHCpiPp2E*spzglWE3gOsqzz|EK9JuxT#1@ErRf&xymgOk3(8Fttou@qbCJ_L5 VnM?(^>DcGr{N|F&tX+nc^FQ(-<thLG literal 0 HcmV?d00001 diff --git a/tests/abstract_cases/asciio/relation-edges-with-artificial.asciio b/tests/abstract_cases/asciio/relation-edges-with-artificial.asciio new file mode 100644 index 0000000000000000000000000000000000000000..ef961540eb391f191c5852675002cedb1559b5ba GIT binary patch literal 4272 zcmV;h5Kr&$005R0LRx4xF+o`-Q(3u1o9X}vYyXIU_$UC}_y7Mt@Bjb*@8ADG0ALsY z25e|h0r%Fs+dyOkqs%vn1&sh!Fj}oN6i`q`n`3NMV@x2Tkp*lKhMGv!^-bwDGH4nG zjXg)GG6sMe000Jn005B$5}2xfg*U2uR6I}`00x5~Y3Q3GdVuu<^+%)wLxV<*13{oN 
zVgn;XAOJMeAOHb^01-dcQd3L-00000000000000)kch}2CPqz6nq)?rX!MOTJwVVh z86Kbw8KeMIQ3Onph{|S@DTby@(?Xjm=`s&d={+Z@=tjyh00xf?k^PBPURw8zu&~Y* zPpwnge&!mjJxmNORWuo9NtV+EQG+ZAmRc$Qh8iYHJ*72N)S58Lk%Xlb#aftF6*w%6 z-Er3Gdo#hsWh~v;uL)ID=yx?TZq+$gO|2hl7a6h!c^gKfk+Sz?o=rhd+>VY{N26@_ z*wZRF<>M>3I~I2IoE$gGW0uXFUy60Q8&DHq6}2Eh3I+nILM!nxWyZ(I$*Wo7kcd(g zKv8T6DN3qERIN&-c(D)Cu(Zia^I_i01geads|itAVTD(<?J#>{q>?pLCzXdalXKCA zJh{2e=x&6lb=wD7@`DACQKQ#9Mh%t{eG@0sI4vR*(%pwuTR5vdZ*`ZcNG8t-*W%Bq z$KBp?RMn%z;`rHR(9=~{`na&-<kgicMwv@1H8)|6I5u~%rp-}=o4B-LlgzG8hK5a4 z%DdG!VaU;;va+rwd&GxU`9$d8fXO~lkaIL}8#_&zRI)?^syZDkG?eDqht9?tD^+2^ zqb623FYs8vQ@L1CQB@~tD5T5igiSaNWEn*9fkaN17ejUxWF}Z*Mq9ZuY)(uKTSaQa z#;qR0yBki1?#52H0|jytJQhhJTWliI6X35K9EgaWtr8<r#_JKLTe-8VHB*y9vxg;N zqrj{v*;+ScBP!LFS$1)K@7~e>V<N~bgT1Uyz_vtN7D(Ya*WG&lFN-fDy&cXTCaoS8 zhGcdq(HLcuX0i+>iZoGMG-dB83@JY{tICOmRf`H)NIzG*86=3k5D)ZGYkhiJaxy$X zP<cHvL?O*I$N>ltfB*pqZqLq)BCV<kAWDrANh|2^)uDQcU$skvC6;B8CCQt9`oW9B zOf!FH*oGRTct{U45#mwFmWYlN+ntpQB!pwHfg)FfoE=q?;F<MA&AA=WReGgd`tqaU zh{Os7gK0wiEx8X9T!Mp~_quU{WNc*F@N+g8ii7~i41TX~TkfNG(ow6fu?q~aYjX0< z#=u%XPdzsRn4v}PNF6#Wwna*DB=~dwJ9<XP1L)s@2;pKy;VJ<4>X-WeI=!}9+%(Si zszB~MLNJ6pN(B^9iV;vCEmB4?0Fg=&KguBlO9H`SDPj_Z_LNUZzi=1p3t@5vp=e)d zUg2OU7zN@BU|4qo*caSTW(A>Yg`sE`luVQlAO;jl1-htg1^C5bNVy9UV7nG&Q*Bi0 z`8><vJbl5C96VmTOfpTF8dh1-%L2as2R5X!T2qRPLz1vSmjn<9vOpBoKtWX}*Unnx zIl1KkQ-ZcieL%(p5>(Y-(<l-}R41`VzYwkafqW_^YJs3Hm{u8!jc8g0g4zIf5kRyS z3v1Y03$PIHD3l9I1VL;IJ&<1jzOcQZEmq|M=)V}ey(qtu3ur|W3qY`20_ZJ;p>zdu z6@p|qQXv2jZ4gt7!bu1sP@t7W2ucND{ef6TWLA_)NTNgjQ4xzG5(O0$N|H#jG66~@ zLI^@hBqGH^=LsfiTLoB(u&|=kkRqgD+BShuu>@A7pe3Rpl`RldxdlL~C>5lLAwX0w zVMMfL#ezUuDll3WKwCj=1)PYKrj+T^B>({t1f&1}0000000~M_D7i#o5YjSOU`ZqV z|BKV|neXUzyzJn^fqs@*eXh4l-vYju+TwCo-t(~L=K7m{hf8kv!s7fb_jnlO!=e5? 
zY`Lnegx$0CKWemPXUM3-^|>VMe%J5=DI!Te$F4{|f<X6@V#plj73VKSf<g{0{B3oD zKzbS&&@{koXtrzE(I0b0uw@B(hL%Q=gweBnY4NKgNYrR-2{_oyx%B#VjND6gm}EGg z!FnFqh}Sf_l4mT!5-1fU-ZtJF44wIo*drunOdL7gv~81x9g!8eArxg)A{7FmP)d~= zs!E1>*3vPeNjbZA%ejfQ#cY|qykZPYCi4Y3p+l<TQO^@D;@JS!ov<yaY`zi_0z{Oe z5@6v3G$V4`32N(l&m3!u=WaKX8`W=RM)8W=R<W(?j-WUaO$t#0N(2Cx%sc}-$9N>Z z8V+PenbTqc3S=p}xaCs~+;iieGPPX2Ln^GZ;=E85It}5$9#{Zclvi3udsp1i=y;?_ zCe;x&Y{I|2qXmenMsa7{_cHsRxrwb>nky3ud=5;wCR(s`EOaUO)ShPs?6|2^%^D=K ziWnL!75GBWOC*R08G;N9>}Z@{J7%mVjhJPiugH(e%Yvqj+bT|96Y%PL^)#g1aAAoF zGAxM0*7dY%*{kGlqgjPhV^)sEQn;#fb0oq)jM;)^C3%(5ekEdIX0B@NR_fW745JQ~ zh5rVvUUyfo<zvEixw$(QO6XJQWjA!9PnNRkdoKZ(<4@%}6D&WNs;P&SP71Cysl0=& zx}ymr1r()9imG8M!jxAw>f-kt9DS{Gv@=yn@cfvQHqf<cJ(5Xr>h)-Zm(O5kN<O(J zn!H$b9s?y{`%sh_IfNKFqnd|@?PHYz&duy>vx+Mkcr=V_?&fU`yEdY5i<xzH*mPDU z;MrghsYaG<Hx)txI_!3thVRO#e2^sVzIjkes$|!jOW2^Is<kVEu&WTtT0+fHBZX!q zwkF0jrbNs}Fcs3aa*Q%t8VoTG2#jNm-47`yU}VBdAYl#{LnToWPE&d}8Dc4l*rGAA z3L3PNr@p(^M65}@31msT1VdwPVsX?wObz1#JOn^xfr}xVN%zA!&Wy-$_ivpd7>j}l zaX5tSxlzs)2{BkTzn&%J%9@#0T`lbL&2LvT<A)6^h7*PzEURAQf~R7X`ce+`l6Rs6 zTYw|y&)2VH>rpOE2jBqmmuSp@%B1GPZMCwnY(zw@K#}dJ6hEhfvo#WeAFUDf0+Z#R zv7I=R-17f~etf&ICFn>&{r%JVW7X3_<*_2peO_-hi2>sh1I~7t{Kx~l`5A(kFk{}a z(Zd)Vst}61l?yN=3Lu6f(=##CnmY}G)X5ROI|YVT<URLmi)T?I$=lU!M45p&Wf8X9 z)$PE#g@T2NQmdd7blQl@=A!-KP~M5hF}lDGoynv&COguxxp+u~8Ja05&{dFRLdNZL zRdEGRr;{@r=%L7QI?+R$PMj)-_qCDOXSh@<fD;O#y&LfL3Wql12H{Z0Mh&`9x^Vnw zQA+Pb?rqTyVaZ}AV`4{c5=On#jm7M1uqoPrJULN0V(Znjm;*`|E<4pCJt&8I0fce$ z(5tG<7YUz1Y8LP$MuA-nUmy${1h~ARon?hqlOik#vWmt%FRAHt_Fm^qNOp$~xg)od z9kDpKi3|yK?C=%a&4YWT`)mokyfx@JniRPs6}pMvJN@qmKF^&Mh(nV6^eo-kV9eTW zuKZ|qocnhx!)htWcdMBV4MOHZo3#|dh#?_Hbz-0y<puCQE|sx@@mvxJQDd~}HUe?$ zMGfZTKvME@LnSo6i^E}KomGrU<dCwWsusf3vWFHdMB&`q(L_Ys3k|_W-6EmOfpZ|3 z1t=8VV`3XNtPtUu!4=B<bgmV`kxK^Klhs`sn_z3~6Vx+d6+-F;^q{nANHCBjk0pgh zgbS#hg1SHquNX)YMl4`*s=WOrgbw+lsqmutz>2JbBBZ28sHl;Iliadb#>z7jHfIr& zHwJ75ES6^*tc^riihoQa;b-%)UtV7$Lkm|+YaLF8O&l07svz1(2|F2TB_srxI6_NB 
zVi>IV0kT&U4sJ<>O_*|YH&3xy6*(t2Ez9-%`f_|v0}9Tb^Qp!0uT?Th_0VeRKDMX2 zW@KQ0@Tp^z{9cZs_*dpY?0{Mz634|J@j_N3-S%;DHr9YbMR|O%SWtwmDN1iGF^cbV z+pLiAG?5QOsfHEicHwgEY{O$$w#y2tYM4B34IE!%n~GM4w&vo<;*2+NYMw@pDt*75 zihK{n>-rh^Uz760`L@C6hCLPb*UtwJz(NAS7{W(Yh7nD>{9IN(`Jg!@nm|>y9Z?OM zh|32vEv~c1?sqcvEFtVG*ixsVvoAAd6@F!VD<m7AftJrs?Nf^nay`?eoE%s%o25#P zx#6M?N>r0(Uv!c7zf1If#qqQxe?NXs>pmTKzQFwUb&>UZvGIpAbPoxz*@FOmZazD0 zRP&EBwWCEvREqMG9~H7U@-F9j_)oTE;QWWQK?jWJdk1G|k~~l^_KzV){23p?w$+`I zBdG8~SqEOto_*LzjZK|8y#-EBDoNHdL6KIfi4zJZjDV(Q$iR{UkTQ~e2Fvw_aESO- zQtDdy3Y!=nHEv{Vg`zeLiGAU03Yh_1L3IoewWNeml!SNJsyp-XXQjtv&MRLP0!!9) zl&S);=@mS{;9q_SwVr1uAz-*+*s7P+H*gbZqCm$IP()Me*+T~7+CwMrH;6gy#@Yo} z4yn<4TQrAHbGVh<^}9H=L@C9zl13waJLV*xt8<H-5xbM0;MMFhljflvXo=eOyUfWX za!KR!yl0U7&WECW4-3eMoryI+_*jCc9})7Zr-ZPnl8Fa)VaPHhIGS-~6zE-2#|#eD z61gXYPT^9);X6h4a4%rCMZ46n%m5+V#B|)MpN9cpQ>f<*%+01xv5_^{Dsu!nw+j(! z2*op1O^6b&I5`D}NI8p#GElIyoFGt249gr|hM1oF=43?irP<8y^1Uacx<qoGZsp`i zf~KP2r@9_s-aTimY*9J{iixS+-xriRolfq)Ag4-XL!uz`qMa~Mz$@QH=20Mrr--0e zS1_V=2WXKLGZP6gozt83q#7jH^t>q}ig61k93%=ABs#J*4x<NfqFS>KxOnJa8XTha z3uIh{v8o7C=Bczd1_NqtIZ$&lgu4eFQx48Idc^|?<d9V)RZ7u>Ss=@)RpRK7Sc3SS z-Z9PX2^cu|-Uo@*poc}i^Nf;4=LmyV^T}1mmQik@dj*gQ43^l-E95Rh!C<mk=oKJ# z=3HD0y}i{_n92%}H}WB=mSj@uCW<Ek>jf;LU{Nu@R1oC_H;cet_h4^bhC$Gekm0}= zvy8P1lm&s<Dtnee3uto=M|ipnG0+!YrIa-yF@^~zYh-z3N3i1^M3dq5KBj-^2twuL zgsDVIRHNPi;@R@>J}ys|_3~O(ICC*Rv4cO*$p*_%k3j;KCa6104g^7jYt5;_vy+mv zZmWekIpA>|rY6*6;H?&_s|i_AxU34QN;HsQz{^$@qCnE36DcK&8aXD6zBY_9(O7a; z9d4IGXq?r}QNg-#R#yvAN=l{`gIp~+vZFv2b07n(G7IMWLOlPMx%?CCsjBXE7V(Sr z-Yy;6Qu6U6m+jiK(VSrHHU5wsYFwWug>68;W5*S8Ni*7bj>0?tE%ve^Qx}whFrgAi SP*K=_@pmLsg$WCkS-zkz(XEyM literal 0 HcmV?d00001 diff --git a/tests/abstract_cases/asciio/relation-edges.asciio b/tests/abstract_cases/asciio/relation-edges.asciio new file mode 100644 index 0000000000000000000000000000000000000000..d147506010ecee274b3cd0b543f24755e4190d35 GIT binary patch literal 4052 zcmV;_4=eES004pVLRx4xF+o`-Q(1?M=2rj+a({?__$UC}_y7Mt@Bjb*@8ADG0ALn> 
z25cHg0r%71FLhZBQ1a`qDOGDAHKj^Nb_SgQqqcW5LX9*401Ym|+X*5HWi~0Pv?r+0 z(?it#MA}AxX!QZ1lhOu_G&ID}0V$CIB5D)VOe1Llh%u>wGHH>B$kRh<G|8rVK!*m6 z8U}+vWW)wWhCl#mra%A#1OOs`s;X(A00000001-q000002$D@H7z6+`8UrDq0000q zGynhqsG<lE0Sz=$(w;PvJw~bJ8hW0msiC2#lszHpdW{=U(De=3B!0B2uW?^9EG#P} zKc!RE{cJT_Q}L+6)l)&1WSMNiRA9>jWtNJ66Acq3A0;(Z)S58Lk%Xlb#aftF6*w@& zEUeve-0FJHzAGta?)7#RRZed<rcK(X2<DMT^&D>&7>?>tW%4vzRxEdN%3kB@Qyoht zk?<ixDesq(SI-_H^@et}37jR)Up<>Rx04Dfh|J*$13l&ljED^G!g{*4>FRa0acgz% zoYJGw;9=gXl&YjvO4cgZh6(#s87W@4d)Z)>QIe{#l@*p4RefJm2l5JmHB%?KheeZf z@L`X3ZgY7zVoeuMb{(&IvsM~7KIg%+i&HO~ck;P4l0tp0*ml*kinHAN8GRH&Z0tL{ zDg9=@E+c^vVvh*X=aNaXtRWxx(9IkcB&BH6D6+Ftb{NxxXQK*i)fh>;i$)ne>gMFo z$*P%GqNeOQnlv^dL}*9!BskY6IW{oCKH!FPGHf(>#*9Tq2EaQ|Z00hd2O{G=F|veM zLo!j5D;yW@`@ZkH|IK4yr=qZ;qN-1ED5U)oB4?sBkYyzH$t0ki&Zk>*2$L)^8E))k z**UO{TSaQa+N~b-dA6LJyxBaQ3>C~tI_N~UB(@~BNRJx4q(UdPm_tIQ%LbWj=FY6u zPE88V9F>NSrC~<O(Yq-bR;;qizxTiF%lM-xvI`*iI;8H2Y)G~$CkfKS#paJzosC*K zv@;{EMu@{Kn>CPOG*P08*`qI-kiwJvZ!}CQtXNXYsp^l^05jm9vW+}2aWOPRpwicp zLKD$d2_z7K2mk;OhVyn&Bqb3MGNLjBsM92pzWbWACIROoOd$wD7DZ|Gkg>3^P|sJ5 zQm!L`Bu8Zx4B^Yuq(V|crO+Y7%$Om#l@Tx2<<%(?anGf?i&}xAK}XV|+wx3(O0Le@ z_R-*0>)^y2?dOafX`I{7RPMV<m&13<!)%flOZ4!0e5;!|_6*s=2xAuq6Ap+~5}Dc8 zC5S;rnJ6Te1gXVFQY9&1<mOiVp`2?OuYwer4P!eAA6@s*g&EGc?8Xv4FOnrye54p* zgo^~GFp-c{q57hbY!Hg2h)Oufrn$>Mm6paBeiy|^)ax=*J|<7eBuYz^P?S+YNTw<W z1QO__1_~~~{bEuulVwR<E%W`KKa1-48zZag{!OmGXD!obw*9un?(^eB^sTC$8n8_w zg{&cGFu=z12+?;0VlOE}9xQ$cp1@?Mq&D+@$8@&MgzDTl;6;HmA8kX4@QEKdCj;&h zk*id3Mi?^;EhE)rOronO(HUjpL)j8AVnI;JQTR`L<oFCD2<afwLHix3@k#v%tfG-Z zuvSh?u*n5+6@p@g(To5&3ZUmMYDEg9U<M2=Tm-54327}Qp`Z#F0T1O8K_sOlMJh=o zjEqE#RT9$`E>VdFXtfbmMMW0LDN$->%%YgGM3&(e6E-A95pzT)ih@-Mf{4JyX_%5U zXr?0xFqF<2sb(`0iR7RFB2WMT0000000L5pN+v{-h?Hp=mKaGQe`D}JR|jL&oEU6h z^Rmm;*yh5LNslSGQWr)(Mu6o-M#(**vUm+Llcrp_SYw+GPx<#cG*v{Kwtm<B6{9OZ zBA9<uo4ofQ)mFlazB7+CQ>}XnNxdyj6>Jc~B84nAqSmm*+4-YvKYsf*ksp@F1NQFe z?u|jPR1Y(eA`RhNzu(4jXDL=18guB|BGNL~J`u|fA|TR$v=L;L;!&g;=VZ;1DO$>! 
zx#-PIw$?=6z-Y2an}`D$nC1<a5r#~Y$QJZZ4d<OIcg@<zBTCz5W!@2qEn*0h8`d48 z8C$ljN>5V<6`I@7f?*`YVidTBEJr-sNl~iSGri|_;>qg;^_td-dcEsTvwZ72;DcmD zvP{MjOi2OqcdNDPWv?qbn8yn1;*by%AxT=#l}b5u$4pxC=%$Oee01nU5fS3*I>{$- z+>&}Ve&(->!bu6ERu%sS>;wgb1%Y7KDAzsZI75U`Q$#7EX}PlDGS!2U$0hsL9`^?9 zxT#fK90@E@LIWj%h_xN`F|cANWrf=)jGf};hD8?{B>;~|W&}i+EFvRcsQlKCnN2A- zM+O*#GK&&0`aX>swrc#X%^J)q>}t{4s#g_Gj)a&;<~CrNNnYi0KILLzX0D3vR_fW7 z45JRFh5hYXz3$(q-p66w=;rTPE0Uj`l-<-x_tG-$dOFO%GJlBVnPK`$s+fC};Hu+V zn~3g_x}ymr1z=RcN|hB<!c~z<u8q~j@^Eqdcf=YZrk<@?k^sUeZvk0anwTQGP~R;d zjh72G+f$RlyG@^$7<4OBp&b@OqW<RG5dlgfJmf?{gc6Qq2z>ff50)@zG9HUpvdK1R zP8m)lrj;{dv2i%&X7wsU9Jo$<+tq)~`sB8)NLwxGtZ#LW1jL_nbt4Abjzhpg9D**T zB4M#8)S1R@scNj5$gQ<8)u%hdt%9s=VlIIwVt3xHLcl^UB3MIW2Ap*mDu(G!mC*#K zia^qK$x<a22vny%GlIoMM+WMOjpAZioM&!pws5y?k>JJy2n*O~((VQ^jF_F%gor|g zH1R{Tg31)w*P&=Ay9=OGez>WQ((hBF9Zx%UM6_!OT^LuH!Be?PeyD@eK=dHGM1=ae z{N7=OaZk`6V@x(s%B1GPZMCwnZ6XmXQbI?`WCz<nuh(=BScJI*klyCVn>I|?u<-ro z!833`96P7dd{$6ZN;fkywY)b(4HQ838^a^e0y%!uAXK?>{Or7X_0=}TRV3*mWo@=$ zWQ}XfcD`HiaANL>G{oT`Wo}YFZzmSgJ5oot%eLDROo*H_2{W6XwnVHki{d9v2$LA^ z)6PKXiP^cNRF5umm2!=I?ltMvY)#~maODve%Vu3U?gq3c_V#Yq!bFbOW7{G}k1s7q zKeIXD-=u+zL>iD+8#xjH%&3u4KoZT7PE(myc@9|Ww2bn3K&m;aB=2mJJg^gAImY7m zwb>+kh@E}Noz^+~ICq?|+q0;W`9wVGHPs)(5FFrEC>A=&WhhC+L`KV1$b!a7J7I@= zL`1}FwQd!ReUIpUj;{OW>X9SJ;oM2(&OErcl18l~-{{A$ey*vXTtam^S3ehHlN(ZL z5<Sm%pQncp;7KVfw@(Z&Pkv3@TygHKIo}RFCtn#PcD(W4PEKZxqIBmIR<=mBhWN_} zt@f`&<>i*CHCrTlyqqCBJ+dTqIyfYaE$yw6b!pG9gD(UXQcDDTQYxaVTOn#Vu~8=v zHy<1*oN}5;B4f%x?$x@<sz|~}>7E4P#+`e*$s+aBZ>jY-h=M~@F2~owq#O`>qU;t# z5C(}lq`_q>i!+84?aUHjk)%lXq@1c74HDZ{Sydg$B=GWNk?wMWewhd8MM(;ZN~HP~ z5-?XvLWHHL#Gt7{tdXk3fZzg10SC7M2bxdv8a>%O@_~s8l0)chkfVbJ6{8L?O!*q- z36{=KP7O<DkMxz2xR`WvOe$={n~QY+TCyr~N{hDvwfk|pYL*T$2;najn!{dTfO=RG zBvu{sDAh6yOON<ev7l{7qpT@VkpPr-E5KlDBj4#`dbQ))MHdd|a*KutE$wOD!mD&; z<Df)!w2=>swT2b%d0}$$Y{O$$)XNI0YM4A+8aTgWql#9C)ac^K;*2+NBrF6<Mh%ge zG|O>!NNOGJj@0C1Vp!i_vmoz?OC&1Jb*z#&@%<dM+x!j)=u05eTT_*oyJHuI1;Z0P 
zuY<|t%keBB=C3s>d>b<KY{IYSSG{C|bK=Wq*Sl2W!|IQ$>L&*l3?}JPqi&dJgPN5j z*_P@uByXOr(X)t&BW=zIoOds3jkO~ze}C$B4G5KwV~NGHXX54IZB;!-qs5~|MO2FR zlOK-R8~Yb?-2V&jZsh$>(!&pft>x{6p14orAHk#ls>l4Tty$R;I}V_tJ9cdK-GLOk zIGJ}eP+aa3n|K6@vN9#nPm;uq+$^QFBvWf|_PBcwf=HgECS+%Ql1B+okkNs&g=u8a z*>vhNtOTY5<%3WK#3VqP5;GSQ=0lFFB>C`!<45{3LFIH^nAi70Pqj6FGAPFTyL(wQ zHJk`ean_Cqk!E`r5|Tgnwi9Css(Pz;{Z0fNw(hpcB&y@vIo^$$Nb2)>rFp)N{w-oD z#ju!-^z@jK`)Rqw(Ia;^Pr<9$%k!d>xg<{~(dcA^=JEJEPigvGpBeaFk7`Jr@@jwm zDFrzAA9}~aMqwt=BBxEx)V3?J%d%WCIW&yA^-eJ(9k<|*j3mkMd)hr1Gd$$2q>@*^ ztvJUr6a1Q)i5_nAcJ3{wBkSF9!6bJT=g~}AB{v~BNXD10Y-KrA-K^Pgtj+3}FmA)K z$)+DS=Vl|t(pAKf9w~S-d%dr$_<JHpN$KovW1L8-Bb=KtJ{9(zN$z}HB1e)ei5(vg zM^WvZ-ag*Qk>wjJ#)pvdMNLvk?vD78XYNO2L62v8Bzq53R6}%X!RI<Zsvyy^^}7;B z7~(~aI7l#(EqgX{J&j)!NX|DswerL9JD$c(nR5`to5vHN)Z-l;!Mk9)2OU!m{x^EU zERswov_(=yRIM0ANHXnJcybX)FN49&@+6ELJRMH=W|Be1-<ydExxj-~^XtYs`)(QY zd({-1LptrYn>JNuctT2g?$N8NX`u<+6Jx8kM>gB3f+>=UW{%wyvMF&CzGhGeXd;=8 zx0j*SCkDripIfi2y=`eva!C3zBRpNnv&o=Vz6PFm4MfHmNhgeriSUS<V>}F$&rx+a z^p#AK6$S|Mz%yESIfu2=;ivD)Z8b9H#r{&`GI(+m{fQ!kks(9vVC0N2jU5`C8#%eF zM(Vgzo2Ca6bC{b^lY+Ebs;nhtM&ht4sVLGxg99yCR)R)~l15TX7Bq8B8GRZs%SB<$ zSbs~Y$uv%i=&0b`IIAm#sHG)S3c!<$!BGMw01_)411v5A=<EVE@%@`5GALDi%AJ+( zZ)sl5TF50~Qiaxb5e73JEy;P+L7QvM=<s8+-F)j9%&GoX5}G3{m@xLgxLwH<;X*<l GGnrfj7=C*I literal 0 HcmV?d00001 diff --git a/tests/abstract_cases/asciio/synset-related-with-artificial-loop.asciio b/tests/abstract_cases/asciio/synset-related-with-artificial-loop.asciio new file mode 100644 index 0000000000000000000000000000000000000000..d7b9da85dce34d60749913554959e5b7b34b797f GIT binary patch literal 4352 zcmV+b5&!P+0054kLRx4xF+o`-Q(3OdeTV=ESpSHB_$UC}_y7Mt@Bjb*@8ADG0ALuO zA6)DjNCDq1!)hMPq!sCj;)68M8#cf*ZJ|J+RhVIkYZapq13&;M83v}5)6~@22r_LZ zj1xAgh+!IN3;+TH2w(stLIfgBG)VNCGHB3glhk^HXqqxSQ1YHoJwWw5lgbZJ;L)Q% z&}a;pfXL9u01Y(A003Zs1W)x;pa1{>000000000001%|6rjrv%>Yims>TOLU)73N{ zlX|0ir=-)$G-v^!dY+?0Ky5%36hQ(3Fq&#;nN6meO{t=KfYVP<)71Y`Y938BHjM~r 
zpzM-9NmX9;eP&o#W?@wP>YmTs!&Rr^U}0*hpvy8$wqU9-Wq~rwML)>HM9D|crmC8g zMj23;FhFJn6gWl@@RFLg&SScDJiJy?&E4wkDyp2GO-!4$PIc95L&QSc79eM~+A|y) zEvptiyunYs9NeyNCYj-5OsLzBtXE|h%ffJJz2-N`lUDYpI-$4;umpD`0)UJf0y)SS z>qU&pdZWmb*05kM7#x5cae@Sts-#s))>W0^f$yeDRxtEsf>lOKRfMRlu)?e3@nHGI zNhE5fPevTpP0wsG>6@J1P1ut~)6)*esk2rZI6r~x*~O`s)ZO*DHIWJQw_)2>&MMDy z?=t)<K{j?B-WmKZKTh+Srm8xP9|0tr6=96?njq1+Vnz(2q)jt5H(`x9Hhb7pW~jnV z+*&Zn>aK2044SExda7>2nWIBxWn5MNCQf?dCnm%O3HioB$jPv1?HVwtV2A+gBV&;T zCL9+a?hyqHu*fn|AtOdd|AveOJe7qN6;gPTicG=zWQ5a#Yaq!dx<r;CI-FdauqvW5 z!w}1NHcg~+VKr?Ps}FLtd8@s&<lWxM+~8oYXpz@ZB3dHS(2uge`{Pw@TV-dYK@-WT zA+b|}p{86NOzO>4=+La;(O78gRupWl8?up=YRfFVIFIUoV@LTWQphZW;MPZkOJNqR zlt&5Dzhd`(EIl5m@^9YNqphKt9m+ID8D!b4g9)OI6jsd{eQHArPxY)?m1S8$=@G&r zNeCt(`Gc_{0q@J|CW)noJ1Ck{@jn+aDWWerLJ;PfWB`N+KmY)QH(Qd4At;omCPYYx zOmM2R{B>rc+<a@G#L{D?MQt90w=;rgCH;RSnAVh<Qg_)S1F@HG+@O~Qs1XUICML!c z;Y6KWpx&w>dL24wk3=p-{pgkSI=m6n5PD$sKEuej9#Hymo%hZ8MY8$KZ2HWv9p^3* zSVSD}&V63}?Zg~<NH^a}qG3u~x)mU!)73KUny{HGU8yQz5Tf!=)AaVa#|dF_*L_sk zB$JlHMCl9+Pv`uzcDQw|c%4Z~(&s{%iSd01l~wdeFvA5#MPN+Cnh`Kp`=Y|S1d(J$ zRdA285&IGTVm}Hpjflc9kFg&VQWH`;2+0-kBP4z~BwbO2V+g`9Mj;zWKB8*~!5T;$ z2>B#dA~r>csNojzr=RpPVa_4+0DxyOVj!BuZrC7>mC}_Cv;`GJM6baGb`TXeU@-H< z5$T87%van=J@0#iU8IJ4H>UMwAC?2J$L7)SO4B~p1Jv^9Fqz{7@SvS&uMr=8BDf=7 zct!}Q%_pkJnW(7FnMP5-hm0cxVnJaUBK>4KMDYpX0W&!i4a%S7PHuEhiW5;QDlkPx zNaT#82;@mNNh&s)!io>JNg(WX%PPVy2?b$fSPDbr6s00l6>%k4|F}X)5>iGKf@M^c z(&7n`3kpC$z*HjxlD4o{P*SiEw2>nr0Z7EH4yRCLXjl=570XhHm5>UXh(&6WFtHN@ z6{cb%IwGYb3>FeFOkhnyY7;{OV}YwAssM=+fB*mh0II60s;a7#5;P?OV57M-NMNFe zyW=Q}<;N+i2<%Wbhx<7@pF21(;9tzkFVgOHKLm*@^FK?8&0n?iFzD$2i*J*ux2x@O zeQo!3vCW4k_4hJ5Dyk!P&$0WJqbok;7=PK#9q-)!i4tTGAI^P*n)8L&nYNMdnM{&7 z={IJAUA1PC-pF8C3qVmio3-HK^zYy}1f<dr*fB2SMvZNk<?`6q2-Ik7Q^YOebDVFB zM7LOmLy7#h$n?|CByNUrct}QUg2-+)DyJp+SXyZcpoTl9jpT+^?2xq3F5*&yAu}?S z!lA}fd>zO(&H^(sQ4AbGo?Nz^MCR7n>Y%*9*6gi|1t(y&iKdj!CeFado|;6rvH(g1 zlp+wp!U$+arM42*t60{y&er8;8o_;LwW6a}y=p5r&a<}S*hw^rL<uMagpY9Q8Parx 
z=)TP^L`5^6goGkwDZ5%Ysb?J+=*GKe8Z^aijBUSUb`l}(bscfsQWTa`x)ar__BDK7 z5=jP)SXb+A!DNc6k)-q(dJmApuG%RujBuZ<ahf<&M$8<RIV}D~k??SC%Zim%!NHcT z8Ytk^KaS1oe|inywz9?yD}xRTJQ^lX#$2%iqT~`#4DR<uCPpeSTw+`NC)(AueTy2> zWjHXzNf}EB!|r{}8n$ZujoLMsRht^Lcq)~}Q=_3K5#VgWGLpUu<b8_7!p&V3-mTTM zEEz@}O$+;4wS3Os$N3oSJDnWe>m_ni{S2n=q(|heyI#(-FXT_<IVM<tw5q9xy-o_Q zHL1LkcQ;gFBw(VHsZmu-C0Qv_x;IxB(%|FtYne7Ch)=KCiK>Nqfm=MDO~|jc4*eUC zaM8mnUctH4YR@Ii9k>WAjzI<kgt9OfM~IF3AYXmmqzOK7w8{2UgMkc-bkZhy_VREb z>BTr#D^}CG$-%6RayiJRmQ2NxGQ*2buJM>CiN}u%$-FqGj`zf%ewb%fW{1QP$770; z0tkrQj%f^uOuLrJQ3Gnp5=6Gd*v6E~22Jw_tY9wlS2u);YRnWxxov75QX06)gouHJ zI9$!LGOu_lF67g*0#gcLL|a56$qrsM)`g8^MZ`BEFd@AqEK<`Nh@5ac%njb+I^;lM zfqAhk*&7km8NPI8Qr@9Cgh0mJR3dSaK{xJ9n1>iQ>-IEZ7d5o44c5CVrD5+CG-sj= z(=!5RA_jW_fs3v%VfdguQ75PsG=QJS&*{K^2c(i{yp#PQ_u3(b>KRm>7)`deRt=&8 zF|weE^oT%Dli2Eas^EbisR92&5d4q#C)u-q554CR{%_{GM~py^KYad|^mE7{<3wue z=kob$0r7zOJ)8ci5;}j~x+7Mtdvtg?cG!3_B2*qkP}2g^m_qFn#-Y1k9nrzUMlR4z zF*rmSTbU=|c(}H9qCGr&ZKN5IBZf$syiGHMN@-T7TUjQEj7X$0MltS?=}_YYxbMLl zc|;vhc5Y1~StpNFj#a`n=(BD?<)9$9sFoZ^jWoJBIQO8tQb+!KcWdD!leO6Nl1bmo z$5ey&=P2Ocq(O{H2B`;9H0%G=LEq7<q(QH{R~}fA%dhi1*pbwcJezVIB6C<Ldt^tB z2^#rMHy6FG$d73fI{TpRu{rth?}^J2Htgz<sPTw-RMi^({0MW2b!c7v%;H9tMX16- z$*!A8HE>9cBzGI*ZD0wBz)1-ugl2W0i0eDQI_Nv9uQ)tB%shFdc%zGX5voLKB%cpf zJ-hXFPD4*@hf_-D?shpan<Avr5$}8ay?i)-awMX=bnwD2Pliq0TygKH!47xBk3x0v zksYrrcaBbGjfm;Yjat-}u-_SC$-dPduhHjCY;J(aHYi-uM(i?5DJgEJi-2}|NhIoY zb3|!xZETKOa*n<XydfDu`U;T=611fW+%iCTdJgA<3lQT-%4kAo#vt$3M(YUG5rGKe zP-G103=z)<F$rF}HjZ)55)mOfZ_^!(%`o5WkJ1PpMkECZCb>2-Oc0SVGbKK@g$Dt5 zAf6FkC|w*0Au!ZHQ_KqY`OO0KoJgmcQ_3=_K&&K+`2rD474gX)=;%!*L#e>T&}zp@ zl_0q!>o<`#5m2Z5;U8;1&c%LwzlouRtEIJ$*CtII7%;6Eafv2gORi!`hR#tVgH^Gk z`O3*$OgcHH6*ghb#kzczkyDa%bKJk5^6Aa|F18h&KDTR&?q1qthk<~sat{Hy+b~?P zy<$j{r0;ya?)S~}_iNf0=SPu4LGCo6pa&+=VuV=-e;2e(n%qewQv?a?Xe<;6Rz*-# z?^+@ZU%$%v+FZ<+*KF&m?{UAguYU-M+R{WmhkFbw=6T_A^K8RoSJcZ2s%n@#oEkX4 zL!*jTht%NFAZSDwGFV>8Mur{`JOnWN$G_X|lkguOoKG(X$HN<ZIodyN4UaTPGNex$ 
zM#?6&@@-f6jEjVZkPTIvH;8v|8EBU-Vmi-@$>huNtRd#FH7a~tGWa%OSN!VsSs>i` z*=+m!wN5NPnEU5ZI5@CjH%gTobi+g()Tt)Szoie-{<rA<mG?A4DR_pQo@*aPy5EF; z8yy6F-!=J;XyhMA&}g}FeZ0Ias;98^dzv&<RY<QwWAR%fe=^=3C%|?a&Li)ha|}I$ zl<oKH=lbBMiwEXUl-zoikLtFyXJjL|>?0;5lfP!qj;tXXn>zJ;<c{e2-tyX8B$zRX z))DlqBX<iaZA4RRZ~h!K9`omcJ~AXJB5LAD%3<z;D7ltPNRYVekWGOi;733sFu?{x zC=o^wBd)y>*PmWF+C7Pz)_rs#F3YQc;sqA+f=AmCVILC&h{g`y)=dp(2@*6p$3fPP zC`P@D5kJR<5~fqxTEDQ|4x4ed$dapvY~^~JG?S~z<cjir%@N<lu#%ix!WfNo^_cxz zoLwZ6ZszIp>honEmZX!q1o3^(Q!*rTN4w^Gk3;!fpBeVM9*`$|hNK)j&&a5IGDy}D zQ=Z3STNT#h*De^Gnjx1~sm2kG%j}Psk(1f*N8r^SsLYY1@{+faC44&*mq`*on^Q24 zCxN?n7Srn4dGJXc<$3f)h{ZyN@+5GQQyN~mv6SUgbZa+UBC|KDV50`+m$+*a+dEAv zr+{t87=P}QtUO%kCqv=rE;vO=9ON}(d~54EpE4=$eC{$M#TG(3Hus~r^PJxwPRK`$ zY_D}A#s`f=)e?K+JK+{b?Y)p0W6!+^^F5J3)f%J9bbh!Dqht6Ti6e`FDmdXF!br8} zvytZ2@j^3=&r18)N8U?2k>-p^8zU}c7?X53oefSg(cDJuh`R<3I;LIxZuN^IOedt0 zRU}nP(S%eCyH#EsWRih=9cPj5PN&dBVCC+1JRRCd9PQwJYa&MH05xwutYf#n;h!{o z5o%g(8P{#J*&8CFqby8Jc#<jYx@e72)ilyXj|Re|igk9$&9>?qLODV@)4~y%5vmcd zS27L<bR(%9X!7_P$-%Mik@Y(I$h~qkq*K)-ec6ba;_gkJEeNfA4G$|~;WZ?f!w8ey z*m^=doMW;_=&SO=Blv$ETNy;p_@Y%6*5;!yAQKlnpdPci_WQ@==Kb5V<JFmEt!8y? z``a+!^Td9p8LOf8H8~>;BVLV84V>K7qjg*<&C>&j=P@>;Ck1G=Rai>Ojm2P9Qc<LX z1_oNNtq7A!i43HcENJGMGX5rvGSOIbRvpf#B+)u6qN9U!;;gO~qLh_PD+ah)bY-J} zYU*GIKwuX%9`DMRljiZij($ZoUPn=9d)xH8+8wOIUL2{ZR;4%W`hai*AzD9skmKs5 uqU8kwO)o7uZ5k-_dpua~_+LuKGb(?<gr<ngCJa4a{}*yaI8czT%YBH=5heBj literal 0 HcmV?d00001 diff --git a/tests/abstract_cases/asciio/synset-related-with-artificial.asciio b/tests/abstract_cases/asciio/synset-related-with-artificial.asciio new file mode 100644 index 0000000000000000000000000000000000000000..415c27ae2afd3ef67577aff69184b01f36c169e5 GIT binary patch literal 26320 zcmce-cUV(R*Ebr9NRuK(ks?Cqg7gjo0us7FDAK!>0D|;lC`yrzl+Xm}y@wL2^bXPq z2}tjt^rk0x-|zFj-}jw!UFVN;ot4SV+B0j-UVAdL_RMc4hX8CnAGz0Wg!yC*ShWod zXjl>ek80kBy`N<VES&xM)Ar{dk^z{dK7OoP3&;bSng9R*KQgQ{#>Zp;ae(F$qo@o? 
zAz&LPVI7Jo`7fH&3mzW>06garcmVOYSvU)?Hl=k)U-a=@RR>M$!7*Y=rZkIQv97;{ zOCeR<&@rR%yF5C>`S6}(dFa-3)?6%IF#CF?ZhRp8t1yD_o@OE7WllEZyP`D*1D16w zkxHNJF%l@LvTyI=Q;9d}=AY5KD_YqGG@6+)h$owqWGV%ImZlDSjYv;B9Q7|UqT!p} zr+r!3#?pvV4T!|9IOpc8eZUyWY*{6s9(}U=1;PNu%F-~)6302Pi5_j;_ns(_{beXf z0|o<LTFOILh4P*$8yr}hOqjg^JTi#Vl&-YP3KGP}?IOtkn0k+(*z10#_Apx=xuD(V zq?GD}wuOwn4?lkH2SF(85mwfJwTk_h>NsqzSAsp^anKaN3v8=)0EH?5*BtLlkvx99 zlHF#o3?B4fVJvd?kFk-d4B7m4;raQK$Eg~xF2f+KnK0sf3Tr7fDGk}e@mgqMHa<QX zHjbPiD~_CYzm%Gw58F2Mjg%T-Y)ldub3w6xSv^qm<<qnFxexwp#DWEvVCjOZhmN&3 zPl>s~g8V}X+_RjKG-j&R-@n|X*zxP;X{4s;%+eED$jMmeg)8F9-3wOIrHGGeab}y6 zyeQ2}Yl%4KEKWzC_Ndd6oL2vVg-V^8n?kC3#1Zg~8e8PxJ=61<`~BfrMUQKPLd21? zQqty!Nk-&c-i?HxC_eFs5JocB73txAz%hb;oYcaSX)lib<QNLwVyIevUUj)>xxRfN zA+2E$S_I|*D-|bJ;aBeK9rrV=6=lhM)NVDG4WS^5FClnkVE#dGzF^4UzT)Tp1M?}X zh0Ds`HW9Jr1>RI+-@~7tB95Kv8Y7pqjd1?!k;k8M%n8mK%-3`qOQf9}lehH=5M4(n zwkKjY&Emz56O-#Vk=2{i0~I&vvu^H=1Bv<7NxJqfosU*gXO;PTaUtnqwViC;#>S8M z9rMALR3qsyci}fb4D=M<=Ox$@KeD|c?5rt?Lmj%k6%w_?TBD~=vD3iYeI8L;mKe%9 z1k+z_`(0|rv8PL^0|xD~P{IPQV-t*WXX8GHOISRiQ~+!WKY!2HekG_Ir!$#%_oax3 zx>Ck$g3YcjE$?ZGMo<d`MJMH$S-^b9JxGJ=z8p(l6%#cSgiC1!qy&;{AV_xx_(8yU zLr5LO5Yo7ZEB~^kHY_cXE|J2()zkLPfXAES-7>+Sx|B|3_tpu`@2T>3HdeRozx#cp zop#Anf~7?06)lNnLiiN=jh9Tch3ryoreyK#=1<F6{o@zAFT0Toa|LBN{F0M)53LhC zx*}OW+bK9rJloZJsP!IZ4l#fvahGs*8$f~<^C$>Qs3}7j|7>65E3zGj-AVGl#%@Z@ zTpnOJ_)h+K^bM|A7y!VeF?(Hqj4iGo64WPvaQs3%aqawR3(;)+#cDo)TVVVOXR%5< z@f|zog}g?SkB{R<R(y_AeMyUDB{EqN+|)3$(U|O;!(d!*+9x#*7ARBY*hdiMp{6`$ z=(*;Ca|9PkD--w{Me7Cmz}5;}-;i~1#h%?y{hq?{7KO(BuT<RBtQR;vaV!=42B$Qb zInZidMU<pS?MNvwDXW!d*@<k$%CUUwmUBK6=?kBx9}t}%uc@0;daSIw=N2ZX_R6Bf z-kqMVX(Rc*VncKL`s0O1^qnkFJFpq~-y8Z8sLiPIJbGB&H;~0w{Z2K>;^--GO{&xW zzSI5`dVe3M<}QRQtxWfTSq$XR*Ef-D&p$pi?tb6NK4TBjU~dDXfTb0@erp((@4>BO zGu_h6#wH@7-u<HJxkSBjU(l7<LTh$SYz;XwC}e-ysli<YTxWrcHV$qo21}JJG$GQ5 z;<xvgtxfif=94lli)mDhy4e@zQHJ;6Obpf#;a_`B1`9fM5iV7R1jS{;sHp_IJr<$D zP48^To_%N<u6S7kSkKo0hCm={#GQ=3D||3d)m7hw$9JBpk2;+)041ErGb4G2kzlNF 
z?i|geX*gOu7$Me}oRB7<?nY&@RcyD5Kji7o=0D|2TWW?hPSe`M58vw=p3kVXuqgKU zMMz;^)MO*Szi#t=FtO(TR#{ORqAX1v{<#b>9OZMkZlXw7^>9Raa-Di_pP;E!M1}wD z+V<DVA2bnyWzLQZPX|0GN~46Q*M4k$BB>e<o^qSMoVvq3V2|As@CSeWd`Bvz8R^_A zRlx;IdHofsq%C535HB4feaXXBQfE9WTLyzANDT2eQaQjr>fPxsx1lkLs2l3!O6-2U zZl2gzVl>3xz{BmvJFp7AEt@euE-^VGE?2lM?Ra~u#8ieUU3;tBm}2e9IU_(^PV@<* z^iPFct*W^TUu(h7OG@bdcPEdBOTMnX_I3GHlWYH(&DthBVxX+<t#53u#dRnXKhgHC z*w9W<VR<mFR{G%Z#mK{9pSg_0jFi&U0t%ZNqS8<0`%aGW%Wm%7gDqiEi1gm7hL~Bm z&9|sU{{!ui7@fhw2jm-8Pm}9;GG2ou5GW3gm#kBSI`A{0)$cy$>$^sd;Pv5Ly^*^? zZeNuj4Mq|<SsE>lCe9F#x6E-*PWkDPZ-vi+-SfchHJ-0eX!8mibk@0utu%Wg%cn$z z#)YT@LZzYZ3SS7$r8Uf3FTV~OX=+oZ4{UA#@bd3kU5jnU_PV8UkXnREbuV?#HtX0w zvDULFS$Y)F>-4UU=W{<rc19Pw$7|)!lty@Ad<wYOVVW6lD5|mprDI_C_nA80F=ck? z6uPB&JvVQIM;yFNEs<mIesZ5U;kjm=lqARSslT7M&t}QNA7tC*`--1`>=v(UR5hWB zo<40CzyFLL+g4n@H>gAsjUKfQD31&%`upkFKTVkGy?!40Yo{Nz`1=((+5V5tKV9<j z5{S?la&nU0l2!&LsRx9Hl&Ws)?Y#c!O?tKO7{2L8SM%HxY!`ji?wT;BsT>2=RRH{m zs`Nwn&<6Ir2%~jxC&7Ox_)U~)Nsj$91>;i855!F#Y;z*pTg}|q94#gviT(av;!H=% zS_*2cSm1Zw$uZ4nosUgrdF26p6*C}ni>m_9atigX2wUpQW7|&D;jeSQR{hH}wnAv* z`u+o5?~Tmcu#GR(YsXBbCCef}m<`qLG{d_$E3KNSuT9wT@qP(0hdyUzK2<O=l|I<~ z<9BP?gh*dH3+uK$i(kjbJ`BNQQdpaJvq;X(M12#|uqv?uGdzy8h3=mZus&n0AH0%o z$f8p*A+1Of7;)Vws)!FN!A%nN;lnvk{GRtdIl3hwMsF%@HoUkmS?`?g-SPg6(xyp_ z{VwXsfdEzqFoEchy*-G;ATTkJLbl})Cl640wrf@x-Ok?nJ^$*UW&CSDZ0sxKfxFlS zHhtG%=|<++IQv&c(>rmbP2vkxclAm0z(7hF>2It!9}swM<BHA0VbLNIZ)2P?8?357 zZD#A5Kr~}5iZ^h`J+Mx!<8pW#B7Cl;Ouks-hGk|X{zUZrq4oN)SLVN>kDREmuTXvB z9591!_7U|IU#Qcq&GaxByr}1Lm)oGZ%5>MOoU5_rF=}*O8yx@n+jMUKe24zoc)jCe z?wjglXIpE3gNVy0iNJ^MlEJ4cAgh`F!Z#@c-rUceb@YBSYZ(73H4o>3kIwIobom0Z zHSa8^$R`d^R2d8Cfe5k~jr!BS<3C!Cq)G947Or(iH?C4y75E?<dJ+R2O2iM7omipB zlY^6N;?I}6bF_1!h3peQgIM0;$#&uZ&l0LS%cN%djI0NJ3fB;9n>{9@BZm(^u=#3f zzEy;|z0CXm<?%3cj`w^Vy#t8KURC*P6-8_&vn6q{__NDX$o(YABIC(0viQQSi|M-k zZMJP-qGsDG)1tr!pX>@0_25O@Q$|O*^1zZ3u*0j@(^ZdZ!~7Z}52CU>qQ^eG!ruv# zv|+Ea>?=#sqz?~L4%)f%NQB)?ok+JJ9{O8HvwbBVn*z5!*>3})#=NHbu5EBYvbnjr 
zn)3c6B`1l2$Jr>oh!efZxq9frWQIoB{?B_;0$8?4^#)GWeqj7iLvueA7CY=Cq~mYG zg~v8MI_2G^mdO<I6kW<`$s$h_e=;+*va)-sV}@6T2iKt!bve3NX2KA&O_9HjlcY<{ z&z~kyhcxc2?IPObJbAUN`f4(<B%rvc{I!nMJb9XkZVpc+3ijH88)UsxqLUamO(oZ( z%*PK;UQfdp=Wf1W0_HutfAH?jc#dM!Bf<Yl3jd)CZ;D#Zc#uH)h6F}I+LYzs!ofyr zXWf~67x3|<>-XA6Q^IFgO*ezxSvOxuA$`&^*D}NScVZb9H=b{;U-@gF-~90}<pq)9 zJ)W^`zSg#DzZokhN?JRWU)xLLPx$F|wIuw4>=E=jToa)q(hc(r%3AQ-ensYvO+bPC zZ1F7P4V7eJ$SDM=c_6+odT=I6du=mF_KE(Idb83+``6=jX8ae+cQ}-bamkdJ*=3*L zXMHPv{3ei!@Dq7hIawmzh0f#6m=We9=CA|u!pQ&~Z)RM;<jcaBxaNH?U!25FDuz|( zysCQ?CH?*LOUj3|9U48&UQ<h1o6M<NvSf^AfvjzB@DueOA8j~FqBvW=lr4^KR;)cY z4f&&c&>NaV{O9qE@S~WuBM6&{1jprBfa&|0trpbcuduLCPtQwv`4<Sp+Fu`9&cGz; zL^;TfBKj`OU{yt@wED=-#U*=MPIlf6<2ff$6IFF-tHObqyf{I3hIym$8^|J9aQj4= zkG9TLRz0rkpAnRDl}hyD$3Zm~S=`SB=xTm+`TGdB28*3MGn%HFre7%sno%HG+?R%Y zN`&uSN`KKPm_Qz1`b(#HK{P;EZo?=)w^7NX+h8WUD*^Jdq%N!JkL0(wx-k#-AWdM= zrTkWD9#Y&%s7qMN@xiR~jxLqHc`ZzFb%mk#A<X%5wz)-$AvEilRK^Eu6Q?cUz(4a7 zgU|WKbCvCimUPCwqtZ(Ys0+83G(COIfa_Y0Da##Qf~PJAU8LE@oO-`bcIHHnf77&t zrIv=f`w1K%i{Ey5+0Wt3Tj9im_)WbPamie|h@CZI_bOa>!a^0pS)y@SgBFYq14)*x zr`@Lex?A=;78B^L=w+n38QER@<(2Ld8Nx>9g?b$2of?tvy!cc;>nc;dwX*oGV{>fu z_~}y@8!D#E=s(Xri-Y9V15$k)wh$BVKR7H4K04nj(~ErKOz&Lh<&2u#ah-bo-FwdV z*`CuU*AF8Chu%{roi@Yix6Uw`XYtwS#kEvZugWn5dAIkyV;*vD&g8n=cliI#8@|&! zQOpz>sQL(OLOwUFksu5D!KF(zKv8DQ|JM_O6F73ogcKeM@E)|XFIPDnMHNvp0r;3$ zzb`Sya7vq+Qkmlg-M5mm4pxt4H+8blv7c%YmBcRIwHD)hOT^61I~lO{J>$8SxJX73 z`3#LB+f&^rr%hAz(*ThR?@fWEM@7c(ZhhV2VVT3!ca7$Fu-w(a)hxLeJk`>5i(4?? 
zK#S=<ctUylYh9Uz;K-AoKV0Ussx!K6s<38LfQ3AjuW}TK2~x+4mb0EmQSwT2*3_8z zId%5_C9L;6aKt@9Iw_a5$J$!%(w>Mh+OIa)qfw$AS~ehHl6CjGD(=unhw<W^#l?FR ztD`+fVQG?DDk3F>DgCdoSDQ=L4-SrfIC*h&gAF-J=tMm{<ZnrFSnF)O;pex}-WDH` zPM+LZylo4XNqBl>-+d4bM8wQqU$(GREBRfjb-*67UWGPVN0d5BgEZ0;<B~ATgZ-VI z*VA=n4(<+rlmA5iX!$+D+<4Uuu@OvhOiYXK&E;H-&qdng_<6-}@)XOZ`0u>UH_x9Z z*ZjP6e{cI9xpsQfrZKoxf(~FdXuhe$II5LXd0!Sqt$=5b-#w0dxva?E^`1*3;mGpc z1Y7^h^zq0)*ttPpxNb@=W#~d(u+2zoc9D3J<tcHd!;ofU=VK|B{i|Ji;j8N#-`^bb zH*!+5Wcb7v6-yC*9)UlftscHlzVLe<(>q$9^=ba2f2PNm?4waP&-JMb_e*JiAj!2~ zEI)Q%=p$dd?=5qRiu#^jM-<h}>ze*j%GpqR=F#+{Se}nzJKu8TEyV8pA_w$Ez7*_w z%Mha7l{Nl|P?l~!Y}W>)+CdW}{as4w7cOLh@J&uXL+f_T6nUS_eP($JQRZ$s#jZ`+ zCb_ugdC)5*(@X5<n_9r)aLK3f4>dCtA>8^Z90Xu(mOM)3AJq4um5iXzzpMC#!$^q8 z$mzn06e6HWKpSgVxXeEg44xH?j|P@xBOvfp{xv!E)cqpva6+AULxbwl{`>M>{WQSt zzk=AxLkaA_FxWp@0Nj650D$p7+$|%64<N(Fq+|f-?Ing-hH8L8ipm&{J1=r5vOiAX zzLjc6ys8aI2Ed5_SLA<>FqPaAZa){N4DP+x0s?|VAFU#Xn5+h%e+vKrIT-*Dz{?20 z0|2l90N#Id|4s18{&Nk*SOfs{v%r{vmi(49DOOqko4x;$1Yluf)c!I3zcO!gZV6xJ zHvWe7&wZw{Fw9>Iq4-!r@s<Jt592j4w}IXYVXP4V{`XVi0s=o`@!MeP5qbjvfCN;B zvM@sYxQw@E-I4&Sjm%%>|0#k#RQjt++|P=sg}9%0s2_)4Re&F(g%4P01or2bHp2Mr zfkVR@9T!(2@~pWFFyJysYwW)&+#bV)@d7Y97>)mLDEzjXf7;}hW5j%0tOf;SUCk3H zZ!<7I3IGVbweK%sK>@h<0GSZXGlu{=68_1=#Q(M!hQmA<BR~$5L6pFwjOA(p1vK)> z001~Rx=>8%e-n-YA3Np>7k~xD;!&+(`736W@)<zJdz+o0jA<2MNbG+r0I&prJ^#7H zomN$Tz{<JW4Zu^4zpVyCw0)8BtNrn|`~F7wOcaN0ChKtom@49*l77$>=!E)RZ@Bz| z$)&$Id)RqXVcFfb5N7^c<FwXY@9K*K$;jxn;h%&8&&iwUnC~m0BoXV=l1ZR(uvHww z^AG7Cs(hu(zP#tK%9fY&@1KyhD?GR|3;MK^%M@_=xbxzD@+|sKQqADwgfGX7lJzSd zd(NZ2<G5A|?;_~Tu-P6K^>DPZ2X>xsi322mN0NETMw2>uS9nKvIvLaGZa47wnsV0g zoGD5ooZ6Jm6m(8J(mRvOzioNC8`t$JpPFqB%!V77vs${ob$Vk_JP@Jxq29qUDr)XQ z{8Wj%`W-NBc!Qg>0|q0yqrMQ8LHh(<>`kNo%ZWuP`_KTh^mYH<*!aUDa#+#@KRAp~ zGXxmIt_+|yUm<2_Qzi`v)C#DqKek}pbf#`F1dHl^s9@!T(Y*EW6cX}vuv9meYH8Ft z9eM-JAyTOP&g{63x6X}j3;wYrx%9F1(M768>^NFwxv;%dfk<l()TVwz&%fuaez9m6 zRw0<f3UP_oFHGWHTlZT)qM}eJR2eGcyQgP>ugU&4>a1=br}(aA6MAI2#Iq_rx9JzZ 
z;M#m-t(%~+d|GOOuhTw`-2aX)Rn>>AuCH%OP>~dfiZL=^t#q_E;BT-8{_@{&ugrg( zf1|f5>`HAR;i8!_KRiQ{zYQ;yNgJ&61S=QR`6lbR!QzdL7gS47C23keCy7Z4t-0Oi z>gmyoJ_r)SK(Zq)yZU4Y*OBGCQtb~Bvu9JrHomQAbyoAfAT@jOhaLYyPGS**3oTzf z(w%+hM>ae?J#F4CG?t$WX*#M$_I=2``ZljZTr5=o{>}|~`{qL^7FNoGYD7!@edPuw zVvS6kT%My15vq%OnT@t3--rctr(`Tt-gGN}(Vc0KcJW)qN%c4zpM5T^?l;t&K9}m2 zQhcVStzo9Ru4VwkU_qs$E-r54KhpRDEOVw2!g=i*(rzI)s__KpJl+RyQWh)Zhqu$K z(JVZB3Zs2x2}}^Mh-zQP!qEJ=cgOL|yH7V6qZvpb9G@8jDUQ-{l6+*AR!Dn}R=XBc zn_!5h-Ff$4|F3Z^r_NFcd?Zc9O`wvgE84}3ITIHPKZL0-D%eu)4oUO_h?iZJ*zrml z8*MN3h<~bA^Uq{$R)xe_ko4OWTrqK<W;Yxcws`O1xvkGGSaFZMUobVW3s8B=u`km# z9l^@x-3kci0mv26=pVCcjrD?G3G35wf}x6bjS>XzaP;w>yV20MsR!`+iGX-l*>SqM zOlJnzvN|3USZU<OJzC8>(>cI>%X7ZOc>Q0<ENJ-_{rZcH4J_Y+V*fy952*ixj5(BF zf-AiLh0K;3ZgIA_zqnb$Ezb5oFgP7n8`a!w8b$jbT28NCbGCY+O-P^l*!$T`p(a1i z`!{$SEDI@9RM*INTWcXbiCifoUw-9R`4>N{@cj0QMjL(LcW=38XhVBs$YgjTywmW* zw8D&Mj&Do6J<$23kx`Xpy<qZNHDXJ7CtPCqk*D3amz1{vT6v$rGqu4yWh4ZJ-U2C$ z2^VVR)i-8i5HyP&VT-PN5iJwKaXVhDZrRif<Av>KtU|+ODe<2W@hCMtaF=7(ZgJE1 z;w1w_nNQUBNh6+x@lO-9)IvENb$3oY=oMzH2Q_2lDtj@wm=9;ykLFK=789_A-D$Nm z;q^%CySg=UQN@NtA}+|_vi#i@wFOV}qM=%uVC-k)Uvcp+iijv>_n+bMbT8+;BH~0) z?aV4tOB5Z_0X(QL597QGB<1h&)i()viHhER?)Y3rFK28nZ|l`kk0e)T`h8YcdlbC) zV~ugM=-~?FC`u?|@!?_&KC(wXUMhMcB<xr}3$7uWusru?(DwTY8EYDE#c6|CH8E2p z?{DkpFy1R=&DfAI?=Y>n_YZW9vXh4BDL1PFRDc&^qSnL8#jC4>HmR@rtJQY-8%ll0 zgx@zc@&yx4kWgzVCL~<cJASi65Z7{D{b_vO5+zpbxcWoO<9AP{#E+xunGYhllj*Y- z^3hLUXc-VB?*zcp8ifO9?(uvt?r*Ho&?+*`)9zRTg}fK=Ao!ek%$0pf75*Vjf@`*% z3h|Uik$(PJ*>5#uYr&U&Q;Olasm&sz$={I$B&!K1)n=Fn|8Iid7sLEFql#aaz?K9B z9qJL~b(>3@N}V4-A#p(wT6v24=BfqHPR$d+p`^Q3LtNeC#=YtKLU*LlCMtU*zyGLa zuRVLlF%sGz|IW*%WW+rzVu&Mss5{X>m-(54r+}>umbKM42ln-d(#F{axuRXeK~#8p zCrn9vHaP#(hBa&G+u*E3@%=>FM?>iktWn0WaQkT|x_<s@4!zhwu$|<GkoO;8>9Zg4 zo}?0*wX;ARYy#PKX;JXv)<r!_(s^&M^pTR5-8isud%hxid!V#rjCkMU3N<n^ggw&o zO2>E1p7%@vz0b3{s+0{IA5lfO_-5>?E_I^aJVZtBY&(A;ZA}M}rGum-PF}_rWx8+q z9TE4EtQaH&?IKEQ__$xwG`Il=dQ)3`_GRf0aiD5DSr^w7?F_i=N?R)xiE_0jk0Y3n 
z$*#+#5)_Kx!r*HQamyg!C$^8_lDP5Vyr2h~hbn^>-1Tp@kh-j82ILS#L>z9AhV;@0 zU`k<Fkn-bF<}K3|)D~~EXK$<V!Qf1$=Uj_qsGs+MC-w*U*y?#a$ce_PAI8#PE?C7` z*%|T+Az~fLn&-*m`?{XJeN0Um^+4hwr2&KpQp*B2G{+=>Maz5Ex^>!|(XZ1#H8dks zi(xu0uYs{jY{5ZlU}N3|aI$82alKY&SP4%y)(Bx_ud#116Pwy=glE(4(LG(_unZwx zQ_;ILKP1;C)xl_8d8hpzN6o&70>ipE+8hr73n<+>C3<g?HW?<SqnSF?6zqD$#szf& zccGMXsD!%=no2$vgC7>bnTE&k4@WX7)EiWV)4)lFWAPr3JSDZgjp(jdZY=1}RukT< zbzV(;LA13k!*vHrJ=1*S$ej34s6AuNntHHjDq7suuB2O05d6R=etN}{EVsnj&9K*j zuB<Q_&xXoG7Nz+)FHm&41UQyw>H6!QioVD6$oDcfR^u)GL{5_!+zBmTV#m4j-u0Wd ztrr@@kd$U<k>Trchu0sIld@^-lHd|#PrF%Ik{VywQp{EVkjUh*&9F#p^0%xnHyeWN zqfX09n&~!37}n4>C(Z9hCiEhr)Hl4uX!Bh!kLnH6muS!@tB|8~x~j7M^#fC|tJvL; zAX2QG4XkK_eeSj?oZ#s%L*Xtq=BBN(`-RF{?dcBS_%~Wrjox^9r4kOdcDe}(T%*3q z`PZVPe%iVkmZu3Xj)MSLdCMt}WZXzU{IUseeeK*5%(<O$`T&(>IISHj>F3up-L%pA z^rwidJviH1MWM3tspC^eB3dHso^RiJMzYJ05})wn7xi~P#rXTnD@L`RJ8XX#c_?I( zy_IoRUb8yPVhu9*hwRs^Zz+qwwR#MIB`p(<Y(!}dW)yG7WS^Yi_!?)N)=91@JTt!V zyw%eLVBN}*eHa*&0g%;6DJJOP<$)x^VS=fu1Bv{3%1QOFqgV@ZwDPT$VOqJ<@jWmb z3p)!t8@`8;ilATpMJ$<DX9qK_&LWv23eGc=3X#Pl2TnXZL4h9abk{f|!QaG2$X?3S ze*H!kjQ`?mVu)O7l<4tZ{qyJI2tKt@wAATUM@Q?(6-v*Bqw(UG0~Bk?`_xT9h+N1f z-tN&DV6v`Eu@(R@cmud|G}9(^wF28l{p`Jubucr@H)7(6m?sJKV>JfjQ;~qq!-4T? zDoOw>tL7~*7?v8x%Ar(2_aqdoQ(RnJRI)KqAuZhN&%D|V&uOzXZk;hLc&T}q(le#8 z5)?WcqM%%4poI~v=xQ4K&$8fuVV?Z=y5>Ji9Lzcfs|d4v`~R2lO2ZOZ8RM0u4Ii9e zY`<szF;t}emgZ<z!`Y%l_=~@E7^>>2y3R=3*QFp3ZjdZvEQq>{3=l|QKlucJPhAE8 z{2$+I6-+LOm9gu7KtY>;_;t=Av5okEz|m&<ko!!r#4kUFt3`p9!4bFPiZKV}=SFJ- zT3dHj0)xVI$mGOKC1*^fhOZ8H$~HHp6MT0>77bbvp~g;cj_MH7c&(oTBeH_-y(y7l zta}5%1VCNmFLb+*XKpP3`siHxqezhaLHM_}lA`_z8|FFPGR{-i$5rHH0pIXie9Mg^ zJ6TH0IQWNG2-UUX`*TaAKIcr2!BTR|_evTIKky-7zFFUz@Jbv!2@A;`_t!>-9Onl( z-RSr4ety(}$7V$RnT2!vR7mx6(soEb`a_Ieac<?pKv|@$E<=<Ue^H_hLF-LF?(zO* zs2+yIQ%}sN()zH~ADZ!Y67v>y5=pnDxtC9y*V%DvB|JroXt{n)Qe!bdsouQwue7n? 
z4k$s*8zdN}b7;r-wzPRY)RUPgL!g4har((?9ef)SPX^M7nj@;wJ0nY899Yd{@~$QF z5B(ds*_FY%9^(1RD(-UH&T_rAQ(E}ujfsX2LtYq&Xp{MR^3CBc?u<BkFQ92ZM--a6 zI6PTn@p5|2Rq=YoZDSg}UJJmoo=ysy$^yu|#te_wwek;n%{qbs9opn4hO>*ln+_8n zUDMNJvXMJE0iLS|zGpG&`*kB383pa_Z-V6?F^=C$DHqelkq?$<mX}U)U(Ar$y=R(3 z)b;z#d$}VI2RQA#RHWdT|HZhekumaw^>q5gdAJ1%c$Dn(39}Y2Gb{t()s?ts0diM9 z=ptR*upn^cn=JUa9KVIl6XAY_i=zO<acln5$+jTUW$B*}7Dif(Q`YxpVvjF$q!aj& z4q<NL&>(_e^ZT2BF#!xRFljeB+=3ZLbVoiYkty^hRx8h$+E33W6P}OtgccS&-wvj) zb~e8694qw|z1Z^q%2k(vF7Xeir*P5;C}Y>5*uIP$v#;$aubOU+szTRSVnBiqw3=AC z_pQpKT{u|rjN!_<>{tS?h}uj%kM(A3j2+@)Tem*EXWkDL6G&^!<eK5%F^YsXfBE*) z|H0{1{0kf?Kt>gw0&olDUy4_CzkS+A(`D08z+GDO-l9NwPtfYh`Ys$w{e9k0X|2t7 zW8mVHYi}-DV3uFlRxcUN#IaG~rw6yL$g_1o`!y>fd#RBD_EUyn%psM_hX3~-Ls|mk zUvI93kDoDEOv0&R;!emPB)9IzP<rHD<E*+l6iL_hx<$QpE)w7$mo@|dFz)<mEr5vo zHpa$y^^127GMKwz?Tnzj56`{pW*S=;q$O5N>HNxC-1bH*oSjaaPDGHQt>>xd7Z{ui zbCL!W$7mRJmeV^*LM~b`R<eYsLwEPYY0Eu|o-r90A|v}0$IYUk1@ds$Y+;rZY{WQT zpO?ADepp(#;~jt?@u0QD^U_I#cuc3p&&BTFkp8(6L`idAiN~2m5sDe{=+~JZQ*QjP z6;Iv0^|G#al0&E*6$bR~PC<_Sx)J1ud#wBI_^`5vaTqHaD*jR(83)#t7XIXDmRHuG zyr1}Hm{i1Iye@ZLROIP-8-paPL(jffGn9vOx-N6NcG$UPry;n;WLO2vGm>K@J+m30 z{XV<xtB|{i4H&(5V6mL+))G<hF2+M>ZNFRr#In}2Uxq${n*TtzZcCAxxHz4az#d*1 zWbE$b3wp@XH@mJh=1q5OYkn>+?VIA)YKnwMt-><e*%kLJ-0kXhG6~E2GgE5}G~q9I zo^hUZXV|<`r~GBwM*SOn)mx9!n>}+gZaVdPSxH(Vz)L&`fsezv;qxxkNV{IFsAlyG zpR<>UGqfA2n=<pt&d8~R0a95!z3+GlKOV#w4>}j?qr}D~ILPo>L>w%pC;Fs&Zge3w zVoWM3Od2^)L=P5L+Ob(RTx~2Ag8*?+dQQ^5=;l{t=ee!;)b;2XUtjM-u|2(e2Zod9 zV{&AezQG6p-e;7*uLK3e5l&x?FZwpdy{ew`JZRxN5n#r1y1rm|Ea1kJV_ki)eJ-|> zO^(fki;Il~fGYisWf;lu{}(1{4)TQu(`o#?Cx3bWEIOExE|9KY%#JC7QQ)MX)pr7q z<*%RfU-ATXNi%_fvH!Ez!xvNgaEIVXNWH*qt=(o6!ss*Z8eag8?*e?#5k9PP&}Muk z)g2J?MA7APEDY*{dUrZ?D*1t?8z)$;`R#@wR6!pX43!^lzVO?S%(Xj^c9>c=w49;y z-r6ucu2I={U(Y<n-0@Ko6hQqN05J7kH?(P3x*Z`{m_B}4jaWh9Zxl33yxOr~y8EVO zYY_kHq=Iil(g6jy9Xz)q2{V9**xG|*H2hqf87?0;(hJdNY_-k?Y?z;gX{d4Ggt$lR z1u+J9v5j$mqqf46?R&K5K-J^@qB&EwI=tA~*VI?P#!Y~=TgM3~d}fYn3ZJT{YK*8h 
zLZ(KxFHbAY8R}=Q8LhoWEzCLfr{=wN<ug&_h)>$z-5p5z93Fuzv7a*d{KJ7+C_TwF zERh&IDUW{lmSvo#tKCVwNnBh+X(n;O*&ATQd(Z!D(f^moqVI$*Y*kO^<gWp`#n<2f zyNZG7rwhX<eV(OmD<vc<)MJ}XcR_cZv)|28W1|s8MH<YIJ!jlBV+?jnbU-v#qcV?T z<q+O9%({*iFb+>oTN|COTvbkF?No7%?#i15qQWMp$`(!flI^HKzrH-LuSPwqkLi-h zy&~1w2ZP64{jiqSRTb0k^K~>x9fPM7?}v2s;$@vNjlLAO|4IzwHTOjZWZd_VdX*ld zzMPTJ5~gVR<{%jE?YX1Ur74R`=Aa@ux3X#*EzU2Yr@}(;a0RbNE8}TLa38N(7_U5o zY%m`2!@*SCRW+Haq^Te$Hb(twJ~Dvs%1BASpVv|MnfJNDPc{Jo--QK+HlKU=KYxyX zlvbd5OvD}!9Fk{v5qY62tyF5Yb9>;#mY<7;Dl($SrPRvc{v9w_GmF(qi3$vsgTmq% zt-)YIY+Zi$79?b*L>nn~@Z$(=up7~%ZYpW-ZF6&YgmSOM^j?g`g=|B(d(u)G@mw_b z9Mto3b@^><RO9sfGv3kPZEwDg89A;nmD*ve=Aschs^00l7)d<q6mn*~+qvA_G&F5v zQ$q&$-cWcq1M^-%%-#+WLexy}izNzvAU*<#1V&?l)orXvu!@)hOsg6E8dcVm&T)sr zaW*OlPJEx7&9Jj^E%y%h3t%=qWCTmUXUP!jFE><m%K<EUA1vs%_9LQNPewc|s3qe2 zdp|L#kceN#s<dFi*X4(XhJp8s|KW!EU@%y25LiYS^*3*eF%g)c1l+{^j3173{70`& z@`f*^O;sJ;R#EHs!8BmiM#%dt(KG9NDss5R<q#=kKuS@WWWBf3KNF?^SKEy!3lqUH zoXaY&TeouBB@FMQXj-1^S3VQ=uvS$@)r*;}$<GQG@0O&h?@l8N@w?WzdlMcp8|{!9 z=n0mFKwa=Q7mMl-qRyP#{4;fBVgm_utVwX7Jf958-qe-3eeWZWeIj0GKcS`~<ZY+S z%|YsYp^QNBIYH2nolm7=iu>7->Rl`_-_)LwW;yOkj6K)a4<>-Cul8o-XTiqaFnUy= z@diKR=Tq!G(QbaNk&WAKUWP0-VEjJ~%ZNVBmshiWz30LHP?O+B_UGs~2gIVD<oeF~ zJe&8Tzxyctf%rzW&`(Bhrq(K7oxq|+#@n9}<9epU9!=Fp3qjy>Y=$Tc*{3507|#PF z0fS$Ij9p&cc4dK*vHMVEEcUy`?%&Iu&0b@%-^scUeT@Yml3@XS#_>g84h`-2zxy|n zK!y>E$Sk<L3A0OD-+@ae3&16N+e?SpEInro!pDRsz`$dX<tA2#^{T~Wp(7!FYpHqK zMxoxv4_Xd<kG_}z7!2)^x1XmRGGZH<P5rQG_fZ*2K-L`qJC+O~Koc8EiIeF)U+!S0 ze0!nd$WHc8!98bRFdF`^j0$GL2f=8x9N0IZ+cC<w#9ceIlIe}vo?Tr6v;#<ldOjAF z8Qu!Q)rHFJIfoL$VJXGXC2c-(MxvoppQRymYf8#OV{@>8?%_9z5nawM{SqpR;^L-s z)uDkLSJht5sD4pcG?+I2t$<K!W6IhXK!m+EkFp&iLLjHvPCqvg^HPy#Gb~Mdx>{J! 
zxOtqcvNs7BuOm9IEx;9|I@E1+d&iQ5XjU7*Ptj=`&cNJwP+&V!4VVBBo&W)RMKNr0 zi`3iAOyu0|aAiwf+5?J)QxBWj7)ubaBsdEG9mIPpjdRQKjo{7O0qfvtka`&xFjx!3 zD%SHI29t3CejXa2+qkcc8KZ6?7f3Psx`n8ETclFMykQl5H(avsI5%Fe{q|l7bXfJh zwCJ;z5dlndsf(^)eni$^szYF}bWY6L^CuOCQ2=W(JvsC>aXdCER7MYZ*#B0G!Ghw( z$~<8kmD-un9vZN5FQ1t4J6ZnQ=70Cx<z<0iq=@eUfI(OQ&f-E%N&pBKn~ti+(}Rrq zmIW9O4`JvURF=SCKx(d$NU9KnWrnGf@$RVO$&UH{JBOObZf>PN!T*yKz%NjqB5Yc! zLoim5*hVa1?<Kisdck{vFC@0{?!ck`MiJoAObPnLz1(xUq9W>K7BGZ`8-x!ed-0rP zy|Y2P`1jKU&)9v+<<>I`U}P+>0DvD406M-*Chx$x)5rD(hwoK@MKd1tw?pelDWa|# zzNZ_Ye$?3=@I(ju8e5`R6Ktrjxm)YV*la#8F|EXIVqr#yhY}^P5LRPjzN`7lx(?<G zs&fweTH&fQkdmlZ;K52<oagNh(^qJLm^(P(ve)qGS{%z%q^m071KP1g?Tu@Z#+5jg z4Mflf09<2NlshdQf1$_<X`ol~!UI!>!2-_(Dm@L{oX?p$>zmgj(fd7HQ(9-`T+j7S zkx2AuFfz4is=Uxd#o+CaLiSa=347-%^B3G@gL|xx>d`JA4ZKl_{P`@p@6IgD28#8P z4H82Pr>CjmPVz$rDFyCajbik~s)xGLlih;l_UNGvG#71S>)V!#a-Wr9hc-Pl)C%*P zV&>n`=_98OPfniEPnx0jhmE9EdOf=UfF}Te#Ei(R#k75%B8Jl8!q~TE-Ni3UByT6| z{~eN?effmur!SMELGCpa2wimEN+vL#fDT9|3uJIHwMp8F$m|?_@oV46W5%Uh!+azA z#8a$Y%IdZbrN3RuAh{f9b8mN?t8OBrFwR3jb)3sVog9r3fc`B90PZ9O-g5r|L~fTE z|3y!epntbdG41`(3_u3ONDo?JVl4cBC9v@2$U>g{8yUgLFuT7P`uhN?00GLi&bk(T zgq~tajW5Y`q&~bn<}S8BY&hdnq?oU}sUlvW3=sft){GYifbH*!kium9Hx<(T+*+dO z_@?-FMnXJ2lots}qrDfLO#79fpG<;Ef>WhCBc!+~c8nLx41grY1DIhcI#=hSN@kS{ zqvJwci68R5#C<48e8~{muSaT=L37tt;LbuxXo7keql*h8d!o>MmaMxT7?=dMbp+$Q zD0~ZWxR*rK<`{Clv*&&sfE23ph3W8okS};tp|C4A<(_}$$vq7*ff*Qjr&$`JcoP=L znRv?0*4MbE8m6?HhG~jfFL;tvVA}Vq5BIw|5*tp`B<IrzP|csI={Qi1jbSbwUWK(R zkx%DfI;~2(baw$p015!sxRiG}qHeJov;BeHP6W6E1-$mCp5KA}8kIQw+N{Nrz7}}p zI6bwyuJbAT6-Q6WxG!0qv0V#l(4ywe|1hSkgFV1pB8}JS*Hs0iym?+vgUd$%j@61R zUPW^!WBRZ4kAI9(DoE2d=d(a76A`bzXqs~)`w{fh%6ykIjE_cu?@mYkID<(tOt_>v zL*WU1bFzoclvRP8Fs`F4Fc!0Pk;R;Km};tbwXrPsEOYoGvZ2zwG3+^v_jzdlX$=V% z8)uG%Fk`HSDAoXMn1@@<nuAvKgIMB+o}$t`W;SxHSTlJtB~tGFF|w(qTIU%5i<VKn znR2m%Pt(o%C`E*lk*Tr6r<qT*)Sw^0<E`*;%yiv*Lki_ESbo0YS&(tyRM$O?XgZ2M z^YKKVi82^sOwon<O7(L;O_!0eTIR%IBiE!6P1|UxC@A=NIo>SgGs$NXm6^7qc(}Er 
zhn6{2XUeiyoeg9?HQ;~!Hy9$Ea{ua?f{(P`3h?5AL?B5EgBBp%c);DpdF@Sq<P7PY z_&i9x^Az1M<1aOQPJhKl?uV7G!QBjkLHXZO;b@yNg3{1?O|UdF*0WakGPkY~NHx-y z;`Q_tI(ON|j5u9E#%8bj`%AH?P60l4lDi#Z+R`x|ZYG4M$`b+tVpLlwFD_Tz{=ymo ziY3t}?20xlSOjvM;vdZcQ0rI4LN9?!LyW`;5&2*STZ%9n8Xb2bHaeAObn3vk>5n!R znxs_AX%Zsa2J;9Zt{;b+nAbj_5<sQHj(P86K1AK?`7t)XwMM4qih7Q{b?l1~>2JGI zQ>SZ-i%a4`sna#-8z5#2H8|37Lu@Q$mQ(p_5r=ZFCVO%5=R7br4rFMk@*%DZfY!;z z8j3kZ5f6izk>DZifGHaQopr0#wb3qknXa5R|AOblx~<`c$h1i_coGJSD8x7btdqoB zKLB_-TD>yI_T4{H>}dGzXpG_Iv96H?_~;#HQyQ4m(|(BZ*N)jfq{qa#{&R*CdjE~D zr(Qmry+=@sjXmGySk@8u6k1lGS_h|5NylFBtJ%QtQ249>Z@OinPEbiYVco*S2YB@$ zm@-OhA*$7ce|L6lLG&)*?Z#S+bY^R7OO*BOE~ZKJZwD8_{Gda*ZMjar?#^-*jahZ> zbU5MZCfHrLm|=fr>E4`T*_sEbTRn}p2U^P=;oF$KhcZdYC>obF^`7TkacJ<NV^C{C z^Z_-jj&Z$v0{3fq#G}|#`0E?vq=E$qdc;2{aW;-{KH7gHN(8Y^D(0Zjsh5Ziee0PZ zT%hwZTVx7%AXU?{THC#QF?p&E^WP@SL@Emapo&<J%4q}{8-%P=ib=dnoSMG%+<-?g z7)yKHs9uI!na|0LvG+u4T`P|YEP0_W$#od3UAr&mnZ3&tT1SX%S>Tq@B_YMaOx(yM z=_Z0Kb*(h?-U!6HLi8-&TDzq$Z8DO1-Q^NCFiy*uhYxe~@_&^O5=(@nUgvy+ZrsIZ zbW6u|iN@ws0y7m+S%Bc8EBUj?B==t9CfA;{9Nm@B>iiqp*bi>O&^P?`<c1<sjgfkt zs!|G$-IpSK5sSAk0$iMT`APHO6*#+Q2tUM{13VF?Sx(}X6-_2{f0<uEYu^MB!WX+F zZ=_(wgIZ}Y>hTg4XUE5E0%+icGyg(J#5<IoOzd58gvM)^R+M#w(t1BUt+%fLMYNO- zFH9-NeGW=R4j>1_?cLcm&s3+JAi?fr0@OA%0}6#H{3Rkx9>haj@L$iKQJ@~Ui<4M$ z7E@oybP;~fTduVid^)$e7dHED_+5UJLr8WW_2)F>e4>-ng!dkQND{7Rrc@>4Gpqw3 z9ib1MnHi?{z*~HJbudI~krscf_6z6dT>FsBw9Guh$38YCZjKc0QULuI9!Xmnxv#^6 zIqMrKDs9Y->*Kt=G7osV)q`Qmg?ik-0v&bNGuOuZ=CohM*Mact;ntShY0AHQi$}sj zSUpWRCaKQ1YrX<vMMXs=$p{o*+S=B(pa+X<v`&TXogA-(jlrph+F)4@rMTU65v%Sr z0$@0;o6Uh77Cx|@Ky_BX*#RM@XFxV5E7ob;KhQMXZci*L6-;Jd-O|MQaHRXvqr=(t zolf7r!tP$LbGs?9pPbmDO(XdDc<zh!yCy@4-gv>ejDh{;JXq&z?QzCzFOD2zA77zW zcmK{dUj^Ns(62AVa}ZUt+GHSUG7^fum8YMIQ-2*+czZ0HeyyvSYb}px?A!_WCx6|? 
z&|)xE|FJ&kbDkJQf$#AzhsJg9zMa;ePj=t<@R2O2j29ku0LkgY8lqiQ(?oJa1U;E9 zjctZI;lg|O`dX8?1VLLlA-m3)S#-}CW@(NM!0K+6j_J}!wbxBp_bT=$)IPYzeoi_} zW&q?ACSW^rExO!hJMhMzD1M**+>M?0#|zE`1VQWRKu0q<IaSqtT&ZP7e4m>D=gxCA zs45wL5pwUh>D1->(3#__wubM1Z5vw4&kQ$<{JZZSHi#UYPNxLWG{AL7X^j(qjEGG( ze44)GG<_?ZF`Vh1PqV(B%_9?*k|pzjTx0(QnZ{N!1U{iAI8ktp|DL|J<+Cu!QfOI3 z(G4mmDP(_d_e{z^Ov(`PRWy1R`Qwi%Pg+AforL%iynN)mllJ2pVJXu2;*Ub3V9}1= zQXz<6GwzZ<`=Pa_?{DK~-I>nKMgLP2#2J3Q&^-0L9VF)JB9E#?3e?pOK74v(R|qPE zUHsX+J3X`?kRg^JmazF`_nf2PxnIkcm;vDMV-eyBei0sLcDraCUS4cccFOzJ3W24P zzz(}V^l`h_t}+g}#2mUHlK9{kON@n*uf#U=VpMSkE{b=`8$NX+50QN<p+;g^q0H7q z_u^jLEss@oW-UR5M5yg?&Pf!Hejf#yTf@8lC>Wv)^m54e{WUy#N+;B;rjkDBEy-W| zf01wury&qJI=sC8Jt90D-_{dndY5*HEP;(gW1Jm9b(nu|v_ye)%cMfNWWOvHfcHJP z6sv_sPQ{~zZ^U=K-(y?Bvq=m@(NcflvxGSAfIB~_FW=j)XmeU^zb`8Xx<eo%JSnQM z%JE9BP#%y=jUITWiYv-geewz?yn=B{hE9CU7FTUegwW%{WkP^g1pi9^$tcdtsu{@X zka`EBo9luRvWOn#!F*rejZlCjPA1mYHfev_#!B21M}6N}^)#F$iOI*5^u}?|))q`3 z<`&0ocx>SKEsshYd3c67rR)v~LTbW0fN@IW@&#T49#VnwlZ!>gxj18;cQ|a%Z%l0m zF^8LT((<{R7ZOj*JY=~}bdg)aFI2Dp=n^mwD}DSbHBMRxCF7F`1j8X7;B8w^Ydx!W zvg~h%D{OYuVXkYi5x`wR#`|nBAA98cV1bvbbTmdwAOsY;kA6fOLO#-_P|vT#hFVLS zr!Fqe!5vWE9qM8fUFE9st+9zc!g+S`9j`)i{V9ZV0bV0mXzd_VSHS1YE5l{>)CLO3 zs8P4DOa-Wme{A$k&+tAT`Y2?S8#;IO<pqJ@VK`I&S>s!ZuIOqdJ?n5b+h7>Q1h%@k z7*lb}b}8Fs=BW@wWnz>9v2eBlsYF6tzzZ+z_K_@gli!&bI6c&GuS5c#55i#0bE03v zd|gH}CIui=tNvdQkDE3`hbKM!ZXtv4=2NewbsG}zK5oCe&>dWVIU@AjT|df$%lw?) 
z2sV467w?1j2^Q{reX}{!P3{=IkoKfMsIZ9iBl9&Mj9axag6xEn(6i5fqU9?;^GCAv zjfNhf)6*Wl^Q&tt;gWSdU_{5D(Nw<b(zF$C^e(;5Pk7%s#2FcB7}<J;o-*pn<F0Pd zhSBUD79aK|^m><CM$kET>z@CpUHR0>ZUc-I&Xh1B$S6ByRHGEdyL1vc=x3(7w;eX| zsyLc;keZN(vL``+GnGoapoZ2~lphNV|FO$Obiu)~``XUR&J-!!ebQ9(HRoE;50%RD z2j-+9JAr=K<9ejE?x*RvviyYP*hHbOOio9DuqEPV{H?XOce>A?pqk}>;5x-UH$COu z7|bw~lJpi3230sewZETvuMLkLc(A8Fs;w%&9{Mm`BsMet$%v9&aQ||m=J^%+lM}WN ze!=JP)n6v#l|K&_^`1-B*DVV%X32593Y>nZ|8};H26=2g_TokNTs(+^^zJOHj*1oh z-j|3yNY#~&`MIH~B4`1w5(pbWaSyuU!IAEF%&}>Z^>``}Fnq>O!ef5`l5ekO2`Wp< z(&F_!;`L9SeL3DFe=g{Ebka|n&*te4KNEEq6K4|M4g1j@&&NgC8Ht?GH(%D;USLuF zmcm8M__OL9#s3=S_K+7U_vq88$i_?I!Yhqu>~w;u@{VetABd%V2z;Lut5W}SS-22~ z{);EXAtxHG>6=$6UsOJEFASwhZ{x>Lly;0^Hh4auSP4eln8f29fMT2>A!>;*v6FU* zbhXZD*(0y~j;GnsYqSGIl`#ZoQl6b5fT8!-Z)qjBolIloG2#N1zO~_hW^_Fjfk+m3 zSd#E_ef4V3uopLxC24*Ql0x~o&ZKuE%#hqM<c?niu2RPLsqj1Iluz=0iPJ0z{gC22 zT<cp8tH|>7(Xi!o5jluu--Oc2(;ijzV^vhkyB1M=tQ*gSH}*NGg7CUDy5m_lX6Het zAX5Ac1Yum0-?W+6LFVcH1)>i4a)+WqI)ft9U;o5{$y$8NzeRk(=(TcT*o6fW(y^y~ ziBHc6{r#wC&^EbdCLL7#lb-YucT|}>x4W~Ky`;iHy^;H$#iV|u`Hs8YN#*J04)FO) zJz%b~&hMF(oij&>%`hCPIQ>26w`V5z`%Bq;E>2<=s$S~fL&yrB;?Xi~b`~8rb9!Y> zJb;rRmsK4zAku(pkQn4x-OZ^t7k}3cbZ?bKvK@h_eMyH8!scRTj1371Y8MX+oGwgt z>1f<D^!vC!*RI*gp3#c?0oyxwmymz9-<<g^&Pm~K_iI8h^>oAsMZ(Y?GWj=w#91$n z!l5ZFigG~yB!!~D_=YD;+#f>tEQVIB?}g|nzn=m=|6zOMsCVQ2z9@%CPj^~EoZqhh z_SBs#dGd^P#m`04ALh*J>f^vowBfzy)4Iyr=1co2I;c=j5rR1bDseVX6QVynK9cma zy6(SiKPCKTizbfu9&_Ba9{)qouweiEn$vtv`u*_Ld)x7X@c@Xl|Dxi-$xqTTT<NbM zs?)OjGIHi#HY->&b@#hxLhq@fQr{p-!k0kg)p0(joo<@6J?<vd5Z{AHw@~7cjqT~C zdh~VOU<ec0jN}Pwqc-$~y3y%{#QDi#h-4c1W9{*eRL4@YMlHj$PL6-7lBjg%czumh z-QF+kt|#W~dAM2Kk-I}dNk$R&R#WEYOHKzkV2nI%Wj_Y<XEb>w{&+cHtk{TK;wwQU zq>*kztmg;DZ}$Bs6<rWULfZCm#fBcZc|za|(IWTYNT(Mr7~hJ-tQabvcbInlo*Z%O zB>nk3)UD=vWHc?P?Ta3X<DdVli?0rf>j}2Tf+PeFA-F7TAh^o{!C8VNxF)!SMFQ;N z?hcE4aCdiET!XtMi#r5@=aJu6U)8HO)iqtW>i#ixt7mS{>C-3NA}$#CN0_)%PW|j~ z^S)8#7zX{@u$-tiC*0B^J{o+>dt7Vb3&OK;_?tx5KVW<H$HA3fUnj7y%Msdgd+h8C 
zMK?mp#^pqPPy2QXv3nOI*SJ(o;4fkm{p`GPAd@RXk&y~Svv+PQ(qwqNx=~7;a_8kK zeq|)mr%>#&yU952<574vFa@MNWu!1zT}}QLhe?)07?S~xiSY~kfr2>>OcM^}V3Hnc zo}fjs+q+-het5&fP9ZK}@k<UFIypwSg!j2~`PAIp=~x)A4S{>Wk3v`k2kXv0`=WLI z`!N6QQ}W3yi;nUU!dMB2yDjvV-}lIyxg{PF8wcEiD!#zhgySl@UVC28Bc6WI`j27Z z^g8r?B@G>C!Jke;(nTx3x<F@Jv&0);@@KT!oRjrEefL%pPC6xQ=#It?lUdIx94uPF zgm+<dOzL|0@3wPupIO*n&&M~qslWEFy}hGS@dyGb6B)wCDqL-Ku5>Q3S|N+*MUC0& z_e@`1-A@BIvCyh25Xpa(6dbkuJesTpu=o9Zd-(!YzS6ppO8L(lVP)I7-v?Pyj_H3? z_NP2lOn$aQ5G2#Bo#QKFF$ivMDk>tnb>`wFj|^zs!fJAcG|}FNf=AEt;_BzBLvV27 z5_Bj#sINxc{xb9Ig!`}W$LNL1g?ts{YSO7~PwBnJy*h*cP0i|^KEE<V9K&P4!NJk6 z$3AUS#57f6Rsn81GQV}v&*fK`JA+W$vhgm@wfrOHPCw~2r<5-4&k?ocl{KO}t$Wi- zwJMcX+VUIbMRUWx{`F8Axy9llcO?#O;&oOYS8RQbJl=6>*eXzF<j(6)dK`AuNTp4* z46=-nkzwK`L&=eN>iqCwJ$JGZsgAHo-{Gwfhit2+a}R51p??sf7T#*Fg=woM2&6hW z+EBa=iF(0hnlaQo^I%uUCwM0ERES{2dH3?nMzPk6S;oOEj-^78(}<$<>$Fwk7~z*f zh<*XhJk5pIW9rkJV+=jZz1!a|FMzRGOof9zN(3=6HZ#*1EOUnf<eo~)JQQaU#_1w; z$Q4b;{#3{`)V?Wkod*7scNGEEG*1DHALtlvhK8Hg!y0h~93j)b{?Yg^Y#OJPekDHe zAN`6rKGedk8SJenna0?uJ#nK-BeyTqeElMdD?)u_Y}754_rl%P-NP|`$8@&>WYZfd zX0Qlz{JBz*IQPn2pVvfluQ=33lY+IccJXpqL#5ZtVqsb#toYR*VwzP*)L(~FFZ?ra z|6sXG?_!_aE@QEi>22y)e_uUjJZ5pRw|lIVwT6dmv)iW4Xj5o^KdUmj1&A1UR___R z2bC^izb-27mjXQca!q9m#4szpHYnhPPGh}e3+y#!w@E6h;;)v7Ooqfv&YME4J?)g6 zmoi*;5Y7>og;{{h`Nri!-8SVC4ZjkjPb@<WBa4t;caY60yrkrtnnLn<Lj&^C1Xl|d zdmmOn2BzyF?4ea6a1&R#akaQ>5vPtGn>aa`P1^k{zCHq*sJ!p=9!Z)Vh^7X)oc;R0 z?hT+z>f$bh_6kZM0dv4+MG`nAE-#<OkiEw{<rPCWv~)opW7Gta0JWc^q4~$WM~fan z_*|f2q8MbB{|(-JXl1{&zPf8|6=}7)?Z{Y-5*B?}p>&NF^FW|spiX3odE8v=IR7_K zWC&gCj17o+Tnld7UGC-b2-pX_4-xf&q2?H(D#v7t_y!GhJf*5)f9ucaW*<)$Z*F&l zkTgYZH2&--SmKmi5pTSl6|Y^-um6EI4FR%>VOjzyezXp56jYuoRsGznKww|t)FO4g zn8-usrN&1xeEI7aeFzDzPi3eEQ=l7lC>_;Xg$Rvfjpjl-8T;@}PJYs!A3iatXjn)7 zxL#exEA0_n{t7DxB0_@Jzih;w59QmN-AN#Vp7Fh?{47}HtmS3ABSB63JqW{W0Zblz zKq=eBK}>-LfG-`}+!l&@hWW4e7>9cPsa^N9YybGg<4SJyG$utmct{cmDF?s1t&%T^ z6>$=c_TTv9s<%BVHsBBU<Z}iG`v+?OwfNK0>3Ew>9PHB%so!=fz>jmCI?IxEIKB^7 
z`5sS+@tp|!(=@#SOJ2d|6v{*=4&i=z#$#m7#W)tbzMX!l1v~#pqD3>1Q6VJWBgI4P zx<QymEiG1*irk1MOuTe*Nfr{W@rA)K124ELntgsYk;gHGW-vOV<#(yGOO_#mTPv$# zT_BX<p7X*(xfqH3^MO=tGSFLK@T$4FUFxzhw{>wE(OKEhxK0$GckYAGK1Ac})_|bU zIiK1{P_3?VuF-1ap>8ZT3f`6JKXKzDX-IG2dN@?;0vpFW{0$(9tCl;>I`AC?4AE-4 z-Fe4)n~6I$bKRonbuu+S$%JoPHJ8_2>CQxhCC6muv0&t>t=xW}W>aI+FxnYR`cP4s zFQ&ArPVAK>`nkHyV>mj?+_h;z%<b*XZ;L0+f&HMl`jc<!wb%+yN{Sb1a08`A8&iV; zA|3c^iVatu*ZthgqwilQW1We!%6omJrPGsDue8~9a*D}hz9oa>g#xAz@?TjIWd=oe z-{FVjl9|d=eW3w8P`TYdbSBy5F3(qfZg;!<bF$v)`NV$l$QuF{H#2$s)%Bbh6RXti zNS+oR=gZzYa^;zvCo|bMjb(Vo;Cw_7^dZ3gjlY2AL#uoH=abr&h0V>=OE<BJENTOa zrqlIKuZJg|?~{A{PgNr6pSZ+H)RPJd$mkaat!nE=4SLhtk0j47I<$`GTg`<|b|<`L z*Qt2_fgk_h=FqIgwJO&66OeMHR9`<t80Xc4RNd1${AJw#+@2l8I;IbN{3P^v;G!th zMq$2iXcQe-w<_bP<#U+#QaB`qBQTG=NTbO!Gukd#%OW@nVJ@&&swn3D`EPA!V`pJw z(u-^RO+@fhC-RdE`fvXCU^8u7o|^MdW#NAfivRZIvP~VTczP5WB~pTtt9y`uK3@%| z01#HzQFlAgb|t2gx6}W1UZzq73$KHY5UV<mPA)k0v?RL#dk&w-k}$vrvUvPb^3yJe zm9(e%d@I5vi1=$_u34H!Rbo%oWrG)<o!XCVAYG-o{I^%!&*MT&?Yj!fQY_7C1+4yp z6K2eH#D7_g2)9A`=mh$T<O5yD3$)C23pv&_U`~$mYwCeT@ijK{-izhdIwQ3o+MC82 z6t;ife~Yjhrzl7l;h`bU?rD-U<xztrn`np3D%M9>6JrqqT!eBIx$O(F#)bssB4rBs z7-i6Czkxq-P<w?N^8K1<sc*S=y}1}*NR(o&8Trr7POn^+TXWo`MlSC8ppK4DxFJSx z6#@>P2NOFF@mu}aPb>A%%Z1ubza)M}l|b@=G_NF#F<cGc$|4^Uic_jt`mY9!hDzpZ zzo9OKCyC+w=~|kODMM{N+^NPWHeYB^DK|Jh;L-i&!b)m=+jk#lkvsFRimk)VM@zx5 zlg%0_Lkvrjr{G~$P<_hF6eKAhu)e&AfyFIfSarhM_3fys3WnJj%ikV<D5=CWs$%aa zSPCZ^E@d)Ae9b(ZEg2=B7D3FB8=?_m_DM!8B3bEEesX3~oMQOoF{8?>zLg~<2OpO5 z83RK2WVN=0-YbqK=+EWlzEr4f#MEP(BEg9#X*jfDCn4{zHdx^wv__+1WS0E5kf+tq z;d%L&2AR~QlYhLKwQf~2BZbeZCunGVDr0ZJTOwsl&~oQAYFEMiw~D<?)Pt9ts5lg_ zp7WS|uTx`{F7N1yAqk{qel9&&|4ThRg$)TN_!{9j><}<IBPo)1F1JOfmvZ6FO-U|} z^8WjKQd&AcNlmu4Wl`$L)#a9i#g@`aIY*P1%Z*yVoTnQbt@G!@%s(<yMiEST@{62V z?l9uf$v@3P8VhTZUa1sye>zSKb{{H^h6WFIAA&V$WI<$&WX7XlkKcj)bRmWhFDb6w z&n<+O+(irCn08Oxf+%9nyxU?0oGdeivNuhQgIG8-AJ~jUuEn$w)QwAzHdNFZHp2ur z?Ui<Vo~{cfWO7gyJ;KX{jwbNJ;k0vc@JYj~7n%z?M2P@_eQ&GQwP;{x1XmQ{NczQ+ 
zK_hR&=P3oHB7>rtd~*5sKVV_lWCT#udVQ*O{%?Oa$@=MP2+rMD$gVtEApP}`)@0zq zbF-mil_n3}vv!cVO+>rJQd)bZfAE!X`#>+gYOzZ5kz3O6-5U$s7T{1chjc$@velG= zD@F0|8p}dCufNpm9QcV`2gn;QGAd!Gy^%siC*<BVI>%OR+7Ic;eKEntA>?)Jqe^iK zOJVgck)Si^;i^@TpEH8fL0Dq=ZvQn_JzPAjnV(<q)yW&HE=TqIbcS;<e!l30Kt*01 z^H01DL>u+q+OXcLXhepn0F#+eq|ZKa)I}@pD<$D}{gt;ld0_!0x+wZnak+sc>amOj zfHS_E#s*DoqdZ_){@Ww{WQV?v5c`$GjdPf%-0D|#7VQi%y%iyx1gM!`KWWXQ#hCw| zTdge(j|wj`eWh2>s-Wt5fT1K%-qWyYH3Qnz+bb>T-}{1I-im^-)}rt60@%<k;KFrq zkvLrBFY$J|U#hv^P0Km>tDZ9{vSt+9g}Ny|2<5ZC!v=>a$&R#YDD5+XPv$_D6Ep)# zrqMBAN)oRp$OIvfJCxd?@;bH2MdQ$&28~}KzzrUEPLkni{V^*a2<Y`7b@S3UAuG$H z(so8)$Xg#@^c7Wt_e2njaqN;^QeSp9N8UX&)qY+*v>CumJRG3-7S7jeZ3)(~uNX?O z(sdPYAIcim6VyBsbm&O#7xbWUd|p8ElZj?1_Te*IkIGV%tsIPO!_kblMx#!lMgy*p z_>4rj1Y8H{TLKqDiu&NjdMuzxS(2wr3L%@uq_j*<W|r_lEU=QF7|7sfOZToDz-=s4 z2d@i5uu+`X%+@Tvd?js_!=1+9&oOd#e-yv+K$CmxZMS;zxb^`0wv~1`t%UA}>0?!p z`eX(Lsg}32w3+exz9>F^Jm@}h(*&2UqRqVUDI!y&boTU*sjL>B;FX9a@PtRBW-qVu z(h1;OYzPW20oGdwES2N5Pp=b{W@J<e0CXVfABFpbG+5?QkyF+;j?0|}|FXYxZs^kb zN{`FX<C~-zxc(j9d#TYhY_8w$QdF3Y6at3UROnBDc*va4@9)ukM-?@r!+E%U)ql7| zu6%1xakQ%7@8=3Wl>6xbx2xCFPZzB)9ojwIoIhN$A9oarR<gAq;|k8^cY!q+Nkzd9 z$nfB~h2F|hLQxs}!nbDw<hYoOvVy<D!*RcTiV{41<NJM%(jFC?%*Yz5`rm1*8vn%m zdm8_b_;WkuDg^hQ^UMPhGs0EAD2^Go&tkqK1q8LRMJ{aFCd3vHOBKP~w5>6Z8s&{v zxKN?&7c<vxY%3J}dKZv!*B!d~k*<M6P-SB44eZ=Zg{jx5EAu%6l#Y*ip7d1t4P9q` zJsEHGT&XG>X=2cRvU;>`zK(`!72<g!KvMcCz@>Y^AGilYG~Ych<220I(9zIT7*19! 
z4rGb1%7-bdtZfE?5q=^$j0z?3@nP`9c$WM?7FCT83JHu)%{%$A>Kf{*#R}>>ElZ_# z{KMtIa->LGh8`C1r-+#K&=N%hP4&P#@9^J-S9hdsG-Go>Egvt(0C$4Nw-1Bje&VkD z;;6_wG1y<ZnKQTayezuy%INaX{My=VJ=L0|gkO4x;^Qu#!!=mGZ{J4$_&~}Su3SD` znS%bO(4V7I`}QJzyQBZ4hRBOiSd1MhNRO36k;zOa0Q^LJ;?ks8(kX&mY&l*7d3pWu z^hnv@5m;CHW8!T3Y4<Gj!sxn{x^TU|v+36B6`Po^2G~D1S9Ikb1Vt`9qo=3p@>McD zm-KOh+&?e(W2&zD%<(2Fwn@>I-wRqZ!J#tk>Dq4#Lw^LWjKrDriP995fPvEROF>%| zn=2i%rfgndowEEbP}7e;4&$;G$KuPE{LauT0Nmaa*bWvwT37Hg+5vdh!5_vl?U*z+ zZmBs|Tpq4&GxI`cFGW%3+{+Pi1HbI>J*20*&xGnlkERN9@nerDCpIV}hT|&J^{AYS za8MCZ&~CMlF&A}`y|U|8oP@_rhHs5N)}t3eMN`9fE>MVK<W<9V(R_)8z@ekQ%ws4P zXcK^!*>xyeVFa!(eBQsd_zIqSW^l^WK&g|Ut2EQY5>^yvCQw8r_Yzm5);|Xf3?H%9 zsm@JfR|eOILee;m##;Ck)kykOzCwCe5NylMbv+HA@*zF;BTDI<zj+a9R{)sls=g3s zuVc+T2j(%c{@vzCB2@qmXhS`|$S`_n*E9!{@D-{bMy9>#1e=N3)=;UV-f{0$EB~t} z`f{VaET1hVseC_MH{sCW2p~n3n^_@XBTkKfWjg$9t`@VRzN~kDr0uxgOp}snyc~pd zhHdXYWR%sfj3|GFx$xj{7WPOwcFZT}LG3u0$!+vB8m(bVvW<t4OI8U@W}g+Li{8)S z+!2?U7X2Yhg`}u@;vv^ND}D`#Rx??<+cD{<v<-P>n-0PBb7atl##Ei$u`A}*9ALu9 zyi=tkEJ;zYU-E%wW8H(gY`n?c2RK~y^}AE@%0o!%2oQB8J}Zx7x(Fz%@6DFoRZH8B zmd=Kg7M5R4S;lWEmJok5^_WyE4R|u-?49SF#@q6AuI8Ut!nHP6oN(nZmO^^_ZF-}Y z;@U|Xxc%Z9)v8**a3=P-X!%N^Ex&S~HMS#O3f#&X+md%t!uPo(iUbA}wTi`ugyo5c z^AStLtu$-)e6N45enb07r);$qb~JdHla(FdEE-95yL^dTvpRBY)Z$qmsQA<$+D^%$ zOdXwS5|{$(TO(6Uc^(r@?CTqp4)oWdQkKlI=hxmZBLNgdSFP%&Q-Fl@IPLd~HT0*x zCF(Z(^-kTKRdF|(NRw-xto^RsPwCibPR-%~?lnGuny6)U+BmYZPg0s!vZ+to$hyY~ zDO>PEO_wZ~*)f}Kwpa($d~r8>Y$H-zSe$n4?3WD9ZJB$BgSMJ_4I^Df*CdZ&s}<>J zs7~-P`_^I;hnlmQblb79xVVVg$VE&gF2ZR{sV?blauDO0V>*@O4TGXD&qvN+k8Ml9 zP>Z1<0KD>xFd4o*?K{{L2Uy5%W0ooDVOcoA_C;f<^AnVi#lVOKlgI~1H6n<~dSgV| zq!*ldJ`^D*O%s0Zt`_>2G6an)g2;?~AR*~`R-<S@zxN<89Z|3p*SLbB^mha+2Ej%g zhT58@QF9`>fR#ol7guU;(rcp0B^e|2sXWUHzdD6e$(XOwKb63OR7X&!r^t;n(K9gN z9V(gPEqIP#$-+$>BUO0UZ9o}x7)y+9@yc4dhfKXC9}+{-prI|3$XETWE_*>Bm1<0w z$7VfDPawW8wPviU-V|ZFn)Slic4}}nXtcF7W}`oLXADYt6skBingW?CFHrBT(qpaC zZ74%9`^LM>vWx<{iHb@x$c^F;9r7pPMfr~U`s}QxS6?&H3;}_@=phP6gt|_U-QQ=S 
zNt;(zz?lF)Gp7~j7t*?H+SP7LQ|@%MDF!0ALDKk^54lfpst2l9AhAQ+bSZVCT=B8z z%k|{REBkBi{q(i!$sDg7m7l4LKcDV2;%?$n7ESBfTXaIP@=|5rXm3%DC=n<repoof zMR;8zt+!8v9WSrPR~7#taht|~HE3o|cyKzt$3^xDT8BCf-i#dCK=B1&*in;4Qti?s z+MVIRN~~Xd_f$-k81qo1ZT;IF;fbh%lbms0v~}BQcQT57nglzVAzl`lG#YvdhAs~Z zvy7hs;gGAtl9YK>*dmRqZ+Ur-l2A2Z?h<mVtVC^|k)qiS`yzdhbl0|$xmvy!>d4#N zXQC2+7CU%p_)r+Woc7fPH1P?!4Lqz$e8fK%6!4(wJ5dQpFN@`1VtB%1pk}(v566^Y zR=-6pd#L47&SkOOTyR;c<lk`ie9sr@C3<aQ;vEDA1WT;fxQ359JrBz1zQF9EPJDj) zoY8JFZfIljhM^nDl-6KdvE)`q?HFQd0fK{<o{1b@_Q}@tfqF4^U;CGys9^1<sA_EA zu34DzyLMvzC|(VE`AXwQ5l4ezbOgWfbQ!&b3Tiga9!UA5ZFRjGK1AGYW*IH4TUW2_ zmjQU?idB@GeGD)p)CABH!e{d34K*C(1)0B;?Hr~&u<C(ezf+YfQd9BC)D4F5vF(1; zD3k76A%fWC-i5G<1%8YW)HF8MAGb3WXOz)(TQ9ze%)|~=$#XQE&SeFO{Q9lQE<~kQ zhqy|Q+7CuXK4>o6QI$u;`m4wqAY+b}npZSM5?bMPLsOP3yFHdAiv}8$JMX(Hd2+k& zGw_6GtA+oRB4K6+P#2nZ?3zqmpeA}BXnnZ;es)OEe%IyavQ6|Wb*G{Rql3OCxAgUI zK54me`UMB!oXw!vAaY~z$nab&p@!k=xC7OyLo=3y9=aN&k(mlC$h6FtUU!)Sf-;Ye zNkCU7B{MjprjI+MgC<?yz`yVwkL1}~x@y!sgC`jsZuvJU=$vRK#N$n!#&Z=daXZI~ z_u?5HGjN)ZSIwQzOrum7KWKrDHtxVPLroRfzKg)P7Er>qxB^7b@``dT%GTyboD znBppu5XYLW(?Z0bC@DJ`C~O~7n0}P$Uo^A*a@?-_Z10=R?UIH~d#CHEBn^dWoe{sQ z@VP5BD-}*l+B=ovle0z_py=eT3Kw|+Bklh7E}SAaZXsDcQ~&XP1y-AR#UgB7Hh0fl zX3|VW`29|Z5#u=YMT-Le5FlP8@AY?sKneeCe2eO1D9nv-{%K#W9AA-xHC}BHo&*dn zlmt2vzj}vN1&_;K1jxROjMTA$)R{@fD=hP0D9dNgX$wWNBGqe17fFgu*@`yjDhBEk z14Rpa=pUjZ7;>CIB-@9^gcBMJL^7tJXFR2)TCYM`ignE5%}nzUQWSbgwW&q|UUS=~ zdR`e<$ovC$S|ZwSXlUO7m%FXO&!R*=z4@B#lSuT^EsB+M<SoXkydjZ~pgKl+jegJv zGGxezpDCJ7huh|_lyG{g>1_I9`VuXELc@rr_>3ox-9~1<<@9#R;<nrB8btA3?L}TU zFuX-<o&m$)gGKpyjs|^zPEu|fQpO@^9MtY5cv@~?O<o>=$4HHZX2W#3(c#1sTHMwk zFIA_uLXxHHn(Qv3Cng8j9!u=2nTA_UtlNG@`A#7Cc~e8mmrH)ytppk3mvc(V$N@C? 
zr9r{E)3pz8a5HM2+;;(dlWu@$A5V1(h_-n>rdjLGhmU<$;cs3p%uP!h<rQVrk@}YO z$9ryLrfmBn9ii;}nHTN-3!G4Fa@3+v&q-=<3L<Kz6O%`TOn4N8{+)d;-+(Y}LLpAX z-15Ag5a|98*|q5jm-qT2igKkkU?BSVb?&?7lPI5o&qqVF0>Gizs;VWRJH#;u#SfL% zE2LEm%Ide-$Qn+HjfV1oA{{VV-}J@oyPxw3kt+taKmcwrb24^<55;DfDtDLL*i6qJ z`%B+0Usb<P{1QZ^w7yuf#2N1^GPd81jYn?7!CX0}aBsORfX8W2C3DdBTR+-jRf)2` zLtiR1;x|oNF0>nThO`nP?}uX<Pzk&AFTk6DL74j{PD!{lXwMXIq!*u2%g0^Ex?s+p z&wg>tb$wC|v`hhH4T#C#Ks(9jsw)_D`<Tw1b_Uo4L)$(O1vXmnutT^HcNga~ni-77 z%XRA~66$A3l*VFilndeCVi_+5&M*4iQd`V(xfn*@{!SOT`e0Q&bId0g@m#X`%`p{+ zNi_$99R;-1nTKh(U}$n1>9A$zn7yQ+tt|I4t)#h?DP^_;*N8DR7zkJX1TkpgApp>i z$+K5Gu(xc}%D->rVrhjsLFL4*<_aC+zK0_4gdOVZ989_Sp9+2~2xy!x2_Ic?^(d3X zsDYbn%MZ-oW>M~Z!V)1N&ZK04Z(ckjxBLJ~t2!>(-W*At8!a_^MVV1(J55ZcfD`g2 z=s5v_yf7ky<9pR$5efR~#jV$0*=D^j!OPJ>)*(-5^b4PVGNM8jiP~muI1#e9#2<l! z%9dd+icOpd9(hc^EqF0ndXk_29^cFwz2lw@4NYY2p4T+?bu>{uf&FO~FdzVdtCW7D z2N%icNg(2Q!mBj&>=1o}lTC+bYRz7-W!ucp{4+iDmia~ixbna>qw}6^d70j9;g=UM zNXZ5S!BR4$-DX18&vpi>^tTNC9dx(ciG9M8m&gP_I=b`N@n-}Z`r4a}u<@{7-}dTz z=3k4IKNIm8Pj0XF&^ZbTr&Mm^+uYdNaB+y1{rnQ%qUZ5t5*Tx42z7CT&p6v3b|pJ9 zY(OlhahikcUX5jeOh~Jls6}$kq^_&tHrytz_~pX8gGy%L0g7#3q;5nV%+7ZiMdT`$ zr}(SCdaU8Qk1q5CWHO7dyLl82z*Z@2_!k=n|Fn0Vd8_2-WrYD2f4XM~C7WP=F8q-u zdL$;%{kyHwxS^T}S3I*CFqxMurW~9jD|PqA2=CduuFFP8Wt+8Mz+J24`|{9$)9cx$ zxVgIOOR=Q7Rpf(Gbm-;fgIMlv%DCd*FM3#wd|lM7E*$g)SbUBz=bKrx_$Wx*RXMun zfUv(-pN;%r^Fh~;Pfe4Lt)4yN1wgUNN{K~9Mp>t!F)~w@lIcEbY;t|ocCWdB%*dQ9 zarLMjmm4X$SplE}$B)V@$XI@d;rh~L=e`uqEqw9C-<fpLrbc=s7bgYLT$j~t*%aLC z9xxK%I~19$$Sv3q7f2W*Thf7!`&tq`fXwn4s$nj{FIqYjN!0h5dGCqbT~>Qq(o7^A z%jcXG%IeBWW-QsYhhw!+gHczoM_=Ifzju3v9`3^tt1rB0b<T~5fhuzAAYv!4+dH<( zkLty(&NFlS0Kgn&>H%kl4sTnoD5Wg(zqlr3+~4vW`eodB%WDHWSb4Q)<#*Yse**73 zHVr=&MGu_IN^qf0+)L^hj(oDw)on<j?NRg|r&OW|AN6>m(k1;dZO}<^oNijbOdEz+ zOW#It^5P#toMg#l3(W_Zj-Lv$f%?p|%jn4mTmrd3MyszSRS5}{&$~+Ua-}11ajR=t zW~6yo=(zoIa+Qk<zRVW9vC@WWQrjMf<{efHM=2Ah)->scscLYRQ5ab*CZ_vaz|ea) zGO@kB?415ZJnjvg{^hwxVm^-i?Vz04Yx;W8?r5-AQMTF0tFpV&G$J&sSK)o%+gdAA 
zmIs~pF16Ky)e{%YSwj+2bBnMK1a0S>B;!11UzY5G%i1q`oj)v{j^DHO<KL4QS~8K5 TFdx!9vLnOnvuSI%KQR3dM+eY1 literal 0 HcmV?d00001 diff --git a/tests/abstract_cases/test_graphml.py b/tests/abstract_cases/test_graphml.py new file mode 100644 index 0000000..260d487 --- /dev/null +++ b/tests/abstract_cases/test_graphml.py @@ -0,0 +1,1712 @@ +# coding: utf8 +from __future__ import absolute_import, division + + +import unittest as ut +import io +import json + +from plwn.readers import nodes as nd +from plwn.bases import RelationInfoBase +from plwn import enums as en + +try: + import xml.etree.cElementTree as et +except ImportError: + import xml.etree.ElementTree as et + + +__all__ = ( + 'AttributesTest', + 'SynsetGraphTest', + 'LexicalGraphTest', + 'MixedGraphTest', +) + + +class AttributesTest(ut.TestCase): + """Test storing attributes in graphml output.""" + + _PLWNClass = None # Override in subclass + + def __assert_attr_syn_definition(self, attrkeys): + self.assertEqual( + attrkeys[u'syn_data-definition'], + { + u'for': u'node', + u'attr.name': u'definition', + u'attr.type': u'string', + }, + ) + + def __assert_attr_syn_is_artificial(self, attrkeys): + self.assertEqual( + attrkeys[u'syn_data-is_artificial'], + { + u'for': u'node', + u'attr.name': u'is_artificial', + u'attr.type': u'boolean', + }, + ) + + def __assert_attr_lex_lemma(self, attrkeys): + self.assertEqual( + attrkeys[u'lu_data-lemma'], + { + u'for': u'node', + u'attr.name': u'lemma', + u'attr.type': u'string', + }, + ) + + def __assert_attr_lex_pos(self, attrkeys): + self.assertEqual( + attrkeys[u'lu_data-pos'], + { + u'for': u'node', + u'attr.name': u'pos', + u'attr.type': u'string', + }, + ) + + def __assert_attr_lex_variant(self, attrkeys): + self.assertEqual( + attrkeys[u'lu_data-variant'], + { + u'for': u'node', + u'attr.name': u'variant', + u'attr.type': u'long', + }, + ) + + def __assert_attr_lex_definition(self, attrkeys): + self.assertEqual( + attrkeys[u'lu_data-definition'], + { + u'for': u'node', + 
u'attr.name': u'definition', + u'attr.type': u'string', + }, + ) + + def __assert_attr_lex_sense_examples(self, attrkeys): + self.assertEqual( + attrkeys[u'lu_data-sense_examples'], + { + u'for': u'node', + u'attr.name': u'sense_examples', + u'attr.type': u'string', + }, + ) + + def __assert_attr_lex_sense_examples_sources(self, attrkeys): + self.assertEqual( + attrkeys[u'lu_data-sense_examples_sources'], + { + u'for': u'node', + u'attr.name': u'sense_examples_sources', + u'attr.type': u'string', + }, + ) + + def __assert_attr_lex_external_links(self, attrkeys): + self.assertEqual( + attrkeys[u'lu_data-external_links'], + { + u'for': u'node', + u'attr.name': u'external_links', + u'attr.type': u'string', + }, + ) + + def __assert_attr_lex_usage_notes(self, attrkeys): + self.assertEqual( + attrkeys[u'lu_data-usage_notes'], + { + u'for': u'node', + u'attr.name': u'usage_notes', + u'attr.type': u'string', + }, + ) + + def __assert_attr_lex_domain(self, attrkeys): + self.assertEqual( + attrkeys[u'lu_data-domain'], + { + u'for': u'node', + u'attr.name': u'domain', + u'attr.type': u'string', + }, + ) + + def __assert_attr_lex_verb_aspect(self, attrkeys): + self.assertEqual( + attrkeys[u'lu_data-verb_aspect'], + { + u'for': u'node', + u'attr.name': u'verb_aspect', + u'attr.type': u'string', + }, + ) + + def __assert_attr_lex_is_emotional(self, attrkeys): + self.assertEqual( + attrkeys[u'lu_data-is_emotional'], + { + u'for': u'node', + u'attr.name': u'is_emotional', + u'attr.type': u'boolean', + }, + ) + + def __assert_attr_lex_emotion_markedness(self, attrkeys): + self.assertEqual( + attrkeys[u'lu_data-emotion_markedness'], + { + u'for': u'node', + u'attr.name': u'emotion_markedness', + u'attr.type': u'string', + }, + ) + + def __assert_attr_lex_emotion_names(self, attrkeys): + self.assertEqual( + attrkeys[u'lu_data-emotion_names'], + { + u'for': u'node', + u'attr.name': u'emotion_names', + u'attr.type': u'string', + }, + ) + + def 
__assert_attr_lex_emotion_valuations(self, attrkeys): + self.assertEqual( + attrkeys[u'lu_data-emotion_valuations'], + { + u'for': u'node', + u'attr.name': u'emotion_valuations', + u'attr.type': u'string', + }, + ) + + def __assert_attr_lex_emotion_example(self, attrkeys): + self.assertEqual( + attrkeys[u'lu_data-emotion_example'], + { + u'for': u'node', + u'attr.name': u'emotion_example', + u'attr.type': u'string', + }, + ) + + def __assert_attr_lex_emotion_example_secondary(self, attrkeys): + self.assertEqual( + attrkeys[u'lu_data-emotion_example_secondary'], + { + u'for': u'node', + u'attr.name': u'emotion_example_secondary', + u'attr.type': u'string', + }, + ) + + def __assert_attr_edge_type(self, attrkeys): + self.assertEqual( + attrkeys[u'edge-type'], + { + u'for': u'edge', + u'attr.name': u'type', + u'attr.type': u'string', + }, + ) + + def __assert_attr_edge_name(self, attrkeys): + self.assertEqual( + attrkeys[u'edge-name'], + { + u'for': u'edge', + u'attr.name': u'name', + u'attr.type': u'string', + }, + ) + + def setUp(self): + self.__plwn = self._PLWNClass.from_reader(( + nd.make_relation_type_node( + kind=en.RelationKind.synset, + name=u'hiperonimia', + ), + nd.make_relation_type_node( + kind=en.RelationKind.lexical, + name=u'deminutywność', + ), + nd.make_synset_node( + id=1, + definition=u'best friend', + related=((u'hiperonimia', 2),), + ), + nd.make_synset_node( + id=2, + definition=u'melk', + is_artificial=True, + ), + nd.make_lexical_unit_node( + id=11, + lemma=u'pies', + pos=en.PoS.n, + variant=1, + synset=1, + unit_index=1, + definition=u'Barks.', + usage_notes=(u'P', u'S'), + external_links=(u'http://dogs.com',), + examples=(u'Ala ma psa',), + examples_sources=(u'Lies!',), + domain=en.Domain.zw, + related=((u'deminutywność', 21),), + is_emotional=True, + emotion_markedness=en.EmotionMarkedness.ambiguous, + # Names and valuations need to be tuples, so their ordering is + # predictable. 
+ emotion_names=(en.EmotionName.strach, en.EmotionName.zlosc), + emotion_valuations=( + en.EmotionValuation.piekno, + en.EmotionValuation.krzywda, + ), + emotion_example_1=u'bim', + emotion_example_2=u'bom', + ), + nd.make_lexical_unit_node( + id=21, + lemma=u'ssak', + pos=en.PoS.n, + variant=1, + synset=2, + unit_index=1, + domain=en.Domain.bhp, + verb_aspect=en.VerbAspect.two, + ), + )) + + def tearDown(self): + self.__plwn.close() + + def test_synset_attrs_all(self): + """Include all synset attributes.""" + tree = _write_and_read( + self.__plwn, + include_attributes=True, + skip_artificial_synsets=False, + ) + + # Check data keys + keys = _make_attr_key_dicts(tree) + self.assertEqual(len(keys), 4) + self.__assert_attr_syn_definition(keys) + self.__assert_attr_syn_is_artificial(keys) + # The edges are always included + self.__assert_attr_edge_type(keys) + self.__assert_attr_edge_name(keys) + + # Check data for the synset + data = _get_data_values(tree.find(u"./graph/node[@id='1']")) + self.assertEqual(len(data), 2) + self.assertEqual(data[u'syn_data-definition'], u'best friend') + self.assertEqual(data[u'syn_data-is_artificial'], u'false') + + def test_synset_attrs_include(self): + """Include a select attribute of a synset.""" + tree = _write_and_read( + self.__plwn, + included_synset_attributes=(u'definition',), + skip_artificial_synsets=False, + ) + + keys = _make_attr_key_dicts(tree) + self.assertEqual(len(keys), 3) + self.__assert_attr_syn_definition(keys) + self.__assert_attr_edge_type(keys) + self.__assert_attr_edge_name(keys) + + data = _get_data_values(tree.find(u"./graph/node[@id='1']")) + self.assertEqual(len(data), 1) + self.assertEqual(data[u'syn_data-definition'], u'best friend') + + def test_synset_attrs_exclude(self): + """Include a select attribute of a synset by excluding other ones.""" + tree = _write_and_read( + self.__plwn, + excluded_synset_attributes=(u'definition',), + skip_artificial_synsets=False, + ) + + keys = _make_attr_key_dicts(tree) 
+ self.assertEqual(len(keys), 3) + self.__assert_attr_syn_is_artificial(keys) + self.__assert_attr_edge_type(keys) + self.__assert_attr_edge_name(keys) + + data = _get_data_values(tree.find(u"./graph/node[@id='2']")) + self.assertEqual(len(data), 1) + self.assertEqual(data[u'syn_data-is_artificial'], u'true') + + def test_lexunit_attrs_all(self): + """Include all lexical unit attributes.""" + tree = _write_and_read( + self.__plwn, + graph_type='lexical_unit', + include_attributes=True, + skip_artificial_synsets=False, + ) + + keys = _make_attr_key_dicts(tree) + self.assertEqual(len(keys), 18) + self.__assert_attr_lex_lemma(keys) + self.__assert_attr_lex_pos(keys) + self.__assert_attr_lex_variant(keys) + self.__assert_attr_lex_definition(keys) + self.__assert_attr_lex_sense_examples(keys) + self.__assert_attr_lex_sense_examples_sources(keys) + self.__assert_attr_lex_external_links(keys) + self.__assert_attr_lex_usage_notes(keys) + self.__assert_attr_lex_domain(keys) + self.__assert_attr_lex_verb_aspect(keys) + self.__assert_attr_lex_is_emotional(keys) + self.__assert_attr_lex_emotion_markedness(keys) + self.__assert_attr_lex_emotion_names(keys) + self.__assert_attr_lex_emotion_valuations(keys) + self.__assert_attr_lex_emotion_example(keys) + self.__assert_attr_lex_emotion_example_secondary(keys) + self.__assert_attr_edge_type(keys) + self.__assert_attr_edge_name(keys) + + data = _get_data_values(tree.find(u"./graph/node[@id='11']")) + self.assertEqual(len(data), 16) + self.assertEqual(data[u'lu_data-lemma'], u'pies') + self.assertEqual(data[u'lu_data-pos'], en.PoS.n.value) + self.assertEqual(data[u'lu_data-variant'], u'1') + self.assertEqual(data[u'lu_data-definition'], u'Barks.') + self.assertEqual( + json.loads(data[u'lu_data-usage_notes']), + [u'P', u'S'], + ) + self.assertEqual( + json.loads(data[u'lu_data-external_links']), + [u'http://dogs.com'], + ) + self.assertEqual( + json.loads(data[u'lu_data-sense_examples']), + [u'Ala ma psa'], + ) + self.assertEqual( + 
json.loads(data[u'lu_data-sense_examples_sources']), + [u'Lies!'], + ) + self.assertEqual(data[u'lu_data-domain'], en.Domain.zw.value) + self.assertIsNone(data[u'lu_data-verb_aspect']) + self.assertEqual(data[u'lu_data-is_emotional'], u'true') + self.assertEqual( + data[u'lu_data-emotion_markedness'], + en.EmotionMarkedness.ambiguous.value, + ) + self.assertEqual( + json.loads(data[u'lu_data-emotion_names']), + [en.EmotionName.strach.value, en.EmotionName.zlosc.value], + ) + self.assertEqual( + json.loads(data[u'lu_data-emotion_valuations']), + [ + en.EmotionValuation.krzywda.value, + en.EmotionValuation.piekno.value, + ], + ) + self.assertEqual(data[u'lu_data-emotion_example'], u'bim') + self.assertEqual(data[u'lu_data-emotion_example_secondary'], u'bom') + + def test_lexunit_attrs_include(self): + """Include select attributes of a lexical unit.""" + tree = _write_and_read( + self.__plwn, + graph_type='lexical_unit', + included_lexical_unit_attributes=(u'lemma', u'variant'), + skip_artificial_synsets=False, + ) + + keys = _make_attr_key_dicts(tree) + self.assertEqual(len(keys), 4) + self.__assert_attr_lex_lemma(keys) + self.__assert_attr_lex_variant(keys) + self.__assert_attr_edge_type(keys) + self.__assert_attr_edge_name(keys) + + data = _get_data_values(tree.find(u"./graph/node[@id='11']")) + self.assertEqual(len(data), 2) + self.assertEqual(data[u'lu_data-lemma'], u'pies') + self.assertEqual(data[u'lu_data-variant'], u'1') + + def test_lexunit_attrs_exclude(self): + """Include select attributes of a lexical unit by excluding others.""" + tree = _write_and_read( + self.__plwn, + graph_type='lexical_unit', + excluded_lexical_unit_attributes=(u'lemma', u'variant'), + skip_artificial_synsets=False, + ) + + keys = _make_attr_key_dicts(tree) + self.assertEqual(len(keys), 16) + self.__assert_attr_lex_pos(keys) + self.__assert_attr_lex_definition(keys) + self.__assert_attr_lex_sense_examples(keys) + self.__assert_attr_lex_sense_examples_sources(keys) + 
self.__assert_attr_lex_external_links(keys) + self.__assert_attr_lex_usage_notes(keys) + self.__assert_attr_lex_domain(keys) + self.__assert_attr_lex_verb_aspect(keys) + self.__assert_attr_lex_is_emotional(keys) + self.__assert_attr_lex_emotion_markedness(keys) + self.__assert_attr_lex_emotion_names(keys) + self.__assert_attr_lex_emotion_valuations(keys) + self.__assert_attr_lex_emotion_example(keys) + self.__assert_attr_lex_emotion_example_secondary(keys) + self.__assert_attr_edge_type(keys) + self.__assert_attr_edge_name(keys) + + data = _get_data_values(tree.find(u"./graph/node[@id='11']")) + self.assertEqual(len(data), 14) + self.assertEqual(data[u'lu_data-pos'], en.PoS.n.value) + self.assertEqual(data[u'lu_data-definition'], u'Barks.') + self.assertEqual( + json.loads(data[u'lu_data-usage_notes']), + [u'P', u'S'], + ) + self.assertEqual( + json.loads(data[u'lu_data-external_links']), + [u'http://dogs.com'], + ) + self.assertEqual( + json.loads(data[u'lu_data-sense_examples']), + [u'Ala ma psa'], + ) + self.assertEqual( + json.loads(data[u'lu_data-sense_examples_sources']), + [u'Lies!'], + ) + self.assertEqual(data[u'lu_data-is_emotional'], u'true') + self.assertEqual(data[u'lu_data-domain'], en.Domain.zw.value) + # ElementTree reads empty-value nodes as None + self.assertIsNone(data[u'lu_data-verb_aspect']) + self.assertEqual( + data[u'lu_data-emotion_markedness'], + en.EmotionMarkedness.ambiguous.value, + ) + self.assertEqual( + json.loads(data[u'lu_data-emotion_names']), + [en.EmotionName.strach.value, en.EmotionName.zlosc.value], + ) + self.assertEqual( + json.loads(data[u'lu_data-emotion_valuations']), + [ + en.EmotionValuation.krzywda.value, + en.EmotionValuation.piekno.value, + ], + ) + self.assertEqual(data[u'lu_data-emotion_example'], u'bim') + self.assertEqual(data[u'lu_data-emotion_example_secondary'], u'bom') + + +class SynsetGraphTest(ut.TestCase): + """Create a graph of synsets and test its edges. 
+ + The last element is artificial so it can be cut off with a parameter. + + :: + .---. a .---. B/b .----. + | 1 |----->| 2 |------>| 3a | + '---' '---' '----' + """ + + _PLWNClass = None # Override in subclass + + def setUp(self): + self.__plwn = self._PLWNClass.from_reader(( + nd.make_relation_type_node(kind=en.RelationKind.synset, name=u'a'), + nd.make_relation_type_node( + kind=en.RelationKind.synset, + name=u'b', + parent=u'B', + ), + nd.make_synset_node(id=1, related=((u'a', 2),)), + nd.make_synset_node(id=2, related=((u'B/b', 3),)), + nd.make_synset_node(id=3, is_artificial=True), + nd.make_lexical_unit_node( + id=11, + lemma=u'A', + pos=en.PoS.n, + variant=1, + synset=1, + unit_index=1, + domain=en.Domain.bhp, + ), + nd.make_lexical_unit_node( + id=21, + lemma=u'B', + pos=en.PoS.n, + variant=1, + synset=2, + unit_index=1, + domain=en.Domain.bhp, + ), + nd.make_lexical_unit_node( + id=31, + lemma=u'C', + pos=en.PoS.n, + variant=1, + synset=3, + unit_index=1, + domain=en.Domain.bhp, + verb_aspect=en.VerbAspect.dk, + ), + )) + + def tearDown(self): + self.__plwn.close() + + def test_full_graph(self): + """Include all nodes and edges in the graph.""" + tree = _write_and_read(self.__plwn, skip_artificial_synsets=False) + + # Check nodes + self.assertEqual( + _get_node_id_set(tree), + frozenset((u'1', u'2', u'3')), + ) + + # Check edges + edges = _make_edge_dict(tree) + + self.assertEqual(len(edges), 2) + self.assertEqual( + edges[u'syn--1--2--a'], + { + u'source': u'1', + u'target': u'2', + u'type': u'relation', + u'name': u'a', + }, + ) + self.assertEqual( + edges[u'syn--2--3--B/b'], + { + u'source': u'2', + u'target': u'3', + u'type': u'relation', + u'name': u'B/b', + }, + ) + + def test_include_relations(self): + """Include only some of relation edges in the graph.""" + tree = _write_and_read( + self.__plwn, + included_synset_relations=(u'a',), + skip_artificial_synsets=False, + ) + + self.assertEqual( + _get_node_id_set(tree), + frozenset((u'1', u'2')), + ) 
+ + edges = _make_edge_dict(tree) + + self.assertEqual(len(edges), 1) + self.assertEqual( + edges[u'syn--1--2--a'], + { + u'source': u'1', + u'target': u'2', + u'type': u'relation', + u'name': u'a', + }, + ) + + def test_exclude_relations(self): + """Exclude only some of relation edges in the graph.""" + tree = _write_and_read( + self.__plwn, + excluded_synset_relations=(u'a',), + skip_artificial_synsets=False, + ) + + self.assertEqual( + _get_node_id_set(tree), + frozenset((u'2', u'3')), + ) + + edges = _make_edge_dict(tree) + self.assertEqual(len(edges), 1) + self.assertEqual( + edges[u'syn--2--3--' + RelationInfoBase.format_name(u'B', u'b')], + { + u'source': u'2', + u'target': u'3', + u'type': u'relation', + u'name': RelationInfoBase.format_name(u'B', u'b'), + }, + ) + + def test_include_nodes(self): + """Include only some nodes in the graph.""" + tree = _write_and_read( + self.__plwn, + included_synset_nodes=(1, 2), + skip_artificial_synsets=False, + ) + + self.assertEqual( + _get_node_id_set(tree), + frozenset((u'1', u'2')), + ) + + edges = _make_edge_dict(tree) + + self.assertEqual(len(edges), 1) + self.assertEqual( + edges[u'syn--1--2--a'], + { + u'source': u'1', + u'target': u'2', + u'type': u'relation', + u'name': u'a', + }, + ) + + def test_exclude_nodes(self): + """Exclude some nodes from the graph.""" + tree = _write_and_read( + self.__plwn, + excluded_synset_nodes=(3,), + skip_artificial_synsets=False, + ) + + self.assertEqual( + _get_node_id_set(tree), + frozenset((u'1', u'2')), + ) + + edges = _make_edge_dict(tree) + + self.assertEqual(len(edges), 1) + self.assertEqual( + edges[u'syn--1--2--a'], + { + u'source': u'1', + u'target': u'2', + u'type': u'relation', + u'name': u'a', + }, + ) + + def test_break_node_chain(self): + """Exclude one node in the middle which should make the graph empty.""" + tree = _write_and_read( + self.__plwn, + excluded_synset_nodes=(2,), + skip_artificial_synsets=False, + ) + + self.assertFalse(_get_node_id_set(tree)) + 
self.assertFalse(_make_edge_dict(tree)) + + def test_prefix_nodes(self): + """Check if nodes are correctly prefixed when told to.""" + tree = _write_and_read( + self.__plwn, + prefix_ids=True, + skip_artificial_synsets=False, + ) + self.assertEqual( + _get_node_id_set(tree), + frozenset((u'synset-1', u'synset-2', u'synset-3')), + ) + self.assertEqual( + frozenset(_make_edge_dict(tree)), + frozenset(( + u'syn--synset-1--synset-2--a', + u'syn--synset-2--synset-3--' + + RelationInfoBase.format_name(u'B', u'b'), + )), + ) + + def test_no_artificial(self): + """Cut off the artificial synset from the rest of the synset graph.""" + tree = _write_and_read(self.__plwn, skip_artificial_synsets=True) + + self.assertEqual( + _get_node_id_set(tree), + frozenset((u'1', u'2')), + ) + + edges = _make_edge_dict(tree) + + self.assertEqual(len(edges), 1) + self.assertEqual( + edges[u'syn--1--2--a'], + { + u'source': u'1', + u'target': u'2', + u'type': u'relation', + u'name': u'a', + }, + ) + + +class LexicalGraphTest(ut.TestCase): + """Create a graph of lexical units and test its edges. + + The graph is the same as in synset graph test, but with lexical units. 
+ """ + + _PLWNClass = None # Override in subclass + + def setUp(self): + self.__plwn = self._PLWNClass.from_reader(( + nd.make_relation_type_node( + kind=en.RelationKind.lexical, + name=u'a', + ), + nd.make_relation_type_node( + kind=en.RelationKind.lexical, + name=u'b', + parent=u'B', + ), + nd.make_synset_node(id=1), + nd.make_synset_node(id=2), + # On lexical unit level artificial synsets have no meaning + nd.make_synset_node(id=3, is_artificial=True), + nd.make_lexical_unit_node( + id=11, + lemma=u'A', + pos=en.PoS.n, + variant=1, + synset=1, + unit_index=1, + domain=en.Domain.bhp, + related=((u'a', 21),), + ), + nd.make_lexical_unit_node( + id=21, + lemma=u'B', + pos=en.PoS.n, + variant=1, + synset=2, + unit_index=1, + domain=en.Domain.bhp, + related=((RelationInfoBase.format_name(u'B', u'b'), 31),), + ), + nd.make_lexical_unit_node( + id=31, + lemma=u'C', + pos=en.PoS.n, + variant=1, + synset=3, + unit_index=1, + domain=en.Domain.bhp, + verb_aspect=en.VerbAspect.dk, + ), + )) + + def tearDown(self): + self.__plwn.close() + + def test_full_graph(self): + """Include all lexical units and relations in the graph.""" + tree = _write_and_read( + self.__plwn, + graph_type='lexical_unit', + skip_artificial_synsets=False, + ) + + self.assertEqual( + _get_node_id_set(tree), + frozenset((u'11', u'21', u'31')), + ) + + edges = _make_edge_dict(tree) + self.assertEqual(len(edges), 2) + self.assertEqual( + edges[u'lu--11--21--a'], + { + u'source': u'11', + u'target': u'21', + u'type': u'relation', + u'name': u'a', + }, + ) + self.assertEqual( + edges[u'lu--21--31--' + RelationInfoBase.format_name(u'B', u'b')], + { + u'source': u'21', + u'target': u'31', + u'type': u'relation', + u'name': RelationInfoBase.format_name(u'B', u'b'), + }, + ) + + def test_include_relations(self): + """Include only some of the lexical unit relations in the graph.""" + tree = _write_and_read( + self.__plwn, + graph_type=u'lexical_unit', + included_lexical_unit_relations=(u'a',), + 
skip_artificial_synsets=False, + ) + + self.assertEqual( + _get_node_id_set(tree), + frozenset((u'11', u'21')), + ) + + edges = _make_edge_dict(tree) + self.assertEqual(len(edges), 1) + self.assertEqual( + edges[u'lu--11--21--a'], + { + u'source': u'11', + u'target': u'21', + u'type': u'relation', + u'name': u'a', + }, + ) + + def test_exclude_relations(self): + """Exclude some of the relations from the graph.""" + tree = _write_and_read( + self.__plwn, + graph_type=u'lexical_unit', + excluded_lexical_unit_relations=(u'a',), + skip_artificial_synsets=False, + ) + + self.assertEqual( + _get_node_id_set(tree), + frozenset((u'21', u'31')), + ) + + edges = _make_edge_dict(tree) + self.assertEqual(len(edges), 1) + self.assertEqual( + edges[u'lu--21--31--' + RelationInfoBase.format_name(u'B', u'b')], + { + u'source': u'21', + u'target': u'31', + u'type': u'relation', + u'name': RelationInfoBase.format_name(u'B', u'b'), + }, + ) + + def test_include_nodes(self): + """Include only some of the lexical unit nodes in the graph.""" + tree = _write_and_read( + self.__plwn, + graph_type=u'lexical_unit', + included_lexical_unit_nodes=(11, 21,), + skip_artificial_synsets=False, + ) + + self.assertEqual( + _get_node_id_set(tree), + frozenset((u'11', u'21')), + ) + + edges = _make_edge_dict(tree) + self.assertEqual(len(edges), 1) + self.assertEqual( + edges[u'lu--11--21--a'], + { + u'source': u'11', + u'target': u'21', + u'type': u'relation', + u'name': u'a', + }, + ) + + def test_exclude_nodes(self): + """Exclude some of the lexical unit nodes in the graph.""" + tree = _write_and_read( + self.__plwn, + graph_type=u'lexical_unit', + excluded_lexical_unit_nodes=(31,), + skip_artificial_synsets=False, + ) + + self.assertEqual( + _get_node_id_set(tree), + frozenset((u'11', u'21')), + ) + + edges = _make_edge_dict(tree) + self.assertEqual(len(edges), 1) + self.assertEqual( + edges[u'lu--11--21--a'], + { + u'source': u'11', + u'target': u'21', + u'type': u'relation', + u'name': u'a', + 
}, + ) + + def test_break_node_chain(self): + """Exclude one node in the middle which should make the graph empty.""" + tree = _write_and_read( + self.__plwn, + excluded_synset_nodes=(2,), + skip_artificial_synsets=False, + ) + + self.assertFalse(_get_node_id_set(tree)) + self.assertFalse(_make_edge_dict(tree)) + + def test_prefix_nodes(self): + """Make sure node ids are prefixed when told to.""" + tree = _write_and_read( + self.__plwn, + graph_type=u'lexical_unit', + prefix_ids=True, + skip_artificial_synsets=False, + ) + + self.assertEqual( + _get_node_id_set(tree), + frozenset(( + u'lexical_unit-11', + u'lexical_unit-21', + u'lexical_unit-31', + )) + ) + self.assertEqual( + frozenset(_make_edge_dict(tree)), + frozenset(( + u'lu--lexical_unit-11--lexical_unit-21--a', + u'lu--lexical_unit-21--lexical_unit-31--' + + RelationInfoBase.format_name(u'B', u'b'), + )) + ) + + +class MixedGraphTest(ut.TestCase): + """Test how a mixed synset-unit graph behaves. + + :: + .--------.-->.-------. + | syn-4a | | lu-41 | + '--------'<--'-------' + ^ | + C/u| |C/d + | v + .-------. A .-------. B .-------. + | syn-1 |--->| syn-2 |--->| syn-3 | + '-------' '-------' '-------' + ^ | ^ | ^ | + | | | | | | + | v | v | v + .-------. a .-------. b .-------. 
+ | lu-11 |--->| lu-21 |--->| lu-31 | + '-------' '-------' '-------' + """ + + _PLWNClass = None # Override in subclass + + def __assert_edge_syn_1_2(self, edges): + self.assertEqual( + edges[u'syn--synset-1--synset-2--A'], + { + u'source': u'synset-1', + u'target': u'synset-2', + u'type': u'relation', + u'name': u'A', + }, + ) + + def __assert_edge_syn_2_3(self, edges): + self.assertEqual( + edges[u'syn--synset-2--synset-3--B'], + { + u'source': u'synset-2', + u'target': u'synset-3', + u'type': u'relation', + u'name': u'B', + }, + ) + + def __assert_edge_syn_2_4(self, edges): + self.assertEqual( + edges[u'syn--synset-2--synset-4--C/u'], + { + u'source': u'synset-2', + u'target': u'synset-4', + u'type': u'relation', + u'name': u'C/u', + }, + ) + + def __assert_edge_syn_4_2(self, edges): + self.assertEqual( + edges[u'syn--synset-4--synset-2--C/d'], + { + u'source': u'synset-4', + u'target': u'synset-2', + u'type': u'relation', + u'name': u'C/d', + }, + ) + + def __assert_edge_lex_11_21(self, edges): + self.assertEqual( + edges[u'lu--lexical_unit-11--lexical_unit-21--a'], + { + u'source': u'lexical_unit-11', + u'target': u'lexical_unit-21', + u'type': u'relation', + u'name': u'a', + }, + ) + + def __assert_edge_lex_21_31(self, edges): + self.assertEqual( + edges[u'lu--lexical_unit-21--lexical_unit-31--b'], + { + u'source': u'lexical_unit-21', + u'target': u'lexical_unit-31', + u'type': u'relation', + u'name': u'b', + }, + ) + + def __assert_edge_s2l_1_11(self, edges): + self.assertEqual( + edges[u'uns--synset-1--lexical_unit-11--has_unit'], + { + u'source': u'synset-1', + u'target': u'lexical_unit-11', + u'type': u'unit_and_synset', + u'name': u'has_unit', + }, + ) + + def __assert_edge_s2l_2_21(self, edges): + self.assertEqual( + edges[u'uns--synset-2--lexical_unit-21--has_unit'], + { + u'source': u'synset-2', + u'target': u'lexical_unit-21', + u'type': u'unit_and_synset', + u'name': u'has_unit', + }, + ) + + def __assert_edge_s2l_3_31(self, edges): + 
self.assertEqual( + edges[u'uns--synset-3--lexical_unit-31--has_unit'], + { + u'source': u'synset-3', + u'target': u'lexical_unit-31', + u'type': u'unit_and_synset', + u'name': u'has_unit', + }, + ) + + def __assert_edge_s2l_4_41(self, edges): + self.assertEqual( + edges[u'uns--synset-4--lexical_unit-41--has_unit'], + { + u'source': u'synset-4', + u'target': u'lexical_unit-41', + u'type': u'unit_and_synset', + u'name': u'has_unit', + }, + ) + + def __assert_edge_l2s_11_1(self, edges): + self.assertEqual( + edges[u'uns--lexical_unit-11--synset-1--in_synset'], + { + u'source': u'lexical_unit-11', + u'target': u'synset-1', + u'type': u'unit_and_synset', + u'name': u'in_synset', + }, + ) + + def __assert_edge_l2s_21_2(self, edges): + self.assertEqual( + edges[u'uns--lexical_unit-21--synset-2--in_synset'], + { + u'source': u'lexical_unit-21', + u'target': u'synset-2', + u'type': u'unit_and_synset', + u'name': u'in_synset', + }, + ) + + def __assert_edge_l2s_31_3(self, edges): + self.assertEqual( + edges[u'uns--lexical_unit-31--synset-3--in_synset'], + { + u'source': u'lexical_unit-31', + u'target': u'synset-3', + u'type': u'unit_and_synset', + u'name': u'in_synset', + }, + ) + + def __assert_edge_l2s_41_4(self, edges): + self.assertEqual( + edges[u'uns--lexical_unit-41--synset-4--in_synset'], + { + u'source': u'lexical_unit-41', + u'target': u'synset-4', + u'type': u'unit_and_synset', + u'name': u'in_synset', + }, + ) + + def setUp(self): + self.__plwn = self._PLWNClass.from_reader(( + nd.make_relation_type_node(kind=en.RelationKind.synset, name=u'A'), + nd.make_relation_type_node(kind=en.RelationKind.synset, name=u'B'), + nd.make_relation_type_node( + kind=en.RelationKind.synset, + name=u'u', + parent=u'C', + ), + nd.make_relation_type_node( + kind=en.RelationKind.synset, + name=u'd', + parent=u'C', + ), + nd.make_relation_type_node( + kind=en.RelationKind.lexical, + name=u'a', + ), + nd.make_relation_type_node( + kind=en.RelationKind.lexical, + name=u'b', + ), + 
nd.make_synset_node(id=1, related=((u'A', 2),)), + nd.make_synset_node(id=2, related=((u'B', 3), (u'C/u', 4))), + nd.make_synset_node(id=3), + nd.make_synset_node( + id=4, + related=((u'C/d', 2),), + is_artificial=True, + ), + nd.make_lexical_unit_node( + id=11, + lemma=u'first', + pos=en.PoS.n, + variant=1, + synset=1, + unit_index=1, + domain=en.Domain.bhp, + related=((u'a', 21),), + ), + nd.make_lexical_unit_node( + id=21, + lemma=u'second', + pos=en.PoS.n, + variant=1, + synset=2, + unit_index=1, + domain=en.Domain.bhp, + related=((u'b', 31),), + ), + nd.make_lexical_unit_node( + id=31, + lemma=u'third', + pos=en.PoS.n, + variant=1, + synset=3, + unit_index=1, + domain=en.Domain.bhp, + ), + nd.make_lexical_unit_node( + id=41, + lemma=u'unreal', + pos=en.PoS.n, + variant=1, + synset=4, + unit_index=1, + domain=en.Domain.bhp, + ), + )) + + def tearDown(self): + self.__plwn.close() + + def test_full_graph(self): + tree = _write_and_read( + self.__plwn, + graph_type='mixed', + skip_artificial_synsets=False, + ) + + self.assertEqual( + _get_node_id_set(tree), + frozenset(( + u'synset-1', + u'synset-2', + u'synset-3', + u'synset-4', + u'lexical_unit-11', + u'lexical_unit-21', + u'lexical_unit-31', + u'lexical_unit-41', + )), + ) + + edges = _make_edge_dict(tree) + self.assertEqual(len(edges), 14) + self.__assert_edge_syn_1_2(edges) + self.__assert_edge_syn_2_3(edges) + self.__assert_edge_syn_2_4(edges) + self.__assert_edge_syn_4_2(edges) + self.__assert_edge_lex_11_21(edges) + self.__assert_edge_lex_21_31(edges) + self.__assert_edge_s2l_1_11(edges) + self.__assert_edge_s2l_2_21(edges) + self.__assert_edge_s2l_3_31(edges) + self.__assert_edge_s2l_4_41(edges) + self.__assert_edge_l2s_11_1(edges) + self.__assert_edge_l2s_21_2(edges) + self.__assert_edge_l2s_31_3(edges) + self.__assert_edge_l2s_41_4(edges) + + def test_include_synset_edges(self): + """Include only certain synset edges in the graph. 
+
+        All nodes should remain intact, except for 4 connected by excluded syn
+        relations.
+        """
+        tree = _write_and_read(
+            self.__plwn,
+            graph_type='mixed',
+            included_synset_relations=(u'B',),
+            skip_artificial_synsets=False,
+        )
+
+        self.assertEqual(
+            _get_node_id_set(tree),
+            frozenset((
+                u'synset-1',
+                u'synset-2',
+                u'synset-3',
+                u'lexical_unit-11',
+                u'lexical_unit-21',
+                u'lexical_unit-31',
+            )),
+        )
+
+        edges = _make_edge_dict(tree)
+        self.assertEqual(len(edges), 9)
+        self.__assert_edge_syn_2_3(edges)
+        self.__assert_edge_lex_11_21(edges)
+        self.__assert_edge_lex_21_31(edges)
+        self.__assert_edge_s2l_1_11(edges)
+        self.__assert_edge_s2l_2_21(edges)
+        self.__assert_edge_s2l_3_31(edges)
+        self.__assert_edge_l2s_11_1(edges)
+        self.__assert_edge_l2s_21_2(edges)
+        self.__assert_edge_l2s_31_3(edges)
+
+    def test_exclude_synset_edges(self):
+        """Exclude some synset edges in the graph.
+
+        All nodes should remain intact.
+        """
+        tree = _write_and_read(
+            self.__plwn,
+            graph_type='mixed',
+            excluded_synset_relations=(u'B',),
+            skip_artificial_synsets=False,
+        )
+
+        self.assertEqual(
+            _get_node_id_set(tree),
+            frozenset((
+                u'synset-1',
+                u'synset-2',
+                u'synset-3',
+                u'synset-4',
+                u'lexical_unit-11',
+                u'lexical_unit-21',
+                u'lexical_unit-31',
+                u'lexical_unit-41',
+            )),
+        )
+
+        edges = _make_edge_dict(tree)
+        self.assertEqual(len(edges), 13)
+        self.__assert_edge_syn_1_2(edges)
+        self.__assert_edge_syn_2_4(edges)
+        self.__assert_edge_syn_4_2(edges)
+        self.__assert_edge_lex_11_21(edges)
+        self.__assert_edge_lex_21_31(edges)
+        self.__assert_edge_s2l_1_11(edges)
+        self.__assert_edge_s2l_2_21(edges)
+        self.__assert_edge_s2l_3_31(edges)
+        self.__assert_edge_s2l_4_41(edges)
+        self.__assert_edge_l2s_11_1(edges)
+        self.__assert_edge_l2s_21_2(edges)
+        self.__assert_edge_l2s_31_3(edges)
+        self.__assert_edge_l2s_41_4(edges)
+
+    def test_include_lexical_relations(self):
+        """Include only some of the lexical relation edges in the graph.
+
+        All nodes should remain.
+ """ + tree = _write_and_read( + self.__plwn, + graph_type='mixed', + included_lexical_unit_relations=(u'b',), + skip_artificial_synsets=False, + ) + + self.assertEqual( + _get_node_id_set(tree), + frozenset(( + u'synset-1', + u'synset-2', + u'synset-3', + u'synset-4', + u'lexical_unit-11', + u'lexical_unit-21', + u'lexical_unit-31', + u'lexical_unit-41', + )), + ) + + edges = _make_edge_dict(tree) + self.assertEqual(len(edges), 13) + self.__assert_edge_syn_1_2(edges) + self.__assert_edge_syn_2_3(edges) + self.__assert_edge_syn_2_4(edges) + self.__assert_edge_syn_4_2(edges) + self.__assert_edge_lex_21_31(edges) + self.__assert_edge_s2l_1_11(edges) + self.__assert_edge_s2l_2_21(edges) + self.__assert_edge_s2l_3_31(edges) + self.__assert_edge_s2l_4_41(edges) + self.__assert_edge_l2s_11_1(edges) + self.__assert_edge_l2s_21_2(edges) + self.__assert_edge_l2s_31_3(edges) + self.__assert_edge_l2s_41_4(edges) + + def test_exclude_lexical_relations(self): + """Exclude some of the lexical relation edges from the graph. + + All nodes should remain. 
+ """ + tree = _write_and_read( + self.__plwn, + graph_type='mixed', + excluded_lexical_unit_relations=(u'b',), + skip_artificial_synsets=False, + ) + + self.assertEqual( + _get_node_id_set(tree), + frozenset(( + u'synset-1', + u'synset-2', + u'synset-3', + u'synset-4', + u'lexical_unit-11', + u'lexical_unit-21', + u'lexical_unit-31', + u'lexical_unit-41', + )), + ) + + edges = _make_edge_dict(tree) + self.assertEqual(len(edges), 13) + self.__assert_edge_syn_1_2(edges) + self.__assert_edge_syn_2_3(edges) + self.__assert_edge_syn_2_4(edges) + self.__assert_edge_syn_4_2(edges) + self.__assert_edge_lex_11_21(edges) + self.__assert_edge_s2l_1_11(edges) + self.__assert_edge_s2l_2_21(edges) + self.__assert_edge_s2l_3_31(edges) + self.__assert_edge_s2l_4_41(edges) + self.__assert_edge_l2s_11_1(edges) + self.__assert_edge_l2s_21_2(edges) + self.__assert_edge_l2s_31_3(edges) + self.__assert_edge_l2s_41_4(edges) + + def test_cut_off_nodes(self): + """Remove both corresponding lexical unit and synset edges. + + This should make a synset and a unit disappear from the graph. They are + not connected to any other synset or unit, and only have their + has_unit/in_synset edges. 
+ """ + tree = _write_and_read( + self.__plwn, + graph_type='mixed', + excluded_synset_relations=(u'B',), + excluded_lexical_unit_relations=(u'b',), + skip_artificial_synsets=False, + ) + + self.assertEqual( + _get_node_id_set(tree), + frozenset(( + u'synset-1', + u'synset-2', + u'synset-4', + u'lexical_unit-11', + u'lexical_unit-21', + u'lexical_unit-41', + )), + ) + + edges = _make_edge_dict(tree) + self.assertEqual(len(edges), 10) + self.__assert_edge_syn_1_2(edges) + self.__assert_edge_syn_2_4(edges) + self.__assert_edge_syn_4_2(edges) + self.__assert_edge_lex_11_21(edges) + self.__assert_edge_s2l_1_11(edges) + self.__assert_edge_s2l_2_21(edges) + self.__assert_edge_s2l_4_41(edges) + self.__assert_edge_l2s_11_1(edges) + self.__assert_edge_l2s_21_2(edges) + self.__assert_edge_l2s_41_4(edges) + + def test_include_synset_nodes(self): + """Include only some of the synsets in the graph. + + This should cause corresponding lexical units to be dropped from the + graph, as well as relation edges. + """ + tree = _write_and_read( + self.__plwn, + graph_type='mixed', + included_synset_nodes=(1, 2), + skip_artificial_synsets=False, + ) + + self.assertEqual( + _get_node_id_set(tree), + frozenset(( + u'synset-1', + u'synset-2', + u'lexical_unit-11', + u'lexical_unit-21', + )), + ) + + edges = _make_edge_dict(tree) + self.assertEqual(len(edges), 6) + self.__assert_edge_syn_1_2(edges) + self.__assert_edge_lex_11_21(edges) + self.__assert_edge_s2l_1_11(edges) + self.__assert_edge_s2l_2_21(edges) + self.__assert_edge_l2s_11_1(edges) + self.__assert_edge_l2s_21_2(edges) + + def test_exclude_synset_nodes(self): + """Exclude some synsets from the graph. + + Some nodes should drop, like in the include test. 
+ """ + tree = _write_and_read( + self.__plwn, + graph_type='mixed', + excluded_synset_nodes=(1,), + skip_artificial_synsets=False, + ) + + self.assertEqual( + _get_node_id_set(tree), + frozenset(( + u'synset-2', + u'synset-3', + u'synset-4', + u'lexical_unit-21', + u'lexical_unit-31', + u'lexical_unit-41', + )), + ) + + edges = _make_edge_dict(tree) + self.assertEqual(len(edges), 10) + self.__assert_edge_syn_2_3(edges) + self.__assert_edge_syn_2_4(edges) + self.__assert_edge_syn_4_2(edges) + self.__assert_edge_lex_21_31(edges) + self.__assert_edge_s2l_2_21(edges) + self.__assert_edge_s2l_3_31(edges) + self.__assert_edge_s2l_4_41(edges) + self.__assert_edge_l2s_21_2(edges) + self.__assert_edge_l2s_31_3(edges) + self.__assert_edge_l2s_41_4(edges) + + def test_include_lexical_unit_nodes(self): + """Include only some of lexical unit nodes in the graph. + + Same thing like with including synsets. + """ + tree = _write_and_read( + self.__plwn, + graph_type='mixed', + included_lexical_unit_nodes=(11, 21), + skip_artificial_synsets=False, + ) + + self.assertEqual( + _get_node_id_set(tree), + frozenset(( + u'synset-1', + u'synset-2', + u'lexical_unit-11', + u'lexical_unit-21', + )), + ) + + edges = _make_edge_dict(tree) + self.assertEqual(len(edges), 6) + self.__assert_edge_syn_1_2(edges) + self.__assert_edge_lex_11_21(edges) + self.__assert_edge_s2l_1_11(edges) + self.__assert_edge_s2l_2_21(edges) + self.__assert_edge_l2s_11_1(edges) + self.__assert_edge_l2s_21_2(edges) + + def test_exclude_lexical_unit_nodes(self): + """Exclude some of lexical unit nodes in the graph. + + Same thing like with excluding synsets. 
+ """ + tree = _write_and_read( + self.__plwn, + graph_type='mixed', + excluded_lexical_unit_nodes=(11,), + skip_artificial_synsets=False, + ) + + self.assertEqual( + _get_node_id_set(tree), + frozenset(( + u'synset-2', + u'synset-3', + u'synset-4', + u'lexical_unit-21', + u'lexical_unit-31', + u'lexical_unit-41', + )), + ) + + edges = _make_edge_dict(tree) + self.assertEqual(len(edges), 10) + self.__assert_edge_syn_2_3(edges) + self.__assert_edge_syn_2_4(edges) + self.__assert_edge_syn_4_2(edges) + self.__assert_edge_lex_21_31(edges) + self.__assert_edge_s2l_2_21(edges) + self.__assert_edge_s2l_3_31(edges) + self.__assert_edge_s2l_4_41(edges) + self.__assert_edge_l2s_21_2(edges) + self.__assert_edge_l2s_31_3(edges) + self.__assert_edge_l2s_41_4(edges) + + def test_break_graph(self): + """Remove one of the middle nodes, to empty the whole graph.""" + tree = _write_and_read( + self.__plwn, + graph_type='mixed', + excluded_synset_nodes=(2,), + skip_artificial_synsets=False, + ) + + self.assertFalse(_get_node_id_set(tree)) + self.assertFalse(_make_edge_dict(tree)) + + def test_no_artificial(self): + """Omit the artificial synset from the graph.""" + tree = _write_and_read( + self.__plwn, + graph_type='mixed', + skip_artificial_synsets=True, + ) + + self.assertEqual( + _get_node_id_set(tree), + frozenset(( + u'synset-1', + u'synset-2', + u'synset-3', + u'lexical_unit-11', + u'lexical_unit-21', + u'lexical_unit-31', + )), + ) + + edges = _make_edge_dict(tree) + self.assertEqual(len(edges), 10) + self.__assert_edge_l2s_11_1(edges) + self.__assert_edge_l2s_21_2(edges) + self.__assert_edge_l2s_31_3(edges) + self.__assert_edge_s2l_1_11(edges) + self.__assert_edge_s2l_2_21(edges) + self.__assert_edge_s2l_3_31(edges) + self.__assert_edge_syn_1_2(edges) + self.__assert_edge_syn_2_3(edges) + self.__assert_edge_lex_11_21(edges) + self.__assert_edge_lex_21_31(edges) + + +def _write_and_read(plwn, **kwargs): + outfile = io.BytesIO() + plwn.to_graphml(outfile, **kwargs) + return 
et.XML(outfile.getvalue()) + + +def _get_data_values(synset_elem): + return { + data_elem.attrib[u'key']: data_elem.text + for data_elem in synset_elem.findall(u'./data') + } + + +def _make_attr_key_dicts(xml_root): + dicts = {} + + for key_elem in xml_root.findall(u'./key'): + attrdict = key_elem.attrib.copy() + attrid = attrdict.pop(u'id') + dicts[attrid] = attrdict + + return dicts + + +def _get_node_id_set(xml_root): + return frozenset( + elem.attrib[u'id'] + for elem in xml_root.findall(u'./graph/node') + ) + + +def _make_edge_dict(xml_root): + return { + edge_elem.attrib[u'id']: { + u'source': edge_elem.attrib[u'source'], + u'target': edge_elem.attrib[u'target'], + u'type': edge_elem.find(u"./data[@key='edge-type']").text, + u'name': edge_elem.find(u"./data[@key='edge-name']").text, + } + for edge_elem in xml_root.findall(u'./graph/edge') + } diff --git a/tests/abstract_cases/test_plwordnet.py b/tests/abstract_cases/test_plwordnet.py new file mode 100644 index 0000000..748cfa4 --- /dev/null +++ b/tests/abstract_cases/test_plwordnet.py @@ -0,0 +1,1163 @@ +# coding: utf8 +from __future__ import absolute_import, division + + +import unittest as ut + +from plwn.readers import nodes as nd +from plwn.bases import RelationEdge, RelationInfoBase +from plwn import exceptions as exc, enums as en + + +__all__ = ( + 'ItemSelectingTest', + 'SynsetRelationEdgesTest', + 'SynsetRelationEdgesWithArtificialTest', + 'SynsetRelationEdgesWithArtificialLoopTest', + 'LexicalUnitRelationEdgesTest', +) + + +class ItemSelectingTest(ut.TestCase): + + _PLWNClass = None # Override in subclass + + def setUp(self): + self.__plwn = self._PLWNClass.from_reader(( + nd.make_synset_node(id=1), + nd.make_synset_node(id=2, definition=u'tok'), + nd.make_synset_node(id=3), + nd.make_synset_node(id=4), + nd.make_lexical_unit_node( + id=11, + lemma=u'pies', + pos=en.PoS.n, + variant=1, + synset=1, + unit_index=1, + domain=en.Domain.bhp, + ), + nd.make_lexical_unit_node( + id=21, + lemma=u'kot', + 
pos=en.PoS.n, + variant=1, + synset=2, + unit_index=1, + domain=en.Domain.bhp, + definition=u'kici', + ), + nd.make_lexical_unit_node( + id=22, + lemma=u'kot', + pos=en.PoS.n, + variant=2, + synset=2, + unit_index=2, + domain=en.Domain.bhp, + ), + nd.make_lexical_unit_node( + id=31, + lemma=u'pies', + pos=en.PoS.n, + variant=2, + synset=3, + unit_index=1, + domain=en.Domain.bhp, + emotion_markedness=en.EmotionMarkedness.strong_negative, + emotion_names=(en.EmotionName.fear, en.EmotionName.disgust), + emotion_example_1=u'bim', + ), + # "Pies" is not a verb, but still, for testing... + nd.make_lexical_unit_node( + id=41, + lemma=u'pies', + pos=en.PoS.v, + variant=1, + synset=4, + unit_index=1, + domain=en.Domain.bhp, + verb_aspect=en.VerbAspect.ndk, + ), + )) + + def tearDown(self): + self.__plwn.close() + + def test_synsets_nonexistent(self): + """Select a synset that doesn't exists (in "synsets" method).""" + self.assertFalse(tuple(self.__plwn.synsets(u'not'))) + + def test_synsets_by_lemma(self): + """Select several synsets with the same lemmas.""" + self.assertEqual( + frozenset(self.__plwn.synsets(u'pies')), + frozenset(( + self.__plwn.synset_by_id(1), + self.__plwn.synset_by_id(3), + self.__plwn.synset_by_id(4), + )), + ) + + def test_synsets_by_pos(self): + """Select several synsets with the same pos.""" + self.assertEqual( + frozenset(self.__plwn.synsets(pos=en.PoS.n)), + frozenset(( + self.__plwn.synset_by_id(1), + self.__plwn.synset_by_id(2), + self.__plwn.synset_by_id(3), + )), + ) + + def test_synsets_by_combination(self): + """Select several synsets by lemma and pos .""" + self.assertEqual( + frozenset(self.__plwn.synsets(lemma=u'pies', pos=en.PoS.n)), + frozenset(( + self.__plwn.synset_by_id(1), + self.__plwn.synset_by_id(3), + )), + ) + + def test_synsets_one(self): + """Select one synset by "synsets" method.""" + self.assertEqual( + frozenset( + self.__plwn.synsets(lemma=u'pies', pos=en.PoS.n, variant=1), + ), + 
frozenset((self.__plwn.synset_by_id(1),)), + ) + + def test_synset_multi_lexunits(self): + """Select lemma / pos combination. + + That points to multiple lexical units within the same synset. + """ + self.assertEqual( + frozenset(self.__plwn.synsets(lemma=u'kot', pos=en.PoS.n)), + frozenset((self.__plwn.synset_by_id(2),)), + ) + + def test_synsets_all(self): + """Select all synsets.""" + self.assertEqual( + frozenset(self.__plwn.synsets()), + frozenset(( + self.__plwn.synset_by_id(1), + self.__plwn.synset_by_id(2), + self.__plwn.synset_by_id(3), + self.__plwn.synset_by_id(4), + )), + ) + + def test_synset_any_none(self): + """Make sure the method doesn't allow ``None`` as arguments.""" + self.assertRaises( + exc.SynsetNotFound, + self.__plwn.synset, + None, + en.PoS.n, + 1, + ) + self.assertRaises( + exc.SynsetNotFound, + self.__plwn.synset, + u'kot', + None, + 1, + ) + self.assertRaises( + exc.SynsetNotFound, + self.__plwn.synset, + u'kot', + en.PoS.n, + None, + ) + + def test_synset_one(self): + """Select a single synset.""" + sel_one = self.__plwn.synset(lemma=u'kot', pos=en.PoS.n, variant=1) + sel_id = self.__plwn.synset_by_id(2) + + self.assertEqual(sel_one, sel_id) + # As this tests both "synset" and "synset_by_id", ensure all properties + # are correct; those that were set, anyway. 
+ self.assertEqual(sel_one.id, 2) + self.assertEqual(sel_one.id, sel_id.id) + self.assertEqual(sel_one.definition, u'tok') + self.assertEqual(sel_one.definition, sel_id.definition) + self.assertFalse(sel_one.is_artificial) + self.assertEqual(sel_one.is_artificial, sel_id.is_artificial) + self.assertEqual( + sel_one.lexical_units, + ( + self.__plwn.lexical_unit_by_id(21), + self.__plwn.lexical_unit_by_id(22), + ), + ) + self.assertEqual(sel_one.lexical_units, sel_id.lexical_units) + + def test_synset_nonexistent(self): + """Select a nonexistent synset.""" + self.assertRaises( + exc.SynsetNotFound, + self.__plwn.synset, + u'not', + en.PoS.n, + 1, + ) + + def test_synset_by_id_nonexistent(self): + """Select a nonexistent id.""" + self.assertRaises( + exc.SynsetNotFound, + self.__plwn.synset_by_id, + 100, + ) + + def test_lexical_units_nonexistent(self): + """Select a nonexistent combination of properties.""" + self.assertFalse(tuple(self.__plwn.lexical_units(u'not'))) + + def test_lexical_units_by_lemma(self): + """Select several lexical units sharing a lemma.""" + self.assertEqual( + frozenset(self.__plwn.lexical_units(u'pies')), + frozenset(( + self.__plwn.lexical_unit_by_id(11), + self.__plwn.lexical_unit_by_id(31), + self.__plwn.lexical_unit_by_id(41), + )), + ) + + def test_lexical_units_by_pos(self): + """Select a lexical unit by pos.""" + self.assertEqual( + frozenset(self.__plwn.lexical_units(pos=en.PoS.v)), + frozenset((self.__plwn.lexical_unit_by_id(41),)), + ) + + def test_lexical_units_by_combination(self): + """Select a lexical unit by combination of lemma and pos.""" + self.assertEqual( + frozenset( + self.__plwn.lexical_units(lemma=u'pies', pos=en.PoS.n), + ), + frozenset(( + self.__plwn.lexical_unit_by_id(11), + self.__plwn.lexical_unit_by_id(31), + )), + ) + + def test_lexical_units_one(self): + """Select one lexical unit using a full combination.""" + self.assertEqual( + frozenset( + self.__plwn.lexical_units( + lemma=u'kot', + pos=en.PoS.n, + 
variant=2, + ) + ), + frozenset((self.__plwn.lexical_unit_by_id(22),)), + ) + + def test_lexical_units_all(self): + """Select all lexical units.""" + self.assertEqual( + frozenset(self.__plwn.lexical_units()), + frozenset(( + self.__plwn.lexical_unit_by_id(11), + self.__plwn.lexical_unit_by_id(21), + self.__plwn.lexical_unit_by_id(22), + self.__plwn.lexical_unit_by_id(31), + self.__plwn.lexical_unit_by_id(41), + )), + ) + + def test_lexical_unit_any_none(self): + """Make sure the method does not accept ``None`` as arguments.""" + self.assertRaises( + exc.LexicalUnitNotFound, + self.__plwn.lexical_unit, + None, + en.PoS.n, + 1, + ) + self.assertRaises( + exc.LexicalUnitNotFound, + self.__plwn.lexical_unit, + u'kot', + None, + 1, + ) + self.assertRaises( + exc.LexicalUnitNotFound, + self.__plwn.lexical_unit, + u'kot', + en.PoS.n, + None, + ) + + def test_lexical_unit_one(self): + """Select a single lexical unit.""" + sel_one = self.__plwn.lexical_unit( + lemma=u'kot', + pos=en.PoS.n, + variant=1, + ) + sel_id = self.__plwn.lexical_unit_by_id(21) + + self.assertEqual(sel_one, sel_id) + # As this tests both "lexical_unit" and "lexical_unit_by_id", compare + # if some of the properties are correct. Don't check all to not + # duplicate tests; XXX but maybe move all tests to here and remove some + # external test cases. 
+ self.assertEqual(sel_one.id, 21) + self.assertEqual(sel_one.id, sel_id.id) + self.assertEqual(sel_one.lemma, u'kot') + self.assertEqual(sel_one.lemma, sel_id.lemma) + self.assertIs(sel_one.pos, en.PoS.n) + self.assertEqual(sel_one.pos, sel_id.pos) + self.assertEqual(sel_one.variant, 1) + self.assertEqual(sel_one.variant, sel_id.variant) + self.assertEqual(sel_one.definition, u'kici') + self.assertEqual(sel_one.definition, sel_id.definition) + + def test_lexical_unit_nonexistent(self): + """Select a nonexistent lexical unit.""" + self.assertRaises( + exc.LexicalUnitNotFound, + self.__plwn.lexical_unit, + lemma=u'not', + pos=en.PoS.n, + variant=1, + ) + + def test_lexical_unit_by_id_nonexistent(self): + """Select a nonexistent id.""" + self.assertRaises( + exc.LexicalUnitNotFound, + self.__plwn.lexical_unit_by_id, + 100, + ) + + +class SynsetRelationEdgesTest(ut.TestCase): + """Inspect synset relations in a simple graph. + + :: + .----. mero/cz .----. + | 4 |<------------| 3 | + | |------------>| | + '----' holo/cz '----' + ^ | ^ | + | |hipo | |hipo + hiper| | hiper| | + | v | v + .----. mero/cz .----. 
+ | 1 |<------------| 2 | + | |------------>| | + '----' holo/cz '----' + """ + + _PLWNClass = None # Override in subclass + + def setUp(self): + self.__plwn = self._PLWNClass.from_reader(( + nd.make_relation_type_node( + kind=en.RelationKind.synset, + name=u'hiperonimia', + ), + nd.make_relation_type_node( + kind=en.RelationKind.synset, + name=u'hiponimia', + aliases=('hipo',), + ), + # nd.make_relation_type_node( + # kind=en.RelationKind.synset, + # name=u'meronimia', + # ), + # nd.make_relation_type_node( + # kind=en.RelationKind.synset, + # name=u'holonimia', + # ), + nd.make_relation_type_node( + kind=en.RelationKind.synset, + name=u'część', + parent=u'meronimia', + ), + nd.make_relation_type_node( + kind=en.RelationKind.synset, + name=u'część', + parent=u'holonimia', + ), + nd.make_synset_node( + id=1, + related=( + (u'hiperonimia', 4), + ( + RelationInfoBase.format_name(u'holonimia', u'część'), + 2, + ), + ), + ), + nd.make_synset_node( + id=2, + related=( + ( + RelationInfoBase.format_name(u'meronimia', u'część'), + 1, + ), + (u'hiperonimia', 3), + ), + ), + nd.make_synset_node( + id=3, + related=( + (u'hiponimia', 2), + ( + RelationInfoBase.format_name(u'meronimia', u'część'), + 4, + ), + ), + ), + nd.make_synset_node( + id=4, + related=( + (u'hiponimia', 1), + ( + RelationInfoBase.format_name(u'holonimia', u'część'), + 3, + ), + ), + ), + nd.make_lexical_unit_node( + id=11, + lemma=u'aaa', + pos=en.PoS.n, + variant=1, + synset=1, + unit_index=1, + domain=en.Domain.bhp, + ), + nd.make_lexical_unit_node( + id=21, + lemma=u'aaa', + pos=en.PoS.n, + variant=2, + synset=2, + unit_index=1, + domain=en.Domain.bhp, + ), + nd.make_lexical_unit_node( + id=31, + lemma=u'aaa', + pos=en.PoS.n, + variant=3, + synset=3, + unit_index=1, + domain=en.Domain.bhp, + ), + nd.make_lexical_unit_node( + id=41, + lemma=u'aaa', + pos=en.PoS.n, + variant=4, + synset=4, + unit_index=1, + domain=en.Domain.bhp, + verb_aspect=en.VerbAspect.perf, + ), + )) + + try: + self.__rel_hiper = 
tuple(self.__plwn.relations_info( + u'hiperonimia', + en.RelationKind.synset, + ))[0] + self.__rel_hipo = tuple(self.__plwn.relations_info( + u'hiponimia', + en.RelationKind.synset, + ))[0] + self.__rel_mero = tuple(self.__plwn.relations_info( + u'meronimia/część', + en.RelationKind.synset, + ))[0] + self.__rel_holo = tuple(self.__plwn.relations_info( + u'holonimia/część', + en.RelationKind.synset, + ))[0] + except BaseException: + self.__plwn.close() + raise + + def tearDown(self): + self.__plwn.close() + + def test_include(self): + self.assertEqual( + frozenset(self.__plwn.synset_relation_edges( + include=(u'hiperonimia', u'meronimia'), + )), + frozenset(( + RelationEdge( + source=self.__plwn.synset_by_id(1), + target=self.__plwn.synset_by_id(4), + relation=self.__rel_hiper, + ), + RelationEdge( + source=self.__plwn.synset_by_id(2), + target=self.__plwn.synset_by_id(3), + relation=self.__rel_hiper, + ), + RelationEdge( + source=self.__plwn.synset_by_id(3), + target=self.__plwn.synset_by_id(4), + relation=self.__rel_mero, + ), + RelationEdge( + source=self.__plwn.synset_by_id(2), + target=self.__plwn.synset_by_id(1), + relation=self.__rel_mero, + ), + )), + ) + + def test_exclude(self): + self.assertEqual( + frozenset(self.__plwn.synset_relation_edges( + exclude=(u'holonimia/część', u'hipo'), + )), + frozenset(( + RelationEdge( + source=self.__plwn.synset_by_id(1), + target=self.__plwn.synset_by_id(4), + relation=self.__rel_hiper, + ), + RelationEdge( + source=self.__plwn.synset_by_id(2), + target=self.__plwn.synset_by_id(3), + relation=self.__rel_hiper, + ), + RelationEdge( + source=self.__plwn.synset_by_id(3), + target=self.__plwn.synset_by_id(4), + relation=self.__rel_mero, + ), + RelationEdge( + source=self.__plwn.synset_by_id(2), + target=self.__plwn.synset_by_id(1), + relation=self.__rel_mero, + ), + )), + ) + + def test_all(self): + self.assertEqual( + frozenset(self.__plwn.synset_relation_edges()), + frozenset(( + RelationEdge( + 
source=self.__plwn.synset_by_id(1), + target=self.__plwn.synset_by_id(4), + relation=self.__rel_hiper, + ), + RelationEdge( + source=self.__plwn.synset_by_id(2), + target=self.__plwn.synset_by_id(3), + relation=self.__rel_hiper, + ), + RelationEdge( + source=self.__plwn.synset_by_id(3), + target=self.__plwn.synset_by_id(4), + relation=self.__rel_mero, + ), + RelationEdge( + source=self.__plwn.synset_by_id(2), + target=self.__plwn.synset_by_id(1), + relation=self.__rel_mero, + ), + RelationEdge( + source=self.__plwn.synset_by_id(4), + target=self.__plwn.synset_by_id(1), + relation=self.__rel_hipo, + ), + RelationEdge( + source=self.__plwn.synset_by_id(3), + target=self.__plwn.synset_by_id(2), + relation=self.__rel_hipo, + ), + RelationEdge( + source=self.__plwn.synset_by_id(4), + target=self.__plwn.synset_by_id(3), + relation=self.__rel_holo, + ), + RelationEdge( + source=self.__plwn.synset_by_id(1), + target=self.__plwn.synset_by_id(2), + relation=self.__rel_holo, + ), + )), + ) + + def test_combined(self): + self.assertEqual( + frozenset(self.__plwn.synset_relation_edges( + include=(u'meronimia/część', u'hiperonimia'), + exclude=(u'meronimia', u'holonimia'), + )), + frozenset(( + RelationEdge( + source=self.__plwn.synset_by_id(1), + target=self.__plwn.synset_by_id(4), + relation=self.__rel_hiper, + ), + RelationEdge( + source=self.__plwn.synset_by_id(2), + target=self.__plwn.synset_by_id(3), + relation=self.__rel_hiper, + ), + )), + ) + + +class SynsetRelationEdgesWithArtificialTest(ut.TestCase): + """Get relation edges from a graph. + + Where skipping artificial synsets will produce interesting results. + + :: + .---. .---. + | 3 | | 4 | + '---' '---' + | ^ ^ | + hiper | | hipo hipo | | hiper + | | | | + | '---.----.----' | + | | 2a | | + '---->'----'<-----' + | ^ + hiper | | hipo + v | + .---. 
+ | 1 | + '---' + """ + + _PLWNClass = None # Override in subclass + + def setUp(self): + self.__plwn = self._PLWNClass.from_reader(( + nd.make_relation_type_node( + kind=en.RelationKind.synset, + name=u'hiponimia', + ), + nd.make_relation_type_node( + kind=en.RelationKind.synset, + name=u'hiperonimia', + ), + nd.make_synset_node( + id=1, + related=((u'hiponimia', 2),), + ), + nd.make_synset_node( + id=2, + related=( + (u'hiponimia', 3), + (u'hiponimia', 4), + (u'hiperonimia', 1), + ), + is_artificial=True, + ), + nd.make_synset_node( + id=3, + related=((u'hiperonimia', 2),), + ), + nd.make_synset_node( + id=4, + related=((u'hiperonimia', 2),), + ), + nd.make_lexical_unit_node( + id=11, + lemma=u'aaa', + pos=en.PoS.n, + variant=1, + synset=1, + unit_index=1, + domain=en.Domain.bhp, + ), + nd.make_lexical_unit_node( + id=21, + lemma=u'aaa', + pos=en.PoS.n, + variant=2, + synset=2, + unit_index=1, + domain=en.Domain.bhp, + ), + nd.make_lexical_unit_node( + id=31, + lemma=u'aaa', + pos=en.PoS.n, + variant=3, + synset=3, + unit_index=1, + domain=en.Domain.bhp, + ), + nd.make_lexical_unit_node( + id=41, + lemma=u'aaa', + pos=en.PoS.n, + variant=4, + synset=4, + unit_index=1, + domain=en.Domain.bhp, + ), + )) + + try: + self.__rel_hiper = tuple(self.__plwn.relations_info( + u'hiperonimia', + en.RelationKind.synset, + ))[0] + self.__rel_hipo = tuple(self.__plwn.relations_info( + u'hiponimia', + en.RelationKind.synset, + ))[0] + except BaseException: + self.__plwn.close() + raise + + def tearDown(self): + self.__plwn.close() + + def test_all(self): + self.assertEqual( + frozenset( + self.__plwn.synset_relation_edges(skip_artificial=False) + ), + frozenset(( + RelationEdge( + source=self.__plwn.synset_by_id(1), + target=self.__plwn.synset_by_id(2), + relation=self.__rel_hipo, + ), + RelationEdge( + source=self.__plwn.synset_by_id(2), + target=self.__plwn.synset_by_id(1), + relation=self.__rel_hiper, + ), + RelationEdge( + source=self.__plwn.synset_by_id(2), + 
target=self.__plwn.synset_by_id(3), + relation=self.__rel_hipo, + ), + RelationEdge( + source=self.__plwn.synset_by_id(2), + target=self.__plwn.synset_by_id(4), + relation=self.__rel_hipo, + ), + RelationEdge( + source=self.__plwn.synset_by_id(3), + target=self.__plwn.synset_by_id(2), + relation=self.__rel_hiper, + ), + RelationEdge( + source=self.__plwn.synset_by_id(4), + target=self.__plwn.synset_by_id(2), + relation=self.__rel_hiper, + ), + )), + ) + + def test_skipping(self): + self.assertEqual( + frozenset( + self.__plwn.synset_relation_edges(skip_artificial=True) + ), + frozenset(( + RelationEdge( + source=self.__plwn.synset_by_id(1), + target=self.__plwn.synset_by_id(3), + relation=self.__rel_hipo, + ), + RelationEdge( + source=self.__plwn.synset_by_id(1), + target=self.__plwn.synset_by_id(4), + relation=self.__rel_hipo, + ), + RelationEdge( + source=self.__plwn.synset_by_id(3), + target=self.__plwn.synset_by_id(1), + relation=self.__rel_hiper, + ), + RelationEdge( + source=self.__plwn.synset_by_id(4), + target=self.__plwn.synset_by_id(1), + relation=self.__rel_hiper, + ), + )), + ) + + +class SynsetRelationEdgesWithArtificialLoopTest(ut.TestCase): + """Checking for infinite loop while skipping edges in a looping graph. + + :: + .---. foo .----. + | 1 |---->| 2a |<--. + '---' '----' | + ^ | | + |foo |foo |foo + | v | + .---. foo .----. 
| + | 4 |<----| 3a |---' + '---' '----' + """ + + _PLWNClass = None # Override in subclass + + def setUp(self): + self.__plwn = self._PLWNClass.from_reader(( + nd.make_relation_type_node( + kind=en.RelationKind.synset, + name=u'foo', + ), + nd.make_synset_node(id=1, related=((u'foo', 2),)), + nd.make_synset_node( + id=2, + related=((u'foo', 3),), + is_artificial=True, + ), + nd.make_synset_node( + id=3, + related=((u'foo', 2), (u'foo', 4)), + is_artificial=True, + ), + nd.make_synset_node(id=4, related=((u'foo', 1),)), + nd.make_lexical_unit_node( + id=11, + lemma=u'aaa', + pos=en.PoS.n, + variant=1, + synset=1, + unit_index=1, + domain=en.Domain.bhp, + ), + nd.make_lexical_unit_node( + id=21, + lemma=u'aaa', + pos=en.PoS.n, + variant=2, + synset=2, + unit_index=1, + domain=en.Domain.bhp, + ), + nd.make_lexical_unit_node( + id=31, + lemma=u'aaa', + pos=en.PoS.n, + variant=3, + synset=3, + unit_index=1, + domain=en.Domain.bhp, + ), + nd.make_lexical_unit_node( + id=41, + lemma=u'aaa', + pos=en.PoS.n, + variant=4, + synset=4, + unit_index=1, + domain=en.Domain.bhp, + ), + )) + + def tearDown(self): + self.__plwn.close() + + def test_all(self): + syn1 = self.__plwn.synset_by_id(1) + syn2 = self.__plwn.synset_by_id(2) + syn3 = self.__plwn.synset_by_id(3) + syn4 = self.__plwn.synset_by_id(4) + rel = next(iter(self.__plwn.relations_info(u'foo')), None) + self.assertEqual( + frozenset( + self.__plwn.synset_relation_edges(skip_artificial=False) + ), + frozenset(( + RelationEdge(source=syn1, target=syn2, relation=rel), + RelationEdge(source=syn2, target=syn3, relation=rel), + RelationEdge(source=syn3, target=syn2, relation=rel), + RelationEdge(source=syn3, target=syn4, relation=rel), + RelationEdge(source=syn4, target=syn1, relation=rel), + )), + ) + + def test_skipping(self): + syn1 = self.__plwn.synset_by_id(1) + syn4 = self.__plwn.synset_by_id(4) + rel = next(iter(self.__plwn.relations_info(u'foo')), None) + self.assertEqual( + frozenset( + 
self.__plwn.synset_relation_edges(skip_artificial=True) + ), + frozenset(( + RelationEdge(source=syn1, target=syn4, relation=rel), + RelationEdge(source=syn4, target=syn1, relation=rel), + )), + ) + + +class LexicalUnitRelationEdgesTest(ut.TestCase): + """Inspect lexical relations in a simple graph. + + :: + .----. mero/cz .----. + | 41 |<------------| 31 | + | |------------>| | + '----' holo/cz '----' + ^ | ^ | + | |hipo | |hipo + hiper| | hiper| | + | v | v + .----. mero/cz .----. + | 11 |<------------| 21 | + | |------------>| | + '----' holo/cz '----' + + Relation names are like for synsets, because they don't really matter. + """ + + _PLWNClass = None # Override in subclass + + def setUp(self): + self.__plwn = self._PLWNClass.from_reader(( + nd.make_relation_type_node( + kind=en.RelationKind.lexical, + name=u'hiperonimia', + ), + nd.make_relation_type_node( + kind=en.RelationKind.lexical, + name=u'hiponimia', + ), + nd.make_relation_type_node( + kind=en.RelationKind.lexical, + name=u'część', + parent=u'holonimia', + aliases=(u'holo',), + ), + nd.make_relation_type_node( + kind=en.RelationKind.lexical, + name=u'część', + parent=u'meronimia', + ), + nd.make_synset_node(id=1), + nd.make_synset_node(id=2), + nd.make_synset_node(id=3), + nd.make_synset_node(id=4), + nd.make_lexical_unit_node( + id=11, + lemma=u'aaa', + pos=en.PoS.n, + variant=1, + synset=1, + unit_index=1, + domain=en.Domain.bhp, + related=((u'holonimia/część', 21), (u'hiperonimia', 41)), + ), + nd.make_lexical_unit_node( + id=21, + lemma=u'bbb', + pos=en.PoS.n, + variant=1, + synset=2, + unit_index=1, + domain=en.Domain.bhp, + related=((u'meronimia/część', 11), (u'hiperonimia', 31)), + ), + nd.make_lexical_unit_node( + id=31, + lemma=u'ccc', + pos=en.PoS.n, + variant=1, + synset=3, + unit_index=1, + domain=en.Domain.bhp, + related=((u'meronimia/część', 41), (u'hiponimia', 21)), + ), + nd.make_lexical_unit_node( + id=41, + lemma=u'ddd', + pos=en.PoS.n, + variant=1, + synset=4, + unit_index=1, + 
domain=en.Domain.bhp, + related=((u'holonimia/część', 31), (u'hiponimia', 11)), + verb_aspect=en.VerbAspect.perf, + ), + )) + + try: + self.__rel_hiper = tuple(self.__plwn.relations_info( + u'hiperonimia', + en.RelationKind.lexical, + ))[0] + self.__rel_hipo = tuple(self.__plwn.relations_info( + u'hiponimia', + en.RelationKind.lexical, + ))[0] + self.__rel_holo = tuple(self.__plwn.relations_info( + u'holonimia/część', + en.RelationKind.lexical, + ))[0] + self.__rel_mero = tuple(self.__plwn.relations_info( + u'meronimia/część', + en.RelationKind.lexical, + ))[0] + except BaseException: + self.__plwn.close() + raise + + def tearDown(self): + self.__plwn.close() + + def test_include(self): + self.assertEqual( + frozenset(self.__plwn.lexical_relation_edges(include=( + u'hiperonimia', + u'meronimia/część', + ))), + frozenset(( + RelationEdge( + source=self.__plwn.lexical_unit_by_id(11), + target=self.__plwn.lexical_unit_by_id(41), + relation=self.__rel_hiper, + ), + RelationEdge( + source=self.__plwn.lexical_unit_by_id(21), + target=self.__plwn.lexical_unit_by_id(31), + relation=self.__rel_hiper, + ), + RelationEdge( + source=self.__plwn.lexical_unit_by_id(31), + target=self.__plwn.lexical_unit_by_id(41), + relation=self.__rel_mero, + ), + RelationEdge( + source=self.__plwn.lexical_unit_by_id(21), + target=self.__plwn.lexical_unit_by_id(11), + relation=self.__rel_mero, + ), + )), + ) + + def test_exclude(self): + self.assertEqual( + frozenset( + # Also, check aliases + self.__plwn.lexical_relation_edges(exclude=(u'holo',)) + ), + frozenset(( + RelationEdge( + source=self.__plwn.lexical_unit_by_id(11), + target=self.__plwn.lexical_unit_by_id(41), + relation=self.__rel_hiper, + ), + RelationEdge( + source=self.__plwn.lexical_unit_by_id(21), + target=self.__plwn.lexical_unit_by_id(31), + relation=self.__rel_hiper, + ), + RelationEdge( + source=self.__plwn.lexical_unit_by_id(41), + target=self.__plwn.lexical_unit_by_id(11), + relation=self.__rel_hipo, + ), + RelationEdge( + 
source=self.__plwn.lexical_unit_by_id(31), + target=self.__plwn.lexical_unit_by_id(21), + relation=self.__rel_hipo, + ), + RelationEdge( + source=self.__plwn.lexical_unit_by_id(31), + target=self.__plwn.lexical_unit_by_id(41), + relation=self.__rel_mero, + ), + RelationEdge( + source=self.__plwn.lexical_unit_by_id(21), + target=self.__plwn.lexical_unit_by_id(11), + relation=self.__rel_mero, + ), + )), + ) + + def test_all(self): + self.assertEqual( + frozenset(self.__plwn.lexical_relation_edges()), + frozenset(( + RelationEdge( + source=self.__plwn.lexical_unit_by_id(11), + target=self.__plwn.lexical_unit_by_id(41), + relation=self.__rel_hiper, + ), + RelationEdge( + source=self.__plwn.lexical_unit_by_id(21), + target=self.__plwn.lexical_unit_by_id(31), + relation=self.__rel_hiper, + ), + RelationEdge( + source=self.__plwn.lexical_unit_by_id(31), + target=self.__plwn.lexical_unit_by_id(41), + relation=self.__rel_mero, + ), + RelationEdge( + source=self.__plwn.lexical_unit_by_id(21), + target=self.__plwn.lexical_unit_by_id(11), + relation=self.__rel_mero, + ), + RelationEdge( + source=self.__plwn.lexical_unit_by_id(41), + target=self.__plwn.lexical_unit_by_id(11), + relation=self.__rel_hipo, + ), + RelationEdge( + source=self.__plwn.lexical_unit_by_id(31), + target=self.__plwn.lexical_unit_by_id(21), + relation=self.__rel_hipo, + ), + RelationEdge( + source=self.__plwn.lexical_unit_by_id(41), + target=self.__plwn.lexical_unit_by_id(31), + relation=self.__rel_holo, + ), + RelationEdge( + source=self.__plwn.lexical_unit_by_id(11), + target=self.__plwn.lexical_unit_by_id(21), + relation=self.__rel_holo, + ), + )), + ) + + def test_combined(self): + self.assertEqual( + frozenset(self.__plwn.lexical_relation_edges( + include=(u'meronimia', u'hiperonimia'), + exclude=(u'meronimia', u'holonimia'), + )), + frozenset(( + RelationEdge( + source=self.__plwn.lexical_unit_by_id(11), + target=self.__plwn.lexical_unit_by_id(41), + relation=self.__rel_hiper, + ), + RelationEdge( + 
source=self.__plwn.lexical_unit_by_id(21), + target=self.__plwn.lexical_unit_by_id(31), + relation=self.__rel_hiper, + ), + )), + ) diff --git a/tests/abstract_cases/test_unit_and_synset.py b/tests/abstract_cases/test_unit_and_synset.py new file mode 100644 index 0000000..626e50f --- /dev/null +++ b/tests/abstract_cases/test_unit_and_synset.py @@ -0,0 +1,981 @@ +# coding: utf8 +# XXX Relation operators for synsets and lexical units are not explicitly +# tested (only ordering), since they're not very important. +from __future__ import absolute_import, division + +import unittest as ut + +import six +from plwn.bases import RelationInfoBase +from plwn.readers import nodes as nd +from plwn import exceptions as exc, enums as en + + +__all__ = ( + 'SynsetPropertiesTest', + 'SynsetRelationsTest', + 'SynsetRelationsWithArtificialTest', + 'SynsetRelationsWithArtificialLoopTest', + 'LexicalUnitPropertiesTest', + 'LexicalUnitRelationsTest', + 'ItemOrderingTest', + 'ToDictTest', +) + + +class SynsetPropertiesTest(ut.TestCase): + + _PLWNClass = None # Override in subclass + + def setUp(self): + self.__plwn = self._PLWNClass.from_reader(( + nd.make_synset_node(id=1, definition=u'foobar'), + nd.make_lexical_unit_node( + id=11, + lemma=u'aaa', + pos=en.PoS.n, + variant=1, + synset=1, + unit_index=1, + domain=en.Domain.bhp, + ), + )) + try: + self.__syn = self.__plwn.synset_by_id(1) + except BaseException: + self.__plwn.close() + raise + + def tearDown(self): + self.__plwn.close() + + def test_id(self): + self.assertEqual(self.__syn.id, 1) + + def test_definition(self): + self.assertEqual(self.__syn.definition, u'foobar') + + def test_is_artificial(self): + self.assertIs(self.__syn.is_artificial, False) + + +class SynsetRelationsTest(ut.TestCase): + + _PLWNClass = None # Override in subclass + + def setUp(self): + self.__plwn = self._PLWNClass.from_reader(( + nd.make_relation_type_node( + kind=en.RelationKind.synset, + name=u'foo', + ), + nd.make_relation_type_node( + 
kind=en.RelationKind.synset, + name=u'hiperonimia', + parent=u'foo', + ), + nd.make_relation_type_node( + kind=en.RelationKind.synset, + name=u'hiponimia', + aliases=(u'hipo',), + ), + nd.make_synset_node( + id=1, + related=( + ( + RelationInfoBase.format_name(u'foo', u'hiperonimia'), + 2, + ), + (u'hiponimia', 3), + ), + ), + nd.make_synset_node(id=2, related=((u'hiponimia', 3),)), + nd.make_synset_node(id=3), + nd.make_lexical_unit_node( + id=11, + lemma=u'aaa', + pos=en.PoS.n, + variant=1, + synset=1, + unit_index=1, + domain=en.Domain.bhp, + ), + nd.make_lexical_unit_node( + id=21, + lemma=u'aaa', + pos=en.PoS.n, + variant=2, + synset=2, + unit_index=1, + domain=en.Domain.bhp, + ), + nd.make_lexical_unit_node( + id=31, + lemma=u'aaa', + pos=en.PoS.n, + variant=3, + synset=3, + unit_index=1, + domain=en.Domain.bhp, + ), + )) + try: + self.__syn1 = self.__plwn.synset_by_id(1) + self.__syn2 = self.__plwn.synset_by_id(2) + self.__syn3 = self.__plwn.synset_by_id(3) + self.__rel_hiper = next(iter(self.__plwn.relations_info( + RelationInfoBase.format_name(u'foo', u'hiperonimia'), + en.RelationKind.synset, + ))) + self.__rel_hipo = next(iter(self.__plwn.relations_info( + u'hiponimia', + en.RelationKind.synset, + ))) + except BaseException: + self.__plwn.close() + raise + + def tearDown(self): + self.__plwn.close() + + def test_related(self): + related = tuple(self.__syn1.related(RelationInfoBase.format_name( + u'foo', + u'hiperonimia', + ))) + self.assertEqual(len(related), 1) + self.assertEqual(related[0], self.__syn2) + + def test_related_alias(self): + # Try getting relations by alias + related = tuple(self.__syn1.related(u'hipo')) + self.assertEqual(len(related), 1) + self.assertEqual(related[0], self.__syn3) + + def test_empty_related(self): + self.assertFalse(tuple(self.__syn2.related(u'hiperonimia'))) + + def test_nonexistent_relation(self): + self.assertRaises( + exc.InvalidRelationTypeException, + self.__syn1.related, + u'not', + ) + + def 
test_all_related(self): + # Without parameter select all related + related = frozenset(self.__syn1.related()) + self.assertEqual(len(related), 2) + self.assertIn(self.__syn2, related) + self.assertIn(self.__syn3, related) + + def test_tuple_related(self): + related = frozenset(self.__syn1.related(( + RelationInfoBase.format_name(u'foo', u'hiperonimia'), + u'hipo', + ))) + self.assertEqual(len(related), 2) + self.assertIn(self.__syn2, related) + self.assertIn(self.__syn3, related) + + def test_related_pairs(self): + related = frozenset(self.__syn1.related_pairs(( + RelationInfoBase.format_name(u'foo', u'hiperonimia'), + u'hipo', + ))) + self.assertEqual(len(related), 2) + self.assertIn( + (self.__rel_hiper, self.__syn2), + related, + ) + self.assertIn( + (self.__rel_hipo, self.__syn3), + related, + ) + + def test_relations(self): + self.assertEqual( + frozenset(self.__syn1.relations), + frozenset((self.__rel_hiper, self.__rel_hipo)), + ) + + +class SynsetRelationsWithArtificialTest(ut.TestCase): + """Proper testing of artificial synset skipping requires more complex setup. + + Artificial skipping may skip several nodes in a row. + + :: + hipo .---. + .------->| 3 | + | '---' + | + | hipo .---. + | .---------->| 4 | + | | '---' + | | + .---. hipo .----. mero .---. .---. + | 1 |------>| 2a |-------->| 5 | | 7 | + '---' '----' '---' '---' + | ^ + | hipo .----. hipo | + '---------->| 6a |--------. + '----' | + v + .---. + | 8 | + '---' + """ + + _PLWNClass = None # Override in subclass + + def setUp(self): + # Don't use relation type nodes, as a test for that case. 
+ self.__plwn = self._PLWNClass.from_reader(( + nd.make_synset_node( + id=1, + related=((u'hiponimia', 3), (u'hiponimia', 2)), + ), + nd.make_synset_node( + id=2, + related=( + (u'hiponimia', 4), + (u'hiponimia', 6), + (u'meronimia', 5), + ), + is_artificial=True, + ), + nd.make_synset_node(id=3), + nd.make_synset_node(id=4), + nd.make_synset_node(id=5), + nd.make_synset_node( + id=6, + related=((u'hiponimia', 7), (u'hiponimia', 8)), + is_artificial=True, + ), + nd.make_synset_node(id=7), + nd.make_synset_node(id=8), + nd.make_lexical_unit_node( + id=11, + lemma=u'aaa', + pos=en.PoS.n, + variant=1, + synset=1, + unit_index=1, + domain=en.Domain.bhp, + ), + nd.make_lexical_unit_node( + id=21, + lemma=u'aaa', + pos=en.PoS.n, + variant=2, + synset=2, + unit_index=1, + domain=en.Domain.bhp, + ), + nd.make_lexical_unit_node( + id=31, + lemma=u'aaa', + pos=en.PoS.n, + variant=3, + synset=3, + unit_index=1, + domain=en.Domain.bhp, + ), + nd.make_lexical_unit_node( + id=41, + lemma=u'aaa', + pos=en.PoS.n, + variant=4, + synset=4, + unit_index=1, + domain=en.Domain.bhp, + ), + nd.make_lexical_unit_node( + id=51, + lemma=u'aaa', + pos=en.PoS.n, + variant=5, + synset=5, + unit_index=1, + domain=en.Domain.bhp, + ), + nd.make_lexical_unit_node( + id=61, + lemma=u'aaa', + pos=en.PoS.n, + variant=6, + synset=6, + unit_index=1, + domain=en.Domain.bhp, + ), + nd.make_lexical_unit_node( + id=71, + lemma=u'aaa', + pos=en.PoS.n, + variant=7, + synset=7, + unit_index=1, + domain=en.Domain.bhp, + ), + nd.make_lexical_unit_node( + id=81, + lemma=u'aaa', + pos=en.PoS.n, + variant=8, + synset=8, + unit_index=1, + domain=en.Domain.bhp, + ), + )) + + try: + self.__syn1 = self.__plwn.synset_by_id(1) + self.__rel_hipo = next(iter(self.__plwn.relations_info( + u'hiponimia', + en.RelationKind.synset, + ))) + except BaseException: + self.__plwn.close() + raise + + def tearDown(self): + self.__plwn.close() + + def test_related_noskip(self): + self.assertEqual( + frozenset( + 
self.__syn1.related(u'hiponimia', skip_artificial=False) + ), + frozenset(( + self.__plwn.synset_by_id(2), + self.__plwn.synset_by_id(3), + )), + ) + + def test_related_skip(self): + self.assertEqual( + frozenset(self.__syn1.related(u'hiponimia', skip_artificial=True)), + frozenset(( + self.__plwn.synset_by_id(3), + self.__plwn.synset_by_id(4), + self.__plwn.synset_by_id(7), + self.__plwn.synset_by_id(8), + )), + ) + + def test_related_pairs_noskip(self): + self.assertEqual( + frozenset( + self.__syn1.related_pairs(u'hiponimia', skip_artificial=False) + ), + frozenset(( + ( + self.__rel_hipo, + self.__plwn.synset_by_id(2), + ), + ( + self.__rel_hipo, + self.__plwn.synset_by_id(3), + ), + )), + ) + + def test_related_pairs_skip(self): + self.assertEqual( + frozenset(self.__syn1.related_pairs( + u'hiponimia', + skip_artificial=True, + )), + frozenset(( + ( + self.__rel_hipo, + self.__plwn.synset_by_id(3), + ), + ( + self.__rel_hipo, + self.__plwn.synset_by_id(4), + ), + ( + self.__rel_hipo, + self.__plwn.synset_by_id(7), + ), + ( + self.__rel_hipo, + self.__plwn.synset_by_id(8), + ), + )), + ) + + +class SynsetRelationsWithArtificialLoopTest(ut.TestCase): + """Test for an infinite loop. + + Where skipping artificial synsets would result in an infinite loop. + + :: + foo + .----------. + | | + v | + .---. foo .----. foo .----. foo .---. 
+ | 1 |---->| 2a |---->| 3a |---->| 4 | + '---' '----' '----' '---' + """ + + _PLWNClass = None # Override in subclass + + def setUp(self): + self.__plwn = self._PLWNClass.from_reader(( + nd.make_relation_type_node( + kind=en.RelationKind.synset, + name=u'foo', + ), + nd.make_synset_node(id=1, related=((u'foo', 2),)), + nd.make_synset_node( + id=2, + related=((u'foo', 3),), + is_artificial=True, + ), + nd.make_synset_node( + id=3, + related=((u'foo', 4), (u'foo', 2)), + is_artificial=True, + ), + nd.make_synset_node(id=4), + nd.make_lexical_unit_node( + id=11, + lemma=u'aaa', + pos=en.PoS.n, + variant=1, + synset=1, + unit_index=1, + domain=en.Domain.bhp, + ), + nd.make_lexical_unit_node( + id=21, + lemma=u'aaa', + pos=en.PoS.n, + variant=2, + synset=2, + unit_index=1, + domain=en.Domain.bhp, + ), + nd.make_lexical_unit_node( + id=31, + lemma=u'aaa', + pos=en.PoS.n, + variant=3, + synset=3, + unit_index=1, + domain=en.Domain.bhp, + ), + nd.make_lexical_unit_node( + id=41, + lemma=u'aaa', + pos=en.PoS.n, + variant=4, + synset=4, + unit_index=1, + domain=en.Domain.bhp, + ), + )) + + def tearDown(self): + self.__plwn.close() + + def test_related_noskip(self): + syn1 = self.__plwn.synset_by_id(1) + rel1 = frozenset(syn1.related(u'foo', skip_artificial=False)) + self.assertEqual( + rel1, + frozenset((self.__plwn.synset_by_id(2),)), + ) + + def test_related_skip(self): + syn1 = self.__plwn.synset_by_id(1) + rel1 = frozenset(syn1.related(u'foo', skip_artificial=True)) + self.assertEqual( + rel1, + frozenset((self.__plwn.synset_by_id(4),)), + ) + + def test_related_pairs_noskip(self): + syn1 = self.__plwn.synset_by_id(1) + rel1 = frozenset(syn1.related_pairs(u'foo', skip_artificial=False)) + foo = next(iter(self.__plwn.relations_info(u'foo')), None) + self.assertEqual( + rel1, + frozenset(((foo, self.__plwn.synset_by_id(2)),)), + ) + + def test_related_pairs_skip(self): + syn1 = self.__plwn.synset_by_id(1) + rel1 = frozenset(syn1.related_pairs(u'foo', 
skip_artificial=True)) + foo = next(iter(self.__plwn.relations_info(u'foo')), None) + self.assertEqual( + rel1, + frozenset(((foo, self.__plwn.synset_by_id(4)),)), + ) + + +class LexicalUnitPropertiesTest(ut.TestCase): + + _PLWNClass = None # Override in subclass + + def setUp(self): + self.__plwn = self._PLWNClass.from_reader(( + nd.make_synset_node(id=1), + nd.make_lexical_unit_node( + id=11, + lemma=u'aaa', + pos=en.PoS.n, + variant=1, + synset=1, + unit_index=1, + definition=u'scary noise', + usage_notes=(u'K', u'B'), + external_links=(u'http://foo.bar',), + examples=(u'aaaand',), + examples_sources=(u'me',), + domain=en.Domain.cdel, + verb_aspect=en.VerbAspect.two_aspect, + is_emotional=True, + emotion_markedness=en.EmotionMarkedness.ambiguous, + emotion_names=(en.EmotionName.joy, en.EmotionName.trust), + emotion_valuations=( + en.EmotionValuation.error, + en.EmotionValuation.good, + ), + emotion_example_1=u'bim', + emotion_example_2=u'bam', + ), + )) + try: + self.__syn = self.__plwn.synset_by_id(1) + self.__lex = self.__plwn.lexical_unit_by_id(11) + except BaseException: + self.__plwn.close() + raise + + def tearDown(self): + self.__plwn.close() + + def test_id(self): + self.assertEqual(self.__lex.id, 11) + + def test_lemma(self): + self.assertEqual(self.__lex.lemma, u'aaa') + + def test_pos(self): + self.assertEqual(self.__lex.pos, en.PoS.n) + + def test_variant(self): + self.assertEqual(self.__lex.variant, 1) + + def test_synset(self): + self.assertEqual(self.__lex.synset, self.__syn) + + def test_definition(self): + self.assertEqual(self.__lex.definition, u'scary noise') + + def test_usage_notes(self): + self.assertEqual(self.__lex.usage_notes, (u'K', u'B')) + + def test_external_links(self): + self.assertEqual(self.__lex.external_links, (u'http://foo.bar',)) + + def test_examples(self): + self.assertEqual(self.__lex.sense_examples, (u'aaaand',)) + + def test_examples_sources(self): + self.assertEqual(self.__lex.sense_examples_sources, (u'me',)) + + def 
test_domain(self): + self.assertEqual(self.__lex.domain, en.Domain.cdel) + + def test_verb_aspect(self): + self.assertEqual(self.__lex.verb_aspect, en.VerbAspect.two) + + def test_is_emotional(self): + self.assertTrue(self.__lex.is_emotional) + + def test_emotion_markedness(self): + self.assertIs( + self.__lex.emotion_markedness, + en.EmotionMarkedness.ambiguous, + ) + + def test_emotion_names(self): + self.assertEqual( + self.__lex.emotion_names, + (en.EmotionName.joy, en.EmotionName.trust), + ) + + def test_emotion_valuations(self): + self.assertEqual( + self.__lex.emotion_valuations, + (en.EmotionValuation.error, en.EmotionValuation.good), + ) + + def test_emotion_example(self): + self.assertEqual(self.__lex.emotion_example, u'bim') + + def test_emotion_example_secondary(self): + self.assertEqual(self.__lex.emotion_example_secondary, u'bam') + + +class LexicalUnitRelationsTest(ut.TestCase): + + _PLWNClass = None # Override in subclass + + def setUp(self): + self.__plwn = self._PLWNClass.from_reader(( + nd.make_relation_type_node( + kind=en.RelationKind.lexical, + name=u'deminutywność', + aliases=(u'demi',), + ), + nd.make_relation_type_node( + kind=en.RelationKind.lexical, + name=u'mpar', + parent=u'syn', + ), + nd.make_synset_node(id=1), + nd.make_lexical_unit_node( + id=11, + lemma=u'aaa', + pos=en.PoS.n, + variant=1, + synset=1, + unit_index=1, + domain=en.Domain.bhp, + related=( + (u'deminutywność', 12), + (RelationInfoBase.format_name(u'syn', u'mpar'), 13), + ), + ), + nd.make_lexical_unit_node( + id=12, + lemma=u'bbb', + pos=en.PoS.n, + variant=1, + synset=1, + unit_index=2, + domain=en.Domain.bhp, + related=((u'deminutywność', 13),), + ), + nd.make_lexical_unit_node( + id=13, + lemma=u'ccc', + pos=en.PoS.n, + variant=1, + synset=1, + unit_index=3, + domain=en.Domain.bhp, + verb_aspect=en.VerbAspect.two, + ), + )) + try: + self.__lex11 = self.__plwn.lexical_unit_by_id(11) + self.__lex12 = self.__plwn.lexical_unit_by_id(12) + self.__lex13 = 
self.__plwn.lexical_unit_by_id(13) + self.__rel_mpar = next(iter(self.__plwn.relations_info( + RelationInfoBase.format_name(u'syn', u'mpar'), + en.RelationKind.lexical, + ))) + self.__rel_demi = next(iter(self.__plwn.relations_info( + u'deminutywność', + en.RelationKind.lexical, + ))) + except BaseException: + self.__plwn.close() + raise + + def tearDown(self): + self.__plwn.close() + + def test_related(self): + related = tuple(self.__lex11.related(RelationInfoBase.format_name( + u'syn', + u'mpar', + ))) + self.assertEqual(len(related), 1) + self.assertEqual(related[0], self.__lex13) + + def test_related_alias(self): + related = tuple(self.__lex12.related(u'demi')) + self.assertEqual(len(related), 1) + self.assertEqual(related[0], self.__lex13) + + def test_empty_related(self): + self.assertFalse(tuple(self.__lex12.related( + RelationInfoBase.format_name(u'syn', u'mpar'), + ))) + + def test_nonexistent_relation(self): + self.assertRaises( + exc.InvalidRelationTypeException, + self.__lex11.related, + u'not', + ) + + def test_all_related(self): + # Without parameter select all related + related = frozenset(self.__lex11.related()) + self.assertEqual(len(related), 2) + self.assertIn(self.__lex12, related) + self.assertIn(self.__lex13, related) + + def test_tuple_related(self): + related = frozenset(self.__lex11.related(( + self.__rel_demi, + self.__rel_mpar, + ))) + self.assertEqual(len(related), 2) + self.assertIn(self.__lex12, related) + self.assertIn(self.__lex13, related) + + def test_related_pairs(self): + related = frozenset(self.__lex11.related_pairs()) + self.assertEqual(len(related), 2) + self.assertIn( + (self.__rel_demi, self.__lex12), + related, + ) + self.assertIn( + (self.__rel_mpar, self.__lex13), + related, + ) + + def test_relations(self): + self.assertEqual( + frozenset(self.__lex11.relations), + frozenset((self.__rel_demi, self.__rel_mpar)), + ) + + +class ItemOrderingTest(ut.TestCase): + + _PLWNClass = None # Override in subclass + + def 
setUp(self): + self.__plwn = self._PLWNClass.from_reader(( + nd.make_synset_node(id=1), + nd.make_synset_node(id=2), + nd.make_synset_node(id=3), + nd.make_lexical_unit_node( + id=11, + lemma=u'aaa', + pos=en.PoS.n, + variant=1, + synset=1, + unit_index=1, + domain=en.Domain.bhp, + ), + nd.make_lexical_unit_node( + id=21, + lemma=u'bbb', + pos=en.PoS.n, + variant=1, + synset=2, + unit_index=1, + domain=en.Domain.bhp, + ), + nd.make_lexical_unit_node( + id=22, + lemma=u'ąąą', + pos=en.PoS.n, + variant=2, + synset=2, + unit_index=2, + domain=en.Domain.bhp, + ), + nd.make_lexical_unit_node( + id=31, + lemma=u'ąąą', + pos=en.PoS.n, + variant=1, + synset=3, + unit_index=1, + domain=en.Domain.bhp, + verb_aspect=en.VerbAspect.two, + ), + )) + + def tearDown(self): + self.__plwn.close() + + def test_synsets_order(self): + syn1 = self.__plwn.synset_by_id(1) + syn2 = self.__plwn.synset_by_id(2) + syn3 = self.__plwn.synset_by_id(3) + + list_ = [syn3, syn2, syn1] + list_.sort() + self.assertEqual(list_, [syn1, syn3, syn2]) + + def test_lexical_units_order(self): + lex11 = self.__plwn.lexical_unit_by_id(11) + lex21 = self.__plwn.lexical_unit_by_id(21) + lex22 = self.__plwn.lexical_unit_by_id(22) + lex31 = self.__plwn.lexical_unit_by_id(31) + + list_ = [lex31, lex22, lex21, lex11] + list_.sort() + self.assertEqual(list_, [lex11, lex31, lex22, lex21]) + + +class ToDictTest(ut.TestCase): + + _PLWNClass = None # Override in subclass + + def setUp(self): + self.maxDiff = None + # Try not using the relation type nodes here + self.__plwn = self._PLWNClass.from_reader(( + nd.make_synset_node( + id=1, + definition=u'foo', + related=((u'hiperonimia', 2), (u'hiponimia', 3)), + ), + nd.make_synset_node(id=2), + nd.make_synset_node(id=3), + nd.make_lexical_unit_node( + id=11, + lemma=u'aaa', + pos=en.PoS.v, + variant=1, + synset=1, + unit_index=0, + definition=u'bar', + usage_notes=(u'baz', u'ban'), + external_links=(u'http://a.com',), + examples=(u'nothing',), + 
examples_sources=(u'none',), + domain=en.Domain.rz, + related=((u'derywacyjność', 21),), + verb_aspect=en.VerbAspect.pred, + emotion_markedness=en.EmotionMarkedness.strong_negative, + emotion_names=(en.EmotionName.surprise,), + emotion_valuations=( + en.EmotionValuation.ugliness, + en.EmotionValuation.error, + ), + emotion_example_1=u'Bad thing.', + ), + nd.make_lexical_unit_node( + id=21, + lemma=u'bbb', + pos=en.PoS.n, + variant=1, + synset=2, + unit_index=1, + domain=en.Domain.bhp, + ), + nd.make_lexical_unit_node( + id=31, + lemma=u'ccc', + pos=en.PoS.n, + variant=1, + synset=3, + unit_index=0, + domain=en.Domain.bhp, + ), + )) + self.__lex11_dict = { + u'id': 11, + u'lemma': u'aaa', + u'pos': u'verb', + u'variant': 1, + u'synset': 1, + u'definition': u'bar', + u'usage_notes': (u'baz', u'ban'), + u'external_links': (u'http://a.com',), + u'sense_examples': (u'nothing',), + u'sense_examples_sources': (u'none',), + u'domain': en.Domain.rz.value, + u'verb_aspect': en.VerbAspect.predicative.value, + u'emotion_markedness': en.EmotionMarkedness.minus_m.value, + u'emotion_names': (en.EmotionName.surprise.value,), + u'emotion_valuations': ( + en.EmotionValuation.error.value, + en.EmotionValuation.ugliness.value, + ), + u'emotion_example': u'Bad thing.', + u'emotion_example_secondary': None, + u'str': six.text_type(self.__plwn.lexical_unit_by_id(11)), + } + self.__lex11_dict_with_rel = { + u'related': { + u'derywacyjność': ( + ( + 21, + six.text_type( + self.__plwn.lexical_unit_by_id(21) + ), + ), + ), + }, + } + self.__lex11_dict_with_rel.update(self.__lex11_dict) + + def tearDown(self): + self.__plwn.close() + + def test_lexunit_dict(self): + lex11 = self.__plwn.lexical_unit_by_id(11) + self.assertEqual( + lex11.to_dict(include_related=False), + self.__lex11_dict, + ) + self.assertEqual(lex11.to_dict(), self.__lex11_dict_with_rel) + + def test_synset_dict(self): + syn1 = self.__plwn.synset_by_id(1) + + self.assertEqual( + syn1.to_dict(include_related=False, 
include_units_data=False), + { + u'id': 1, + u'str': six.text_type(syn1), + u'definition': u'foo', + u'units': ( + ( + 11, + six.text_type( + self.__plwn.lexical_unit_by_id(11) + ), + ), + ), + u'is_artificial': False, + }, + ) + self.assertEqual( + syn1.to_dict(include_related=False), + { + u'id': 1, + u'str': six.text_type(syn1), + u'definition': u'foo', + u'units': (self.__lex11_dict,), + u'is_artificial': False, + }, + ) + self.assertEqual( + syn1.to_dict(), + { + u'id': 1, + u'str': six.text_type(syn1), + u'definition': u'foo', + u'units': (self.__lex11_dict_with_rel,), + u'related': { + u'hiperonimia': ( + (2, six.text_type(self.__plwn.synset_by_id(2))), + ), + u'hiponimia': ( + (3, six.text_type(self.__plwn.synset_by_id(3))), + ), + }, + u'is_artificial': False, + }, + ) diff --git a/tests/cases/__init__.py b/tests/cases/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/cases/test_graphmlout.py b/tests/cases/test_graphmlout.py new file mode 100644 index 0000000..b021983 --- /dev/null +++ b/tests/cases/test_graphmlout.py @@ -0,0 +1,202 @@ +# coding: utf8 +from __future__ import absolute_import, division + + +import unittest as ut +import io + +try: + import xml.etree.cElementTree as et +except ImportError: + import xml.etree.ElementTree as et + +from plwn.utils.graphmlout import GraphMLWordNet + + +__all__ = 'GraphMLTest', + + +class GraphMLTest(ut.TestCase): + + def setUp(self): + self.__gmo = GraphMLWordNet() + + def ensure_directed(self): + """Graph type should always be directed.""" + graph = self.__write_and_read() + self.assertEqual( + graph.find(u'./graph').attrib[u'edgedefault'], + u'directed', + ) + + def test_node(self): + """Test adding a node.""" + self.__gmo.add_node(u'foo') + result = self.__write_and_read() + nodes = result.findall(u'./graph/node') + + self.assertEqual(len(nodes), 1) + self.assertEqual(nodes[0].attrib[u'id'], u'foo') + + def test_edge(self): + """Test adding an edge.""" + self.__gmo.add_edge(u'a-b-c', 
u'a', u'b') + result = self.__write_and_read() + edges = result.findall(u'./graph/edge') + + self.assertEqual(len(edges), 1) + self.assertEqual(edges[0].attrib[u'id'], u'a-b-c') + self.assertEqual(edges[0].attrib[u'source'], u'a') + self.assertEqual(edges[0].attrib[u'target'], u'b') + + def test_node_badattr(self): + """Add an attribute, which was not defined, to a node.""" + self.assertRaises( + KeyError, + self.__gmo.add_node, + u'foo', + {u'bar': u'baz'}, + ) + + def test_edge_badattr(self): + """Add an attribute, which was not defined, to an edge.""" + self.assertRaises( + KeyError, + self.__gmo.add_edge, + u'foo-bar', + u'foo', + u'bar', + {u'bar': u'baz'}, + ) + + def test_str_attribute(self): + """Add a string attribute and make sure the key is added.""" + self.__gmo.add_attribute_type( + u'foo', + u'foo_param', + GraphMLWordNet.DATA_TYPE_STR, + ) + # Add the node to an attribute + self.__gmo.add_node(u'oof', {u'foo': u'ala'}) + + result = self.__write_and_read() + keys = result.findall(u'./key') + + self.assertEqual(len(keys), 1) + self.assertEqual(keys[0].attrib[u'id'], u'foo') + self.assertEqual(keys[0].attrib[u'attr.name'], u'foo_param') + self.assertEqual(keys[0].attrib[u'attr.type'], u'string') + self.assertEqual(keys[0].attrib[u'for'], u'node') + + nodedata = result.findall(u"./graph/node[@id='oof']/data") + + self.assertEqual(len(nodedata), 1) + self.assertEqual(nodedata[0].attrib[u'key'], u'foo') + self.assertEqual(nodedata[0].text, u'ala') + + def test_int_attribute(self): + """Add an int attribute and make sure the key is added.""" + self.__gmo.add_attribute_type( + u'foo', + u'foo_param', + GraphMLWordNet.DATA_TYPE_INT, + ) + # Add the node to an attribute + self.__gmo.add_node(u'oof', {u'foo': 1}) + + result = self.__write_and_read() + keys = result.findall(u'./key') + + self.assertEqual(len(keys), 1) + self.assertEqual(keys[0].attrib[u'id'], u'foo') + self.assertEqual(keys[0].attrib[u'attr.name'], u'foo_param') + 
self.assertEqual(keys[0].attrib[u'attr.type'], u'long') + self.assertEqual(keys[0].attrib[u'for'], u'node') + + nodedata = result.findall(u"./graph/node[@id='oof']/data") + + self.assertEqual(len(nodedata), 1) + self.assertEqual(nodedata[0].attrib[u'key'], u'foo') + self.assertEqual(nodedata[0].text, u'1') + + def test_bool_attribute(self): + """Add a boolean attribute and make sure the key is added.""" + self.__gmo.add_attribute_type( + u'foo', + u'foo_param', + GraphMLWordNet.DATA_TYPE_BOOL, + ) + # Add the node to an attribute + self.__gmo.add_node(u'oof', {u'foo': True}) + self.__gmo.add_node(u'boof', {u'foo': False}) + + result = self.__write_and_read() + keys = result.findall(u'./key') + + self.assertEqual(len(keys), 1) + self.assertEqual(keys[0].attrib[u'id'], u'foo') + self.assertEqual(keys[0].attrib[u'attr.name'], u'foo_param') + self.assertEqual(keys[0].attrib[u'attr.type'], u'boolean') + self.assertEqual(keys[0].attrib[u'for'], u'node') + + nodedata = result.findall(u"./graph/node[@id='oof']/data") + + self.assertEqual(len(nodedata), 1) + self.assertEqual(nodedata[0].attrib[u'key'], u'foo') + self.assertEqual(nodedata[0].text, u'true') + + nodedata2 = result.findall(u"./graph/node[@id='boof']/data") + + self.assertEqual(len(nodedata2), 1) + self.assertEqual(nodedata2[0].attrib[u'key'], u'foo') + self.assertEqual(nodedata2[0].text, u'false') + + def test_json_attribute(self): + """Add a JSON attribute and make sure the key is added.""" + self.__gmo.add_attribute_type( + u'foo', + u'foo_param', + GraphMLWordNet.DATA_TYPE_JSON, + ) + # Add the node to an attribute + self.__gmo.add_node(u'oof', {u'foo': (1, 2, u'3')}) + + result = self.__write_and_read() + keys = result.findall(u'./key') + + self.assertEqual(len(keys), 1) + self.assertEqual(keys[0].attrib[u'id'], u'foo') + self.assertEqual(keys[0].attrib[u'attr.name'], u'foo_param') + self.assertEqual(keys[0].attrib[u'attr.type'], u'string') + self.assertEqual(keys[0].attrib[u'for'], u'node') + + nodedata = 
result.findall(u"./graph/node[@id='oof']/data") + + self.assertEqual(len(nodedata), 1) + self.assertEqual(nodedata[0].attrib[u'key'], u'foo') + self.assertEqual(nodedata[0].text, u'[1, 2, "3"]') + + # TODO: No enum tests + + def test_edge_attribute(self): + """Add an ordinary string attribute, but to an edge.""" + self.__gmo.add_attribute_type( + u'foo', + u'foo_param', + GraphMLWordNet.DATA_TYPE_STR, + u'edge', + ) + # Add an edge for this attribute + self.__gmo.add_edge(u'a-b', u'a', u'b', {u'foo': u'bar'}) + + result = self.__write_and_read() + edgedata = result.findall(u"./graph/edge[@id='a-b']/data") + + self.assertEqual(len(edgedata), 1) + self.assertEqual(edgedata[0].attrib[u'key'], u'foo') + self.assertEqual(edgedata[0].text, u'bar') + + def __write_and_read(self): + with io.BytesIO() as ss: + self.__gmo.write(ss) + return et.fromstring(ss.getvalue()) diff --git a/tests/cases/test_sqlite_storage.py b/tests/cases/test_sqlite_storage.py new file mode 100644 index 0000000..d93121b --- /dev/null +++ b/tests/cases/test_sqlite_storage.py @@ -0,0 +1,9 @@ +from __future__ import absolute_import, division + + +from plwn.storages import sqlite as sq +from tests.abstract_cases import load_tests_from_abstract + + +def load_tests(loader, tests, pattern): + return load_tests_from_abstract(loader, 'SQLite', sq.PLWordNet) diff --git a/tests/cases/test_ubylmf_reader.py b/tests/cases/test_ubylmf_reader.py new file mode 100644 index 0000000..a8d7aba --- /dev/null +++ b/tests/cases/test_ubylmf_reader.py @@ -0,0 +1,563 @@ +# -*- coding: utf-8 -*- + +# FIXME Comments down below suggest that there are compatibility issues with +# some tests. Fix and uncomment them. 
+ +from __future__ import absolute_import, division + + +import unittest +from plwn.readers import ubylmf as ur +from plwn.readers.nodes import make_synset_node, make_lexical_unit_node +from plwn.enums import PoS, Domain +from plwn import exceptions as exc +from io import BytesIO +from xml.etree import ElementTree as et +# Python2 +try: + from itertools import izip +# Python3 +except ImportError: + izip = zip + +__all__ = 'UBYLMFReaderTest', + +ENCODING = 'UTF-8' + +test_xml = u"""<?xml version="1.0" encoding="UTF-8" ?> +<LexicalResource dtdVersion="ubyDTD_1_0.dtd" name="plWordnet"> +<Lexicon languageIdentifier="pl" id="1" name="SÅ‚owosieć 2.2"> + +<LexicalEntry id="15" partOfSpeech="noun"> + <Lemma> + <FormRepresentation writtenForm="'patafizyka"/> + </Lemma> + <Sense id="plWN_Sense_628506" index="1" synset="plWN_Synset_396603"> + <Definition> + <Statement statementType="usageNote"> + <TextRepresentation writtenText="specj."/> + </Statement> + <Statement statementType="externalReference"> + <TextRepresentation + writtenText="http://pl.wikipedia.org/wiki/Patafizyka"/> + </Statement> + </Definition> + <MonolingualExternalRef + externalSystem="WordnetLoom PartOfSpeech And SenseIndex" + externalReference="POS[plWN rzeczownik] 0"/> + <SemanticLabel label="noun.plWN_umy" type="domain"/> + </Sense> + <Sense id="plWN_Sense_54584" index="2" synset="plWN_Synset_36078"> + <SenseRelation target="plWN_Sense_17308" relName="deminutywność"/> + <MonolingualExternalRef + externalSystem="WordnetLoom PartOfSpeech And SenseIndex" + externalReference="POS[plWN rzeczownik] 0"/> + <SemanticLabel label="noun.plWN_wytw" type="domain"/> + </Sense> + <Sense id="plWN_Sense_54584" index="3" synset="plWN_Synset_!#!"> + <SenseRelation target="plWN_Sense_17308" relName="deminutywność"/> + <MonolingualExternalRef + externalSystem="WordnetLoom PartOfSpeech And SenseIndex" + externalReference="POS[plWN rzeczownik] 0"/> + <SemanticLabel label="noun.plWN_wytw" type="domain"/> + </Sense> 
+</LexicalEntry> +<Synset id="plWN_Synset_10"> + <SynsetRelation target="plWN_Synset_9139" relName="hiperonimia"/> + <SynsetRelation target="plWN_Synset_19032" relName="wartość_cechy"/> + <SynsetRelation target="plWN_Synset_104177" relName="hiperonimia"/> + <SynsetRelation target="plWN_Synset_105404" relName="hiperonimia"/> + <SynsetRelation target="plWN_Synset_228433" relName="hiperonimia"/> +</Synset> +<Synset id="plWN_Synset_246792"> + <SynsetRelation target="plWN_Synset_245829" relName="hiponimia"/> +</Synset> +</Lexicon> +<SenseAxis id="433581" synsetOne="plWN_Synset_246792" + synsetTwo="WordNet 3.1 synset offset: 4730898"> + <SenseAxisRelation target="433581" relName="Hipo_plWN-PWN"/> +</SenseAxis> +</LexicalResource> +""" + + +class UBYLMFReaderTest(unittest.TestCase): + def test_ubylmf_reader(self): + filelike = BytesIO(b"") + self.assertRaises( + et.ParseError, + next, + ur.ubylmf_reader(filelike), + ) + + lu1 = make_lexical_unit_node( + id=628506, + lemma=u"'patafizyka", + pos=PoS.n, + synset=396603, + unit_index=0, + usage_notes=(u"specj.",), + external_links=(u"http://pl.wikipedia.org/wiki/Patafizyka",), + domain=Domain.umy, + variant=1, + ) + lu2 = make_lexical_unit_node( + id=54584, + lemma=u"'patafizyka", + pos=PoS.n, + synset=36078, + unit_index=0, + domain=Domain.wytw, + related=((u"deminutywność", 17308), ), + variant=2, + ) + s1 = make_synset_node( + id=10, + related=( + (u"hiperonimia", 9139), + (u"wartość_cechy", 19032), + (u"hiperonimia", 104177), + (u"hiperonimia", 105404), + (u"hiperonimia", 228433) + ), + ) + s2 = make_synset_node( + id=246792, + related=((u"hiponimia", 245829),), + ) + iter_corr = iter((lu1, lu2, s1, s2)) + filelike = BytesIO(test_xml.encode('UTF-8')) + for (read, correct) in izip(ur.ubylmf_reader(filelike), iter_corr): + self.assertEqual(read, correct) + + def test_make_lexicalunit(self): + # Wrong sense's ID + xml_sense = et.fromstring( + u""" + <Sense id="plWN_Sense_54584?" 
index="3" synset="plWN_Synset_!#!"> + </Sense> + """.encode(ENCODING) + ) + self.assertRaises( + exc.MalformedIdentifierException, + ur._make_lexicalunit, + et.Element(u"LexicalEntry"), + xml_sense + ) + # Incorrect synset's ID + xml_sense = et.fromstring( + u""" + <Sense id="plWN_Sense_54584" index="3" synset="plWN_Synset_!#!"> + </Sense> + """.encode(ENCODING) + ) + self.assertRaises( + exc.MalformedIdentifierException, + ur._make_lexicalunit, + et.Element(u"LexicalEntry"), + xml_sense + ) + # Incorrect variant (index) + xml_sense = et.fromstring( + u""" + <Sense id="plWN_Sense_54584" index="A" synset="plWN_Synset_33"> + </Sense> + """.encode(ENCODING) + ) + self.assertRaises( + ValueError, + ur._make_lexicalunit, + et.Element(u"LexicalEntry"), + xml_sense + ) + # Missing <Lemma> + xml_lu = et.fromstring( + u""" + <LexicalEntry id="15" partOfSpeech="noun"> + </LexicalEntry> + """.encode(ENCODING) + ) + xml_sense = et.fromstring( + u""" + <Sense id="plWN_Sense_54584" index="0" synset="plWN_Synset_33"> + </Sense> + """.encode(ENCODING) + ) + self.assertRaises( + AttributeError, + ur._make_lexicalunit, + xml_lu, + xml_sense + ) + # Empty <Lemma> + xml_lu = et.fromstring( + u""" + <LexicalEntry id="15" partOfSpeech="noun"> + <Lemma> + <FormRepresentation writtenForm=""/> + </Lemma> + </LexicalEntry> + """.encode(ENCODING) + ) + self.assertRaisesRegexp( + AssertionError, + "Lemma is empty", + ur._make_lexicalunit, + xml_lu, + xml_sense + ) + # Empty PoS + xml_lu = et.fromstring( + u""" + <LexicalEntry id="15" partOfSpeech=""> + <Lemma> + <FormRepresentation writtenForm="'patafizyka"/> + </Lemma> + </LexicalEntry> + """.encode(ENCODING) + ) + self.assertRaisesRegexp( + AssertionError, + "PoS is empty", + ur._make_lexicalunit, + xml_lu, + xml_sense + ) + # Don't check definitions and domain - they are in separate tests + # Incorrect unit index + xml_lu = et.fromstring( + u""" + <LexicalEntry id="15" partOfSpeech="noun"> + <Lemma> + <FormRepresentation 
writtenForm="'patafizyka"/> + </Lemma> + </LexicalEntry> + """.encode(ENCODING) + ) + xml_sense = et.fromstring( + u""" + <Sense id="plWN_Sense_54584" index="0" synset="plWN_Synset_33"> + <SemanticLabel label="noun.plWN_wytw" type="domain"/> + <MonolingualExternalRef + externalSystem="WordnetLoom PartOfSpeech And SenseIndex" + externalReference="POS[plWN rzeczownik] A"/> + </Sense> + """.encode(ENCODING) + ) + self.assertRaises( + exc.MalformedIdentifierException, + ur._make_lexicalunit, + xml_lu, + xml_sense + ) + # Check LU + xml_sense = et.fromstring( + u""" + <Sense id="plWN_Sense_628506" index="1" + synset="plWN_Synset_396603"> + <Definition> + <TextRepresentation writtenText="ExampleDefinition"/> + </Definition> + <Definition> + <Statement statementType="usageNote"> + <TextRepresentation writtenText="specj."/> + </Statement> + <Statement statementType="usageNote"> + <TextRepresentation writtenText="specj1"/> + </Statement> + <Statement statementType="externalReference"> + <TextRepresentation writtenText="http://"/> + </Statement> + </Definition> + <MonolingualExternalRef + externalSystem="WordnetLoom PartOfSpeech And SenseIndex" + externalReference="POS[plWN rzeczownik] 0"/> + <SemanticLabel label="noun.plWN_umy" type="domain"/> + <SenseRelation target="plWN_Sense_17308" + relName="deminutywność"/> + <SenseRelation target="plWN_Sense_17309" + relName="deminutywność"/> + <SenseExample id="0"> + <TextRepresentation + writtenText="1° = (Ï€/180) rad = 60′ = 3600″[##W:]"/> + </SenseExample> + <SenseExample id="1"> + <TextRepresentation writtenText="1[##P:]"/> + </SenseExample> + </Sense> + """.encode(ENCODING) + ) + lu = make_lexical_unit_node( + id=628506, + lemma=u"'patafizyka", + pos=PoS.n, + synset=396603, + unit_index=0, + definition=u"ExampleDefinition", + usage_notes=(u"specj.", u"specj1",), + external_links=("http://",), + examples=(u"1° = (Ï€/180) rad = 60′ = 3600″", u"1"), + examples_sources=(u'W', u'P'), + domain=Domain.umy, + 
related=((u"deminutywność", 17308), (u"deminutywność", 17309)), + variant=1, + ) + self.assertEqual(lu, ur._make_lexicalunit(xml_lu, xml_sense)) + + def test_make_synset(self): + xml_synset = et.fromstring( + u""" + <Synset id="plWN_Synset_246792"> + </Synset> + """.encode(ENCODING) + ) + sn = make_synset_node(id=246792) + self.assertEqual(sn, ur._make_synset(xml_synset)) + + xml_synset = et.fromstring( + u""" + <Synset id="plWN_Synset_246792"> + <Definition> + <TextRepresentation writtenText="ExampleDefinition"/> + </Definition> + </Synset> + """.encode(ENCODING) + ) + sn = make_synset_node(id=246792, definition=u"ExampleDefinition") + self.assertEqual(sn, ur._make_synset(xml_synset)) + + xml_synset = et.fromstring( + u""" + <Synset id="plWN_Synset_246792"> + <SynsetRelation target="plWN_Synset_245829" + relName="hiponimia"/> + </Synset> + """.encode(ENCODING) + ) + sn = make_synset_node(id=246792, related=((u"hiponimia", 245829),)) + self.assertEqual(sn, ur._make_synset(xml_synset)) + + xml_synset = et.fromstring( + u""" + <Synset id="plWN_Synset_246792"> + <Definition> + <TextRepresentation writtenText="ExampleDefinition"/> + </Definition> + <SynsetRelation target="plWN_Synset_245829" relName="hip"/> + <SynsetRelation target="plWN_Synset_245828" relName="ó"/> + </Synset> + """.encode(ENCODING) + ) + sn = make_synset_node( + id=246792, + definition=u"ExampleDefinition", + related=((u"hip", 245829), (u"ó", 245828)), + ) + self.assertEqual(sn, ur._make_synset(xml_synset)) + + xml_synset = et.fromstring( + u""" + <Synset id="plWN_Synset_??"> + </Synset> + """.encode(ENCODING) + ) + self.assertRaises( + exc.MalformedIdentifierException, + ur._make_synset, + xml_synset + ) + + xml_synset = et.fromstring( + u""" + <Synset id="plWN_Synset_246792"> + <SynsetRelation target="plWN_Synset_245829" relName="hip"/> + <SynsetRelation target="plWN_Synset_?" 
relName="hip"/> + </Synset> + """.encode(ENCODING) + ) + sn = make_synset_node(id=246792, related=((u"hip", 245829),)) + self.assertEqual(sn, ur._make_synset(xml_synset)) + + def test_extract_definitions(self): + xml_sense = et.fromstring( + u""" + <Sense id="plWN_Sense_54584" index="3" synset="plWN_Synset_!#!"> + </Sense> + """.encode(ENCODING) + ) + self.assertTupleEqual( + (None, [], []), + ur._extract_definitions(xml_sense) + ) + + xml_sense = et.fromstring( + u""" + <Sense id="plWN_Sense_54584" index="3" synset="plWN_Synset_!#!"> + <Definition> + <TextRepresentation writtenText="ExampleDefinition"/> + </Definition> + </Sense> + """.encode(ENCODING) + ) + self.assertTupleEqual( + (u"ExampleDefinition", [], []), + ur._extract_definitions(xml_sense) + ) + + xml_sense = et.fromstring( + u""" + <Sense id="plWN_Sense_54584" index="3" synset="plWN_Synset_!#!"> + <Definition> + <Statement statementType="?"> + <TextRepresentation writtenText="specj."/> + </Statement> + <Statement statementType="externalReference"> + <TextRepresentation writtenText="http://"/> + </Statement> + <Statement statementType="externalReference"> + <TextRepresentation writtenText="http://1"/> + </Statement> + </Definition> + </Sense> + """.encode(ENCODING) + ) + self.assertTupleEqual( + (None, [], [u"http://", u"http://1"]), + ur._extract_definitions(xml_sense) + ) + + xml_sense = et.fromstring( + u""" + <Sense id="plWN_Sense_54584" index="3" synset="plWN_Synset_!#!"> + <Definition> + <Statement statementType="usageNote"> + <TextRepresentation writtenText="n1"/> + </Statement> + <Statement statementType="usageNote"> + <TextRepresentation writtenText="łóż2"/> + </Statement> + <Statement statementType="?"> + <TextRepresentation writtenText="specj."/> + </Statement> + <Statement statementType="externalReference"> + <TextRepresentation writtenText="http://"/> + </Statement> + <Statement statementType="externalReference"> + <TextRepresentation writtenText="http://1"/> + </Statement> + </Definition> 
+ </Sense> + """.encode(ENCODING) + ) + self.assertTupleEqual( + (None, [u"n1", u"łóż2"], [u"http://", u"http://1"]), + ur._extract_definitions(xml_sense) + ) + + xml_sense = et.fromstring( + u""" + <Sense id="plWN_Sense_54584" index="3" synset="plWN_Synset_!#!"> + <Definition> + </Definition> + <Definition> + </Definition> + <Definition> + </Definition> + </Sense> + """.encode(ENCODING) + ) + self.assertRaisesRegexp( + AssertionError, + r"Too many definitions \(3\)", + ur._extract_definitions, + xml_sense + ) + + def test_get_domain(self): + xml_sense = et.fromstring( + u""" + <Sense id="plWN_Sense_54584" index="3" synset="plWN_Synset_!#!"> + <SemanticLabel label="noun.plWN_wytw" type="domain"/> + </Sense> + """.encode(ENCODING) + ) + self.assertEqual(u"noun.plWN_wytw", ur._get_domain(xml_sense)) + + xml_sense = et.fromstring( + u""" + <Sense id="plWN_Sense_54584" index="3" synset="plWN_Synset_!#!"> + <SemanticLabel label="ółźć" type="domain"/> + </Sense> + """.encode(ENCODING) + ) + self.assertEqual(u"ółźć", ur._get_domain(xml_sense)) + + xml_sense = et.fromstring( + u""" + <Sense id="plWN_Sense_54584" index="3" synset="plWN_Synset_!#!"> + <SemanticLabel label="1" type="domain"/> + <SemanticLabel label="2" type="domain"/> + </Sense> + """.encode(ENCODING) + ) + self.assertRaises(AssertionError, ur._get_domain, xml_sense) + self.assertRaisesRegexp( + AssertionError, + "2 SemanticLabel found, should be 1", + ur._get_domain, + xml_sense + ) + + xml_sense = et.fromstring( + u""" + <Sense id="plWN_Sense_54584" index="3" synset="plWN_Synset_!#!"> + <SemanticLabel label="1" type=""/> + </Sense> + """.encode(ENCODING) + ) + self.assertRaises(AssertionError, ur._get_domain, xml_sense) + # Python3 fails here, so leave it... Unless you have an idea how to + # make it in a cleaver way. 
+ # self.assertRaisesRegexp( + # AssertionError, + # "SemanticLabel has type instead of domain", + # ur._get_domain, + # xml_sense + # ) + + xml_sense = et.fromstring( + u""" + <Sense id="plWN_Sense_54584" index="3" synset="plWN_Synset_!#!"> + <SemanticLabel label="1" type="ĄĘÓÅÅ»"/> + </Sense> + """.encode(ENCODING) + ) + self.assertRaises(AssertionError, ur._get_domain, xml_sense) + # assertRaisesRegexp tries to convert unicode to str implicitly + # without checking explicitly which version of Python is in use + # this won't work... + # self.assertRaisesRegexp( + # AssertionError, + # "SemanticLabel has type ĄĘÓÅÅ» instead of domain", + # ur._get_domain, + # xml_sense + # ) + + def test_extract_id(self): + self.assertEqual(ur._extract_id(u"id_1_and_id_2_234562"), 234562) + self.assertEqual(ur._extract_id(u"234562"), 234562) + self.assertEqual(ur._extract_id(u"łóżźć_234562"), 234562) + self.assertRaises( + exc.MalformedIdentifierException, + ur._extract_id, + u"id_" + ) + self.assertRaises( + exc.MalformedIdentifierException, + ur._extract_id, + u"łóżźć" + ) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/cases/test_wndb_reader.py b/tests/cases/test_wndb_reader.py new file mode 100644 index 0000000..e60f631 --- /dev/null +++ b/tests/cases/test_wndb_reader.py @@ -0,0 +1,667 @@ +# coding: utf8 +from __future__ import absolute_import, division, print_function + +import itertools as itt +import logging +import unittest as ut +import sys + +from plwn import enums as en +from plwn.readers import nodes as nd +from plwn.bases import RelationInfoBase + +_IS_PY2 = sys.version_info.major == 2 + +if _IS_PY2: + from plwn.readers.wndb import WNDBReader + import wndbmockup as wndbm +else: + wndbm = None + WNDBReader = None + + +__all__ = ( + 'GoodTest', + 'NonexistentRelationTest', + 'NonexistentSynsetInRelTest', + 'NonexistentUnitInRelTest', + 'SynsetWithNoUnitTest', + 'SynsetWithNoUnitInRelTest', + 'UnitWithNoSynsetInRelTest', + 'UnitWithNoSynsetTest', + 
'UnitWithNonexistentSynsetTest', +) + + +# WNDBReader in general only works with python 2 (for now) +_py2_only = ut.skipIf(not _IS_PY2, 'Python 2 only functionality') + +_NODE_ORDER = [nd.RelationTypeNode, nd.LexicalUnitNode, nd.SynsetNode] + + +@_py2_only +class GoodTest(ut.TestCase): + """A case where everything's correct, touching all relevant tables.""" + + def setUp(self): + self.__dbm = wndbm.WnDbMockup() + + try: + self.__setup_mock() + except BaseException: + self.__dbm.close() + raise + + def __setup_mock(self): + self.__dbm.add_synset(1, u'Duży pies') + self.__dbm.add_synset(2, u'Duży ssak', abstract=1) + self.__dbm.add_synset(3, u'Duży kundel') + + rel_hipo = self.__dbm.add_relation_type(u'hiponimia', u'hipo') + # Make this relation have a fake parent, for testing + rel_par = self.__dbm.add_relation_type(u'foo', u'f') + rel_hiper = self.__dbm.add_relation_type( + u'hiperonimia', + u'hiper', + parent=rel_par, + ) + + self.__dbm.add_synset_relation(1, 2, rel_hipo) + self.__dbm.add_synset_relation(1, 3, rel_hiper) + + self.__dbm.add_lexical_unit( + synid=1, + lemma=u'psisko', + pos=2, + variant=1, + unitindex=1, + id_=11, + domain=1, + comment=u'##K: pot. 
[##W: Czarne psisko na rogu.]', + ) + self.__dbm.add_lexical_unit( + synid=1, + lemma=u'suczysko', + pos=2, + variant=1, + unitindex=2, + id_=12, + domain=1, + comment=u'##D: Samica dużego psa', + ) + self.__dbm.add_lexical_unit( + synid=2, + lemma=u'ssaczysko', + pos=2, + variant=1, + unitindex=1, + id_=21, + domain=1, + ) + self.__dbm.add_lexical_unit( + synid=3, + lemma=u'kundlisko', + # Make it a verb even if it isn't, to test out + pos=1, + variant=1, + unitindex=1, + id_=31, + domain=2, + comment=u'{##L: www.mieszance.pl} {##L: dogpedia.com/mutt}', + verb_aspect=4, + ) + + self.__dbm.add_emotion( + lex_id=31, + emotions=u'smutek;wstrÄ™t', + valuations=None, + markedness=u'- s', + example1=u'bam', + unit_status=1, + ) + self.__dbm.add_emotion( + lex_id=31, + emotions=None, + valuations=u'dobro;prawda;bob', + markedness=u'-m', + example2=u'bim', + unit_status=1, + super_annotation=1, + ) + # Additional test for coalescing emotion data: emotions and valuations + # must be uniq'd. + self.__dbm.add_emotion( + lex_id=31, + emotions=u'radość;wstrÄ™t', + valuations=u'piÄ™kno;prawda', + markedness=u'+m', + example1=u'uh', + unit_status=1, + ) + + rel_zen = self.__dbm.add_relation_type( + u'żeÅ„skość', + u'zen', + is_syn=False, + ) + + self.__dbm.add_lexical_relation(11, 12, rel_zen) + + def tearDown(self): + self.__dbm.close() + + def runTest(self): + with self.__dbm.sqlalchemy_url_file as db_file: + nodes = _process_read_nodes(self, WNDBReader(db_file)) + + self.assertEqual( + nodes, + set(( + nd.make_relation_type_node( + kind=en.RelationKind.synset, + name=u'hiponimia', + aliases=(u'hipo',), + ), + nd.make_relation_type_node( + kind=en.RelationKind.synset, + name=u'hiperonimia', + aliases=(u'hiper',), + parent=u'foo', + ), + nd.make_relation_type_node( + kind=en.RelationKind.lexical, + name=u'żeÅ„skość', + aliases=(u'zen',), + ), + nd.make_synset_node( + id=1, + definition=u'Duży pies', + related=( + (u'hiponimia', 2), + ( + RelationInfoBase.format_name( + u'foo', 
+ u'hiperonimia', + ), + 3, + ), + ), + ), + nd.make_synset_node( + id=2, + definition=u'Duży ssak', + is_artificial=True, + ), + nd.make_synset_node( + id=3, + definition=u'Duży kundel', + ), + nd.make_lexical_unit_node( + id=11, + lemma=u'psisko', + pos=en.PoS.n, + variant=1, + synset=1, + unit_index=1, + usage_notes=(u'pot.',), + examples=(u'Czarne psisko na rogu.',), + examples_sources=(u'W',), + domain=en.Domain.by_db_number(1), + related=((u'żeÅ„skość', 12),), + ), + nd.make_lexical_unit_node( + id=12, + lemma=u'suczysko', + pos=en.PoS.n, + variant=1, + synset=1, + unit_index=2, + definition=u'Samica dużego psa', + domain=en.Domain.by_db_number(1), + ), + nd.make_lexical_unit_node( + id=21, + lemma='ssaczysko', + pos=en.PoS.n, + variant=1, + synset=2, + unit_index=1, + domain=en.Domain.by_db_number(1), + ), + nd.make_lexical_unit_node( + id=31, + lemma=u'kundlisko', + pos=en.PoS.v, + variant=1, + synset=3, + unit_index=1, + external_links=(u'www.mieszance.pl', u'dogpedia.com/mutt'), + domain=en.Domain.by_db_number(2), + verb_aspect=en.VerbAspect.two, + is_emotional=True, + emotion_markedness=en.EmotionMarkedness.strong_negative, + emotion_names=( + en.EmotionName.radosc, + en.EmotionName.smutek, + en.EmotionName.wstret, + ), + emotion_valuations=( + en.EmotionValuation.dobro, + en.EmotionValuation.piekno, + en.EmotionValuation.prawda, + ), + emotion_example_1=u'bam', + emotion_example_2=u'bim', + ), + )), + ) + + +@_py2_only +class UnitWithNoSynsetTest(ut.TestCase): + """Try to add a unit belonging to no synset. + + Should skip that synset. 
+ """ + + def setUp(self): + self.__dbm = wndbm.WnDbMockup() + + try: + self.__setup_mock() + except BaseException: + self.__dbm.close() + raise + + def __setup_mock(self): + self.__dbm.add_synset(1) + # A correct unit + self.__dbm.add_lexical_unit(1, u'a', 2, 1, id_=1, domain=1) + # An incorrect unit + with self.__dbm.no_foreign_keys: + self.__dbm.add_lexical_unit(None, u'b', 2, 1, id_=2, domain=2) + + def tearDown(self): + self.__dbm.close() + + def runTest(self): + with self.__dbm.sqlalchemy_url_file as db_file: + nodes = _process_read_nodes(self, WNDBReader(db_file)) + + self.assertEqual( + nodes, + set(( + nd.make_synset_node(id=1), + nd.make_lexical_unit_node( + id=1, + lemma=u'a', + pos=en.PoS.n, + variant=1, + synset=1, + unit_index=0, + domain=en.Domain.bhp, + ), + )), + ) + + +@_py2_only +class UnitWithNonexistentSynsetTest(ut.TestCase): + """Unit has a synset that does not exist.""" + + def setUp(self): + self.__dbm = wndbm.WnDbMockup() + + try: + self.__setup_mock() + except BaseException: + self.__dbm.close() + raise + + def __setup_mock(self): + self.__dbm.add_synset(1) + # A correct unit + self.__dbm.add_lexical_unit(1, u'a', 2, 1, id_=1, domain=1) + # An incorrect unit + with self.__dbm.no_foreign_keys: + self.__dbm.add_lexical_unit(2, u'b', 2, 1, id_=2, domain=1) + + def tearDown(self): + self.__dbm.close() + + def runTest(self): + with self.__dbm.sqlalchemy_url_file as db_file: + nodes = _process_read_nodes(self, WNDBReader(db_file)) + + self.assertEqual( + nodes, + set(( + nd.make_synset_node(id=1), + nd.make_lexical_unit_node( + id=1, + lemma=u'a', + pos=en.PoS.n, + variant=1, + synset=1, + unit_index=0, + domain=en.Domain.by_db_number(1), + ), + )), + ) + + +@_py2_only +class SynsetWithNoUnitTest(ut.TestCase): + """An empty synset.""" + + def setUp(self): + self.__dbm = wndbm.WnDbMockup() + + try: + self.__setup_mock() + except BaseException: + self.__dbm.close() + raise + + def __setup_mock(self): + self.__dbm.add_synset(1) + 
self.__dbm.add_synset(2) + # One synset is going to be correct + self.__dbm.add_lexical_unit(1, u'a', 2, id_=1, domain=1) + + def tearDown(self): + self.__dbm.close() + + def runTest(self): + # The empty synset should be skipped + with self.__dbm.sqlalchemy_url_file as db_file: + nodes = _process_read_nodes(self, WNDBReader(db_file)) + + self.assertEqual( + nodes, + set(( + nd.make_synset_node(id=1), + nd.make_lexical_unit_node( + id=1, + lemma=u'a', + pos=en.PoS.n, + variant=1, + synset=1, + unit_index=0, + domain=en.Domain.by_db_number(1), + ), + )), + ) + + +@_py2_only +class NonexistentSynsetInRelTest(ut.TestCase): + """Try to add a relation to a bogus synset.""" + + def setUp(self): + self.__dbm = wndbm.WnDbMockup() + + try: + self.__setup_mock() + except BaseException: + self.__dbm.close() + raise + + def __setup_mock(self): + self.__dbm.add_synset(1) + # A synset needs a unit + self.__dbm.add_lexical_unit(1, u'a', 2, id_=1, domain=1) + # A relation is needed, any one + self.__dbm.add_relation_type(u'rel', u'r', id_=1) + # Now, relate to a synset which does not exist + with self.__dbm.no_foreign_keys: + self.__dbm.add_synset_relation(1, 2, 1) + + def tearDown(self): + self.__dbm.close() + + def runTest(self): + # Relation should be omitted, but synset and unit kept + with self.__dbm.sqlalchemy_url_file as db_file: + nodes = _process_read_nodes(self, WNDBReader(db_file)) + + self.assertEqual( + nodes, + set(( + nd.make_synset_node(id=1), + nd.make_lexical_unit_node( + id=1, + lemma=u'a', + pos=en.PoS.n, + variant=1, + synset=1, + unit_index=0, + domain=en.Domain.by_db_number(1), + ), + )), + ) + + +@_py2_only +class NonexistentUnitInRelTest(ut.TestCase): + """Try to add a lexical relation to a bogus unit.""" + + def setUp(self): + self.__dbm = wndbm.WnDbMockup() + + try: + self.__setup_mock() + except BaseException: + self.__dbm.close() + raise + + def __setup_mock(self): + self.__dbm.add_synset(1) + self.__dbm.add_lexical_unit(1, u'a', 2, id_=1, domain=1) + # 
Now, use any relation to link to nonexisting unit + self.__dbm.add_relation_type(u'rel', u'r', is_syn=False, id_=1) + with self.__dbm.no_foreign_keys: + self.__dbm.add_lexical_relation(1, 2, 1) + + def tearDown(self): + self.__dbm.close() + + def runTest(self): + # As with synset, relation should be ignored, but unit and synset + # should be kept + with self.__dbm.sqlalchemy_url_file as db_file: + nodes = _process_read_nodes(self, WNDBReader(db_file)) + + self.assertEqual( + nodes, + set(( + nd.make_synset_node(id=1), + nd.make_lexical_unit_node( + id=1, + lemma=u'a', + pos=en.PoS.n, + variant=1, + synset=1, + unit_index=0, + domain=en.Domain.by_db_number(1), + ), + )), + ) + + +@_py2_only +class NonexistentRelationTest(ut.TestCase): + """Synsets / units exist, but the relation between them is undefined.""" + + # XXX Only synset relation is checked + def setUp(self): + self.__dbm = wndbm.WnDbMockup() + + try: + self.__setup_mock() + except BaseException: + self.__dbm.close() + raise + + def __setup_mock(self): + self.__dbm.add_synset(1) + self.__dbm.add_synset(2) + self.__dbm.add_lexical_unit(1, u'a', 2, id_=1, domain=1) + self.__dbm.add_lexical_unit(2, u'b', 2, id_=2, domain=1) + # Now, an undefined relation + with self.__dbm.no_foreign_keys: + self.__dbm.add_synset_relation(1, 2, 1) + + def tearDown(self): + self.__dbm.close() + + def runTest(self): + # Both synsets and units should be retained + with self.__dbm.sqlalchemy_url_file as db_file: + nodes = _process_read_nodes(self, WNDBReader(db_file)) + + self.assertEqual( + nodes, + set(( + nd.make_synset_node(id=1), + nd.make_synset_node(id=2), + nd.make_lexical_unit_node( + id=1, + lemma=u'a', + pos=en.PoS.n, + variant=1, + synset=1, + unit_index=0, + domain=en.Domain.by_db_number(1), + ), + nd.make_lexical_unit_node( + id=2, + lemma=u'b', + pos=en.PoS.n, + variant=1, + synset=2, + unit_index=0, + domain=en.Domain.by_db_number(1), + ), + )), + ) + + +@_py2_only +class UnitWithNoSynsetInRelTest(ut.TestCase): + 
"""Unit belongs to no synset and appears in an lexical relation.""" + + def setUp(self): + self.__dbm = wndbm.WnDbMockup() + + try: + self.__setup_mock() + except BaseException: + self.__dbm.close() + raise + + def __setup_mock(self): + self.__dbm.add_synset(1) + self.__dbm.add_lexical_unit(1, u'a', 2, id_=1, domain=1) + # Now, use any relation to link to a unit created with no synset + self.__dbm.add_relation_type(u'rel', u'r', is_syn=False, id_=1) + with self.__dbm.no_foreign_keys: + self.__dbm.add_lexical_unit(None, u'b', 2, id_=2, domain=1) + self.__dbm.add_lexical_relation(1, 2, 1) + + def tearDown(self): + self.__dbm.close() + + def runTest(self): + # Both the relation and the bogus unit should be ignored + with self.__dbm.sqlalchemy_url_file as db_file: + nodes = _process_read_nodes(self, WNDBReader(db_file)) + + self.assertEqual( + nodes, + set(( + nd.make_synset_node(id=1), + nd.make_lexical_unit_node( + id=1, + lemma=u'a', + pos=en.PoS.n, + variant=1, + synset=1, + unit_index=0, + domain=en.Domain.by_db_number(1), + ), + )), + ) + + +@_py2_only +class SynsetWithNoUnitInRelTest(ut.TestCase): + """A synset in relation exists, but has no synsets.""" + + def setUp(self): + self.__dbm = wndbm.WnDbMockup() + + try: + self.__setup_mock() + except BaseException: + self.__dbm.close() + raise + + def __setup_mock(self): + self.__dbm.add_synset(1) + # A synset needs a unit + self.__dbm.add_lexical_unit(1, u'a', 2, id_=1, domain=1) + # A relation is needed, any one + self.__dbm.add_relation_type(u'rel', u'r', id_=1) + # Now, create an empty synset and relate to it + self.__dbm.add_synset(2) + with self.__dbm.no_foreign_keys: + self.__dbm.add_synset_relation(1, 2, 1) + + def tearDown(self): + self.__dbm.close() + + def runTest(self): + # Relation should be omitted, as well as the empty synset, but synset + # and unit kept + with self.__dbm.sqlalchemy_url_file as db_file: + nodes = _process_read_nodes(self, WNDBReader(db_file)) + + self.assertEqual( + nodes, + set(( + 
nd.make_synset_node(id=1), + nd.make_lexical_unit_node( + id=1, + lemma=u'a', + pos=en.PoS.n, + variant=1, + synset=1, + unit_index=0, + domain=en.Domain.by_db_number(1), + ), + )), + ) + + +def setUpModule(): + logging.getLogger('wncomments').addHandler(logging.StreamHandler()) + + +def _process_read_nodes(utest, nodeiter): + type_order = [] + all_nodes = set() + + for k, g in itt.groupby(nodeiter, type): + type_order.append(k) + all_nodes.update(g) + + # Test the order, and return the nodes for further testing. + # The order must be: relation types, lexical units and synsets, with no + # breaks. The first element (relation types) is optional, so don't count it + # if the order list is shorter. + utest.assertEqual( + type_order, + _NODE_ORDER[1:] if len(type_order) < 3 else _NODE_ORDER, + ) + return all_nodes diff --git a/tests/setuptools_loader.py b/tests/setuptools_loader.py new file mode 100644 index 0000000..a62a2f9 --- /dev/null +++ b/tests/setuptools_loader.py @@ -0,0 +1,16 @@ +import locale +import unittest as ut + + +def setuptools_load_tests(): + """This function should be set as ``test_suite`` in ``setup.py``. + + Setuptools doesn't honor ``load_tests`` protocol and would import test from + a package many times, if told that the ``test_suite`` is a package, so it + needs to be pointed to a function which does the importing. 
+ + Also, make sure sorting locale is Polish + """ + + locale.setlocale(locale.LC_COLLATE, ('pl_PL', 'UTF-8')) + return ut.defaultTestLoader.discover('tests/cases', top_level_dir='tests') diff --git a/tox.ini b/tox.ini index 66929ec..c90605f 100644 --- a/tox.ini +++ b/tox.ini @@ -26,16 +26,19 @@ max-line-length = 80 [pydocstyle] +# D100 Missing docstring in public module # D101 Missing docstring in public class # D102 Missing docstring in public method # D103 Missing docstring in public function # D104 Missing docstring in public package # D105 Missing docstring in magic method +# D107: Missing docstring in __init__ # D203 1 blank line required before class docstring # D213 Multi-line docstring summary should start at the second line # D214 Section is over-indented # D215 Section underline is over-indented # D401 First line should be in imperative mood; try rephrasing +# D403: First word of the first line should be properly capitalized # D405 Section name should be properly capitalized # D406 Section name should end with a newline # D407 Missing dashed underline after section @@ -43,6 +46,6 @@ max-line-length = 80 # D409 Section underline should match the length of its name # D410 Missing blank line after section # D411 Missing blank line before section -ignore = D101,D102,D103,D104,D105,D203,D213,D214,D215,D401,D405,D406,D407,D408,D409,D410,D411 +ignore = D100,D101,D102,D103,D104,D105,D107,D203,D213,D214,D215,D401,D403,D405,D406,D407,D408,D409,D410,D411 match-dir = ^(?!\.tox|venv).* match = ^(?!setup).*\.py -- GitLab