pax_global_header00006660000000000000000000000064145147752620014527gustar00rootroot0000000000000052 comment=642727d4fe0d6262019ee05a9dce9ff9fba045e4 LanguageMachines-timbl-642727d/000077500000000000000000000000001451477526200163105ustar00rootroot00000000000000LanguageMachines-timbl-642727d/.dockerignore000066400000000000000000000001241451477526200207610ustar00rootroot00000000000000.git .cache .* _* *.cache *.pyc build *.egg-info gource* *.tar.gz *.pdf TODO *.lock LanguageMachines-timbl-642727d/.github/000077500000000000000000000000001451477526200176505ustar00rootroot00000000000000LanguageMachines-timbl-642727d/.github/workflows/000077500000000000000000000000001451477526200217055ustar00rootroot00000000000000LanguageMachines-timbl-642727d/.github/workflows/badge.svg000066400000000000000000000061641451477526200234770ustar00rootroot00000000000000 LanguageMachines-timbl-642727d/.github/workflows/timbl.yml000066400000000000000000000074361451477526200235510ustar00rootroot00000000000000name: C/C++ CI on: push: branches: [master, valueclass] paths: - 'src/**' - 'include/**' - '.github/workflows/**' pull_request: branches: [master] jobs: notification: runs-on: ubuntu-latest name: Notify start to irc-gitlama outputs: branch: ${{ steps.extract_branch.outputs.branch }} steps: - name: Extract Branch name shell: bash run: echo "branch=$(echo ${GITHUB_REF#refs/heads/})" >> $GITHUB_OUTPUT id: extract_branch - name: IRC notification uses: Gottox/irc-message-action@v2 with: server: irc.uvt.nl channel: '#gitlama' nickname: GitHub message: > ${{ github.actor }} started a build of ${{ github.event.repository.name }} [${{ steps.extract_branch.outputs.branch }}] build: runs-on: ${{ matrix.os }} needs: notification strategy: matrix: os: [ubuntu-latest, macos-latest] compiler: [g++, clang++] steps: - name: Cancel Previous Runs uses: styfle/cancel-workflow-action@0.11.0 with: access_token: ${{ github.token }} - uses: actions/checkout@v3 - name: Install Build Environment run: | if [ 
"$RUNNER_OS" == "Linux" ]; then sudo apt-get install pkg-config autoconf-archive else brew upgrade; brew install pkg-config brew install autoconf-archive brew install autoconf brew install automake fi - name: Install Dependencies run: | if [ "$RUNNER_OS" == "Linux" ]; then sudo apt-get install libicu-dev libxml2-dev libbz2-dev sudo apt-get install zlib1g-dev cppcheck else brew install libxml2; brew install bzip2; brew install zlib; brew install cppcheck fi - name: install TiccUtils env: CXX: ${{ matrix.compiler }} run: | git clone https://github.com/LanguageMachines/ticcutils; cd ticcutils; bash bootstrap.sh; ./configure; make; sudo make install; cd ..; - name: bootstrap run: sh bootstrap.sh - name: configure env: CXX: ${{ matrix.compiler }} run: ./configure - name: compiler-id id: compiler run: | id=$(echo ${{matrix.compiler}} | cut -d\+ -f1) echo "id=$id" >> $GITHUB_ENV - name: Static Code-check run: cppcheck --enable=all --quiet --error-exitcode=0 . - name: make run: make - name: install run: sudo make install - name: make check env: CXX: ${{ matrix.compiler }} run: LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/lib make check continue-on-error: true - name: show log run: cat src/test-suite.log - name: Notify IRC of failure if: ${{ failure() }} uses: Gottox/irc-message-action@v2 with: server: irc.uvt.nl channel: '#gitlama' nickname: GH-${{ runner.os }}-${{ env.id }} message: "timbl [${{ needs.notification.outputs.branch }}] build with ${{ matrix.compiler }} by ${{ github.actor }} on ${{ matrix.os }}: \u00034FAIL\u0003" - name: Notify IRC of succes if: ${{ success() }} uses: Gottox/irc-message-action@v2 with: server: irc.uvt.nl channel: '#gitlama' nickname: GH-${{ runner.os }}-${{ env.id }} message: "timbl [${{ needs.notification.outputs.branch }}] build with ${{ matrix.compiler }} by ${{ github.actor }} on ${{ matrix.os }}: \u00033SUCCESS\u0003" 
LanguageMachines-timbl-642727d/.gitignore000066400000000000000000000003551451477526200203030ustar00rootroot00000000000000*~ *.gz Makefile Makefile.in compile config.guess config.h config.h.in config.log config.status config.sub configure INSTALL aclocal.m4 autom4te.cache/ depcomp install-sh libtool ltmain.sh missing stamp-h1 test-driver timbl.pc ChangeLog LanguageMachines-timbl-642727d/AUTHORS000066400000000000000000000016461451477526200173670ustar00rootroot00000000000000TiMBL authors Lead programmer: Ko van der Sloot Code, algorithm, and design contributions by: Peter Berck Antal van den Bosch Walter Daelemans Maarten van Gompel Ton Weijters Jakub Zavrel Contributors: People who contributed to Timbl by suggesting improvements, filing bug reports, asking the right questions etc.: Robert Andersson Vincent Van Asch Joris Bleys Johan Bos Joan Bresnan Stefan Breuer Sabine Buchholz Bertjan Busser Sander Canisius Giovanni Cassani Win Carus Felix Filoz Alan Frankel Sven Hartrumpf Iris Hendrickx Lyndon Hiew Steve Hunt Valentin Jijkoun Gunn Inger Lyse Svetoslav Marinov Erwin Marsi Liam McGrath Jens Nilsson Ties Kemper Tom DePlonty Adam Radziszewski Albert Russel Yvan Saeys Frank Scheelen Armin Schmidt Olaf Seibert Gabriel Skantze Carline Sporleder Herman Stehouwer Erik Tjong Kim Sang Joseph Turian Frederik Vaassen Corne Versloot Colin Wilson Linda Yung Bram Vandekerckhove Menno van Zaanen LanguageMachines-timbl-642727d/COPYING000077500000000000000000001045121451477526200173510ustar00rootroot00000000000000 GNU GENERAL PUBLIC LICENSE Version 3, 29 June 2007 Copyright (C) 2007 Free Software Foundation, Inc. Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The GNU General Public License is a free, copyleft license for software and other kinds of works. The licenses for most software and other practical works are designed to take away your freedom to share and change the works. 
By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it. For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. 
Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. The precise terms and conditions for copying, distribution and modification follow. TERMS AND CONDITIONS 0. Definitions. "This License" refers to version 3 of the GNU General Public License. "Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. "The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations. To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. A "covered work" means either the unmodified Program or a work based on the Program. 
To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. 1. Source Code. The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. 
A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. The Corresponding Source for a work in source code form is that same work. 2. Basic Permissions. All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. 
You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 3. Protecting Users' Legal Rights From Anti-Circumvention Law. No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. 4. Conveying Verbatim Copies. You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. 
You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. 5. Conveying Modified Source Versions. You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: a) The work must carry prominent notices stating that you modified it, and giving a relevant date. b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. 6. Conveying Non-Source Forms. 
You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. 
Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. "Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. 
If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. 7. Additional Terms. "Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. 
When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or d) Limiting the use for publicity purposes of names of licensors or authors of the material; or e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. 
If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. 8. Termination. You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. 
If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. 9. Acceptance Not Required for Having Copies. You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. 10. Automatic Licensing of Downstream Recipients. Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. 
For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. 11. Patents. A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's "contributor version". A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. 
If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. "Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. 
You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. 12. No Surrender of Others' Freedom. If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. 13. Use with the GNU Affero General Public License. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. 
The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. 15. Disclaimer of Warranty. THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. 
SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. Limitation of Liability. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 17. Interpretation of Sections 15 and 16. If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. Copyright (C) This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. 
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see . Also add information on how to contact you by electronic and paper mail. If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode: Copyright (C) This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an "about box". You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see . The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. 
But first, please read .LanguageMachines-timbl-642727d/Dockerfile000066400000000000000000000026621451477526200203100ustar00rootroot00000000000000FROM alpine:latest #VERSION can be: # - stable: builds latest stable versions from source (default) # - distro: uses packages as provided by Alpine Linux (may be slightly out of date) # - devel: latest development version (git master/main branch) ARG VERSION="stable" LABEL org.opencontainers.image.authors="Maarten van Gompel " LABEL description="timbl - tilburg memory-based learner" RUN mkdir -p /data RUN mkdir -p /usr/src/timbl COPY . /usr/src/timbl RUN if [ "$VERSION" = "distro" ]; then \ rm -Rf /usr/src/timbl &&\ echo -e "----------------------------------------------------------\nNOTE: Installing latest release as provided by Alpine package manager.\nThis version may diverge from the one in the git master tree or even from the latest release on github!\nFor development, build with --build-arg VERSION=development.\n----------------------------------------------------------\n" &&\ apk update && apk add timbl; \ else \ PACKAGES="libtar libbz2 icu-libs libxml2 libgomp libstdc++" &&\ BUILD_PACKAGES="build-base autoconf-archive autoconf automake libtool libtar-dev bzip2-dev icu-dev libxml2-dev git" &&\ apk add $PACKAGES $BUILD_PACKAGES &&\ cd /usr/src/ && ./timbl/build-deps.sh &&\ cd timbl && sh ./bootstrap.sh && ./configure && make && make install &&\ apk del $BUILD_PACKAGES && rm -Rf /usr/src; \ fi WORKDIR / ENTRYPOINT [ "timbl" ] LanguageMachines-timbl-642727d/MAINTAINERS000066400000000000000000000001551451477526200200060ustar00rootroot00000000000000Maarten van Gompel (KNAW Humanities Cluster) Ko van der Sloot LanguageMachines-timbl-642727d/Makefile.am000066400000000000000000000006201451477526200203420ustar00rootroot00000000000000 ACLOCAL_AMFLAGS =-I m4 --install SUBDIRS = src include demos docs m4 EXTRA_DIST = bootstrap.sh AUTHORS TODO NEWS README.md timbl.pc.in codemeta.json pkgconfigdir = $(libdir)/pkgconfig 
pkgconfig_DATA = timbl.pc ChangeLog: NEWS git pull; git2cl > ChangeLog docker: docker build -t timbl:latest . docker-dev: docker build -t timbl:dev --build-arg VERSION=development . deps: ./build-deps.sh LanguageMachines-timbl-642727d/NEWS000066400000000000000000000153131451477526200170120ustar00rootroot000000000000006.9 2023-10-21 [Ko van der Sloot] * better code: const correctness etc. 6.8.2 2023-02-22 [Ko van der Sloot] * plugged a memory leak * C++ code quality improved * removed dependency on deprecated sprintf function * removed dependency on libtar 6.8.1 2023-01-04 [Ko van der Sloot] * fix for some odd distro issues * for now re-added 2 backward compatability functions 6.8 2023-01-02 [Ko van der Sloot] * major code refactoring - BREAKS API and ABI - library bumped - getting rid of a lot of pointers and C-style arrays - removed C-style casts - Unicode is the default now for most functions. Some 'string' functions are still available in the API. - In general modernizing to C++11 - weeded out CppCheck warnings * improved GitHub action 6.7 2022-07-22 [Maarten van Gompel] * updated metadata (codemeta.json) following new (proposed) CLARIAH requirements (CLARIAH/clariah-plus#38) * added builds-deps.sh for automatically building and installing dependencies * added Dockerfile and instructions * no functional changes 6.6 2020-12-15 [Ko vd Sloot] * Internally we use NFC normalized UnicodeString's now. Timbl should be robust for UTF8 files, even exotic languages. * added some Unicode fuctions to the API * bumped library version * several code refactorings * added the possibility to use the options -f and -i, without -t 6.5 2020-04-15 [Ko vd Sloot] * adapted to the newest TiCC::CommandLine implementation * small code refactorings 6.4.14 2019-10-21 [Ko vd Sloot] * added JSON support. Still EXPERIMENTAL! the JSON syntax might change in the future. So handle with care. * confidence score calculation is now a real TimblExperiment member * removed Boost dependency. 
6.4.13 2018-11-28 [Ko van der Sloot] - added a '--limit' option to use only the most significant features 6.4.12 2018-05-16 [Ko van der Sloot] Bugfix release: - updated usage(). Info on -G 2 option was wrong. - changed an error message to be more clear. - fixed building of the TeX documentation [Maarten van Gompel] - Added codemeta.json metadata 6.4.11 2018-01-09 [Ko van der Sloot] Bugfix release: - Fixed a major bug in similarity metric calculations. (Cosine and Dot product) 6.4.10 2017-11-09 [Ko van der Sloot] Bugfix release: - allow for spaces in TABBED input (they are significant) - corrected some typos in messages and man page - minor code refactorings 6.4.9 2017-05-04 [Ko van der Sloot] Maintenance release: - removed unused/non-functional functions from the API - code refactoring. Mostly based on CPPCHECK static analyzer. - small bugs: -e options didn't always do what you expected - added missing files in docs [Maarten van Gompel] - updated README.md 6.4.8 2016-07-11 [Ko van der Sloot] Maintance release: - code refactoring and improvement - relying more on ticcutils - fixed exit codes - accept long options: --version and --help - fix out-of-range problem in Sparse Format 6.4.7 2016-01-14 [Ko van der Sloot][Maarten van Gompel] * repository moved to GitHub * added travis support * code updates. (clearer code mainly) * depending a bit more on ticcutils (CommanLine, StringOps) * some small bug fixes (LOO with a 1 line file) 6.4.6 2014-09-23 [Ko van der Sloot] * release 6.4.5 2014-09-16 * small bug fixes 6.4.4 2013-04-03 * rely more on ticcutils stuff. A lot of functions are moved there * added a GetAccuracy option to the API * bug fix in Choppers.cxx 6.4.3 2012-10-11 * added an --occurrences option for training/testing files with an occurrence value. * made Tree.cxx and Trie.h 'omp thread-safe' and moved them to ticcutils * added a "Tabbed' inputformat (© Maarten van Gompel) * The Micro Avagare F-score calculation is now according to the Manual. 
There were small differences caused by a mixup of test and train data. 6.4.2 2011-12-20 * start to use Requires.private in timbl.pc * added a 'check' target to Make system * achieved a considerable speedup for multithreaded testing. * fixed a small problem in LogBuffer. Also simplified and cleaned up LogBuffer and LogStream code. All dependencies need recompiling! * implemented +vcf (confidence) output * The -T option for TreeOrdening is now named --Treeorder * fixed tiebreaking for -R (random) option, closes bug 43, again. * some small fixes for '-pedantic' compiler option * avoid zero result in Exponential Decay (bug 89). * removed unused relativeWeight() function. (was duplicated) 6.4.1 2011-08-25 [ Ko van der Sloot ] - added Version() and VersionName() functions. We want them for every member of the family - fixed a problem with including 'config'h' in the API - fixed a problem with normalization on empty distributions. - added a Confidence( class ) function to the instances API. returns the Weight of 'class'. Which is influenced by normalization! - added logProbability normalization - the +vS option was not always honoured. Now Timbl shuts it's big mouth better - Expand() is now also enabled for TRIBL and TRIBL2 6.4.0 - decapped Timbl and libTimbl to timbl and libtimbl this will shake the whole timbl family tree! - small fixes to survive -pedantic compiler option without warnings 6.3.4 - we now support incremental learning from a file on the command line. - implemented a --clones option to use multiple threads for testing - fixed bug 58. Emit Error when reading an InstanceBase without a test. - fixed bug 61. Give a Warning when a trainingset contains only 1 class. - cleaned up build system 6.3.3 - several small fixes. 6.3.2 - fixed bug 44. Segfault in weird cases - fixed bug 45. Needless processing of traindata when required option is missing - fixed bug 46. Tribl2 sometimes fails to correctly output +v+k+n - fixed bug 47. 
Unclear error message when InnerProduct fails - several small uncritical enhancements 6.3.1 - 2010-11-17 - Little API change in TimblOptions (more clear i hope) - Little bug fixes and improvement (logging mostly) - Moved LogStream stuff back in from TimblServer 6.2.3 forgot to edit this file 6.2.2 forgot to edit this file 6.2.1 - 2009-11-30 - Fixed compilation problem on Cygwin - Added functions to API - Improved server functionality (undocumented yet) 6.2.0 - 2009-11-03 - Stable release 6.1.99.0.20091021.1 - 2009-10-21 - Another snapshot from SVN. Now needs libxml2 for building. 6.1.99.0.20091014.1 - 2009-10-14 - Another snapshot from SVN. (By mistake, aka 6.2.0). 6.2.0.pre3 - 2009-10-05 - Another snapshot from SVN. 6.2.0-pre1 - 2009-09-03 - snapshot from SVN. 2009-08-31: Ko vd Sloot It's been a long time sine the prevous news lot of overhauling of the code took place 2008-03-04: Ko vd Sloot numerous small bug fixes. getting ready for 6.1.2 2007-12-03: Ko vd Sloot Packaging seems to be fine now. 2007-10-01: Ko vd Sloot first packaging attempt # $Id$ # $URL$ LanguageMachines-timbl-642727d/README000066400000000000000000000000521451477526200171650ustar00rootroot00000000000000Please see README.md for for information. 
LanguageMachines-timbl-642727d/README.md000066400000000000000000000105101451477526200175640ustar00rootroot00000000000000[![GitHub build](https://github.com/LanguageMachines/timbl/actions/workflows/timbl.yml/badge.svg?branch=master)](https://github.com/LanguageMachines/timbl/actions/) [![Language Machines Badge](http://applejack.science.ru.nl/lamabadge.php/timbl)](http://applejack.science.ru.nl/languagemachines/) [![DOI](https://zenodo.org/badge/20526237.svg)](https://zenodo.org/badge/latestdoi/20526237) =========================================== TiMBL: Tilburg Memory Based Learner =========================================== TiMBL 6.4 (c) CLS/ILK/CLiPS 1998 - 2023 Centre for Language Studies, Radboud University Nijmegen Induction of Linguistic Knowledge Research Group, Tilburg University and Centre for Dutch Language and Speech, University of Antwerp **Website:** https://languagemachines.github.io/timbl/ TiMBL is an open source software package implementing several memory-based learning algorithms, among which IB1-IG, an implementation of k-nearest neighbor classification with feature weighting suitable for symbolic feature spaces, and IGTree, a decision-tree approximation of IB1-IG. All implemented algorithms have in common that they store some representation of the training set explicitly in memory. During testing, new cases are classified by extrapolation from the most similar stored cases. For over fifteen years TiMBL has been mostly used in natural language processing as a machine learning classifier component, but its use extends to virtually any supervised machine learning domain. Due to its particular decision-tree-based implementation, TiMBL is in many cases far more efficient in classification than a standard k-nearest neighbor algorithm would be. ----------------------------------------------------------------------- This is a major extension to the sixth main release of TiMBL. 
Most significant change: **The main program is now called 'timbl' and not 'Timbl' anymore. Be warned!** This change is part of our effort to get our MBL software into software distributions like Debian, Ubuntu, RedHat . Comments and bug-reports are welcome at our issue tracker at https://github.com/LanguageMachines/timbl/issues or by mailing lamasoftware (at) science.ru.nl. Documentation and more info may be found on https://languagemachines.github.io/timbl . TiMBL is distributed under the GNU Public Licence v3 (see the file COPYING). ----------------------------------------------------------------------- This software has been tested on: - Intel platforms running several versions of Linux, including Ubuntu, Debian, Arch Linux, Fedora (both 32 and 64 bits) - MAC platform running OS X 10.10 Alternatively, with some effort, you may get it to work on a Windows platform using Cygwin. Compilers: - GCC (use 7.0 or later) - Clang Contents of this distribution: - Sources - Licensing information ( COPYING ) - Build system based on GNU Autotools - Container build file ( Dockerfile ) - Example data files ( in the demos directory ) - Documentation ( in the docs directory ) Dependencies: To be able to succesfully build TiMBL from the tarball, you need the following pakages: - ticcutils (https://github.com/LanguageMachines/ticcutils) - pkg-config - libxml2-dev To install TiMBL, first consult whether your distribution's package manager has an up-to-date package for TiMBL. To compile and install manually from source instead, provided you have all the dependencies installed: $ bash bootstrap.sh $ ./configure $ make $ make install If you want to automatically download and install the latest stable versions of the required dependencies, then run `./build-deps.sh` prior to the above. You can pass a target directory prefix as first argument and you may need to prepend `sudo` to ensure you can install there. 
The dependencies are: * [ticcutils](https://github.com/LanguageMachines/ticcutils) A `Dockerfile` for a container build is also available, specify `--build-arg VERSION=development` if you want the latest development version instead. You will still need to take care to install the following 3rd party dependencies through your distribution's package manager, as they are not provided by our script: * ``icu`` - A C++ library for Unicode and Globalization support. On Debian/Ubuntu systems, install the package libicu-dev. * A sane build environment with a C++ compiler (e.g. gcc 4.9 or above or clang), make, autotools, libtool, pkg-config LanguageMachines-timbl-642727d/TODO000066400000000000000000000001331451477526200167750ustar00rootroot00000000000000* reorganize clone() splitChild() and such in TimblExperiment. it is very confusing now. LanguageMachines-timbl-642727d/bootstrap.sh000077500000000000000000000042311451477526200206640ustar00rootroot00000000000000# bootstrap - script to bootstrap the distribution rolling engine # usage: # $ sh ./bootstrap && ./configure && make dist[check] # # this yields a tarball which one can install doing # # $ tar zxf PACKAGENAME-*.tar.gz # $ cd PACKAGENAME-* # $ ./configure # $ make # # make install # requirements: # GNU autoconf, from e.g. ftp.gnu.org:/pub/gnu/autoconf/ # GNU automake, from e.g. http://ftp.gnu.org/gnu/automake/ automake=automake aclocal=aclocal # if you want to autogenerate a ChangeLog form svn: # # svn2cl, a python script, as used in the GNU Enterprise project. # By jcater (Jason Cater), contributions by reinhard (Reinhard Müller). # Get it from # http://www.gnuenterprise.org/cgi-bin/viewcvs.cgi/*checkout*/gnue/trunk/gnue-common/utils/svn2cl . # svn2cl is used in Makefile.am too. 
# # (Another svn2cl implementation, in perl, is at # http://www.contactor.se/~dast/svn/archive-2002-04/0910.shtml) # # see also toplevel Makefile.am # test -f ChangeLog || { # svn log --verbose > ChangeLog #} if $automake --version|head -1 |grep ' 1\.[4-9]'; then echo "automake 1.4-1.9 is active. You should use automake 1.10 or later" if test -f /etc/debian_version; then echo " sudo apt-get install automake" echo " sudo update-alternatives --config automake" fi exit 1 fi # autoconf-archive Debian package, aclocal-archive RPM, obsolete/badly supported OS, installed in home dir acdirs="/usr/share/autoconf-archive/ /usr/share/aclocal/ /usr/local/share/aclocal/ $HOME/local/share/autoconf-archive/" found=false for d in $acdirs do if test -f ${d}libtool.m4 then found=true break fi done if ! $found then cat <&2 echo " Building latest stable release of main dependencies from source.">&2 echo "------------------------------------------------------------------------">&2 else echo "------------------------------------------------------------------------">&2 echo " Building development versions of main dependencie from source.">&2 echo " (This is experimental and may contain bugs! DO NOT PUBLISH!)">&2 echo "-----------------------------------------------------------------------">&2 fi PWD="$(pwd)" BUILDDIR="$(mktemp -dt "build-deps.XXXXXX")" cd "$BUILDDIR" BUILD_SOURCES="LanguageMachines/ticcutils" for SUFFIX in $BUILD_SOURCES; do \ NAME="$(basename "$SUFFIX")" git clone "https://github.com/$SUFFIX" cd "$NAME" REF=$(git tag -l | grep -E "^v?[0-9]+(\.[0-9])*" | sort -t. -k 1.2,1n -k 2,2n -k 3,3n -k 4,4n | tail -n 1) if [ "$VERSION" = "stable" ] && [ -n "$REF" ]; then git -c advice.detachedHead=false checkout "$REF" fi sh ./bootstrap.sh && ./configure --prefix "$PREFIX" && make && make install cd .. 
done cd "$PWD" [ -n "$BUILDDIR" ] && rm -Rf "$BUILDDIR" LanguageMachines-timbl-642727d/codemeta.json000066400000000000000000000110631451477526200207650ustar00rootroot00000000000000{ "@context": [ "https://doi.org/10.5063/schema/codemeta-2.0", "http://schema.org", "https://w3id.org/software-types" ], "@type": "SoftwareSourceCode", "identifier": "timbl", "name": "TiMBL", "version": "6.9", "description": "TiMBL is an open source software package implementing several memory-based learning algorithms, among which IB1-IG, an implementation of k-nearest neighbor classification with feature weighting suitable for symbolic feature spaces, and IGTree, a decision-tree approximation of IB1-IG. All implemented algorithms have in common that they store some representation of the training set explicitly in memory. During testing, new cases are classified by extrapolation from the most similar stored cases.", "license": "https://spdx.org/licenses/GPL-3.0", "url": "https://languagemachines.github.io/timbl", "author": [ { "@type": "Person", "givenName": "Ko", "familyName": "van der Sloot", "email": "ko.vandersloot@let.ru.nl", "affiliation": { "@id": "https://www.ru.nl/clst", "@type": "Organization", "name": "Centre for Language and Speech Technology", "url": "https://www.ru.nl/clst", "parentOrganization": { "@id": "https://www.ru.nl/cls", "@type": "Organization", "name": "Centre for Language Studies", "url": "https://www.ru.nl/cls", "parentOrganization": { "@id": "https://www.ru.nl", "name": "Radboud University", "@type": "Organization", "url": "https://www.ru.nl", "location": { "@type": "Place", "name": "Nijmegen" } } } }, "position": 1 }, { "@id": "https://orcid.org/0000-0003-2493-656X", "@type": "Person", "givenName": "Antal", "familyName": "van den Bosch", "email": "antal.vandenbosch@let.ru.nl", "affiliation": { "@id": "https://cls.ru.nl" }, "position": 2 }, { "@type": "Person", "givenName": "Walter", "familyName": "Daelemans", "position": 3 }, { "@id": 
"https://orcid.org/0000-0002-1046-0006", "@type": "Person", "givenName": "Maarten", "familyName": "van Gompel", "email": "proycon@anaproy.nl", "affiliation": { "@id": "https://knaw.huc.nl" }, "position": 4 }, { "@type": "Person", "givenName": "Ton", "familyName": "Weijters", "position": 5 }, { "@type": "Person", "givenName": "Jakub", "familyName": "Zavrel", "position": 6 } ], "sourceOrganization": { "@id": "https://www.ru.nl/clst" }, "programmingLanguage": { "@type": "ComputerLanguage", "identifier": "c++", "name": "C++" }, "operatingSystem": [ "Linux", "BSD", "macOS" ], "codeRepository": "https://github.com/LanguageMachines/timbl", "softwareRequirements": [ { "@type": "SoftwareApplication", "identifier": "libxml2", "name": "libxml2" }, { "@type": "SoftwareApplication", "identifier": "ticcutils", "name": "ticcutils" } ], "readme": "https://github.com/LanguageMachines/timbl/blob/master/README.md", "issueTracker": "https://github.com/LanguageMachines/timbl/issues", "contIntegration": "https://travis-ci.org/LanguageMachines/timbl", "releaseNotes": "https://github.com/LanguageMachines/timbl/releases", "developmentStatus": "https://www.repostatus.org/#active", "keywords": [ "nlp", "natural language processing", "memory based learning", "machine learning", "knn", "k-nearest neighbours", "decision tree" ], "referencePublication": [ { "@type": "TechArticle", "name": "TiMBL: Tilburg Memory Based Learner, Reference Guide", "author": [ "Walter Daelemans", "Jakub Zavrel", "Ko van der Sloot", "Antal van den Bosch" ], "url": "https://github.com/LanguageMachines/timbl/raw/master/docs/Timbl_6.4_Manual.pdf" }, { "@type": "Book", "name": "Memory-Based Language Processing", "author": [ "Walter Daelemans", "Antal van den Bosch" ], "url": "http://ilk.uvt.nl/mblp", "publisher": "Cambridge University Press" } ], "dateCreated": "1998", "targetProduct": [ { "@type": "SoftwareLibrary", "executableName": "libtimbl", "name": "libtimbl", "runtimePlatform": [ "Linux", "BSD", "macOS" ], 
"description": "Memory-based Learning Library with API for C++" }, { "@type": "CommandLineApplication", "executableName": "timbl", "name": "timbl", "runtimePlatform": [ "Linux", "BSD", "macOS" ], "description": "Memory-based learner, command-line tool" } ] } LanguageMachines-timbl-642727d/configure.ac000066400000000000000000000040021451477526200205720ustar00rootroot00000000000000# -*- Autoconf -*- # Process this file with autoconf to produce a configure script. AC_PREREQ([2.69]) AC_INIT([timbl],[6.9],[lamasoftware@science.ru.nl]) #also adapt in codemeta.json! AM_INIT_AUTOMAKE AC_CONFIG_SRCDIR([.]) AC_CONFIG_MACRO_DIR([m4]) AC_CONFIG_HEADERS([config.h]) if test x"${CXXFLAGS+set}" = xset; then # the user set CXXFLAGS; don't override it. cxx_flags_were_set=true else cxx_flags_were_set=false fi if $cxx_flags_were_set; then CXXFLAGS=$CXXFLAGS fi # Checks for programs. AC_PROG_CXX( [g++ c++] ) # libtool stuff LT_INIT # when running tests, use CXX AC_LANG([C++]) AC_OPENMP if test "x$ac_cv_prog_cxx_openmp" != "x"; then if test "x$ac_cv_prog_cxx_openmp" != "xunsupported"; then CXXFLAGS="$CXXFLAGS $OPENMP_CXXFLAGS" AC_DEFINE([HAVE_OPENMP], [1] , [Define to 1 if you have OpenMP] ) else AC_MSG_NOTICE([We don't have OpenMP for Clang. Multithreaded operation is di sabled]) fi fi #checks for libraries. # Checks for header files. AC_CHECK_HEADERS([sys/time.h]) # Checks for typedefs, structures, and compiler characteristics. AC_HEADER_STDBOOL AC_C_INLINE AC_TYPE_SIZE_T # Checks for library functions. 
AC_CHECK_FUNCS([floor gettimeofday pow rint sqrt ]) PKG_PROG_PKG_CONFIG if test "x$prefix" = "xNONE"; then prefix="/usr/local" fi if test "x$PKG_CONFIG_PATH" = x; then export PKG_CONFIG_PATH="$prefix/lib/pkgconfig" else export PKG_CONFIG_PATH="$prefix/lib/pkgconfig:$PKG_CONFIG_PATH" fi AC_OSX_PKG( [icu4c] ) PKG_PROG_PKG_CONFIG PKG_CHECK_MODULES([XML2], [libxml-2.0 >= 2.6.16] ) CXXFLAGS="$CXXFLAGS $XML2_CFLAGS" LIBS="$LIBS $XML2_LIBS" PKG_CHECK_MODULES([ticcutils], [ticcutils >= 0.30] ) CXXFLAGS="$CXXFLAGS $ticcutils_CFLAGS" LIBS="$LIBS $ticcutils_LIBS" PKG_CHECK_MODULES([ICU], [icu-uc >= 50 icu-io] ) CXXFLAGS="$CXXFLAGS $ICU_CFLAGS" LIBS="$ICU_LIBS $LIBS" AC_CONFIG_FILES([ Makefile timbl.pc m4/Makefile src/Makefile docs/Makefile include/Makefile include/timbl/Makefile demos/Makefile ]) AC_OUTPUT LanguageMachines-timbl-642727d/demos/000077500000000000000000000000001451477526200174175ustar00rootroot00000000000000LanguageMachines-timbl-642727d/demos/.gitignore000066400000000000000000000001701451477526200214050ustar00rootroot00000000000000*~ *.o *.lo Makefile Makefile.in .deps/ .libs/ tse api_test1 api_test2 api_test3 api_test4 api_test5 api_test6 classify LanguageMachines-timbl-642727d/demos/Makefile.am000066400000000000000000000012401451477526200214500ustar00rootroot00000000000000# $Id$ # $URL: $ AM_CPPFLAGS = -I@top_srcdir@/include AM_CXXFLAGS = -std=c++0x noinst_PROGRAMS = api_test1 api_test2 api_test3 api_test4 api_test5 api_test6\ tse classify LDADD = ../src/libtimbl.la tse_SOURCES = tse.cxx classify_SOURCES = classify.cxx api_test1_SOURCES = api_test1.cxx api_test2_SOURCES = api_test2.cxx api_test3_SOURCES = api_test3.cxx api_test4_SOURCES = api_test4.cxx api_test5_SOURCES = api_test5.cxx api_test6_SOURCES = api_test6.cxx exdir = $(datadir)/doc/@PACKAGE@/examples ex_DATA = dimin.script dimin.train dimin.test cross_val.test \ small_1.train small_2.train small_3.train small_4.train small_5.train EXTRA_DIST = $(ex_DATA) 
LanguageMachines-timbl-642727d/demos/api_test1.cxx000077500000000000000000000023401451477526200220360ustar00rootroot00000000000000/* Copyright (c) 1998 - 2015 ILK - Tilburg University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . For questions and suggestions, see: http://ilk.uvt.nl/software.html or send mail to: timbl@uvt.nl */ #include "timbl/TimblAPI.h" int main(){ Timbl::TimblAPI My_Experiment( "-a IGTREE +vDI+DB+F", "test1" ); My_Experiment.SetOptions( "-w3 -vDB" ); My_Experiment.ShowSettings( std::cout ); My_Experiment.Learn( "dimin.train" ); My_Experiment.Test( "dimin.test", "my_first_test.out" ); My_Experiment.SetOptions( "-mM" ); My_Experiment.Test( "dimin.test", "my_first_test.out" ); } LanguageMachines-timbl-642727d/demos/api_test2.cxx000077500000000000000000000023261451477526200220430ustar00rootroot00000000000000/* Copyright (c) 1998 - 2015 ILK - Tilburg University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
You should have received a copy of the GNU General Public License along with this program; if not, see . For questions and suggestions, see: http://ilk.uvt.nl/software.html or send mail to: timbl@uvt.nl */ #include #include "timbl/TimblAPI.h" int main(){ Timbl::TimblAPI *My_Experiment = new Timbl::TimblAPI( "-a IB2 +vF+DI+DB" , "test2" ); My_Experiment->SetOptions( "-b100" ); My_Experiment->ShowSettings( std::cout ); My_Experiment->Learn( "dimin.train" ); My_Experiment->Test( "dimin.test", "my_second_test.out" ); delete My_Experiment; exit(1); } LanguageMachines-timbl-642727d/demos/api_test3.cxx000077500000000000000000000021001451477526200220320ustar00rootroot00000000000000/* Copyright (c) 1998 - 2015 ILK - Tilburg University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . 
For questions and suggestions, see: http://ilk.uvt.nl/software.html or send mail to: timbl@uvt.nl */ #include #include "timbl/TimblAPI.h" using Timbl::TimblAPI; int main(){ TimblAPI *My_Experiment = new TimblAPI( "-t cross_validate" ); My_Experiment->Test( "cross_val.test" ); delete My_Experiment; exit(0); } LanguageMachines-timbl-642727d/demos/api_test4.cxx000077500000000000000000000041371451477526200220470ustar00rootroot00000000000000/* Copyright (c) 1998 - 2015 ILK - Tilburg University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . 
For questions and suggestions, see: http://ilk.uvt.nl/software.html or send mail to: timbl@uvt.nl */ #include #include #include "timbl/TimblAPI.h" using namespace Timbl; int main(){ TimblAPI *My_Experiment = new TimblAPI( "-a IB1 +vDI+DB +mM" , "test4" ); My_Experiment->ShowSettings( std::cout ); My_Experiment->Learn( "dimin.train" ); My_Experiment->Test( "dimin.test", "inc1.out" ); My_Experiment->SaveWeights( "wg.1.wgt" ); My_Experiment->WriteArrays( "arr.1.arr" ); My_Experiment->Increment( "=,=,=,=,+,k,e,=,-,r,@,l,T" ); My_Experiment->Test( "dimin.test", "inc2.out" ); My_Experiment->SaveWeights( "wg.2.wgt" ); My_Experiment->WriteArrays( "arr.2.arr" ); My_Experiment->Increment( "+,zw,A,rt,-,k,O,p,-,n,O,n,E" ); My_Experiment->Test( "dimin.test", "inc3.out" ); My_Experiment->SaveWeights( "wg.3.wgt" ); My_Experiment->WriteArrays( "arr.3.arr" ); My_Experiment->Decrement( "+,zw,A,rt,-,k,O,p,-,n,O,n,E" ); My_Experiment->Test( "dimin.test", "inc4.out" ); My_Experiment->SaveWeights( "wg.4.wgt" ); My_Experiment->WriteArrays( "arr.4.arr" ); My_Experiment->Decrement( "=,=,=,=,+,k,e,=,-,r,@,l,T" ); My_Experiment->Test( "dimin.test", "inc5.out" ); My_Experiment->SaveWeights( "wg.5.wgt" ); My_Experiment->WriteArrays( "arr.5.arr" ); delete My_Experiment; exit(1); } LanguageMachines-timbl-642727d/demos/api_test5.cxx000077500000000000000000000060051451477526200220440ustar00rootroot00000000000000/* Copyright (c) 1998 - 2015 ILK - Tilburg University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
You should have received a copy of the GNU General Public License along with this program; if not, see . For questions and suggestions, see: http://ilk.uvt.nl/software.html or send mail to: timbl@uvt.nl */ #include #include #include "timbl/TimblAPI.h" using std::endl; using std::cout; using std::string; using namespace Timbl; int main(){ TimblAPI *My_Experiment = new TimblAPI( "-a IB1 +vDI+DB+n +mM +k4 " , "test5" ); My_Experiment->Learn( "dimin.train" ); { icu::UnicodeString line = "=,=,=,=,+,k,e,=,-,r,@,l,T"; const neighborSet *neighbours1 = My_Experiment->classifyNS( line ); if ( neighbours1 ){ cout << "Classify OK on " << line << endl; cout << neighbours1; } else { cout << "Classify failed on " << line << endl; neighbours1 = new neighborSet(); } neighborSet neighbours2; line = "+,zw,A,rt,-,k,O,p,-,n,O,n,E"; if ( My_Experiment->classifyNS( line, neighbours2 ) ){ cout << "Classify OK on " << line << endl; cout << neighbours2; } else { cout << "Classify failed on " << line << endl; } line = "+,z,O,n,-,d,A,xs,-,=,A,rm,P"; const neighborSet *neighbours3 = My_Experiment->classifyNS( line ); if ( neighbours3 ){ cout << "Classify OK on " << line << endl; cout << neighbours3; } else { cout << "Classify failed on " << line << endl; neighbours3 = new neighborSet(); } neighborSet uit2; { neighborSet uit; uit.setShowDistance(true); uit.setShowDistribution(true); cout << " before first merge " << endl; cout << uit; uit.merge( *neighbours1 ); cout << " after first merge " << endl; cout << uit; uit.merge( *neighbours3 ); cout << " after second merge " << endl; cout << uit; uit.merge( neighbours2 ); cout << " after third merge " << endl; cout << uit; uit.truncate( 3 ); cout << " after truncate " << endl; cout << uit; cout << " test assignment" << endl; uit2 = *neighbours1; } cout << "assignment result: " << endl; cout << uit2; { cout << " test copy construction" << endl; neighborSet uit(uit2); cout << "result: " << endl; cout << uit; } cout << "almost done!" 
<< endl; } delete My_Experiment; cout << "done!" << endl; } LanguageMachines-timbl-642727d/demos/api_test6.cxx000077500000000000000000000036301451477526200220460ustar00rootroot00000000000000/* Copyright (c) 1998 - 2015 ILK - Tilburg University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . For questions and suggestions, see: http://ilk.uvt.nl/software.html or send mail to: timbl@uvt.nl */ #include #include "timbl/TimblAPI.h" using std::cout; using std::endl; using namespace Timbl; int main(){ TimblAPI My_Experiment( "-a IB1 +vDI+DB -G 0 -k3", "test6" ); My_Experiment.Learn( "dimin.train" ); const ClassDistribution *vd; const TargetValue *tv = My_Experiment.Classify( std::string("-,=,O,m,+,h,K,=,-,n,I,N,K"), vd ); cout << "resulting target: " << tv << endl; cout << "resulting Distribution: " << vd << endl; ClassDistribution::dist_iterator it=vd->begin(); while ( it != vd->end() ){ cout << it->second << " OR "; cout << it->second->Value() << " " << it->second->Weight() << endl; ++it; } cout << "the same with neighborSets" << endl; const neighborSet *nb = My_Experiment.classifyNS( "-,=,O,m,+,h,K,=,-,n,I,N,K" ); WClassDistribution *vd2 = nb->bestDistribution(); vd2->Normalize(); cout << "default answer " << vd2 << endl; decayStruct *dc = new expDecay(0.3); delete vd2; vd2 = nb->bestDistribution( dc ); delete dc; cout << "with exponenial decay, alpha = 0.3 " << vd2 << endl; delete vd2; } 
LanguageMachines-timbl-642727d/demos/classify.cxx000077500000000000000000000063331451477526200217700ustar00rootroot00000000000000/* Copyright (c) 1998 - 2015 ILK - Tilburg University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . For questions and suggestions, see: http://ilk.uvt.nl/software.html or send mail to: timbl@uvt.nl */ #include #include #include #include "timbl/TimblAPI.h" using namespace std; using namespace Timbl; char inf[] = "./dimin.train"; char test_f[] = "./dimin.test"; int main(){ string Bresult; double Distance; TimblAPI *Exp = new TimblAPI( "-a TRIBL" ); Exp->SetOptions( "+vS +x -N30 -q2" ); Exp->ShowOptions( cout ); Exp->Learn( inf ); ifstream testfile; string Buffer; testfile.open( test_f, ios::in ); cout << "\nStart testing, using TRIBL" << endl; while ( getline( testfile, Buffer ) ){ const TargetValue *tv = Exp->Classify( Buffer, Distance ); if ( tv ) cout << Buffer << "\t --> " << tv << " " << Distance << endl; else cout << Buffer << "\t --> (nill)" << endl; } testfile.close(); delete Exp; Exp = new TimblAPI( "-a IB1" ); Exp->SetOptions( "+vS" ); Exp->ShowOptions( cout ); Exp->Learn( inf ); testfile.clear(); testfile.open( test_f, ios::in ); cout << "\nStart testing, using IB" << endl; while ( getline( testfile, Buffer ) ){ if ( Exp->Classify( Buffer, Bresult, Distance ) ){ cout << Buffer << "\t --> " << Bresult << " " << Distance << endl; } else cout << Buffer << "\t --> 
(nill)" << endl; } testfile.close(); delete Exp; Exp = new TimblAPI( "-a IGTREE" ); Exp->SetOptions( "+vS -N40" ); Exp->ShowOptions( cout ); Exp->Learn( inf ); Exp->WriteInstanceBase( "dimin.tree" ); Exp->SaveWeights( "dimin.wgt" ); cout << "\nStart testing, using IGTree, first run" << endl; testfile.clear(); testfile.open( test_f, ios::in ); while ( getline( testfile, Buffer ) ){ if ( Exp->Classify( Buffer, Bresult, Distance ) ){ cout << Buffer << "\t --> " << Bresult << " " << Distance << endl; } else cout << Buffer << "\t --> (nill)" << endl; } testfile.close(); delete Exp; Exp = new TimblAPI( "-a IGTREE" ); Exp->SetOptions( "+vS" ); Exp->ShowOptions( cout ); Exp->GetInstanceBase( "dimin.tree" ); Exp->GetWeights( "dimin.wgt" ); cout << "\nStart testing, using IGTree, second run, (retrieved Tree)" << endl; testfile.clear(); testfile.open( test_f, ios::in ); while ( getline( testfile, Buffer ) ){ if ( Exp->Classify( Buffer, Bresult, Distance ) ){ cout << Buffer << "\t --> " << Bresult << " " << Distance << endl; } else cout << Buffer << "\t --> (nill)" << endl; } testfile.close(); exit(1); } LanguageMachines-timbl-642727d/demos/cross_val.test000077500000000000000000000001061451477526200223130ustar00rootroot00000000000000small_1.train small_2.train small_3.train small_4.train small_5.train LanguageMachines-timbl-642727d/demos/dimin.script000077500000000000000000000032741451477526200217560ustar00rootroot00000000000000# # Copyright (c) 1998 - 2011 # ILK - Tilburg University # CLiPS - University of Antwerp # # This file is part of timbl # # timbl is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3 of the License, or # (at your option) any later version. # # timbl is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, see . # # For questions and suggestions, see: # http://ilk.uvt.nl/software.html # or send mail to: # timbl@uvt.nl # example script file for tse demo program. # # create an experiment with name exp1 # new exp1 # # make sure that we see something happen # exp1.set +v+f+di+O # # set the desired weighting to IG # exp1.set +w IG # # now train ( prepare is implicit) # exp1.train ./dimin.train # # save the Instancebase for later use. # exp1.save tree.tmp exp1.show options # # # first we start with OVERLAP metric # exp1.set -mO # # and test exp1.test ./dimin.test a1.tmp # # now we try the Value Difference Metric exp1.set -mM # exp1.test ./dimin.test a2.tmp # # start a new experiment: new exp2 # # fill it with de tree generated with exp1 # exp2.get tree.tmp # # let's make a lot of noice! exp2.set +v +o+f+di+n+db # # now delete exp1, to demonstrate that it works. 
free exp1 # # end perform a test with exp2 exp2.test ./dimin.test exp2.out.tmp # # ready LanguageMachines-timbl-642727d/demos/dimin.test000077500000000000000000000612721451477526200214330ustar00rootroot00000000000000=,=,=,=,=,=,=,=,+,p,e,=,T =,=,=,=,+,k,u,=,-,bl,u,m,E +,m,I,=,-,d,A,G,-,d,},t,J -,t,@,=,-,l,|,=,-,G,@,n,T -,=,I,n,-,str,y,=,+,m,E,nt,J =,=,=,=,=,=,=,=,+,br,L,t,J =,=,=,=,+,zw,A,=,-,m,@,r,T =,=,=,=,-,f,u,=,+,dr,a,l,T =,=,=,=,=,=,=,=,+,l,e,w,T =,=,=,=,+,tr,K,N,-,k,a,rt,J =,=,=,=,+,=,o,=,-,p,u,=,T =,=,=,=,=,=,=,=,+,l,A,m,E =,=,=,=,=,=,=,=,+,l,A,p,J =,=,=,=,=,=,=,=,+,sx,E,lm,P +,l,a,=,-,d,@,=,-,k,A,st,J -,s,i,=,-,f,E,r,-,st,O,k,J =,=,=,=,=,=,=,=,+,sp,a,n,T =,=,=,=,=,=,=,=,+,st,o,t,J =,=,=,=,+,sp,a,r,-,b,u,k,J +,h,I,N,-,k,@,l,-,bl,O,k,J -,m,e,=,-,d,A,l,+,j,O,n,E -,sn,u,=,-,p,@,=,+,r,K,=,T =,=,=,=,=,=,=,=,+,sp,A,N,E +,k,a,=,-,k,@,=,-,n,E,st,J =,=,=,=,+,v,u,=,-,r,I,N,K =,=,=,=,=,=,=,=,+,v,A,t,J -,r,@,=,+,G,I,s,-,t,@,r,T =,=,=,=,+,=,O,p,-,tr,E,k,J =,=,=,=,+,fr,a,=,-,t,@,r,T -,b,@,=,-,l,a,=,+,r,K,=,T =,=,=,=,=,=,=,=,+,=,K,=,T =,=,=,=,+,m,O,t,-,m,},x,J -,G,u,t,-,=,E,=,-,m,@,r,T =,=,=,=,+,r,a,=,-,t,@,l,T =,=,=,=,=,=,=,=,+,p,A,s,J +,r,o,=,-,z,@,=,-,b,O,k,J -,x,@,=,+,d,},lt,-,sp,E,l,E =,=,=,=,=,=,=,=,+,h,},l,E =,=,=,=,=,=,=,=,+,sp,e,r,T +,x,e,=,-,s,@,l,-,d,i,r,T =,=,=,=,-,pl,y,=,+,v,i,r,T =,=,=,=,+,v,A,=,-,N,@,r,T =,=,=,=,+,t,e,=,-,G,@,l,T -,v,i,=,-,j,o,=,+,l,i,r,T -,S,o,=,-,k,o,=,+,l,a,=,T +,p,i,=,-,k,o,=,-,l,o,=,T =,=,=,=,=,=,=,=,+,v,O,Nk,J =,=,=,=,+,l,O,k,-,fl,L,t,J =,=,=,=,=,=,=,=,+,tr,e,f,J =,=,=,=,-,=,O,r,+,G,a,n,T =,=,=,=,+,sp,i,g,-,br,i,f,J =,=,=,=,=,=,=,=,+,p,O,l,E =,=,=,=,=,=,=,=,+,k,e,l,T =,=,=,=,+,k,a,=,-,v,@,l,T =,=,=,=,+,dr,a,j,-,tr,A,p,J =,=,=,=,=,=,=,=,+,spr,K,=,T =,=,=,=,-,kl,a,=,+,v,i,r,T +,=,O,N,-,G,@,=,-,v,A,l,E =,=,=,=,-,pl,a,=,+,v,K,=,T +,z,O,n,-,d,A,xs,-,=,A,rm,P =,=,=,=,+,w,K,=,-,b,e,lt,J -,tr,A,ns,+,p,O,rt,-,st,O,k,J =,=,=,=,=,=,=,=,+,sp,E,l,E =,=,=,=,+,p,O,k,-,p,},t,J =,=,=,=,+,m,O,r,-,m,@,l,T =,=,=,=,+,v,O,n,-,d,@,r,T 
=,=,=,=,+,tr,K,n,-,r,K,s,J =,=,=,=,+,l,},x,-,t,@,r,T =,=,=,=,-,br,@,=,+,v,i,r,T +,l,O,=,-,m,@,rd,-,br,i,f,J -,k,O,m,+,k,O,=,-,m,@,r,T -,=,I,n,-,d,i,=,+,j,a,n,T =,=,=,=,+,l,e,=,-,G,@,r,T =,=,=,=,-,b,y,=,+,r,I,n,E -,t,@,=,+,f,L,=,-,j,@,=,T =,=,=,=,=,=,=,=,+,k,E,x,J =,=,=,=,+,h,e,s,-,t,@,r,T +,=,a,m,-,b,e,ldz,-,b,e,n,T +,sp,O,n,-,z,@,=,-,z,A,k,J +,p,O,=,-,p,@,=,-,d,K,n,T =,=,=,=,=,=,=,=,+,b,E,k,J =,=,=,=,=,=,=,=,+,sp,a,=,T =,=,=,=,=,=,=,=,+,tr,y,k,J +,kw,i,=,-,j,@,=,-,k,A,st,J =,=,=,=,+,kn,O,=,-,b,@,l,T =,=,=,=,+,m,O,=,-,n,@,k,J =,=,=,=,=,=,=,=,+,tr,e,=,T =,=,=,=,+,n,i,r,-,br,o,t,J -,h,A,m,-,tr,O,=,-,m,@,l,T =,=,=,=,=,=,=,=,+,st,E,m,E =,=,=,=,=,=,=,=,+,kl,L,s,J =,=,=,=,=,=,=,=,+,t,A,k,J -,m,A,=,-,S,i,=,-,n,@,=,T +,x,K,=,-,t,@,=,-,k,a,s,J =,=,=,=,+,h,A,nt,-,f,O,l,E =,=,=,=,=,=,=,=,+,t,L,l,T =,=,=,=,+,spr,I,N,-,v,E,rs,J =,=,=,=,+,p,a,=,-,l,I,N,K =,=,=,=,+,str,A,nt,-,h,L,s,J +,t,y,=,-,r,@,=,-,l,y,r,T =,=,=,=,+,p,i,=,-,m,@,l,T +,r,E,=,-,t,@,=,-,p,|,k,J =,=,=,=,+,sp,L,d,-,b,},s,J =,=,=,=,=,=,=,=,+,z,u,n,T =,=,=,=,+,sx,E,l,-,h,a,k,J =,=,=,=,+,w,I,nt,-,s,@,l,T =,=,=,=,=,=,=,=,+,vl,A,x,J =,=,=,=,+,st,a,n,-,d,@,r,T =,=,=,=,+,kl,e,=,-,p,@,l,T =,=,=,=,-,p,A,r,+,f,},m,E =,=,=,=,=,=,=,=,+,bl,K,=,T =,=,=,=,+,st,A,=,-,f,@,l,T -,=,a,=,+,m,A,n,-,d,@,l,T -,st,a,=,+,k,E,t,-,s,@,l,T =,=,=,=,=,=,=,=,+,w,K,f,J =,=,=,=,+,j,e,=,-,G,@,r,T -,f,i,=,-,b,e,=,-,k,@,r,T =,=,=,=,=,=,=,=,+,sx,O,t,J -,h,E,r,-,d,@,=,+,r,I,n,E =,=,=,=,+,m,},=,-,m,i,=,T =,=,=,=,+,sx,e,=,-,m,a,=,T =,=,=,=,=,=,=,=,+,vl,u,r,T =,=,=,=,-,sp,i,=,+,j,O,n,E =,=,=,=,+,sx,I,l,-,f,@,r,T =,=,=,=,=,=,=,=,+,str,L,k,J =,=,=,=,=,=,=,=,+,st,e,n,T =,=,=,=,+,l,O,=,-,k,@,r,T =,=,=,=,+,tr,O,=,-,f,@,l,T =,=,=,=,+,t,},rf,-,p,O,t,J +,st,e,=,-,k,@,l,-,b,a,rs,J =,=,=,=,-,sx,a,=,+,b,E,l,E =,=,=,=,=,=,=,=,+,=,K,nt,J +,t,A,n,-,s,i,=,-,b,u,k,J +,=,u,s,-,t,@,r,-,m,E,s,J =,=,=,=,-,x,a,=,+,z,O,n,E =,=,=,=,+,w,e,x,-,fl,E,s,J =,=,=,=,+,st,E,l,-,s,@,l,T =,=,=,=,=,=,=,=,+,l,},xt,J =,=,=,=,+,z,o,=,-,m,@,r,T =,=,=,=,+,pl,a,=,-,G,@,r,T 
=,=,=,=,=,=,=,=,+,sp,L,t,J =,=,=,=,=,=,=,=,+,fr,A,ns,J =,=,=,=,-,s,M,=,+,s,K,s,J =,=,=,=,+,t,e,=,-,k,O,p,J =,=,=,=,+,st,E,n,-,s,@,l,T =,=,=,=,=,=,=,=,+,sn,O,r,E =,=,=,=,=,=,=,=,+,x,a,s,J =,=,=,=,+,sx,},t,-,s,@,l,T -,z,@,=,-,G,@,=,-,z,I,xt,J =,=,=,=,+,sx,O,=,-,f,@,l,T =,=,=,=,+,m,K,=,-,z,u,n,T -,k,o,=,-,m,o,=,+,t,i,f,J =,=,=,=,=,=,=,=,+,st,|,r,T -,br,i,=,-,G,a,=,-,d,i,r,T +,h,L,s,-,h,M,=,-,d,@,n,T =,=,=,=,+,d,a,=,-,d,@,l,T =,=,=,=,+,=,E,N,-,k,@,l,T =,=,=,=,+,st,E,=,-,k,@,r,T -,fr,i,=,+,t,y,r,-,m,A,nt,J =,=,=,=,-,k,o,=,+,l,O,m,E =,=,=,=,+,kl,O,=,-,d,@,r,T =,=,=,=,=,=,=,=,+,sn,L,f,J -,b,i,=,-,j,O,=,+,sk,o,p,J =,=,=,=,-,l,A,n,+,t,a,rn,T =,=,=,=,=,=,=,=,+,b,O,nt,J =,=,=,=,+,n,E,st,-,k,A,st,J =,=,=,=,+,st,O,=,-,f,@,r,T -,v,@,=,-,l,I,Ns,-,pl,E,k,J -,p,@,=,-,w,a,=,-,G,@,n,T =,=,=,=,+,t,u,=,-,spr,a,k,J =,=,=,=,+,b,A,rm,-,s,K,s,J +,k,I,n,-,d,@,r,-,sp,E,l,E =,=,=,=,-,x,@,=,+,b,A,k,J =,=,=,=,=,=,=,=,+,kr,I,N,E =,=,=,=,+,st,o,f,-,s,@,l,T =,=,=,=,+,xl,A,s,-,=,a,l,T =,=,=,=,=,=,=,=,+,st,o,p,J =,=,=,=,=,=,=,=,+,s,I,k,J =,=,=,=,+,st,O,p,-,m,A,nt,J =,=,=,=,+,fl,e,m,-,st,@,r,T +,b,|,=,-,k,@,=,-,n,o,t,J -,s,E,=,-,l,o,=,+,f,a,n,T =,=,=,=,=,=,=,=,+,st,O,rm,P +,w,o,r,-,d,@,=,-,l,K,st,J =,=,=,=,=,=,=,=,+,w,e,w,T =,=,=,=,-,k,O,r,+,b,e,l,T +,sx,o,n,-,h,K,ts,-,fl,E,k,J +,p,e,=,-,p,@,r,-,b,o,m,P =,=,=,=,+,str,A,v,-,b,A,Nk,J +,w,I,l,-,G,@,=,-,r,o,s,J +,l,E,k,-,t,@,=,-,z,A,k,J =,=,=,=,+,st,A,=,-,l,I,N,K =,=,=,=,-,h,o,=,+,b,o,=,T -,p,A,r,-,m,@,=,+,z,a,n,T =,=,=,=,+,=,o,r,-,l,O,x,J =,=,=,=,-,h,M,=,+,w,e,l,T =,=,=,=,=,=,=,=,+,j,A,n,T =,=,=,=,+,=,K,=,-,z,@,r,T =,=,=,=,+,s,i,r,-,s,@,l,T =,=,=,=,+,xr,A,s,-,f,E,lt,J =,=,=,=,=,=,=,=,+,t,o,m,P =,=,=,=,=,=,=,=,+,str,O,Nk,J =,=,=,=,+,sx,e,=,-,p,@,l,T =,=,=,=,+,str,o,j,-,m,A,nt,J =,=,=,=,+,sx,I,=,-,p,@,r,T =,=,=,=,+,=,O,N,-,k,@,l,T =,=,=,=,=,=,=,=,+,str,O,p,J =,=,=,=,+,l,e,=,-,z,I,N,K +,=,a,n,-,m,a,k,-,h,M,t,J =,=,=,=,-,k,a,=,+,d,E,t,J -,d,e,=,-,Z,|,=,+,n,e,=,T =,=,=,=,+,st,E,k,-,p,O,t,J =,=,=,=,-,p,a,=,+,p,u,=,T =,=,=,=,=,=,=,=,+,r,O,x,E 
=,=,=,=,=,=,=,=,+,st,},k,J =,=,=,=,=,=,=,=,+,st,},lp,J =,=,=,=,+,st,},m,-,p,@,r,T =,=,=,=,+,z,e,f,-,r,a,m,P =,=,=,=,+,d,a,l,-,d,@,r,T +,p,O,l,-,k,a,=,-,k,O,p,J -,b,A,=,-,G,a,=,+,t,E,l,E =,=,=,=,-,b,A,l,+,k,O,n,E =,=,=,=,+,sx,E,p,-,s,@,l,T =,=,=,=,+,kr,K,=,-,G,@,r,T =,=,=,=,=,=,=,=,+,z,a,l,T =,=,=,=,+,=,L,t,-,str,K,k,J -,p,a,=,+,r,a,=,-,b,@,l,T =,=,=,=,+,k,e,=,-,p,@,r,T =,=,=,=,=,=,=,=,+,h,a,j,T =,=,=,=,+,p,O,l,-,k,a,=,T +,t,e,=,-,k,@,rs,-,fl,E,s,J =,=,=,=,-,h,K,=,+,d,I,n,E =,=,=,=,-,j,y,=,+,w,e,l,T -,st,A,n,-,d,@,=,-,l,I,N,E =,=,=,=,=,=,=,=,+,z,o,n,T =,=,=,=,+,=,E,k,-,st,@,r,T =,=,=,=,-,s,i,s,+,t,e,m,P =,=,=,=,+,bl,u,t,-,fl,E,k,J =,=,=,=,+,kn,u,=,-,d,@,l,T =,=,=,=,=,=,=,=,+,t,a,rt,J =,=,=,=,-,t,O,r,+,n,o,j,T =,=,=,=,+,k,|,=,-,t,@,l,T =,=,=,=,+,=,M,=,-,t,a,r,T =,=,=,=,=,=,=,=,+,d,O,p,J =,=,=,=,-,t,a,=,+,bl,o,=,T =,=,=,=,+,s,e,=,-,d,@,l,T +,l,L,=,-,j,@,r,-,br,u,k,J =,=,=,=,=,=,=,=,+,b,L,=,T -,m,y,=,-,n,i,=,+,k,e,=,T =,=,=,=,+,r,u,=,-,m,@,r,T =,=,=,=,-,v,@,r,+,n,I,s,J =,=,=,=,-,t,E,r,+,m,K,n,T =,=,=,=,=,=,=,=,+,b,u,f,J =,=,=,=,=,=,=,=,+,zw,K,n,T =,=,=,=,+,t,a,=,-,l,I,N,K =,=,=,=,+,f,I,l,-,t,@,r,T =,=,=,=,-,x,@,=,+,z,I,n,E +,dr,i,=,-,w,i,=,-,l,@,r,T =,=,=,=,+,n,},=,-,m,@,r,T =,=,=,=,=,=,=,=,+,sx,y,r,T =,=,=,=,-,f,o,=,+,t,L,=,T -,=,e,=,-,l,A,s,+,t,i,k,J +,sx,u,n,-,m,a,=,-,k,@,r,T =,=,=,=,=,=,=,=,+,kn,O,l,E =,=,=,=,+,m,A,=,-,k,@,r,T +,v,A,r,-,k,@,s,-,h,a,s,J =,=,=,=,+,=,|,=,-,v,@,l,T -,p,@,=,+,n,A,nt,-,k,A,st,J =,=,=,=,+,=,o,=,-,v,@,n,T =,=,=,=,+,st,L,=,-,v,@,r,T -,f,A,n,+,f,a,=,-,r,@,=,T =,=,=,=,+,=,u,=,-,h,u,=,T =,=,=,=,+,t,e,=,-,k,@,n,T +,d,o,=,-,m,i,=,-,n,o,=,T =,=,=,=,+,t,e,N,-,k,o,t,J -,l,o,=,-,s,o,=,+,f,i,=,T =,=,=,=,=,=,=,=,+,vl,A,x,E =,=,=,=,+,f,o,=,-,t,o,=,T =,=,=,=,+,spr,u,=,-,j,@,r,T =,=,=,=,+,sx,e,r,-,st,e,n,T +,k,},=,-,s,i,=,-,d,O,p,J =,=,=,=,+,w,e,g,-,br,i,f,J =,=,=,=,=,=,=,=,+,kw,A,rt,J =,=,=,=,=,=,=,=,+,st,K,l,T =,=,=,=,-,k,A,n,+,t,e,l,T -,v,@,r,+,pl,e,x,-,st,@,r,T =,=,=,=,=,=,=,=,+,kl,i,k,J -,t,e,=,-,l,@,=,+,Gr,A,m,E =,=,=,=,=,=,=,=,+,st,i,l,T 
=,=,=,=,+,t,E,m,-,p,@,l,T =,=,=,=,=,=,=,=,+,st,E,rn,T -,b,@,=,+,st,E,=,-,l,I,N,K =,=,=,=,+,=,o,=,-,m,a,=,T +,k,a,rt,-,=,a,=,-,v,O,nt,J =,=,=,=,+,t,e,=,-,p,@,l,T =,=,=,=,+,l,O,N,-,bl,a,s,J +,k,O,=,-,t,a,=,-,b,e,lt,J =,=,=,=,=,=,=,=,+,h,E,mt,J =,=,=,=,+,st,K,l,-,bl,u,m,P -,r,@,=,+,m,e,=,-,d,i,=,T =,=,=,=,=,=,=,=,+,p,A,lm,P =,=,=,=,=,=,=,=,+,dw,K,l,T =,=,=,=,+,m,E,lk,-,st,u,l,T +,l,e,=,-,z,@,=,-,n,a,r,T =,=,=,=,+,str,a,t,-,l,i,t,J =,=,=,=,=,=,=,=,+,kr,u,x,J +,h,o,=,-,n,@,x,-,kl,i,r,T =,=,=,=,+,p,I,s,-,pr,a,t,J =,=,=,=,+,kw,A,=,-,b,@,=,T +,bl,I,n,-,d,@,=,-,m,A,n,E =,=,=,=,=,=,=,=,+,j,e,=,T +,b,I,=,-,n,@,=,-,pl,a,ts,J -,S,O,=,+,n,),=,-,r,@,=,T =,=,=,=,+,t,e,=,-,r,A,nt,J =,=,=,=,+,t,e,=,-,sx,E,p,J =,=,=,=,=,=,=,=,+,p,I,ls,J =,=,=,=,+,t,e,=,-,w,O,rst,J =,=,=,=,+,t,e,=,-,z,A,k,J =,=,=,=,=,=,=,=,+,b,y,r,T =,=,=,=,=,=,=,=,+,d,I,rk,J =,=,=,=,+,vl,K,=,-,j,@,r,T -,r,a,=,+,f,i,=,-,n,@,=,T =,=,=,=,+,br,K,=,-,m,A,nt,J -,t,@,=,-,k,O,=,-,f,@,r,T -,b,i,=,+,k,i,=,-,n,i,=,T =,=,=,=,+,b,E,=,-,N,@,l,T =,=,=,=,-,f,i,=,+,S,y,=,T +,t,a,=,-,f,@,l,-,m,A,t,J =,=,=,=,=,=,=,=,+,t,K,t,J =,=,=,=,+,t,K,t,-,sxr,I,ft,J -,v,@,r,+,z,I,n,-,s,@,l,T -,=,o,=,+,d,|,r,-,fl,E,s,J =,=,=,=,+,r,a,=,-,f,@,l,T -,f,A,n,-,t,a,=,+,z,i,=,T =,=,=,=,-,k,o,=,+,k,O,n,E =,=,=,=,=,=,=,=,+,h,O,l,E =,=,=,=,+,t,I,n,-,sx,L,t,J =,=,=,=,+,t,I,=,-,p,@,l,T -,d,A,xs,-,p,A,r,-,t,K,=,T =,=,=,=,-,r,A,nt,+,s,u,n,T =,=,=,=,+,t,I,=,-,t,@,l,T =,=,=,=,+,s,),=,-,r,@,=,T =,=,=,=,=,=,=,=,+,t,O,xt,J =,=,=,=,+,h,A,nt,-,f,A,t,J =,=,=,=,=,=,=,=,+,p,E,n,E =,=,=,=,=,=,=,=,+,t,u,r,T +,sp,E,l,-,d,@,=,-,d,o,s,J =,=,=,=,=,=,=,=,+,b,M,t,J -,v,E,x,-,t,@,=,+,r,K,=,T -,l,A,n,+,t,a,=,-,r,@,n,T -,x,@,=,+,l,K,t,-,st,@,r,T -,k,A,p,+,s,y,=,-,l,@,=,T =,=,=,=,+,sxr,e,=,-,w,@,r,T =,=,=,=,+,k,|,=,-,k,@,n,T =,=,=,=,+,x,E,lt,-,s,A,k,J +,k,o,=,-,l,@,=,-,sx,y,r,T =,=,=,=,=,=,=,=,+,w,A,m,E -,d,@,=,-,w,a,=,-,G,@,n,T +,xl,a,=,-,z,@,=,-,b,A,k,J =,=,=,=,+,sx,E,N,-,k,@,l,T =,=,=,=,-,t,O,=,+,n,o,=,T =,=,=,=,+,sn,u,=,-,p,@,r,T =,=,=,=,+,l,A,=,-,s,o,=,T +,k,A,=,-,t,@,=,-,b,E,l,E 
+,t,a,=,-,r,@,m,-,pl,a,t,J +,sp,e,k,-,s,@,l,-,p,O,mp,J =,=,=,=,=,=,=,=,+,s,u,p,J =,=,=,=,-,k,A,=,+,tr,O,l,E =,=,=,=,=,=,=,=,+,b,O,m,E =,=,=,=,+,kl,},=,-,p,@,l,T =,=,=,=,=,=,=,=,+,j,a,p,J =,=,=,=,+,w,o,=,-,n,I,N,E -,d,e,=,-,j,o,=,-,sp,E,l,E =,=,=,=,=,=,=,=,+,z,i,r,T =,=,=,=,+,vl,e,s,-,f,O,rk,J +,k,u,=,-,w,A,x,-,t,@,r,T =,=,=,=,=,=,=,=,+,t,M,w,T =,=,=,=,+,h,A,n,-,d,@,l,T =,=,=,=,=,=,=,=,+,p,L,st,J =,=,=,=,+,kn,A,k,-,w,O,rst,J =,=,=,=,=,=,=,=,+,tr,a,n,T =,=,=,=,-,t,E,=,+,r,K,n,T -,p,a,=,+,r,O,=,-,x,i,=,T =,=,=,=,+,sm,L,=,-,G,@,r,T =,=,=,=,-,f,O,n,+,t,K,n,T =,=,=,=,=,=,=,=,+,=,A,rm,P =,=,=,=,=,=,=,=,+,bl,A,t,J =,=,=,=,=,=,=,=,+,=,},rn,T +,=,o,=,-,v,@,r,-,st,A,p,J +,k,a,=,-,ts,i,=,-,br,i,f,J =,=,=,=,=,=,=,=,+,h,|,l,T =,=,=,=,=,=,=,=,+,pl,a,t,J -,n,I,Ns,-,f,@,r,-,sx,I,l,E -,m,a,=,-,t,@,=,+,l,O,t,T -,=,O,m,+,h,K,=,-,n,I,N,K =,=,=,=,+,t,},rm,-,br,u,k,J =,=,=,=,+,tr,E,x,-,t,@,r,T +,tr,o,=,-,z,@,=,-,p,A,k,J +,bl,O,k,-,n,o,t,-,f,E,l,E =,=,=,=,=,=,=,=,+,r,I,N,E =,=,=,=,=,=,=,=,+,r,K,m,P +,p,o,=,-,d,i,=,-,j,},m,P -,k,o,=,+,l,o,=,-,n,i,=,T =,=,=,=,=,=,=,=,+,l,a,r,T =,=,=,=,+,=,A,=,-,N,@,l,T =,=,=,=,=,=,=,=,+,x,|,r,T =,=,=,=,+,tr,E,m,-,h,L,s,J +,r,a,=,-,d,@,r,-,d,i,r,T -,x,@,=,+,l,I,d,-,b,e,n,T -,b,@,r,+,n,a,=,-,k,@,l,T +,z,E,=,-,l,@,=,-,fl,E,t,J -,=,O,nd,+,b,K,d,-,b,O,rt,J +,s,},=,-,k,@,l,-,G,A,N,E -,tr,i,=,+,b,y,=,-,n,@,=,T =,=,=,=,=,=,=,=,+,t,O,l,E =,=,=,=,+,w,E,rg,-,br,i,f,J =,=,=,=,-,l,i,=,+,b,E,l,E =,=,=,=,=,=,=,=,+,vl,a,=,T +,tr,u,=,-,t,@,l,-,k,I,nt,J =,=,=,=,=,=,=,=,+,kl,A,t,J =,=,=,=,-,sx,A,n,+,d,a,l,T =,=,=,=,+,=,E,=,-,N,@,l,T =,=,=,=,=,=,=,=,+,d,},t,J +,d,L,=,-,v,@,=,-,pl,A,t,J =,=,=,=,+,m,E,=,-,r,i,=,T =,=,=,=,-,k,a,=,+,r,e,l,T =,=,=,=,+,tr,M,w,-,b,u,k,J -,d,A,n,+,s,|,=,-,z,@,=,T =,=,=,=,=,=,=,=,+,n,I,xt,J =,=,=,=,=,=,=,=,+,st,u,p,J +,w,a,=,-,t,@,r,-,G,e,l,T +,t,a,r,-,t,@,=,-,v,O,rk,J =,=,=,=,+,xr,i,=,-,z,@,l,T =,=,=,=,=,=,=,=,+,w,I,l,E -,r,a,=,+,n,O,N,-,k,@,l,T =,=,=,=,+,fr,A,n,-,j,@,=,T -,p,e,=,-,l,i,=,+,k,a,n,T +,t,L,=,-,m,@,=,-,l,I,N,E =,=,=,=,=,=,=,=,+,pr,O,p,J 
=,=,=,=,=,=,=,=,+,w,e,t,J =,=,=,=,+,t,L,n,-,h,L,s,J =,=,=,=,=,=,=,=,+,t,L,n,T =,=,=,=,=,=,=,=,+,z,o,m,P +,d,a,=,-,m,@,s,-,kr,A,ns,J =,=,=,=,+,=,a,rd,-,m,A,n,E =,=,=,=,=,=,=,=,+,f,M,n,T +,sp,E,r,-,m,a,=,-,r,i,t,J -,d,u,=,-,k,@,=,-,m,A,nt,J =,=,=,=,+,b,e,=,-,d,@,l,T +,d,o,t,-,kl,O,=,-,p,@,r,T =,=,=,=,-,p,O,m,+,p,u,n,T +,tw,I,N,-,k,@,=,-,l,I,xt,J =,=,=,=,+,st,A,t,-,st,a,t,J =,=,=,=,+,m,A,r,-,t,@,r,T =,=,=,=,=,=,=,=,+,m,O,nt,J +,p,K,=,-,p,@,=,-,z,e,f,J =,=,=,=,+,kr,K,d,-,b,A,k,J =,=,=,=,+,k,i,=,-,k,@,n,T +,xr,A,s,-,h,|,=,-,v,@,l,T +,=,A,f,-,l,K,=,-,d,I,N,E =,=,=,=,=,=,=,=,+,bl,I,k,J =,=,=,=,+,st,e,=,-,G,@,l,T =,=,=,=,+,k,o,l,-,w,I,t,J +,w,O,r,-,t,@,l,-,m,},ts,J =,=,=,=,=,=,=,=,+,fl,M,s,J =,=,=,=,-,t,a,=,+,bl,E,t,J =,=,=,=,+,z,e,=,-,n,y,w,T =,=,=,=,+,l,o,m,-,br,i,f,J =,=,=,=,+,h,o,j,-,b,e,st,J =,=,=,=,+,=,L,t,-,fl,},xt,J =,=,=,=,=,=,=,=,+,n,},l,E =,=,=,=,-,d,u,=,+,s,|,r,T =,=,=,=,-,m,a,=,+,z,|,r,T -,v,E,=,-,l,@,=,-,r,K,m,P =,=,=,=,+,sm,I,=,-,k,@,l,T =,=,=,=,+,b,O,=,-,b,@,l,T =,=,=,=,+,l,O,=,-,l,i,=,T =,=,=,=,+,kw,a,=,-,k,@,r,T =,=,=,=,=,=,=,=,+,sxr,e,w,T =,=,=,=,=,=,=,=,+,b,A,t,J =,=,=,=,+,b,L,=,-,G,I,N,K =,=,=,=,+,k,O,r,-,z,o,=,T =,=,=,=,=,=,=,=,+,v,a,n,T =,=,=,=,+,st,O,p,-,s,@,l,T +,xr,A,f,-,t,E,m,-,p,@,l,T =,=,=,=,+,w,I,nt,-,fl,a,x,J +,=,A,m,-,t,@,=,-,n,a,r,T +,=,A,f,-,tr,E,k,-,s,@,l,T =,=,=,=,=,=,=,=,+,p,a,=,T -,s,e,=,-,G,@,l,-,st,a,t,J +,k,A,n,-,s,i,=,-,r,K,s,J =,=,=,=,+,l,e,=,-,p,@,l,T =,=,=,=,=,=,=,=,+,pr,I,ns,J =,=,=,=,=,=,=,=,+,l,I,xt,J =,=,=,=,=,=,=,=,+,l,i,s,J -,l,@,=,-,p,a,=,-,p,i,r,T +,n,i,=,-,j,@,=,-,st,O,k,J =,=,=,=,=,=,=,=,+,S,},rt,J =,=,=,=,+,h,E,mts,-,kn,o,p,J -,r,i,=,-,j,e,=,+,t,e,=,T =,=,=,=,+,h,K,=,-,b,@,l,T +,p,u,=,-,d,@,l,-,pr,K,s,J =,=,=,=,-,p,A,r,+,k,i,t,J -,=,A,n,-,tr,@,=,+,d,|,=,T =,=,=,=,-,pr,o,=,+,Gr,A,m,E +,m,i,=,-,m,i,=,-,s,E,t,J =,=,=,=,+,p,o,=,-,t,@,r,T =,=,=,=,+,v,E,xt,-,p,E,t,J =,=,=,=,=,=,=,=,+,f,e,=,T =,=,=,=,+,h,E,=,-,G,@,=,T =,=,=,=,-,m,O,s,+,k,e,=,T =,=,=,=,+,d,O,=,-,b,@,r,T =,=,=,=,=,=,=,=,+,m,u,=,T =,=,=,=,=,=,=,=,+,sx,E,rm,P 
=,=,=,=,+,x,O,r,-,G,@,l,T =,=,=,=,=,=,=,=,+,sm,u,l,T =,=,=,=,+,sx,E,Nk,-,m,A,nt,J =,=,=,=,=,=,=,=,+,sp,A,r,E =,=,=,=,+,sn,E,=,-,b,@,=,T =,=,=,=,+,k,O,=,-,p,@,l,T +,w,i,=,-,k,E,nt,-,h,L,s,J +,sp,a,r,-,b,A,Ng,-,b,u,k,J -,=,e,=,-,l,@,=,+,m,E,nt,J =,=,=,=,+,k,o,=,-,k,@,r,T =,=,=,=,+,v,a,n,-,d,@,l,T +,h,A,l,-,t,@,r,-,tr,L,=,T -,v,@,r,+,fr,I,=,-,s,I,N,E =,=,=,=,+,kr,L,=,-,p,@,r,T +,s,L,=,-,k,@,r,-,sx,E,p,J =,=,=,=,-,p,A,s,+,t,E,l,E =,=,=,=,=,=,=,=,+,sx,},rk,J =,=,=,=,-,j,a,=,+,b,o,=,T =,=,=,=,+,l,A,=,-,d,@,r,T -,tr,A,ns,-,p,a,=,+,r,A,nt,J =,=,=,=,=,=,=,=,+,sl,O,k,J +,r,},n,-,d,@,r,-,l,A,p,J =,=,=,=,+,h,a,=,-,m,@,r,T =,=,=,=,+,m,E,l,-,k,A,n,E =,=,=,=,+,n,i,r,-,b,L,s,J =,=,=,=,-,b,A,Nk,+,r,u,t,J -,v,@,r,+,kl,I,=,-,k,@,r,T =,=,=,=,+,w,A,s,-,kr,K,t,J +,vr,},x,-,t,@,=,-,m,E,s,J +,l,O,=,-,m,@,r,-,h,L,s,J -,=,I,n,-,sp,E,k,+,t,|,r,T +,=,e,=,-,z,@,lz,-,br,},x,E =,=,=,=,-,r,O,n,+,d,e,l,T -,=,A,t,-,f,o,=,+,k,a,t,J =,=,=,=,-,r,@,=,+,s,y,=,T -,v,@,r,+,m,a,=,-,n,I,N,K +,k,E,l,-,d,@,r,-,tr,A,p,J +,pl,A,n,-,t,I,N,-,sx,O,p,J =,=,=,=,+,h,A,lf,-,sl,e,t,J =,=,=,=,=,=,=,=,+,p,O,p,E +,k,a,=,-,s,i,=,-,j,a,=,T =,=,=,=,+,h,K,=,-,h,a,n,T =,=,=,=,-,b,a,=,+,n,i,r,T =,=,=,=,+,spl,I,n,-,t,@,r,T -,h,K,dz,-,b,@,=,-,z,u,k,J =,=,=,=,=,=,=,=,+,vl,A,m,E =,=,=,=,=,=,=,=,+,p,o,s,J =,=,=,=,=,=,=,=,+,t,e,=,T +,k,I,=,-,k,@,r,-,v,I,s,J =,=,=,=,+,t,O,=,-,p,@,r,T =,=,=,=,+,s,a,=,-,t,@,r,T +,x,a,=,-,r,@,n,-,kl,O,s,J =,=,=,=,=,=,=,=,+,d,L,k,J =,=,=,=,+,b,e,=,-,v,I,N,E -,v,@,r,+,v,E,r,-,s,I,N,K -,z,@,=,-,k,e,=,-,v,@,r,T =,=,=,=,+,kl,o,s,-,t,@,r,T =,=,=,=,=,=,=,=,+,k,y,r,T =,=,=,=,-,pj,E,=,+,r,o,=,T =,=,=,=,+,t,O,r,-,t,@,l,T +,s,i,=,-,n,@,=,-,l,I,xt,J =,=,=,=,-,v,@,r,+,z,L,m,P =,=,=,=,+,h,M,=,-,w,@,r,T =,=,=,=,+,r,o,=,-,m,@,r,T =,=,=,=,=,=,=,=,+,vl,|,x,J -,t,@,=,-,b,@,=,-,r,I,xt,J =,=,=,=,-,pr,i,=,+,j,e,l,T =,=,=,=,+,kw,I,s,-,p,@,l,T =,=,=,=,+,f,y,=,-,G,a,=,T =,=,=,=,=,=,=,=,+,k,a,=,T =,=,=,=,-,k,A,S,+,n,e,=,T +,p,u,=,-,z,@,=,-,l,L,k,J =,=,=,=,+,v,|,=,-,l,@,n,T =,=,=,=,+,v,O,lk,-,spr,o,k,J =,=,=,=,+,zw,E,=,-,N,@,l,T 
=,=,=,=,+,v,o,r,-,d,e,l,T =,=,=,=,-,p,A,n,+,s,e,=,T =,=,=,=,+,kr,A,=,-,b,@,l,T =,=,=,=,=,=,=,=,+,v,K,l,T =,=,=,=,+,p,A,n,-,t,@,r,T =,=,=,=,=,=,=,=,+,z,E,t,J -,p,a,=,-,r,a,=,+,b,o,l,T +,b,e,n,-,l,I,=,-,x,a,m,P =,=,=,=,+,=,L,d,-,b,M,w,T +,v,I,=,-,N,@,r,-,d,u,k,J =,=,=,=,+,f,i,=,-,d,@,l,T =,=,=,=,=,=,=,=,+,b,I,l,E =,=,=,=,+,sx,o,l,-,sxr,I,ft,J +,r,o,=,-,z,@,=,-,l,a,r,T -,k,A,=,+,t,e,=,-,d,@,r,T =,=,=,=,+,h,a,r,-,l,O,k,J =,=,=,=,=,=,=,=,+,kl,I,N,E =,=,=,=,+,k,I,n,-,d,u,k,J =,=,=,=,+,v,E,rv,-,b,O,m,E =,=,=,=,=,=,=,=,+,h,a,l,T +,s,i,=,-,t,@,=,-,k,a,rt,J =,=,=,=,+,b,A,l,-,b,u,k,J =,=,=,=,-,h,y,=,+,m,|,r,T +,=,O,p,-,sxr,K,v,-,b,u,k,J =,=,=,=,+,n,e,=,-,G,@,n,T +,vl,I,n,-,d,@,r,-,d,A,s,J +,vr,i,n,-,d,@,=,-,pr,K,s,J =,=,=,=,=,=,=,=,+,m,A,f,J -,p,E,=,+,r,O,N,-,k,a,rt,J =,=,=,=,=,=,=,=,+,pl,e,=,T =,=,=,=,-,p,O,n,+,t,O,n,E =,=,=,=,+,sl,K,=,-,pl,a,t,J -,v,@,r,+,t,E,l,-,=,y,r,T +,h,O,n,-,d,@,=,-,b,a,n,T +,p,A,t,-,f,I,n,-,d,@,r,T -,G,@,=,-,v,A,=,-,N,@,r,T =,=,=,=,=,=,=,=,+,t,I,k,J +,=,M,=,-,t,o,=,-,p,E,t,J =,=,=,=,+,v,E,=,-,r,@,l,T =,=,=,=,+,st,E,m,-,br,i,f,J +,t,O,m,-,b,o,=,-,l,a,=,T =,=,=,=,=,=,=,=,+,xr,O,nt,J =,=,=,=,+,vl,i,=,-,r,I,N,K =,=,=,=,+,vl,i,=,-,r,I,N,E -,m,@,n,-,l,e,=,-,v,I,N,K =,=,=,=,+,b,O,t,-,s,I,N,E =,=,=,=,+,p,e,=,-,t,@,r,T +,b,I,=,-,n,@,=,-,pr,E,t,J =,=,=,=,+,sx,e,=,-,m,@,l,T =,=,=,=,+,kl,a,=,-,v,@,r,T =,=,=,=,=,=,=,=,+,vl,O,k,J =,=,=,=,+,vl,O,n,-,d,@,r,T =,=,=,=,-,b,a,=,+,r,e,l,T =,=,=,=,+,h,u,=,-,p,@,l,T -,t,K,=,-,d,@,=,-,b,u,k,J +,t,i,=,-,ts,i,=,-,b,u,k,J =,=,=,=,=,=,=,=,+,x,e,r,T =,=,=,=,+,vr,K,=,-,st,@,r,T =,=,=,=,-,p,i,=,+,j,O,n,E =,=,=,=,=,=,=,=,+,f,u,f,J =,=,=,=,+,v,u,d,-,b,e,n,T +,v,u,=,-,t,@,=,-,b,A,Nk,J +,w,i,=,-,G,@,=,-,l,i,t,J =,=,=,=,+,p,},=,-,k,@,l,T =,=,=,=,=,=,=,=,+,fl,I,k,J =,=,=,=,=,=,=,=,+,p,K,p,J =,=,=,=,-,S,a,=,+,l,O,t,J =,=,=,=,+,n,e,=,-,v,@,l,T =,=,=,=,+,h,A,s,-,p,@,l,T +,=,A,f,-,l,E,=,-,G,@,r,T =,=,=,=,-,x,@,=,+,l,},k,J =,=,=,=,+,x,i,=,-,b,@,l,T =,=,=,=,=,=,=,=,+,b,A,l,E =,=,=,=,=,=,=,=,+,h,A,lf,J =,=,=,=,-,v,o,r,+,=,A,f,J 
=,=,=,=,=,=,=,=,+,m,E,s,J =,=,=,=,+,sx,a,f,-,w,O,nt,J =,=,=,=,=,=,=,=,+,r,E,n,E =,=,=,=,-,s,i,=,+,t,a,t,J =,=,=,=,=,=,=,=,+,l,i,f,J =,=,=,=,+,b,L,=,-,d,@,l,T -,=,a,=,+,l,A,rm,-,b,E,l,E +,d,i,n,-,t,a,=,-,f,@,l,T +,G,e,t,-,m,@,=,-,n,i,t,J =,=,=,=,+,=,O,=,-,p,@,r,T =,=,=,=,=,=,=,=,+,br,K,n,T =,=,=,=,=,=,=,=,+,v,I,lt,J +,z,o,=,-,m,@,r,-,h,L,s,J -,v,@,r,+,t,o,=,-,n,I,N,K =,=,=,=,+,h,u,k,-,pl,a,ts,J =,=,=,=,+,p,},nd,-,br,o,t,J =,=,=,=,+,=,K,=,-,s,A,k,J -,r,i,=,+,j,o,=,-,d,@,=,T +,vr,a,=,-,G,@,=,-,l,K,st,J =,=,=,=,=,=,=,=,+,b,o,m,P =,=,=,=,+,v,E,s,-,t,I,N,K -,t,@,=,-,G,@,=,-,z,I,xt,J =,=,=,=,=,=,=,=,+,b,E,l,E =,=,=,=,+,w,e,g,-,b,u,k,J =,=,=,=,-,m,A,=,+,kr,e,l,T =,=,=,=,-,p,A,=,+,tr,o,n,T =,=,=,=,+,p,A,=,-,d,@,l,T +,k,I,n,-,d,@,r,-,b,E,t,J =,=,=,=,=,=,=,=,+,vr,I,nt,J +,s,L,=,-,k,@,r,-,b,e,st,J +,k,o,=,-,l,i,=,-,br,i,=,T =,=,=,=,+,sx,I,=,-,m,@,l,T =,=,=,=,=,=,=,=,+,kl,M,n,T =,=,=,=,+,k,E,l,-,d,@,r,T =,=,=,=,+,xl,u,j,-,b,L,s,J =,=,=,=,=,=,=,=,+,v,y,r,T =,=,=,=,-,p,a,=,+,p,I,l,E =,=,=,=,=,=,=,=,+,w,e,=,T =,=,=,=,+,w,a,G,-,br,i,f,J -,t,e,=,-,l,@,=,-,f,o,n,T =,=,=,=,-,f,O,r,+,n,L,s,J =,=,=,=,+,v,E,n,-,st,@,r,T =,=,=,=,+,h,A,=,-,N,@,r,T =,=,=,=,+,w,},rx,-,st,O,k,J +,=,e,=,-,r,@,=,-,b,a,n,T -,kw,i,=,+,t,A,n,-,s,i,=,T +,b,A,=,-,b,@,l,-,pr,a,t,J =,=,=,=,+,p,},nd,-,b,a,rt,J +,n,a,=,-,G,@,l,-,t,A,N,E -,=,a,=,-,kw,a,=,+,r,E,l,E -,m,E,k,-,s,i,=,+,k,a,n,T =,=,=,=,-,s,i,=,+,tr,u,n,T -,t,e,=,+,j,a,=,-,t,@,r,T =,=,=,=,+,w,a,=,-,s,@,m,P +,b,A,k,-,sx,o,=,-,t,@,l,T -,b,A,=,-,t,@,=,+,r,K,=,T -,t,E,=,+,r,i,=,-,n,@,=,T =,=,=,=,=,=,=,=,+,p,a,p,J =,=,=,=,=,=,=,=,+,tr,},t,J =,=,=,=,+,b,K,=,-,b,@,l,T =,=,=,=,=,=,=,=,+,k,A,ns,J =,=,=,=,=,=,=,=,+,spr,O,N,E =,=,=,=,+,w,a,=,-,t,@,r,T +,w,a,=,-,t,@,r,-,t,O,xt,J =,=,=,=,+,m,K,=,-,t,@,r,T =,=,=,=,+,z,E,n,-,d,@,r,T =,=,=,=,+,kn,I,=,-,k,@,r,T +,p,I,=,-,p,@,=,-,l,I,N,K =,=,=,=,=,=,=,=,+,w,E,p,J =,=,=,=,+,vr,K,=,-,k,a,rt,J =,=,=,=,=,=,=,=,+,sn,L,t,J =,=,=,=,+,l,K,k,-,h,L,s,J -,d,y,w,-,n,a,rz,-,b,O,t,J =,=,=,=,+,w,e,f,-,s,@,l,T =,=,=,=,+,vl,A,=,-,d,@,r,T 
+,=,A,l,-,f,a,=,-,d,e,l,T =,=,=,=,+,w,E,rk,-,m,A,nt,J =,=,=,=,=,=,=,=,+,kr,a,j,T =,=,=,=,=,=,=,=,+,d,E,l,E +,t,I,k,-,t,a,=,-,f,@,l,T =,=,=,=,+,st,E,m,-,p,@,l,T =,=,=,=,+,w,e,r,-,m,A,n,E +,h,A,nt,-,sx,u,N,-,k,A,st,J =,=,=,=,+,dr,},=,-,kn,O,p,J +,d,O,k,-,t,@,r,-,s,o,n,T =,=,=,=,-,k,a,=,+,d,e,=,T =,=,=,=,+,=,O,m,-,b,@,r,T =,=,=,=,=,=,=,=,+,m,O,l,E =,=,=,=,=,=,=,=,+,sx,e,l,T +,str,K,k,-,=,O,r,-,k,E,st,J =,=,=,=,=,=,=,=,+,w,E,x,J -,st,o,=,-,k,@,=,+,r,K,=,T =,=,=,=,-,k,A,=,+,s,K,=,T =,=,=,=,+,p,u,=,-,j,@,r,T =,=,=,=,=,=,=,=,+,w,E,n,E =,=,=,=,-,b,u,=,+,Z,i,=,T +,w,E,=,-,G,e,=,-,v,@,r,T +,sx,i,r,-,=,K,=,-,l,A,nt,J =,=,=,=,=,=,=,=,+,w,I,x,J -,=,E,n,-,v,@,=,+,l,O,p,J =,=,=,=,+,v,a,=,-,d,@,r,T =,=,=,=,+,k,I,=,-,k,@,r,T =,=,=,=,+,v,E,n,-,d,@,l,T +,G,a,=,-,r,@,=,-,p,|,k,J +,sx,e,=,-,d,@,l,-,k,A,p,J +,p,K,=,-,p,@,=,-,d,O,p,J =,=,=,=,=,=,=,=,+,pr,E,t,J =,=,=,=,=,=,=,=,+,k,i,r,T =,=,=,=,=,=,=,=,+,k,K,k,J =,=,=,=,=,=,=,=,+,pl,A,n,E =,=,=,=,=,=,=,=,+,=,i,ts,J =,=,=,=,+,f,a,=,-,l,i,=,T +,w,i,=,-,r,o,k,-,sx,I,p,J -,d,A,xs,-,k,a,=,-,d,o,=,T =,=,=,=,=,=,=,=,+,l,O,t,T =,=,=,=,=,=,=,=,+,sp,I,n,E =,=,=,=,-,=,a,=,+,j,L,n,T +,tr,I,l,-,h,a,r,-,d,i,r,T =,=,=,=,+,w,K,n,-,=,A,p,J =,=,=,=,=,=,=,=,+,sp,},l,E -,v,O,nt,-,x,@,=,-,b,E,t,J =,=,=,=,=,=,=,=,+,w,K,s,J =,=,=,=,=,=,=,=,+,sxr,o,t,J -,m,a,=,-,r,O,=,+,k,a,n,T =,=,=,=,+,p,A,x,-,t,@,r,T +,r,|,=,-,z,@,=,-,k,o,p,J -,l,e,=,-,j,},m,-,l,I,xt,J +,d,i,=,-,v,@,=,-,p,o,rt,J =,=,=,=,+,h,A,l,-,t,@,r,T +,w,I,m,-,p,@,r,-,d,i,r,T =,=,=,=,+,w,I,m,-,p,@,r,T =,=,=,=,+,=,O,=,-,f,@,r,T =,=,=,=,+,sx,L,=,-,v,@,r,T -,r,e,=,-,f,y,=,+,Z,e,=,T =,=,=,=,=,=,=,=,+,b,O,rt,J =,=,=,=,=,=,=,=,+,d,M,w,T +,r,A,s,-,f,i,=,-,j,o,l,T =,=,=,=,=,=,=,=,+,=,K,=,T =,=,=,=,=,=,=,=,+,l,o,n,T +,sx,K,n,-,w,E,r,-,p,@,r,T =,=,=,=,+,f,a,=,-,b,@,l,T =,=,=,=,=,=,=,=,+,b,i,t,J +,w,I,=,-,s,@,=,-,w,A,s,J =,=,=,=,+,sx,O,=,-,m,@,l,T =,=,=,=,=,=,=,=,+,w,I,t,J =,=,=,=,=,=,=,=,+,k,},rk,J =,=,=,=,-,t,A,m,+,b,u,r,T =,=,=,=,+,tr,a,=,-,l,i,=,T =,=,=,=,+,m,u,=,-,d,@,r,T +,p,O,=,-,p,u,=,-,r,i,=,T 
=,=,=,=,-,p,A,s,+,t,K,=,T =,=,=,=,+,z,A,n,-,t,a,rt,J =,=,=,=,-,p,A,r,+,d,u,n,T -,p,a,=,-,l,@,=,+,t,o,=,T =,=,=,=,+,w,E,rg,-,b,u,k,J +,=,a,=,-,p,@,=,-,n,o,t,J -,l,@,=,+,f,o,m,-,b,O,t,J +,m,u,=,-,d,@,rs,-,k,I,nt,J -,k,a,=,-,p,i,=,+,t,e,l,T =,=,=,=,+,t,K,=,-,d,I,N,E +,z,a,=,-,d,@,l,-,t,A,s,J =,=,=,=,+,k,I,nt,-,fr,M,w,T -,zw,a,=,-,G,@,=,+,r,I,n,E +,w,E,n,-,t,@,l,-,tr,A,p,J =,=,=,=,+,h,o,ft,-,kn,I,k,J +,z,M,t,-,l,e,=,-,p,@,l,T =,=,=,=,=,=,=,=,+,m,L,s,J =,=,=,=,=,=,=,=,+,=,I,ks,J -,l,@,=,-,v,I,=,-,N,@,r,T +,j,O,=,-,G,@,rt,-,pl,A,nt,J =,=,=,=,=,=,=,=,+,z,E,t,J +,kr,o,s,-,fl,I,n,-,d,@,r,T -,br,M,=,-,w,@,=,+,r,K,=,T +,bl,u,=,-,m,@,=,-,st,A,l,E +,sp,O,n,-,z,@,=,-,n,E,t,J =,=,=,=,=,=,=,=,+,z,a,k,J =,=,=,=,+,kr,A,=,-,b,@,r,T =,=,=,=,=,=,=,=,+,t,K,l,T =,=,=,=,+,br,A,nt,-,pl,a,t,J =,=,=,=,=,=,=,=,+,Z,E,l,E =,=,=,=,+,x,i,=,-,t,@,r,T =,=,=,=,+,br,K,=,-,d,@,l,T =,=,=,=,+,kl,|,=,-,t,@,r,T =,=,=,=,+,z,A,k,-,sx,a,r,T -,w,o,r,-,d,@,=,-,b,u,k,J =,=,=,=,=,=,=,=,+,n,I,ks,J =,=,=,=,=,=,=,=,+,z,A,lm,P +,=,A,=,-,p,@,=,-,l,a,r,T +,h,a,=,-,z,@,=,-,sl,a,p,J =,=,=,=,=,=,=,=,+,l,O,t,J =,=,=,=,=,=,=,=,+,b,a,j,T =,=,=,=,-,x,a,=,+,l,K,=,T =,=,=,=,+,z,A,=,-,N,@,r,T =,=,=,=,=,=,=,=,+,spr,e,w,T =,=,=,=,-,=,M,=,+,G,},rk,J =,=,=,=,+,m,e,=,-,r,@,l,T =,=,=,=,+,=,e,=,-,t,E,nt,J =,=,=,=,+,sn,E,=,-,p,@,r,T =,=,=,=,-,f,O,r,+,t,L,n,T =,=,=,=,+,x,e,l,-,b,L,k,J -,s,i,=,-,l,u,=,+,w,E,t,J =,=,=,=,=,=,=,=,+,z,E,x,J =,=,=,=,+,r,I,N,-,b,a,rt,J =,=,=,=,=,=,=,=,+,kn,|,s,J =,=,=,=,+,h,L,d,-,m,O,nt,J =,=,=,=,+,b,o,G,-,b,A,l,E =,=,=,=,+,v,O,lG,-,br,i,f,J =,=,=,=,+,b,o,=,-,d,@,m,P =,=,=,=,=,=,=,=,+,z,K,l,T +,n,a,=,-,k,o,=,-,m,@,r,T +,m,L,=,-,z,@,=,-,tr,A,p,J =,=,=,=,=,=,=,=,+,h,a,k,J =,=,=,=,-,gl,a,=,+,s,e,=,T =,=,=,=,+,n,o,=,-,t,a,=,T =,=,=,=,-,k,u,=,+,p,O,n,E =,=,=,=,+,r,O,=,-,b,@,r,T +,m,a,=,-,z,@,=,-,h,a,k,J -,sx,a,=,-,p,y,=,+,l,i,r,T +,z,i,=,-,k,@,=,-,br,i,f,J =,=,=,=,+,b,E,t,-,st,e,=,T =,=,=,=,+,t,I,=,-,k,@,r,T =,=,=,=,=,=,=,=,+,z,i,l,T +,r,L,=,-,t,@,r,-,st,},k,J =,=,=,=,-,w,a,=,+,l,I,n,E -,m,A,N,-,k,@,=,+,m,E,nt,J 
=,=,=,=,=,=,=,=,+,kr,A,m,E =,=,=,=,+,m,I,s,-,p,@,l,T =,=,=,=,=,=,=,=,+,z,I,n,E =,=,=,=,+,b,u,n,-,d,@,r,T +,br,u,=,-,k,@,=,-,m,A,n,E =,=,=,=,+,b,},r,-,G,@,r,T =,=,=,=,+,z,u,=,-,m,@,r,T =,=,=,=,=,=,=,=,+,kw,a,l,T =,=,=,=,+,m,K,=,-,bl,u,m,P =,=,=,=,=,=,=,=,+,z,A,lf,J =,=,=,=,=,=,=,=,+,h,i,l,T =,=,=,=,=,=,=,=,+,p,u,p,J =,=,=,=,=,=,=,=,+,sx,a,p,J -,m,@,r,-,k,o,=,-,n,I,N,K =,=,=,=,+,=,i,=,-,t,@,m,P =,=,=,=,=,=,=,=,+,b,u,r,T =,=,=,=,+,m,A,nz,-,b,A,k,J =,=,=,=,=,=,=,=,+,r,O,nt,J +,f,I,l,-,t,@,r,-,z,A,k,J +,vr,},x,-,t,@,=,-,sl,a,=,T =,=,=,=,=,=,=,=,+,k,A,m,E =,=,=,=,+,pr,i,s,-,t,@,r,T +,=,A,f,-,t,E,l,-,r,K,m,P =,=,=,=,=,=,=,=,+,k,A,nt,J =,=,=,=,=,=,=,=,+,kr,a,m,P +,l,|,=,-,t,@,r,-,pr,a,t,J -,k,O,r,-,p,E,n,+,d,y,=,T =,=,=,=,=,=,=,=,+,p,A,l,E =,=,=,=,=,=,=,=,+,t,a,l,T =,=,=,=,=,=,=,=,+,k,A,t,J =,=,=,=,+,z,L,=,-,G,@,r,T =,=,=,=,=,=,=,=,+,p,i,s,J =,=,=,=,-,p,),s,+,n,e,=,T -,b,u,r,-,d,@,=,+,r,K,=,T =,=,=,=,=,=,=,=,+,tr,K,l,T =,=,=,=,-,=,a,=,+,G,E,nt,J =,=,=,=,+,zw,A,=,-,b,@,r,T +,v,I,=,-,N,@,r,-,l,I,N,E =,=,=,=,+,st,e,=,-,k,@,r,T =,=,=,=,+,p,i,=,-,p,a,=,T =,=,=,=,+,=,a,=,-,k,@,r,T =,=,=,=,=,=,=,=,+,b,L,k,J =,=,=,=,-,k,A,n,+,t,o,r,T -,p,e,=,-,d,E,=,+,st,A,l,E -,l,e,=,+,j,A,n,-,d,@,r,T =,=,=,=,=,=,=,=,+,l,K,f,J =,=,=,=,=,=,=,=,+,t,O,n,E -,p,O,r,+,t,i,=,-,r,a,m,P =,=,=,=,=,=,=,=,+,z,e,m,P =,=,=,=,=,=,=,=,+,=,L,=,T =,=,=,=,=,=,=,=,+,b,e,=,T =,=,=,=,+,b,a,rd,-,m,A,n,E -,b,A,N,+,k,E,t,-,h,A,m,E =,=,=,=,+,dr,O,=,-,p,@,l,T =,=,=,=,+,zw,E,=,-,l,I,N,E =,=,=,=,+,br,K,=,-,z,@,l,T =,=,=,=,+,l,I,n,-,d,@,=,T -,=,E,ks,-,p,o,=,+,s,e,=,T =,=,=,=,-,x,A,r,+,n,a,l,T =,=,=,=,=,=,=,=,+,h,o,rn,T +,k,o,=,-,n,I,Ns,-,k,a,rs,J LanguageMachines-timbl-642727d/demos/dimin.train000077500000000000000000002335611451477526200215730ustar00rootroot00000000000000=,=,=,=,+,k,e,=,-,r,@,l,T =,=,=,=,-,fr,i,=,+,z,I,n,E =,=,=,=,=,=,=,=,+,sn,},f,J =,=,=,=,+,l,I,=,-,x,a,m,P =,=,=,=,=,=,=,=,+,tr,A,p,J =,=,=,=,+,k,E,rst,-,k,I,nt,J +,r,i,=,-,j,a,=,-,b,e,lt,J =,=,=,=,-,v,I,n,+,j,E,t,J -,b,O,=,+,t,i,=,-,n,@,=,T 
+,b,A,k,-,st,O,=,-,p,@,r,T =,=,=,=,=,=,=,=,+,p,L,=,T +,p,a,=,-,p,u,=,-,w,a,=,T =,=,=,=,+,bl,M,w,-,k,e,l,T +,zw,A,rt,-,k,O,p,-,n,O,n,E -,t,a,=,-,b,u,=,+,r,E,t,J +,h,O,s,-,p,i,=,-,t,a,l,T =,=,=,=,=,=,=,=,+,kr,L,t,J =,=,=,=,=,=,=,=,+,v,A,xt,J =,=,=,=,=,=,=,=,+,=,a,j,T =,=,=,=,+,r,O,=,-,t,I,N,K +,b,a,rd,-,br,A,n,-,d,@,r,T -,sp,a,r,-,b,A,Ng,-,b,u,k,J =,=,=,=,-,k,A,s,+,t,e,l,T =,=,=,=,-,s,a,=,+,v,o,j,T =,=,=,=,+,m,I,=,-,d,A,x,J =,=,=,=,=,=,=,=,+,=,E,m,E -,t,e,=,-,k,@,m,-,b,u,k,J =,=,=,=,+,zw,a,=,-,G,@,r,T +,x,i,=,-,r,o,=,-,b,u,k,J -,p,@,=,+,t,i,=,-,t,@,r,T -,x,@,=,+,spr,E,ks,-,xr,u,p,J =,=,=,=,+,b,K,=,-,b,a,n,T +,z,a,=,-,m,@,=,-,l,I,N,E +,r,O,=,-,m,@,l,-,z,o,=,T =,=,=,=,+,sx,e,r,-,m,E,s,J =,=,=,=,+,sp,e,l,-,h,L,s,J =,=,=,=,+,st,e,=,-,k,@,l,T =,=,=,=,=,=,=,=,+,sp,u,l,T =,=,=,=,=,=,=,=,+,r,A,t,J +,br,O,=,-,d,@,=,-,l,A,p,J =,=,=,=,=,=,=,=,+,b,o,x,J =,=,=,=,+,h,e,=,-,v,@,l,T -,t,@,r,-,h,A,ndz,-,b,e,n,T -,=,A,x,-,t,@,r,+,=,O,m,E -,p,O,r,+,t,i,rs,-,h,O,k,J =,=,=,=,+,w,I,n,-,d,@,l,T =,=,=,=,+,h,A,nd,-,w,E,rk,J =,=,=,=,=,=,=,=,+,=,},k,J =,=,=,=,=,=,=,=,+,=,a,p,J =,=,=,=,=,=,=,=,+,d,e,rn,T +,w,A,n,-,d,@,=,-,l,I,N,E =,=,=,=,-,=,o,=,+,d,|,r,T -,p,o,=,-,p,y,=,+,l,i,r,T =,=,=,=,=,=,=,=,+,j,A,m,E +,=,A,v,-,b,e,l,-,d,I,N,K =,=,=,=,=,=,=,=,+,m,A,t,J -,r,@,=,-,b,@,=,-,dr,K,f,J -,l,i,=,-,b,@,=,-,dr,K,f,J =,=,=,=,+,k,e,=,-,G,@,l,T =,=,=,=,=,=,=,=,+,=,E,ts,J -,x,i,t,-,s,@,l,-,d,i,r,T +,=,o,=,-,l,i,=,-,f,A,nt,J +,h,L,s,-,h,M,=,-,d,I,N,K +,h,K,=,-,d,@,=,-,t,L,n,T =,=,=,=,=,=,=,=,+,b,E,s,J =,=,=,=,=,=,=,=,+,j,A,n,T -,p,A,s,-,p,A,r,+,t,u,=,J =,=,=,=,=,=,=,=,+,k,i,k,J =,=,=,=,+,=,A,f,-,st,A,p,J =,=,=,=,-,x,@,=,+,l,L,t,J -,=,E,n,-,t,I,Nz,-,br,i,f,J +,=,A,f,-,t,E,l,-,v,E,rs,J =,=,=,=,+,s,I,r,-,k,@,l,T +,l,y,k,-,s,@,=,-,br,o,t,J -,pr,o,=,-,s,@,=,+,d,e,=,T -,p,a,=,+,r,a,=,-,d,@,=,T -,=,a,=,+,G,E,n,-,d,a,=,T =,=,=,=,+,v,E,d,-,b,O,l,E +,=,e,=,-,z,@,lz,-,br,},x,J =,=,=,=,=,=,=,=,+,kr,L,m,P =,=,=,=,+,r,o,t,-,st,a,rt,J =,=,=,=,=,=,=,=,+,k,O,st,J -,p,A,=,-,v,I,l,+,j,u,n,T +,=,A,=,-,k,@,r,-,m,A,n,E 
-,=,O,nt,-,st,e,=,-,k,I,N,K +,m,I,=,-,d,A,x,-,sl,a,p,J =,=,=,=,+,s,E,l,-,d,@,r,T +,r,I,=,-,t,@,=,-,b,u,k,J =,=,=,=,+,b,o,=,-,t,O,xt,J =,=,=,=,=,=,=,=,+,st,A,N,E =,=,=,=,+,v,o,r,-,v,A,l,E =,=,=,=,-,p,I,n,+,s,E,t,J +,=,I,n,-,d,e,=,-,l,I,N,E =,=,=,=,+,p,a,=,-,r,@,l,T =,=,=,=,+,k,o,j,-,h,O,nt,J =,=,=,=,+,r,L,=,-,k,@,r,T =,=,=,=,=,=,=,=,+,dr,i,=,T +,h,A,rt,-,p,A,=,-,S,E,nt,J +,z,i,k,-,=,a,=,-,v,O,nt,J -,st,@,r,+,d,A,=,-,m,@,r,T -,r,A,=,+,p,O,rd,-,b,u,k,J =,=,=,=,+,l,O,=,-,d,@,r,T -,l,},s,-,t,o,=,-,r,@,n,T +,=,O,n,-,d,@,r,-,j,},rk,J =,=,=,=,=,=,=,=,+,=,a,rt,J +,=,A,n,-,t,i,=,-,d,e,l,T -,=,O,n,+,d,E,=,-,k,I,N,K -,v,@,r,-,bl,K,f,-,s,@,l,T =,=,=,=,+,v,O,lks,-,t,L,n,T =,=,=,=,+,x,i,=,-,x,@,l,T =,=,=,=,=,=,=,=,+,tr,E,m,E =,=,=,=,=,=,=,=,+,h,},t,J =,=,=,=,+,sp,a,r,-,b,},s,J -,=,A,r,+,t,i,=,-,k,@,l,T +,s,},=,-,k,@,=,-,l,a,r,T =,=,=,=,-,f,a,=,+,s,E,t,J +,m,i,=,-,l,i,=,-,f,e,st,J =,=,=,=,+,r,K,=,-,zw,e,p,J =,=,=,=,=,=,=,=,+,sx,},p,J =,=,=,=,=,=,=,=,+,kn,i,=,T +,j,a,=,-,G,@,rs,-,h,u,t,J =,=,=,=,+,x,},l,-,d,@,n,T +,b,I,=,-,t,@,r,-,pr,a,t,J =,=,=,=,+,n,e,=,-,r,I,N,K +,v,E,r,-,k,o,=,-,pr,a,t,J =,=,=,=,=,=,=,=,+,zw,I,k,J =,=,=,=,+,h,E,lm,-,st,K,l,T =,=,=,=,=,=,=,=,+,st,|,n,T =,=,=,=,+,=,M,=,-,t,o,=,T =,=,=,=,=,=,=,=,+,pl,O,k,J =,=,=,=,-,sp,a,=,+,l,i,r,T =,=,=,=,+,s,u,=,-,b,A,l,E =,=,=,=,=,=,=,=,+,t,e,f,J -,k,o,=,-,m,@,=,-,l,I,N,E =,=,=,=,+,w,O,rst,-,h,o,rn,T =,=,=,=,+,d,i,n,-,st,@,r,T -,m,A,=,-,S,i,=,-,n,@,=,T =,=,=,=,=,=,=,=,+,=,e,n,T =,=,=,=,=,=,=,=,+,sx,A,lm,P +,=,A,s,-,f,A,ld,-,b,a,n,T =,=,=,=,+,r,e,=,-,G,@,n,T -,k,o,=,-,l,O,m,+,b,K,n,T +,h,o,=,-,n,I,N,-,b,A,k,J -,s,@,=,-,l,a,=,+,r,K,=,T =,=,=,=,+,l,o,n,-,w,E,t,J =,=,=,=,+,b,a,rs,-,f,I,s,J =,=,=,=,=,=,=,=,+,kn,},l,E +,b,o,m,-,kr,L,=,-,p,@,r,T =,=,=,=,=,=,=,=,+,z,},s,J =,=,=,=,=,=,=,=,+,z,o,j,T =,=,=,=,+,zw,a,=,-,l,y,w,T -,m,@,=,-,vl,i,x,-,t,L,x,J +,p,u,=,-,j,@,r,-,d,O,ns,J =,=,=,=,-,tw,A,=,+,l,E,t,J +,=,K,s,-,f,A,=,-,br,i,k,J -,str,a,=,-,p,O,n,+,t,K,n,T +,G,a,=,-,Z,@,=,-,m,A,nt,J =,=,=,=,+,w,K,n,-,st,A,t,J 
+,b,a,=,-,k,@,r,-,pr,a,t,J =,=,=,=,+,fl,A,=,-,p,@,r,T -,v,@,r,+,fr,I,=,-,s,I,N,K =,=,=,=,=,=,=,=,+,kl,},s,J +,f,A,l,-,b,a,=,-,l,a,=,T -,=,A,n,-,tr,@,=,+,m,E,=,T =,=,=,=,=,=,=,=,+,t,O,r,E -,r,O,=,-,x,i,=,+,j,a,n,T =,=,=,=,=,=,=,=,+,p,A,t,J =,=,=,=,=,=,=,=,+,m,K,n,T +,G,e,t,-,m,@,=,-,n,i,t,J -,b,A,=,+,l,E,t,-,r,O,k,J =,=,=,=,-,k,a,=,+,f,e,=,T =,=,=,=,=,=,=,=,+,h,u,n,T -,x,@,=,+,h,o,r,-,b,e,n,T =,=,=,=,+,x,e,l,-,b,O,rst,J =,=,=,=,+,k,u,=,-,p,@,l,T -,s,o,=,+,l,e,r,-,b,A,Nk,J =,=,=,=,+,k,O,s,-,t,@,r,T +,t,I,l,-,b,@,=,-,r,i,=,T +,h,a,=,-,v,@,r,-,=,a,l,T =,=,=,=,=,=,=,=,+,m,o,r,T +,k,I,n,-,d,@,r,-,b,A,t,J =,=,=,=,-,=,a,=,+,b,e,l,T =,=,=,=,=,=,=,=,+,r,|,=,T =,=,=,=,+,b,e,=,-,v,@,r,T =,=,=,=,=,=,=,=,+,b,e,st,J =,=,=,=,=,=,=,=,+,br,},x,E =,=,=,=,=,=,=,=,+,k,I,st,J =,=,=,=,+,dr,E,m,-,p,@,l,T -,t,i,=,-,s,@,=,+,r,i,=,T =,=,=,=,=,=,=,=,+,sp,i,=,T =,=,=,=,-,b,a,=,+,z,I,n,E =,=,=,=,+,p,O,l,-,d,@,r,T =,=,=,=,=,=,=,=,+,v,u,r,T -,r,@,=,+,s,E,bd,-,br,i,f,J =,=,=,=,+,b,},n,-,z,I,N,K -,m,E,n,-,t,@,=,+,n,e,=,T =,=,=,=,+,dr,a,j,-,k,O,lk,J +,k,I,n,-,d,@,r,-,h,A,nt,J =,=,=,=,+,z,A,nd,-,=,o,x,J -,t,i,=,+,k,y,=,-,l,@,=,T =,=,=,=,=,=,=,=,+,dr,},p,J =,=,=,=,+,b,E,t,-,s,K,l,T +,st,y,=,-,d,i,=,-,j,o,=,T =,=,=,=,=,=,=,=,+,b,e,lt,J -,n,a,=,-,r,@,=,-,b,a,n,T =,=,=,=,+,b,O,rst,-,kl,O,nt,J =,=,=,=,=,=,=,=,+,b,e,r,T =,=,=,=,-,l,i,=,+,vr,K,=,T +,l,A,n,-,d,M,=,-,w,@,r,T +,p,I,=,-,l,@,=,-,d,o,s,J -,m,a,=,+,n,I,=,-,l,a,=,T =,=,=,=,-,b,a,=,+,l,K,n,T =,=,=,=,+,z,u,=,-,k,@,r,T -,b,A,=,-,k,@,=,+,r,I,n,E +,f,i,=,-,n,@,=,-,sp,L,t,J =,=,=,=,-,b,@,=,+,l,A,N,E =,=,=,=,+,s,a,=,-,b,@,l,T =,=,=,=,=,=,=,=,+,z,O,n,E -,b,@,=,+,l,E,t,-,s,@,l,T =,=,=,=,+,b,E,l,-,kn,O,p,J +,b,E,=,-,l,@,=,-,fl,|,r,T =,=,=,=,=,=,=,=,+,x,a,j,T -,m,a,=,+,d,O,=,-,n,a,=,T -,=,A,l,+,r,L,n,-,m,A,n,E -,t,i,=,+,p,I,s,-,t,@,=,T =,=,=,=,-,b,@,=,+,r,I,xt,J =,=,=,=,=,=,=,=,+,v,E,st,J =,=,=,=,=,=,=,=,+,t,o,t,J =,=,=,=,+,h,A,n,-,t,A,s,J =,=,=,=,+,=,o,r,-,d,O,p,J -,n,a,=,-,G,@,=,-,z,I,xt,J -,p,a,=,-,n,a,=,+,s,e,=,T =,=,=,=,=,=,=,=,+,h,a,r,T 
=,=,=,=,+,b,E,=,-,k,@,n,T =,=,=,=,+,b,|,=,-,G,@,l,T =,=,=,=,+,tr,A,m,-,h,L,s,J =,=,=,=,=,=,=,=,+,sl,A,b,J =,=,=,=,+,r,e,=,-,s,@,m,P =,=,=,=,=,=,=,=,+,r,A,t,T =,=,=,=,=,=,=,=,+,t,A,lm,P =,=,=,=,+,r,K,z,-,d,i,f,J =,=,=,=,=,=,=,=,+,b,L,s,J +,l,E,n,-,d,@,=,-,d,u,k,J =,=,=,=,+,sl,A,x,-,pl,A,Nk,J =,=,=,=,+,t,O,=,-,b,@,=,T +,vr,A,xt,-,=,M,=,-,t,o,=,T =,=,=,=,+,k,u,=,-,bl,u,m,P =,=,=,=,=,=,=,=,+,b,i,r,T =,=,=,=,+,=,L,=,-,j,@,r,T =,=,=,=,=,=,=,=,+,h,M,w,T -,=,K,=,-,G,@,=,+,G,E,lt,J -,b,i,=,-,z,@,=,+,m,A,nt,J =,=,=,=,-,p,E,n,+,s,e,l,T +,=,a,r,-,d,@,x,-,h,K,t,J =,=,=,=,=,=,=,=,+,str,i,m,E =,=,=,=,+,=,E,=,-,m,@,r,T =,=,=,=,+,=,A,l,-,v,@,r,T =,=,=,=,=,=,=,=,+,j,},rk,J -,t,@,=,-,n,a,rz,-,b,a,n,T -,k,A,=,-,p,i,=,+,t,a,l,T +,=,I,N,-,k,o,=,-,m,@,n,T =,=,=,=,+,sl,e,=,-,p,@,r,T +,k,O,=,-,f,i,=,-,br,o,t,J =,=,=,=,=,=,=,=,+,pr,a,t,J =,=,=,=,=,=,=,=,+,kn,i,r,T +,b,I,=,-,n,@,=,-,br,A,nt,J =,=,=,=,-,f,E,s,+,t,u,n,T =,=,=,=,+,t,e,r,-,l,I,N,K +,Gr,A,=,-,m,a,=,-,b,u,k,J =,=,=,=,+,br,o,m,-,dr,A,Nk,J =,=,=,=,=,=,=,=,+,b,I,nt,J -,h,K,ts,-,k,E,=,-,t,I,N,K -,j,O,=,+,sk,o,p,-,s,a,l,T =,=,=,=,=,=,=,=,+,sx,e,n,T +,b,I,=,-,t,@,r,-,k,u,k,J +,p,},n,-,t,@,=,-,l,K,st,J +,=,M,=,-,t,o,=,-,r,I,t,J -,v,@,r,-,sxr,I,=,-,k,@,r,T =,=,=,=,+,=,},n,-,st,@,r,T +,n,A,x,-,t,@,=,-,G,a,l,T +,p,a,=,-,t,@,rs,-,f,A,t,J -,m,a,=,-,d,@,=,+,l,i,f,J =,=,=,=,=,=,=,=,+,w,A,l,E =,=,=,=,=,=,=,=,+,fl,E,t,J +,n,a,=,-,r,i,=,-,p,i,t,J +,z,o,=,-,m,@,r,-,j,},rk,J =,=,=,=,-,t,o,=,+,n,K,n,T -,r,e,=,-,s,y,=,+,m,e,=,T +,l,E,=,-,k,@,r,-,b,E,k,J =,=,=,=,=,=,=,=,+,m,K,l,T +,b,M,=,-,t,@,r,-,m,A,n,E =,=,=,=,+,=,L,t,-,x,A,N,E =,=,=,=,=,=,=,=,+,k,i,w,T =,=,=,=,+,z,M,t,-,f,A,t,J +,n,|,s,-,w,A,r,-,m,@,r,T =,=,=,=,-,p,i,=,+,j,u,n,T =,=,=,=,=,=,=,=,+,bl,u,t,J -,r,a,=,+,t,y,r,-,l,K,st,J =,=,=,=,=,=,=,=,+,bl,a,r,T =,=,=,=,=,=,=,=,+,l,K,=,T +,=,L,t,-,st,e,k,-,s,@,l,T =,=,=,=,+,p,o,=,-,p,@,l,T +,w,a,=,-,t,@,r,-,fl,L,t,J =,=,=,=,=,=,=,=,+,b,E,f,J =,=,=,=,+,v,o,r,-,w,E,rp,J +,=,o,m,-,z,E,x,-,st,@,r,T =,=,=,=,+,sl,a,=,-,pl,a,ts,J =,=,=,=,=,=,=,=,+,bl,O,m,E 
-,sx,O,r,-,p,i,=,+,j,u,n,T =,=,=,=,+,k,O,N,-,k,@,l,T =,=,=,=,+,h,E,N,-,s,@,l,T =,=,=,=,=,=,=,=,+,h,O,m,E =,=,=,=,-,b,o,=,+,b,K,n,T =,=,=,=,-,b,@,=,+,G,I,n,E =,=,=,=,+,d,E,k,-,xl,A,s,J =,=,=,=,+,t,a,=,-,l,i,=,T +,s,L,=,-,k,@,r,-,m,A,n,E =,=,=,=,+,l,K,=,-,d,I,N,E =,=,=,=,+,h,A,rt,-,k,L,l,T =,=,=,=,-,b,@,=,+,d,A,Nk,J =,=,=,=,=,=,=,=,+,m,i,r,T +,=,o,m,-,z,E,=,-,G,@,r,T =,=,=,=,=,=,=,=,+,h,O,p,J =,=,=,=,+,n,a,=,-,G,@,l,T =,=,=,=,=,=,=,=,+,p,O,n,E +,l,L,=,-,z,@,=,-,p,A,t,J -,b,u,=,-,r,@,=,+,kr,K,t,J -,v,@,r,+,st,O,=,-,p,I,N,K -,n,@,=,+,k,y,=,-,r,@,=,T =,=,=,=,+,k,i,=,-,l,o,=,T =,=,=,=,+,r,I,=,-,b,@,=,T =,=,=,=,=,=,=,=,+,b,a,l,T =,=,=,=,=,=,=,=,+,t,L,x,J =,=,=,=,+,b,u,=,-,z,@,m,P -,m,i,=,-,n,e,=,-,st,},k,J +,vr,M,=,-,w,@,=,-,sx,u,n,T =,=,=,=,=,=,=,=,+,f,e,st,J =,=,=,=,=,=,=,=,+,w,e,k,J =,=,=,=,+,b,i,xd,-,br,i,f,J =,=,=,=,+,dr,o,x,-,h,E,k,J -,b,o,=,+,l,e,=,-,r,o,=,T =,=,=,=,=,=,=,=,+,b,O,l,E -,k,a,=,-,m,i,=,+,z,o,l,T -,r,@,N,-,k,a,=,-,m,@,r,T =,=,=,=,+,b,O,m,-,b,u,k,J -,m,i,=,-,kr,o,=,+,f,o,n,T -,r,i,=,-,t,},s,-,l,I,xt,J +,sp,E,l,-,d,@,=,-,kn,O,p,J =,=,=,=,+,b,e,=,-,z,@,m,P -,tr,i,=,+,j,A,=,-,N,@,l,T =,=,=,=,+,=,e,=,-,G,@,l,T =,=,=,=,+,b,A,r,-,kr,},k,J =,=,=,=,+,m,A,s,-,k,@,r,T =,=,=,=,-,pl,@,=,+,z,i,r,T -,p,A,n,-,t,a,=,+,l,O,n,E =,=,=,=,=,=,=,=,+,b,o,t,J -,k,A,=,+,s,i,rz,-,br,i,f,J =,=,=,=,+,h,},=,-,m,@,l,T +,k,I,=,-,p,@,=,-,b,O,rst,J -,r,i,=,+,k,<,r,-,d,@,r,T =,=,=,=,=,=,=,=,+,x,|,l,T =,=,=,=,+,b,O,=,-,r,@,l,T =,=,=,=,+,b,O,rz,-,dr,A,Nk,J -,k,A,n,+,d,K,=,-,kl,O,nt,J =,=,=,=,+,st,O,=,-,p,@,r,T =,=,=,=,=,=,=,=,+,str,o,m,P =,=,=,=,=,=,=,=,+,d,i,f,J =,=,=,=,+,v,e,=,-,G,@,r,T +,b,O,s,-,f,i,=,-,j,o,l,T +,b,o,=,-,t,@,r,-,b,i,s,J =,=,=,=,=,=,=,=,+,b,u,k,J +,kl,a,s,-,k,a,=,-,d,o,=,T =,=,=,=,+,t,e,=,-,kr,A,ns,J =,=,=,=,+,k,E,=,-,p,@,l,T =,=,=,=,+,b,o,=,-,t,@,r,T =,=,=,=,-,t,a,=,+,p,K,t,J =,=,=,=,+,st,E,=,-,N,@,l,T =,=,=,=,=,=,=,=,+,p,A,n,E +,zw,E,=,-,l,I,=,-,x,a,m,P =,=,=,=,+,n,I,=,-,p,@,r,T =,=,=,=,+,kr,L,=,-,p,A,k,J =,=,=,=,+,t,e,=,-,b,L,l,T =,=,=,=,=,=,=,=,+,b,E,rm,P 
+,G,a,=,-,r,@,=,-,sx,a,r,T =,=,=,=,-,s,A,r,+,d,i,n,T =,=,=,=,+,b,M,w,-,s,@,l,T =,=,=,=,+,xr,E,n,-,d,@,l,T +,d,O,n,-,d,@,r,-,b,e,st,J +,l,E,n,-,t,@,=,-,kl,O,k,J =,=,=,=,=,=,=,=,+,pr,i,m,P -,n,o,=,+,r,a,=,-,m,a,=,T -,v,@,r,+,v,A,l,-,p,},t,J =,=,=,=,+,br,K,=,-,h,M,t,J +,v,o,r,-,w,E,rp,-,xl,A,s,J +,l,E,=,-,t,@,r,-,st,a,f,J -,v,@,r,+,st,E,r,-,v,I,N,K -,G,@,=,+,l,K,=,-,k,I,N,K =,=,=,=,+,k,u,=,-,j,@,r,T =,=,=,=,+,r,|,=,-,t,@,r,T =,=,=,=,+,b,L,=,-,t,@,n,T =,=,=,=,=,=,=,=,+,d,L,m,P =,=,=,=,+,xl,K,=,-,b,a,n,T =,=,=,=,+,n,E,s,-,t,@,l,T -,p,A,=,-,p,@,=,+,G,a,j,T =,=,=,=,-,w,a,r,+,d,I,n,E =,=,=,=,=,=,=,=,+,n,E,p,J +,f,e,=,-,m,@,=,-,l,a,r,T =,=,=,=,=,=,=,=,+,br,O,n,E =,=,=,=,=,=,=,=,+,str,O,nt,J =,=,=,=,+,m,o,=,-,t,O,r,T =,=,=,=,+,m,A,n,-,t,@,l,T =,=,=,=,=,=,=,=,+,pr,E,nt,J =,=,=,=,=,=,=,=,+,br,},x,J =,=,=,=,=,=,=,=,+,s,E,nt,J -,br,y,=,+,n,E,=,-,t,@,=,J =,=,=,=,+,t,O,=,-,f,e,=,T =,=,=,=,=,=,=,=,+,w,K,n,T -,k,E,l,-,n,@,=,+,r,I,n,E +,v,o,r,-,sp,E,l,-,d,@,r,T =,=,=,=,+,p,A,=,-,k,@,r,T =,=,=,=,=,=,=,=,+,v,L,st,J =,=,=,=,+,d,I,=,-,k,@,rt,J +,b,L,=,-,t,@,=,-,b,e,n,T -,m,@,=,+,n,a,=,-,d,@,=,T =,=,=,=,+,b,e,=,-,k,@,r,T -,t,@,n,-,tr,O,=,-,m,@,l,T =,=,=,=,-,k,a,=,+,b,a,j,T =,=,=,=,-,p,A,r,+,t,K,=,T =,=,=,=,=,=,=,=,+,sm,M,t,J -,z,O,=,-,t,@,r,+,n,K,=,T +,sx,i,t,-,x,@,=,-,b,E,t,J =,=,=,=,+,b,},n,-,z,I,N,E -,pr,o,=,+,p,E,=,-,l,@,r,T -,d,o,=,+,r,a,=,-,d,o,=,T =,=,=,=,-,b,y,=,+,r,e,l,T =,=,=,=,+,p,I,n,-,S,@,r,T =,=,=,=,+,z,u,k,-,pl,a,t,J =,=,=,=,-,b,y,=,+,r,K,n,T +,m,i,=,-,n,i,=,-,r,O,k,J -,sw,a,=,-,v,@,l,-,k,O,p,J =,=,=,=,+,tr,I,l,-,h,a,r,T =,=,=,=,+,d,L,=,-,v,@,l,T =,=,=,=,=,=,=,=,+,sx,I,p,J =,=,=,=,-,k,a,=,+,n,O,n,E -,n,e,=,-,G,@,=,+,r,I,n,E =,=,=,=,=,=,=,=,+,h,A,l,E =,=,=,=,+,d,O,=,-,f,@,r,T =,=,=,=,-,b,a,=,+,z,a,r,T =,=,=,=,+,t,o,=,-,k,o,=,T +,f,e,=,-,t,a,=,-,f,@,l,T =,=,=,=,+,tr,A,m,-,b,u,k,J =,=,=,=,-,m,o,=,+,d,E,l,E =,=,=,=,-,b,I,l,+,j,E,t,J -,t,@,rd,-,l,e,=,-,p,@,l,T =,=,=,=,=,=,=,=,+,h,O,r,E =,=,=,=,-,j,O,N,+,m,a,t,J =,=,=,=,=,=,=,=,+,n,I,p,J =,=,=,=,-,s,E,n,+,t,y,r,T 
=,=,=,=,+,=,A,v,-,d,A,k,J =,=,=,=,+,t,O,rn,-,m,E,s,J =,=,=,=,+,s,E,=,-,l,o,=,T =,=,=,=,+,w,K,=,-,z,@,r,T +,h,E,=,-,N,@,=,-,l,a,r,T -,l,A,n,+,t,a,rm,-,pl,a,t,J =,=,=,=,+,f,a,=,-,r,o,=,T =,=,=,=,+,m,E,s,-,f,I,s,J =,=,=,=,+,s,K,=,-,f,@,r,T -,s,i,=,+,l,I,n,-,d,@,r,T =,=,=,=,+,b,I,=,-,t,@,r,T +,l,O,=,-,t,o,=,-,b,A,l,E +,s,},=,-,k,@,l,-,str,a,t,J +,=,o,=,-,j,@,=,-,v,a,r,T =,=,=,=,=,=,=,=,+,l,o,p,J =,=,=,=,=,=,=,=,+,sn,O,l,E =,=,=,=,=,=,=,=,+,sx,A,r,E =,=,=,=,-,k,O,l,+,b,),r,T -,S,o,=,+,n,e,r,-,bl,O,k,J +,l,E,k,-,t,@,=,-,b,},s,J +,=,y,=,-,n,i,=,-,f,O,rm,P =,=,=,=,=,=,=,=,+,r,L,l,T =,=,=,=,=,=,=,=,+,w,K,f,E =,=,=,=,+,d,I,=,-,s,@,l,T =,=,=,=,+,spl,I,ts,-,f,a,n,T -,m,@,l,+,j,u,n,-,v,I,s,J -,sk,a,=,-,p,y,=,+,l,i,r,T -,k,O,m,+,pj,u,=,-,t,@,r,T =,=,=,=,=,=,=,=,+,k,I,p,E +,G,a,=,-,r,@,=,-,p,},nt,J =,=,=,=,+,sp,o,g,-,d,i,r,T +,tr,<,=,-,l,@,=,-,str,o,k,J =,=,=,=,=,=,=,=,+,str,e,p,J =,=,=,=,+,k,i,m,-,pl,A,nt,J =,=,=,=,+,fr,},t,-,s,@,l,T =,=,=,=,+,=,A,nt,-,w,o,rt,J -,b,u,l,+,j,O,m,-,bl,O,k,J -,k,u,=,+,p,O,m,-,b,u,k,J =,=,=,=,+,m,u,r,-,pl,a,t,J =,=,=,=,=,=,=,=,+,sf,e,r,T -,t,i,r,-,l,A,n,+,t,K,n,T =,=,=,=,-,k,},l,+,t,y,r,T =,=,=,=,+,k,A,st,-,r,A,nt,J +,m,e,=,-,v,A,=,-,l,@,r,T =,=,=,=,=,=,=,=,+,xr,u,f,J =,=,=,=,=,=,=,=,+,wr,O,N,E =,=,=,=,+,d,|,=,-,r,a,m,P =,=,=,=,=,=,=,=,+,st,i,r,T =,=,=,=,+,z,a,t,-,h,L,s,J =,=,=,=,+,dr,a,j,-,h,E,k,J =,=,=,=,=,=,=,=,+,d,A,k,J -,b,A,=,-,v,i,=,+,j,a,n,T +,d,A,l,-,k,O,n,-,sx,I,lt,J -,d,i,=,+,pl,o,=,-,m,a,=,T =,=,=,=,-,f,o,=,+,r,E,l,E =,=,=,=,+,d,a,=,-,m,@,=,T =,=,=,=,=,=,=,=,+,d,A,m,E =,=,=,=,-,x,@,=,+,b,E,t,J =,=,=,=,+,sp,o,r,-,k,a,rt,J =,=,=,=,=,=,=,=,+,r,O,x,J =,=,=,=,+,n,|,z,-,b,e,n,T +,kl,a,=,-,m,@,=,-,bl,A,t,J +,s,i,=,-,m,i,=,-,l,e,=,T +,p,u,=,-,d,@,r,-,d,o,s,J +,w,i,=,-,n,@,=,-,h,u,r,T =,=,=,=,+,d,e,=,-,G,@,n,T =,=,=,=,+,h,a,=,-,v,@,n,T =,=,=,=,-,k,O,r,+,d,e,l,T -,d,@,=,-,l,I,=,-,N,@,=,T =,=,=,=,-,=,E,n,+,tr,e,=,T =,=,=,=,=,=,=,=,+,w,E,l,E =,=,=,=,=,=,=,=,+,h,o,ft,J =,=,=,=,=,=,=,=,+,ps,A,lm,P -,l,i,=,+,k,|,r,-,Gl,A,s,J =,=,=,=,+,sp,u,l,-,dr,A,Nk,J 
=,=,=,=,=,=,=,=,+,d,E,n,E =,=,=,=,+,r,O,=,-,k,@,n,T =,=,=,=,-,d,E,=,+,s,),=,T +,n,a,=,-,l,@,=,-,br,o,t,J -,pr,o,=,-,v,I,n,+,S,a,l,T =,=,=,=,-,t,E,=,+,r,A,s,J =,=,=,=,+,n,A,xt,-,k,A,st,J =,=,=,=,=,=,=,=,+,d,|,r,T +,f,y,=,-,z,i,=,-,d,i,r,T -,j,a,=,+,b,o,=,-,l,o,=,T +,sp,e,=,-,s,i,=,-,br,i,f,J +,f,o,n,-,t,a,=,-,f,@,l,T =,=,=,=,=,=,=,=,+,m,u,r,T =,=,=,=,+,k,O,=,-,m,a,=,T =,=,=,=,=,=,=,=,+,l,I,nt,J =,=,=,=,+,w,K,g,-,bl,A,t,J -,fr,i,=,-,k,a,=,+,d,E,l,E =,=,=,=,+,kn,|,=,-,t,@,r,T =,=,=,=,=,=,=,=,+,l,},s,J =,=,=,=,=,=,=,=,+,t,e,n,T =,=,=,=,-,m,@,=,+,vr,M,w,T =,=,=,=,+,s,e,=,-,d,@,r,T +,s,a,=,-,t,e,=,-,st,O,k,J -,=,e,=,+,p,I,s,-,t,@,l,T -,k,a,=,+,b,M,=,-,t,@,r,T =,=,=,=,+,v,I,z,-,d,i,f,J +,d,I,N,-,s,@,x,-,h,K,t,J +,k,E,rst,-,k,a,=,-,d,o,=,T =,=,=,=,+,kr,i,=,-,z,@,l,T =,=,=,=,=,=,=,=,+,sx,K,n,T =,=,=,=,+,j,a,=,-,G,@,r,T =,=,=,=,-,k,O,m,+,f,o,r,T -,p,E,n,-,S,o,=,+,n,),r,T =,=,=,=,+,sp,O,t,-,k,o,p,J +,=,A,f,-,s,A,=,-,k,@,r,T =,=,=,=,+,=,M,=,-,w,@,l,T -,k,A,=,-,s,@,=,+,r,O,l,E =,=,=,=,+,l,o,n,-,str,o,k,J =,=,=,=,=,=,=,=,+,pr,L,k,J =,=,=,=,+,k,A,p,-,st,@,r,T =,=,=,=,-,t,o,=,+,n,e,l,T =,=,=,=,+,b,e,=,-,v,I,N,K +,d,i,=,-,n,@,=,-,bl,I,k,J =,=,=,=,+,l,o,n,-,z,A,k,J +,d,A,x,-,t,@,=,-,str,e,p,J +,l,E,=,-,t,@,r,-,bl,O,k,J =,=,=,=,=,=,=,=,+,n,O,p,J =,=,=,=,-,x,@,=,+,z,I,xt,J =,=,=,=,=,=,=,=,+,x,I,mp,J +,w,I,l,-,G,@,=,-,k,A,t,J -,r,e,=,-,m,O,n,+,tw,a,r,T =,=,=,=,=,=,=,=,+,r,e,=,T =,=,=,=,+,dr,},=,-,p,@,l,T +,r,e,=,-,G,@,n,-,z,O,n,E =,=,=,=,+,tr,A,p,-,f,E,lt,J =,=,=,=,=,=,=,=,+,d,o,s,J -,x,E,=,-,k,@,r,+,n,K,=,T =,=,=,=,=,=,=,=,+,sn,a,r,T =,=,=,=,+,sx,e,r,-,b,A,k,J =,=,=,=,=,=,=,=,+,st,y,r,T -,b,@,=,+,w,e,=,-,G,I,N,E =,=,=,=,+,kr,|,=,-,k,@,l,T =,=,=,=,=,=,=,=,+,v,A,k,J =,=,=,=,=,=,=,=,+,st,e,l,T =,=,=,=,=,=,=,=,+,b,O,rst,J =,=,=,=,+,pr,A,=,-,N,@,r,T -,k,a,=,+,l,E,n,-,d,@,r,T +,sx,M,=,-,d,@,r,-,kl,O,p,J =,=,=,=,+,sx,A,=,-,r,@,l,T =,=,=,=,=,=,=,=,+,dr,A,f,J =,=,=,=,=,=,=,=,+,str,E,N,E +,n,e,l,-,sp,e,l,-,st,@,r,T =,=,=,=,=,=,=,=,+,dr,A,Nk,J =,=,=,=,-,p,i,s,+,t,o,l,T 
=,=,=,=,+,sxr,K,f,-,k,I,st,J +,h,A,r,-,t,@,=,-,l,i,f,J =,=,=,=,=,=,=,=,+,dr,O,l,E -,m,A,ns,-,x,@,=,-,b,E,t,J =,=,=,=,=,=,=,=,+,sm,},l,E =,=,=,=,+,tr,i,=,-,k,o,=,T =,=,=,=,+,d,u,=,-,d,@,l,T =,=,=,=,+,d,o,=,-,r,@,n,T =,=,=,=,+,d,O,k,-,t,@,r,T +,st,a,t,-,sx,},ld,-,b,u,k,J -,f,M,=,-,d,L,=,-,v,@,l,T =,=,=,=,+,dr,},=,-,k,@,r,T =,=,=,=,=,=,=,=,+,dw,E,rx,J =,=,=,=,=,=,=,=,+,xl,|,f,J =,=,=,=,+,k,O,=,-,f,@,r,T =,=,=,=,+,k,E,rst,-,st,},k,J =,=,=,=,=,=,=,=,+,k,a,s,J +,z,O,=,-,n,@,=,-,st,O,f,J =,=,=,=,=,=,=,=,+,d,L,f,J -,p,a,=,-,p,i,=,+,j,O,t,J =,=,=,=,+,m,A,=,-,z,@,l,T -,w,e,s,-,x,@,=,+,Gr,u,t,J -,d,L,=,-,v,@,=,+,l,I,n,E =,=,=,=,+,kn,},=,-,p,@,l,T =,=,=,=,+,t,y,=,-,b,a,=,T =,=,=,=,+,kl,e,r,-,k,A,st,J =,=,=,=,+,sl,a,b,-,d,|,n,T =,=,=,=,=,=,=,=,+,p,A,t,J =,=,=,=,+,b,A,k,-,s,@,l,T =,=,=,=,+,k,E,r,-,k,@,r,T =,=,=,=,+,bl,u,t,-,pl,a,t,J =,=,=,=,=,=,=,=,+,t,O,rn,T +,=,O,=,-,p,E,=,-,p,@,r,T =,=,=,=,=,=,=,=,+,b,},n,E -,kw,e,=,-,k,@,=,+,r,K,=,T =,=,=,=,=,=,=,=,+,p,I,l,E =,=,=,=,+,w,i,t,-,f,O,rk,J -,=,e,=,+,d,A,=,-,m,@,r,T =,=,=,=,-,s,o,=,+,pr,a,n,T =,=,=,=,=,=,=,=,+,zw,e,m,P =,=,=,=,+,str,E,=,-,N,@,l,T =,=,=,=,-,=,e,n,+,tw,e,=,T =,=,=,=,-,f,A,t,+,s,u,n,T =,=,=,=,=,=,=,=,+,=,i,=,T -,d,@,=,-,v,K,=,-,v,@,r,T +,b,O,=,-,r,@,l,-,h,A,p,J +,k,O,=,-,f,i,=,-,kr,A,ns,J =,=,=,=,=,=,=,=,+,pr,L,m,P -,k,a,=,-,b,@,l,+,j,M,w,T =,=,=,=,=,=,=,=,+,z,e,l,T =,=,=,=,+,r,a,t,-,s,@,l,T =,=,=,=,=,=,=,=,+,kl,o,t,J =,=,=,=,+,=,E,=,-,G,@,=,T -,tr,i,=,-,b,y,=,+,n,a,l,T =,=,=,=,-,m,a,=,+,n,i,r,T =,=,=,=,+,h,E,=,-,N,@,l,T =,=,=,=,+,=,K,=,-,k,@,l,T =,=,=,=,-,v,O,r,+,st,I,n,E =,=,=,=,+,h,A,=,-,r,@,=,T =,=,=,=,=,=,=,=,+,p,u,l,T =,=,=,=,=,=,=,=,+,xr,e,n,T =,=,=,=,+,st,O,v,-,d,e,l,T =,=,=,=,+,kl,y,=,-,w,@,n,T =,=,=,=,-,f,a,=,+,n,a,l,T +,=,a,n,-,m,a,g,-,bl,O,k,J =,=,=,=,=,=,=,=,+,kl,O,s,J +,=,E,l,-,z,@,=,-,k,A,t,J =,=,=,=,+,t,y,=,-,b,@,=,T -,x,@,=,+,h,A,gd,-,b,A,l,E =,=,=,=,+,d,o,=,-,br,i,f,J -,=,E,m,-,plw,A,=,+,j,e,=,T -,t,@,r,+,kl,a,s,-,f,E,rs,J -,p,A,=,-,r,a,=,+,Gr,a,f,J +,dr,},=,-,p,@,l,-,fl,E,s,J 
+,p,K,=,-,j,E,=,-,k,@,r,T =,=,=,=,+,p,a,s,-,m,A,nt,J -,=,e,=,-,z,@,=,+,l,I,n,E +,=,O,p,-,n,|,=,-,k,@,r,T -,=,A,n,-,tr,@,=,+,d,|,=,T =,=,=,=,+,k,o,=,-,G,@,l,T +,h,A,=,-,n,@,s,-,w,O,rm,P =,=,=,=,+,t,a,=,-,t,@,r,T =,=,=,=,+,n,I,=,-,k,@,r,T =,=,=,=,-,k,A,=,+,pr,u,n,T =,=,=,=,=,=,=,=,+,k,o,r,T =,=,=,=,+,fl,I,=,-,k,@,r,T +,=,E,rf,-,x,@,=,-,n,a,m,P -,k,@,n,-,h,L,s,-,m,A,n,E +,kn,O,f,-,l,o,k,-,s,M,s,J -,=,E,s,+,k,a,=,-,d,@,r,T =,=,=,=,=,=,=,=,+,sl,O,ns,J =,=,=,=,=,=,=,=,+,st,E,N,E +,r,a,=,-,d,i,=,-,j,o,=,T =,=,=,=,+,r,A,n,-,s,@,l,T -,t,@,=,+,r,K,=,-,br,i,f,J =,=,=,=,-,=,e,=,+,tw,i,=,T =,=,=,=,+,t,A,n,-,t,@,=,T =,=,=,=,+,w,e,=,-,z,@,l,T =,=,=,=,+,z,O,l,-,d,@,r,T +,k,a,=,-,b,@,l,-,b,a,n,T -,j,o,=,+,n,),r,-,s,o,n,T -,=,E,k,+,s,a,=,-,m,@,n,T -,=,E,k,-,s,@,m,+,pl,a,r,T =,=,=,=,+,sx,},=,-,t,I,N,K =,=,=,=,=,=,=,=,+,sxr,A,m,E +,=,A,f,-,sl,L,=,-,t,I,N,K =,=,=,=,+,r,I,=,-,b,@,l,T +,z,u,t,-,h,M,=,-,d,@,r,T +,r,K,st,-,f,o,=,-,G,@,l,T =,=,=,=,+,h,A,nd,-,b,o,rt,J =,=,=,=,+,st,I,k,-,s,@,l,T +,m,u,=,-,d,@,r,-,s,o,n,T =,=,=,=,-,f,A,=,+,br,i,k,J -,f,a,=,+,s,a,=,-,d,@,=,T =,=,=,=,+,z,A,g,-,b,u,k,J =,=,=,=,+,b,L,=,-,G,I,N,E =,=,=,=,+,l,O,x,-,pl,A,Nk,J =,=,=,=,+,n,a,j,-,st,@,r,T =,=,=,=,+,k,e,=,-,t,@,l,T +,kl,A,t,-,p,a,=,-,p,i,r,T +,p,},r,-,p,@,r,-,st,e,l,T =,=,=,=,+,m,K,=,-,z,u,t,J =,=,=,=,+,sp,O,t,-,pr,K,s,J =,=,=,=,+,st,|,n,-,s,@,l,T +,=,E,l,-,f,@,=,-,b,A,Nk,J =,=,=,=,+,=,o,r,-,l,E,l,E =,=,=,=,+,br,e,k,-,m,E,s,J -,pr,o,=,-,t,e,=,+,Z,e,=,T -,=,e,=,-,t,i,=,+,k,E,t,J =,=,=,=,+,r,E,m,-,h,O,k,J -,p,i,=,+,j,a,=,-,m,a,=,T =,=,=,=,-,b,y,=,+,r,o,=,T -,f,e,=,-,j,@,=,+,r,i,=,T -,b,O,r,+,d,y,r,-,st,@,r,T =,=,=,=,+,st,e,=,-,v,@,l,T +,=,A,f,-,sx,K,ts,-,f,e,st,J =,=,=,=,+,fl,e,=,-,m,@,r,T =,=,=,=,=,=,=,=,+,xl,A,s,J +,n,e,l,-,b,@,=,-,sx,L,t,J =,=,=,=,=,=,=,=,+,v,o,rn,T =,=,=,=,+,r,e,=,-,p,@,l,T =,=,=,=,=,=,=,=,+,z,|,n,T -,f,L,=,-,j,@,=,+,t,O,n,E =,=,=,=,+,t,K,=,-,d,I,N,K -,m,i,=,-,r,a,=,+,b,E,l,E =,=,=,=,=,=,=,=,+,st,O,k,J =,=,=,=,+,st,E,=,-,l,I,N,K =,=,=,=,=,=,=,=,+,br,I,l,E =,=,=,=,=,=,=,=,+,bl,O,k,J 
=,=,=,=,+,Z,y,=,-,bl,O,k,J =,=,=,=,-,x,A,l,+,j,u,n,T =,=,=,=,=,=,=,=,+,h,K,n,T =,=,=,=,-,s,E,r,+,v,i,s,J =,=,=,=,=,=,=,=,+,br,i,f,J =,=,=,=,+,z,e,=,-,G,@,l,T =,=,=,=,+,k,L,=,-,k,@,n,T -,r,@,=,-,l,A,n,+,t,K,n,T =,=,=,=,=,=,=,=,+,kr,L,s,J -,j,O,=,-,k,@,r,+,n,K,=,T =,=,=,=,+,h,u,zd,-,b,A,l,E =,=,=,=,-,x,E,=,+,k,I,n,E =,=,=,=,+,r,o,=,-,v,@,r,T +,=,I,n,-,d,e,=,-,l,I,N,K =,=,=,=,=,=,=,=,+,fl,A,p,E =,=,=,=,=,=,=,=,+,h,K,l,T =,=,=,=,=,=,=,=,+,fl,A,rt,J -,sx,I,l,-,d,@,=,+,r,K,=,T =,=,=,=,+,bl,A,d,-,=,a,l,T =,=,=,=,+,fl,M,w,-,t,@,=,T =,=,=,=,+,st,O,=,-,p,@,l,T =,=,=,=,=,=,=,=,+,f,K,t,J =,=,=,=,=,=,=,=,+,k,o,p,J =,=,=,=,=,=,=,=,+,=,a,l,T =,=,=,=,=,=,=,=,+,fl,E,s,J +,fl,E,=,-,s,@,=,-,b,A,k,J =,=,=,=,-,d,I,k,+,t,e,=,T -,=,a,=,+,=,A,l,-,t,O,l,E =,=,=,=,+,fl,I,dz,-,bl,O,k,J =,=,=,=,+,fl,I,ts,-,l,A,mp,J =,=,=,=,=,=,=,=,+,h,M,t,J =,=,=,=,=,=,=,=,+,h,a,=,T =,=,=,=,=,=,=,=,+,sx,|,t,J =,=,=,=,=,=,=,=,+,k,u,=,T -,pl,a,=,-,G,@,=,+,r,K,=,T =,=,=,=,-,fl,y,=,+,w,e,l,T =,=,=,=,=,=,=,=,+,v,E,nt,J =,=,=,=,+,p,u,=,-,d,@,l,T =,=,=,=,+,k,i,n,-,d,O,p,J =,=,=,=,-,f,u,=,+,r,i,r,T =,=,=,=,=,=,=,=,+,k,A,p,J =,=,=,=,=,=,=,=,+,sx,e,=,T +,f,o,=,-,l,i,=,-,j,o,=,T -,=,a,=,+,z,K,n,-,=,a,l,T =,=,=,=,=,=,=,=,+,s,O,m,E =,=,=,=,=,=,=,=,+,fr,e,m,P =,=,=,=,=,=,=,=,+,=,E,n,E +,=,O,p,-,fr,I,=,-,s,@,r,T -,f,O,r,-,m,y,=,+,l,i,r,T +,l,e,=,-,p,@,l,-,d,o,s,J =,=,=,=,+,h,K,=,-,n,I,N,E +,sx,I,=,-,m,@,l,-,pl,A,nt,J +,d,A,=,-,k,a,=,-,m,@,r,T =,=,=,=,+,kl,O,=,-,p,@,r,T -,t,o,=,-,t,u,=,-,st,E,l,E =,=,=,=,=,=,=,=,+,=,o,=,T +,b,|,=,-,z,@,l,-,pr,a,t,J =,=,=,=,+,k,o,k,-,s,@,l,T =,=,=,=,=,=,=,=,+,n,e,f,J +,str,I,=,-,b,@,=,-,l,I,N,E =,=,=,=,=,=,=,=,+,spr,A,Nk,J -,p,a,=,-,r,a,=,+,s,O,l,E +,r,a,t,-,s,@,l,-,h,u,k,J =,=,=,=,+,l,e,w,-,h,O,nt,J =,=,=,=,+,sl,L,=,-,p,@,r,T +,pr,i,=,-,m,y,=,-,l,a,=,T =,=,=,=,+,pr,),=,-,r,i,=,T -,s,A,k,-,s,o,=,+,f,o,n,T =,=,=,=,+,j,O,=,-,N,@,n,T =,=,=,=,=,=,=,=,+,fr,},t,J =,=,=,=,=,=,=,=,+,l,a,=,T =,=,=,=,=,=,=,=,+,z,},xt,J =,=,=,=,=,=,=,=,+,p,a,l,T =,=,=,=,+,st,A,=,-,k,@,r,T =,=,=,=,=,=,=,=,+,dr,a,k,J 
=,=,=,=,+,j,A,=,-,s,A,k,J =,=,=,=,=,=,=,=,+,f,I,lm,P +,h,o,=,-,n,I,N,-,kl,i,r,T +,b,o,=,-,t,@,r,-,br,i,f,J =,=,=,=,=,=,=,=,+,k,O,l,E =,=,=,=,+,x,A,=,-,f,@,l,T =,=,=,=,=,=,=,=,+,p,i,f,J =,=,=,=,-,p,i,=,+,k,|,r,T -,v,@,r,+,G,I,=,-,s,I,N,E =,=,=,=,+,sl,a,p,-,m,},ts,J =,=,=,=,-,b,},=,+,f,E,t,J +,sn,a,=,-,p,@,=,-,r,I,N,E =,=,=,=,+,l,e,=,-,v,@,n,T -,kr,a,=,+,m,E,nts,-,h,L,s,J +,j,a,rs,-,p,A,r,-,t,K,=,T =,=,=,=,=,=,=,=,+,t,|,x,J =,=,=,=,=,=,=,=,+,h,u,k,J +,kr,L,=,-,d,@,=,-,b,L,l,T =,=,=,=,+,w,I,=,-,p,@,r,T =,=,=,=,=,=,=,=,+,r,u,r,T -,x,A,r,+,n,e,r,-,s,@,l,T =,=,=,=,+,bl,u,=,-,z,@,=,T =,=,=,=,+,tr,u,=,-,t,@,l,T =,=,=,=,=,=,=,=,+,z,L,l,T =,=,=,=,+,n,a,j,-,k,I,st,J =,=,=,=,-,fl,y,=,+,w,K,n,T +,p,u,=,-,d,@,r,-,d,O,ns,J =,=,=,=,=,=,=,=,+,tr,L,=,T =,=,=,=,+,d,o,ts,-,pr,E,nt,J =,=,=,=,+,tr,o,=,-,n,i,=,T -,m,A,n,-,d,a,=,+,r,K,n,T +,d,i,=,-,j,a,=,-,r,a,m,P =,=,=,=,+,sp,i,=,-,G,@,l,T -,h,A,=,-,k,@,=,+,n,K,=,T =,=,=,=,=,=,=,=,+,fr,i,t,J +,h,a,=,-,v,@,m,-,b,o,t,J -,k,A,=,-,t,e,=,+,dr,a,l,T =,=,=,=,-,x,@,=,+,d,u,=,T =,=,=,=,+,fr,*,=,-,l,@,=,T =,=,=,=,-,k,a,=,+,j,e,=,T =,=,=,=,+,sx,E,=,-,l,I,N,K +,r,E,=,-,t,@,=,-,p,K,p,J =,=,=,=,=,=,=,=,+,=,K,s,J =,=,=,=,+,pl,A,k,-,pr,E,nt,J =,=,=,=,=,=,=,=,+,vl,i,r,T =,=,=,=,-,x,@,=,+,h,K,m,P +,p,i,=,-,r,@,=,-,b,A,t,J -,x,@,=,+,h,|,=,-,G,@,n,T =,=,=,=,+,k,A,n,-,s,@,l,T +,=,O,p,-,k,I,=,-,k,@,r,T =,=,=,=,-,x,@,=,+,h,},xt,J =,=,=,=,+,m,E,N,-,s,@,l,T =,=,=,=,=,=,=,=,+,b,a,n,T +,sx,o,n,-,h,K,ts,-,f,M,t,J =,=,=,=,=,=,=,=,+,kr,a,x,J =,=,=,=,+,m,O,z,-,bl,u,m,E =,=,=,=,+,l,A,nt,-,sx,A,p,J =,=,=,=,+,w,O,rm,-,k,u,k,J =,=,=,=,+,m,e,=,-,n,I,N,K =,=,=,=,-,x,@,=,+,sx,I,l,E =,=,=,=,+,k,|,=,-,t,@,r,T =,=,=,=,=,=,=,=,+,p,u,s,J =,=,=,=,+,b,K,=,-,t,@,l,T =,=,=,=,=,=,=,=,+,st,E,r,E =,=,=,=,=,=,=,=,+,k,A,f,J =,=,=,=,+,l,},Gd,-,b,E,t,J +,p,E,=,-,n,@,=,-,b,A,k,J =,=,=,=,-,k,o,=,+,r,a,l,T =,=,=,=,-,x,@,=,+,spr,E,k,J =,=,=,=,+,sp,|,r,-,t,O,xt,J =,=,=,=,-,x,@,=,+,t,A,l,E =,=,=,=,=,=,=,=,+,d,A,l,E =,=,=,=,+,sx,u,j,-,s,@,l,T =,=,=,=,=,=,=,=,+,v,I,n,E -,k,A,=,-,n,a,=,+,p,e,=,T 
=,=,=,=,-,x,@,=,+,w,e,r,T =,=,=,=,+,sn,e,w,-,b,A,l,E =,=,=,=,=,=,=,=,+,p,i,l,T =,=,=,=,=,=,=,=,+,kr,A,ns,J +,p,O,r,-,n,o,=,-,b,u,k,J +,=,o,=,-,p,@,=,-,d,u,k,J =,=,=,=,-,v,A,=,+,l,K,=,T =,=,=,=,=,=,=,=,+,x,I,ts,J +,f,e,=,-,t,@,=,-,br,o,t,J -,ts,i,=,-,=,a,=,-,G,E,nt,J =,=,=,=,=,=,=,=,+,w,I,xt,J +,=,e,n,-,=,A,k,-,t,@,r,T =,=,=,=,=,=,=,=,+,j,O,l,E =,=,=,=,+,x,i,=,-,r,o,=,T +,x,i,=,-,r,o,=,-,p,A,s,J =,=,=,=,+,vl,I,n,-,d,@,r,T =,=,=,=,+,h,E,xt,-,r,A,Nk,J =,=,=,=,=,=,=,=,+,p,e,=,T =,=,=,=,+,m,O,t,-,x,A,t,J +,n,y,=,-,m,@,=,-,r,o,=,T =,=,=,=,-,pr,i,=,+,m,|,r,T -,n,e,=,-,gl,i,=,+,Z,e,=,T +,b,u,=,-,k,@,=,-,l,K,st,J =,=,=,=,+,p,E,rs,-,p,O,t,J =,=,=,=,=,=,=,=,+,l,E,s,J =,=,=,=,+,kl,E,=,-,d,@,r,T =,=,=,=,+,st,I,=,-,p,@,l,T =,=,=,=,+,xl,u,j,-,k,M,s,J =,=,=,=,+,k,a,z,-,br,o,t,J =,=,=,=,-,p,E,=,+,r,O,n,E +,w,e,=,-,l,@,=,-,k,I,st,J =,=,=,=,=,=,=,=,+,v,a,s,J =,=,=,=,+,x,O,n,-,d,@,l,T +,=,u,=,-,f,@,=,-,n,I,N,E =,=,=,=,+,br,K,=,-,st,@,r,T =,=,=,=,+,v,a,=,-,r,@,n,T -,p,i,=,+,j,a,=,-,m,a,=,T =,=,=,=,+,=,M,=,-,w,@,=,T =,=,=,=,=,=,=,=,+,=,a,s,J =,=,=,=,=,=,=,=,+,spl,e,t,J =,=,=,=,=,=,=,=,+,xr,a,t,J =,=,=,=,=,=,=,=,+,xr,a,n,T =,=,=,=,=,=,=,=,+,d,A,ns,J +,v,o,r,-,w,E,nt,-,s,@,l,T =,=,=,=,=,=,=,=,+,sl,a,p,J =,=,=,=,+,b,A,r,-,k,A,st,J =,=,=,=,=,=,=,=,+,sxr,K,n,T =,=,=,=,+,=,E,=,-,x,o,=,T =,=,=,=,=,=,=,=,+,d,y,w,T =,=,=,=,=,=,=,=,+,m,a,n,T =,=,=,=,=,=,=,=,+,xr,A,s,J =,=,=,=,+,s,O,=,-,k,@,l,T =,=,=,=,+,xr,A,s,-,p,E,rk,J =,=,=,=,=,=,=,=,+,bl,i,k,J =,=,=,=,-,d,o,=,+,z,K,n,T =,=,=,=,+,k,e,=,-,v,@,r,T =,=,=,=,+,d,E,k,-,s,@,l,T =,=,=,=,+,b,u,=,-,m,@,l,T =,=,=,=,-,p,a,=,+,G,a,j,T -,st,@,=,-,l,I,=,-,N,@,=,T =,=,=,=,+,=,e,=,-,v,a,=,T =,=,=,=,=,=,=,=,+,sx,K,f,J =,=,=,=,+,=,A,=,-,d,@,r,T =,=,=,=,=,=,=,=,+,xr,i,t,J -,t,@,r,-,k,o,=,-,n,I,N,K =,=,=,=,+,b,i,r,-,bl,I,k,J =,=,=,=,+,xr,K,ns,-,l,A,x,J =,=,=,=,=,=,=,=,+,xr,I,l,E -,=,O,p,-,n,e,=,-,m,@,r,T +,zw,a,=,-,v,@,l,-,st,O,k,J -,f,O,=,-,k,@,=,+,r,K,=,T -,=,e,=,-,p,i,=,+,Gr,A,m,E =,=,=,=,=,=,=,=,+,t,A,nt,J =,=,=,=,+,dr,a,=,-,m,a,=,T +,=,o,=,-,v,@,r,-,sx,O,t,J 
=,=,=,=,+,d,o,=,-,j,@,r,T =,=,=,=,+,S,E,l,-,t,@,r,T =,=,=,=,=,=,=,=,+,m,},nt,J =,=,=,=,+,h,o,ft,-,p,K,n,T +,sx,o,n,-,h,K,t,-,sl,a,p,J =,=,=,=,+,k,a,=,-,m,@,r,T =,=,=,=,+,=,a,=,-,v,O,nt,J -,l,A,n,-,t,@,=,+,r,i,=,T =,=,=,=,-,v,@,r,+,v,A,l,E =,=,=,=,+,k,E,lg,-,d,u,k,J -,=,o,=,-,p,@,=,+,r,E,t,J =,=,=,=,=,=,=,=,+,h,a,m,P =,=,=,=,=,=,=,=,+,p,I,t,J =,=,=,=,+,h,a,r,-,b,L,s,J =,=,=,=,=,=,=,=,+,h,a,rt,J =,=,=,=,+,t,a,=,-,l,I,N,E =,=,=,=,=,=,=,=,+,w,},rm,P =,=,=,=,+,st,E,m,-,h,O,k,J =,=,=,=,-,v,@,r,+,sx,I,l,E =,=,=,=,=,=,=,=,+,r,o,s,J +,w,e,=,-,l,@,=,-,d,o,s,J -,t,@,=,-,h,M,=,-,d,@,r,T -,b,i,=,-,t,y,=,+,w,e,=,T =,=,=,=,+,h,a,=,-,G,@,l,T =,=,=,=,=,=,=,=,+,w,O,rm,P -,d,A,n,-,s,@,=,+,r,E,s,J =,=,=,=,=,=,=,=,+,d,O,rp,J =,=,=,=,=,=,=,=,+,vl,u,j,T =,=,=,=,=,=,=,=,+,kr,a,l,T -,N,@,l,-,t,A,=,-,N,@,l,T =,=,=,=,=,=,=,=,+,w,i,l,T +,t,e,=,-,sx,o,=,-,t,@,l,T =,=,=,=,+,sx,K,=,-,d,I,N,K -,d,i,=,+,n,a,=,-,m,o,=,T -,h,A,l,-,v,@,=,+,m,a,n,T =,=,=,=,=,=,=,=,+,x,A,s,J =,=,=,=,-,m,A,=,+,l,|,r,T =,=,=,=,=,=,=,=,+,spr,A,N,E =,=,=,=,+,sx,o,l,-,vr,i,nt,J -,d,i,=,-,j,o,=,-,pr,a,t,J +,z,I,l,-,v,@,r,-,=,L,=,T +,k,A,m,-,f,@,r,-,b,A,l,E -,v,@,r,+,h,M,=,-,d,I,N,E =,=,=,=,=,=,=,=,+,sp,E,lt,J +,sx,I,lt,-,p,A,=,-,t,O,r,E =,=,=,=,+,k,i,m,-,bl,A,t,J -,sx,u,=,-,n,@,=,-,v,A,k,J =,=,=,=,=,=,=,=,+,tr,A,p,E =,=,=,=,+,h,a,r,-,n,E,t,J =,=,=,=,+,pl,a,ts,-,k,a,rt,J =,=,=,=,-,s,I,n,+,j,a,l,T =,=,=,=,+,p,u,=,-,m,a,=,T =,=,=,=,+,xr,I,=,-,f,@,l,T =,=,=,=,+,zw,a,=,-,v,@,l,T =,=,=,=,=,=,=,=,+,v,M,w,T =,=,=,=,-,h,A,N,+,g,A,r,T =,=,=,=,+,st,O,=,-,m,@,l,T =,=,=,=,-,s,O,r,+,t,i,=,T -,t,E,s,-,t,a,=,+,m,E,nt,J =,=,=,=,-,p,O,m,+,p,O,n,E +,h,A,n,-,d,u,k,-,r,E,k,J -,h,A,r,-,l,@,=,+,k,K,n,T =,=,=,=,+,sn,A,=,-,p,@,r,T =,=,=,=,=,=,=,=,+,=,),r,T =,=,=,=,+,v,E,s,-,t,I,N,E =,=,=,=,+,p,O,=,-,n,i,=,T +,h,A,r,-,t,@,=,-,d,i,f,J =,=,=,=,=,=,=,=,+,sx,a,l,T =,=,=,=,=,=,=,=,+,kr,O,l,E =,=,=,=,+,r,e,=,-,k,@,l,T =,=,=,=,-,b,O,r,+,d,E,s,J =,=,=,=,-,p,i,=,+,p,E,t,J =,=,=,=,=,=,=,=,+,str,a,l,T =,=,=,=,+,h,A,rt,-,s,A,k,J -,d,@,=,+,l,a,=,-,b,@,r,T 
+,=,A,l,-,p,@,=,-,d,O,rp,J =,=,=,=,+,z,e,=,-,v,@,n,T =,=,=,=,=,=,=,=,+,w,I,nt,J +,v,o,r,-,d,|,=,-,r,a,m,P +,s,I,=,-,k,@,=,-,p,I,t,J =,=,=,=,+,b,A,Ng,-,br,i,f,J =,=,=,=,+,m,a,=,-,d,@,=,T =,=,=,=,-,t,u,r,+,n,e,=,T =,=,=,=,+,d,O,m,-,p,@,r,T =,=,=,=,+,=,a,=,-,d,@,r,T =,=,=,=,=,=,=,=,+,h,e,r,T +,pr,E,=,-,s,i,=,-,Gr,u,p,J +,b,A,=,-,b,@,l,-,G,L,x,J =,=,=,=,=,=,=,=,+,r,E,l,E =,=,=,=,=,=,=,=,+,xr,a,f,J =,=,=,=,+,=,K,z,-,bl,O,k,J =,=,=,=,=,=,=,=,+,p,A,p,J =,=,=,=,=,=,=,=,+,f,K,l,T =,=,=,=,+,sn,I,=,-,p,@,l,T =,=,=,=,+,st,A,=,-,k,@,rt,J -,p,i,=,+,j,a,=,-,n,o,=,T -,t,@,=,-,m,O,=,+,n,e,=,T +,h,K,=,-,v,i,=,-,j,o,l,T =,=,=,=,=,=,=,=,+,h,E,lm,P =,=,=,=,=,=,=,=,+,xr,A,f,J =,=,=,=,+,h,E,l,-,p,@,r,T -,p,i,=,+,j,e,r,-,w,i,l,T =,=,=,=,=,=,=,=,+,k,E,t,J =,=,=,=,+,k,O,=,-,r,@,l,T =,=,=,=,+,t,u,=,-,t,@,r,T =,=,=,=,+,s,E,=,-,t,@,r,T =,=,=,=,+,bl,u,d,-,b,O,l,E =,=,=,=,=,=,=,=,+,k,M,s,J =,=,=,=,=,=,=,=,+,h,E,mt,P =,=,=,=,+,h,E,n,-,d,@,l,T +,p,E,n,-,t,i,=,-,k,M,s,J =,=,=,=,-,pl,a,=,+,t,a,n,T =,=,=,=,+,w,a,=,-,G,@,n,T =,=,=,=,=,=,=,=,+,h,E,n,E -,s,i,=,-,f,E,rs,-,h,M,t,J =,=,=,=,=,=,=,=,+,n,|,s,J =,=,=,=,+,z,L,G,-,n,A,p,J =,=,=,=,-,d,i,=,+,n,e,=,T +,h,E,=,-,b,@,=,-,d,I,N,E =,=,=,=,+,sxr,A,=,-,b,@,r,T -,=,A,=,-,b,O,=,+,n,e,=,T =,=,=,=,=,=,=,=,+,fr,O,ns,J -,r,i,=,+,v,i,r,-,=,a,l,T =,=,=,=,=,=,=,=,+,l,O,p,J =,=,=,=,+,b,O,rst,-,s,A,k,J -,h,i,s,+,t,o,=,-,r,i,=,T =,=,=,=,=,=,=,=,+,t,A,Nk,J =,=,=,=,-,v,i,=,+,j,o,l,T =,=,=,=,+,m,K,=,-,bl,u,m,E =,=,=,=,+,l,K,s,-,t,@,r,T -,sx,O,r,-,s,@,=,+,n,e,r,T +,r,e,=,-,s,e,=,-,d,a,=,T =,=,=,=,+,=,o,r,-,kn,O,p,J =,=,=,=,-,h,u,=,+,r,a,=,T =,=,=,=,+,k,A,=,-,p,@,r,T =,=,=,=,-,fl,A,m,+,b,M,w,T =,=,=,=,=,=,=,=,+,h,O,f,J =,=,=,=,=,=,=,=,+,s,E,l,E =,=,=,=,=,=,=,=,+,spr,i,t,J =,=,=,=,+,h,O,=,-,m,@,l,T =,=,=,=,=,=,=,=,+,=,E,s,J =,=,=,=,+,bl,u,=,-,s,@,m,P =,=,=,=,=,=,=,=,+,br,A,nt,J =,=,=,=,=,=,=,=,+,t,},k,J =,=,=,=,=,=,=,=,+,h,O,nt,J =,=,=,=,+,vw,A,=,-,j,|,r,T +,=,A,x,-,t,@,r,-,pl,a,ts,J -,w,I,n,-,t,@,r,-,k,I,nt,J =,=,=,=,-,=,o,=,+,bl,i,=,T =,=,=,=,=,=,=,=,+,kl,e,t,J 
=,=,=,=,+,v,u,d,-,b,A,Nk,J =,=,=,=,+,b,A,Ng,-,b,u,k,J -,t,a,=,-,f,@,=,+,r,e,l,T +,sp,O,n,-,s,@,=,-,n,E,t,J +,w,O,rst,-,h,o,=,-,r,@,n,T =,=,=,=,+,p,E,=,-,d,@,l,T =,=,=,=,=,=,=,=,+,=,E,nt,J =,=,=,=,=,=,=,=,+,kl,O,p,J =,=,=,=,+,h,o,x,-,t,@,=,T =,=,=,=,+,xr,A,=,-,spr,i,t,J =,=,=,=,=,=,=,=,+,h,o,p,J +,r,a,=,-,d,@,r,-,w,i,l,T -,n,a,=,-,Gr,a,=,-,b,e,lt,J +,m,e,=,-,n,e,=,-,m,@,r,T +,l,o,=,-,Z,@,=,-,b,A,nt,J =,=,=,=,+,p,K,=,-,l,@,r,T +,k,I,=,-,k,@,r,-,b,I,l,E =,=,=,=,+,h,O,r,-,z,@,l,T =,=,=,=,=,=,=,=,+,dr,a,j,T =,=,=,=,+,z,K,=,-,z,A,k,J =,=,=,=,-,h,o,=,+,t,E,l,E -,v,@,=,+,h,e,rs,-,h,a,n,T -,=,I,n,-,d,i,=,+,j,a,n,T =,=,=,=,=,=,=,=,+,w,E,p,E =,=,=,=,=,=,=,=,+,b,I,x,E =,=,=,=,-,b,A,n,+,d,i,t,J =,=,=,=,-,k,O,r,+,j,a,l,T =,=,=,=,=,=,=,=,+,m,E,m,E +,j,a,=,-,g,u,=,-,w,A,r,T +,p,I,=,-,t,@,l,-,st,O,k,J +,m,A,r,-,t,@,=,-,k,o,=,T =,=,=,=,+,st,A,m,-,p,@,r,T +,h,L,s,-,x,@,=,-,n,o,t,J =,=,=,=,=,=,=,=,+,st,o,f,J +,l,E,=,-,s,@,=,-,n,a,r,T +,h,L,s,-,m,u,=,-,d,@,r,T =,=,=,=,+,kn,},=,-,f,@,l,T =,=,=,=,=,=,=,=,+,m,u,s,J =,=,=,=,=,=,=,=,+,h,},lp,J +,h,},lp,-,m,o,=,-,t,O,r,T =,=,=,=,+,zw,e,b,-,d,i,r,T =,=,=,=,+,kn,K,=,-,p,@,r,T =,=,=,=,=,=,=,=,+,s,E,t,J =,=,=,=,+,sp,o,r,-,b,u,k,J -,j,@,r,-,l,e,=,-,p,@,l,T +,z,a,=,-,r,@,=,-,st,},k,J =,=,=,=,+,k,i,=,-,z,@,l,T =,=,=,=,-,=,i,=,+,d,e,=,T =,=,=,=,+,w,e,r,-,pr,a,t,J =,=,=,=,=,=,=,=,+,xr,M,w,T =,=,=,=,+,kl,I,=,-,k,@,r,T =,=,=,=,-,r,y,=,+,br,i,k,J =,=,=,=,+,b,A,t,-,h,O,k,J =,=,=,=,+,p,O,=,-,k,@,t,J =,=,=,=,+,t,i,=,-,t,@,l,T =,=,=,=,+,l,O,=,-,G,@,r,T +,k,O,=,-,m,@,r,-,pr,a,t,J -,m,i,=,+,n,e,r,-,Gl,A,s,J -,l,o,=,-,t,@,=,+,r,K,=,T =,=,=,=,+,=,I,m,-,br,a,k,J -,pr,o,=,-,j,E,k,+,t,i,l,T -,x,e,=,-,v,@,=,+,r,K,=,T =,=,=,=,=,=,=,=,+,k,o,l,T -,f,A,k,-,t,o,=,+,r,K,=,T =,=,=,=,+,p,i,=,-,p,@,r,T =,=,=,=,+,=,I,N,-,Gr,e,p,J =,=,=,=,+,p,e,=,-,n,@,s,J +,k,A,n,-,s,i,=,-,h,L,s,J =,=,=,=,+,w,e,=,-,z,@,n,T =,=,=,=,=,=,=,=,+,sm,u,s,J =,=,=,=,=,=,=,=,+,r,E,m,E -,d,@,r,-,w,a,=,-,G,@,n,T =,=,=,=,+,=,O,r,-,G,@,l,T +,spl,I,n,-,t,@,r,-,t,A,N,E =,=,=,=,+,z,},s,-,t,@,r,T 
=,=,=,=,=,=,=,=,+,sp,I,l,E -,b,@,=,+,l,E,x,-,s,@,l,T +,sx,u,m,-,p,u,t,-,s,@,r,T =,=,=,=,=,=,=,=,+,l,o,t,J -,r,@,=,-,k,O,r,-,d,@,r,T =,=,=,=,=,=,=,=,+,xr,A,p,J =,=,=,=,+,s,i,=,-,t,@,r,T =,=,=,=,+,v,e,=,-,t,@,r,T =,=,=,=,=,=,=,=,+,st,O,mp,J -,k,o,=,+,r,I,s,-,t,@,=,J +,sp,e,l,-,m,A,=,-,k,@,r,T =,=,=,=,+,=,E,t,-,m,a,l,T +,t,E,m,-,p,o,=,-,r,e,=,T =,=,=,=,=,=,=,=,+,v,e,r,T =,=,=,=,+,b,E,t,-,j,A,s,J -,pl,@,=,+,z,i,=,-,r,K,s,J +,k,o,=,-,b,a,=,-,k,A,n,E =,=,=,=,+,m,i,=,-,kr,o,=,T =,=,=,=,+,r,O,l,-,br,i,f,J =,=,=,=,=,=,=,=,+,r,},x,J =,=,=,=,=,=,=,=,+,j,A,s,J =,=,=,=,=,=,=,=,+,bl,A,t,J =,=,=,=,+,z,K,=,-,l,@,r,T =,=,=,=,-,j,o,=,+,d,I,n,E =,=,=,=,=,=,=,=,+,=,y,=,T =,=,=,=,+,pr,a,=,-,t,@,r,T =,=,=,=,+,j,o,=,-,j,o,=,T =,=,=,=,+,j,O,=,-,k,@,n,T =,=,=,=,=,=,=,=,+,sx,E,l,E =,=,=,=,-,k,A,s,+,tr,O,l,E -,N,@,r,-,w,K,=,-,z,I,N,K +,k,I,n,-,d,@,r,-,h,o,ft,J =,=,=,=,+,j,O,N,-,k,@,r,T =,=,=,=,=,=,=,=,+,p,o,t,J =,=,=,=,+,f,O,l,-,d,@,r,T =,=,=,=,+,r,O,=,-,f,@,l,T =,=,=,=,+,j,},m,-,b,o,=,T =,=,=,=,-,m,o,=,+,m,E,nt,J -,k,O,m,-,pl,i,=,+,m,E,nt,J =,=,=,=,-,x,@,=,+,zw,E,l,E =,=,=,=,+,spr,O,=,-,k,@,l,T -,b,@,=,+,t,a,l,-,br,i,f,J -,=,A,=,+,dr,E,s,-,k,a,rt,J -,v,@,r,+,G,I,=,-,s,I,N,K =,=,=,=,+,st,K,l,-,bl,u,m,E =,=,=,=,=,=,=,=,+,bl,u,m,E =,=,=,=,+,l,I,x,-,t,@,r,T =,=,=,=,+,l,I,f,-,l,A,f,J =,=,=,=,+,kl,L,v,-,b,e,n,T =,=,=,=,=,=,=,=,+,k,a,r,T =,=,=,=,=,=,=,=,+,kl,|,r,T =,=,=,=,+,kr,O,N,-,k,@,l,T =,=,=,=,=,=,=,=,+,t,A,N,E =,=,=,=,=,=,=,=,+,z,o,=,T =,=,=,=,=,=,=,=,+,sx,I,m,E =,=,=,=,+,sp,K,=,-,j,@,r,T =,=,=,=,=,=,=,=,+,sm,a,k,J +,r,i,=,-,z,i,=,-,k,o,=,T -,w,K,=,-,z,@,=,+,r,E,s,J =,=,=,=,=,=,=,=,+,sn,e,=,T =,=,=,=,+,k,A,=,-,x,@,l,T =,=,=,=,+,vl,i,G,-,r,K,s,J =,=,=,=,=,=,=,=,+,p,e,r,T =,=,=,=,=,=,=,=,+,w,A,N,E =,=,=,=,-,b,@,=,+,dr,K,f,J -,x,@,=,+,n,O,=,-,f,@,l,T =,=,=,=,+,sp,o,=,-,kr,e,ft,J =,=,=,=,+,l,E,nz,-,b,|,rs,J =,=,=,=,+,k,o,=,-,n,I,N,K =,=,=,=,=,=,=,=,+,pr,M,w,T =,=,=,=,=,=,=,=,+,v,O,s,J -,sx,O,r,-,s,@,=,+,n,e,l,T =,=,=,=,=,=,=,=,+,s,A,p,J =,=,=,=,-,k,a,=,+,l,O,t,J =,=,=,=,=,=,=,=,+,xl,A,s,J 
=,=,=,=,+,f,A,=,-,k,@,l,T =,=,=,=,=,=,=,=,+,sp,O,n,E =,=,=,=,=,=,=,=,+,n,A,r,E -,s,i,=,-,f,E,r,-,h,M,t,J +,p,e,r,-,t,a,=,-,f,@,l,T +,n,A,x,-,t,a,=,-,f,@,l,T -,d,@,=,-,h,A,nd,-,zb,e,n,T =,=,=,=,-,xr,i,=,+,j,O,t,J =,=,=,=,+,h,o,=,-,r,@,n,T -,bl,O,n,+,d,i,=,-,n,@,=,T -,n,E,s,-,t,@,=,+,r,K,=,T =,=,=,=,+,h,A,rt,-,f,I,lm,P =,=,=,=,=,=,=,=,+,=,o,x,J +,h,L,s,-,h,M,t,-,st,@,r,T =,=,=,=,=,=,=,=,+,s,M,s,J =,=,=,=,+,l,e,=,-,l,i,=,T =,=,=,=,+,=,O,p,-,r,u,r,T =,=,=,=,+,r,E,f,-,t,@,r,T =,=,=,=,=,=,=,=,+,br,o,t,J =,=,=,=,+,p,I,=,-,s,@,r,T =,=,=,=,+,pl,A,t,-,f,O,rm,P +,=,O,p,-,w,A,r,-,m,@,r,T =,=,=,=,+,t,},=,-,n,@,l,T -,=,A,=,+,dr,E,z,-,b,A,nt,J =,=,=,=,=,=,=,=,+,xl,I,mp,J +,k,I,n,-,d,@,r,-,t,a,l,T =,=,=,=,=,=,=,=,+,m,},s,J +,h,L,s,-,h,M,d,-,b,u,k,J -,l,E,=,-,k,@,r,+,n,K,=,T =,=,=,=,-,v,a,=,+,p,|,r,T =,=,=,=,-,k,a,=,+,p,u,n,T =,=,=,=,-,m,A,m,+,z,E,l,E =,=,=,=,+,sxr,A,=,-,p,@,r,T =,=,=,=,=,=,=,=,+,n,e,p,J =,=,=,=,-,x,@,=,+,wr,I,xt,J =,=,=,=,=,=,=,=,+,b,O,k,J =,=,=,=,+,n,K,=,-,p,@,r,T -,k,a,=,+,r,A,k,-,t,@,r,T =,=,=,=,=,=,=,=,+,kr,L,n,T -,k,a,=,-,r,a,=,+,v,a,n,T =,=,=,=,-,S,a,=,+,k,o,=,T -,b,o,=,+,n,a,=,-,d,@,=,J =,=,=,=,=,=,=,=,+,p,},l,E +,=,A,f,-,l,K,=,-,d,I,N,K =,=,=,=,+,n,E,=,-,b,@,=,T -,G,@,=,-,k,e,=,-,v,@,r,T =,=,=,=,-,k,A,r,+,t,O,n,E =,=,=,=,-,k,A,r,+,v,e,l,T =,=,=,=,-,r,i,=,+,v,i,r,T =,=,=,=,+,tr,E,m,-,b,u,k,J =,=,=,=,+,l,e,s,-,pl,A,Nk,J =,=,=,=,+,kr,A,=,-,b,@,=,T -,w,@,=,-,l,@,gz,-,b,o,t,J +,l,A,m,-,p,@,=,-,k,M,s,J +,=,a,n,-,d,A,xt,-,str,e,p,J =,=,=,=,+,p,O,ls,-,t,A,s,J -,k,y,=,+,p,i,=,-,d,o,=,T -,p,e,=,-,k,i,=,+,n,e,s,J +,w,a,=,-,t,@,r,-,k,I,p,E =,=,=,=,+,k,a,=,-,t,@,r,T =,=,=,=,+,l,o,p,-,xr,a,f,J =,=,=,=,+,k,E,=,-,f,@,r,T =,=,=,=,+,w,O,rm,-,G,A,t,J =,=,=,=,=,=,=,=,+,m,A,nt,J +,b,L,=,-,t,@,=,-,k,A,ns,J =,=,=,=,+,bl,O,g,-,b,A,nt,J -,p,i,=,-,j,},m,-,b,A,l,E +,k,I,=,-,p,@,=,-,tr,A,p,J =,=,=,=,+,sp,i,=,-,r,I,N,K =,=,=,=,=,=,=,=,+,k,e,n,T =,=,=,=,+,j,E,=,-,k,@,r,T -,pl,A,=,-,t,@,=,+,Gr,O,nt,J =,=,=,=,+,m,o,=,-,l,@,n,T =,=,=,=,=,=,=,=,+,pr,A,k,J =,=,=,=,-,fl,A,m,+,b,e,w,T 
=,=,=,=,+,s,u,=,-,bl,O,k,J +,sp,e,l,-,G,@,=,-,n,o,t,J +,h,|,=,-,G,@,=,-,st,|,n,T =,=,=,=,=,=,=,=,+,r,i,m,P +,j,},=,-,f,@,rs,-,h,O,nt,J =,=,=,=,+,w,A,z,-,b,L,l,T =,=,=,=,-,=,A,=,+,dr,E,s,J =,=,=,=,=,=,=,=,+,bl,a,s,J =,=,=,=,+,s,I,=,-,N,@,l,T =,=,=,=,=,=,=,=,+,kl,L,f,J =,=,=,=,+,k,E,=,-,n,@,s,J =,=,=,=,+,k,E,=,-,G,@,=,T =,=,=,=,+,k,e,=,-,p,i,=,T =,=,=,=,=,=,=,=,+,kl,M,w,T -,d,@,l,-,v,u,dz,-,b,e,n,T =,=,=,=,=,=,=,=,+,l,a,x,J +,k,E,r,-,k,@,=,-,z,A,k,J +,m,O,=,-,s,@,l,-,kr,A,p,E =,=,=,=,-,=,O,k,+,s,a,l,T =,=,=,=,-,=,E,m,+,bl,e,m,P =,=,=,=,-,m,A,=,+,S,i,n,T =,=,=,=,-,t,a,=,+,l,O,n,E +,r,A,=,-,m,@,=,-,l,a,r,T =,=,=,=,+,k,E,rst,-,m,A,n,E -,t,o,=,-,Gr,a,=,+,f,i,=,T =,=,=,=,=,=,=,=,+,l,O,N,E =,=,=,=,+,k,e,=,-,t,@,n,T =,=,=,=,=,=,=,=,+,v,I,m,E =,=,=,=,+,k,E,=,-,t,I,N,K -,=,a,=,-,fr,i,=,+,k,a,n,T +,=,o,=,-,p,@,=,-,n,I,N,E =,=,=,=,=,=,=,=,+,b,o,n,T =,=,=,=,+,=,E,k,-,str,a,=,T =,=,=,=,-,k,a,=,+,d,o,=,T =,=,=,=,+,p,A,l,-,t,@,r,T =,=,=,=,+,k,|,=,-,v,@,l,T =,=,=,=,+,m,O,r,-,G,@,n,T =,=,=,=,+,m,O,z,-,bl,u,m,P =,=,=,=,+,=,a,=,-,v,e,=,T -,s,},p,+,s,i,=,-,d,i,=,T +,s,},=,-,k,@,l,-,dr,A,f,J =,=,=,=,=,=,=,=,+,kr,a,n,T =,=,=,=,=,=,=,=,+,k,i,m,P =,=,=,=,+,l,e,=,-,d,i,=,T -,x,A,r,-,n,i,=,+,t,y,r,T =,=,=,=,=,=,=,=,+,d,A,x,J =,=,=,=,-,t,E,l,+,j,o,r,T =,=,=,=,=,=,=,=,+,dr,a,t,J =,=,=,=,=,=,=,=,+,k,O,t,J =,=,=,=,+,x,},nst,-,k,o,p,J =,=,=,=,=,=,=,=,+,sl,I,p,J =,=,=,=,+,zw,E,m,-,b,A,t,J -,t,},=,-,s,@,=,+,d,o,r,T =,=,=,=,+,b,O,=,-,k,I,N,K =,=,=,=,+,x,A,z,-,d,e,l,T =,=,=,=,-,w,a,=,+,G,O,n,E =,=,=,=,+,sxr,K,f,-,st,@,r,T =,=,=,=,=,=,=,=,+,b,A,Nk,J -,p,@,l,-,sx,I,=,-,l,@,r,T -,s,u,=,+,fl,|,rs,-,h,O,k,J +,k,I,n,-,d,@,r,-,f,i,ts,J +,l,L,t,-,spr,e,=,-,k,@,r,T +,sp,E,l,-,d,@,=,-,b,A,k,J =,=,=,=,=,=,=,=,+,sp,I,t,J +,=,O,n,-,w,e,rz,-,b,e,st,J =,=,=,=,+,st,E,m,-,pl,a,t,J =,=,=,=,=,=,=,=,+,str,I,k,J =,=,=,=,+,s,o,=,-,f,a,=,T +,k,I,n,-,d,@,r,-,st,u,l,T =,=,=,=,-,sp,i,=,+,r,a,l,T +,k,I,n,-,d,@,r,-,t,L,x,J =,=,=,=,-,s,a,=,+,l,O,n,E =,=,=,=,=,=,=,=,+,s,O,p,J +,=,O,r,-,d,@,r,-,br,i,f,J +,k,I,n,-,d,@,r,-,z,I,t,J 
=,=,=,=,+,z,e,=,-,b,A,k,J +,t,e,=,-,k,@,=,-,n,I,N,E =,=,=,=,+,p,o,lz,-,b,A,nt,J -,z,@,=,-,h,u,t,-,st,@,r,T +,=,y,=,-,l,@,=,-,v,E,l,E +,k,I,=,-,p,@,=,-,b,M,t,J =,=,=,=,-,l,o,=,+,Z,e,=,T =,=,=,=,=,=,=,=,+,zw,e,r,T =,=,=,=,-,p,@,=,+,d,a,l,T =,=,=,=,+,t,o,=,-,G,a,=,T -,v,@,=,+,h,e,rz,-,b,e,st,J -,=,a,=,-,v,O,n,+,t,y,r,T =,=,=,=,+,kl,A,d,-,bl,A,t,J =,=,=,=,=,=,=,=,+,tw,K,x,J =,=,=,=,=,=,=,=,+,=,o,r,T -,r,@,=,+,v,O,l,-,v,@,r,T =,=,=,=,-,pl,A,nt,+,s,u,n,T =,=,=,=,+,s,K,n,-,h,L,s,J +,sx,a,=,-,k,@,l,-,h,L,s,J -,p,a,=,-,r,a,=,+,pl,y,=,T =,=,=,=,+,n,a,m,-,k,a,rt,J -,v,@,=,+,s,I,m,-,b,@,l,T =,=,=,=,+,=,o,x,-,h,a,r,T =,=,=,=,+,r,y,=,-,z,i,=,T =,=,=,=,+,zw,A,x,-,t,@,l,T +,l,y,=,-,s,i,=,-,f,E,r,T =,=,=,=,=,=,=,=,+,w,E,rk,J =,=,=,=,=,=,=,=,+,p,e,n,T =,=,=,=,=,=,=,=,+,x,},m,E +,kl,K,=,-,n,@,x,-,h,K,t,J +,zw,e,t,-,k,a,=,-,m,@,r,T =,=,=,=,=,=,=,=,+,kl,E,m,E -,p,A,=,+,p,a,=,-,v,@,r,T =,=,=,=,+,kl,E,=,-,p,@,r,T =,=,=,=,+,pr,a,t,-,st,@,r,T =,=,=,=,+,kl,E,ts,-,pr,a,t,J +,k,A,=,-,s,a,=,-,k,o,p,J =,=,=,=,=,=,=,=,+,br,i,s,J =,=,=,=,+,br,a,=,-,s,@,m,P =,=,=,=,-,p,a,=,+,l,K,=,T =,=,=,=,+,s,o,=,-,l,o,=,T +,=,A,f,-,t,E,=,-,l,i,t,J =,=,=,=,=,=,=,=,+,zw,K,=,T =,=,=,=,=,=,=,=,+,k,O,nt,J =,=,=,=,-,d,O,l,+,f,K,n,T -,p,@,=,-,r,i,=,+,t,i,f,J =,=,=,=,=,=,=,=,+,kl,O,mp,J =,=,=,=,+,kl,O,n,-,t,@,r,T +,p,a,r,-,d,@,=,-,st,a,rt,J =,=,=,=,=,=,=,=,+,dr,O,p,J =,=,=,=,-,p,a,=,+,p,i,r,T =,=,=,=,+,m,i,=,-,m,i,=,T =,=,=,=,=,=,=,=,+,pr,},l,E =,=,=,=,-,k,u,t,+,s,i,r,T -,sx,a,=,-,v,@,r,+,d,K,n,T =,=,=,=,+,kn,I,p,-,=,o,x,J =,=,=,=,+,k,K,=,-,z,@,r,T =,=,=,=,+,k,E,rk,-,s,A,k,J =,=,=,=,=,=,=,=,+,k,u,ts,J +,k,I,n,-,d,@,r,-,w,E,t,J -,v,@,r,+,d,O,m,-,b,u,k,J =,=,=,=,=,=,=,=,+,kn,a,p,J +,k,I,n,-,d,@,r,-,b,u,k,J =,=,=,=,+,h,a,g,-,b,e,n,T =,=,=,=,-,k,u,=,+,p,e,=,T =,=,=,=,-,s,a,=,+,t,e,=,T =,=,=,=,+,kn,A,=,-,p,@,r,T =,=,=,=,=,=,=,=,+,w,e,r,T =,=,=,=,=,=,=,=,+,wr,A,t,J -,n,a,s,-,=,A,=,-,p,@,l,T =,=,=,=,=,=,=,=,+,kr,o,s,J -,f,},n,+,d,e,=,-,r,I,N,K =,=,=,=,+,kn,e,=,-,v,@,l,T =,=,=,=,=,=,=,=,+,m,A,m,E =,=,=,=,=,=,=,=,+,h,u,r,T 
=,=,=,=,+,=,A,z,-,b,A,k,J -,sx,A,r,+,m,I,N,-,k,@,l,T =,=,=,=,+,n,a,=,-,v,@,l,T =,=,=,=,+,k,A,r,-,p,@,r,T =,=,=,=,+,kn,I,p,-,x,A,t,J =,=,=,=,=,=,=,=,+,spr,o,k,J +,z,i,=,-,k,@,=,-,z,a,l,T +,kl,E,r,-,k,@,=,-,b,a,n,T -,t,@,=,-,k,O,=,-,f,@,r,T =,=,=,=,+,t,e,=,-,m,a,=,T =,=,=,=,=,=,=,=,+,=,L,l,T -,b,@,=,+,st,E,=,-,l,I,N,E =,=,=,=,=,=,=,=,+,kn,o,p,J =,=,=,=,=,=,=,=,+,kn,O,p,J =,=,=,=,=,=,=,=,+,kn,O,t,J =,=,=,=,-,fl,a,=,+,n,E,l,E =,=,=,=,=,=,=,=,+,kn,L,st,J =,=,=,=,=,=,=,=,+,b,a,r,T =,=,=,=,=,=,=,=,+,z,e,p,J -,v,@,=,+,r,A,=,-,s,I,N,K =,=,=,=,=,=,=,=,+,b,O,s,J -,n,@,=,-,k,o,=,-,m,@,r,T -,k,K,=,-,z,@,=,+,r,I,n,E =,=,=,=,+,z,a,=,-,d,@,l,T =,=,=,=,=,=,=,=,+,=,a,t,J =,=,=,=,+,=,L,t,-,st,A,p,J =,=,=,=,=,=,=,=,+,k,u,r,T =,=,=,=,-,b,a,=,+,v,E,t,J =,=,=,=,+,w,A,s,-,p,I,t,J +,=,A,v,-,d,e,=,-,l,I,N,K +,k,A,n,-,d,@,=,-,l,a,r,T =,=,=,=,=,=,=,=,+,s,},l,E =,=,=,=,=,=,=,=,+,pl,o,j,T =,=,=,=,+,=,e,k,-,h,o,rn,T =,=,=,=,+,sl,a,p,-,m,A,t,J =,=,=,=,+,kr,I,=,-,b,@,=,T +,t,u,=,-,s,i,=,-,t,@,r,T =,=,=,=,=,=,=,=,+,x,O,m,E +,k,o,=,-,G,@,l,-,fl,E,s,J =,=,=,=,=,=,=,=,+,br,u,r,T =,=,=,=,+,s,I,=,-,k,@,l,T =,=,=,=,+,k,O,ks,-,m,a,t,J =,=,=,=,+,k,O,l,-,d,@,r,T =,=,=,=,=,=,=,=,+,s,K,n,T =,=,=,=,+,k,a,=,-,b,@,l,T +,S,I,m,-,p,A,n,-,s,e,=,T =,=,=,=,+,w,E,=,-,k,@,r,T =,=,=,=,=,=,=,=,+,m,e,r,T +,kr,a,=,-,j,@,=,-,p,o,t,J +,=,L,t,-,f,u,=,-,r,I,N,K =,=,=,=,-,p,E,n,+,S,u,n,T =,=,=,=,+,t,u,=,-,m,a,t,J =,=,=,=,=,=,=,=,+,p,a,r,T =,=,=,=,-,d,@,=,+,m,i,=,T +,d,o,ts,-,kl,O,=,-,p,@,r,T -,p,a,=,-,r,a,=,+,S,y,t,J =,=,=,=,-,k,O,m,+,pl,O,t,J =,=,=,=,=,=,=,=,+,sx,L,t,J -,k,o,=,-,n,I,=,+,N,I,n,E =,=,=,=,+,k,O,=,-,t,@,r,T +,l,A,=,-,x,@,=,-,b,E,k,J =,=,=,=,-,k,a,=,+,t,u,n,T -,s,i,=,-,f,E,rz,-,d,o,s,J =,=,=,=,-,m,a,=,+,d,A,m,E =,=,=,=,+,r,I,m,-,p,@,l,T =,=,=,=,+,x,e,=,-,v,@,l,T -,j,e,=,-,d,E,=,+,st,A,l,E =,=,=,=,=,=,=,=,+,g,o,l,T +,k,o,k,-,w,E,=,-,k,@,r,T +,k,A,N,-,G,u,=,-,r,u,=,T =,=,=,=,=,=,=,=,+,t,O,N,E =,=,=,=,+,v,E,t,-,p,L,st,J =,=,=,=,=,=,=,=,+,k,o,n,T =,=,=,=,-,j,a,=,+,p,O,n,E +,h,e,=,-,m,@,l,-,br,L,t,J 
=,=,=,=,-,v,@,r,+,z,E,t,J =,=,=,=,+,v,E,t,-,=,o,x,J =,=,=,=,+,k,o,s,-,n,a,m,P -,m,o,=,-,n,y,=,+,m,E,nt,J =,=,=,=,=,=,=,=,+,st,O,f,J =,=,=,=,+,sn,a,=,-,t,@,r,T =,=,=,=,=,=,=,=,+,k,O,p,J =,=,=,=,+,=,e,t,-,st,O,k,J +,w,E,n,-,t,@,l,-,t,e,f,J =,=,=,=,=,=,=,=,+,d,K,k,J -,p,A,r,+,t,i,=,-,k,@,l,T =,=,=,=,+,xr,E,=,-,p,@,l,T -,k,o,=,+,r,I,s,-,t,@,=,T =,=,=,=,+,d,y,=,-,w,o,=,T =,=,=,=,+,d,E,Nk,-,w,O,lk,J +,p,K,=,-,p,@,=,-,str,o,=,T =,=,=,=,+,p,e,=,-,p,@,r,T =,=,=,=,+,h,E,md,-,j,},rk,J =,=,=,=,+,w,E,=,-,G,@,=,T =,=,=,=,+,w,e,=,-,r,@,lt,J -,b,@,=,-,l,a,r,-,st,@,r,T =,=,=,=,-,k,O,s,+,t,y,m,P =,=,=,=,+,str,O,t,-,kl,E,p,J =,=,=,=,+,l,i,=,-,v,@,rt,J =,=,=,=,+,k,M,=,-,kl,|,m,P =,=,=,=,+,b,K,=,-,t,@,r,T -,w,A,n,-,d,@,=,-,l,I,N,E +,t,o,=,-,v,@,r,-,st,O,k,J =,=,=,=,-,k,o,=,+,z,K,n,T +,k,E,r,-,s,@,=,-,l,a,r,T =,=,=,=,=,=,=,=,+,k,u,f,J +,z,E,=,-,d,L,=,-,v,@,l,T =,=,=,=,+,r,|,k,-,fl,E,s,J =,=,=,=,=,=,=,=,+,=,O,lm,P =,=,=,=,+,=,A,l,-,t,a,r,T =,=,=,=,+,kr,a,n,-,l,e,r,T =,=,=,=,=,=,=,=,+,w,o,rt,J =,=,=,=,=,=,=,=,+,br,O,k,J +,k,O,=,-,f,i,=,-,k,O,p,J =,=,=,=,=,=,=,=,+,sn,u,t,J =,=,=,=,+,kn,A,l,-,k,o,p,J -,m,A,G,+,n,e,t,-,st,a,f,J +,t,e,=,-,b,@,=,-,sx,L,t,J -,m,a,=,+,d,e,=,-,r,a,=,T =,=,=,=,=,=,=,=,+,d,o,rn,T =,=,=,=,+,pl,A,=,-,k,@,r,T =,=,=,=,=,=,=,=,+,d,O,l,E =,=,=,=,-,vr,i,n,+,d,I,n,E =,=,=,=,+,b,},s,-,k,a,rt,J =,=,=,=,=,=,=,=,+,kr,M,w,T +,m,o,=,-,d,@,=,-,sn,},f,J +,=,I,n,-,sx,|,=,-,r,I,N,K -,=,O,=,-,f,i,=,+,s,i,r,T =,=,=,=,=,=,=,=,+,xr,},t,J =,=,=,=,=,=,=,=,+,r,},nt,J =,=,=,=,=,=,=,=,+,r,u,=,T =,=,=,=,=,=,=,=,+,kr,I,p,J +,p,O,r,-,n,o,=,-,bl,A,t,J =,=,=,=,=,=,=,=,+,kr,i,l,T =,=,=,=,+,=,O,=,-,t,@,r,T -,v,i,=,+,z,a,=,-,ts,i,=,T -,t,A,=,-,l,@,=,-,b,u,k,J +,t,o,=,-,v,@,=,-,n,a,r,T =,=,=,=,-,s,O,l,+,d,a,t,J +,=,O,=,-,s,@,=,-,l,A,p,J -,z,i,=,+,G,|,=,-,n,@,r,T =,=,=,=,+,t,e,=,-,z,e,f,J -,kr,o,=,-,k,o,=,+,d,I,l,E =,=,=,=,+,l,e,=,-,z,@,r,T -,r,E,n,-,t,@,=,+,n,i,r,T +,b,},N,-,G,a,=,-,l,o,=,T =,=,=,=,=,=,=,=,+,p,E,ls,J =,=,=,=,-,s,A,=,+,f,i,r,T =,=,=,=,+,z,a,d,-,b,A,k,J =,=,=,=,+,kr,L,=,-,m,@,l,T 
=,=,=,=,+,sx,M,=,-,d,@,r,T =,=,=,=,+,sm,e,r,-,s,@,l,T =,=,=,=,+,r,A,f,-,t,@,r,T =,=,=,=,-,=,a,=,+,h,O,rn,T =,=,=,=,+,fl,a,=,-,t,@,r,T =,=,=,=,+,b,I,=,-,k,@,l,T -,d,i,=,-,r,E,k,+,tw,a,r,T =,=,=,=,-,fl,a,=,+,k,O,n,E =,=,=,=,=,=,=,=,+,kr,},l,E =,=,=,=,+,j,a,r,-,b,u,k,J +,=,E,l,-,f,@,=,-,spr,o,k,J =,=,=,=,+,t,O,=,-,f,@,l,T =,=,=,=,+,k,L,=,-,j,@,r,T +,sx,e,=,-,d,@,=,-,d,i,r,T =,=,=,=,=,=,=,=,+,k,L,l,T +,b,O,ts,-,=,M,=,-,t,o,=,T =,=,=,=,+,sn,L,=,-,v,@,r,T =,=,=,=,+,sl,A,x,-,h,u,t,J +,n,},=,-,m,@,r,-,p,a,l,T +,k,},=,-,s,@,n,-,b,A,Nk,J =,=,=,=,+,p,a,=,-,t,@,r,T =,=,=,=,=,=,=,=,+,=,E,l,E =,=,=,=,+,k,},t,-,sm,u,s,J =,=,=,=,+,h,|,=,-,v,@,l,T =,=,=,=,-,b,@,=,+,h,A,N,E -,p,A,r,-,v,@,=,+,n,y,=,T =,=,=,=,=,=,=,=,+,b,a,rt,J =,=,=,=,=,=,=,=,+,k,I,n,E =,=,=,=,+,p,A,=,-,s,@,r,T =,=,=,=,=,=,=,=,+,p,A,nt,J +,n,O,=,-,n,@,=,-,f,O,rt,J =,=,=,=,+,sx,},=,-,b,@,=,T =,=,=,=,=,=,=,=,+,kw,A,st,J =,=,=,=,=,=,=,=,+,l,i,t,J +,d,|,r,-,v,E,n,-,st,@,r,T =,=,=,=,=,=,=,=,+,kw,e,=,T =,=,=,=,-,kl,i,=,+,S,e,=,T =,=,=,=,=,=,=,=,+,p,},t,J =,=,=,=,-,kw,E,t,+,s,y,r,T =,=,=,=,+,kw,e,=,-,z,@,l,T =,=,=,=,+,pl,K,s,-,t,@,r,T =,=,=,=,+,l,},xt,-,x,A,t,J =,=,=,=,+,t,E,n,-,d,@,r,T =,=,=,=,+,w,I,n,-,t,@,r,T +,r,A,k,-,t,@,r,-,tr,E,k,J =,=,=,=,+,t,|,=,-,G,@,l,T =,=,=,=,+,d,L,=,-,k,@,r,T =,=,=,=,=,=,=,=,+,l,a,n,T -,p,a,=,+,r,e,r,-,Gl,A,s,J =,=,=,=,=,=,=,=,+,b,O,t,J =,=,=,=,+,l,e,=,-,b,@,l,T =,=,=,=,=,=,=,=,+,b,i,s,J =,=,=,=,+,sl,O,=,-,k,@,r,T -,x,A,=,-,l,@,=,+,r,K,=,T -,m,o,=,-,r,i,=,+,j,a,n,T =,=,=,=,=,=,=,=,+,vr,M,w,T +,b,o,r,-,d,@,=,-,kn,o,p,J =,=,=,=,=,=,=,=,+,sp,A,n,E +,r,E,n,-,s,i,=,-,z,a,l,T +,b,},r,-,G,@,r,-,m,A,n,E +,l,A,m,-,p,@,=,-,k,A,p,J =,=,=,=,+,sx,o,t,-,h,O,nt,J =,=,=,=,=,=,=,=,+,l,A,mp,J =,=,=,=,-,l,A,m,+,pr,K,=,T =,=,=,=,=,=,=,=,+,l,e,r,T =,=,=,=,+,l,I,xt,-,f,I,s,J -,w,e,=,-,v,@,=,+,r,K,=,T =,=,=,=,=,=,=,=,+,l,A,nt,J =,=,=,=,-,pl,a,=,+,f,O,n,E -,x,e,=,-,r,y,=,+,b,K,n,T =,=,=,=,=,=,=,=,+,kl,A,k,J +,bl,I,k,-,s,@,m,-,sl,A,x,J =,=,=,=,=,=,=,=,+,bl,K,n,T +,z,O,nd,-,m,a,=,-,k,@,r,T =,=,=,=,=,=,=,=,+,tr,K,n,T 
+,l,A,s,-,t,@,r,-,pr,a,t,J -,sn,L,s,-,t,@,=,+,r,K,=,T =,=,=,=,+,z,A,nt,-,str,A,nt,J =,=,=,=,=,=,=,=,+,sn,u,r,T =,=,=,=,+,k,K,=,-,k,@,r,T =,=,=,=,+,sp,E,l,-,b,u,k,J +,w,a,=,-,t,@,r,-,z,O,n,E =,=,=,=,+,l,E,v,-,d,u,k,J +,v,O,r,-,st,@,=,-,d,O,m,E =,=,=,=,+,l,K,=,-,d,I,N,K =,=,=,=,+,p,A,=,-,p,a,=,T =,=,=,=,=,=,=,=,+,l,E,l,E -,b,@,=,+,l,o,=,-,n,I,N,K =,=,=,=,+,z,A,nt,-,k,u,k,J =,=,=,=,+,sx,e,=,-,d,@,l,T =,=,=,=,+,t,i,=,-,n,@,r,T =,=,=,=,=,=,=,=,+,b,O,n,E -,s,i,=,-,f,E,r,-,d,o,s,J +,G,a,=,-,r,@,=,-,b,A,nt,J =,=,=,=,=,=,=,=,+,d,e,l,T +,m,O,=,-,s,@,l,-,kr,A,p,J =,=,=,=,=,=,=,=,+,z,E,s,J -,=,a,=,-,m,@,=,+,r,K,=,T =,=,=,=,+,z,e,=,-,r,K,s,J -,p,A,n,+,t,O,=,-,f,@,l,T =,=,=,=,+,sl,I,=,-,N,@,r,T -,k,O,r,-,p,o,=,+,r,a,l,T =,=,=,=,=,=,=,=,+,sx,o,r,T +,h,A,r,-,t,@,x,-,h,K,t,J =,=,=,=,+,l,|,=,-,G,@,n,T =,=,=,=,=,=,=,=,+,w,I,lx,J =,=,=,=,=,=,=,=,+,=,K,=,T +,r,o,=,-,z,@,=,-,h,u,t,J =,=,=,=,=,=,=,=,+,t,A,s,J =,=,=,=,-,=,E,=,+,f,E,kt,J +,=,e,n,-,d,E,=,-,k,@,r,T +,v,I,=,-,N,@,r,-,k,O,m,E =,=,=,=,=,=,=,=,+,p,E,l,E +,h,L,s,-,m,I,=,-,d,@,l,T -,xr,},=,-,t,@,=,+,r,K,=,T =,=,=,=,+,sn,a,=,-,v,@,l,T -,z,A,=,-,N,@,=,+,r,E,s,J =,=,=,=,+,z,e,=,-,sl,A,x,J +,sx,I,=,-,p,@,rs,-,kl,O,k,J =,=,=,=,+,k,M,=,-,b,O,j,T =,=,=,=,+,tr,},=,-,f,@,l,T =,=,=,=,=,=,=,=,+,l,L,m,P +,b,u,=,-,z,@,=,-,l,a,r,T +,p,A,n,-,z,i,=,-,b,},s,J =,=,=,=,=,=,=,=,+,n,i,r,T =,=,=,=,+,=,A,N,-,k,@,r,T =,=,=,=,=,=,=,=,+,k,u,k,J -,w,A,x,-,t,@,rs,-,h,L,s,J +,h,a,=,-,r,I,N,-,sp,e,t,J =,=,=,=,+,w,I,=,-,G,@,=,T =,=,=,=,=,=,=,=,+,w,E,t,J =,=,=,=,+,w,E,=,-,b,@,=,T =,=,=,=,+,k,},=,-,d,@,=,T =,=,=,=,-,kr,a,=,+,p,o,=,T =,=,=,=,+,w,A,xt,-,h,O,k,J =,=,=,=,+,kr,a,=,-,j,@,r,T =,=,=,=,+,kr,a,=,-,k,@,r,T =,=,=,=,+,k,a,z,-,b,O,l,E =,=,=,=,+,r,A,s,-,p,a,rt,J =,=,=,=,-,m,i,=,+,n,y,t,J =,=,=,=,+,l,i,=,-,t,@,r,T +,l,I,=,-,t,e,=,-,k,@,n,T -,l,e,=,-,d,i,=,+,k,A,nt,J -,v,@,r,+,k,o,p,-,st,@,r,T +,sn,u,p,-,w,I,N,-,k,@,l,T =,=,=,=,+,b,O,=,-,x,@,l,T =,=,=,=,=,=,=,=,+,b,K,=,T +,=,L,t,-,sp,A,=,-,n,I,N,K =,=,=,=,=,=,=,=,+,x,A,N,E =,=,=,=,+,f,I,s,-,t,@,l,T 
=,=,=,=,+,sx,L,=,-,j,@,r,T -,l,o,=,+,k,E,=,-,d,|,r,T =,=,=,=,=,=,=,=,+,dr,L,f,J =,=,=,=,=,=,=,=,+,p,a,n,T -,k,a,=,-,r,a,=,+,b,K,n,T =,=,=,=,+,kl,e,t,-,h,O,k,J +,n,a,=,-,v,@,l,-,b,A,nt,J -,f,@,r,+,t,E,n,-,s,i,=,T +,f,o,=,-,t,o,=,-,h,u,k,J -,f,O,=,-,p,@,=,+,r,K,=,T =,=,=,=,-,b,a,=,+,z,L,n,T =,=,=,=,-,pr,I,n,+,s,E,s,J =,=,=,=,+,l,o,ts,-,m,A,n,E =,=,=,=,=,=,=,=,+,xr,A,m,E =,=,=,=,-,xr,a,=,+,v,I,n,E =,=,=,=,-,k,A,r,+,b,e,l,T +,str,a,t,-,n,a,m,-,b,O,rt,J =,=,=,=,+,pr,I,=,-,k,@,r,T =,=,=,=,=,=,=,=,+,k,O,rst,J =,=,=,=,+,g,E,N,-,st,@,r,T =,=,=,=,-,p,O,r,+,t,i,k,J =,=,=,=,=,=,=,=,+,k,o,t,J =,=,=,=,+,kn,|,=,-,k,@,l,T =,=,=,=,+,l,O,=,-,r,@,=,T =,=,=,=,=,=,=,=,+,l,O,p,E =,=,=,=,+,fl,L,=,-,t,@,r,T +,b,I,=,-,t,@,r,-,=,y,r,T =,=,=,=,+,pl,a,dz,-,br,i,f,J =,=,=,=,+,l,o,=,-,v,@,r,T =,=,=,=,=,=,=,=,+,k,|,rs,J =,=,=,=,+,l,A,=,-,x,@,r,T =,=,=,=,+,n,a,=,-,br,o,t,J =,=,=,=,+,t,e,=,-,h,L,s,J -,p,a,=,-,r,a,=,+,d,K,s,J =,=,=,=,=,=,=,=,+,st,E,l,E =,=,=,=,=,=,=,=,+,p,a,rt,J -,s,i,=,-,f,E,r,-,pl,A,nt,J -,v,@,=,+,h,e,rz,-,b,e,st,J +,bl,u,d,-,l,I,=,-,x,a,m,P =,=,=,=,-,r,o,=,+,z,K,n,T =,=,=,=,=,=,=,=,+,b,a,s,J +,G,a,=,-,d,@,=,-,r,I,N,E =,=,=,=,+,sx,a,=,-,d,y,w,T -,k,a,=,+,d,a,=,-,v,@,r,T =,=,=,=,+,l,L,=,-,j,@,r,T +,p,e,=,-,p,@,r,-,v,A,t,J +,d,E,S,-,b,<,rt,-,k,A,st,J =,=,=,=,+,k,},=,-,b,@,=,T -,f,I,k,+,s,e,r,-,sp,L,t,J +,=,o,=,-,p,@,=,-,r,a,=,T =,=,=,=,=,=,=,=,+,l,},l,E =,=,=,=,+,l,},nS,-,s,A,k,J +,h,o,=,-,n,@,x,-,sx,},p,E =,=,=,=,=,=,=,=,+,l,},t,J =,=,=,=,=,=,=,=,+,h,L,s,J =,=,=,=,=,=,=,=,+,n,E,st,J -,f,@,=,-,l,I,=,-,x,a,m,P =,=,=,=,=,=,=,=,+,pl,A,Nk,J =,=,=,=,+,r,o,s,-,t,@,r,T =,=,=,=,=,=,=,=,+,m,a,l,T -,d,u,=,-,k,@,=,-,r,E,k,J =,=,=,=,=,=,=,=,+,b,E,t,J +,m,a,n,-,z,a,d,-,br,o,t,J -,d,@,=,-,v,a,rt,-,k,E,rk,J =,=,=,=,=,=,=,=,+,=,a,=,T -,m,A,=,+,S,i,=,-,n,@,=,T -,v,@,r,+,w,K,z,-,br,i,f,J -,p,e,=,+,r,i,=,-,k,@,l,T =,=,=,=,=,=,=,=,+,w,K,=,T +,n,o,=,-,t,@,=,-,d,O,p,J =,=,=,=,=,=,=,=,+,w,O,lk,J -,t,e,=,-,j,o,=,+,r,i,=,T =,=,=,=,=,=,=,=,+,s,I,m,E =,=,=,=,+,h,a,rt,-,kl,e,t,J =,=,=,=,=,=,=,=,+,m,A,n,E 
=,=,=,=,+,r,I,N,-,k,@,l,T =,=,=,=,+,=,A,=,-,k,@,r,T =,=,=,=,=,=,=,=,+,k,y,=,T =,=,=,=,=,=,=,=,+,h,E,mt,E -,xr,A,=,-,m,o,=,+,f,o,n,T +,=,O,N,-,G,@,=,-,l,},k,J =,=,=,=,+,r,L,=,-,t,@,r,T =,=,=,=,=,=,=,=,+,m,A,l,E +,t,e,=,-,p,@,l,-,h,u,t,J =,=,=,=,=,=,=,=,+,pl,A,nt,J =,=,=,=,=,=,=,=,+,k,},t,J =,=,=,=,-,k,o,=,+,p,i,=,T =,=,=,=,=,=,=,=,+,k,A,n,E =,=,=,=,=,=,=,=,+,b,A,nt,J -,k,a,=,+,n,a,=,-,r,i,=,T +,tr,<,=,-,l,@,=,-,l,A,mp,J +,x,A,z,-,br,A,n,-,d,@,r,T =,=,=,=,+,v,A,r,-,k,@,n,T =,=,=,=,-,t,i,=,+,S,},rt,J =,=,=,=,+,s,L,=,-,k,@,r,T =,=,=,=,+,l,a,=,-,d,I,N,K =,=,=,=,+,w,a,=,-,j,@,r,T =,=,=,=,-,k,O,r,+,v,e,=,T =,=,=,=,=,=,=,=,+,sx,I,lt,J =,=,=,=,-,kw,A,r,+,t,i,r,T +,kr,L,=,-,d,@,=,-,r,E,k,J -,t,@,r,+,m,E,t,-,s,o,=,T =,=,=,=,+,k,O,=,-,l,i,=,T =,=,=,=,-,f,E,s,+,t,O,n,E =,=,=,=,-,m,A,=,+,tr,A,s,J -,b,o,=,-,n,@,=,-,m,E,s,J =,=,=,=,-,pl,@,=,+,v,i,r,T =,=,=,=,-,kw,A,=,+,tr,K,n,T -,m,e,=,-,x,a,=,+,n,i,k,J =,=,=,=,=,=,=,=,+,z,A,k,J +,x,A,s,-,t,@,=,-,d,u,k,J =,=,=,=,+,kr,e,=,-,k,@,l,T +,t,e,=,-,l,e,=,-,p,@,l,T -,z,E,n,+,t,e,r,-,bl,A,t,J =,=,=,=,+,k,a,=,-,p,@,r,T =,=,=,=,+,fr,a,=,-,z,@,=,T =,=,=,=,-,k,},r,+,z,i,f,J =,=,=,=,+,m,y,r,-,bl,u,m,P =,=,=,=,=,=,=,=,+,h,u,t,J +,m,K,=,-,d,@,=,-,pr,a,t,J =,=,=,=,=,=,=,=,+,m,K,t,J =,=,=,=,+,p,A,s,-,h,O,k,J =,=,=,=,+,w,a,=,-,f,@,l,T -,k,a,=,+,p,O,t,-,h,u,t,J =,=,=,=,+,m,E,lg,-,b,},s,J +,h,O,n,-,d,@,=,-,fl,L,t,J =,=,=,=,+,tr,A,m,-,k,a,rt,J -,m,e,=,-,l,o,=,+,d,i,=,T =,=,=,=,=,=,=,=,+,st,A,t,J =,=,=,=,+,m,I,=,-,d,@,l,T =,=,=,=,=,=,=,=,+,sl,O,t,J =,=,=,=,+,m,E,=,-,N,@,l,T +,=,e,=,-,t,@,r,-,k,A,p,J =,=,=,=,+,t,o,=,-,r,@,n,T -,w,A,n,-,d,@,=,-,l,I,N,E =,=,=,=,-,kl,M,=,+,w,i,r,T +,n,I,s,-,t,@,=,-,z,},s,J =,=,=,=,=,=,=,=,+,pr,o,j,T -,v,@,r,+,d,I,xt,-,s,@,l,T -,fl,a,=,+,m,I,N,-,g,o,=,T +,k,A,=,-,k,@,=,-,t,u,=,T =,=,=,=,+,p,I,m,-,p,@,l,T -,pr,o,=,+,v,I,n,-,s,i,=,T +,r,e,=,-,k,@,N,-,k,},nst,J -,f,e,=,-,n,o,=,+,m,e,n,T =,=,=,=,+,r,O,nt,-,s,@,l,T =,=,=,=,=,=,=,=,+,k,a,k,J -,s,i,=,-,h,a,=,-,m,@,r,T =,=,=,=,+,xr,E,ns,-,pl,a,ts,J +,t,A,N,-,g,a,=,-,sl,I,p,J 
=,=,=,=,+,z,e,=,-,p,a,rt,J =,=,=,=,+,=,A,s,-,kr,L,s,J =,=,=,=,-,tr,y,=,+,w,e,l,T =,=,=,=,+,h,O,=,-,m,@,r,T -,p,A,s,-,t,o,=,+,r,i,=,T =,=,=,=,+,pl,A,z,-,d,A,Nk,J =,=,=,=,=,=,=,=,+,m,i,t,J -,=,I,n,-,v,i,=,+,t,e,=,T -,v,@,r,+,s,i,r,-,s,@,l,T =,=,=,=,-,t,E,x,+,n,i,k,J =,=,=,=,+,v,a,=,-,d,@,m,P =,=,=,=,-,k,o,=,+,n,K,n,T +,m,O,=,-,p,@,=,-,bl,A,t,J =,=,=,=,+,w,A,s,-,h,A,nt,J =,=,=,=,=,=,=,=,+,dr,o,m,P =,=,=,=,+,k,i,z,-,br,i,f,J =,=,=,=,=,=,=,=,+,z,I,t,J -,n,i,=,-,j,a,=,+,t,y,r,T =,=,=,=,+,v,a,=,-,d,@,r,T =,=,=,=,=,=,=,=,+,v,A,r,E =,=,=,=,-,v,@,r,+,h,a,l,T =,=,=,=,+,f,i,ts,-,p,O,mp,J =,=,=,=,+,j,},=,-,fr,M,w,T =,=,=,=,+,m,y,r,-,bl,u,m,E =,=,=,=,+,m,a,=,-,n,},s,J =,=,=,=,-,k,A,=,+,p,E,l,E -,k,K,k,-,sp,i,=,-,G,@,l,T =,=,=,=,=,=,=,=,+,t,o,n,T =,=,=,=,+,w,o,=,-,n,I,N,K =,=,=,=,+,pr,e,=,-,m,i,=,T +,t,O,=,-,f,@,l,-,d,i,r,T -,v,@,r,+,w,I,=,-,n,I,N,K =,=,=,=,+,=,O,p,-,st,o,t,J +,f,O,nd,-,=,E,N,-,G,@,l,T +,m,O,=,-,k,a,=,-,k,O,p,J =,=,=,=,-,kl,K,n,+,d,L,m,P -,l,@,=,-,n,a,=,+,r,I,n,E =,=,=,=,-,x,@,=,+,m,A,k,J =,=,=,=,+,w,e,=,-,G,@,l,T =,=,=,=,=,=,=,=,+,=,E,r,E =,=,=,=,+,tr,i,=,-,j,o,=,T =,=,=,=,=,=,=,=,+,kw,A,l,E =,=,=,=,+,xr,A,s,-,kl,O,k,J =,=,=,=,+,v,K,=,-,v,@,r,T =,=,=,=,+,w,I,=,-,s,@,l,T -,b,M,w,-,b,@,=,-,dr,K,f,J =,=,=,=,=,=,=,=,+,b,},l,E +,h,o,=,-,n,@,x,-,sx,},p,J =,=,=,=,=,=,=,=,+,st,A,f,J -,pr,o,=,+,Gr,A,=,-,m,a,=,T =,=,=,=,=,=,=,=,+,m,O,p,J +,_,},N,-,g,@,l,-,h,u,t,J =,=,=,=,-,m,o,=,+,r,E,l,E +,sl,O,r,-,d,@,x,-,h,K,t,J +,=,a,m,-,b,e,ldz,-,b,e,n,T +,w,O,r,-,st,@,=,-,br,o,t,J +,r,a,=,-,ts,i,=,-,d,o,s,J +,f,},t,-,s,@,=,-,l,a,r,T -,kw,I,s,-,p,@,=,+,d,o,r,T =,=,=,=,=,=,=,=,+,x,I,l,E -,v,@,r,+,=,A,n,-,d,a,=,T =,=,=,=,+,=,E,n,-,t,@,r,T =,=,=,=,=,=,=,=,+,tr,a,=,T =,=,=,=,=,=,=,=,+,z,e,n,T =,=,=,=,+,m,e,t,-,kl,O,k,J =,=,=,=,-,b,I,l,+,j,A,rt,J =,=,=,=,+,pr,u,f,-,r,I,t,J =,=,=,=,=,=,=,=,+,k,O,p,E -,=,A,n,-,t,i,=,+,f,o,n,T =,=,=,=,+,=,e,=,-,t,@,n,T =,=,=,=,=,=,=,=,+,b,u,l,T +,k,O,=,-,f,i,=,-,pr,a,t,J -,k,@,=,-,k,a,=,-,m,@,r,T =,=,=,=,=,=,=,=,+,m,L,l,T -,p,a,=,-,l,A,N,+,k,K,n,T 
=,=,=,=,+,bl,a,=,-,k,@,r,T =,=,=,=,-,m,o,=,+,t,i,f,J =,=,=,=,+,st,K,=,-,G,@,r,T -,p,@,l,-,sx,I,l,-,m,E,s,J =,=,=,=,+,t,I,=,-,x,@,l,T =,=,=,=,+,w,I,t,-,x,A,t,J =,=,=,=,=,=,=,=,+,m,},ts,J +,n,|,k,-,p,A,r,-,t,K,=,T -,l,M,=,+,r,i,r,-,bl,A,t,J =,=,=,=,=,=,=,=,+,m,y,r,T =,=,=,=,+,pr,I,=,-,k,@,l,T =,=,=,=,-,m,y,=,+,z,i,k,J +,m,a,=,-,t,O,r,-,h,L,s,J =,=,=,=,+,t,a,=,-,k,@,l,T +,G,a,=,-,r,@,=,-,z,A,k,J =,=,=,=,+,n,a,j,-,kr,A,ns,J =,=,=,=,-,f,A,k,+,t,y,r,T =,=,=,=,+,m,O,n,-,st,@,r,T =,=,=,=,=,=,=,=,+,st,o,l,T =,=,=,=,=,=,=,=,+,st,A,l,E =,=,=,=,=,=,=,=,+,k,a,n,T -,t,@,r,+,kl,a,s,-,l,i,t,J =,=,=,=,+,=,u,s,-,t,@,r,T =,=,=,=,+,n,A,xt,-,l,A,mp,J =,=,=,=,+,n,A,xt,-,l,I,xt,J =,=,=,=,+,v,K,=,-,z,@,l,T +,n,a,=,-,G,@,l,-,sx,a,r,T =,=,=,=,=,=,=,=,+,z,e,f,J =,=,=,=,+,pr,u,f,-,l,A,p,J =,=,=,=,+,v,O,l,-,G,@,r,T =,=,=,=,+,w,A,xt,-,h,L,s,J =,=,=,=,+,bl,u,m,-,k,A,t,J =,=,=,=,=,=,=,=,+,r,A,nt,J =,=,=,=,+,j,},=,-,f,@,r,T =,=,=,=,=,=,=,=,+,p,K,=,T -,=,o,=,+,w,a,=,-,z,@,=,T +,k,A,=,-,t,@,=,-,w,A,s,J =,=,=,=,+,=,A,f,-,w,A,s,J =,=,=,=,+,m,|,=,-,b,@,l,T =,=,=,=,+,sn,A,p,-,st,@,r,T +,f,O,n,-,=,E,N,-,G,@,l,T =,=,=,=,+,vl,|,=,-,G,@,l,T -,l,A,n,+,s,E,t,-,f,I,s,J =,=,=,=,-,fr,A,G,+,m,E,nt,J =,=,=,=,+,br,u,=,-,d,@,r,T =,=,=,=,+,z,e,=,-,t,@,l,T =,=,=,=,+,m,A,rkt,-,pl,a,ts,J =,=,=,=,+,=,o,=,-,b,@,r,T =,=,=,=,+,t,A,k,-,s,i,=,T +,=,A,n,-,d,@,=,-,r,I,N,E -,d,@,r,-,p,A,r,-,t,K,=,T +,vl,i,x,-,t,L,x,-,r,a,m,P +,k,I,=,-,p,@,=,-,=,K,nt,J +,v,I,=,-,N,@,r,-,k,o,t,J =,=,=,=,=,=,=,=,+,p,O,r,E -,n,i,=,-,m,@,n,+,d,A,l,E =,=,=,=,+,sp,M,=,-,w,@,r,T =,=,=,=,+,=,E,r,-,k,@,r,T =,=,=,=,+,vl,e,=,-,G,@,l,T -,=,a,=,-,b,e,=,+,s,e,=,T =,=,=,=,+,kn,A,=,-,b,@,l,T =,=,=,=,+,n,I,=,-,k,@,l,T =,=,=,=,-,k,a,=,+,t,E,rn,T =,=,=,=,+,p,u,=,-,p,@,r,T -,n,@,s,-,x,e,=,-,v,I,N,K =,=,=,=,+,=,A,f,-,spr,a,k,J =,=,=,=,=,=,=,=,+,tr,A,m,E +,r,A,s,-,t,a,=,-,f,@,l,T +,sp,i,k,-,p,a,=,-,p,i,r,T =,=,=,=,=,=,=,=,+,h,a,n,T -,k,O,=,-,m,E,n,+,t,a,r,T +,n,o,=,-,t,@,=,-,l,a,r,T =,=,=,=,+,m,A,r,-,m,@,l,T -,d,i,=,-,j,o,=,-,sp,E,l,E =,=,=,=,=,=,=,=,+,sx,E,p,J 
=,=,=,=,-,h,A,n,+,s,O,p,J -,=,A,=,-,p,a,=,-,r,a,t,J =,=,=,=,-,h,A,r,+,p,u,n,T -,d,@,=,-,v,u,dz,-,b,e,n,T =,=,=,=,+,=,O,p,-,st,A,p,J +,t,L,=,-,m,@,=,-,l,a,r,T +,d,A,G,-,r,@,=,-,t,u,r,T =,=,=,=,+,vl,i,=,-,G,@,r,T =,=,=,=,+,d,},=,-,f,@,l,T -,k,a,=,+,p,I,=,-,t,@,l,T +,h,A,n,-,d,@,x,-,h,K,t,J +,t,o,=,-,v,@,=,-,r,a,r,T =,=,=,=,=,=,=,=,+,r,O,p,E =,=,=,=,+,h,e,=,-,m,@,l,T +,kr,E,n,-,t,@,=,-,b,o,m,P =,=,=,=,=,=,=,=,+,p,O,st,J -,p,E,n,+,d,y,=,-,l,@,=,T =,=,=,=,-,l,o,=,+,k,a,l,T -,=,a,=,+,z,K,n,-,st,E,l,E +,v,I,n,-,s,i,=,-,pl,a,ts,J =,=,=,=,+,kn,I,p,-,s,@,l,T =,=,=,=,+,p,i,=,-,z,@,l,T =,=,=,=,=,=,=,=,+,st,e,=,T =,=,=,=,+,=,A,=,-,p,@,l,T =,=,=,=,+,v,I,=,-,st,u,l,T +,=,O,=,-,m,@,=,-,z,i,n,T +,p,O,=,-,k,@,=,-,br,i,f,J =,=,=,=,+,=,O,m,-,w,E,x,J +,=,O,n,-,d,@,r,-,d,|,r,T =,=,=,=,=,=,=,=,+,kl,o,f,J -,f,O,r,+,m,y,=,-,l,@,=,T =,=,=,=,+,r,A,=,-,m,@,l,T =,=,=,=,+,spr,A,N,-,k,@,l,T =,=,=,=,=,=,=,=,+,v,O,t,J =,=,=,=,+,sp,a,=,-,ts,i,=,T =,=,=,=,+,kw,E,=,-,b,@,l,T =,=,=,=,+,h,A,l,-,st,@,r,T =,=,=,=,+,l,E,=,-,t,@,r,T -,st,A,n,-,t,a,=,+,n,e,=,T =,=,=,=,+,=,O,n,-,w,e,r,T -,r,a,=,-,d,i,=,-,j,o,=,T =,=,=,=,=,=,=,=,+,kn,I,k,J =,=,=,=,=,=,=,=,+,k,A,s,J =,=,=,=,=,=,=,=,+,p,A,k,J +,=,I,N,-,k,e,=,-,p,I,N,K =,=,=,=,=,=,=,=,+,=,o,j,T =,=,=,=,+,v,I,=,-,N,@,r,T =,=,=,=,+,tr,e,=,-,m,a,=,T =,=,=,=,+,sx,I,l,-,m,E,s,J =,=,=,=,+,b,u,=,-,d,@,l,T =,=,=,=,=,=,=,=,+,vl,o,=,T =,=,=,=,+,p,O,st,-,f,A,k,J +,sx,I,ld,-,w,A,xt,-,h,L,s,J =,=,=,=,+,sl,O,=,-,b,@,=,T +,sl,|,=,-,t,@,l,-,m,A,nt,J =,=,=,=,+,=,o,=,-,p,a,=,T =,=,=,=,=,=,=,=,+,kr,E,N,E +,v,a,=,-,l,i,=,-,j,},m,P =,=,=,=,+,t,a,rt,-,f,O,rk,J =,=,=,=,-,fr,O,n,+,d,e,l,T =,=,=,=,=,=,=,=,+,bl,u,m,P -,d,@,=,-,v,i,=,-,j,o,l,T =,=,=,=,+,h,A,nt,-,p,O,mp,J +,p,a,=,-,r,i,=,-,j,a,=,T +,b,u,=,-,v,@,=,-,p,A,k,J -,p,A,n,-,t,o,=,+,m,i,m,E =,=,=,=,=,=,=,=,+,br,u,k,J =,=,=,=,+,s,u,p,-,s,o,=,T =,=,=,=,-,f,i,=,+,G,y,r,T =,=,=,=,=,=,=,=,+,p,|,l,T =,=,=,=,=,=,=,=,+,=,E,f,J -,k,O,m,-,pr,i,=,+,m,e,=,T =,=,=,=,=,=,=,=,+,l,L,k,J =,=,=,=,-,k,A,r,+,w,K,=,T =,=,=,=,=,=,=,=,+,t,E,kst,J 
=,=,=,=,=,=,=,=,+,sp,A,t,J =,=,=,=,=,=,=,=,+,h,},m,E +,=,O,p,-,s,I,x,-,t,@,r,T =,=,=,=,=,=,=,=,+,pr,u,f,J =,=,=,=,=,=,=,=,+,d,A,x,J =,=,=,=,-,pr,o,=,+,bl,e,m,P =,=,=,=,-,x,@,=,+,b,a,r,T +,p,e,=,-,p,@,r,-,h,L,s,J =,=,=,=,+,fr,O,=,-,m,@,l,T =,=,=,=,=,=,=,=,+,sx,A,p,E =,=,=,=,+,h,A,lf,-,=,y,r,T =,=,=,=,-,=,o,=,+,v,a,l,T -,t,A,=,-,p,@,=,+,r,K,=,T =,=,=,=,=,=,=,=,+,t,O,p,J =,=,=,=,+,st,O,=,-,b,@,=,T =,=,=,=,=,=,=,=,+,str,o,=,T -,v,@,r,-,st,A,p,-,k,a,rt,J =,=,=,=,=,=,=,=,+,xl,e,=,T =,=,=,=,=,=,=,=,+,vr,i,nt,J =,=,=,=,+,m,O,=,-,k,@,l,T =,=,=,=,=,=,=,=,+,d,O,ns,J =,=,=,=,+,p,a,n,-,d,@,r,T =,=,=,=,+,=,O,r,-,d,@,r,T -,xl,a,=,-,d,i,=,+,j,o,l,T =,=,=,=,=,=,=,=,+,kl,O,nt,J =,=,=,=,=,=,=,=,+,=,E,n,E =,=,=,=,=,=,=,=,+,=,A,r,E =,=,=,=,+,h,a,r,-,st,},k,J =,=,=,=,=,=,=,=,+,vr,},xt,J +,str,},=,-,b,@,=,-,l,I,N,E +,G,},lt,-,s,@,l,-,b,u,k,J =,=,=,=,+,st,A,n,-,d,@,r,T =,=,=,=,-,t,A,=,+,j,|,r,T +,=,K,=,-,j,@,r,-,d,O,p,J +,b,o,=,-,v,@,=,-,st,},k,J =,=,=,=,=,=,=,=,+,x,u,t,J -,f,i,=,-,l,e,=,-,p,@,l,T +,w,a,=,-,t,@,r,-,k,I,p,J =,=,=,=,=,=,=,=,+,p,i,t,J =,=,=,=,=,=,=,=,+,v,A,l,E =,=,=,=,+,w,E,rk,-,xr,u,p,J =,=,=,=,+,p,a,=,-,l,I,N,E =,=,=,=,=,=,=,=,+,s,e,l,T =,=,=,=,=,=,=,=,+,k,i,l,T =,=,=,=,+,sl,K,=,-,p,@,r,T =,=,=,=,-,p,A,m,+,fl,E,t,J =,=,=,=,=,=,=,=,+,kn,A,l,E +,p,A,=,-,n,a,=,-,m,a,=,T -,w,O,r,-,t,@,l,-,b,e,n,T =,=,=,=,-,p,a,=,+,n,e,l,T =,=,=,=,+,sx,o,=,-,t,@,l,T =,=,=,=,-,h,A,N,+,g,a,r,T =,=,=,=,=,=,=,=,+,str,i,m,P =,=,=,=,+,xr,A,v,-,b,e,lt,J -,f,i,=,-,G,y,=,+,r,A,nt,J =,=,=,=,=,=,=,=,+,v,I,Nk,J =,=,=,=,-,s,A,n,+,d,a,l,T =,=,=,=,=,=,=,=,+,vl,K,m,P =,=,=,=,=,=,=,=,+,kr,A,nt,J =,=,=,=,+,m,O,r,-,z,@,l,T +,h,O,nts,-,f,i,=,-,j,o,l,T =,=,=,=,=,=,=,=,+,b,e,n,T -,v,@,r,+,z,A,=,-,k,I,N,K =,=,=,=,+,v,y,r,-,vl,i,x,J -,k,|,=,-,t,@,=,+,r,K,=,T =,=,=,=,+,p,I,n,-,d,a,=,T =,=,=,=,+,m,e,=,-,t,@,r,T =,=,=,=,=,=,=,=,+,tr,o,n,T -,k,O,m,+,p,A,s,-,h,L,s,J =,=,=,=,+,z,A,k,-,s,E,nt,J +,h,E,r,-,d,@,rs,-,t,A,s,J -,=,E,=,-,N,@,=,+,l,I,n,E =,=,=,=,+,d,E,N,-,k,@,r,T =,=,=,=,+,=,e,=,-,G,a,=,T -,p,A,r,-,d,E,=,+,s,y,=,T 
-,=,A,n,+,t,E,=,-,n,@,=,T =,=,=,=,=,=,=,=,+,x,E,lt,J -,f,y,=,-,m,@,=,+,r,i,=,T =,=,=,=,=,=,=,=,+,k,E,rk,J =,=,=,=,+,l,o,p,-,xr,a,f,J -,v,@,r,+,l,A,N,-,l,K,st,J -,p,A,r,+,k,e,r,-,pl,K,n,T =,=,=,=,+,b,O,t,-,s,I,N,K =,=,=,=,-,m,A,=,+,m,a,=,T -,k,a,=,+,n,e,l,-,st,O,k,J =,=,=,=,+,sp,I,=,-,k,@,l,T =,=,=,=,+,vl,i,x,-,f,E,lt,J =,=,=,=,+,kr,I,N,-,k,@,l,T =,=,=,=,=,=,=,=,+,p,A,rt,J =,=,=,=,+,sxr,K,=,-,v,@,r,T =,=,=,=,=,=,=,=,+,k,},s,J =,=,=,=,+,str,K,=,-,k,@,l,T -,m,A,=,-,G,a,=,+,z,K,n,T =,=,=,=,+,l,A,k,-,st,e,l,T -,h,a,=,+,v,A,=,-,n,a,=,T =,=,=,=,=,=,=,=,+,x,},l,E =,=,=,=,=,=,=,=,+,n,a,t,J =,=,=,=,+,kw,E,s,-,t,i,=,T =,=,=,=,+,sl,I,=,-,p,@,r,T =,=,=,=,-,t,o,=,+,m,a,t,J =,=,=,=,+,b,A,=,-,b,@,l,T -,d,E,=,+,l,e,r,-,h,M,t,J -,pr,o,=,+,s,E,=,-,s,i,=,T =,=,=,=,=,=,=,=,+,p,A,t,J =,=,=,=,=,=,=,=,+,p,O,t,J =,=,=,=,+,pr,O,N,-,k,@,r,T =,=,=,=,+,t,E,=,-,N,@,l,T +,d,O,k,-,t,@,rz,-,br,i,f,J +,=,I,n,-,sl,a,=,-,p,@,r,T =,=,=,=,+,bl,M,w,-,b,O,rst,J =,=,=,=,+,l,o,=,-,p,@,r,T =,=,=,=,+,b,i,r,-,v,I,lt,J =,=,=,=,+,s,O,k,-,p,o,t,J =,=,=,=,-,x,@,=,+,sxr,I,ft,J =,=,=,=,-,dr,a,=,+,Z,e,=,T =,=,=,=,=,=,=,=,+,l,I,t,J -,v,@,r,+,t,E,=,-,l,I,N,K =,=,=,=,+,t,i,=,-,p,@,=,T =,=,=,=,-,p,E,n,+,w,a,r,T +,v,e,=,-,n,},z,-,b,e,lt,J -,n,i,=,+,z,u,n,-,st,A,t,J =,=,=,=,+,p,E,l,-,Gr,I,m,P =,=,=,=,=,=,=,=,+,v,E,t,J =,=,=,=,+,b,},=,-,s,@,l,T =,=,=,=,+,=,O,m,-,w,E,x,E =,=,=,=,=,=,=,=,+,b,A,lk,J =,=,=,=,=,=,=,=,+,sl,u,p,J +,sp,o,=,-,r,@,=,-,d,i,r,T =,=,=,=,+,st,L,=,-,t,@,r,T +,=,L,=,-,tr,E,k,-,s,@,l,T =,=,=,=,+,p,E,=,-,n,I,N,K =,=,=,=,+,kl,A,p,-,h,M,t,J =,=,=,=,=,=,=,=,+,m,O,t,J +,v,i,=,-,z,@,=,-,r,I,k,J =,=,=,=,=,=,=,=,+,st,a,l,T -,S,o,=,+,n,),=,-,r,@,=,T =,=,=,=,=,=,=,=,+,f,o,j,T +,b,o,=,-,t,@,r,-,h,A,m,E =,=,=,=,+,m,O,nt,-,h,A,rp,J =,=,=,=,+,l,},G,-,d,e,l,T +,l,E,=,-,k,@,r,-,b,e,t,J =,=,=,=,+,w,e,r,-,h,L,s,J =,=,=,=,+,str,a,=,-,d,|,n,T =,=,=,=,-,p,E,r,+,s,e,l,T +,h,O,=,-,N,@,r,-,b,a,n,T -,m,y,=,+,s,e,=,-,j,},m,P +,p,E,r,-,G,o,=,-,l,a,=,T =,=,=,=,+,=,O,p,-,st,e,k,J =,=,=,=,+,spr,E,N,-,k,@,l,T =,=,=,=,+,v,I,=,-,s,@,r,T 
=,=,=,=,-,x,o,=,+,d,I,n,E -,b,u,=,-,k,@,=,+,r,K,=,T +,st,E,m,-,p,@,l,-,b,u,k,J -,p,i,=,+,l,A,s,-,t,@,r,T =,=,=,=,+,p,},n,-,t,@,r,T +,l,K,=,-,k,@,=,-,h,L,s,J +,=,a,n,-,r,E,xt,-,k,A,st,J =,=,=,=,=,=,=,=,+,p,|,k,J =,=,=,=,-,pl,A,N,+,k,i,r,T =,=,=,=,+,p,|,=,-,t,@,r,T -,=,A,=,-,t,@,l,+,j,e,=,T =,=,=,=,=,=,=,=,+,k,},p,J -,s,E,r,+,v,e,r,-,st,@,r,T +,vr,a,=,-,G,@,=,-,=,y,r,T =,=,=,=,=,=,=,=,+,v,A,t,J =,=,=,=,=,=,=,=,+,kr,K,t,J =,=,=,=,+,k,E,rst,-,kr,A,ns,J +,w,e,k,-,=,K,nt,-,h,L,s,J =,=,=,=,=,=,=,=,+,v,E,n,E -,v,@,r,+,l,i,ft,-,h,K,t,J =,=,=,=,=,=,=,=,+,sl,O,t,J +,kl,A,s,-,x,@,=,-,n,o,t,J +,w,I,=,-,s,@,l,-,d,i,r,T =,=,=,=,+,b,i,xt,-,p,},nt,J =,=,=,=,+,m,I,=,-,st,A,p,J +,p,i,=,-,t,@,r,-,m,A,n,E +,p,E,r,-,z,I,k,-,h,L,t,J =,=,=,=,+,skr,y,=,-,p,@,l,T +,m,i,=,-,l,i,=,-,tr,E,k,J -,h,O,r,+,l,o,=,-,Z,@,=,T =,=,=,=,+,l,a,=,-,k,@,n,T =,=,=,=,=,=,=,=,+,v,E,l,E -,v,@,r,+,t,E,l,-,s,@,l,T +,=,E,r,-,f,@,=,-,n,I,s,J =,=,=,=,+,v,o,=,-,G,@,l,T =,=,=,=,=,=,=,=,+,m,O,m,E +,b,o,=,-,t,@,r,-,vl,o,t,J =,=,=,=,+,sl,|,=,-,t,@,l,T =,=,=,=,+,kn,|,=,-,z,I,N,K =,=,=,=,+,v,O,n,-,d,@,l,T =,=,=,=,-,p,A,s,+,t,o,r,T =,=,=,=,-,x,@,=,+,b,M,w,T =,=,=,=,+,b,O,=,-,k,I,N,E =,=,=,=,+,d,O,x,-,t,@,r,T =,=,=,=,+,v,E,st,-,s,A,k,J =,=,=,=,=,=,=,=,+,k,I,p,J =,=,=,=,=,=,=,=,+,v,I,s,J =,=,=,=,+,p,M,=,-,z,@,=,T +,d,E,n,-,t,@,=,-,f,e,st,J +,p,I,n,-,d,a,=,-,n,o,t,J -,k,K,=,-,v,@,=,+,r,K,=,T =,=,=,=,+,r,A,=,-,k,@,r,T =,=,=,=,=,=,=,=,+,=,e,nt,J -,=,A,s,-,p,i,=,+,r,i,n,T +,k,I,n,-,d,@,r,-,l,i,t,J =,=,=,=,-,b,u,=,+,t,i,k,J =,=,=,=,=,=,=,=,+,d,u,k,J =,=,=,=,-,r,o,=,+,m,A,n,E =,=,=,=,+,d,y,=,-,v,@,l,T -,s,u,=,-,v,@,=,+,n,i,r,T =,=,=,=,=,=,=,=,+,x,K,n,T -,sx,u,=,-,n,@,=,-,k,A,st,J =,=,=,=,=,=,=,=,+,s,e,=,T -,v,@,=,+,h,i,=,-,k,@,l,T -,v,@,r,+,s,i,=,-,r,I,N,K -,=,A,=,-,p,@,l,-,m,E,s,J =,=,=,=,=,=,=,=,+,fl,L,t,J =,=,=,=,-,m,K,n,+,h,e,r,T =,=,=,=,+,b,I,t,-,pr,E,nt,J =,=,=,=,+,kn,O,=,-,k,@,l,T +,s,L,=,-,k,@,r,-,kl,O,nt,J =,=,=,=,-,pl,a,=,+,n,e,t,J =,=,=,=,+,x,M,d,-,b,u,k,J +,l,I,mf,-,l,I,=,-,x,a,m,P =,=,=,=,=,=,=,=,+,sl,i,r,T 
=,=,=,=,+,d,e,=,-,k,@,n,T =,=,=,=,=,=,=,=,+,zw,A,m,E =,=,=,=,+,h,E,r,-,d,@,r,T =,=,=,=,=,=,=,=,+,pl,A,s,J =,=,=,=,-,pl,A,s,+,tr,O,n,E =,=,=,=,+,w,i,=,-,b,@,rt,J =,=,=,=,=,=,=,=,+,t,o,st,J =,=,=,=,+,p,O,=,-,f,@,r,T =,=,=,=,=,=,=,=,+,kn,|,=,T =,=,=,=,=,=,=,=,+,h,a,x,J -,sx,A,=,-,p,@,=,-,l,K,st,J -,h,E,r,-,m,@,=,+,l,K,n,T -,=,A,t,-,m,O,s,+,f,e,r,T =,=,=,=,=,=,=,=,+,k,A,lf,J =,=,=,=,=,=,=,=,+,h,L,f,J -,d,@,=,-,k,a,=,-,m,@,r,T =,=,=,=,+,d,e,x,-,r,O,nt,J =,=,=,=,-,pl,i,=,+,s,e,=,T =,=,=,=,=,=,=,=,+,x,a,l,T =,=,=,=,+,pl,o,j,-,s,@,l,T =,=,=,=,+,v,e,=,-,z,@,l,T -,t,e,=,-,l,@,=,+,f,o,n,T =,=,=,=,+,k,M,=,-,t,@,r,T -,t,i,=,+,t,K,ts,-,pl,a,t,J =,=,=,=,+,p,O,=,-,x,@,l,T =,=,=,=,+,z,e,=,-,r,o,=,T =,=,=,=,+,b,},=,-,f,@,l,T +,p,o,=,-,d,i,=,-,j,},m,E -,p,A,=,+,tr,K,s,-,p,o,rt,J -,z,u,=,-,t,@,=,+,l,i,f,J =,=,=,=,+,kl,I,p,-,x,K,t,J =,=,=,=,+,t,e,=,-,l,I,xt,J =,=,=,=,=,=,=,=,+,n,A,t,J =,=,=,=,+,n,e,=,-,t,@,l,T +,p,u,=,-,j,@,r,-,d,o,s,J =,=,=,=,=,=,=,=,+,l,K,n,T =,=,=,=,-,p,E,=,+,n,u,n,T =,=,=,=,=,=,=,=,+,xr,u,p,J =,=,=,=,=,=,=,=,+,r,a,m,P =,=,=,=,+,p,I,=,-,k,@,l,T +,m,i,=,-,t,a,=,-,f,@,l,T =,=,=,=,+,z,O,n,-,d,@,=,T =,=,=,=,+,d,O,r,-,p,@,l,T =,=,=,=,-,pl,a,=,+,t,e,l,T -,b,o,=,+,n,a,=,-,d,@,=,E =,=,=,=,=,=,=,=,+,fl,E,ns,J +,z,O,n,-,d,a,rz,-,b,A,Nk,J +,pr,L,=,-,m,@,=,-,m,O,nt,J =,=,=,=,+,b,O,r,-,st,@,l,T =,=,=,=,+,sxr,O,=,-,b,@,r,T =,=,=,=,=,=,=,=,+,=,E,l,E -,p,O,=,-,l,@,=,+,v,i,=,T -,p,O,=,-,l,@,=,+,v,K,=,T =,=,=,=,+,d,|,G,-,n,i,t,J =,=,=,=,=,=,=,=,+,k,A,st,J =,=,=,=,-,kr,I,s,+,t,A,l,E +,w,u,=,-,k,@,r,-,d,i,r,T =,=,=,=,-,S,o,=,+,f,|,r,T +,m,O,nd,-,=,O,r,-,G,@,l,T =,=,=,=,=,=,=,=,+,l,O,l,E =,=,=,=,+,tr,E,k,-,s,@,l,T =,=,=,=,+,sl,A,=,-,b,@,=,T =,=,=,=,+,p,u,=,-,d,@,r,T =,=,=,=,+,h,a,r,-,z,A,k,J =,=,=,=,=,=,=,=,+,p,O,p,J =,=,=,=,=,=,=,=,+,w,O,rst,J -,s,@,=,-,l,a,=,+,r,K,=,T =,=,=,=,=,=,=,=,+,sx,L,f,J =,=,=,=,=,=,=,=,+,m,M,w,T =,=,=,=,+,l,e,=,-,z,I,N,E =,=,=,=,-,b,a,=,+,n,a,n,T =,=,=,=,-,p,O,r,+,t,a,l,T =,=,=,=,+,fl,E,n,-,t,@,r,T =,=,=,=,=,=,=,=,+,n,O,n,E =,=,=,=,=,=,=,=,+,fl,L,m,P 
=,=,=,=,=,=,=,=,+,sp,e,n,T =,=,=,=,+,sp,O,=,-,n,I,N,K =,=,=,=,+,p,O,r,-,s,i,=,T +,=,A,r,-,m,@,=,-,z,A,k,J =,=,=,=,+,b,E,d,-,l,A,mp,J =,=,=,=,+,p,O,st,-,k,A,r,E =,=,=,=,-,r,o,=,+,b,K,n,T =,=,=,=,=,=,=,=,+,w,a,s,J =,=,=,=,+,w,I,m,-,p,@,l,T =,=,=,=,+,h,E,=,-,r,@,=,T =,=,=,=,=,=,=,=,+,vl,i,x,J -,k,A,n,+,t,o,r,-,b,a,n,T =,=,=,=,+,h,E,lp,-,st,@,r,T =,=,=,=,=,=,=,=,+,pr,a,m,P +,zw,e,t,-,k,a,=,-,n,a,l,T =,=,=,=,-,r,@,=,+,v,y,=,T -,l,a,=,+,m,A,n,-,d,@,r,T =,=,=,=,-,=,A,=,+,pl,M,s,J =,=,=,=,=,=,=,=,+,k,e,r,T -,k,o,=,-,t,@,=,+,r,i,=,T =,=,=,=,=,=,=,=,+,k,a,rs,J =,=,=,=,+,k,e,=,-,m,@,l,T =,=,=,=,+,sx,e,ps,-,m,a,t,J =,=,=,=,+,r,K,=,-,G,@,r,T =,=,=,=,=,=,=,=,+,h,A,nt,J =,=,=,=,+,m,e,s,-,t,@,r,T =,=,=,=,+,l,e,=,-,v,@,r,T -,pr,E,s,-,p,a,=,+,pj,e,=,T =,=,=,=,+,r,a,=,-,k,@,l,T +,=,A,v,-,d,A,N,-,k,@,r,T -,v,e,=,-,l,@,=,+,m,E,nt,J +,G,E,n,-,t,@,=,-,fl,L,t,J =,=,=,=,=,=,=,=,+,j,a,r,T -,w,O,r,-,t,@,l,-,b,e,n,T =,=,=,=,+,tr,E,m,-,k,a,rt,J =,=,=,=,=,=,=,=,+,p,M,w,T =,=,=,=,=,=,=,=,+,h,A,rt,J +,w,a,=,-,t,@,r,-,pl,A,nt,J =,=,=,=,-,=,E,m,+,pl,o,j,T +,t,L,=,-,m,@,l,-,v,I,s,J =,=,=,=,-,s,E,r,+,m,u,n,T =,=,=,=,=,=,=,=,+,k,a,j,T +,h,o,=,-,n,I,N,-,sx,},p,J +,m,O,n,-,st,@,r,-,b,u,k,J =,=,=,=,+,k,a,=,-,n,o,=,T =,=,=,=,+,pr,u,=,-,fl,E,s,J =,=,=,=,=,=,=,=,+,sp,i,r,T =,=,=,=,+,z,e,=,-,br,a,=,T +,pr,u,f,-,n,e,=,-,m,I,N,K =,=,=,=,-,m,@,=,+,n,e,r,T =,=,=,=,=,=,=,=,+,fl,M,w,T =,=,=,=,-,pr,o,=,+,f,i,l,T =,=,=,=,+,spr,I,=,-,N,@,r,T +,=,K,s,-,l,e,=,-,p,@,l,T =,=,=,=,-,l,i,=,+,k,|,r,T =,=,=,=,+,v,e,=,-,d,@,l,T +,j,E,k,-,s,i,=,-,pl,a,t,J =,=,=,=,+,vr,a,x,-,st,@,r,T =,=,=,=,+,sxr,e,=,-,p,@,l,T +,b,o,=,-,t,@,r,-,m,E,s,J =,=,=,=,+,h,a,r,-,sx,|,r,T =,=,=,=,=,=,=,=,+,b,E,rk,J -,=,E,ks,+,f,o,=,-,t,o,=,T =,=,=,=,+,p,O,ns,-,x,A,t,J +,=,o,=,-,l,i,=,-,n,o,t,J +,r,e,=,-,k,@,=,-,n,I,N,E =,=,=,=,=,=,=,=,+,b,K,l,T =,=,=,=,+,pr,L,=,-,l,I,p,J =,=,=,=,=,=,=,=,+,m,a,=,T +,=,I,n,-,l,E,x,-,kr,L,s,J =,=,=,=,-,p,A,s,+,kw,I,l,E =,=,=,=,=,=,=,=,+,x,A,lm,P +,p,},=,-,d,I,N,-,br,o,t,J =,=,=,=,=,=,=,=,+,pr,I,k,J =,=,=,=,=,=,=,=,+,p,L,k,J 
=,=,=,=,=,=,=,=,+,b,A,k,J =,=,=,=,=,=,=,=,+,zw,a,n,T +,s,E,r,-,v,@,=,-,bl,I,k,J =,=,=,=,+,sp,a,=,-,t,@,l,T =,=,=,=,+,p,},=,-,m,@,l,T =,=,=,=,=,=,=,=,+,kl,O,k,J +,bl,M,w,-,s,@,l,-,d,O,t,J =,=,=,=,=,=,=,=,+,d,i,r,T =,=,=,=,=,=,=,=,+,v,O,rm,P =,=,=,=,=,=,=,=,+,p,},nt,J +,b,e,=,-,d,@,=,-,l,a,r,T =,=,=,=,+,tr,|,=,-,z,@,l,T =,=,=,=,=,=,=,=,+,sp,I,t,J -,s,i,=,-,f,E,r,-,st,O,k,J =,=,=,=,+,kr,I,p,-,k,A,st,J -,tr,o,=,-,d,y,=,+,s,e,=,T +,b,o,r,-,t,@,=,-,k,a,rt,J -,b,u,=,-,z,@,=,+,r,u,n,T =,=,=,=,+,vr,K,=,-,j,@,r,T -,l,@,=,-,p,A,s,-,t,K,=,T =,=,=,=,-,s,K,=,+,z,u,n,T =,=,=,=,=,=,=,=,+,k,I,nt,J =,=,=,=,+,f,i,t,-,s,@,r,T =,=,=,=,=,=,=,=,+,st,y,w,T =,=,=,=,+,h,O,=,-,b,@,l,T =,=,=,=,-,=,a,=,+,t,o,m,P =,=,=,=,+,sp,K,=,-,k,@,r,T =,=,=,=,=,=,=,=,+,m,a,nt,J -,d,@,=,-,b,I,n,-,d,@,r,T =,=,=,=,=,=,=,=,+,r,A,t,J -,k,i,=,+,m,o,=,-,n,o,=,T =,=,=,=,=,=,=,=,+,n,a,m,P -,v,@,r,+,k,o,=,-,p,I,N,K -,t,i,=,-,sx,o,=,-,t,@,l,T =,=,=,=,=,=,=,=,+,w,O,lf,J =,=,=,=,=,=,=,=,+,bl,O,s,J =,=,=,=,=,=,=,=,+,r,A,m,E -,=,A,m,+,f,o,=,-,r,a,=,T =,=,=,=,=,=,=,=,+,pr,K,=,T =,=,=,=,-,m,o,=,+,r,I,n,E =,=,=,=,-,x,@,=,+,v,A,l,E +,=,A,f,-,s,E,t,-,s,@,l,T +,l,e,=,-,p,@,l,-,v,a,s,J =,=,=,=,+,r,A,s,-,t,@,r,T =,=,=,=,+,t,K,=,-,G,@,r,T =,=,=,=,=,=,=,=,+,kr,A,t,J =,=,=,=,=,=,=,=,+,st,A,m,E =,=,=,=,+,l,E,=,-,m,@,r,T +,kl,a,=,-,m,@,=,-,f,I,lm,P =,=,=,=,+,j,u,=,-,k,@,l,T -,r,e,=,-,p,y,=,+,bl,i,k,J =,=,=,=,+,=,e,=,-,v,@,r,T =,=,=,=,=,=,=,=,+,r,e,ks,J +,k,|,=,-,k,@,n,-,sn,},f,J =,=,=,=,+,sn,e,w,-,vl,O,k,J -,d,i,=,+,s,K,N,-,k,A,st,J =,=,=,=,+,x,o,=,-,z,@,r,T =,=,=,=,+,r,e,=,-,G,@,l,T =,=,=,=,=,=,=,=,+,vr,A,xt,J +,t,a,=,-,m,@,=,-,br,i,f,J =,=,=,=,+,m,O,=,-,s,@,l,T =,=,=,=,+,x,E,=,-,b,@,=,T +,k,o,=,-,n,I,Nk,-,r,K,k,J +,b,),=,-,t,a,=,-,d,e,l,T =,=,=,=,+,b,},n,-,d,@,l,T =,=,=,=,+,p,e,=,-,G,@,l,T =,=,=,=,=,=,=,=,+,sp,o,r,T =,=,=,=,+,=,A,v,-,dr,},k,J =,=,=,=,+,sx,I,N,-,k,@,l,T +,=,O,b,-,d,O,n,-,d,@,r,T =,=,=,=,-,f,a,=,+,v,|,r,T =,=,=,=,=,=,=,=,+,sm,e,l,T -,r,A,n,-,d,e,=,+,v,u,=,T =,=,=,=,-,f,E,r,+,m,o,r,T +,=,A,v,-,dr,a,=,-,G,@,r,T 
-,l,L,=,-,t,@,=,+,n,A,nt,J -,G,u,t,-,=,M,=,-,t,o,=,T -,b,K,=,-,d,@,=,+,h,A,nt,J -,p,@,=,+,t,i,=,-,ts,i,=,T =,=,=,=,+,d,I,k,-,t,@,=,T =,=,=,=,+,dw,A,rz,-,b,A,lk,J -,str,a,=,-,l,@,=,-,k,A,st,J =,=,=,=,=,=,=,=,+,st,u,l,T =,=,=,=,=,=,=,=,+,r,E,st,J =,=,=,=,+,p,},=,-,d,I,N,K =,=,=,=,+,kr,o,n,-,st,e,n,T -,r,@,=,+,t,u,r,-,k,a,rt,J =,=,=,=,-,r,@,=,+,t,u,r,T +,d,e,=,-,s,i,=,-,m,o,=,T -,k,@,r,-,l,e,=,-,p,@,l,T =,=,=,=,=,=,=,=,+,z,o,l,T =,=,=,=,-,sl,a,=,+,v,I,n,E =,=,=,=,=,=,=,=,+,v,u,t,J =,=,=,=,+,b,e,=,-,b,i,=,T =,=,=,=,=,=,=,=,+,h,A,m,E =,=,=,=,+,r,O,=,-,m,@,l,T =,=,=,=,+,t,E,gzd,-,b,u,k,J =,=,=,=,+,r,I,=,-,x,@,l,T =,=,=,=,+,k,o,=,-,v,@,l,T =,=,=,=,=,=,=,=,+,r,i,t,J =,=,=,=,+,kw,A,r,-,t,@,l,T -,b,@,=,+,w,e,=,-,G,I,N,K =,=,=,=,=,=,=,=,+,d,|,n,T =,=,=,=,=,=,=,=,+,vl,M,w,T =,=,=,=,-,t,u,r,+,n,o,j,T =,=,=,=,+,k,},=,-,s,@,n,T =,=,=,=,=,=,=,=,+,tw,e,=,T =,=,=,=,=,=,=,=,+,r,I,l,E =,=,=,=,+,l,L,=,-,f,@,l,T +,=,O,n,-,d,@,r,-,l,K,f,J +,b,O,=,-,k,@,=,-,p,o,t,J =,=,=,=,+,xl,I,=,-,p,@,r,T =,=,=,=,-,r,i,=,+,j,o,l,T -,b,y,=,+,r,o,=,-,b,a,n,T +,n,o,=,-,l,i,=,-,j,a,=,T =,=,=,=,-,p,@,r,+,s,o,n,T =,=,=,=,+,kl,K,=,-,n,@,=,T =,=,=,=,=,=,=,=,+,x,e,w,T =,=,=,=,=,=,=,=,+,r,O,p,J =,=,=,=,+,h,},lb,-,d,i,nst,J =,=,=,=,+,h,A,=,-,r,@,l,T =,=,=,=,+,z,a,j,-,k,I,st,J -,v,@,r,+,d,O,m,-,h,u,k,J =,=,=,=,-,sw,a,=,+,r,e,=,T =,=,=,=,=,=,=,=,+,k,A,r,E +,s,L,=,-,k,@,r,-,b,A,l,E =,=,=,=,=,=,=,=,+,vl,i,m,P =,=,=,=,+,r,o,=,-,k,@,r,T =,=,=,=,=,=,=,=,+,r,O,k,J +,v,o,=,-,G,@,l,-,n,E,st,J =,=,=,=,+,=,},l,-,st,@,r,T =,=,=,=,=,=,=,=,+,r,O,l,E =,=,=,=,=,=,=,=,+,pl,L,m,P =,=,=,=,+,kl,A,=,-,p,@,r,T -,k,a,=,-,m,@,=,-,r,a,t,J +,r,O,=,-,m,@,l,-,z,o,j,T -,l,i,=,-,n,i,=,+,j,a,l,T +,r,O,n,-,d,@,=,-,d,A,ns,J -,p,a,=,+,r,a,=,-,ts,i,=,T =,=,=,=,+,vl,},=,-,G,@,r,T =,=,=,=,+,sp,E,r,-,w,@,r,T +,sx,I,=,-,p,@,rz,-,b,o,t,J +,r,I,n,-,d,@,=,-,k,u,k,J =,=,=,=,=,=,=,=,+,p,K,l,T =,=,=,=,-,k,a,=,+,n,a,l,T =,=,=,=,+,st,O,k,-,p,a,rt,J =,=,=,=,+,=,o,G,-,b,A,t,J -,m,i,=,-,n,e,z,-,br,i,f,J +,r,o,=,-,z,@,=,-,kn,O,p,J =,=,=,=,=,=,=,=,+,kr,o,n,T 
+,k,I,n,-,d,@,r,-,=,y,r,T =,=,=,=,+,w,E,rk,-,pl,a,ts,J =,=,=,=,+,d,e,=,-,l,I,N,K =,=,=,=,=,=,=,=,+,r,},x,E =,=,=,=,-,x,a,=,+,l,O,n,E =,=,=,=,+,r,},x,-,sx,I,lt,J =,=,=,=,+,r,},x,-,s,A,k,J =,=,=,=,+,kw,I,g,-,b,A,k,J =,=,=,=,+,dr,L,=,-,p,@,r,T +,s,L,=,-,k,@,r,-,z,A,k,J =,=,=,=,+,v,o,=,-,r,@,n,T =,=,=,=,=,=,=,=,+,fl,A,p,J =,=,=,=,-,p,O,r,+,tr,E,t,J +,z,O,r,-,G,I,Ns,-,fl,E,t,J +,=,E,=,-,f,@,n,-,h,K,t,J =,=,=,=,=,=,=,=,+,d,A,rm,P =,=,=,=,+,m,O,t,-,m,},x,E +,m,I,=,-,d,A,G,-,=,A,rm,P =,=,=,=,=,=,=,=,+,t,},l,E =,=,=,=,-,tr,O,=,+,tw,a,r,T =,=,=,=,=,=,=,=,+,r,K,=,T =,=,=,=,+,s,A,l,-,v,o,=,T -,z,K,=,-,d,@,=,+,h,E,mt,J =,=,=,=,-,t,i,=,+,r,A,n,E =,=,=,=,+,h,a,=,-,r,I,N,K =,=,=,=,-,sx,A,l,+,m,K,=,T -,k,@,=,-,l,a,rz,-,br,i,f,J +,j,u,=,-,n,@,=,-,v,I,s,J -,r,a,=,+,d,e,r,-,m,E,s,J =,=,=,=,+,p,E,=,-,p,@,l,T =,=,=,=,+,x,O,r,-,d,@,l,T -,x,@,=,+,d,},ld,-,w,E,rk,J +,s,K,=,-,z,@,=,-,br,o,t,J =,=,=,=,+,sx,L,l,-,pl,a,ts,J =,=,=,=,+,p,A,nt,-,s,@,r,T +,vr,M,=,-,w,@,=,-,b,e,st,J =,=,=,=,-,p,i,=,+,l,a,r,T =,=,=,=,+,h,M,t,-,fr,E,t,J =,=,=,=,+,d,I,s,-,t,@,l,T =,=,=,=,+,sx,a,m,-,sx,O,rt,J =,=,=,=,=,=,=,=,+,z,a,t,J =,=,=,=,=,=,=,=,+,sx,a,r,T =,=,=,=,=,=,=,=,+,str,K,k,J =,=,=,=,=,=,=,=,+,sx,A,p,J =,=,=,=,=,=,=,=,+,st,A,f,J =,=,=,=,-,m,O,r,+,t,i,r,T +,sx,a,=,-,k,@,=,-,l,a,r,T -,m,a,=,+,d,e,=,-,r,a,=,T =,=,=,=,+,sx,a,=,-,k,@,l,T +,p,I,n,-,d,a,=,-,r,O,ts,J =,=,=,=,+,kr,M,=,-,w,@,l,T =,=,=,=,+,sx,o,l,-,r,K,s,J =,=,=,=,-,m,e,=,+,n,y,=,T -,m,e,=,-,k,a,=,+,n,i,k,J =,=,=,=,-,sx,A,r,+,n,i,r,T =,=,=,=,=,=,=,=,+,xr,K,n,T -,p,A,r,+,k,e,r,-,pl,a,ts,J =,=,=,=,=,=,=,=,+,sx,A,t,J -,r,e,=,-,z,@,r,+,vw,a,r,T -,p,i,=,+,n,a,=,-,k,@,l,T =,=,=,=,+,m,A,=,-,N,@,l,T =,=,=,=,+,n,e,=,-,G,@,r,T =,=,=,=,=,=,=,=,+,k,O,lf,J =,=,=,=,=,=,=,=,+,=,y,r,T =,=,=,=,-,b,I,s,+,kw,i,=,J =,=,=,=,+,b,y,r,-,pr,a,t,J -,d,@,l,-,h,A,ndz,-,b,e,n,T =,=,=,=,=,=,=,=,+,m,e,w,T =,=,=,=,+,p,|,=,-,z,@,l,T +,m,o,=,-,n,i,=,-,k,a,=,T =,=,=,=,=,=,=,=,+,sx,o,l,T =,=,=,=,=,=,=,=,+,p,I,n,E =,=,=,=,+,sx,K,=,-,d,I,N,E -,s,e,=,-,r,a,=,+,f,K,n,T 
=,=,=,=,+,w,O,r,-,t,@,l,T -,kl,A,p,-,t,a,=,-,f,@,l,T =,=,=,=,-,s,u,=,+,p,e,=,T +,b,u,=,-,k,@,=,-,st,A,l,E =,=,=,=,-,sx,a,=,+,v,O,t,J +,m,A,n,-,d,@,l,-,br,o,t,J +,b,I,=,-,n,@,=,-,h,L,s,J =,=,=,=,+,p,},=,-,t,@,r,T +,kr,a,=,-,k,@,=,-,l,I,N,E =,=,=,=,=,=,=,=,+,str,a,t,J =,=,=,=,=,=,=,=,+,sl,E,t,J =,=,=,=,=,=,=,=,+,sx,|,r,T +,k,I,n,-,d,@,=,-,r,K,m,P -,sx,i,=,+,d,A,=,-,m,@,r,T =,=,=,=,-,t,a,=,+,b,E,l,E =,=,=,=,=,=,=,=,+,vl,o,=,T =,=,=,=,+,sx,K,v,-,d,i,r,T =,=,=,=,-,m,@,=,+,l,u,n,T =,=,=,=,+,sp,O,n,-,s,A,k,J =,=,=,=,-,p,a,=,+,t,A,t,J =,=,=,=,=,=,=,=,+,h,O,k,J =,=,=,=,+,zw,E,=,-,l,I,N,K =,=,=,=,+,t,L,n,-,f,e,st,J +,=,A,x,-,t,@,r,-,p,o,rt,J =,=,=,=,-,st,A,=,+,tS,O,n,E =,=,=,=,=,=,=,=,+,sx,I,l,E -,m,e,=,-,t,@,r,-,p,a,l,T +,b,A,=,-,b,@,=,-,l,a,r,T =,=,=,=,=,=,=,=,+,=,e,=,T =,=,=,=,+,t,a,=,-,f,@,l,T =,=,=,=,-,pl,a,=,+,t,o,=,T +,d,i,=,-,n,@,=,-,bl,I,k,J =,=,=,=,=,=,=,=,+,v,e,l,T =,=,=,=,-,=,E,=,+,S,E,l,T =,=,=,=,+,st,|,m,-,bl,A,t,J =,=,=,=,+,=,O,k,-,s,@,l,T +,kr,I,=,-,b,@,=,-,k,A,st,J =,=,=,=,=,=,=,=,+,sx,u,n,T =,=,=,=,+,w,E,r,-,v,@,l,T =,=,=,=,=,=,=,=,+,sx,O,l,E =,=,=,=,=,=,=,=,+,d,I,N,E =,=,=,=,+,sx,o,l,-,kr,K,t,J =,=,=,=,+,x,M,t,-,h,a,n,T =,=,=,=,=,=,=,=,+,kl,i,r,T =,=,=,=,=,=,=,=,+,r,a,=,T -,G,@,=,-,l,},s,-,kl,O,k,J =,=,=,=,=,=,=,=,+,k,o,j,T -,z,E,=,-,t,a,=,-,f,@,l,T =,=,=,=,=,=,=,=,+,x,A,t,J =,=,=,=,=,=,=,=,+,d,|,k,J =,=,=,=,=,=,=,=,+,l,y,r,T =,=,=,=,+,h,A,nd,-,w,A,s,J =,=,=,=,+,k,A,lfs,-,l,A,p,J =,=,=,=,+,tr,O,=,-,m,@,l,T +,l,i,=,-,v,@,=,-,l,I,N,E -,x,@,=,-,z,E,=,+,l,I,n,E =,=,=,=,=,=,=,=,+,w,M,t,J =,=,=,=,=,=,=,=,+,k,E,lk,J =,=,=,=,+,sp,u,=,-,l,I,N,K +,p,a,=,-,G,i,=,-,n,a,=,T =,=,=,=,=,=,=,=,+,sxr,A,p,E =,=,=,=,=,=,=,=,+,sxr,A,p,J =,=,=,=,=,=,=,=,+,zw,E,rm,P =,=,=,=,=,=,=,=,+,k,},nst,J =,=,=,=,+,dr,},=,-,kn,o,p,J =,=,=,=,+,sxr,e,=,-,d,@,=,J =,=,=,=,=,=,=,=,+,r,L,t,J -,vr,K,=,-,j,@,=,+,r,K,=,T +,k,O,nt,-,j,O,=,-,N,@,n,T -,k,M,=,-,s,@,=,+,r,i,=,T -,m,A,=,-,S,i,=,-,n,@,=,T =,=,=,=,-,fr,O,n,+,t,O,n,E =,=,=,=,=,=,=,=,+,st,A,r,E =,=,=,=,+,t,e,=,-,k,u,k,J 
=,=,=,=,-,pl,y,=,+,m,o,=,T =,=,=,=,=,=,=,=,+,h,M,w,T =,=,=,=,=,=,=,=,+,spr,O,t,J =,=,=,=,-,t,A,m,+,p,O,n,E -,b,A,=,-,k,@,=,+,r,K,=,T =,=,=,=,-,l,i,=,+,m,u,n,T =,=,=,=,+,sx,L,l,-,h,u,k,J =,=,=,=,=,=,=,=,+,t,i,p,J +,sx,L,m,-,G,@,=,-,b,A,k,J =,=,=,=,=,=,=,=,+,h,A,k,J =,=,=,=,+,w,I,nt,-,h,a,k,J =,=,=,=,=,=,=,=,+,tr,O,m,E =,=,=,=,-,k,a,=,+,r,A,f,J =,=,=,=,=,=,=,=,+,v,i,r,T =,=,=,=,+,sx,y,r,-,h,M,t,J -,p,@,=,+,r,E,=,-,t,@,=,T =,=,=,=,+,m,O,nt,-,f,O,l,E =,=,=,=,=,=,=,=,+,p,i,r,T =,=,=,=,=,=,=,=,+,w,A,lm,P =,=,=,=,-,p,M,=,+,w,I,n,E =,=,=,=,+,vl,K,=,-,st,@,r,T +,=,e,k,-,h,o,=,-,r,@,n,T -,v,o,=,+,l,y,=,-,ts,i,=,T =,=,=,=,=,=,=,=,+,v,e,=,T =,=,=,=,+,f,I,r,-,m,a,=,T =,=,=,=,+,h,K,=,-,n,I,N,K =,=,=,=,-,s,E,r,+,v,E,t,J =,=,=,=,+,kw,A,=,-,k,@,l,T =,=,=,=,+,b,O,l,-,st,@,r,T +,w,A,=,-,t,@,=,-,st,a,f,J =,=,=,=,=,=,=,=,+,n,i,w,T =,=,=,=,=,=,=,=,+,S,<,l,T =,=,=,=,+,k,a,s,-,k,u,k,J +,r,K,=,-,w,i,l,-,pl,a,t,J =,=,=,=,-,tr,A,k,+,t,a,t,J =,=,=,=,-,s,i,=,+,G,a,r,T =,=,=,=,-,b,A,=,+,l,O,n,E +,G,a,=,-,r,@,=,-,k,I,st,J =,=,=,=,+,l,o,dz,-,br,i,f,J +,b,a,=,-,k,@,r,-,spr,o,k,J +,G,a,=,-,r,@,=,-,p,K,p,J =,=,=,=,+,st,E,m,-,fl,L,t,J =,=,=,=,-,b,u,r,+,Zw,A,=,T +,G,a,=,-,r,@,=,-,st,O,mp,J =,=,=,=,=,=,=,=,+,h,A,p,J =,=,=,=,+,w,I,N,-,k,@,l,T =,=,=,=,+,k,o,rdz,-,dr,A,Nk,J =,=,=,=,=,=,=,=,+,k,O,m,E =,=,=,=,+,pr,K,s,-,k,a,rt,J +,k,E,rn,-,l,I,=,-,x,a,m,P +,v,A,r,-,k,@,s,-,l,A,p,J =,=,=,=,+,s,i,=,-,l,o,=,T =,=,=,=,+,k,o,r,-,kn,a,p,J =,=,=,=,=,=,=,=,+,d,i,nst,J =,=,=,=,-,s,I,n,+,j,|,r,T =,=,=,=,+,k,a,=,-,d,@,r,T =,=,=,=,+,kr,a,=,-,t,@,r,T =,=,=,=,+,s,I,n,-,t,@,l,T =,=,=,=,=,=,=,=,+,h,E,x,J =,=,=,=,+,b,u,=,-,z,@,l,T -,v,@,r,+,h,o,=,-,G,I,N,K =,=,=,=,+,s,I,=,-,s,@,r,T =,=,=,=,=,=,=,=,+,S,a,l,T =,=,=,=,=,=,=,=,+,w,A,t,J =,=,=,=,=,=,=,=,+,xr,i,p,J =,=,=,=,=,=,=,=,+,sk,i,=,T =,=,=,=,-,d,e,=,+,p,o,=,T =,=,=,=,=,=,=,=,+,=,o,m,P =,=,=,=,+,sl,a,p,-,l,i,t,J =,=,=,=,+,kl,O,g,-,d,i,r,T =,=,=,=,+,sp,a,n,-,d,@,r,T =,=,=,=,+,pl,A,k,-,pl,a,t,J =,=,=,=,=,=,=,=,+,sl,a,=,T +,n,a,l,-,d,@,=,-,b,u,k,J =,=,=,=,+,=,A,l,-,f,a,=,T 
=,=,=,=,=,=,=,=,+,b,E,n,E =,=,=,=,+,k,o,l,-,m,e,s,J =,=,=,=,=,=,=,=,+,sl,A,N,E +,k,I,n,-,d,@,r,-,st,E,m,E =,=,=,=,=,=,=,=,+,sl,e,=,T -,b,K,=,-,d,@,rs,-,h,L,s,J =,=,=,=,+,=,e,=,-,z,@,l,T -,G,@,=,-,h,K,ts,-,f,E,rs,J +,h,o,=,-,n,@,G,-,b,A,k,J =,=,=,=,+,k,i,=,-,w,i,=,T =,=,=,=,+,sl,K,m,-,d,i,r,T =,=,=,=,-,b,o,=,+,k,a,l,T -,G,@,=,-,v,A,=,-,l,@,r,T +,sl,I,=,-,m,@,x,-,h,K,t,J +,p,e,s,-,k,a,=,-,m,@,r,T =,=,=,=,+,=,A,=,-,k,y,=,T =,=,=,=,+,p,},=,-,z,@,l,T -,x,A,r,-,n,i,=,+,z,u,n,T =,=,=,=,+,sl,u,=,-,b,@,r,T =,=,=,=,-,b,@,=,+,st,a,n,T =,=,=,=,-,=,o,=,+,p,a,l,T =,=,=,=,+,sxr,a,=,-,p,@,r,T =,=,=,=,-,p,E,n,+,S,O,n,E =,=,=,=,=,=,=,=,+,sl,o,r,T =,=,=,=,=,=,=,=,+,sx,M,w,T =,=,=,=,-,p,O,s,+,t,y,r,T =,=,=,=,+,tr,A,=,-,p,@,r,T =,=,=,=,+,sl,L,=,-,j,@,r,T +,tr,e,=,-,m,o,=,-,l,o,=,T =,=,=,=,=,=,=,=,+,x,e,=,T -,t,@,=,-,r,i,=,+,j,|,r,T -,k,a,=,+,l,A,n,-,d,@,r,T -,k,a,=,+,p,a,=,-,d,@,=,T =,=,=,=,-,r,@,=,+,fr,K,n,T =,=,=,=,+,v,E,t,-,kr,K,t,J -,v,@,=,+,r,A,=,-,s,I,N,E -,f,i,=,-,l,i,=,+,j,a,l,T -,v,@,r,+,h,o,=,-,G,I,N,E =,=,=,=,=,=,=,=,+,b,e,k,J =,=,=,=,=,=,=,=,+,h,A,lm,P =,=,=,=,=,=,=,=,+,pl,K,n,T =,=,=,=,+,k,},s,-,h,A,nt,J =,=,=,=,-,k,a,=,+,m,e,l,T =,=,=,=,+,v,i,=,-,l,a,=,T =,=,=,=,=,=,=,=,+,sn,E,p,J -,k,O,=,-,m,i,=,+,t,e,=,T -,v,@,r,+,h,M,=,-,d,I,N,K =,=,=,=,+,sn,e,w,-,kl,O,k,J =,=,=,=,=,=,=,=,+,d,e,=,T -,l,@,=,+,r,i,=,-,n,@,=,T =,=,=,=,=,=,=,=,+,x,o,t,J =,=,=,=,-,pr,@,=,+,z,E,nt,J =,=,=,=,+,sn,I,=,-,p,@,r,T =,=,=,=,-,b,u,=,+,r,I,n,E +,sp,O,n,-,s,@,=,-,z,A,k,J =,=,=,=,=,=,=,=,+,sn,u,p,J =,=,=,=,+,sn,u,p,-,r,K,s,J =,=,=,=,=,=,=,=,+,xr,u,n,T =,=,=,=,=,=,=,=,+,p,u,p,T =,=,=,=,+,k,A,k,-,t,},s,J =,=,=,=,=,=,=,=,+,sp,K,l,T =,=,=,=,+,st,},t,-,s,@,l,T =,=,=,=,=,=,=,=,+,sn,O,t,J =,=,=,=,-,x,O,r,+,d,K,n,T =,=,=,=,+,kn,i,l,-,b,A,Nk,J =,=,=,=,+,s,},=,-,k,@,l,T =,=,=,=,+,sn,L,=,-,t,@,r,T +,h,a,=,-,v,@,m,-,pl,a,ts,J =,=,=,=,=,=,=,=,+,z,e,=,T =,=,=,=,-,k,A,l,+,k,u,n,T 
LanguageMachines-timbl-642727d/demos/small_1.train000077500000000000000000000006701451477526200220140ustar00rootroot00000000000000Rockwell,PUNT,PUNT,PUNT,NNP,PUNT,PUNT,PUNT,I International,Rockwell,PUNT,PUNT,NNP,NNP,PUNT,PUNT,I CorpPUNT,International,Rockwell,PUNT,NNP,NNP,NNP,PUNT,I 's,CorpPUNT,International,Rockwell,POS,NNP,NNP,NNP,I Tulsa,'s,CorpPUNT,International,NNP,POS,NNP,NNP,B unit,Tulsa,'s,CorpPUNT,NN,NNP,POS,NNP,I said,unit,Tulsa,'s,VBD,NN,NNP,POS,I it,said,unit,Tulsa,PRP,VBD,NN,NNP,O signed,it,said,unit,VBD,PRP,VBD,NN,I a,signed,it,said,DT,VBD,PRP,VBD,O LanguageMachines-timbl-642727d/demos/small_2.train000077500000000000000000000006701451477526200220150ustar00rootroot00000000000000tentative,a,signed,it,JJ,DT,VBD,PRP,I agreement,tentative,a,signed,NN,JJ,DT,VBD,I extending,agreement,tentative,a,VBG,NN,JJ,DT,I its,extending,agreement,tentative,PRP$,VBG,NN,JJ,O contract,its,extending,agreement,NN,PRP$,VBG,NN,I with,contract,its,extending,IN,NN,PRP$,VBG,I Boeing,with,contract,its,NNP,IN,NN,PRP$,O CoPUNT,Boeing,with,contract,NNP,NNP,IN,NN,I to,CoPUNT,Boeing,with,TO,NNP,NNP,IN,I provide,to,CoPUNT,Boeing,VB,TO,NNP,NNP,O LanguageMachines-timbl-642727d/demos/small_3.train000077500000000000000000000006441451477526200220170ustar00rootroot00000000000000structural,provide,to,CoPUNT,JJ,VB,TO,NNP,O parts,structural,provide,to,NNS,JJ,VB,TO,I for,parts,structural,provide,IN,NNS,JJ,VB,I Boeing,for,parts,structural,NNP,IN,NNS,JJ,O 's,Boeing,for,parts,POS,NNP,IN,NNS,I 747,'s,Boeing,for,CD,POS,NNP,IN,B jetliners,747,'s,Boeing,NNS,CD,POS,NNP,I PUNT,jetliners,747,'s,PUNT,NNS,CD,POS,I Rockwell,PUNT,PUNT,jetliners,NNP,PUNT,PUNT,NNS,O said,Rockwell,PUNT,PUNT,VBD,NNP,PUNT,PUNT,I LanguageMachines-timbl-642727d/demos/small_4.train000077500000000000000000000006041451477526200220140ustar00rootroot00000000000000the,said,Rockwell,PUNT,DT,VBD,NNP,PUNT,O agreement,the,said,Rockwell,NN,DT,VBD,NNP,I calls,agreement,the,said,VBZ,NN,DT,VBD,I for,calls,agreement,the,IN,VBZ,NN,DT,O 
it,for,calls,agreement,PRP,IN,VBZ,NN,O to,it,for,calls,TO,PRP,IN,VBZ,I supply,to,it,for,VB,TO,PRP,IN,O 200,supply,to,it,CD,VB,TO,PRP,O additional,200,supply,to,JJ,CD,VB,TO,I shipsets,so-called,additional,200,NNS,JJ,JJ,CD,I LanguageMachines-timbl-642727d/demos/small_5.train000077500000000000000000000005251451477526200220170ustar00rootroot00000000000000for,shipsets,so-called,additional,IN,NNS,JJ,JJ,I the,for,shipsets,so-called,DT,IN,NNS,JJ,O planes,the,for,shipsets,NNS,DT,IN,NNS,I PUNT,planes,the,for,PUNT,NNS,DT,IN,I These,PUNT,PUNT,planes,DT,PUNT,PUNT,NNS,O include,These,PUNT,PUNT,VBP,DT,PUNT,PUNT,I among,KOMMA,include,These,IN,KOMMA,VBP,DT,O other,among,KOMMA,include,JJ,IN,KOMMA,VBP,O LanguageMachines-timbl-642727d/demos/tse.cxx000077500000000000000000000303151451477526200207430ustar00rootroot00000000000000/* Copyright (c) 1998 - 2015 ILK - Tilburg University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . 
For questions and suggestions, see: http://ilk.uvt.nl/software.html or send mail to: timbl@uvt.nl */ #include #include #include #include #include "timbl/TimblAPI.h" using namespace Timbl; using std::ifstream; using std::ofstream; using std::ios; using std::cerr; using std::cin; using std::cout; using std::endl; using std::istream; using std::string; using std::equal; using std::getline; #define MAX_EXP 10 #define MAX_PARAMS 256 bool nocase_cmp( char c1, char c2 ){ return toupper(c1) == toupper(c2); } bool compare_nocase_n( const string& s1, const string& s2, size_t n ){ if ( equal( s1.begin(), s1.begin()+n, s2.begin(), nocase_cmp ) ){ return true; } else { return false; } } // // here are the various scripting actions: // enum ActionType { UnknownAct, New, Free, Set, Show, Classify, Train, Test, Increment, Decrement, Expand, Remove, SaveTree, GetTree, SaveW, GetW, Quit }; /* The following scripting commands are implemented: QUIT stop all further actions. NEW name create an experiment with name 'name' of type 'algo' algo can be IB1, IB2, TRIBL or IGTREE. Default: IBL FREE name delete the experiment with name 'name' .SET option value set option of experiment name to value .SHOW OPTIONS show all posible options with their default and current values for experiment 'name' .SHOW SETTINGS show all options with current settings of 'name' .TRAIN file build an instancebase from file. .TEST file1 [file2] classify all lines from file1, write results to file2 or to file1.out if parameter file2 is not present .EXPAND file1 increment the database with contents of file1 .REMOVE file1 decrement the database with contents of file1 .CLASSIFY line classify this line .ADD line increment the database with line .REM line decrement the database with line .SAVE file save the instancebase of experiment name to file. .GET file get the instancebase for experiment name from file. .SAVEW file save the current weights of experiment name to file. 
.GETW file get new weights for experiment name from file. */ TimblAPI *experiments[MAX_EXP]; int exp_cnt = 0; // the following functions implement a simple parser to parse the // script file, recognize experiment names en actions to perform // on those experiments int fill_params( string *params, const string& line ){ // chop line into a bunch of parameters. int i; for ( i=0; i < MAX_PARAMS; i++ ) params[i] = ""; i = 0; size_t len = line.length(); if ( line[0] == '"' && line[len-1] == '"' ) { params[0] = string( line, 1, len-2 ); return 1; } for ( size_t u_i = 0; u_i < len; u_i++) { if ( line[u_i] == ',' || line[u_i] == ' ' ){ if ( params[i] != "" ) // Don't accept zero length strings ! ++i; if ( i >= MAX_PARAMS ) break; } else params[i] += line[u_i]; } // u_i if ( i >= MAX_PARAMS ){ cerr << "too many parameters!" << endl; return MAX_PARAMS+1; } if ( params[i] != "" ){ // last param ended at line end i++; } return i; } int lookup( const string& name ){ // search for an experiment with the name 'name' in the list of // known experiments. int i; for ( i=0; i < exp_cnt; i++ ){ if ( name == experiments[i]->ExpName() ) return i; } return -1; } ActionType parse( const string& Buffer, int &pos, string *pars, int &len ){ // here we parse lines of the script-file: // first we take the first part and see if it is a NEW or FREE // command which need special attention. // otherwise we asume it to be the name of an experiment. 
string Buf = TiCC::trim( Buffer ); len = 0; if ( compare_nocase_n( Buf, "NEW ", 4 ) ){ len = fill_params( pars, Buf.substr(4) ); if ( ( pos = lookup( pars[0] ) ) != -1 ){ cerr << "you can't renew an experiment: " << Buf << endl; return UnknownAct; } return New; } else if ( compare_nocase_n( Buf, "FREE ", 5 ) ){ len = fill_params( pars, Buf.substr(5) ); if ( (pos = lookup( pars[0] ) ) == -1 ){ cerr << "you can't free this unknown experiment: " << Buf << endl; return UnknownAct; } return Free; } else if ( compare_nocase_n( Buf, "QUIT", 4 ) ){ return Quit; } else { string::size_type p = Buf.find( '.' ); if ( p == string::npos ){ cerr << "missing experiment reference!" << endl; return UnknownAct; } else { string expname = Buf.substr(0, p ); pos = lookup( expname ); // do we know it. if ( pos == -1 ){ return UnknownAct; // error } Buf = Buf.substr( p+1 ); // A well known experiment, so now we can see what we // must do. if ( compare_nocase_n( Buf, "SET ", 4 ) ){ len = fill_params( pars, Buf.substr(4) ); return Set; } else if ( compare_nocase_n( Buf, "SHOW ", 5 ) ){ len = fill_params( pars, Buf.substr(5) ); return Show; } else if ( compare_nocase_n( Buf, "GET ", 4 ) ){ len = fill_params( pars, Buf.substr(4) ); return GetTree; } else if ( compare_nocase_n( Buf, "GETW ", 5 ) ){ len = fill_params( pars, Buf.substr(5) ); return GetW; } else if ( compare_nocase_n( Buf, "SAVE ", 5 ) ){ len = fill_params( pars, Buf.substr(5) ); return SaveTree; } else if ( compare_nocase_n( Buf, "SAVEW ", 6 ) ){ len = fill_params( pars, Buf.substr(6) ); return SaveW; } else if ( compare_nocase_n( Buf, "TRAIN ", 6 ) ){ len = fill_params( pars, Buf.substr(6) ); return Train; } else if ( compare_nocase_n( Buf, "EXPAND ", 7 ) ){ len = fill_params( pars, Buf.substr(7) ); return Expand; } else if ( compare_nocase_n( Buf, "REMOVE ", 7 ) ){ len = fill_params( pars, Buf.substr(7) ); return Remove; } else if ( compare_nocase_n( Buf, "TEST ", 5 ) ){ len = fill_params( pars, Buf.substr(5) ); return Test; } 
else if ( compare_nocase_n( Buf, "CLASSIFY ", 9 ) ){ len = fill_params( pars, Buf.substr(9) ); return Classify; } else if ( compare_nocase_n( Buf, "ADD ", 4 ) ){ len = fill_params( pars, Buf.substr(4) ); return Increment; } else if ( compare_nocase_n( Buf, "REM ", 4 ) ){ len = fill_params( pars, Buf.substr(4) ); return Decrement; } else return UnknownAct; } } } void one_command( istream &in_file, int &line_count ) { // the actual "engine" // get a line from in_file, parse it and take appropiate action // Most of the time by directly calling a MBL Class function. // of course some sanity checking is done here and there static string *params = NULL; int pos = -1, len; if ( params == 0 ){ params = new string[MAX_PARAMS+1]; } string Buffer; getline( in_file, Buffer ); line_count++; if ( Buffer == "" || Buffer[0] == '#' ){ return; } cerr << "TSE script, executing line: " << line_count<< endl << "=== " << Buffer << endl; ActionType action = parse( Buffer, pos, params, len ); if ( len >= MAX_PARAMS ){ cerr << "Too many parameters, skipping....." 
<< endl; return; } switch ( action ){ case Quit: exit(1); break; case New: { if ( exp_cnt == MAX_EXP ){ cerr << "To many different experiments in one run" << endl; exit(1); } if ( len == 0 ){ cerr << " Wrong number of parameters for New" << endl; exit(1); } string cmnd; if ( len == 1 ){ cerr << "1 parameters " << params[0] << endl; cmnd = "-a IB1"; } else { for ( int i=1; i < len; ++i ) cmnd += params[i] + " "; } experiments[exp_cnt++] = new TimblAPI( cmnd, params[0] ); cerr << "Created a new experiment: " << experiments[exp_cnt-1]->ExpName() << endl; break; } case Free: delete experiments[pos]; exp_cnt--; for ( ; pos < exp_cnt; pos++ ){ experiments[pos] = experiments[pos+1]; } experiments[exp_cnt] = 0; break; case GetTree: if ( len == 0 ) cerr << "missing filename to retrieve InstanceBase" << endl; else experiments[pos]->GetInstanceBase( params[0] ); break; case SaveTree: if ( len == 0 ){ params[0] = experiments[pos]->ExpName() + ".tree"; } else experiments[pos]->WriteInstanceBase(params[0]); break; case GetW: if ( len == 0 ) { params[0] = experiments[pos]->ExpName() + ".weights"; } else experiments[pos]->GetWeights(params[0]); break; case SaveW: if ( len == 0 ){ params[0] = experiments[pos]->ExpName() + ".weights"; } else experiments[pos]->SaveWeights(params[0]); break; case Show: if ( len != 1 ) cerr << "missing information about WHAT to show" << endl; else { if ( compare_nocase( params[0], "OPTIONS" ) ) experiments[pos]->ShowOptions( cerr ); else if ( compare_nocase( params[0], "SETTING" ) ) experiments[pos]->ShowSettings( cerr ); else cerr << "don't know how to show '" << params[0] << "'" << endl; } break; case Train: if ( len == 1 ) experiments[pos]->Learn(params[0]); else cerr << "missing filename for Train" << endl; break; case Expand: if ( len == 1 ) experiments[pos]->Expand(params[0]); else cerr << "missing filename for Expand" << endl; break; case Remove: if ( len == 1 ) experiments[pos]->Remove(params[0]); else cerr << "missing filename for Remove" << 
endl; break; case Test: { switch ( len ){ case 0: cerr << "missing filename for Test" << endl; return; break; case 1: params[1] = params[0] + ".out"; break; case 2: break; default: cerr << "too many parameters for Test, (ignored)" << endl; } experiments[pos]->Test( params[0], params[1] ); break; } case Classify: if ( len == 1 ){ const TargetValue *tv = experiments[pos]->Classify(params[0]); cout << "classify: " << params[0] << " ==> " << tv << endl; } else cerr << "missing instancestring for Add" << endl; break; case Increment: if ( len == 1 ) experiments[pos]->Increment(params[0]); else cerr << "missing instancestring for Add" << endl; break; case Decrement: if ( len == 1 ) experiments[pos]->Decrement(params[0]); else cerr << "missing instancestring for Remove" << endl; break; case Set: if ( len != 1 ){ for ( int j=1; j < len; j++ ) params[0] += params[j]; } if ( !experiments[pos]->SetOptions( params[0] ) ) cerr << "problem with Set " << params[0] << endl; break; case UnknownAct: if ( pos < 0 ) cerr << "[" << line_count << "]" << Buffer << " ==> Unknown experiment, skipped\n" << endl; else cerr << "[" << line_count << "] " << Buffer << " ==> Unknown action, skipped\n" << endl; break; } } int main(int argc, char *argv[] ){ // the following trick makes it possible to parse lines from cin // as well from a user supplied file. 
istream *script_file; ifstream test_file; if ( argc > 1 ){ if ( (test_file.open( argv[1], ios::in ), !test_file.good() ) ){ cerr << argv[0] << " - couldn't open scriptfile " << argv[1] << endl; exit(1); } cout << "reading script from: " << argv[1] << endl; script_file = &test_file; } else script_file = &cin; int line = 0; while ( !(*script_file).eof() ) one_command( *script_file, line ); exit(0); } LanguageMachines-timbl-642727d/docs/000077500000000000000000000000001451477526200172405ustar00rootroot00000000000000LanguageMachines-timbl-642727d/docs/Makefile.am000066400000000000000000000000721451477526200212730ustar00rootroot00000000000000# $Id$ # $URL$ man1_MANS = timbl.1 EXTRA_DIST = timbl.1 LanguageMachines-timbl-642727d/docs/Timbl_6.4_Manual.pdf000066400000000000000000021036251451477526200226770ustar00rootroot00000000000000%PDF-1.5 %쏢 5 0 obj <> stream xYْ}m F]e`y{lq{RRUzX"Lr̓'O_5Ѡa~u_ψ_û&.4Ik\Hq=n87)lxȍnK*<&ȥQ[f-vqt8D/ 6<$rCaJ95i۝Uov)iG4wK{w,Rz~Y '^e[҆oڙ7Qw1*Л<' WI?\|(!hp_s&aO+7š!B{٨)D㳝 _Ny !h+t3951b͗w<X>c]A($咍7^<1f'S2&V(#וػ%[kL6GXÓ'HM8 _\Gt-c#n.4 Z+sR*2RxGne+|5s0JQbٍb '],ٯr7òN|Xy*芫\iRf|e(67".Usʡ#KCf^ÑqǢF3j UH,aUM(/TW=2ER&۪zWxEbjEJ 3kP2[CY[XiLҪ}rbԫ0i.PERѪS'1gM+=|W'Z5aQǎ2B}rkIr%8J󹵒[\n۸yxU›\rыIHklPwo_<|ًmQuW2Dz:}Б@IܽdS?7bJáNkw \q!Y,Acx*,-9tUjr Kˌˎz!-xrbp܉\̽ z dkۚ8%M,Rbĭ 4 6X)Uj$3Mhν!btsPB8w"fm3'UyǬ I oJ6A>w&]$NB!&%m~<3=15J>NbK~{ڛ] n/%3*yi -ocl~13sCMb Kq"͐+g'H;jDYO/ޭ(gC4K!!D; o4 I-셈8S~mp~=܃,cox6O'Ǽ6@Amf]8IM-:)?˝>m7%WǤ۳ks-l -$7:Q*yP6/S U".(iRN-L/"B[QwUpzW}$TB\!&u'I_ìKB ly>JjR[q6wvJ]XVK-"sVro w3(XB`aqM$w5R;L|~oS/y!...4\q >tǞiWu D^DwwHYJq ?w@endstream endobj 6 0 obj 2235 endobj 19 0 obj <> stream x- 0D)`cl'VbB0j q2usq㠋b"Jxj$FEH9rZ_eAh*z1qendstream endobj 20 0 obj 103 endobj 24 0 obj <> stream xKo7: <:6}6r zp1pw%jmEq!X`j!̓;86Ϯf^/fgPήWRA~"W̚0Ov0_]T,׀}W.Z#V׋562Qu'-3!m.#t[hV?5$JdMeJɸr'2>Z~V׋%:kda(pY!.~z!爠:s zt|~-.b 9Lե*5}Xv 2C9,R`AUvgy}KT\. 
MDj`Y՜ժq)}p[c="bdr)qhKWBжP3uT2hRޕn"Z2-Ҹ4^E- h"gPm Esg}f!%ׇDe\27`O~lR`i%EdD_Kbxu(9Н]7oK9 AzGy9"östf|ȳӉ9d;NѤ=:y(e]Z!{C[JŪfY%92C7%rQR*9yt*gT&SI4$k32B7Yh~]0Xo{vIő9^MC%+mX 1.dCd](05;<55?xcCڮ[Ը^7J##.)ŋ:6CPU_b(lȸNJb͟@~GbQ7Ђu}AJݟPs~lkxDM҅(3 m&In&nQ  h&Xl4ga7Ynu#U0`CFy+ዦu,B$mdC AV鱗cg\_epI 姾c*DV5f[yH,.z;s^GyF/xgݵ> {:E89^%E-:+5p|Ia}{!v\ =$qf,d;nO=(ZFCBB/Ju,(h2 mҡ&qdmHD|iS)O >4Ǐggr{MM<~™]zCw3OvM<:~sR}ɮ=endstream endobj 25 0 obj 1412 endobj 29 0 obj <> stream xrS̲Y{vQ$vRQX"#Y+dy݃ќ")/4̇j#n.>{7׷rs}B?n^l^\q":rfstT(7^!M\\UZ?(߮MJKFU8U3G/F#r-tࡥ Ai>/)gI4 롅dşR'5O -)~MO}~w;ZA.Ly-Lr q-Y%^R烹a5-Cq˨Oy\ϻ0ڟMX_k< x[-*'罄`@N}SWLؓ-o}~&FLxDk&n.ڔV},  g.N*sia[#[|BoZGv|nb?e`@wx7t-͠*+qk`^ͫdžV9ޠE޷?xS]*AP5`'yG4 7 0M_oˀWue|ͥ\«È,SאpJw_Dž!] cuI:ü'1= mDtK{R`gڂhV '{f>-h5qEt41Pqur0t7[DTn'Zx2wh4 ΍M[7a˭z㖠US<@(a,Y'*/3+Ҋ||pNɪf- Uu%mq0|#5C+֏DVݹ8s%]]ѕStˬ]A׵_z"3ڲ\^4irHJG/K7XTdHe}7Eh]P+M}g^MkA4|ui >4 ᒊ线gzэA#?*0)(|6`S<Ӗ~4~$E mmkCȼF4GcF4^ mШ'4:4n%w4܂3-dpAх. xMZaķm,7>h Ѱܦ*4!rD!̠]iy~مmda.n hˎt83^Mhyz*6*ۮ:Ce9W=F. @zcYm6/Cn-Ш Y#f\%T@ZBŋR/dP#< +QtU-M{L"rgG^NB{*/,FDo<4%3 Zw+o.Ni~V}Rh`l/ǐ:ZnGFpU>5.Y.йѭg{,Ew=}ELK7$N19`"vBm xmVf'xTA q9${;Gm^*O*WXVRwZF7>2iRm{Lh&NnGqݽE쪬رoniӌB]Z#I9L j XڥS_ iAD*"t#h g8j݉9yp ah"ld^ /& KYi1o`?FQA+G:'TEvUad 4oe&]?Yp2Aއx+Mv +N~t>~ve^[&6<4T`qQ]pi9E̍fuk)v!Q .\ϒ%q\:~6endstream endobj 30 0 obj 2371 endobj 34 0 obj <> stream x=ْ$q~<88 Mi}Ik3’ݥ=ȝo@UejD+DW}w͂_}H7-7}o?/9 r27_=o܄bޤoϷwyNna O/bƟ^CM^ЫфPx3X>ߔs6z\=.甒y?1F=kLI~bOᧃC=Y|O8v>O?&h{޹遞&w:`I0K meom<1tOܻq%5̛'-"KZLa p!(9Twѝ L[[ @"&n׷v8n&>ʻ4f=}[̀NexPcROiEn@0f[ Pvoqs6A3'+n-f"bsd9'w?ݰ^k.n=b;xYa"--)G5-6=}]1:aˀ6{(6f@CdɝcY];r@$o9`u|Ѯ9g B'lY_CRw.ҍ7ʙv3NQZ{a19 `+tOes2fPb[ waT@=61Ugߑ1RGLca)K璁 s @h`RWi*z1bZ';ʮ<9]WKD-nA$#uЋ<~'q %++d4&$nb؍Ɏ)FB5!J'Rs.oHź(ь:7\c뉨-`9{;nEN: ʄ3dWDr| /&/?s՞E@@OiO%f;!JN|펭w׬ Ԡn4U$$>l1fJ W@nu&]3}AuF/l}!ݫ@lFד6K[ UU* yVA& z/AqeKD6<ݙ E=#"`&20  j̽/~qަ_,vUHLcQ&"z6X3QTnӚrnM uds@*z1Px{_0>F xBvoe")jS!mA) /k?3@ĥgU PLvn4&/rѳ5Z iLePMvp.fs?7sX{:2G%_#QW"w"MAg+:U {ndo `@k3OxUJtW˶{ vNlkC|;0iO+%+Ӟ=X~c)j|*ER U* v|e򱾜\6k7=Pw^&Q%%?#O⡡JN6~CXB8t]T$E~ 1#_ 
_OױVӚ*Y'#~+`VjqUjmXrKT7ٖ[) f?Tx37Ɖ/ݬ *ʌ Vݕ4h`N&dŬO4'ڠm DJ;ZٌJmt 6OԘ_ ܋/\r6th)|K{>"c~ِ7oȆ20;<<?߶9Ca"x8b8v(i/gdNn"BF8)W?#W&ҫ%(A>pt+n Ln} Tki/1tL,Ĺߩu^%#, qLLp=:?\wz=ny)so HFW1n`LT8ئnSFE WuRE)s )2kDLԇvv&ҸHIںdod >k,jK\ȅ0["  F`_ք[\<⛺ߘJI񡾖@(uq:ORy۬D09qvfV`>n>I0\?2"\gD=ZCGh,tw"43Pesp?`*tT`dΛJ[+=:4(=v!u|M5! Fm[D\`& 3N^?՝oِ'2Ow pW%5Δyb{c|47(\u/R75dF oBgvtHY!\z[stpZ1"۹|{-.`Gਅl|v%)?e+8[²x%x% O[. FMfղyW"nIK,!f^zŰG06{b"$j=A> `Cɔ`}'4rKlgba58;=#Yhb"9c`i0:}iLXgU,-S#vݺB́ O W mDoxv&5Swe '.BMŴZ-!zUDbv;Pj<ڭk y|f v} y7Ȼyw =ҷ*|yp6K$93tW09'?Rc7"_k >`c5GY[l9zI=4 (c=cC17DFœhaœ؝wsM)V4:1ԅB0ݕMJXR^rJ4u#d %o cZxչi#Fs.Ar7.޸9dIee"duf¬f M|7imkխuf_Y  T1ʚ1`4)JV4K'b[z$' g8k[|bՔ]Jj? #wn{ !, ܠG%q[p?1^FOw&e f)L>ԧUOw弩|WdnMe:1D}Y_1&cH||%Ze'].$.qS=Ѫ+(05=ƕPLxm=6#?c'-WUnF{;݅VKW7#~ub=(;1F&g}[6Qh>AҖz[K堆l;rgyX:vG6+7<DB"p@[G!-r`7׵b—b;4]-ɝC9L~Z=|` OhA3(Kdڵ^p^铢p> 6%eظ< }T8CsG}73 <]ԕ}ٛ5Ɨ<Bܫ),ĊI)}ٶ%O<x8lϻF"؏@ƲnPfQ ߊHZ9^ãvUQ4̒.Wa@-u뮠=,"%59{g;R>ߵ|Ƙ<*o: ?}־6]ndr\?EHרλZGx;uwbt53غ.OFC kQMY_ҦXN0Kۨ\qA?A.0!gV}{^B%>ǓuиF)C-3vNT81s}4! ]GUJC1fs4X= 'Ң_-Xڍ-ShvsZVR_o :z36-99^,j@ރ1J^dF9LE]RaZ셬zHɹC}9L\zd4xAߦ*,vjꭚI\,GD뱢6YIӴg (bmkG] r*0IfuI[2iZ9b1%-eB]|O¨Z2du ڔ#h.z2cc:Ͼtm LT4FD;\j k7GX0,aҮ EyKZk3B'A`x~{eo:[纇r$ pDy t]M =Ka1˃jDg|M8`: _P{SW~V;FM -Ĝ/Re3&ds|"zowRlr+!e뵿7 y΍Є`;z3 2ӟLqԩN{דƏ)Xjo&ȥXp8el8JkȫGX|ɧٙ'&};hOI|V.uo$}5Np.l"S ܦUʒAsDW%ס#ש14ȸ4:W BpV4f l604kûA 9*af \E?Ee(2GM@^vZDrVhjdks7vUzwu LV{B~(.[в3to˩g+h혍oUYx [u] *hܤ^{직WFt==|PA|LWOYżtꊿd 0I˶k]teij~[`r;䔄 LɶnkjA/z,]~O5n+HQ40vuH,ʀ% UDbՃwB4o[DJ=w*VS;VH҆(iJZ$8Y-]ZyOlOu^8UoX *XF-NҤ}#֞& iH>Ik.N|Z ^~gXfcylZ;5D5>Б![fUSA'earAM=Ԧ>l)ڧN0Hk~ < wJ&N7&BJwn[1370Ϟ=K 6k)朂 fpgcXcA|(s_5Cfj5X_S GAtMb:N#\OkV=<}鴬[jgMdBwq@.O+zQصGS.ekpFcܷE ~"s!G HQV;^o.srWCon'6nϔ`okxZ T_,Eoj[1 &fd)sK9.΂%0:aSt2~]Ӭ-oij:)(\npgy:qNW8\MD*wu8Q5jPjnlFAj5F5}nSÏ)v\uj֟L]IֈaףϴL7^Jw΢>IkR+B,ۙrΙ- s®O3EoS7`}k5PY>0qSqPkwY8ӧשd0Y98\|b&f%3xnvZOd֥fF*;w.2prP@&)LeɨA~_{cf$U!{TU u{:p+.>GzkkIOGlzO-&{ͼy#g-~)kTIh *B3Lا}\Qؼfm:M`5f!F"Liss"ǎVI@l T t9 h W5tR5z?jH&Bn'Kv]h^׳a ll:a 
(!8s;tSG8afWz]Yu36>*&+A&:?1l28^|2_n`5ŭJM~r=Qә;XYh􆛽rZHxL}u5=:D^fW08%x3ߥ}ێ⛃1#M%$XU`_5OXyY"jbǏ:G*~1neertK2|/p}/tGC9Ǐ ܁g ;/5ϟ?Kendstream endobj 35 0 obj 7567 endobj 43 0 obj <> stream x[IsJnaJ`_nYV|sEVLQ(pz40{r頩!htW5/~y<ܟ٫3M<8|t !9C}UKg_N?feã?^({5EpQy/*<.*릿iSOdK|٨<}]Vd\OFc_|!f3nd|&01!8368GCLr=Q֥ 1Sڤ_fobW%4%n7׸Vtq̼F.lo_4T"x}.~.o2ǵ-sa?.*𯌵U(dJ\$ETMPB v7t.&==[3G*SL_F o)8>+6RKLsL4 R96/7 Ӳm<7 %zǴxJ7~I`|{XS\7L䕎RY;;ꙺZ^@gL+S]̷\+y߶gxl5$e"=xZ:;Plto%C*BB7?VebD9‚#Z/Pqw\^eeY1)V&ŠH1れNE=ta߭<0kLC[0i  y-ݖem^V۔qSϺXZm,{Vx }Sī2} a`QhT4[_U.-㎢3E bE@,Mz OKdž"o U`?KzEoVw 'jpNجx+NaM^ 痉%؃B>߄|IFYfZ"Yȳd]CHw^g:(} <ژރ)3o_gƮG1p8ǧgrI`  zbd[Qlk;/%?tO;vYFfJWJoreA,|AaۺHMn{9ƠGHG)N낾Qa>$C1qv1 _=2RāOXڕ M%d  uIsH#U^fea ^, ([ܪX@ hV= sMSSbxHOD(aUQCQ; o!4 G6=5}Gۅxîsfa7}$eG%xK3k+wX2Y -o1 .-W)k4/z;M9.mc4hgCo*)!8S 3(^$keөC|qf[d^dELYlfٷPYlZ?zËC6K=Zj3QN`l20ybI@"BowZ,.()0̜W?)K݁ޝrʷSbr<*c# ;d|rX;7MϜ@ؒ(TB]Y] = k]fM[z PX_+56z$,Gbҿՠuy[o^ +`N:-aNȉn, fSAQ-Ckpف1.K/.LhUFX?[ ];ņHB^EQ/)$Ӈ'BOBNU$ʛBPmc=D|F^[yqN IL$,5 m.0XɶjOuapt(#.jTe@ښ#(4Z Ά):hC~53Lkľ]eW/7liEP^PW>AEUE]XMxтj4pÑ aM؀;KՓ~ ۸Nmg޺vN_s] GUAqDL/Qq9&Iq04 bm&(~C=!X#`$}RB+]eQ \pg,n.Dw#Xc2rҦgWN7riK}:{L ib'=7=mY`~f"ֆfL"=1>*1+kc<ο |ehpxW0)`2x8Zӯf6oՏQG1߾n7%W`X ͣ{d~}[4oQy G^S|' IZC YU̽ {#B`~"djԠD"nL~MHd`(›]Z5S" ~V]ͼ:oxI4)G`wI䒏E8]3{5+N,S߫Nc ֱa;>[\NY6i(,P\ظK2+y5, UI줝tXmoSh-p!:H;ȏҝjGh”n ʀ֯wڕn[x ;VspvؤX1vfdvk$CLe:n;Xf`TT~a_ފE\ aHnݼWֵ7\YN(ASuM( ~ܕgL~zrzm)q\ϐ>޺^%b3*/A)Bp;-|vooJwl]ng0 |/h*~> stream xZrSr9H*0@O\ؖdSȒeSz==|HRhqc5c?G|1f{|ON8V>go'QIw$6:"(hTP!9҅5I9GhQkyE x\Yc3^Ha Y-o)-o\S`SG*ZodDf4f7\yo2N*z9BTטoymT`$:%oX^򊎓Y]aR('N1:;1!*Äd,GC%gIad{U$A*AED` uCi< Dc^ܬ&uX|^1#A޻լͽ,#feP Ȃu ۏϙrLmu>"℡d'`f 3jʼ$ln[I0kU,8d6f |G!<|(es:e3A4"{<,E̘Ŗb2b -sPγuN'jMXI:Pt@%W#Wvxql#%XI>W"EOE)ޒ#/\ &> Ts#0_5t"|;YN;(P$.`H95D%%>c 7OeWh) ERjN`>y4*bCT^yX\B^g  j`}sm®ĥO~ )f&ồl(sie,$ɦc%Ęb@|`ɚl{HE!ͪ<2JI,V6/>Al.)\K9Jr!Wr?dބ{v&QX'3lO]p ڃ:wљƲ]}ED/?lJd'eP}l@E- *=q6vJ3[&ʌ\QJ@.i<3<vHlQ`ymz[n&HB'7RJkX8n}Tag7 c$,&}NbaOu\N}=fő, 核L Lubp#[9k<rϰ;fT`b_<xw۪BVlvM+tA?#y)Έ3LkϽAy!<묚1O$WRߤ-]e ʬg\b詷X<=_$&:):-vrZ+ Fuir\Δ63 
oOUQT8BE{kyN%32(0\L|Uyg`Fy;g C&h?)akUB^ݎrY#@73æ:u_{-XbmjrW,Euu`9"ʮť(GM#:JXP?Fgcz*l ϼXVyVhiU[_/RmTev=84}{muWJ" 4|'Q2- #*]a /5ΤNJ) פ(oݤ-n-BCCܜCXOR*>̿i^^(,̻ 6 YH^$;~…E}XTfN4=gYֆ勓Z2vEr [9P},4){('xvBОBC_hSz16oW͞N#^+e /3E[~ؖl.KFyK[m-_/ jMgqG f|Yh;i/6wxS"y[޼Mϛ~qH_˭zq=~:ҮME~k4LRtSSda! +%j)?B=G0Fo+B AK1ܪ 408 D>V;F mw1 TxtG/UOAӇ.z  :g-[~M '| rYF-`^>)`A>Xd[>ؾlgv>&`l򦲝+o_ Îk~Ed9,C% Mʘ΢[}k^W_MNjAp '_Ze:Xk30||8;Vendstream endobj 49 0 obj 3378 endobj 53 0 obj <> stream x]Q;k1 +<ꆸ%؄3Nf!C {!<k=8Wws4=]F ނo7Pѝ!|׉D1wBuBv(DMkݰiH1QǦMS)NZf}0i* aEQC1&f4KDUS, 2)G(+ʰ`.0'( M.aP^e>M"e7 HZϮ%8c_G_fqendstream endobj 54 0 obj 269 endobj 58 0 obj <> stream x[[ox~yÞVgm4EHڦQIdIV ]ʿ3r=:` 3\H\ 7|z>]꧕_/֟DrW\G:O-,qJ# mu5ñ؝2VK;UsA%JbC}ïW{Uta0JjNZ p1ZXIB|VѼf4Wڔ#_/lHu#hΠZF,7~IcCw(c*h @\w I@T,e(sBn,S(ORG+opӡ%z} J1ZFdI^)ސdN *ya19͠ )W(^8 X MN'/MVw]&Գ2*}YޢBi_!7ma1-Z( \Wmt|A[?/J68|Iχe 5 7eؗ}0N+02{Sl6}mbR^ V̌]"Js8.4rȨBDP6V^'T<)VH>A#$XCFiOҪgWKN&`A\;c!\a@e&ۜHRշ+5JZA~j bp6zkxl+\"7w=)^r101b Yi hr"f?ۅ[}ĿrdX.j2^:<%`_%WOBb,ul=!Uc!zħ5g sS1"#0 iׂtӬe>&Hkb-Y:'UDj$:dI:V+HVcj-R' @tDg =fS ƌ}F2Z2٣&mYa΂@שPt~`H̩Yx6_fsp;mL ٸc^8i0)"@hbQ=3sxEð6;-7LM$O&nF NN|Qѫ`_iŐuq?w[~QfY]6n\8p=` .X+N3lj@MP-CF;ѲZz-u[0rZ!dA>-3<$q\:e]7A ~P!A՛W^&MI9c =|%v)7Vxl\y7mGzgr8ݱX1%MLfwCfcYu+F~{͂m-r ҝlr{@xߩAm z7 1 C&Á.QalTϰ禷J!zZJMR ZXʲ",IY\[2Vg>"2H_vs8{ cϜ.}?Kc YƳǭ%y\n'/^BMnaN3}hYjK*$zwBƤH;F4oB1@1=Zz5B->'nz7e-&Uu ̏퇢+.]D{ڦE&s^]}M[ \B).?2>և룚53_M-ݽYuk4'rqW1-o8J!CS*u%iPA=çBL^> stream x\Ksʑ=0~(+v+$~*;J$%)Rt70 \ASؙF ~W~|"6O?&Sx!lrfszy>(6^Q9}u`b4ZKwOH,B8H18md/fތި߷;xD@[Fmqmj8D>7 N0FE 6lrVddh>ca `Vy Er .Ic GGU,ڽ6#nI=*}RG+9}y-mNy+Ny`=>Ɨ^شJr`>?9#8iMF;|tJ\ 1x8p]ݷMbWq#gO.aH2ËAB5Z'@8Њ;PvrH(',X!R``2iT&ABiͥqsU6M,<ϓ g&M|fHDP1%F->R~Qh%+3fK'䣨=b%ON]v.?nQ~0`a IЦxiul{s+P)&„O ]4AK(34F/ ;F[c'`3 LAj@}VY864=>-WhC#w:j`\nv ѪDB,6ia S$D6F63&@іHp>QQ (=}N/M)ϚH:gvOOO<1 l3ljyۼa: )m63imFpj!l>Q(l0Gl;(m2;<6gk茗JNp=[J3'V~'!V^%j1z:셂ZL$Z-fy]Be,^ѱͣMrV)5ANl,("@Kg@`Ϳ#ǠHJh`60@;*'xwЍQ#UK~/@+P-CZl}>L!nc`N,6IgL.xqֺwj+{šR8hNR iO])OOXѦi;MW ϛ7d6 Ҏc|&]N!heI k&/%>:D8th kˡ1\ 
#ߟk>k2*r$ll,LrqIP^9ʵl5ɵh)@}Ѷ;7HX֑vT7A!VtF0~o҂?HLa&=_*7e>*e>k,}5,FISc ZAEI)#C z;8ր*/9O_@Fd#TLzk\ZbKnK2n:V1 XAwdi;1!=HUiiKG:jqKSiϻOd2k=O Ƙw! 㦱^苤Ar[Jib'Z(BJ]>L(LWMv3m욙#^c}Z' A|AJ#S`%gIyD8[\9k)e&)U}ߦkUOVi2cKSN7JUem粚y.+č@sr]v}!2T.'5f-H$LW2um=@!FcL6 5T?tle[3^rIɴ9Cxт<\w#?eO@7r 8)6s0`jW f=Ѫ(d«2TI[!c{BӋL^-d]=Dͅ%FbI#;&h\<^?Tn aᨈANihZ\8 s2=57ͦ}rdvep% -PW .HT,?G,LTt;&w+trT)"!Z7^kABA ;Yv,6 ǏsѰ*]0oԪ/64L7y1'6[8,;;,0GmomZ/ O]SѪFK݇pS-[B@dPlH],t Oq "#iR:0UPBʬnZNd%PH>0zeI?Lj-Y3HAEa[3Ygop3ӈvm7g'|+xP6{冐?|5+=+  Fo*?ley7iFpw,ߔ t]6EI-F*#1  L5X,;΅TcӦ21IrV(GB6|yms>&Xl٘'YAwYʕ\73`Ĥ54:'t(̘,m")>fؾ+[|A]រ}W}]T.vB@Ow< ѿ?#<޹5)Ľ`CF`G 0P19Bc Jf.~0 ?aV4U7yX9a fV1(}I^r1[ԉLWmB7{{?jdW>]zhؚ+!ptc vPûC=4i"v(GclEq`P+"#IEªUmUj{`B{2֑7Su> stream x\Ko]Ԧ.HhmĖ X@|!yx<#uQx!!y|3껃XAп˳wowg2P~;_Cݢ9\<[VxB`_JJy}~L\GuBY|(a>g"h}`t/'b4 IC1qؽE'[:J=VņV|*Eu|^cMhMk2 MNz~W&d-Q#@W6>HJb>˟莯h' d> yi*4oX|޲FlNM_=S뭗_20,b2,Zf/ dl^dV+LOKWʓ`qx \,ipatiYønp `1*-Ͳ3io20 ` 2|Yn&6φq{褋FԡzutⰢ@S'U0HN6̚3fȴk[>|@zw.6bYog / $~ynCU p77;6:OaqsX!5:DcN-N$D%hgb M2g@t5c"f\_vN]t?Yѕ0&G21"{9T&*P21f9 R)HӍ1K-\9~m;\vqP{"l gf@5cFS-*>,nq{1,Zr-{;wp^2k1iBHL{ Y׻ZB3l&Ik-y`=q t 8Nl JDy9v^̩=ɉ=xZʨ}Ii ٪`򆶣xdȢ7<FV5H"9Xڙ9y1y,l牽ി; .PfdFqvc,We1[1qm,3/ 8Z7SkZm-b&o˦Mj}RP5NfSdE:hLCo.cWL;peN9R 2aCS#H(5,#8aʗ` un΁9<Є6?ѽ: Ci6871%?ft*sG "n&uރmj,D@`wbW"r0:Kv2n/VgKbb21 Low .9Jژ>Ysy&C|4%W+Kc*:==&Nω.AmB*=>2>Kn_8 9lwbW=v ansڠ$ sV$'fXW܈ 'X&W]j؋\08|1k؞_ Q.Lx}/S8ۓQbF'l BI;P(.:݋БX)O!!:4% @`4kKbk'&iWZdƮoS)TtZG[{ADay4ƻKHh/ޱ[Gj1uqĹN :S;FOhҽ=~0I7I/61vĮ'jE / cdv%=)M2AvI1%1"fe,˻6eS@V'%T|l=ښIptSp|(nNRe~e:Kz˚o]33]|12ֆn _no{w}_ulݍBmSm񩶑ʬ&60ij&RU[yQ)Q57]heWB˸m['{'@%,gSD~QwjdʮfKͣg)&$!H2DDu PRh'7?ZR nq~4;PF]ԧ^$LRV`CBqY 3]2ՂF%wR)-@xz+x'ĉ`slOb^[khKݔ\N2gR=HU{#Q |oMgw(%ތka3#E9Lwz٦\Q,zc^yfW 'zXk"3A~^$uQ¸^hk~ntYĿjڜ7rM(>Ůb(R'm2 zKSL4 9"$)fX6ܱƏ{ " _ӥ;8v#L糵DuuG-mNdhs` [Ux!3`D, 1儧Rѓ .5 aKinzl}K$8#e8B#HKeG{IRk0jeW/H5=IV,b R4bd6ɦed \LbjfUM^|,XNDaM ˖ߢ T"%͠Don4N (碾Ҏvye 9hzpeINt-ǝhW۸>"mj]1|1$DT8ʴrJ$;S#'0/#IEOrOw+)h2/&˃n/HƷS<6bQ]l3kh4l=gqI.!a#FF&SDM^(hawNM?al@X#%O@$YwtY7xcmj]V6 d7)xh└mO{!39n `nWe2;&K8q@N> stream 
x<ێdq@%Џ݁oز ɱZ3ZhgGIQUEsxݍri;JG`]ɳ'=yoU"7eASP"6ւZ F}S0ɪ(٤/ ؗIQ<δ }Mgı ҆:3pG";ǑD` {І0L$oʂS<[7`cZ4C g9߬ԗƄ31ћRѷ跁K-ɴcڤI;/KT0v B,0b AL Dyݡ :-(wgf5~bB@0` 0pޙ?*/*]%ך]k0FW;_l)8` 8jV-}f`|d2K#P( L* A4 |>瀗H߯ߖk ڡ`r4̃f=5\,7t!_Aaٽ1\ɰ1`ac|d@Xq+ ˃Sw". 'n/& fڊfC 乐|܆gp0gc*M._YFr7cACnBO}/f'bȁajm`UÍeP8uf7ت0 i慬RWS ~KCӺj8X`8M@.gƦl5)M\Als@ ;Fn Oa֌ɏ`7X 4 ?YBP0-l)<6^39I)r ֤Qd!i.{G2qGGO*26zN->NvxR& 3W l/b7"ko/f_`kR8 Mt pue=Y1k<Ԭ)ǭ |̳v?u(ۆ͸ )1kӎÐ{D*!Q'= V6%$Xs@Gڧr vsۨ!6Rd=2AI;& <?E/Ht}Z (yAmh Smނ͊.BE}PIz ol\TZKXM }z8pL9 &5Z"}JL\ ..՜q, .߿z1trtZ 5߹A.`"h. , VˆK~F߻{SG$ϡD+m bu`/%J HdDm9^F'0!ZVݔmɲqL$r$L_L֍-U3>̕h+Rpȵ.!ۂE^f~NeYv8Ej2mBhR(Rͨf oE/Gxk921J^WڌFNcFR;g-zl,x М\ʭ%֜P_B t0iHF%&A`'ԑIlS<陒Ćc#5{s3;q?.t"D;glƢ52Dw~ b>ÇÒTxRd߬OO/[4f]g߮/ٻڟ/p:J`ۛ1sbG kXAtAau˰n;]?[?]/`›u EfϗaaL'X^T{K]kL"Rg7ap"|XbܾWNJ,c~s6W~_md̟X7)nx$6'B4wyKz/ JL;<$|smrf#v/nSS$#.5\жo@aB~}s$lDn^b#VyA<[UwA  2/%\>{#& ;{Z8C 5ou~Kg? îTYc f^l tS'Ξ1nj3rCVOwRXn^GNlۥ e%e$ zjŦϑw]IĶb~~bmD,~Ld1K?Hb}}VU1*_0ygkc@^u+$R͞14$"1'~<]QZ8 >}3?oy t+N=Lezx2"`O:KrξScE| I4SF(Ϸ ɦ=|%M{2S'CzZ_4C@m{=r )wjkT_ۉMvKMz^a:;ntFw/1|VejDZ0YPfY+Ƨ[DlM9_>@o ƌJ 3p\-<-ԍNkt`=gI:h~8-ְf.73EX7bvi'l}.Y=B2Wʂ(G8Y /lw^H@&$lX= mԐ:\O )J->gw}Tv*fyTmmnβ߆k:"L)==35PvqjVx`10._ ֪F}Fq"kj҆&>16̚KްUx۠(agd'(‡C^w| T1Q~x}me561kknhVl'z*Жi39, *>nPDi ixz?7я^t~N t*DQA";~ۗs Thl<]앏h<>jLқᠺW9W7(I}5اnUUOWUXAoGyXUrEUՒ9]U]:O)D6,j>= >܆ĝ[)'!TG]D P[>GSs5bm>Fϰجig],سy]AG5]3AQDV$څ,>\x87E4(cX6YHOM KlfChr.HIXSJXx94WafxS\> stream x=Ɏ%qs/Mrʋ<H:H!))Z߰?YUUu 7YDƾԢW/|w}}W0 E+^}B-9t%,Ң\{__Zrc]p2>A-\]*C+TeZ]~ 6OGm\>s Ρc7)/Rq Yƃqy17]~F@K&8Zc[ EzS\Ԏ]Tw&|54L؟#,B6caƱ ٯcy̒tԗFÔ)8%-gt4_-Xw;)Nɛ{= ?ࢼ|0 *ty/.&>{`C .17wɯCúw0iA p_Ѳ&>l|Z*۽l'{ܫ]_^ٸ8@K[d F߬>x_CX6$x1;v)&Ꭽv;|f~+lLxrPkxHIh^v5l̄L9:bEvTJ0i^em?ye]9~6*] /Ϊ ^<d  (PhM.'n{`vQ+zaW3|J~BTl(C׸\8 w1aDlhwb\ > 2nm[8[]p=I5zC 8aw3*Hb&@ѹA^%M浲lRG\BN/*$"!x}.WSJ.Yۍ!f r#fA6;!/$ѠLzJ0 y1rbeO1aWg lj| /$6d\+hemC8Xs1Mm/Ai{Lzl,~,XD!|6bX+7!Lv…ho 0QX @6Hc 2JmF"re#U$3nxB !#TB(۸I:FDJ#h@ l^c`z @U*H Qg] yYc#xW Pe[yzK{,&F%~2@ga\Q@T (: G.jLmEB:Xyc/$t^! fsJ |hF>Ւɑ cKt@}?g #~)Pg>eZ+=f.* Ѵ:6+t,e:<4ZgژOi-? Qӫjj~ijb>! 
:8ّCXNklLw.n 9L|@$[n*r@@[?Cgc;`rpRVY_g0lå >8BSL4W/ 夰)4;*B)?^5Q1W NvXӝZ.*ؙ\E+&moxNybgf8=&R%SѯB2^Y͹uv%q. Y*0+뱞uLYv177/èC8ۤ]αCv>UCUmff$Iԧ]w}-X\﷟pũە;W=[Yvƾߟ2On*إ  / w}4ZޏMlt7ef] V@w؀$[rdF2cKSc`QN3l  /̰dDXTD@:m$f)7{r;EDB).E#ZѠ;CL1l:>!$qj=R|TŢߕACp # 5 [Ntҙ5̉PpQӱN㓟ՉUᅨN=2w?O?jf{*ƒ^v홲 '2Y1>bm㎝̼)P ߁ .cy)k.Vz5htI"RaI% c/EpL8IJ%A7Hj1x3bP G ImMnntfĀT@ER}?1FmϘӦtqTзs=UGW!Ats#6`.kBe;k#{`MXHTbemƦH/wBC `ߚefX{Lb*?A%-DZ9f7\-@=]?/I} .ڐ쁟 4QZEt7=q}4 wS7Gp겑T |qK9XjD)c0q?C5UuA,(.!t")K(#1Ä {GYyO~KR5Pn~,Yymu;D..g0eqPa0#i " \p%' \Y0,Ls%|ٓ2mހd2%affƧq5}fȄdDhiP%Kr1Džxw-}'}1C`WE*ltMS.2ijwM tLr9Mj'X78ºdcݑ`ӞޙX`lsV(lonGA0 TS6:&(SU.Hj`e),CW5DW$?Y#7}08 EnkC=yN\Voo|-4/TAeAy$kGޭqm:A]F($gY'^X|2.cG4 fDꍴ|1^z6<^,&V(YF5!@ܥ ,d݈JźB53Ю 5v,|QY˻Z}[ff"6ܫk7^pMʃ\l0K< Ř+5鬞_c 3[JB?! Э΁-`C > ޒ EJN}d$lquu˞r'ҵ5%[ML+_ZvM̹$$`\$]^`'KSwL(3yIvBOUePbSp7Zb ]ö/֛2':O7[̶#? :DRy1d7q2.]mfN# SHR{DvlA ngZI"MX2tmۅK{w%-`MUh؀.FYN&>S܍3:Xc5y%#p;e C=;*:0DaDjy#ϥG˧ޯ 3[mYĭ0350E.9,oe\*Ћ3J}yLyF;u^]u /@ǿ;v0UpRM9Il}v|XukLdZb'c S*jK3Z&uгNyӻ]I{T OrˆQ1VtsmA hP *}fQI$@'MM0i#]QNYnY%LooqS'o&BjA 㴀%E6\61į3X'1So/11pSјaX+hz_fqTmdlIr ,pr˜ RV0~<5X&̻?h\yɋjf.Gܓѱ7[2N_lb@0-9wu9NDX-V(Da:5$KʷGVi3Etmٓ k>fJT4-VYebURQdV9 -m1i$Z,WjwI &xDсz)($12H}#'_}U^Jmyz-P˰ G,'v"j ֤0PFo NZZ3p oYxonN pQ;໠%ӓRBr󐏊V^~ydkC8I[`4AakrCVm4,Q4:2kbs^>slШb6C?6wMk&G* &s6y w ̩V%`ыT=u(P)?{$呴%iz[E?1H#XK*#]'N6wT4kblHUisU+wub_}0,TiYΑSntcp pQs}Z[Cز=cٌ`)m_A#k]dg(k+FqrM5fR>vVHn,ZTPm^ |)>mr>d'wlP,V6cһ^VΛM6q&f~QX2KGYn!啾@VFgkOCw˿*Pr;.‚sU^!7B'r/ Cc)l3='k0i`uZmP!nƗ,`oFxQ6Ay.k{fmyƿ-e'Irx=̐>a&\8~L;ryb$y [ uC;NrMMD}؄y"`~q[ƀ=qd[XNۍ^3ʤ.َ؃Y6VjwVۜyג?fKe#AI]b{\^W2_({N?x &A"S4ZzTii' TBl-M/یymv 6gMm=@\.~Eo H& ,rv ѥ 9 Dluny"Lc }Sa Er/IB5ő+ڢASj̫M{YiHceY:E2LI7O'd]4sS`&K~pYSFjlDA!­T }ݰsGr6}+}r5l3PqNϽG`C=!mↆG۟Ou~Ƒ V07CZΏ}Fbfmm'a{\46j%;!i fNy2Ya[UN81[]_wKـY^Dwb +zfHe(dXㅯx>?_'Mo)qh+b`uy c/}at1Įi`gU|PiGgMoQMr4a6QâM?\f"&%\准Gr>]&[Z`g&ʻޜk9(O>9PFLyC6*{^7~F%n[jr,s LCd KiP2 ]Q˻e9N$5i4{ZHJG-~'5{蓿3,kO f6t-wqɕ+~w#f/LJd-N{KƟz endstream endobj 79 0 obj 6981 endobj 85 0 obj <> stream x][%qMy}CN>/o D69` Qv 
f]̉+lQ;3Z%Ԣ/"L;{{=~/y,Jwbt~-LqzOk)yAkOOcBO?ua&^``Ku7#>MN̖/g/|KŬVM+zeM/56_aɮ`# =$ao[@23`;'~Qw|g0}p-'dʒ@r00N5"S|(R6DSCX/bmytvC"дlO}DZbr&;i +\IUFkxr'&Elmv1n\\C'<9%%ΑTMJ9RstŜ*>3*NDZ`v(:T0}!kT7z Ef.SX s؛rXPĝX]aۉ9L/tõ>ZG Wmצ5 ]b^ޛ)R{sn{]st@Iťqd}კ$`1f~ViEmض}՟ƯvvrqotNK(xc˖3m=#b7U| dxUxNsg$UxriحZ;eBPl6.TkA9\׬V|HDi) 2,fXS!r}|EƿSB&JTA Yg|C75ZB x8z?*S5Wnr!<"b |30_x_z7J4"xÁ2H2E=ZG 4B.a(&1!Aj€Q-g'M1H!hC判DCg u'Lf(ˍ.q 8׆% HE!bĞEU;+4Kց{&c1 ,ENJu2Aa?6*9h LHh `s، TdHBBzhwjFY.-Lj*7Ӂ=򾨎9k:Js;W-3#2خSYִ$ k[w><%(ë&2$59GғT+{sMZ=At9+m<,Jx-0f: ى.Ԋ?AmAmWϲ.a:g%3 C7jgSv:i+*! aPLFem V5ġVRqr!},s[y_x=+CGI_`mW~6e%vf@fQ 6/[q4Oun}7ص}\!InIxrNNaUjoaNIrۼ'`v!\fg3i1XX3g 4M*wPDv}Eƞ_IH*~&'560I׳̋^NF:,aPju-Yɸ="cm@j#lPZ'M~(?x'kuc %oHi䄒Neh5k!5l9S\kNbn9|JZLWE)Kfe"$ A\{쯉 킖=-69@JElـc2ME6b/&X%^?( Sr\t$Q% \&Z`wr= Ek@50e!VEאc>&tj \T$%*Z.e,&, tȫ4h<۴vm J=_,*‚WMN]O?LpFZ\ldi@i  $;x.~>Lٽ)`{\/q%O~ y̤ܽVᡘB4Ԑ-$D.;q1I7WAmgs2E hbNuvBNLOI 9C}`}|"1}bĻ0: RC) >mA*"A</N/ĹTdeKV*vuZQ6:0-ǃ:W,[+4\떲4%CT&gl8iGL`:[@Î&1)XV)Ʊ=.AH "cTa3mHM\Q.\׶1.#$ٔJA8~ȁ(XoV}(o>d>]V4}ȫr3c[FՃYfQොI7x{`,MDʼcsY>2?;b-l &ZoD!xic)P=`c48v=7lÐc-j/z=t]R2 0Malw:ܶ!3@qUyZ4hԺweJ]]&L v>k`֊(j_kP'Jl/MbAX]hj[_=GCƁ^KCuAC`j0I0sbQak.'+d{ ‹/Z'KrFn@lO֣)UNH{cmj -L\Jj7!,]ͶAG[n ƻ'c o XxwFXE Aa|^*~^L>Rer SO:$.WߔG#ܓkfQ A>5$4$ _5YR-&n:9pt>WZ/WVTOkـ[+Ob7TGգ=X6:LdJNT J6L0D<4wdAg J-'ht;-uEt8;lYmK{# !}= > stream x=ɲq »y^Ӫ}OD9 M!|IB ̬߰^zzp~յdwjw Ͼw=8}P5_Cc>?O\9?kEwxl9>V'-S9=sГ1qܼ ڜ2*!{Um˓]7ޱ hy@+wcl Loj7㯞Jl">w_KLp/e?"`ޜNƜ^ߵF&ꔫeb<3k6t⦆֥BFfϻpy0,c={B`s:q "P!6aK(0JÉp&>1+3^cO+j8O8 ٘3Lb~}o[ƻ5pi0js0@؝>C4H)=2vJ>~w*ȧ-ec~ /j7N@ibuooL a+]ܼ/s+UӯskٓO6w:VE"x``"sz3hA 6Ir5\ɘQB,3[(LgXV)xz3[-$OLBfO64VbH \YQC&x&f˕h*T{(`VSf+NF10z?Sr0e1~a98T3(e^;P@bS* x?QaR> u d@,k^0&qY:yn(~] DoAx;Jy$U5T2|0ľ. 
?n!u޴84LleHafMN[`C05\ek_@bw&lkBP;0Mu WZ7w*zqYbڙٲ 6qb (MX  B̚UgYᛐ^6j#4z(mf71֭ 1c\g,L7o*WpxG  d808g,Mp|)X@X/޸f =Yа@{]LOόFT';#7J|R0h r'QDrA&r[ 37![JL?%jKꊬ۴9#TMˬ`WsWű~J6mE pm$a(hS}GqԠ`\wcϫEU;#gL|[%hppLLf)ąfԡY&pح9@r'dE6pqAμ'(ۆ};AV~CSvJgڍ=IIeP1p7xP\6>FmXi,YEj;uQt$ r➽7PetKI{diatiuG8/ɞaKbƒt quz]Tcn\sʮ] Խ%B@nG9ݦeT-S+օ_b,Ң3ϗ13/c1wŃfPbN1{4ˎK%\P2:k'NXr5D鋖˔ُ8`Tmt7$֟Rdޞ~#ɯ.2?[0w6wq+\ֈnr(6)Q+J2rm&\`فnBudxDr"v\'cQܪvZ rQ[9ϱ L^vmET #=U4uhvɟ열bLХZfy$. 2 dZt 4%ϾؔeXVH%|]2uDe.;l%$X.r:̷J%睋nDqk躊pJ `⛥҅|u@arbM{N i҂*lruB| jHm5TCP4yoVc˨VS* ].*v;PRS~* M ^x׾ )V](_..QU͌oGf']Nʕ~@֊{2:O L 0,{MhHJC3yb.3EM[<$ sQ5[f㸻ލFVw W*6ڼ{QfwM$*LXJ9-v\E\u.#ًNUF KRBkOe b/%Aw7ͬU=vImO?~XPHvSܢ5bPjLfǥ h; >fKMsfԡ!b {;k-;]t`_yb;PDžXyZCwmv]Cn$flsn4&;7>:9xAp?Do?I+d]zȰ8ਃ f-oPlŰZ4>]r8Ս BO*PX~]uDO9fZ&vCMؙ2&i>oj rfu%º6dr{XfY`FaLcU.)ݏH`e8&K1%uj 6zmE5ems|#>\RG8hUZIk ϒuY_]M۟ʙo>|fJkW֌,X}НQ(Q,w7<*%쨧Ii nGős_oLn1 6*ZV m_4ܧ F>gJ@vFʢ,E=oO~ʆOF2,Ybtlj}vmkZL}tO԰J_lx)o`W6uV3c_-g@xWcO4yL fE`]-*q{֗Sk5/:H/R:V֙+L֗O .6hM4$wz06wh4U#¡F3N7[,<[Ye޺6Yo׷-کUÍ o6`y3B *x9KFwɡ=KSkV DhJo)V@iqiV} 4SLѭ4[ecBe)&e,;r-8V m\_5csSO:iUԟ D6MNm9(H/${? AQf=xyEDXӀ+=P .~{09Ht/CRZы/Yzp?Iӻ5'l ǃ*JNZ0f:?-ݰ*ˡݞ_ʩ.:0'੺Rw@5@/^{eL^x3|l5t]faU&6QK#7>endstream endobj 91 0 obj 6988 endobj 95 0 obj <> stream x]Iqo/o˜vnMt%"샤E4C@H ^gfUwg``3x@\}{}ooWwWϾ}W/\4HWyw*(^yVVxR1BkP߫%`..|9/:?]@K]rO>uӿ{cLsY>&V'7X69 kQY齂Ե9?6mVn*$rl%=&ؒ/ޛ|rۇwu]:D֝g]d֞Z M 9W lm>`^ҧAJڙ.? 
?36xOlktO;c0t%Y#i1)h*xouPYOeA:3n[2fe֑[p23| !t>_#l醝'.N>KX~3CDDV ݫl=A캝W8]X۲`ńRݵf?/b=)Ν Qz ,.AS mYjY_0 4Gzy[ʠ&{U%$h;Ju)Ќc%o UR;ʇ䨴&4'HA^`)<6` DePJAN =m+#&v_-sa:T)^߷~c2ˤ k P1h>^0מF&|@Y]:z`hu@ٱ#<$R!s~ɽ~_n;XFD:BGZL(M5;OZn|h4釀@N#WYN|zؓgGIă{WFp򦬃zVf %[W7|%3,8XgND .-h`oۭm컢]PzO$*H#:ɘ7 :Ms0;D挄Om@eU5U)ݺj4PY vSBU(@@=ApN54JR+M\( f´uOf[5;.+JHuq@E>G a%vc)uQ&hus:)uOEA03T*,5]0q3/>Z14\(04Fjc0%vt?CKJj<6}Y1 q"\+ Fv !@|mB1Ce8ݦ΂gfLaQvǾ٤(G)|YաGÁ,h cm=UY}{׀~Uwy߸]859{ˌ)dol $3ߍFm9=1Jʌ(5y_wBG[b[h) CDܠ1dq\E1-c!gRdsz1xQ>QLmQd ## Le2jCn#T=ɕI; K桦6ףj ߵvY>r;8+z+`c~b<-`Qu4̽t_&ՉB*5ZoYe]U<0C|E 1٬/&j1/b#iM eQPzfHcK[ilm3#_͕Sd\KA,]˟v5B 6nw%Urx['Q>}]66XpւCWdy1qWV*ǡ'6xI#J1\:9_Gӑ!^6:]ux6xZ l=. "S[F΍s$(gy}.7nqK{Ndž 69w_l'-̖71gQkmey+@5a1?L!͒Y vːJqkCk"A:;6i@%awi %KpUHa '>C~U&l?O6b_-,]84Gnfߗ~Cȍh]87- ($E挄E*si'W.S9Q`mq`|;YARnY }Wf,Yx_*0EP7(u3'[$"awͥ]n۫/Rb*c+F{|뻱Ra@ix9tE' >B#5A;9ޝcy*#k#)ԙJ& zj+05X40 gؽާ۠d$UD" ܠ6rOg)~i<˱jw8_|f?c5BNUB)s8u:4Hq*xpkG\|V[|S:!=Ip @m"L֪-Uܧ0,y ^WR~XCuBb+ .]'Aa ʩM1ZD 榁~~IOG#7XnsƳ<6Lɞ'ye ZZ;%xL` eaKF G88{sYqͥgN8yݼ/!Kcn\YM F4P6Na}\|W]LƯJMpX_SAټNOIycO)nՕv(M-]3KN[ &g_\ϻ-{]B\C`~}ф}3*o  DhOz׬Dǁ( pV69WM0bD) 6+6҅ChM0{I0q]vam%T,HR ]Y\K`uj ` 3V] lzvYb^N Qpq髁϶!Or鴻Y^Ά`B|$BpGso>0a6.X:7 ͺ,Qf8b69gCQ8ḫbX2L<8x9 +mp`dX m\l.H/ec25 6 "}?~?#.&q|EzZ;dm۷!ꁙcݗμ.f6wpEM]TFk~{k5V ۱`DΥ%4G\a˓1L~]v0kR>tڡtQzDM@tѥ,-KTsZTDԺ3h4J9wu2ڴ܇DLa&h9X`jKƤ*Ѣ0fa7+ $b*,e" /{m4G̼ IÇoE=;X4rLs@Jd$2MżNMcm?*MA]8h Aw-TC8uIN4a¯}ڼe-^Fg]=@mQI'&Y\h1e7pTp`fh&b2O/'t' : 01(bG0?Ex>Ng9 K9QOR]1qPamk[iK{n5)jg%Yh[!=FI]T{7%Kބ!ZLiYT =GUDcR[G8u$P5r# 1}l@?Q ӗɂԈVE&9cʓD6*Nkao[k/t7~X WFlFKi"2epןLǍg (jnrY+;?#%^IY=m)Σ V53K٨?,? 
#i`lRXVӀGG>.M4JF1_PFrÑ*jC-E/|#r]"H?Btj-QQ퇘}i`nװN@A`F"ɶ=솷zVUqw(t^[d8XY {dfWEDž JqOŠؓB-$~n<'1&dv\7/}z=86ȭX:EaIJ)E*x?ղEm!{LLvn(lwW6YǑ~/ Y0 zW}E雽ՃKR3V\֙CiiS:Z=^h|qQrn% 3{j-ԱvOeZ|m}-wU}6Y֓U) !&LuWE!mIv  o=⎳~8nz=ʻor(TSG5aAPLX>v [{܇zK6^f-񳃹- '=n.8F-VJ^RB~p@_i fyܟN i 7 1(!PR{&ew5n!דZԛE4^n= j셈yyԁϺ$cm{ߛEP OcIEX%ںpи`$-1l;e=zC"& a5#Jxͳ`~endstream endobj 96 0 obj 6794 endobj 100 0 obj <> stream x=˲mUYr_;[)";QhRqHHJпU7gǜsKpӍF 47j7 Կ|>7現?o__Ţ 7Ozu\f{MTiQ.:|R#A/Fog{xTe۳ntQxN>Zݿ£9$s;4C Bw6./1ӓ[U0襁M7ޱM X0oA" @?IP,gâ9o͓L %%[%>dz v֝El|:}U̧pZބY}kF5M201"2uY2KBi2W=og&xe+i1IEj2,&|¢zW ; }TOE|Z#`+z"wecޖp<΁Aĭ׽P{aX@ub3ٷ"3&@N2o` Zl >?wM`cRn(VsDHJj8RF _P%.&Le J@ - ck+30O(>Sih0.4Va mGBȈ0a2(6lfFcAHo'1xF=(!ҁ)aP3!_1~(L*dցUdͺZ@ IJ$va97g ZDW$ ,cʁzcc^WIf<9J+ gT61E9v?T (SˌV)vߐy :C0WqA2|^jL_ *V6-gQґYHv s fƢ<eCe8e$C^|c 6U2h|;n[eh\"vGk!}6iB8BnqS]הcjMy:qr&U('S6IB `2e}Yp@1dV~=L= = 9C}<1>"*+1ՙ+lIKtYO|;Y%!?{h5"qFٜ+(԰Uz) ``>*@^xv>(bAllT<>g.v9U?޶\UR[;)/`d68BRw+ c2,1-%I}8>qF mSATWW[EA:%8oH vQ6 4psXRn—|Jw_xU{|ѽX2*=2ƾ)[Pqiap<^LJV#T;oi fV T'#`o@l,ـBv?LM5SA['{/ eـ<:,+tnQrbJ$BxX:pf PF6HK[  `!I 8lf=Yk8&VH{"fD93[jyO[x2_mUz[ye/u@g~g;ґ(.(m3 eޓܞ艛9NTB~Gykc3m,noz 6`mE 'Hk6Ycru!M*cH< SDqBXV{B0E[hIfPǞڄcjĻ U/%;Q3p1uc?0z*W胄 o؁~Ȕ+XBqhP5ZzV&{>+:qȩk_$QoI![be€\3cy|WI[Xa%MN3}qIn< &m:Hr.3N2r?ÿCءԇ+LxN57y޻8ۦ -yDؾs7;[ @L,4T}RF[n-ݶ9\`i5vhGVR.Z>㳱qaB~߂xf:ݫ8sWl %b@ŕpFE[Tgenl-}e =goLjA4(Vb>aB\)ӫcws@6NzDX] $*Gֲb <Ғ#fgA+wCV6O0a. 
P1O9ieIK`-\6:Y3n`~,)"x0V@#JR(#_b]9}h)PW`ACtIysAM`fx+B?WZ~3˜g՘)h#Gz0iqX{= ;7|x\A_Ga8 !O'5P6]uqIB3wG\\]Y[4R2u!u1u3:61qZCs`* ۹)l4k!uqO\L',֢͂NT3zEY4WO IӖeڥBqZ{^bI\5ʻӮ48Jɘxli^P|n{"iv\ C;.[طZbPt |LwyM% Ҍ̣ll$D+z`$ZsƇ5'cX z^"\-H ޺Y$#2Do9Q5CwBTc6l%H ޱgX,ŀtGAweJ(8@ IO Bcy%4qZB'q`ʫ4+ge -)86sqlyRzpi@.RIN&Vw%FCt{ 3dg{,ZIWfQMF >c) p!bGUD&XVݖi7w$ﶦ&nٞf\GN;*ajIf7u"ܤ&׋y5Jk ]kKb;fi {S&"/_NlP&J.&JVv7*ƦU|=Un6@L\nTm bш,ǡN)qo:meȚW/(fetU~ Nfpbx|GIsz Wz<<J3:kl9g.+G%'}We9{2\l XEވt;sKA<頥i әG μ-PF;Xz0CAdn12)\HYaS8u vуܺI S^[=<`YQc#jPIgaNȌq)cDZ ,ӱqP15Xb{+mшhؤGҙ7K^}=^|ʉ9]W^:H7 %ג@o[o- QMz<5y*&;6❠R-[vJu벱%@{$l/s<-E  ]H mH`*Ҭph%' ;ĖYm0M"XA%~10yHNO&Bnj8꼞RbXamᯎ`C\L33d 7PN0.vPh Ue[}"5}8?3ߕ9|׵cPpެPa: Ze=d is@ 9R/kZ'VїU'/ 6LlC4a>]eW/Z|T8/w2i;n]uC]VG+k+=3v7+8-ov|^ QpNl/ƒkvBa3.^XO`ZA U6 }"'qYֱ`Z_3g`j_}gmŠgF5x 𫬽1vāF%KȈ٦6ѷ[.lW8eOInM=D&,T+>wEE{8WG[ IE|{TqwJC@8B'*CsPw9K_YuI]M"zO 1Oy 5je:~1KF'I7> d\/le.ߛF<=M*hsƥK& ͏:W P^(1{oE0qbI63!O[QgsGX) =@p Φp{?5]VC~x &(!Ű,uKG _R b;O(K1 ׂpeTdS#Щ?N<c-j`^proCv"T܏:rU*Lۓ^1cn\,2ZȇnAU(-*G  ]swh GJH?5`Ɣ#!b8$;l>-EL(f`y^d>E.MkUomv3m4LM;cba}ݭ;W|LjσV=0/FGRf>_nV S|/^OPhǢQ)2.b3tLk$YռD$XdkG o:2yº>A}ѿ:endstream endobj 101 0 obj 6062 endobj 105 0 obj <> stream xU1O1 xTlS%:PKMrQyg ׭78])G O xr*P$pÒP 5|G0 3ż&~o#Om!<&/B*F'*zO~"e]ŬXNq,Le1ixD=i^Y >tl% nfL>[]Fendstream endobj 106 0 obj 215 endobj 110 0 obj <> stream x=َq+)}O^YlȀW$?xHኢYUyAc`:+322#A-狏wxxto~5j >du9,6ǻyiwҳK>gt7./Z_ÿTHSOK~s7;p!gws֘t|A?0{>꾌J}#>v($ b"Nnn0D哘'B|w*:鐗 gA-1G5A26{3-jq.XQh# pޛ p@4Q*u9zڠ1!8`7u5ue-,bC~1iU#UJCS%>#-2LxWxb0le>lÄDrtoG+`=l M'  ) w'$(c~x \8A;L? 
Z$؜* \pq[m@|=i rw@<)E{E67 DBZB ޾K%'V6rmh_vPrAkc.h`8N`!ӒB\P8D*Rg !,O6S*enкMb$O/Z31*-g̓]7#ےmn^tVGĀd waALfb u#Q hh ۂoʶGj_쬼Mvb-LYz`@pc=e+#fL3jRԌwWv,ؔmu`[н6:yIA٫WV6,uo5ȴ͙Hp8 cLZ|8^$ė;' Wk`s o·!.ܶpeeݮΑmfʈMgHgF~aa1AR9xf٩N }Zv8m9'Zy\XoF xT7˱lFؘP]Y6PbUu Qt4DĬLYRql1{s]#L#_Mp>M|QAIƋҕRsT8WU2CVCQ!Kd#dcj^`ϹL>@3tl>lOJ;{sCٮ|ld u27S^4up|G+; dK] (GVYǸNk.De(KZF`HyoN1D`I4tĬlRZ:,`'~+_5^)` i 7&,- 81T]& +={' hX iǕl1 CGl` 2DC`l0INJt +`.p® B U0s[صY+sTk QUyXUX%™@+CS28cJiڝXՌR?/ h΀lsMpQ4*pCeEOwaja…ͷauHSdi񶞢ꢾgرJUQdP OXABG,T9-MGMaT ؼV Lg&"[O}M74gW5!wƀ?8,hmX=E#%궖I UIq^c/TJfy36w՛#k9]Lt~3L0 \e{.CqF2lbJgspTx}ŜZZQ,hB·^P_ĭJMuՙm+ ܚZ)?l2wdZqټKo%ô:e]0Jux9Eώ[ j>L}E3h8+0YSxx'p@-'vnϟ;d߼ 7n0W8¢6B ,FrdԊHK,/6xZd'D,"w>}uM^͏{6&Ns 656Y!`դ)a|\[nf"`"X2ݼ먝r~'_Mr~N_,Anc1&E G9,m€alȳp;O_*x1&]FT2˓ ۫d^ 1zDSêUH\vgٮ~)^3q l֞)gME`K(OݿdYe֋Bcゔ&-6.O¤hw `\[YpV {ť#k,`wnS[SLo &6QeVUE"N#H)RbNmM.*yF/=F`9a-M+}MKScبxq*L7cEv .W @jNG-!|ڴ긳Itw=u^P 55"~o,X˻"_"H5n`|=[EUXWxۀeutfANF+ld_.#Sd6Q2{nz0mJlOK` M4RStpz.4YqIlPޏ}- ^TZn)>wK1~W͉]&=sK4ٹkH~Ag>wA攭%wt9BaF W>~ pfV.% ˭095Rp8=K7{ǡ+.+}e{T1-4,ٚŮi5q~]p7\!mqs YtZSyM55w\HJIX(Yq{7<}Za2ZEXkRiwmwWsX4ܳƦmEMM5Mc5c on~>3nYeԨg&Kj o.1V~߯4fAjUr*GVr* Eî[߮EѡHapBE^GAJ$ȴ\g}M%*w4^"`ْ3\̮k9=d+ht5)EȆGW\Sb(I_<ȳIK'lo.Py1>q𫇻˝:.+u$%ݿO t^7w"Z)}gDtюnCg K!=ť{ݑRV  )!*xk0a-!q /Vql-! Zx_]`Z8gٟt*q?_jP^NߖT0vw9i#E*ȰgHt<oldpơE̤S"6/q(rs%vuW;Cժ}嫂?0 _d%>K Ėw̯W3TFsmQA&AԦ3Γ"6w)T\\0H6smׄbAݦma`s>3k _"[8N1D-\ņ_̮B&~Xendstream endobj 111 0 obj 6139 endobj 115 0 obj <> stream x=َ$q+P)}`ҖlR-RkA2`j3vFa#"*#%[Όʬ"^}?WWCg0 rU~u}LXt r}cQ%N{i?ԟ5K;MxCF +-ayB8&0~1Nx3QדH8?@_!B}RoE-VhHnֈ _,Fyd"4쑩f~xR&  q]o $B@tik5#_h<1-g~N6|(pqd33md 6pAA ӯLf2 m)b 4ԍu* h !:fvud/LÀ ^Vl9cSV@PϖF8=;Ca@j|0d)k ـye0WSyheX@9Tk*o kQ[wyopnap1LPk#VMTl?c~JO2?B*Wbr0?H .8Ŵhִv~(0?Rgve"'a:@K5R~dwEz6:~eeVΐR`a*Bஙp?/C $sCv"}#⫽Lo;Usgc qWE.L"/K(NJ]1,y.l=lo,rLjq'5ӆ8րUnf;-lp q܋kl{7E;`X "hB^,D}oJ,΁ n tb I,tz1ARHʉiGV6kTeKI5Mer¼WL XO֘x3m`,&z3(3g@P$rmypS{n!ĤQ@} @زg =SP2C~m!R=t5 r_oS6Q#u:ZKjWA?,!Yy{?"E h@4MQUHi.3f u.*u!yUқ2Wf/,x 貕hE? 
> &uF\O%`9~gc5iQȽ &+CKv-Ŗ(3r4 dx9%m ӎg[($[*Y5J6A;Hl!!ژ)aŌfeZ8b@.P4'Jeў|>>ygB48HKӝf #g s+|~/H+?%v&xB0a:" I7>jL{-R%]RZV57uQJ ߂;cHVSRMMdL. pXw$:fEnY:a0/8t:~?˺?<<A X,$?9Rd&V/7>zD5,2pmԫ 8lKO upL7DpD z_i4ѓK>FKc ꀐ,F;Qϵyrvvgnmqr~8x4? ^NeN|;ڡS?(o4a)Ln/4P36h v !m<s@YĉOp_{9Bw-Ic+ts?-YTnA)Cv*|_,R8d$/^x R7؄غR8˓]Nh$ob›%@U}U"W$ett]_c>%!v;3҇'H9+w#ڋ&۔ m[UZ… K['XX~uuD*/:kycaRƉa'7+}&Y)fVuLZ:j*OѰAV؞IxJy㶔 [Y4ݔ'{,z JkKvDkR/ݨi-HV^{ǫ :ΞIӸߚ)+|; vAVUΔU j)~7>_1?vQ3kӚȺ52v%-Y0ZL@b Bo@x*9 f !ixkm >x.#bwra@J %K*c`ֵ@IأjK]w6_XyCDOM܅}q`^/hX&te6>&h0O oI? ٻt>0,lƺW3`RK`JdL{:a %wRI#cKw]N.kK1\ Q%VמHze÷ԑXTGjQ8Wui ?r"} 10"SCZ\V>;)ly+m2#ԽauǠ粹 cJ/RzvfG72)P>rT}yg(q|2w 2LHUZPс1ˢ9C߂(>40;x!b!3t\ˀw8g"^QXsm^+fJ?L<SH\4gdJcCl"{fڡylR&Piǝ#fp5 3s$5- `8ey杶merٯtT 3!g>IڭEح,*+ȬLU-6yYq.MaD{j4e41GĠ;08%I#hF1 z^ ~yvtK|Ai-齞>*i-BS], r#VRql0xp <`,h||֘R=;⽝ VWV /L7:.XϽMT? }j.;ݥmbFu?Vų@wV CM\8VL5Զ>ySZ.վ82Ic>P\ a $Jp%5Q͊:{SYpSHFj״o>@ʃk+ Exda!1]Y  ZVPߧ$o)&T+}q g'%Iݒ`::%0AfK'"*~E4 ]p Fz&'^$(t^-2iy?u~H{h+]xq*㥛xl6fI)iu}#0"O$PݡI#~m5WV׃-h Ͼz[?0t9a|jFvrn F jQtBsE'XȤn| 2bI=2Xj-X(h`Azms74v^Fi rޭ :u)dž]M8QXBe.Ʒ9iGsycyy;mb}#3Q[/԰Sh ynx' NBޥ۳X̝gUbH;+HDܶ|֭IX懁5#kǴg_Aa+}3 ޏN?#(F5Zg΅tVVh,rDD3"?qtSG˱⸧'e> stream x\Y\u_1on"0"ڲ@ejtk)vNjuhJ=SǙѻ/ffw~c=V*{N[\aT"4 G~K3,W< g4E6Q_{7[U+{7$v3K MawN_,q.~_eį4ȰaFӑ߄! 
oQz.@ͥ׼*yCJ41{D]8Ä\VR5M.A`=̶HdzbIv?^v$#ZI"F7Ie7H,<v!&9 Y,m$'(D]:M"~[a_4}̋YNY|[,ȹ^l@K ܭLx߮}ڸ[m0o"/߳Q|9Ϛ+jOpgcCX@Yt=&DEF$| #͢١70^51j5]Iox%d`h?( agxYx!2u''fVxA!.[Hdsl KeRQ~Dpo{ .MKFW'FƵf-evB5^aZ"Vr3,#B+墝= i&|njfv}i}Űo _U1~;>b J4#_'`ZLYfG?(h횔!7QScUBeM>v,3?6fqR ˬ"G^4/J+p_N{\{N uͪ {YgzQן=dŽ8_ဢ\Nz\8UN;J䐺HDUL0tj{bI..nέo[6/Ц~z ܛ~:m0,{nQtbu('E4 >p0Bh T]X7Z}v8Dzڱ/w75O>PY_ ŕ~"RT^to7ugґ+Ȑ0*:0Byp}ș,oft ^ hu/.1( 4[{t[bP^;Xb;p3rx,_SP`Z @E%wP5OUAjR&|\cV{^ko?'=gWڻa>oRBb>gx4B6gAw^|YY)k]%—Ꮼ6JB:^X}kS:p Mhcy)R9Ɛ+7ihi!<-u<]}uf1_"p~-Vb@},Eg&&-yPe.`yj͉rbr,<>CZ'A-*0= 끭=J| ҇,p\(&!%Bl $oS |8c)%z|1a/uuzD(sqҔ t`J60d{AWIVBhE%%>)=5Y#^,X8j*wĽ \%28$f ;6`/$v[0~4SC"qZ)f0֞ +HcHȎ!S*/N1$?h؞˞|w}x~h4`b8486 LM1TU, 8Z؊CS?!wl2F.LnG8@z!܀CgJ&zy0pzv iT 0bVh= C")'QMQhCt;fpA,8rQpHFf2HVA'$;LM fc %nu.Y)`( h{-r[g\-2~^͸Ou1Vïw6tO~P>+g9 ŽBWèo:oürwZ^U+Q VqLiou@clfQNҤk}T/?*0^L :C4_[MCM9_<0c6& vmf/,W`?nqK;R0?ʃ'cw*\Ruߌ G'|7lr~aLEJ_ngݗJPs_n$ޗM[}2'76rI0Mfpx0~~T~,a~2(C|3Q:<C~X \y5駋 x3>\\kz+V r.TH~enyޫY;JFl,X6$`<@!SH}Q翱ueD"bntdzWf_=+%үwvaxE'Lendstream endobj 121 0 obj 4565 endobj 125 0 obj <> stream x\Ko]f_MAt;ݝxғN HfXeDܒS|\ɶ4 1/YXU$,t~¿œۃO'?=ǃ7TaԐxv`[rOlQRriw&}Zb{N2fB{xg ,L [ʿ><+vhs/W?rubw7ᆪ*;mPnTajiCuJ+JY2v$~P_Z!9uobsğaJS.J+/̨8]z#0≮bZ" |x#RN'/C#Õvã%V݋Vc_ ?Tn>*+KR: ކOuqA!'ٻPc`)窡u0Z80ܯ5z+Lz𦊜י*=U4u0ҋ#efyq!0>ge!(!v(H)-36͍E:>!9 D_D%ՑEP5]B RB~8\#IÅ30X1OAЬpj4uú/J`N [ 'Ӣ1D0ࠍ_Y1*7#/EJ43j시?Y'Y, u6#u0y~BdVMmZ5#qn赗dl ܼPw6 :,*Z`z]¨PyRnjy^+|<$-VS -<#^d k[֑(ZG9Fr0,X j߆ pā'0Zސk"3tKicFleuꂶ\(ub/?=xn̟}gva-~wS6:RYg30a#6ìoWyɹ$h4*0u*Z|J2 0g;Q%oԝ723;ip7_Վ|Z$12qk%4Ғ_Hhh- )e4z_vy#o~#ci(?`H ]Ϸ›4~tEӞBlvYK/g<q?^֧kٛ|x~<,(T~іDoײea.  
-S7bX ]]"#q*Je~U%WGV?J `i RAue 4U*>3]U˟zp^^sO|ڳKG4rg|Ko%|чJliyU>h0t"L3[o*}DKG7CUOԇ\7f%n׬MO+ Da{Ϫ!EH" @j:R"pX<]+JT1$ei= +~fԸTa}mhh $2KN܈}; A{ ܢڿ p+a֋TY1 q;$cB_=$ 7f[oLq46< L/¸(v:r-gh#A )&/"&|ZoLXi+i GoPs 8j;"H /= XL sFc)Tj~+kx1X vY@P:=vݛUڴm`PMŽo7kvbk ö.W(a,ۈS?[?Y?@IԶ+=kPIu @|!Yp4'֭`7SU@8꼎=nzazU|O8dZCjJ >l5"vfmUSoAM5䀻hXJ=j -C*IXS2P泩s?z|x~CpqF4?HNȍzc?9oYY^~Tͽؐ7VJDP| 16 kNNl@}hg[l̐`f-)290 Lrqgsx1:_OfXt\K `3V7hӇ(&f0Nd\7Q-Soے-Zrx <^t*\*Mx4Yo|;eƛne~Ln}7K+vcwGX;{0CՏ;{0{=rP wPx*sj]{ HOTS%ΡRCLB&:D 0LYTvˊ)WSy%W)}ӍfcP.\WX %VO;WBQ4\2pf?AWC.*+Xmi.S2oU@d{!Rn~D.z()XHk_/ 6WHwmbF8g\:֘A3qݒpa>.eAJS֢/@},RF,%ru Q}NdOꕖ''= ϒ'Ad (rp[o[c_"M\$jb _?Be0ho{᝜,"h.ZɚAy*'GAa7[Bć=r z)5Ǭ"Y י r2TtVQJܟxxmFeIGG sɜPۺɜ$ d_0 Zls/aW8 ;,-d-&&ul#x ~u#dlfʉjĉj#Y. S2_!^Eb;o?E_n/o0L,I@QTSt\/xː4<ۈDC֐5Լ>pоWޢ>vX.,J.4~"?xĕ{ԦޒXHX~f 5g w|jLR$zBZF C 58G,L>l ]')M8pKendstream endobj 126 0 obj 4766 endobj 130 0 obj <> stream x]Yo%u7  NKYQ Ű=<КE4ѐcE#9Kuy9 /1PY]YsN/բ/xB]B/}}3hŢ ^^%`^:_nﲺ*-ʥg/6k'˯Xs^l}v=}YoRҧk\9~](C:}d響^B6pum^}+6iˋw$|{N9u=}ԉ.9mdzK&8-f}]hm`9q{N&b_sh|؟qPm ;zK{Nɛ r[x4;x -Ew܅^W߽ZE5jOW2$F!BOtR"8cv<ϐ}cOM:*.(4o쌏+6L4='7KЕ) 74"QOW ֆmN_n{f¦sq[9=@ uflmk?k M',ol+αy+#h6 c=s?Z0EZ`xI?ʘg6-[BiK:J(ϸ5u~A1TbЫM%f+ML\w;AM≌*cFZc>6kwU %Io-S9dߔ!a.YAgY~޲=t4$W-j%(GK8 L*|NU3fv0Qp7,KрpĴxֵE=P>: Dr3tb\zن=玳}cŴ7'G'b0V W{b;4'R,/;f#`l$*D֮!F2@ȦA~P&:Df`vI+s`AWhP#؃_cOeQbemlIvb=!~h|@D 1pf6N 2GJ|1důd`iS@>2j-5ȃ1KƽlgZ2Mp'+NTg'Y;=4?bC n ЍQ]j ̳0\\`܃Vk3Ơu?%sD{a_&Zqٱo Vsts+\hX8rK _(=6 6PhQUWȭB_&hKqb]Bu6+5OXtD{c/MPe&n@KH}|AcMB jucgm:PV+i'jHʱTm*3QB:N3YfC< *-ح:aC]v @hK6ue[NdS' ,I17R7MR֎]&6 ~1;@%4MۂJP \0:9LÜ$ޞ|(&jg,!#w3pK`hgAF(Щ$&z%w,2K)#rF8IfTG(ڢ%$]ՠ̶Q1*VQm& nj)"x`±-lIprU*4Vvw]%8p;F֚f6[qztDbtakRC&dV)֚`"ldzZ+ ғq ,?_m`^vul̦"vb7TiH*D-At Mu*t|@ZX<0wJVٰI‚k58ЁG>9`Y' x.ƭm[C#;n J>zg5£}T `k?;ܘC`»hk6-' ^Fa U|QvvXx(9gqoL .jX&>hC380 X9d$bg+N,qc$&m C>.a n-SuՠiR! סmjt`Kq?"R'Pq$ pü.jA2j̱ɲJF*o}6[%Af@΍-PY0^6sp&&UNkx^:9O1`&L6WZB % 8RQ=ÊE >% 4YhL Id9դ`MBƓ:sm$RΣZ_czvEf < d4OfL~2j? 
Ͱ[4#PO9'SX.\>1Ld{S:)b}ߍt+a&@^۴L ~6ڳpJAdz5{0Cɮ1`wlEܥJW}13й_r)֞Uc״x.Gy/r| ^5M "L``Mq*<3yw`5=rh`H_Wo~w2fq9)N‡`x$?k$pyk#NzDE6_"E ou[:'@ [gdlcf^V 6TTha !3N6I'e;b?#*УtնB|/j<0iPƱrY+8+&p+T94<ʴvu#gEP bJBmtAYo~ ϐ,=g. T+YoiLLyU_)|aV2٧ 82U#a, !d Js:䂀pB*7tuxBS.wpuk 4ऐM)nJ !f^C^d|f@a3jqk/I[R )}Bߕ@I8"Bs.q㗓ZBajj^{IrRjJ'Uzqٷ`ƭбOZ,6 w8r>տ:ю t|M#ے &ц&*f􅣉 K10J5hm苪ڌn|!Q Z "7bf-)Ijo~SUD-l" TȂX= ia.~Bf?'$4DJxV)#PY<=8~|(^'.j,;NHq?6K'Sϊ jz_y(sAYA **>EKHY0+!Qj?]j=|Ӟ(ذUlvMJ%mMs d-;IE;GpFh5_0ti1t&m }ӻp"qBދ;y.xRһKߐ S,w`4;4>wuLdPں=k>|EYt¹u1R9cIHNM)(Lh`5{{)/vG0u[}).QEW@Dž!+"-\D!k|RE[ϳPqNcbƵ*"4.o-^+9K` č\]5xkkͦ(Ӵ jnmK(bF2cri!}NYpDdvcX1X @*KANǙu@ڀYV(_C_Lo+.8<6m,O"U4!C'f37Paݙn?˞,1RրL{u& RuH_o }5| N砈",棢fv:wu%(Kкfےmh %wހ}糞KXހ7pԇ'hl\ʛ<Ûx +5\Q:r*{>]GhAN3~LJtF9(NpϾ|y>&}_]^h&zCuu-h)<2A+~1'>q@OC`ZWi;+H}%ʛ?'wzlEGS' ac PҊ\U)V-!q MgIS  7coh]YK0c0Ƹ|mY atЦ}%1@%fyTEįa< / -礪 =vD|F`JHŇ퍷/&~32D/uA!ػ dW;1FiT1{?(b㭲"S٘{e@Vڣ4^J91=B7-P;yf7*,$H]P%t7,:DUdgoN-,o+"vŋDLUфd`{ql{Ǜ4hM#O:J)"gNHЯ wW+DA@WUb{Qb{mv ΜOYQDmT #r,(8 \hUHzuXii,.,>MU3vXSz ~d"Hb,yڋ0~dd|  | 'ns'-h7uSHM>2f}=*q,{~PDޭ7C a`.ܢO򋋬ԥ/__g~^.hMj/ߋ6DO&q:BkL!]u{3>nCD"ܥV1e6e5(.,-'baZ  ^s5?5>n$L+7ۗV Ga3 U4Woj^{P)5߿F-t8ko=Þnݞ7R <{[.jSf=C)Sͨz-tyˋMx_J@!Zzȿ99g^TZ b+,{{]0`,ͽ8yzL뽏>_g-_ӕGB^ySLM8a/bZ{FOt\&|~wɀrendstream endobj 131 0 obj 7269 endobj 135 0 obj <> stream x=ْ$qmb{CvXdؤ{P]r{uޙ TU& -֠@"í䓿 NΠpFx*wd`-BT;'ePr?G'2>:N0}VbZ~rBKvd32rIQR=( D ])7+хQ.:D5(S%C֧I=?:lLbP_CHPGR7?m;a@̩iٱ!wh?mkhC3ڐ ЂSW>┣kdA/6 M{x4Sa_cK*Spj1PT`=s$ ܞROr3ʃ4Qz5hDg54sJ,;,lJ>'EцXPte08tepV{BԲ 0րeZTH ›\e g e=E/?QR=wG;5^6zKs@(dxܠ(y&%852ml rq#Yjx6㞊ÀR` 3%N2x_^%Sr48ow/08lD"^g9],s) -Ö́ b' 3WXukLm0L @,#Ӌu5=TwJREYĘ _3L}͒+pzHKffcEůRisuydie@(oWe:`:_>Њ@KR /ɸLgQ2@x],-EI2'k*;lf4v#\\XOM^*EgPT*ҟ cA̗хYXt5eݍRuRЅo2P0[˦*h! 
vgAR(;,OFst :Vvz"3sq3„=ombsF,05WQ늷fx"V3b8BT>9.3Rj1_8 .ÖHԷZ_gz׷gWߟoUr 6ؗ@Vy]6<<7n懯D2uzNn>~I J;'SIُU*WM\ zVfLO('&},f5$ BG!W,3hxV[Q9I0ӭN>Yո4.9Yj 8qiK.̓ 5`){Nv2y#5Znlоj0]ߝ]tXFڎׁK al0vACs5fc vFchz4E ,Kh!Gő޸׌Ƚ{8'0ZGS2fNamGbr]ݔbQ]k/ח1j)a#R10+ą2Z"s^$zTODn$d({ ,g%-8hd'*؎YO0:H#| d(I8D\2G OE5u\wˆt5x {A8VI뙹޸+Яvb4MP0ĻYԙq4FfpAA_f8GL.L(Y̩̪D;X ,Xn ?oJ`uJ](V//AĖr֎+<) 5~i-|\Ma9pygT-9[|(̾{ 4fGК)2(":S .v)Sf>E w˪{fWm+*Xʶ곧Xem-8+_3jO/`*U`$7BL#a"ƒ*Ha.þ j*2lJWz4TGZVY Sϵ);efQ>jN0EHp-J /5)#ZʶC) %Il/ bz`8B H% -!8se1M~/Djm.7ᲠMh6$d9%f^(oϷQQٺcS5{y Z%v#GlYf+ʷʜ%YB/d8q|S@e-Oم P;JSNzP~n=%1wI,T&QSikL*P"D;dn՗,.XɄ$kiorzuэYSA fSU#񬈾_~(pu"*JB m)`DoL.C8sש^IE%{jߵWd۰˜Kwq?ܖw+WzvF~Gf$o7dC_+ Rba|1fke(* y}?.\I|]T=?Bo{I (n@}8txАNNȗQ[%vtoPHBh|%#c!~EЙ&^fÞv!4:qmZ?shM ޔ /ip*ր5џEMJޱ#1 hxixENh.Xـ6k}B˱kH.0vDs_B6&Z7a6 Ӣ0nfY G*υ٨` 6l [!W=b!5)Q~ׅl~AEbfbl)M%; 8p77Y&KэwWkc/B]g8Ypei.5&LM ˬV?ap*Y䏛-s° ~o:ȔޠI|G!xf$Hm妄$?<{_|?÷XJ9dF"ayt>YT}BA(C%KbJ*׫]l"2W[`KT9X\mNScl[WNsIB Cw[Qw6n@b[[)9,sb#~ Ӌ'u#X0f 7!Ƚ`')dS0Grf#£0a* >V.DNp^LnN2Sxv M?20nн %X'y lXEkjaTH#. 
k:Tõ+JdJ rPo:is@/RcL/Meͷ {؋ cmNd\ܝQ<K'[KA\0mb 0w|R": eg e&B%nzx( [՝(J 7.Y6_LSFӠP g&w΋>dmy"67~P^K(N<Ip]'_7xij QTE].zrLHSNO+ѧ|,ZݤLs3cN78Ful OJsz&p83>SR:/1ބBŏrg5]kvc]`(3ߟh8GLsY?Gh-k`Bv.<Iư y]֨koDdX(wȓq"۰NZ4X|nt4vbDݪ_x/i^hfɎ&LFz%vom"x.`(K6)a-{yjqUm99P?{k X6AKRp>د%^a9@vq$a 6bմ8칵x_} <$XY&d^^%:\1g=ղIx7s#xz\~,EKs]>nm(:hyd7}\ڊ̫Sm fnUk G9͙}тrpmkL *~SԋZq JGɎRYlɽw yeۦɅyGkP RA_f&tޛg!S"7M{םu#3Vr/=R62]UD`/#)L7@Qz1EZ6dBm܅"數iAc2mDK072<Ʀ!`r{|1RR@f`C$'Z rA}^֣IA5ا^|^q5c d?‚Ůy=2fRl8vyX>e4ݿ2,\‿w.fyP\Ωѥi}#McApu5ٵ`!ZGw;JMq-xI"lxiViv`P,E9?JsaPTT_ɝ׈fv*P1Rc~xs+7_ *endstream endobj 136 0 obj 6843 endobj 140 0 obj <> stream xn裿B/j )*`7SٲG7GR.N ]wg#'[Ap8ùp;<}q/GգABGi#3gC_^Npp|諕9ZjעS8#no"v[':V">tX}MᴗƭDX|ԫõ u/Q:ND\=X>~)!`4Ee+t'o#Rqh"|<ǧ#_L~jjuRq 2z 1x-›@1|0Ks!@wwXBA93Lt4 Ɔ-W4uY!uJc(UBB3j-yH:.捧e:i~FV\D0,(.x(Ұ"͒R@DDWH>f%{0-Ek:l Vd@ 0@lf%ZKbQT)Ifb,؋=R3ZJt>P9N*u 3DY^gA)+;eLk=-N%%fᐺR%>6 $" g4=\ ح>Ɓ#mWa=48T3Ȱd\ڜQҕ娣#d茲鋬!^[yk>E+c8qao&R}QRk }`˼ܺ'ݕJ9pM\!Z 46/3 s?^> k/*K`Zy̖ ;Tgޫlu߸l{W}_7M8a+:7n۟;&Ѩ=_A4wb--ÚS c=J'p02)m3 zEQrQ6$"F5pbd*%|[ ׇ`)ܝ02 pT'Al2$ 2-*&[ݶ >bj҆~W .@dc ̈́7|c(/GV\ /;}wSP2f⣄zTllbCF(">-' 3BHfѠB6K)$].K3BdyvRu?v"6ۈN3OTov1!:1F26k7U^,&c!{Tϖ]ffdl4% uzGS2pl @5f o]g{RҏJ7ii6E8O5qtBVzB2!,v;y`FP؏9X# '*HiJ?>|bY$nDM!hqH tCj,u$'IW&=Mj)1e|h31JL>"ApG!i?f;0*o36FnXf=0cc :lrP׉/H5~UbTc6XlI2(R"@H6pyE)8t-N~&`rԶEzY-TN@_{P֢0k1ٌ,*M~%a]^$\\<86L%-KPRehbC",]F=gx n3oq@B0 {WõYGM?Z :G2dLq${%.¬~N Ic)Oek>aCC8M†0tC6%MƺkD'aB{=F #ZIgm60t?=v+?h\l Oj&K"q$K:KAUA0ns sŷPVX$[wXsJLHKRI!8dֿ'ѥ\RHd 4ϥ+ ɾD$ ;9$TS-vkFFj7DWm9h5wo wRT|bՆ$=iNËm~lENX[XHpޡ&^RJnؼ&j&xSVBIJHC4+(E!|touA ;F|1HˁСmR[mЫpJP[? ,$Pg[\BeHDb™>y'z#4-V.qak݂S[K;fN^&OB]i}1^ Qe=Av%#LJ$p<1+w&G?Y ,/&UW/>;e'z``k/Q|D-B hh/&;ع 4< gl:9x,S!L]h!,o'o$a.=E6u6P4[Ԇ|ˤ /^Ы m'Q 4gTnhiåͽ70>-̸6wzDKKp]>n>8]jcDnVg*44C{ $`"?".pʮI\e 6L;7xmC 4`jfTL0:nVn9 ʝyڔɭ]*餍'%J/Gm33zcԁ]ha'&Ȅo03١)5/ JO!xkƝn6h3@ҁTr! cH`ن+oo`ly_F `]M 9X[|8o4Y*Drذū*;j*dZ'k!co\ُtK]Qb_n: vr\Xa^MFd}GT~9qQ{}˟XnM`ݔ/Ìv?~&&$X G, Kb1_ךl }aaZb[T\C"?Böx7*5;>Mҽ$[I{v#$ҭY ߿s*C÷P;(.;r`ay|/t Q:i>W0^u9ukD{rMs4K`&,h|oѻ&\^;g>L53F3LC. 
=|0M#~wnmyPA$TLES'ahzTH}&L$=x gfq(Q;@$#6'4r7SnG&g 3*.n,\}KXd6 Ŕ xcpEçr'ok%IŃyW:oqOPզF3̣yE2LHޒ<'6 ݔK&*`J.Oc1]otU7Y-RˎН, txEqT3ë,]4@hn])e-{ E2|L4v6/g6[M,zS iQ XJTaIVTӧ{t4ͳfaz) > stream x<ێ\>+ؗ,^D1d$ؓ$yfEȞhV#]+ɬ~ az+c4L.R:bM 4X! ̸b{N&/W]e71p9ܻ0TJ2$OEX&}E vC  avx! 1b§g03 @2_s) om Vb6Pg1(cݜs?l}Hn_9GU׾-Okgec<}]Wx_<ébA/(,O\ <_u𦠿X]G:leY3|= Ăgz/3[uYcNˤh4܋+ڛ!_ a k{r03B IHH|`N#INף,z4Ty/;(!(ŇҸ2V b?QOM4ǽ4)!1)F:!{CC&Uo'TSEbw7n-krNhF jdRFkOcQmvJ'q2͹'(vp hx9uLSRlp+Ѩ簕IBTzM+_09PgVjl,wh UfKȬv ;#d<0e6Sk/ݾ6xxŢ+2¯H+VIR17JQ&l96t!^𔁨9*?tEn{D&2u0\*xbs vg5cxZ<]T 7\Llqqa$wd A8Iź,a'@ӝG}.r)HWIE!D*DfEt1'7jiw[TJ)gkmqK7D,2M2/ "GiLJ; d]f!& v=ҽWq 9{l{ං_#%|%&(A͑ !2@D Y ƒEs%mhBQ2kccRt @1fI/ m- 3YA/B6A;Gu$1%؎̘Lk&Fg-/fB:r;&f:2œLv|', UPćj?tQK (#rLԀ{tavт';"!$'H2Ҥ5W))ЁIzFLKyJs22k6JОs11^ x.\l!S{_߰aQ-y,3ҞT?C܀ 7ovBD]aA㉣郳\@""6"$H=3k<(ی.eeffTev8lID5p1aWzwۮb"ei b64GӐWum;l x8 /G#mhQ9!z8qFX<.(φ0Ȯ8BّR4nVnˢWv 3 ")"j~xEPHdHODBID}\s@d!Kxu,AqlƈLHP 1Fsc^#ov!ZnCZUUU.g؀cܝ:TVӛ(z!'ɪU!5˜AᏴx΁P R*OZkBY^ #fRq]LMLd8{J=`'r KaMO"wl-%,tIE&QVWI73`5Mك#:!(Me&)a`\!BSu] w7L]^4p!oTĒX2r&jY5ؤH4TbVܸ_N`1ZZW~UN wAlQp-+UBuq\@ !- PqƵcMuqA.X*ގpEϡ h8M-%q\oDȉdL[AHWA>fHG,9Ic#\FyʅqGz&4 ou62*7HNoM0 ӫwn9 N;¹efvj)3a ';)B= OP nC?t\WVKh2R$L5~Uߡq `:Zҏ2 1[*\D Ɣ @YcT 5iʕ2 g]O' GjΧ;hcmI@!O0K.}N־O{4JcF`d)ddl꬯-WܽU#ku؎i 2$-}n t⫾ҦqbG;zdbtz 16r;3C"tDKX9VCD҉E!yFixu6X,}>Aͪn41#COZNҩMb)@Gs ɆblFCJ%+_MJXHQ#7,OH>%'ȱq9$w.Hg0Q Q^"ń^LY+D ٟ1bbr'dNޛ*Qiس~r(}@%9?3mEzAY%Y@3uO߀MqS2Ј sV=eBcB$)6;C׫,kwkXSuy@CWu>ju06e\H*H[5֍s> Qt5GO6҈m8{e2Iׇ6k \kk/b[ ε`fv{qW ARD<놽0y@`baRvr؉K9K1Ʈz* Aؐ[ld7BA4pf5v](r2cl$EZr:@)KLX/6\(%c\ڐmqY)'\wJ>Ӕd>rg=Us).3v|qU 1&~.0I;tP̞q5::5^bKQsժ-첿tҏq`abRXQӉzëq̃Iiْ'Y,HmQ'ڊ<7<s;Cg3'C59ޅYθI5ipwgz>]_,XKC7"`>BEU]_4udV[yLˆdB95 |:\WN/! 
aOʝa"6֋&'.rh6BajLPZMOA*á"6tVVNg>Hhv6%0ƻt)jNB'qAFi*d0Fw˓%c(ips8Hw0E|JwQKaYi\I!+<',+MTHBVu XN$b 獛Pivpmf 10˟7G/XܷbaRZA-JZ%A\<#&Č|3P`{3Rd:N ;)/t~77ʪQӠM U$u;)`X[FښElOҩqxHtLQ>*ܓ蚡J$ucF [EN{s.$Nuûg/E?%yOT&'zK{h +z^ty"tG kbͼH}y}9l <}:mAYJ6<={yQp|^0gY{4x5[GWS=C"cPR2`xC1b7ݕEh+RWY}D2_N#R/9ٶ<}a;+1^4J2}} 7\R I_R9ȯuo__~lb=*}ٮǹ;)wx_xX}Қ<vm9S'6ʶO 'eA\a2Rp2}j +{dv,hӓueO Vrne>$o_#UN 4pS BjS5AED -`4ͪZd㕙vʧlw _RY9EM٘.g"fCs>ćx 1>k݇) # ?^[SIw$5qErԑbH^>Acnk( ++!eQ̕cp+$vGNRCam w 1<rC dBo[+>t 7Df؍Cw`Gb˰}IQSk5$> stream xXIoE8_17꽑8$PؒJ8o<;{z7|pR]W f'כ/pq?l3aF[!iA*/ ? '1k@qbJ[ t bVra"}*?ʁK5>҈ %tP!?fenE:H︖B|E>`^ FY(ׇ# (T2)Q~pspl5[Hwv[|m$_5򤑷I6X5lZ!g vs}mWM )Y0^/EY~3f⡑_^5;\Iu΄/]Pљ,Y#ܰ{}zef]~*Bg"RlPx#OR$[xUKfF.m >[utոWX 'X]dB{įT0Ti Ui7q達9POrSs!w+e!X%H0-Д?x)> Jь5΃JQv7QNNRkz1@KbP a]p\wm֊7̂\E2a8t~TsCG+BjSc9)2*c6%7 k}=N+[}|()buֺ̼5_[*+5H/$lP0@ƿ:aTZIҳwh#:iv3]>ɴ Br$ $B+)YoJ px%lM:a1R3KxFpsLH{%pI+eD ro;EKF\Kt)^a!D2x!UBgy1KLܔk!z\#P5=؍ɱxj`%5' Yan2^U3gF^fce GxPwy$tI{R<W 8Re3'NgmNS!oMֆLRZ'8l62&6x\f0+LYR-*pgq&e;$zy$,g 31yiE%g<~TxvJkTij'mZ-:'CMP&}7?ҀÊ~ygK3m?l5 rYZ|2 APxe f/#n;E⁁k-km|`%g@Q͝EX4r3t7({Fmٵ`J@qu;*]ז[N垡KHi*,SigѡVϬτ^ i:1ϙklEn*VSm$l1u"}?m~?7jendstream endobj 151 0 obj 1528 endobj 155 0 obj <> stream xU1O1 ;v<TP 1US/-(-{/9GLNfo09|r8۩5zr*q "EhXJ)|@0 3&9= c,P1xq)WFK4mr..\ji+;lT11KMN r i6&WxmST6n4dY{^l'Zaɧ(%)"=]OFYR{dv+ty_^_endstream endobj 156 0 obj 272 endobj 160 0 obj <> stream xſcci 9e}P0$faQ~KV˭ah)de}ӍZo>~yn>M)7i 7&b:`]0OuɇO#>]·bd\^-%͛ۿGXl†l>3=Hǧdx4I9wx6NKwtxr!C0F.r#odICý<m 5<*jKTyooUL+d[p-r=AY'tZD&K.(# mxZxzX&g0rgÎZ|_9*ʀ Glp$H=fwxqQJNA6dw  l?L'p,*i* "M C0Sm,66i?(j_7_30)k! Ŷa# C)))]x·nb10NB2XTyUЩ2ox&dPa@{yI^- qbHx, Em+Rιo;nx(7 dws,~VIJZ(Rc҃^9- (ĿHlTN} u:.X;E\!.kF >5x]l*`D!E +*tQ.87J, Ѽ^yJGg00@Q8VhU% ~1ʮZ)!k 7D f&H9Yf@6zWɦknN&AVƵAGiM_ȴP!S&hgOn[#e#`>% p +et>얧`{~KC /yt6cr Xda$|'j qlaS}? &ܯ[σ HVaR/O9Ka4&TY$`'I?%sF@.<3UٛXH JnrgW/.#Ǿe$HX]dS7}9 @ kc A_ Oϛ`VUDl7<!灰T(Q ׅ8~ *6 Ӻnd6h:l0+U%_ (؍a4bě;e@uTLl,`X13LKZdߏ[R@3Q. 
XFŁDJ,7=h^!O1p8;K?Ib"<VEd4{J\P#{5h4{2DYǺFKvlɞ3 x%.&+ń̓RV˗j؛W$%=oG0vPSBOȩL -zD?Ȇyd9&{ҁXȨ)d!QH ;C17j-bJFS3@L'`%UaD) ΰ}c:9{NT֪MmJ.:W[b 2FONÃ">6(0ָ:I i֮٘[w+]AQ4 ;BZ0|@ 2_YrSzsLƀ8'µ;K{9Q1 êDOʿ{w+ƻmN,B\V nko41k4%PVDY<&X!dn:.}ȣ?R {L߃-z,Y}y=AGi IGM)m;.?WI&zv#m."!`,.I-ɋ!]Hwl]@^UԌ;t]608۵W LδWaENP62H8ޱcdazƍADX1 %xr?)inFY7 *UX ˸qPMQEvA]!XmϥYqp*_iY @{zq&Grx_K%Q#"~B]J kERڵA}IY)MJt! /;FLӚ!՞ 9ֽvvj ت B#\5P08~c>W6>9&v)Sf+dV)K!DDxxPPC:ՅX{js v1gY(\m2TuaKDPB#`jŪ 4fAx޼BVFtbyfha^+"MNu*"I+UY\&E+Eu4;Vf\Sn K&;C\]֜/jSܒ7؜Y kwg6d(s`Z Q+ CxNǯ+W9lN+H SI{ȃ a֢)ȱ<>k $q$v ZWϬXΦk+m֩MQzfsX*P_t6h6mmFP:!e&6{͐w58U%˭3tvބ00Ŋqp\$ڽ\}grIuS B i3 *32HtDHKe.N{oÿ-2 WO{Q]U Rxw԰o*PEU^)4Ǔ\_cƆ>˴=%G k8f?po0A&t]IF P}!է:~C-K˹A4miUø;Zlo X+GAxY! &[*?g&Y\RhqP?>rs9 ori 6 Ҩs%8C+s(zU4!A\[ z_ Sc%J1%t z\۫cDۿMMOW` :#ߒ[!PDAu,Bgl]5~tWʍWBUB8_X>|އp%nܼ䵡{&N4!t>=Axm9 .ހ?lnDհ SgccDO9C Q7T9,R=12v{G7۵9!vDor=\]Wu_i )No56& XnxLqn+C^;6}#u`o 2""dy|z-S6ѭyA@b%Yqr n@0i,H;dwD9`$1F?Bdz)M ؇!L wL]Y`t_ij|ckUq_"6!*/`es-R.Ny}NOg@챘1w޾7quendstream endobj 161 0 obj 4873 endobj 165 0 obj <> stream x\s&7/x_eLŃx@ vM( .Yۉ휜?ݭMK#٢ɚZLvv94>=uZ 7/J ( _ yXfŔ`oOԈaAe HB# -pdBڭm0vp:#LXicZdYr,, Pc81*`%w^TdC1 ܤG!(df5Wϫ6l",#4 lj4i# ^iΰ)nҥτdzvZ H0{"RNFXlrjIѩY  lL@CDձFfAj (*b_U{f5" LOE8a3GNH$~"ߞ6q|R`Hp#Lg*Dʩ<,N^OSπf7[ڀ">%k+KOS=G)dDX~pujTd{e%X<O V+NyQ6kv^!Q!<.఩œ kq5 az;3-3<] 0'VR1]<%/2)ԁjR`Q9J9դ1NӊVzը\=n"@𬐅O_%.P^S@+A*HoФBS@-ƺHH"|&x2iB:ϒqZ` v9(Χ6lci;NN# ӊC4-]8e\#QQ }vq ˝|,"7>2ioջ`PeF]sfNu!)c@XKj aǶöZHCy BZxn_B:f8IHoaڋ/=U( 7Kf*S iCMΤ~7+x$O{zJa!ӡ[V>hYe]yBKZics0Ucnt֔jQgT>w8ou4/{LXO?ULXBR=xfv@`4 m;S|U90Xڳ) .||_τi2t%vV/u1e [SFitFHe)Ô"8u:EI̩]׀'9**:)G^T'\I0ьn\}@7ٵ򲸣*hvF1># UǔXsVVRTӚKՔdL_WU^E~dsְ*)WX ޅOJSCixrJ#=RNe9$= R~2E[Bws.#?,x4(?3~u0C[V!ׯ R5rl; @;eȁ$ F=DLy2bKG4l?9q9.W9VN RnP.HX0~}iBU`#^]qNdi\' EWV O`%m7nsˤ `&{TB'_ שUvBh9z0ll;e ffL2HX*M )dtuL ף:ߤ`IFDQ*~r7Rb.%f7UoWqJ? 
SOc#G oL.W,[h_Q `a+m̶حhHt8"kbpp_]X,_\1Kz|p^^% +Kl, [vhC,Ge/0u'F;i-Dɹ~\Z4 0qD»l*x!~H ^:o(2^UG[e Dcxyn1U~DF:qCv=eʰGT?Mbk1g cu |ʒQ-ӽ0j "az܋/oA]pek'v:iZvywE@Vo"(Dv_Xի*_-+JІԃp ĠN㧤Z׉%AtTH`<-qw2n%DŽ}]uu(__A79u얉3vҫKvϨa%_^mONkaGێ{lFw7 ,MzGo? ǿ6zv0}q/^}7ǧ:PN9E`Ջ7鯴 P7ݐ E߬(;(w&VSw$蝴a 1˯),=&mßN!Z?J'D ւop#;%<pLè7)ހbޔpo MF*خA ,Qoݼ{vggחgwnKxpp<ݟ]_i ӗ "w7~xvww{uxi<,Bw LC_nn'َǢ m—gOo=^?~ kw'{h$9]a%8|}yuwsv{~e_pE lG۹W {'?jb[>Kna{F --ʮMa'ˢCyK2e0(kѩ `GXG6#4T$j<endstream endobj 166 0 obj 5349 endobj 173 0 obj <> stream x]Iquû׮}!d"@1CK037=MBZrrb}ruϟ qx~݅?\~xH!ѳ ഑g,a&]]<>K$3(co">N,Zw9Ce^?^wR({eTO86֪x/m4z>¯/O*;~=pç8q|Op8),QK*ߏ3HDwyB/B\"Ur3u["Iy;=ZzH,lD6l*+ڝ`/ &.AJ 9/[ ,4)=7Q_ce,ԟqxV܇p_{V9vW'D`'aQU@/#iW؇rZR0opPZoh=fm-'AҢ2߲8M)W ,NUCJ>C S@܅V.|e8X40_}TyAGRNSgLf!^9Dap Qةub)_u,GQ& bf}'Q6 CG36[˭vbgLbInդ.3| dR2_+|ܲx,glkXH%| ZT:#v(OyݞQoI64Ա\[zcZkoT흧"ibƃlS,@-օb"piӖ[xST6}/hE[ˊEҟV慐C% PH͓ՐCŦk5d#5`"y8Q?\Xk[n;Vj7le2MpX7`T]N ҔY=^{Dw8EqvГ3R!;f(*TiU8zy~ .@S->*2j0|_CJm,ؾMZH IC;)|H1LZ3$y3$a2,7Ap0xo{誉} 1 pǸIҊAwC'J+3,=e. (8=_4=3H!x1+^aJ .SCy%F-z*5zhWm`$PƠ t<@ILj{.L.s~8U8=88bb?㴦1H~n) 7 $ e>O 2 gyHQ-  7!=3\a0qNctvlA^TRÒ+ykßA+2ke;,:#aTixEAA ϡo[` ظE/__|jd* !-X"vB%Sџm <7Zuh?Fub{1 Iq++|6B[K/Ũ55\]\ 03P|m`D<9!2hfc8R\4c4RHxҀ粁UPgRsqaʨ6O߁iK%S,L?|ц'NgV~b |{\:ġVk.KQI0:BKr_Jrga S|]_.y %cnhpYhn*ʃD{̅2`V,2qU*V?v?M_ h@X`u beYA;B&ᣋ @&W2gӤaKN`A*彮pPvR҂& A[6lzDɦ` KJO) Cʼ0߆M@Bţ{/I+o6NFy̝ l6bTZ((lo W[{C@hg45mggq'[cX67OHDm*5khJWqD ]bE [kYL*(2jGDj"2 3yIl5MfSń^acг(!4_]{a6Xv\{ 7&CkY3m,ί/Ís:Yoh*81Mi aZwe`ɎnM] &m-!a$A`֑F *9֗f cy6啞( B'\A՞.dOChk5RPw; 6!͵]\." 
SU^ kn]gk7 = > }r\h :N8aRbMY:1?e10Z^Z lP.`uLb.L4cLFiQHm8d4YyˇJ670؁\'`tȚ58W"Ӛ ְ3&cd^RrbV iΘU%q’Qe@_@ΘuGm)]ESۇ5yŬݎJx?lB-}P 9$BhhR18Lk ECƒ;u;~'&=J:M:UX0âc`t׀}J' j]N#@h![An;&wFSFUٴL͇5I`Dr.-*&iAr) fzrVN⬶]$)9[`غ87WutKi'Mzj.^äM)JKuh{YTWKDeFn#XGF`^!ءj>Ǿ)5G`wɰqBT渹!2}¤ `mr>^)>WUMmi!(l&Ȏ],,qfÛBh*Bz eJ"JڢyɎ$e?Mp c=5&W3a,͢٠/':UDFրqȭL vWHDqK.ҕ)4xLU ʶic% 95>-p.*Sj┯!.f(0Ҏ1Hw9Q55ꊔOn ۺ1%'`0]=<.šqضzzqLJ9qӁ"Tv#EMq0<0/V6rMY]rAЩ3M/|[Nw`,衖o5%m?E8.EfgZTE!%N4K(:CԜȈlUqDbat/pyai{&ôŃm Hv[Hߥ 8q$DhojR^?)D*P'|HuA'H)ng(~@UU-`@T"f02|f,~DfO{c>aw+g@A+n}vhnTS.rv$:Q3G⣲=2]/Q'q51czU'[Ei iDNL(nҠ#޾zfiBrOoJǿ<ڨGDJe$YZ\S'8w݆;;IW 0fY1k}{ni:2IY+̥'.r.Ûik+>y;N1Jiu~E(=) νG!I֌/j#4AU錌vIL/G_SYXTqF]% Zo'@ ˙+]ʉ!(y,w˕ԡRͼg&FَJjdI}>#4;>/L$d|{Q rtG XYLJc.?s]87}h<sVmy,RW6攥I-5={)M-64][@4bH:5Y<1z{Lpՠ5gHXG)uq`}}]\_3D^@z~Mвa5k?Ǜz|]4USX&]6:jMvo?SgnЛ]  9"!N%_Iٷ{{yWT8 6MmaEek(ZleEJC 5>&GB=P٭.UAHA&:@'t[Pq{~5K!Y:Ipr*eB[Vg t U 0VDE^hEN,P3Vk@HU^"~.|g;_ӎuZ%|7Oh eɟ~CL;zHa_mgvv*'"sלA+NkXNy2ޘwbVl a(q* %{l̯60=Q®R*Eu<_qTó-Ř,uq[:-m!*Hft ޱ/Hک4}gj)EAa&Sڥgqwl{_W뉢kשESab:3gϑ:3Yb( +Ja V|Lja?ܻ.U§:wTE2fźZHʖuKE+3}JهO<aM]P<00`H-X5u_E'Zendstream endobj 174 0 obj 7728 endobj 188 0 obj <> stream x=ےq1YgN+dC湝40KL ! 9N['¡ 6ģ\=x !<>E0 {C >oA(iJvY=T`OB& ]-C^!i>ϓXU gC>]DMNp},k8D _-~L<)@Xd$ 8wp~ ] 郞5Q _$w0-nDxDŽ'-|ӑgfgh RB ck GR۾lfT"A*-GGIJpB<ʁ\PX[.ī_2} LRRRᇒ/e $Ȃf830Q5#9yPl| T ٠:xdp0`fZK0D)6-@Wfe {/#hM(x;N}zMl΢b/8lDp=T|&wS"db2Xb:XHI28.Opn#}3TwyuLfߌvb_'b~ڳ 0"#d`;amL}g^1_QV90 @z l_l)+*f ))d?O3D cMŅi aߓTMvܧVSٸM Ȇ1c00ʆj mx#K[yd,=9ϕ2ZX0:*f!v{)aC#.<&Sw mm Ѳhna3;S8#! PRpf@j!ʾI337CM D.S%R@(!%2*4`+Iӑ~Wè n] Pj8r)0-7ce"NRt,b ϽgNexz0kW?ke&BEc])3Hh}л؅ eEΥ,FUfVUuBжl* )Zp%\D\;t|? 
crjjL2Ĉ "k5D%) ?ԧ*Q#2ع ]#\Y8Y֑>1Sv8%>j*|DN[/D_N*Z\H ͯ.1cjs2dI>$̘ %baɼRC!MѢRz<Y/[^+]pſ*ΩRSTd5 -},bI Rz.?F~>#IW/pZQfNv.I{ GTNӔkYz'Ù"j} FFeܥdmfOXrq$*jg|"V\>V/gUq.o~-8g*LgJBա 㹀 Ƞ ~& @`{7EfOB{>Pߴh9l'$ VHwNfN>'k (R<:_@AFDf(~G$я&<9򸰋z#F@&-c{1ڈF4)vl6{,qXP*^UYq$Be 4 Hw+,{J﮴ۯ J;zsYWyMu-P47 djgmWGt\]wa6ͫ4Ypk WXWil9sc$Lme @.t*Ήl Qt"zX։F_K@q~!1+3xx7`~XJaqQ'V=4&<){5#ڨBsxb*R7HT`0 i&5ٔS8X~RF~n` 'Q0@m,w -ҍFbb+k+Uo~o\&ͅ4A6lcP%ž?vQ+Ziaء!]eOlZ}5Z3D*يUg̛d>]c;M-іIJ*CE'w3&-JvqiNDJ^iaҫRpxyXF7̭%>wokZ&ӓ߸LX%[밫$=r"P44S5X-@聗[JKP, jܖGU ABkz'__W "$\,@~= Sr2iDv` y٠^Zִ<[-z.eEF7E+;7qsUMJ~gC%6,^@d2OʦB _Ä[j]7=3_B^m5oyd21DUVW)T{.wT/Ubc:}x1A;:oѿ-$'|U,޺iܑZuٍ`E~{%d24ūEu#UzԠʨ}a_䓍bdMUFSe.~dbnDFJHa@3#e|6+Lr2;`2&- =z6lF 2)7^˙AZ)]x(cޓ^*ZO'rO)_q_ēxQX^,tc<PlSiQrdVI+I+hkHچEvw6D'B{V[6HMuyvt*hr446fw׌dCO !@!VTPXl}žO[ 6QoujWUgxކNC73%KFuFQb6㨪}O"_]ߕJwêiVڪXQ)躥TuTVq'잸Ig?Y.*:בqs}bBr5&6p͉PE.W"~}*U' {R ݥъ%}cw[%E]ʌN=j{%ƫSލ+M&]Ħ' ͯЩxXـŶÊrP2ژC?rlkO&dQ`>E2AARJLΚZ2Xo,j rfnUFWUFw:S8SDXkNH&Ts$(um젅#gzMouNMmC5v9{;]qlo;OB&9d!uGvݰ%c|4FRl>'/I&u|VfRP KG w4J?Cի7@IA@Y F*h(RJ#8!ͥ7ڐXHnT$U^ʌvvDOu &?]ڥSjUp]ž1M+AZ}0cUVY^ؓ3Y8 A˅-~[(k/1귴KZ7` _M ~",~\ 2Ҭ >:m74Ԅ{dU}Yq˗)L{] Wצq#RsfjO~L+A%mIj!ĀoLg,oٞM7BנV9Ū &ʵ<<ldV_*AܨF?ޗ|˱g`)LƎ?bv;O #3gH~%NjҨK=-fZR롳Ẑ.a_(M>>O9!)mf- IX%y?ƿ^_1}/0>HaMj~?ϳׇϯג" 9q!Ib6FbPLv^mûnhMZq IXgtjwGX,yoQ~#t=L&"vPidKXh31bdWtݼendstream endobj 189 0 obj 7401 endobj 193 0 obj <> stream x=ɒuu>80;t,MlHXC.c"[^feVwld[}P>(۫6^_4P0 I+O_])`Y:gYJrG&g8]ߨX>j~]>ɚ\|!t ~Z| 6j^89s䢊\:M>b_pl}vMG/o7p U)ߔlkڇ7)}<`C1+;)kp69'qߘs0)-^0&/DQM1k&U m.rmShk\¼Qr=w >C*c {=;rYnn֖f !gAy11+#gz H":i'7qbż2pdcW_3THn}]q|&)z?ž=Mk[r8nӎzoy S ACJDtڨ⌼[ `݅c }r=b6qi<gfL%[aZf:-7Â<ۇu :QIv5VjQp8*êAK$<`':Nw& p$zu'3 M; LwoulXhcAD7+(=&itF!ZoF3Y:;΃o hywp8K@15^1RbZ; e$O@ُh Q}Er޼ZHȻ|ՅiT1VT.Li v:`)=9iX yH,/R{IZ$% lBG>' A`!ږ[e V2JrswMFycBecW O%Br_)d@(9y_-O-`޻e ^_ yVkTV2* 1Ac׌ͦږYsQ7B++gkL OZCك?SE>HIBOŴi}ƕq&+#!ބ%VEbqbGw | ]O=Pt^luY8xspbj+iX>F$YxKL~?Z+ p^ Y,V܅PP#ʞQ5v|`B^Z"#/ WuR1(ӫk3p+oS[ xbJPAq.3ϛ]IʬRS G5Έ-[@," >g3;9'L@N48{ o Nł'tSǩZɳ.xV';{EL6x;"&tpAi)SSa"LF̒ _(X!G{&MA'04|”uYyR% 
y\eⵍjcuo`2f쐢AΞFWL 8WX"⭗e5%tžp0p|f+/+[wJ k? _av$,Nc JmZdz rE~ W_ R̶ᑠ3\49I]uEi%QmUHʶQpI}O塧%N諽y;^Z,iO+5Zk-@"S\Q1Ə#E$%, (ﱀ;ۆVUJG J6*g]r2ց`+9&=}5hvB DJfph8L˼zJ@u '3|kd2hc$>GE[la@}S/e* QζȂ.-C3EG`8Y}Z0^pSgk_MN/<ЊcUүXc.7WahH,02* ۍk%*ͦl@BSޮCS:K\c;O9{c)c:Hz$pX gޣ,/Zf@xkW. ՗ 01~+bÀq %67@܉ 1V$*RR\:X`{x :KR[#qghP EA?yBbMȟ^.SI^@Ar~K 6$'\ᛔ'dTq)ԸЩTH{N(%m{XYJ+cDGL-:od0bU_Da%XQ_dNyaׁ2;@E8m#Qk]U }S J+1SүP+B8G Dƌ@<]$)8 =4,Z[{2e`ܑIp#GrY  Pu=V 0!ڬ*7 :7Z8>5m8=Rff~:ҋ%۴u> JB79x|r<tM `͗đut'&MTƭdMS;|x%F% Z ޷4 M6ر8Φ4à zho@4VW[BЃS_wxN}ݸ I=ANlHG~_zB.T'I0[>n-k7P[8 \:P5l~ r6_ǸVVb;B _]=.5},h MJ^vG{}>+>Q"K׽%2u0('џK+L&0Ljs` wb|EyI V٠M>7Urvz| 2""`7w >mYiE#܄ek/q[rUy@p|NʀJ%{D4^URHsolV8\%\ pwe41#mM06kg"nQ5;Bz~fj˲"g*vCD=0O]%#AsɥT*Ю(U2RB%ۤ0,#NU. a)3]4+ &)6&1> . b)"|& I ̻ 6E rdImaɯÄӧ3Y9g[I[s`lbt3n Xm,^db no@`g<~C&JZbA#KyJ!plWL`wRXR1h:!9\iOE4Jz`t̎$e2E_^jk<@Qf1J&X2r/jFqD8,|wˊ85~5 줏|S&2߃mo PYt>0O[BS]  ybxMRdkԵŒ;vXw J%i2khj9Gw l&1`~ݱeaa³97R^.NArt'8qV4AVUpfKU0I(xy=<ѣüb9[N7["mk@6Ux.ȧm5yPVKoβscWX7[Y{1gZDDyJ;eW/m;49mO7 Ap__YqJp{e}0/O]}7kS̆ prgn[TdDrNs>i16}D9tvQ D/@ `$жÍw-S֫9JAhxMRZ 0sl;>ڐ[&y}I' 830]\ŭ5., FSSs V;TsoرaJ e&j((N٦!4jx8.EK¦FnER:G$xF@{(\H`)B#{s%-w>ɩbPΦL:~{hDKbS4@|. I~\䆞ռ|~ -A+6eu"Z f4M8e.\AgxjI^xz}<jy]8AH6\@FEȹy7Jy2qsD]#Ny5C9Ե|^34urM,@v^E_OE#kҵ0 x̵H ]7arA1SICBE!.rB&VY\Eȥ%In&O֥(t`F/󺚀?\ŕ[MD:+l>G]^K뎒 u2ݿdm?RjW}}S*=)-eJc+:$4pýqwF1s֚j-[i&o&oV7n,h ? 
hM&XPd͛[+(RfcU7۬ d޿Wiu0 1a4g~hؚ3n{ќK԰WӾ:ſ 8c8D:NQq 'MљΘ}eD6\:ADvثjl$P( ./3ns>-ۼU/٦ijoG {}IņHFSUlgo?0L>+g8PzwJu!ucŒ9W`ioO ٲ-A>&~D#QvKJ+uL*\v(B`Ms I gYUHMiܸCƮ@;Ri$Ӓ jӫ|J!u%H8tMb5`05%weupZ#oQ[bkZE,/4GrGk-0w {pC;"ΘuC0ps(-qOh@I_HXexTWz8Z0(U!RZq^yxY XG0KKȏeC6A>l"+GxiB*97-ʞwJIoUwrdW*sUF j?J|`\6h(=7RnY>DŽkc y y_?m#Թ/满m(MNL.Eb`8riŦɣ 5¸5-ГsY ^z*C-6)?}(=U^""_IV0ʀ~sIΏG6[%H,*4Cg~]*Xvx3 XՙEdwXLLz-w@RS LpSQk~6)XZRUlP\esԏ叴 { {{C&/WC>&/G}1]F\(8 fE km6H>G^ I9)(Ye3ry cop(Up1b2 8]{"%P=  .|oVpb29Zd,EnOXrs E# L*6~Xė]<S`{^my\t)['@>Sl}igỵ/\ zڭ>9VIe7iFZVd qy0b<0\v^I);"!퇡an #Pua!$i :jW1$"aFV%4늋"zcendstream endobj 194 0 obj 7466 endobj 198 0 obj <> stream x=˒q_\up~BҦH(D[$% %C_̬ꪞŒ {둕WwP>(˫<^4P0  UyQ:D&˫͵c*?W r 鲋·qzZNo'K0gw >>dwG0=~~}2.OI_tO>t!kOl6N_g3D:M>~Nv9&iqy3^_c ΕN@LcOM>fPשmFcG)i!f{|T`C A9A4V۴ɥxW&C/i2/qr̢?)G|?&&)_byM ^R9_߻gW}+W&% 0aʆd''/Rsf"WP.v]=Yeʱ~ւO~e8yAN P_hT Pp_)g|6) hxQyf?*o0E3E~"b0םn"fkꟷnr:ã{},86U`?i mYƒ0iY-0)s҈;_u歇 +M1kOVG.k+h!(=|Ai&:Ht@8tcj-$[! S,1#5FPdtű㯱(OSYd 5qYri4𡏡ut nt-"ko,]}cz_.8ݷOF1Jx|W.>̖a9f+ WueekD(QFU]z0b5=kt΃A! ӢHfJ7e1@fӞcܙӕ/d6N Uf(@~˼6Bȶ@h؋Hʵy 9.a k(K9W mjmM! ^J;-c5LB Z(|_3gv)EqM SxtEB;w;mY]t'`AC2 BC7pOK_)ib*,+6Qᶞi-k1_zȮkx x~׀;FHpylbHQ1!Cÿ[J.L71fY `6E"an.g([ơoCJf)%9T pwEa2Es$ϾG9:lAP [0{2slKZ27E@jDVKn "@ BW?>Hsw]͍%sg`3&]S A#FJtIF (t#^=b/Y%lxvv#@#=̀+mEW  8 DrϘvOLDI>B.d|[w4 *®m8#b Ǹ S! F>b _ׂW<# u= "J*>γarġ0S _O`#{~di`GK)K E O2ŲVh(c*)`⮈reV-R&26޺qi"M;EGvMxm|+[( a~ٍudTϡ2U{A LlU@} 1JSttP&,;n.셖+on+E0GFxoɞ W[jLo#(s`@2ܲF$7}Gi(֣9**w ;")ے7Ce^Uh2F\Cnqc9'O1VR|`FE zL|l1]Rx )0" s 7į?L4IN@x񽘃^1tV6)L4 ͺ 9tU,A2y4pu:?mWlۧ__OY5*)b=Tx*ل۝DT1: x~M֍uNLrl DTh? 
̚W\dځM;fQ`':L5;Z_#Zodq5え96Y܁rˆ hIP,Htt*5lGf4 Mdy`LI:1d49`0om}{QBJ[ 7 +kQ"@;W=M7=@"J7-+qP!l4]`ٍ֓6{[Y|Hzh/+8%ϛOU+Dxhr[<3b/QxFK},l#0K4#іZM6/oA8!A;pMLm0RƘKE;^aRV~So'3Zo*Imf G [>Y3ɍ֦(-ѭ)Ho %yoѕIv.vc+W&ItUOyO%;˛5,)X Jnk4bMb,+Lܛ21&x$3`#+GyŞ r@]- 3R7U@E5T*^N/b=8tR㶛`lܬLV:!$rϳn@I}N^ɌWg]B;P.q&̆ Ah,3pĀ\+#̍/nɀMD}D7`t-pWwj8$]o`K@ SvS45 _I)#͢n>JE mlIZR~5lNu[XaHVUDxv({\|c4ntj(R:sDMTw,4DU锁>a|O}B0 *bgRI ZsoLR/C+F5v_AKKh2}VNx>Chuj \DTxMzftIH-n8"j,S@ho%3d}Ayu0&Q< Hj}zc[L!ŭA8o=5peCOc{I,'QݛrP,cYPLdW;\$`LK$֑%''+L[߰HʠB 0"1-{P2챲Y'L 3ZLx0v/Wе߸DH3h,)qOu̥"eEߟB6@-E/{y1cn Õ@ݪl5n)3pkAo_{[y-k"Յϵ1?17@VEVnyXS R^} KzˮlZX?FlJo`!6*d ,wTQ|絖ڸKJyUjm >Ջ42r7(0!ġX*Y%cߌ1dFrQ:lu# .8$-(Ŵ_4Vc-ťZa`(f}7WJI\iS2/|:XI}FS{YC>?m|m.ȪB4)?Y/0Bv3 [GmTG *ehM1}'<}:[צUN0CqP׹,_큱Cm!{vhMp a.wkGn,%+{3U7^ME G/< lcjJsaTl+ɑF ҵx`< -ʢz*RM0)^6C d)d46T  DkVV{S&fN{Hm軅 ԨF6Xv.8L=\%) 7={2B4dsC5v<݂\d!'^S[".iz(Q+Kr; }p̚]>6kWGQ#l!xCs]B a|Ԁ^}[%Ek(c.1YCh{RK\ /7]|i8q^wˀ$:-ލ>.Lg~F^zjZԥn`i .޶#%cK]>'mZDʾ;& B4C.]~a?(4`:"nv8 #$+!vյqntbymۼX:ܒ斥k%J`n278g0H*9װq_`ZH'Yq_t>~+ V?ơŌ](r?-4O$Y'H4@~@9tam^.a]+xn:n{ >.jG-v=n\u~N7f?w@^LU슑伨F/%M ^ -XJ5'Is´7Y[O~^@f͕ŭt6̻ ]]~iSw0[Xwxf3M5} &6PT%zbxnY}c СIΘ$ޖ0E#AYYCWP⎇PN&=2]USCฏi[U\ 4FΦDךa>l8YyMP`elE_˒>u¨n~4d(GE6lp}|e.6Lprmꐤvjmb؟(8 L>nB$ZzP~Iy~H>HdvnZrY6YVyv[9^ľ㳛&-eb<̵蚚G1(ZF*&TcB`lCTuU&9ÛnTU R ru  gaQyX-tM4Tʎ)ۉګ"7;[lLB|4a* rN׭=s7A2PCԪm,=}l`߃pN(п @|~ԥj.pB㎉j+~3-ы"hڎjpeenʀ`K{#!#ָsHnA}Q{5_@`"'oz%lr(8"ȫ.--h{H~$s-'+Ł! 
Hىk$G+U~z1(FDذ΢Č;k`tMe=ΟtӠU:t7~eObSQWUǝ ]O(^nNރV#%lZ}sмl?gWYCا,^=e/zy13f80`'˫ҔWލߑ4Z@];jN]ѩr'hV9?vM=!icAtWLNWJM؊]LBS+@y_V;ub w:t|uwu?+>E%يӖs = P8kmX{Ob":x B)ŘW bWR-d-þ"³JiޓGP+" IV5F@0[SſЋe|:/._?K>[N0@r0ٞh8`5h4՚?Fw bt/zXRoP}rF,3q7"z$i;qra*3Wa`X[hhFArel62@~C7s|FP_K~05X\E& gz(Cb^a͹ ث)wyehuh|-<3J&]NHϮרPendstream endobj 199 0 obj 8123 endobj 207 0 obj <> stream x=ْq~>ژvG( :lBX%?@RAbaKwfVuwfuU vQ>`[]GVWw&}oxM|'Oa@V*>}yu\vymTiR.>}}󻃿SVxj2<.cdw8_6>V'-S lfrQ{?Y'1Nix[ _O?9n\_p9.鞗's|0ͦɩ.Q5LSLaqzFI8ן+g29bUɄd6=+e:e#d^_u; iYgC(rms{hKUrdKq`hqbϯW"d$V.;A*rV~{r| IGChLgL'*9;=ÿO3% ~Ɛ4ْK1uUS9ưppiV03>^ȹB2`5 U0d+4 eZ[Հ1l+pq8Hٿ$MItCSx(}+lfKtWζ{87gf^f1JbF39Asg. oO\Ѳ4 bTxcN S#GH:Ϗ|l+F3#a Ke;M +{JwQ dв]R]8Nwh@aPcYhTTG)) -UV7R٬:Q|[|$ 0l///8m-<4 ã9f7kv 5GeJHh8[F@L0A1=f` Wa+]e|偁+ t$'"y["9,8^Ng|'ce^#-ud\}1 Lhf&Xv43(5LaOL|(  xy5p1Ӭ]#Rzbը?u.G?l\cpy0WٸXd3\}h lPe0DU^&X!El^NPlr@{]8*͸@+{ Pl zs!} Xyf"hG B]c#δHTH%9Z㦜➽oz^_bt>g8O8 ɲttlЫdLXFTIN 84F:?xKr2Ӧx L9q(eDc+1&CHu9 o.3(u>Taݸf8K*fRI_2A0p;܇.^/ujTTK8ql A&`2~6щEu?F-zaߴe9P't,SÙ]v7t>j4Ó+ =6Jq_>3:X\ۨcc@e0:*vnnm0Z8hkV\cw]t3@+L2%K|>;WIᖵh# ']!s};O914/ACZ(bTZfe#9BG˅h>z~j=ԫH}`xD>v/JJ+mHmk pGÃK4EOUS‚RIS>r ̍;k$"Z؎a [)Ý׹8C-!cc}<n&='.!x;ak-~2=i! 
T)*ͫ2ۭ2rc(D$t?BJA*ERZ˒P7f@; aU Z3ap%`>EfZc7dV9D~ :EƷWSDX3m^/y&o1F"Q,>lEw0lWĐY"lkNcYЁ_Bvf6YGh'#(3 J]aY#4]r4ӄȾ`%) eykl ߖ>@R0+l¢XMy6kncDHD}OW葶\ݺH4VN9;#KoI=DǬ ̕BfRn1 K)T'dTl4ي08YiRfh:,6ȝՄ)P'~@mr7YC00"R aS,O mC=@ kwΡ:z$ pn^a Eqƾe-4,br-8>q[w^ Ѹl̍sOy kCr+iP` Wv`nr/C/cL1]O[NmbA&+Y!wUI!uDTU8<{a>7*?-,=ѣQU}1MsKe̖I0WˈT"Ggߖ]؏fn(;]r1LN-~Nn $A,zDUfs])`x1D9a47>|rȎ\r0i:?<`(-(U2R+ߖLQgo:IJkj16˰3~];tC#n:K=&Rm!UzN>jB@JtNO{>)s] -whܷ¥C&5M--%Y/LMk7W gmN"?!퍷>.a٨\6 3T煶gv#4"YCA1'g*41w91/p}ޯ K`#$Us mHƈә #g8…+FP^ն}\O7U-lWE7C%U;' S|bj(m$^WncT Ieaum%%/ڋ6sSCGe0MjC$k?'- x%5DzR82㤝O:MAiHbёBꌵXPN^wOFl A,J$#qCLӾ\.,֊C# 2B[~,/׹,/#6mQr1@0t}Wmd#'=e!K=ƃn[J JQ(錄3벂^SIr/l1oט&}-\ 9=$p5.xVIh9HN^q57tj7kc7*(/7!>Vs_'k!JF\tb#ۈy7V5?BQ4K#ݤ]YΦjYL 1O|q=9 Hew'ӎ$%έda :NxAM?1*5~w(^[V _6*^3%Cyf*Rq)nuMk4}Bn\t _2zl>bJGg{^MR'/$fcI8d6p׆MoWR}c†\0횾{pf6F z1ob|ݮ.d4q~)ʯ̱5앶ݕÚFO=[vXL2 >z?5~EtAPrP Y; Y5Xj6+3 l)j WY=f-叔b?dB$LoThe`e7 ӛ_H7Q?1 ַ&P7͝_[ͧBVn;!vO:"C0l[42:#_B?{s`͝VQ=,RwԱ(=ԁ߀QzөK`bKaA'>&U>͟3ۃ&cNpih!IEO^+j60Rg?<7РB:%F4BXb$1,ΎkA HbHF3 p<%w l c ;OLCWFFKGbF`d2 ;fĆ2Nљv)EWǧ6Ih01Cs3WwbpSZשY q .~x^%YN^d)laGz swKqg9C$/a7iی7oU;CLw6|oEjzsS i +k/~Ķp?Q gg>蜼@7:3O*Xas7Pޯe1~fk?لB? 
Zu8X2nsyf?7gtŜ;,::v?9ݣgNvp 8V~d<^Yl=PP?IIo*uӮqi}uCY}sFZ˯5(-.^%]_kiqPlu<'%Bږ"L5}dp9W1M\9u;`l(o>E1EdGLrt`)*267p-ko<{ .\~v>O3?83glr[ȃ^㗮EĠN{ҀV`B?p@lK@`HW< 6e9m~,1^PZ@P >MЗLjAAتɬ3Hsh4^ AvNzmQm^#iqٗɛܿm''Ҧe1dsh,̏&`jXJQTg5mt#;iVAp#Ԅ͵>ƨ#ΤqבFnOQT_d;G3*4KzF1s2FWF*ak˹eKըq 3 sE0QacV-ӹN&>N¬Mb4d8_Y(aK*,V Z0dSM;gBͱ/}*ܮ*c7KT,_R;6Vh6W Y%@kaХp_ 6vc>ڸJ <xDCmV`5U-endstream endobj 208 0 obj 7223 endobj 212 0 obj <> stream x=Ɏ%q"B%j[|s6J3Ci KVUʌ="~P>(72ߨۛh鐗LponE}UZKn~ujqjFſT*j)X·qzѺo`.OSl}vǟscL~l>X|/oK70Ƥt!kOl6./oY\xG:M>p@@ is iqY /1×)*0ؾ?SejheK>%m};l3sY*P;j>M n!$8y>5Oo_>{l\B9~CMN2`w8$b:~ؘ| WJ0;PJďb+Fl5̿.3{kY^3E*qMr,ˆyIM+ ^5qA:?pBԢ]o9/w\x N N[T'"0stQ1/wF#gV u+| JZW& paXI0%.&-_cVFiP捚Agy#ƾ' xwf &qQ@/n[YB#8PH/аK\aI>2>L|*˹EB2I/&`@8 9ueWD>˄zn?׼Dj!<hVB&rX4<1~=caWzl [v^ńIRN 0!vE8+؊p3\kTK8Zh2Pi=jDc)Z `-.;yH^ 'A@ C5 ցw0VJ-0D[6=MAfAmdGDWOG?4  d O>TSM\,&x#q})Vh)!``\7., 746˱`ht@P xa hOC"ؾ<zzK6X擱c[0;Q5sXO:+V|~"o*X2q^΀C;C9"0عpӀ0\'@&`Fr{\ZባA;ȟEy6[Wsm^V ̨+. 5|6f~+pSl̈>6zvx8X:zXk i ]Z^2*dрw;(JB#V 8tE).mE/CTLz_0Є*Wv1[C]D%u|!$Ʋ` ;Sl2 ?>Ɉnv2|kg'p 'u`cfᚍ=03GyN.$x634 Js@-̐xϳ5YbmAx q"JɧE"@o++'dY0+КDO>m3=֍^dao|H$zPc BIl.0RkLn҉| X3X-&ћtU~RLv$Іѥ m<ҸfjUiyg?ʷMTBu.:Pz@{)@sn 6{OPPc3m&'eX`4‹PbE.ĹZ0 |zR>Ogm:کI_ z_ӖQOD=ag2d7N2K qW-(hih,,(.$nUIX)KYPͰlrovxFv6X\`@jacP4..!KB>y{_c+<Hqf6DZNgSOsƠ%D)cpE=R;%HաeT <#=[0ɹJ_l>KbĂBx$,0ܤAK;PK7)hEce8- &1hqgK3xW q8T۶h{Rk$0@& ҂*HO.7Ə+e6j7SBeo'L?>DŧJO7Ҥ`04毝 +cB>3ɣhd*jvZ$<'S G*N>:w>Ul#*RevCUKu Ɠ>C-񤤦du˛H|c]T!3v >Kv\T&A '{`I6[R∱uƮC94c:'1`ĺ-w VB;y.)T޺O011_`^bκH`l g~*-h~B5+(P*~V  ~h2|;pqڍuVdutN\8 g! 
%B 5S(`YbJIJd SRpzq|ɼr3q;9D|idŘIO2DARR}vVGg̀C_9~m6OA[&™\t4ʥUr' A̯ 0VG,Zݷ/kZ C#ج2.]3quox攠[zWiUCHI)>%5hJjT6J?-,Sf.RgD:j+ɋ Xbr{ ~X 6hci7v<Of!zeؠ'Fw!0IO(sl>zye y|%2L<5zzX7KWW[ xk9`_`jo6nt!"9銵R@̷KG7.ίl¹ᘚsB#gݨOn@Zƃ ŴQCk4#Uapt֥ZW'aXQ:\BN$Fv_[>W]yb#b 7L"q me8.Fſy0ss<]}<}!o'@Hx &1 &09nR:r[D|SܠMe6D ] nwSRX 9㤓̧[.)&ڊE`*_٫wU3>أ,"I.bŪk^Ŵo~ Xׅb wU\g䔞%>Z/TA9X|\EFTG֡PK"'PjYuZ'l 69fx:\_{(6&J UtrAw&& 2؄n qUj H+*xm\p~AGJ NUHټE++.X/kIhbvFW?P]p FqpS+pvX0LhRD1jlV[3l/yOӺS`mja VOU->24`>az?scc ovsllv8(5q?_fk)X%ɕ+};Gxs0-{Cь7, h=Z ?+1=Ƶ+UQ5f*p`? _-44>ޖ<&F g=f dΜG_7m˪/'=.WAL11T_(C輆  Ig=Q~ UauøCLXVvcn'nU#p|.Ǿˈbŷ4-(ZX|m`nuA}le^rh菼bɭXG}Q\_/bǎy .c;!)kgZŻ,Tv{o@*?NȀΫE& xd0pR=M/=1s? e[xaLon_gcc;݂tζ$c0}&8ts__ I7gE_C9pY[Aq ^ [Jgt%!^[3`os;6XF NOpW%8ħC؞W﷯]j1LpffFI ACe] 2 <"Nߍ_ǝ,LdXpެ\{8W%RWkMn+& b pѝsf x/V׺heyR{m\R2@3~\4_ %'r4~%rg3V:NKiʤ98HuYAڋo-&lQxA~\2c=N/y@=F!̧NӐ/zjν`1K+nbl4W4/,}ӹx֗*-7j۸mt}KIT/:[}+Sp }?R89ZפJbqB`t=ECU& EYu6Yt$$yguoAt].+I Z H~5=ȗ\R{MQT,^JV$C8 ~z-#ex|FplX~\E/r_ < hY.OZpƫ-Jt4|MËDWjtNB_/{\]T3ξ;W%z0&ZxtbͣOu&QMΪ\&aYlY >]\!ӗA=Җpf> stream x=ɎuukݧVc_d Aci =lljw[vuXˋoAA/^]J^^}鏇|<8iٗWSN,E볬QrZFue|:Ysk/OV>څ6䢊\:M>r8A>MG¯o7pl{pq*lpL6kڇ7)zo|cWle .\@?qy8W><δ׸ߠO/Ug 4)p|Ov~O ژ[:Nɛd=%(E{D $o4SPx=!;Z3/8=-Ok%x}Ăjo7ā#P ֫~D6$*g1S 0],!}k:rʼn9_ ;r }t$H'U1'J3;Cn !g|ͻDYƜ%Q@!-+vU _vAΉxS]Q7WWiv4O D=N:ZDg >>#OMP(.3Qv$i-j>ŐXbS B^M{aPC KS,M=1VK~o .)BknhgrJv,UJO@ .vܮ= $+t"$JodrR{RN__)* er!JRDYCjQR%jv![j:T~nBTPDm ߢ$оqwK4%:h% 2ܘPCw<;fjZ+-G%UbJZgJ ^ee%-{Щ.:FE2k'|gY)<8[ c84 x */냹i'UB@xJ,mv6DS3 F+7Isîe6OOMhXӧ!GKo xReBͬuc' 1@` PA5ri6 ˋ"/վe#m L_]ʼ_SZ1{LP$ >Uy&)iq Lz0j-s<:6]!lTHG^\%{2S'4XyvV-`*@z\3:֬@KŀԌC](c(Pcg͌Y}d )GGwʵTέe J.ۨT., A ҖZxV|u=Mt.9&ޱv`(mҚ&V1[cv}~怵o@K]{3k%Mx|%Τ7 4pxճ`]MKItVHI*zC@\kJc"}œ4TJL/3THIiB_^x0YeU1'-c&"&P8L3Vw>Yg.0J`[Gbl8 z"q*X{Ŀgo;ƍRg=bFv`e,>(w5y]-H"(A|3wD3{mZ]‹s:ox؞ Z\{I8ØZ{]@Gѕ ,{R pmTk2Q(,Wk3. 
n*%/V9[G {<ޙ0K,[>R mքgoe2W6VU$v3IN t+Cx e`ۜE] cdS@][\m>Z~Nz{"sEs"LN|N+'+,[ՓikTcYO9/xD7\}r_YOk(R>f~V&jBEPD `Q\;'7qMr.6?Sh˫i7~\KK@%"чs ej>qF/} UBZ!C֭DYwt~0Q!FGy43ƦS`w\Yro䇀``:А[Ъb,98JPRP/NR`~eXmOդA; _Ur9td#%FR^rbIy"(٤Y޲r)r ֪^kD7Y I+aX`wWk`O |S|}暈-O¸hNdڐ~|!NRel]Wiu.}Sx~p V=!"Кo#|̄1b}m E@?)_csq#|־;.06̶@p׺9v*K CKa'?饳IiÈnf(?gACwR[rCHb+yTT~y2,TM_J/b#mb|fhc?y`.gcZڧOat4=s_ϹR./oxo%B"z{(lT2Nzӕ70 ./TgR:7r[8ɷ:7ǯRX1#wN_nZ9mUaʌYm Tlug})jJAh6SISS$:$3M(ĒypN^jY8^]WT`-OlsnKmӤ[<}0~\qeᥐ6WT:@ZOwdRD{ wʒ>O\IafHٝ\0kpwR{ws # Կ%PCEB<?$vMXjKԂ8Ip' }X#ZgH/Q%@+.N}]%+ULZHn*R-'-C'M|MI-2T?wICX|fm\t))S?4ّr±{g(4bw7D"JSϙR5zU:BݴhYvWi+wKֺsj:8\FU$BҹɇBG,Y-p?E˶j@$hi R#JDUR6lDLBDbLR=VgBָi/Y$ҹ4zJy`,{Kv58RSPpk' 5$ a`#0P@}»ʮ+D6ݗpAKVJ{ ,Pl((=%81z  #>VaqevIDQ(1?]rGH1QV}8YɍMY&Z-=.qs7el[gv6X.n6p٤] Ftka+(dwÍgh?7t7d^"8zy˟>6e0څ &~eVc{r{G'(H{l{4Dž.ikXt9diK&W.XM`Vi4|ho{^-as)o .g䯧m&~ E-Oĕsm w{TvSObѧ$$4^ jVh^F$]c&z&DGhN镋:0*'%t.N*Vf36K'? S57=WyTX$ΒڻO{< &Mظír_vK~D+FՙMH[=^YY2;LqavG. Aoq(ܓ6N֟%v׹ }6³"TilGHo{hA$c41?7Q՟DP\MR(=ڸbV H2K=z"V2LA"eIeT}A,;^;uS]`S]롿~ fQzIJљP[4]̹$5n3KRxTDّ ي~2k˧Z~ }ؤ 2dzpa> ¿%BΓzr_n9܍rI,A/RgKuv_m^Gӵ*c2+o"V<=cDZBkK.-jZd xjnB6^.,?ڰ'¼Ήiٸ mҞͳ an=#lg'øy͜`oA'x*͘컲Mcm$a-S~"dsC.ވDsMb͗w#7(n#7 &;{l7&1v:j\dA%5{#x 3 QG4b{zdt}om48C(kX0|C6Y$5}gB_DE_͉3ɞ hZzA]4_VְǴM8 Ums"-W M׍yC2ѝ\jdc8-\MeWip8k\Y ϫS;n~}FS{&$bK`oI5 tGV~g2rfp+.f!&2mzl޾0pfBB#Z~1]@U [Z`N[G%XfЖn%x,!rlkMh~MnIX^w"VsT2 $ ĎqMN ݏČ;uڴ)T˰d&ϝiZe΄]ִaXlZ[M?OZ6b3>,4m 6`>7ƽa {$;hh;{KMX{k`Ewj$~0ؼ;ϊ~XD`o'S~|6S?$rEEvOTV*!&cD}h}Tlm7B-[v9G3VT/N"4hZ1w<&@cx{6O6N_h:VZeH̘fd hj 805 ?}ZjpP 'KOC$@IetNG'ʡD}p# s-ah?͇l>BsEHGߌIO#:S!OPEN~j#$a1<]endstream endobj 218 0 obj 7530 endobj 224 0 obj <> stream x][m&?bmJ;%.?M.ʼneVRYd[$eo虞o+R.؞4?ߪM*_ni͍7y[>ÀtLpϾ)۬nJr٫__ڜƋ)?F r EpQN3NoZ=쒻_0gw[c2gsˏ1䣽|zwo\ޒNylw>`IB6 Ao03wl.d:MpL hsq1o6c f8W:esB`SOm>dPשm1%u|\dC3!h_޸- *tܲu)ݾ1Ͷq-1ML~u4-Ci3yEIg.Xݳ?^^ 9>q1هt.i}l#>Jl!>ههt6iܖ|hA C}Hy*n\ (Do7 C}Hy*)рKx+"c\(', 84&M 46e)S4Qq'SHN|VQ'+gFugu\7&+aS9DB6$&;sY)F|hpe+9FO'4.ep)St74H3BЬ: Fe0ڞz2vnю#Gr? 
P4&u8fׯ}ofh2~X ^μk ?o7%BP#S`i%_odt1cï`"Cza@D/̦-0:/1B ٪6L,昛ࠂDQ2KE!D.Ŕ w08g#v?\]xs=P3HHpl]yxs)Ņ/ogN2NPE<)Cc@A:E'ӚBQx 3.;J D RHD S":c>&`ɑXgǩ% W=Ѕ)C($&Oiif3E;X %@Z8v YzDiLe~Z  *2F3"Qȇ˝u$I()Z`a%с6eKP_lC_TzǢZz);H7BHnL# $V-Ey<$wMY` :̟y~x=դ-a2œ@((rYgR ɼ"P=AE\ JճuRHQ2i&QPdt@d]"ʐ5]duT=RUk2?]FSLB?Ew@/{yOsϖGS6Y^v6,ϓ/7\QvC*MGCE*zRAٺ pGU |l⦌@_L/\Kä-KC_xU؆ <@_x 0h]ֆDG/ }}*ᆾȖ7%(R/ ٢Kx͆x􆽰 6D^ kEv`Kz5ޖ^V\4My5t:MȫUn:j}~G^u#J8*" :Q ! F zƚ zOЫ"'UEW/Ы 7a/+d^eB 1*lwaa/96ʜD_֣ C }yr5f 1PE_G>N/@_t *:r 6Khr^/(/+7|AQ4u٧p" |锨cTŖ/(}_x\ ibkR/C 5;%%٣/ зRڎY+vQoȎk;ЮoǶj;i; >}RTPѷc@G;v{̓wco{/=og;vڱcvCoyk;<]C7րwI5.@ {i;KCKw)Tur(W3 52¯#(Yx_Gj_PT* x+6lIuTNKVZ7ih_K_7/(Ğ ~A5u O\#D-QXԄ/( YپDQYoi/&(nA׊D`-hIo0H%/(~R@o WP*kDQNoFs㯠T%(g~/Q}%/Qɪ JAWWS1'F/FDR:씙R' TiȁlH*#7(U_7u}}J%iDL}/Do:l}| RzߤZ7c:RU|߀pNWKbdݙ)ڡ6% |]8a¬h$T=H|}LO|^ߑc;ķ`H|ujEPK=Րd)ėвԋ[U-Zė}^PDmɯTI[( ~Eb֒_,&KͶ/b;#wDGߚ .79%|C_`egP\1) }5nKwo+۱oGցG;6[!Qo@;܀^~;TuසNߎ0 ~;x !זb1S:ޙu+ 픎u mS:7N)e5=k6`^ {u'jpT!~ty{Z ./PAPEɋ/y1De6{}yýѺ$_٘~L[95"M#MRtyM~*<`ďuL#xp__z j} ށ7+MS*5"tȎ%b՟,67-٘d谹`81ubxݗwԵj@=|!v歙*{ JK=M Ţr&;xM} Mun6b& \ޖ#s|0esfBL!z*Y!e򏲳]^$p:bl3@p4.D7 9)3G䲅˝$N ># E6Ѫ/z&h _Du{?0K*C,Q3];'!G\q,=N rc<(MX=v 2NڴPoa)XV86&XpH5 &b1i d[*Ky_EbGWv*2'ٰ:5=3S!^|jla4abo V3a_-x2YSLy˅0y~QG/,h㼡eǭ>B Gh;kqQvn gOr P3k2 ӘKM::j9TKbxhqJVaK-@ku8wLrs;Hʋyl?c?~!vMżl}ZcŮ ]8l(m{vL6]^`LD,Y+|8 {jC.;{yvG$iOۣ(RN'&hMl{ :DG2!##!' 0źd#q*#1G-UyfmN4~ o'9olDfP%90Yelj23g<{QmAPh{-PY=d/5tY'Ev1Ca+A)#x3軧'^g,^.0hyr'lXŏ!}t0\Jzc19Ҫ4(cuCLd?\90ƻƽUέq?:Vg /$'F`wt?sX.9o)XW햙4rWfJs? 6sD. 
m(j3j:?9-(:UJF#x%ܡ3fsPOڎ UYEqZeqL'.Y(>񨦼Jd@*e۟i`=Duv2!P䫶S[؋F"39QKIļEY>8>sErR(%p xܙ <洠j#yS3IcxUrn  j>ZUh( OsRY}Sޕ?WIjWk9sVMj2Q]dl, ߖZP)xi';Ս ⬨;%ͨ 0v]b-gXEN4]k6*xj8Z$")?$ɬ)X)-gćC1$txbH1R FӺ2vգsHQsU",0Wc1' ͡RK&Ez:~|P7%ՠJDOyrY:/4ب(8^aᅣ1?E_ڹ+M>ƟqguN]D}9k)QdJgka) 2j{\vĜqV" +aݓHwYq[Tg™ً焴M{?4 q"\C@[#½ &)tp]5q NW-^"qar==->8M%i^r/l,j?X*9i6ΐW9l[?r'Zy;OnAO"k !X5-P\Eu7{֛.L Ci*B܇pzU,eׁQ>2v> ?;i*$gJX!a7BtdD\Y348c:qtbIB_D M=qgW8&%~P0$oɪv'S,t;2S\P:%w4Ej6d5l*~GΝ IR];?K[Ҥ+qBOex(ܒk3Mgt"JQ{&[p׊[K[@U\}V{>P6ogM:gǯh1kAQOnӯGv 10 LSőGP>>aF<6ݸdF '0ĥuچ4Zb)@&2\#S^ԜYԟ$9IWniFfSNдKǔ]oGIEw ˲96FlM7s8?졶~a0GQjDq}:(PF?XWw'UW1>{ui !Lu[rgq38c~YZ#)#drͱmFe ՈJ) 4G*%9eRN8e BMi`[޽N~;ȻSDﵷ+9Z-RݲokQe]+lZsf\XȔZBu IoV~7Tզd=/H3 5ñ7=\5wUb`C!EO5}#͐٭wv%[WUdÓN*(*ӊo5Gtk kl{]E5WMX'W&Q4YۙZ5" 3O+0{ӏZp ?Gm1Lu8cBA #ъUg9' BV"oopow+k)ٽsv̏٫~Mm&2>3 DA_V@v31ҵX7:M&* ^K㜵ͮ=Zemy(61|S?=i9#]O?}vOjE CfRb v`]U=7>; qwf{bcBق.gWbPR\8>*2FkOX"鋢ڙvx(~ ?>۝94d|7 x@D_Ouq%!evjK?:<s:=.T Il/H|!nJtB;G%7LV\xi]z*:zL9hu"NjG;yhm9XW0)stB9+Rxv'w81xZpL6oXD>yBⲱn_Yoz\+^@.h >}NF.;ͳT;A9rgѴ,E,75uCh|Z5CLJ<[?ܾCJE/ĻExhާ%Xw=KnPP7ReVO9`Pe漁E DѾ[h|5螾q=# '1/Mn[/ !4 a{7%Nq9>>^eM6+quJsq+Vܺ޻$2o7{VėcJ#EZ}9SM7Z> stream x=ɒ%QՅ_(3$E2d!j3T/5M%2czYYVdkk_o>^7WW H`JԒSN,EgY]GW>8kV-ƺ2>A-֜9llȧh+Rz4ӛ[93 Qe_pYxzAQ? !t?)|b"(6jN_$6d? ^EXc< `^%{q[[q|`U[u!p>O{l]rts gqI?HK &chPaj[ZwL뎽?}38 /'sqp+b0?489pJ% GWꭷ¯ RcȐLLKȫWɺbCrẋ|&,T"T' 5 $. (t%:AKc Ť'#CY7r~b  ۶6wKS;E5xOSҌB,&c2{ 9l *Ljrlۙ0DRZ1`;|;y*wE+ش &%k!EN <LIa}cS:k. bF\W~0;qW W/tAc4MU8|en2*-3"}l -A [j)u>x5M9\Ds 7CW5nZlTq&s(vT $]a"y%I*uf/' TA%`{^8gg@{}XP uc riY tp&obux+_3h'a轞pzVK-#!#@?U'Hy8yR.!y5(&l ϋכSvlɰ1PGBUIOuwB!-> ͨwa RLp8nl4d~`?K*L wjz`T l)0F>_F* RA_D*0CgǺ Jt+==:1lS39i&S|j|z<0VHb7h!ցcx?haTov;?:p0ɎtV!PJ4%X[ ݀<3 Q2Xya<`9HcT8 =AT10**>l: )6Un&$Nc Th(!Dub6AZL :r=UK;!Na"` 켯*EKgNdЀDul.sMq\yUQLrZ*CHqtMs]BAgiYͫr0'oi*`'E[S( ^CCW.Dg:K{7VƠ@eiID <6)"4޲5 O "-*TmmgΤ0! gOi*7^i kdւ&j'$P'[tg$+&1q̃O gm$UxkMM!Uf7$d+A%z_ mPNt԰GJ H)@f3e'{15 Es  ` Mb៣#|f,.(a@2NR)YULɰtˉ$z({|qIvӚ!9@]- %5-ukоD 3NaڔiAa[rVq=2! 
XQ@ u}9-Z3Wh9(6mnjZeSSzҜ tIUw yA??PgHCbc1|p[ ήZ]ڱ jVOniS&WX"6"4Ъ_pu&ĀY+-ST*1imш?*%+梎ӽ6yGX:s\cҰRv~0:^H`.)佊WaZ2]>;pg2j9!L5.u=>nDl]3@VI1FV߅DjYIYlZidrXM8(DGJM)t_pI+FЛVoAh.j to+ƨS'i$NҡIx=Tk*zEtҽr+-&Ucf3]TO^\ ,u+u+`&6ӵw`Wav=ywi!D0oJvE)F/%BFRzi!g8:h@:p. hjJd[eƗFdf(-VbX3`ksZ#I#wxđ_3 8q^þZk0mS O4 <6eHZ D'qVuNj:̲u+ OseABpͿqEЕ,{{(H';S+b) aƆv.‚p5ئwsoQ gپcyάGPc"$vq66%bmPJ_ߧ 7zK)" M#FxIUxޜWA:l X VnvMA(pbqcxnڞںL1FL]0fSTL{k+.-w5 mwr9?W/@ڈdDwԈ6X^!8¼9& Y ~V 4C|YՙEpp̓|KLDZ4O=Gry0>2>iG:zꅐrJha tSœU^13\z1k Ujx뺷fa5&O0E'te!ܧ(w< $Yxۤǻx=1B֚du_.`@K:ak1(gk gRtvS[b̑]cyU0W)eYp;a|JNq֧OZ8q#Zg6l8ON–V5?!C+27g5nv FUuNbGʨ Sա<K`o*nM) /|)frN?V*r# jR屜e=3}lS7^ɴt:v_F?/MeSϠFYa@7-7(ޗ8Zi5hXXC6(@ى5G3,zuTT8ɼ4uc:mZ~܍A0,S)]p,}/ ^v<V{J'=Vq;Z4uxP4b/',szqg]zjmΰ0žW>cݫf2ݿ)T}؀K-pgmm8ETy/goX}O9HV h7}{K62frUc r|q$YXWޗ4`LGL` Ul*,v(9pDK>q*śhKي2)7qnJsA: 6I=܅i_ٱ{&i9 \<OJ 1L}L#^T1 + owQpZ.ǡ P~дI۝D'BZ#vg<>_a1apA>207tKBɸ;!i,@B{It7 J㤍ȶ.{UYYuŸPTAOLnc Q?rhM 淫uSb mcvADlG*W'8]>1 kL:X(/eR=O%6ED=I[L!j-4( n?mcJY}z}o,H`;FqU{ɉ hݶږ@2<$tF6ъD\3u쥣"^?}yd|U9%CaG ԭAy qV@s$ѓնS<.R 8_,_2*Cѽ4U=Ht1\co6XaX͗J[>4N5:´ՕYk\b^c$4z|S2)L1䧾{P>bZg։⛢ZeL|+E'EM>)Od)0G^O\-!}D4-r͠Jz'+CUy+lkFt`gJDdko%`N'!}b"F[qhR**W,4wǑ1 E[3T->L '̓k .}62㡝Ng' iMMr38¦I̹@'Y#?bUBDViPq>"X+ZhpZMNLŁ6Jɓ,TM8 <^IBD^& v)1lͲb'dᘜ|uQuȏFrK6Z0_nLiUcY^- ~%S~Kji> stream x=ْ%Uxԗܗ0r-€-̓HBH3X,YUdeVִ ٷʜ?>W_=1W_~&>ŝ[; ¸ #>i/p>s)7 MSv87k;R,N' Új_  v16ȗ'|`Kxt.O_hO`%}Ic]v]9H:M6fp^?vb h[3de9G0ٖBr5H;!78 {s$L95-n^V.yz:qB",E,,琯I땬ljdb 2 3DnLx`bg>ro,5؃m1κZO_ࣃ#fR9 󁢩!q e}k%*YD`F}kmkMi T Fo$<۶'Nz}9Ŏ 2 kD ˙ȽnN0LS-L!7(h%KHXM #{#/LgU{Xb?tq(9x _C\aDԅl -s8y`쳶[ؗ 6~9NT!/Nyk8X/_0_@IWs[$^޵ݬ}Xorem '3%i(vDK*(}2`*YKP3} z]k=! }(kk糶Z<^t, 'P'BMI!Y7] -4<6[ .i!]b. 6#pGƄky1cӂrOKZA@R0YZZe:yC |Z+3?(>RP=Gy#]~M2 0ф@p2ZXkX!"(_ VÂ{+Ftf Odq>4dob^q!Vh@m߫ن./f"W<9 l 3zˇ\<#>GR.8[^.p \bCg˙*u-ilxeJ!9YM!j۟} wbF7;5{e3DB@08o Іz'E_]rΞ 6#MUۇxxL ~ i^M{ltV8H>&Ѹ|hTZ혼懶^p9lZi!7b)B&feoB)_FǓb@v/) /6%;}nPBy`~|M oxbit4 $Ωȡb+{द,Ɗuބވ,H NeC\ɷk^"p`QiN1st+[53*so 9nX_~'! 
̶f0,0<# "x._A jAf ]q-B5ȾV2 USr鯞Yy%9 xST3K8෸dV1~a79yĸON%.]\RP*zzImG4O s$_rZx='teǽk:s'Iho e7} ؕMK?eF!Az0AVx i"E5hVXp%Z ^]PrY~ګ͌nX]ey?@q1{$0)#i5PG-̏؍Y< h_Ng)N^kFzRbկvM Y ȍ3lu1*8KCpE9đ;<)=@7@qcVgC 3MV~^nqS;[pu\{.qzWE@g4 Kk%lrh={ח0zYqr,H7:a9o6г@ݼ(8Z<50v`;*T#\L"3o1=kw͝UCGUZvqHa[^K*JBAF$62NB{* 1GDZXވ)#;{H)';bCg+Hs|xlP_t7a`ݿŹcu>>0<߮W#O^lY f2;|gkƉGv_ZU Vqf^ 춡 LCTAkKqd=>hbNvs|5j9"6ิ/GtxPI|I/܅`?&څV{̮4$3+:e۾6Sƹ"}tG`4 _4,H+v3 bsnc $Mn:'.#hT‚P_'Eo[2p1  AXN$ &0&%27u&.DKi T!Jw(Ky ѓSiAu0lL<ؼu :,_ֳI}ai; 6O<o\TA3N54Cg:u2|gwÂCjZO"rh o%Xg&VB&&sWz*>% v3z/:r;YcBD,?}{ɯqQtPz#PTA)ƨy FZ/%R˄jm]֗c$SQ%7!^nckrRierYƃLz.}Ww0~&Sl8Phi ' #}~gi)>j P7]}h+g(w:uE_{/ b.0>cDbŇG0DlhTIyS6]JM~جBY#;GrJ,zF&@[6XEMTX&(c.몝mNҰfTwux6mmlO ݛq5:Y'˽iɥ) 7 V|,ֳ"<IӇ"a았}GJ\6x~jV)Tq):j`hj]'7:v <4Y.:<Щ摝%*7àMD5j ,i:<-y|aB{c[d# |!N\f7_sfx/(c23s-Xw^Gf#mT8ʔTep>* ߻)&.%+Wc A  5FE# ^w4amfoZ#ETP8H9Xjil͚X#ޏ}n&λvĭmcD7z>TnJaM^1jSx 0&}oDBoC cK5 p:ŻV{, &+uq&ـ^@, Mf:biRepn ӠyPM|Җ9𒪅:,ZHA7 kiV'-^ Tϥ5s՜W)悯8mOamFhqǬ;YcDY;^}J+el c%CݭΎſ,ݼ ujRAh1&)-s$|r;2J *zr,ӄbw~rOS8BLݻ n6lQ(]&kK .1VwMW_nlw瘓VkBZɘB 4 x&Ë2=r YٓJie1+꘥B~Mf˄+KRAqu-Asy_{r 1ԂhZ P@;uU蘕r!F"ӠYI )2rHp9i@p{MV袺QyT}֚]1-v"=_@r@Ӊ% 3IR€Y 8kX`/%kmLSx"U0"g&/*+ҏgm8HdJ-$oތ[ܱr*]J؛4V<1r=J,Vtu%57-D 1ݹYHacYi.W?E+t88Iȑ{`$lib! ˦`/qpjG!$4k>8,9}3?D@)^lEߛ14s}Q)>@~FG<>de]xl8VL6k_m/J#g1^(xd&I!`4T8/wl>נcUaoQ8xr.Cm]iby86xMRnkbq2D7WlGKnh,ժ'_^y͛{f ^3 Dെ bu ֊ّ6RISHaPXTA:e :Fs}%3Gc^I {>nv{^dN:0{x.豐iYk\0(U,]" @i}Sk& Z/x'Sj6f[m29{O;^녃3yΞXO/m .,b.4''Q'; 'ývîFŽo}0gf?8:71Zt 7 `D n)y*?'6o'BUJfc-crgd*,`Hk2ڨ׷-4,קG:fļY6T'r?;tsZ.}/u -TW42gSM}, q[dEDr1hB(:a@.㊈{ GhOߩ8#L7[|ix2+HG=^{oyoesַas1$*UE/oL c" =h!Bx$_tMM-l\1wpܙp:Waܯ~ԫpEFEԺ1z5i"[@(N,hF_fA칸ǼO-v$lL.0a6k2)`U. 
Z|>!7V;쬪CyKRvvHvj)>v<*SZg]>Q7r{bk5MҚ݉<>ZOD&d` TUĒ [jo tޜ$/yOQ#z V" D{kcz\EiMjNwUݛIU9mC@vڥbW3;x,LρͻNG Vڅ[>ױ0okPE"rY1<(=Ŕ =m55MxؖA\) 8g$+dewo\5ӵyp,sac T6^~lqZ߅ab8Dw3|Vu@A]*/,8Ŷ[O8 q5s/Fė2Ft0cmunN"nY`y3lA VԌǁvljANk0ɶlwU BT^6:?11涺2[*.EL1r.N,{H镟@o/z#9|\!b\5&15/~yEJ5H+xN!E}&/>ʥJf>;6 䉠[[H9JҸgWK..NV?W/k=PMzu ŏ9Mu=TʠfQu5ra.ldRw49}~v+2?8WK0"t;vBXxkD=xi: ȱEF]<ϛneeƏLSn8G̗rQ~V;(Kч `@{O/^z ߯ kk2yhv`κ\0SLpJe<D U4]EH֗kODrm= *3 OC%]p8qlд9L‚`(S޳g`p4L[߄J2cK1BKEiqʨy0AXka%R%SW<%Aueа0ovaZcvJŠL'A4$AsX&T7C뿱K%s_tdV#HںX>%L~9I'l~|*Ep'}k%eL7Y'jX~U&9}"P獂 }wǪᒝ|1'mOO/Tc2*>\j1>5W_篮~\s<`7wEdv(X].0B[09"c/w D,by|Ԑ}•#l7V025'f%}mg#,Nf%lE8˻΄Cv7 JEaI&/D! ">ai10&H&L1gM3GDd1 P_o;y;fYgo+B@Brif ғϓ̀_џ(=6^!čvy{= !Vo;lr;> stream x=َqMSLHXaX!xHJ2;"2*"deeF}nԢoWݓ}o?Q7oቦ?w% H`J%`.uٞeuUZK7/=דg)B5|q6da},7^E(!f 4\?q[[q|C-jO߇x}z?Λi1<-S?#^Nh]0@?/;8 h]!g2VkzMu&P_sq^|`B>6b0qZYOl3{OGir?bVg03rbӷO?dc6|1~Cbҧ:DgaqQ uk6쟍#T)cݮ1s#:x/FX ^܂Rn.YOPP\kWNaID+HoP0 +rF!f'qLϐFTEgSTrҺ~ar6A} bdPat[gDG+S VhEk5ÑjBDD_ x4SVag;z堢|6FOl2:'r+`PFccs`ڎy81Sc "kLVuh;fllF]Efe[[n&GJ*ݒ'&-Ho#XIgCj\O#C]}X)]P uON-1hvxJ.G߈L`$@3yV&N9~_&&> e~Xc&$0l(SPsh;ūy ل_TGĨ}Gk`8S^;ED0[үCoH _=1Lߧz_ )LV@rQzW'/Rqe,;v@jglj] ygKy Kңvy3~ئ=O` :o[:žun,]5mXpx06gǧݐ9eYrP&EG{hͻXzW; e`@w]:W@= jdκat^X!6ܳSh['`Zi >7RO3NܭMZ`^ɰKeJD*a0J m qƄo}lݧ&Mʙ0%7D2^+; tU):9͸h1R-mpUa>%Gn[ml(Q%KK VC)շߪT]tʷn/$ܺ!wشv2e+XQױYF!"+L:ݗK}Y(Da#-۞iKLFiA>{펤%n瘭w;N's,ƁfXBYF%xp ֫Z7WVۼz q3cx5P=D22&aĝ!r,1^妱;OIN蝡&{e+ |l| io[ui  o zE5&X@Z憌hGА&nW\]Ўlk3挴bT'l }l@a`T0cPg2dLJy*1Wxh86C pWEݨi&+ݝ=Mu4fO+$ Ot°c8I_I@4Y@ʘ^}*kAO^cqzذc4w;S0N|3fT-y^fױ>1nV"1j8!=pv1%W %f}[o~DλV@ݪ1I..1<xj‘2 #R3+a#7&^mh#Ωâ,KXfV7&U=^w>b"RwkĞ>^7^31;,KY7E%RbNu_?e+zVcTaǞ 7=)L2XPs}ew,'WQ67[7Չn ^ʜn2!j_04a'D3;.coCTU"%S"YV_5bot|dxҜBBV:ziSx!O&"@jэ﷞ u+'^"2p4DUW%uU x]X%ğJfgXF],cKjhD_s)R.Ƹ)XzpbskiG \R)'k;YtSFu %t = p"\3%e8e/W(+&L`WdX̐?}"ŜSaܕ~& xj?R M Pk#Ν&R`yNby\H]ܒ9ƥA_OSݠ I`5ZDF eqsFcȈzἙɚ2SbDŽ\W3_rW՘,0r$u~PJ܃z`Dv}6`a/)ۅN[ -"dgc'EpWqVZږB)T cӲdjoJVgK0뿆r l0 Z1y`-:8ԸU¼[)TӛM :^EIG+T 8p8隤t>K.L5ݭ*aW*MnqjZU s1) PNgB< V'{} 04I㺡vP3՝{֧, QA,Ib^ޣȏ$JccˇTfOVuuޑj= `LN{I.]Yo@GSe\ 
p~=JLh 7)\U 7ˠ@n`;KʢeZ39L:YKI=1R>Q E׆XǯaD 0Aׄ5yZC/KI26"QUgOڻ^%GS6)E;3%X#,֨c0(귇;G0Ɵ'zz,d3oBCWA 2c9{2,+.S2d NraePdEcN-#YyGrK{Afꥤl>z1h&yEkw@&X-hZy ES)LcJG%ޗ~`S.ȸ8Rh> ?g?y&#Le?[5Ư170(Ɂ駛! 9veGI:J_O="l$DŽ4sM 1(Фy626 O}|-M}Y:ĄF* ~}*6#r%`*̔Pӝ3szgv82q3oK]dk'.÷FRg[iSyܛiH}=||DkVؤv(A&ys(tZAskS&čk:R̷M ;8@ⰹ!*K7!88ͅh@4`rsN=Qp^mb#[ ę!z&;hk *G%j8ˆpǜUc5JsՇf留C\ԉ$(R6X:ue~bo0G^%/U0p-b:shj2e`jF)BL뱥`;jQM[yJ==}_]r* BvGld97 i3zzD25Ecm[jZ.-`,]E_jai_+dt8eǦ`FAzFfC]2~gڲq:./Ǫ0 lOwP!5~\'8jnOPgָ:WOP?%渌][_=n2lLAG8ltBCzx\ Gqml(c-vS5XQ?YSEt9E9#sh4H8TQѐ7R6ܪu+QSZ0v58cƮ8\I-r¬" -gv}JwM*}= 1|,y #౒}W!6 C2eede/B,/ulj}( Rim]9‚H&=^d~^Ab́.]_E0)m)]Vf ,S$M>-ަ2<F`wCҠQuҠvT5ssb? pc誟[;=&ck˴L£6-`>(T6 K)7l1uuD3Sg"RUM{sG|QCnA S/,`I3cIL@lflZO/IQO~6FXINX9[6bIeڜ%$"$#4bbEזt]Ya9Rk*'%w=XWpn[z[f3;d٬6Bzr؇Ng\~tM8&X@ !ؑsY Jf ^v[?`r4Ol;:D i^p]>ކqApj*-Mok꥚aݸjf7+kWh^ 9X˴ g4*/)JX]ॼGTx^W&H[B5~*1Pg|4D[y؇S8Ԓ++/UsΕ{O_eElmPPŶfKNttYsݦ%OaƬBgX4k)jcQ6:# FFBt,qA.f[3rUl2-[JEjt+ ?Ii*onslވR4xWw~m޳RH8&W^%D:% "Ec Aٱ BK1}4fEgqr j0lWKIz'ECo)j4p! l2~tkZfToQ7rTMyUv5I=@᙮-B,\[|QE th>߇|S L$&I\²2 p1ߘ: (4:xP \axk ofz*v %(H&M`?6*I)[{t @,XQ$~Fkd66:=F،EWM )`bZg^wQIȩҖ m @%]V|nLf\+fFleQX7צVDwM8֩cu ^j )qzg26;Hdž5 ,3 ^k}X~K:GN5wn/ivV0QyOcAxn "p  !:ª?(G X}u4mwk=w&mYQ*]26RzMBh&CkIkKQ@J ق LShG5q8"w5~rťш+*maۉ<,7̃DԟՇcl/nW FX- 0^=LNP FkM)N%v# Z)|tlE dhWHe2URp,h@#+HPJbXtV &%J+ǭY_SYn{6M`6rvP"ȫKj/>yJ uÛ #Esp>4؍wA=lrGqjbP2VqX"Hԓk0Ph0Lek7XCAMz!lD}DwQWf1X qv=ӌKƠ1~b5]]9CxOa`{McXOXyZ5MϸgTxwN:S[\Rb|s(0G&rM{iF"Rko\uW,C KwDdHʄ5v}!_,/@S.Dne7ijHm{~ZLD&7'_zy|!:BDsW0i&m<6Na땋/LW"[o. 
1VL> stream x=ɲq _<U"$`–|A Z4Υ;kyD0xp^OuVV[}T-ʿ_|䗟ŧ=QO_??>-|߼Ӽ`{~ӨҢ\z'8;8k^T߫%`f(:?ݾ3N/Z =l>}X|K7NݽIB6ɟo >tVqqƟ.ŅO^ɧ (m$he0ocgg)ӿ*?}G\O->ˠ/K >\S֗CqC[A4V9`оpK~x|?5Nkx𐂱n6*B?w%${zر!~$Tb ÏCNp7W}:}ju1X}!(bV_ 'V=K΅Qe>#XOW| }keo_&GH37h@2-J; @;p إx؄x;24A:/ B`# `J=>R2g >Ļ^()P$ tG$m ssw:-VF,WodȌ?Ph,x"G>&x+%G8ߘECQ4 %MMeL#d%x^qXjU93r|}֒; >Z>0/xjζ&Le != @AlJ[:+]sPW%~& , !tMnd܆gD^V"*/[fxo,蕠+>6fԙx'6y4<䐨i"OG$Oex@O A.H3b1ڂ$aD`SQ`S,A{j%:+U| Ŕb C*iF_5~q9%>,bzx=~6, *HTZ >Gksdת gi>M h9:LP{%seW*aN/`+BHXdFcdda|t_rDB>HSwځl,[Nrs!6k$zu#X1̀db\%/K8?lzyǀo5M )rW"sf2@~zLKj% ,nܑs6%̑!9|lN1$-]N/囉,P!бV^p QC0E8pjWB\m=I (ӵ8ጽ7aOe12;W$ʑ';`_:_Tl!'I}I{C>ǶxNe.xWfe="ev9Z(iwa%p2T(- 6ۊ=Ğɯdvg/Zyją XZGc38!"ddAbB.t0ӷhy֮N3R3e ?0Eɀ# {aQaUH^i9@pc\LUynQv ūW˜B2܁D(@ڸ/%P㗉jzp*@_؏傧{ !| r{Ts+`"j$W<J RuaƢew$PYfvZǁ(#YPbok+8qָ[|q Y3\u 9K$Nv6c'Pǖ͚zIx\a0Ø)>X{]lX{g@ryN.P(6 ?0)WNU&(7~R>36YE5 w!5ggs1|8 زCwaK;2$n(#&=K!$0RY  6yܫ5'5:sz\87EZ ^˼2URQ؈veѨf>$ZW F9`CDthbV i\UqHc }Uj`gX]g+ȐK= aRŽ=aٜh滵WIxZWŇH<{^DTʟ:w%.OcSQl[(hz'Hz%@Ys66AcBO`\ag %X3:2mhĈ!X23:f|Ě5-۪/ krJr"Ft uB n^1IաGܔ^OfAne?HVd?5W y%ܓlI͹%_ Ei)9G:y 2A#ؤTE"yTȜKޯv#.z 7(E֦ۀN^&Jоǃ -q5TùّV8Z?mT@i pVlOQVijʩOW#t·؈`?# W0Vc.^g@ )[R4]W7}'+rd0ĥxiZW`P༤y'ǵ)YX "[4tIxEOq"K'L{@pIdݽѾ.FlD3%Lx˼ 1$0ۦtʪ>u{tФP p"Mh ^b 詝'`2!`)luBHVS,HhqP2vΦZې&]@ڍ"8 VV_ M.xR;R-,<"'4!0MA喝mg~AlwX <*٘IYwsNXϫ#]W ciЏDQMHE~`s}kLt ®GDV.}`ߍMwTIEz_DgӬy)cjNj\bPUߢ;(ѩ"9,ԃZڐ0aQ6'.cxpl7C 7BžS& Lk|SGRs[c}PWLi6yϸKE:A!#Q"!x izN)lWh3:f B|%"߿AXnJyUizH&2ķ ů#48uD:J}[o$nanSLT*ᨖki7#* kզӶJ#(g'}A vxl=+g5 q-6d~u iǓa2O]|8VyyHo˹|ܷ"}nC, t؇1=;6?֪k5*6ω/n^R:ꑎ\l| BQ9u5Coj6oVspRF8+~fZ(3[%/}mW<.v Ɔ e>E?ziOZeaMj/͡nQfeSosJQ. l.Hd_"%NW쳖bthG&J$D .vDaX"{F6kfxVC M؁EY=H=c;!0/Y{ɨN׼=ExNMtk=^dơ~ I|%fT07.Fei5Q)sU:a\\5b9q\N[\&,c 7hEJص?C E 39AZ6, \G݌]ֳ{ntɜ5ÿeL8;Pxfg+G6&i}zX0:@=fuKrW3z;b4p6}q[i򎱄 я IM\NsevLmbUFa"3<&WgS` PkEuw9bT a3*>BmM'W0l[@F6}=6/WSb~nj63a!su>*x%h{GV7Q7gt;~a"5KW=zgQ?_{JG?km%xwJ !nK: GSe ؋;@gT ~dkEЧؐQAϪª:a*쉡IкVAnnokbS4RNj{YW^7P` to47+$m1 ^EA_upl+<6{ʪLg h8I[9x{[ou1*Ɠ8l%;t: X+Ʇ@ETxhH Յ<[+jfNJ#[ξW䘯wўmZ[Ĭ cd+A~M RΝ'CUU [*5Лe{4wELyd_b.)LL錡lcJ! 
vD-K×>h@+&}Vwԁ-]>nH:#`(>Iќk#[;/9"⁗(3he+"&JV=l֏&^[C2uE2yfp/**CA9Erٍf/{q'u~st6Nbɳa.]u'q x녇L;1tDcQL*Uf \rAxuvAQXwn12=P;(K =P!?U k1ۊXp;Z}hZOy/֚&FEҽHƒP.ykʨuG>5~yڡ ДwoO?lcY427(%q9ȭt=qphGt0혣ky֡ʶd21nl|L4 C%$3Hٕ'1MftŅ|m& NM{iT:⤋Qu!_=K0l`Cƙj"ld=`]Eqz!6 /E{}^2~Lk&>c;5aOhV"1Dli(v4݀x6xqW`T s]Y]:sV+/,t!͜dza ĵ$+8w޼vI&u!UF2@qy;htzb>mĸ>rѝ~Qt8C:/gٵ]6x|R6YXg^_GI?r-tlʂn+uw e Kt|QU]}د袝pޢ}zyRSt5hU)<ʕK&|vzcјnZ}^z+Nn14֣i/,]Oi[f;l,I"[3Xa,TosȚݮq{@Raw{ U_Xw\~fW\ׄHi~F=endstream endobj 246 0 obj 8481 endobj 250 0 obj <> stream x]YelH lYK.yá5V3 9h+ 6( 1A^|/ޞ'q<{=|y<n8{~"62⌗qE1xFap;{*F~7ŨqF\zM։QPFm ʨ?J)G-"- F _oM0_-PQ?ͳboތg h`pK e~9t૥Ct:ចQ$?7utA8FAfSzڗEb'N볓?aFcGe8>o%edqß˓ ( 81XcMe78yl#1GScޙi+qg(o]vZوbmt cfA"5~YX>NMrs"H/>/Ov'ONOR*?샇m{=B} FZoizV ٥r!_W'+;@ p-^Nѹ #{wyʯcyշGL0%m:=F',XXmm g*v:XOE%:I7~?sH'd x;c V9]? gyO<4LnNΐϣRјP鈸q 8'?g{P*m)cnءTqO xA{/sJ[ cp0 0~L-2|~?)ZX[iU;_~Qa =f3agto >SG|Ǎ+00&^)?~޾e :00jaGc@4f=F07GU7gn IЧ=ҫгջS0<0YwbF{۰-#@63Z5pD@'novψbմ%Z?86 m ]]^Fd⃃LۊiAm )Km>#Z T$ 4_qEfg2;f`6mVr6zWMqGkOAX%[1([eS#iХY14IdD#xi-){ FC&`5 ^5+W Ni6>ҝ<(vvJi =u=&GrtVZ!@FLcEhK.1̙Xb0 وw=b'ɨx+,暣d~-yD)h.ֶuUrq'͟l@2[5fDd2!Rsv7yVߓF6r)!L^ID f11_<PqhBzP @[bvO%~-a&"m73I:3=cKcp & \@V~c==sp9=0) ̨zF^$wo7 4S"#HDUƃbD~7LVm Wvh/OK-A4ÁaUyd(g5{}R 6`oaKCV92D  !*?|3⫁&XCW7d/@d ՝ 8l\% v &0Y,Qmt'> ζZŠd_viA0lwvjp"Xeo^3Ի2j$ Cad+7EqJ28FID0)(muAJL[M 𚈯k)K%K\<&kEF셍l,YˈUQV N!Lt:F:$ьW<'Aqy<6N9=wwI 8`ۡL_6 `rt1i:1cZ=&UxH]*`Oۑ=S ؙz„ll;.*Բ^ 3rs+,%ѫϒfރ% %Pq\R =(YVhN0+KkHۓ{I lWx[Q32fjeISKA`=imkSg'BELʊEq+‘v7Y2IKۮV*]uh;[Zi. ++L՜r9$ -?OOeKVX9dfl:j3<;1x^]!)ZЫ6E75DyZı7ԙ2Y*~2U]2K8Vξ/3M6 4|.Agk),S T@mψ[_"LBYi4WJr<mzUQйcqŠދkVT$Xkh:AQ\ogDDk\&*m.V& =&G)iƄH88'C7"Al/7L\W ?"_%rjێae/M 8I3Ps[Uԅ#Wv]OH9WpJkFM)/j 7]ev}f(;ǛtMgfr(3MY \+Uߙ)j6Vi` ;yIpˊC,NAF4@UxOD=Ğ)`>Kf(=9 l`K=)dٳ^n 3xH$neQEa"_88@Dm[!>` mvFezk~)&\> o:,J:pT+=81}M\) =TVgNH.v6MYa!ۃP`B.ҙЉG>fJO^]KfKslvKϧ9e}t0%>rz_j_K밶U~߲"05>\mZ]eɡl2.+m-UH?ucmZ {'tX`)S0yh0kTjn>*DwB.k{fKpvNUըWjVb7wZltGDlEXzrJr{Ǯ4q*Zsx\)l;W>4'!K?"uN7.mrSq{|$w*vqC!>4l#|~ԃ\]|5ҞsDs(ϐ$:6oXk"Tp_`[+ ܰovz3BL2a dm b 7?(G3Y !.H@8l42f3yD'W/Hҥr? 
`ս|~B4[nVL~YQ8i"tۃÁ (n Ћ;Qv3c_6`91 ╀P~h] h;yjbQ*!?୭ᥩH>K ?`r({M{~k=tj|e7S37:/]:+>\P_8P,M b!>"Q#* GW 7?υ Z>Ϛ*E)笴o"aZ.Avݴd:(B^1cwXGNW'] \ަK䚣n:S' ^VGSi<$рqk'j{7njwpe.P7 zy!?잛Go /pp`Yos'giSlz'Wƕew],x#[Ejαb_t^AvKz\*{QrV^^V N P a1&,"}Wo/=l?B!<8xԔсe65Os, dӥ\xƋ69t"DRXDݏIp.V@( h)HäApcWT8"90_~T5p.u^5QP{ƭ-}綷i>aEHxGS*a$&`qo#ߗ%7ЗPNSJ0cS] Pޗ\Xlw_wS5b jd]T."2?#mrLXC* ~ %AΗ/RIB͸\L\.v"C"?:nOr6il|Û8QoM@XeI&{g4V_Ebo'!iN]endstream endobj 251 0 obj 5800 endobj 268 0 obj <> stream x}[]q^U+N;tlᎅ`Ɏ=.ILR&pf,)H*7u7yH*W4Ы+w?~U#w=쇻=]J{ wU\]Z.񱻦}Eӿ\xwmK*Jݍy?%]hjbn7h%\K+]D/_=]\X>/1])%.vi+ŗhX)5/ܔshU>Z$?!xia9\C,_L3WXK _1)\hK@ϗ|K֑ťSR:`uR[|)4l5+CA mG~%|5ȇ\x~"z声Oo KrגL{)GPuI5[>Ah%ar?GY\蘻F϶sk{s>c烉EZInׂװ'gX꘻EOi33}ܣG+aK'Cړ3,]Bqs}k>cPB T(;*|sWb}ܣϘ{h*dZ\ ˞ ackvnm1s>cg=|,>%A aGث-a4/CYrɁ'8tua4Ay$vޢG3h!{3{>2kvޢG3h!ĿJ\ 9*zL?-}u?t mF|eL@ vޢG3h!?mGVL<|I2C m{X>j?k_y``B#hzI7<_*Z!Q!.&!7#Kڒ'6W2.ɭ}B>,'/wG~/7Ie~Ӧ#nFF2zIcKc-\s.M0"$Du"BƆz,DM",hJUFKQn!9 *ʠJR^5ʼnbyT`gdndc& 2 0q!4%M֮<%CC8-9WfxPAUZv- h̃7Y3I]%Q1dW! 3rF^O^%܆z)p ̺8 !!VU A y E i ^#qCC#F>yZ\k>ٟr8lG##X6rKDlCYn<8H0“i[ ua@uuB~ 0y-8G 5 "C\ăh %ZHء$ny<+MFКeD aC, N9&{E$*o/,0H!c7I0x1&CK.֘%aD F5jHA$khVy@H?p0%%C!cr$ǐ@ONԂ((l AhbB,˄!pQd&$"j$I8"nA Q%2AؕGB|!AIbA"G}K6 T0o 1~$t<\$t<\*1ry8&^I؄yë`cjY <,<9;xRtq;EFKPGEoIZwVְޮ4 ӴIҔM !) 
G')ɥrI')͚!$$Qq2C>Yu% .)waՋZ5ToJ.%UX%GB@ZUX eV%Lªꢤ BϲJvnS霕):g}霕IR @9kXgD*+=Z,OEi"bgERa1E*Y0:5 8˓/rVsKzr(\s51H#Tw=xnZUux/몎lOeVW]s:p_$We]~Rpbu I4xF~"PZEDH4˓]"f%(z5*KPo$~ʋJPN~ wA & Zip|̚ j-2ؿ PقT~`P) 峕2!T~<^V~`l~ݒʏXAOy2boN#X|q4ݼ96yK^خjߒjfzlDn"[)_'n$Gns j9 >I0pKQ30K<1qn2hظpO.=9:*]Z_mv{CՒ*D#F+m\O `7zu aܻ)vwE]߅zx:nqYbuwdԦ;;bޑ'w!ػaQ;Q#.AWhw*Mˬ4%I@*j.TWS FLy`A^pQG#4*BF}q{\Qd$=.I4qI%N$E:R:0’$Ttbr r\,1$z9.LP2d23HJJV@F'3bIi %sD@UWWU(RfD"}@J 5)ŭ0ɁbgaZ쩑qIZ LVRF$s1`d-%k@J+UQntDJ%"+Ik kZ#5i#$O#$wHRKURQw d%nPG:}EJ];B ;B֮BB3!+Qk}VȑjY'sPK0GF,lnZEut;@tX]t0X@#)F-NEHDd׌;zj!xlڽhkkh u$Y%3"hDrF2rFԞ"ҤQòy%i'ӓFPRmJM _F͂s ,JҨEjBѢ"j2dzJ fPe*PdJP{NRY hk eUdTJj%A+^fT$`1K%Ah%AY|ZIJY+ C k*^qU'Г%hNɚpMl&6křj0i S |T?\+jQ5Z!2-r0ňU8fiPy?H Wb8`s"gDs2-Ox},@uw[J[= ' 8kos)_ΐ˿翥Gbl)Fx3vfcm?Gԛ#omYv5i7,b}(žkmY{mYӲx',{q~yiWoz#C <d$g`Lqs98Ω3u #t }< >ywwHƿ@9ruF+Tk/M$y2*o1u3-)˛€7!*-L6d; pf.iDre-߉BzfZq^.0AZNfyBh*#*CHN8.ĝ? 2mp78nNKS]FHdt]b T783qpɓqNwUk'vm=SCs;͘der [4/r ';xC606Q|v;nUy{8\Ȗ/dFcp0m yOtɷDf3q%Y\=Ugd4j7b!6Rr" vK#-w>aȏ;I)5'~ཝWPh'[0!S9O"xlqYcZ֐66\Ĩz}aie2` ?-l aihb0g Mym19ﱝj^ l38h8`lK#bBKo#F-[eȼu"JI'Y{<kWHXӬϼ#ݶ3l#׆_7N<F΀DiL6%sNoe2-3Xa<-ۆN&zEٵo`|$YkG=wHq:^ Sv?2Yqf)-C3s,qqؚ ɘ{Xt47 |a#zZTeXV"y}~L ,,ϱn ׯ Ψ|)h[6i o{&a>ʳw|x ܗ8jolCf)rgE4"n݅qfﵙnClB|#٣17έΞU%ٙG6TPu8k?$Z&̴=LGhcGĥ7N#ˁ⦵yC ͫ4k9vCM_ \r;o]XVf@‘=шdL1Lmhd综(n)7HOBf95vXpaZ {lt--n-c[?'"|5\*|g qkZaΞC+R1l'ޝh}hM7Y6џ|'9\#ʿI<=fd|; XZ@̊pEnoD1p2Rpw/< jU7Cޘ9[|`O9 gb.BIvQȪ }$I7~XOF>|WT$3X"M2y ޖ ^[>)/p(UaY,~:wBtC_e8XKK9(˹<$T1Jcj_ Z6 6CzL#kc?EUC##돭 6Fl4u݋c`D?-9padR(3N\GP\赥$gǜDpoTϔߗD]Oz奇aeuJEJeR>!Ap#'#MDmΉ(ؒ?R|s+ey*nLuW ԭ 0sb4^.:WE4a6Uqi"9aybL ]YÎ$ܨWa[ >3˙R/S4?҉ޠަ1wweQ#Q@IӐ<آ43||!%K #D R>K!KWY 1IJkS[^;E#=ђM~}B/9;?OɬP?Ѩ4Nǂ`Vj΋r803nߔ5!ҍNN|+K QF ͸ThZB0EIب5"L+Ħվ22˨]ϴďD0DL3qstZ2Ӈ՗VkdM^9.82Bd47w/7ʔ[ql4}B$UC Ȼdh:0U| g6:S5na¥נ⮼&Kຫ0<ײXtlvkawmy(LD 8l<1 }M"9"oƊ^s\H~W6 c#ƏCͪg|T7̅dq[(tZx qܵ7<Hmc81+,OS+ٖ ߱{O,nވ~z,2a?}ى=BEnȎ)Do;3ӻ5q48YŸoNQK+?;2n#G'W+.Ǘ[ԡ%s|!3]G{!g$p;O`T̓!l8!![>t{}| 쀥64scәss<IHfJ}|Q+OuiX=uh >/XbG}y&-TwgSeF =:]wwKnPbR|'B";d.Xe<7+M^c^2œ(s$1e1<\r+\Btn9axkRl[ 
}tD,ذ5YP׈S_FAۅ\9~k'|uvs@Z KHk?V[,kRJRg;)Zy{O0Af$uwQ p.4PJ!nVL(%?Z6F*6ֱl]Mлܑ#p܃$f,;U+oηVXiF拄=O:>cv\~XEBR"XPVƣmDr$J^c,K;Yf}h3qܦ!X-6< PF(|CQPwؚf6nM[8{!:Ȥ6Lxq4غ,Fod6is; ̙!'ScX7\<0QzoK"=\0ot\p@?ʰ=r}`axk͝~M™׏*RKendstream endobj 269 0 obj 11125 endobj 276 0 obj <> stream x=ْq~Wm}OME[`;’`.ޙYݙu :foMYy_|o^{n^>#M|ͯ/Ty#e7ۻnJrp6Ƿj1O.kbWRg;Zc20h]:}w^쒃FFӗ UQp'Ƿßr (/9D+(%H,]X1oev ,Nߚt!H V+=6gX9:&…޼yM9p=C>O?hŠDO T([ h|PyXJ&BPi SdDV/9|췏VkԘSlӆ/pVAX:9clܾ t< ,zX"|*1c&@"P4Gnۘ, 3|CJ}/\uX;YN|ˆNrcg{o :r|l0GY16!\`8?+$z`y_~Tcޕ  ya|o?/4"|!숱^ $z_WSx-7 GG0"nWpj 9o #-HS0$pLCt䳕;tKC8gA !%؀!ʑخ? +`yuc81a;{u6{cs^cnS?ʪ\"0p@d FzRi,p+1Vƶ\t()Er)Tr=DCʺxr0"q'`bnZU潫"/[('l;Tj,%j *2qR{NEXĨQ.;k@KҜ\^G5ނm!5:O(dJLိp d ɃlJ^@4Y< ߷خm2fdFˀ4Rm#^ f8}\h2=.^i2W-=6I&(<" 8YuP[ g-wg1Z6El]1cƒ!C'ȏ5'3|ytqlm2u*$E7.v3ռfSۃNkiU:=[~r*s]i7-ԩ<7;P"9vB`ljfz ⹱Kˈ3܆=&@dfF+meHf@ޕ 1nsFjTyZ"-lU "3lspf}`5X;`;wo?Vʀ 0o}a,ml#cMG#l

YA$dM4ٲ B7)>ZDxo16l2[1fU+3X%h]fٔiztbmD:3:խ:6u5lonƼqn2>aP3U^n"s ^;&'iuZ 71-fجgOZ/-vp3?J(+A~j,&ۢ{jfc͟\'yF@Ҫ1J@"~#d?H'edIY-Vǟ rB@aS*zܘ?RݟPeZy.0ͱ)ޖ*iY lZx],Y2Lo^x/7nC$?x6oȻ}$غO) s?~JGkm>/a2&:OXi>itЁm]Kzu}^~(i|0zgcms"'`V]G+h1T``OM/ȗa.:pBFV.i1gX`lcIf[seI aS_1Z-n# GfU@VqL>;OɮVX@E3|[&PGd5&;[a&o"ujK k"P-AV=Nt\ЎގoflkXO bNJdCz6*b7RRX!͒XiKZ{q^ԓl6˪)XU0=AE#N2O8ظ!*1?vGu}ro1r0vISFJpcB+lx n]Pq&;I#QxƲG+Nǒg6,p=ӡV*T,`]2jM~4҄0\g܊_f{|0# :؝_vl`mTݻ3TJq_J]T vG@򁰧;c,p)VߤGttP5 ݙ@/"P`ۙ|xhV`66̨3~[9;^qѣQu.J%.r@% ̛ ǟ L+-H';+¦{@P1/]#MRbRۣ1do9 Ž$Wz7^c%[ppޟWcw\5Z@&okeDG4䌟R"k[# . m\ S4=j/os;{;&Xz$YX\w"߮7Z \m\tՀJ26C8b`eEZN(+dך{*>or8'鬥Wp_R2WTGщ; Q^qGymltf Kj ()z Pm Ԡ].~B]6=U FJ5]dBO20K`2d1O0arR`BH"g֌Čm 6LnC&kPMjU5]$LacAԂ/\% MB?=Lb锔~q$}W 2"V%H78Rd9yy\ &.J)6S@ LCĬ|>qw`2+ ?$tLcK265 #-8{fDO4)/-oݚ]r#wShB)r74tQ jp',stqKy8)c^q՝?8sY Z-PdB?Q CQۄJR|aGq=4_+81*Sx^B&2t>N .m0<+/dT+[ihſ^ A2΋.׎4IMYt],H5 vcoMXkBnew\GYiD! ʶJEG۷|X+r*70-l[mF:5iкvWVP%]K;XR_0 NڝK,B,ՐO1N|_Ntv̌me!NB~HBd7[k%BoD_?=uր9s.z->"epCLҕ$$wc a8ƫeYUN`孛,UxOfg\ v 0Nź˦mϾ뺳;=)7PFh,lu˳ b[rS 3gHO# 4~(5-1nES;$ <)•v@&8 Nt$5t+(ɥ@ #O;()JPcF̃ʽϯ‚?*a փ~`ܗ 57>- }QQ|4 Bj *X`jCo#ගJpCwwN-#tCj{@sӀ0KIJ_X_((ʨgtyY*8r 3G8ˠF[`5\W#ZS1$SkliHT;fLݖC|$74=̺ Kɦf!Ȳ50`IWj~s.gfܬ 31'Xa `Nx|ALv1W,J$pSlpLvVmjBI^hUO)oI="dP yۏL@lg{끋%·*yIWv y1rdZd{)ӘS8ʆ4d ^$-(>rXb(hl=ݯqaQ=(x'!dTeq&R>>HnMbhÔhM0fè (TzZpbjzZ 6r]YFDDMȻ[+ٖMw&__"P\AدqE49cǾ'kR؃Gd*dI'[rA>yR4b!2G 9Т]9_j1nf^2#̷iۍy2&{NL[g6tg6%g! v:|K`Q }$jL:V.R/ytO+"p;]{\)Y[xFo~̜0a0a3mHFUL4a`_jxq5ʏ]c[ԣcw%}z3i6U m&.TeN΍رnT?NҘQ/\Э CxJ8 !'.)/M~pemqBj hnVTx{Q;jV]kA.ǒ]Y"m)ې^c [tW[3bOaSOIqugLxFg1Ri1E9ɬg 3 Q#’VS*ymf3{|y+\k`%ڶQ<-I}qF5GShQn h=sl}R+]0+1bi;YDI]?چM3C"b.Zc}KWaP X*+{u#ư0Ut Mgw0⦴A=&&or1i!pPӠY58V{NN bsytl2eGYs'^yjobT3)> Gvc&RF&,;R ̶CϫmZĕ }OjTΒuajc:5! 
ݲkޕ9BS}ŷ^}R3)kJ *.rܨ~M#Nڧ%[e |pjw׽4Le~۝:%G*,*CIq»6?SF(x7-%Jl>G+!S&xy.V:+δwfUUPAFz~׶m-2 ܤMfߊwP;Y^%"L&DNm+1,vu?|zOco`PDݶkyCU3uH u&jd22GQfSεILE X1P^Z:!$>+&DJjiDϗ]5e ";=i ST,3,;cʓW܅=y8ԾDRBpQ `ZtinF~0n*!Q-m8= 2Gs^# B+]),g󓣞5 ksSþ^Sti>6zm}g=G`ӝa-bȓ5xHK00wHD jB%K hoN<_7u>Ǩ="&Nxڏe<}(aſAm2IIߵkm(K>^5y_r:y8&9˳AYڲ;5aXAߍ~֯@7Q=k,Pv8 L䜟K8X%n4G"f0T̽Ad]\ !y5{HNN"aM*Ga]9 _u":*0Qy6p ℔eWΦ4?S/iֿ=e9ʂa5dIk4)ьfl! Pd =2*jq0FMߺǮKkacZdlR$DlG_bE5{Vo<) .6d]ʼn*2+?LB}{Wʵ %o;?E-Qh(U<)%[5zS.? ]04b][eDWS-)= 봇r?{_ &endstream endobj 277 0 obj 7490 endobj 281 0 obj <> stream x]YqJޘ?qS%|TV\E%-2_fИsRz\ Fn+գuͣGxxug4 ޥ)yݳ/4߅9Nw^=='gbeΏxB γ)Eoݒw]Xq˴,䢻|3zIvMO]CLc6]%^~޸4%^}Oi6k?WC09^q2i˺n]Mv/hM f6X ~;I4t7<<~-o٭ejѐʑ}H SZ. v:6H5_Z^o%#0xyd{ $]~wA8c/j%F'X{~4_ pCe!\x+/fiD=RޏB#+󊨐k|/x^gh֋1ۇm\pƘ>pف…o^M[dfq^gyۉ RN[@Fq4vt1Ţ n/@K{\[~77eB0E>J/gm nrRFă̴Hg # {r1B٫~*a<h IŀA/}C:u*O2#5lIaY/:JU)URԽ,׹p;a|4lZw)_W:SB#(F` 93`lQ$儫P\I->Cu' < `PY *Do[i'+)m\^gqhڜ7吽@S{˰e90]na+ߧ7u@IX2h-V:8Sڎgx$WV H8BU<3N]]o Y Ў_ xԐ#I25N 1ȓqE aDO[# v5BJNd -2XQC=#[r$1QhEmW1MSx!AGw<Hr+inB~\Ȳl޹l@~JFbB"Yn4y%q쯻Wdu鲻ixޕ YE^ȓqt # { e@UNcHCWnALؿr0>X3~7)ϳ)3/diVU <RE)qs'y#D];BН܍8qzx pP9D[!ʰH-х4! " [ /Zd .OA#H$cUxûQN> PqĤ3.d tjǖ=u ZgSqʞ㩀8('.҆Dzo0vކe |ɝyҕ}c| [u8fi-PEGs78|f| QBA)LqH!bZ-͝Ӡxs:,'T3?)I *`z"# [/C݃b>O$ۻ3. Hvy_ivifyVvI7x]ڱ(eW:DXd89>izYSfC?AЋE'sX 36Xr7,#h뀛xg?obHH1p?,GѬ.0͉v-k, V(+VuoyZG|(6YPo%ٍFݐ=< [wp/!LڹLܰ{`]Ȳ9L 6Y#}*e30s}HL*gO tҋ;%G^̊_ Zœu{-BTTu2gD'.gbN[4 f.8#ȉ7i;!Xe(=E^Κ<-(7a\LnXn`+1 ϛ"w:-iXJLeS=AcX2%8$dSt\(^gih+ w/#X@UTӳC2B  |{uTJ8Bb;ڑi UCULrfG]|-︘@[Ԣ' ȹFO[N ,EDžZsHUcY5˿†g2ųKdalST6.!wI. Y+=~ӆYab{ZJtvpO2_\(DH( ~l.[<ă$q7ĺGY.rE]qI`£.@vfȝbW 8YlHɟz;}8Kf9=Dq.wN tӁ BRsSDʉx`.F'990Ԗs-J*Lܔlmv$J"69 }ZXZyf1qI T(|KSMΆ Re =XLT"VvJfq'U3 ~[rYa&T~$z%gbg,A>%cGEOGw UoURWZ%[gZMHk`$gʙ2y фQϠ0MI=w~J>,KǛL:!9rxCZsx\HCT'da_8}e$}n6;D>-iBfZwoX.%Yk٥JmSA3n&ӺԳƊT$S ^Դ[t|2w -Ж. 
QVElTH[Pg0N)ɮF]s1GMOs rd=zJ0qјڄA.mZ6ҦyCژ,W WݐJ߉Ppώ8(.f`ӮI!.gOs AZ~f'}aA6\%HxNgmܣyj|l0MtC!Ć^?Xk>QЬwra ϼ%CU~my̹BR .+\Mj{=[nqPqp甔%uy\`6wx ~!$T~ dŠXivfb@a:.}@FUW?().#a V SPX{Lzv}LAWhD/Bՙŏ%{ZِmNUd=}fN=`[h;YЭc; a{GyE62tLIfJv'˧\$cDd}X6yPwcsEI{:akG!(&q8]-uDThtjǶB[dM],uШeMFU !jPHX/o(OK]>qqW$b1'-lU1{0wÞSm4dJg%; PN|^t>oeIϺ\ [Vm9Mtj::sQ͝@J`_ъQ|ߓW+kWK=z.8EѿrAg<5l1lkpXU{ E܉2p]7~6^ g\9O1d21D p^'0ɻK2#&ѮT{6rhi͞=LY_k=~޻0%sHښ܃'('Ȇ4:mY30&9uxSƍzSԖ ]ٸ#5oK/Em5 !36G%邲 KR>NCD9Q:>,c^Ԕnl(F`qT哵^h&&'r.֠t=h8$Uz}m<R=;V+ՠXIVYz;9!RN;zTI6BՊEZrP7x*I!M'0]I*6f~}V2[zY:uuA79:/AVR\R;Xd7ĺ|/^G.ƒAg3mN6ڒao:ur~EZ z}!XL}ZK#$~bշZ9lm\f݀D{nb)F`{;4`8\]V(ږ9}C׮-WS(&[PLL8>j E[+Uj{=7-wxC/l*K&Ndr%kԟdz%^F^+ar# }=rYQn`!UYwԡ&!,lUE=ay%L?6 ˉ+S;U(XAvj;2.?f٣!3endstream endobj 282 0 obj 7232 endobj 286 0 obj <> stream x=ٲqU-)]xd'r]; /E%2g+R>/7/~x7?O!E+n<u\fMTiQ.̸x4~2ߧ/7 V|zc ڿڭ\ٛ% ܉l9ic7Fe:)j |EW;3z]6paQ!13,P*RYBPĴG23~^M6n(*o+0!^btvMKzd;*D ;wUjɍםP NNTZFƦU%v پ5d3OݏNHkk Gs90:x}{d5fQN2y4v3: ^FnTY0ˉd%(`GE'Qw cf1hGրא7v!Ms7Q܀Kf[6њJD G z&7aQw<+t`d5SG&XT -[ؚp kU2ba5ӡh" ׀;mTQE8l[߻sĴ=x?Ècedr6y;ɮ (=L ]*due 6&drY0aN0) \KgQYrzn1zW ޘwIͤ./&]RL6OPWέXog퇋P'F;?-dÞ W`$#ȘQ"쑍 ߎ=;= vU|>YdQCKoշ݂!/,cgT ƈ|ug M4*&V<(Yq-lGXAC˃!ľf84>g!c0"$KfnS91 ܈6&%-\,ƭ# wŠq =95Ɯ;v4bG/h~5tlkbrrd;#SaN`9( 0e?pA [υAA# sJ:?ER"d t E1haX>XF(?/Fx.Agɒ7K'׳tP ]p-L3fC< msz[OcK `[IY7#,پdDX1JJVN; o Ϋc#j6|(bv+DEcǁF *uFdC H k!I'/.Lᑼ+2*iȫ5lXZHt3RNV 49|WсOYKݡ<)ae1q?k'~ɮ 9%8ň/3V&n'T0A2UMf=Ɔj< (// Mƛ̤fʃ 1]|8;bngwu'E-ld)^8|WTr,K#Pۮu4sA(jJ:.;xt.̒sf1&G5t:yE0:Yuut3P+"XmzF.d]zw.c WPdqvbx't~&uBH}3GەH'{^tE EZbZOgm~.svh sڧaŹ7ݒqeL+|㊺a?}ÄR^VN*tFgI2 6dLfN ݉UA 0o_ߑh/謴Ǻ+{;[hNW~Dk,aJI|,1$_/ȵ4oKeO&kb2^dȕgVA;3Sˠy!þ1I' $aLqI>xW 7PۏwjYk\c=ǫdnc02r.^S )#Lw„3cnZ|S%cKQdTz=盧ԴģE@rIvER. 
Vei}C5s (:71dӟ`@U6,ZūuM(2'ASQOmEV&&XM>o]_Ok=냾 X5ۇ@̚2^WA|@W( #*{$F_YRL7؇!m]#XY/zPg VoٴHUMm*Uy$wH6RK\ӠcmԚ@K)o.'\-h ,1\* @ײOe"7 -)Уa:A{_&(`qL~Ve4>HzTЦ'CyمOaz WܳO)^ J j:7<)aZѼp'3[I}]@e1:n:]DON<z6zXDRYZa((|>[&1Eq- 1bS ^ڌ0u m(TPEׂ+*6>2sh+bd٥3+fD,+zVVYqSbU[-7/ogևG %4ir_o6v-$´eLo;ԌdE+ Zӝ8L4YkPЄ $2o: }D9`:A"X%R~௛[M5f32bmO(ZNy}o˲ IRNn/妻<nW& lҊÁ(dCYrMq%?8gS\2NtƯ=ޝ:l}MNhq$|v~l_COf*:BlGrImfƯ.4Wm-=P.BԾg3vS vl sbܕ 8]{UF0w}+Btג"d6{f3D7N!h(P Ih,=|脝z%.iۺG_y#pګ崼 :D(lL| M## JXͬїMN`f> sr?-e8 mܺ"d*YE 9?4b- :J|,~o*ZYl6b-zZ"x˞']o.l*-dLH;YGp&X gXfW~IāB" YI-YG6gfvQ{L >PKn-׳ ?Y41;@OU7*,n"`IQTq2SvԍU\ʰhz&;_S.=~3M42s9< \S 1ڍG.4ZznE1LZ< aGdmdFC9ä q9TxDN`2`"3X8Y1kb>8ޘh-lK칕bG?Bbpy[6MA#sL2sϺ J}!Nau8Ps\ZZMP5řb׵%;29P|ٴX'x}%rDum<ӝ\n7x+t ѹ> {LP1ゟ -a״GPjtd muyjfVw6I%7ik)k ~ElqVZfyeh2dvYbSq< 4z;ypW{`1F(#$Lz6~7M0ؔ)ګEa;x.>&Tr.fFB?RU"eմE]awtX8ݷ j$kvUؙޥ]cpHoGC" X1D;71•F_Ө r@cOk_Y5%밗i9t}Z"r3+rݿXj+)k*7RN*hֵWO٨ N9%UIr_S=|ˏ9=>90<-9љ@膒}ѿ?5endstream endobj 287 0 obj 7864 endobj 291 0 obj <> stream xZKoԯ˹EGvI4NuE҅-ɲ`Yr$ǎ{~HQa4}xﹿ(w")=zo#sW:y.)gvǯE"„ۣZj?(>z}C~/.c N\8ewQhP˨m4ótA q@ L y:Qa1sotIga~4G5pt*T?QGGLf J eĘUnxZ`x82D55BHO?H|;9ut؂Z;@b" Aj[p;u]rTx%[.`jj-&FiQpVsfJ{x5N JG㇫,&GWU D@U ̝'>s 3f>J)[xU4e G{kz. ad/rJIRhP.2gX?"ٚj%.ћm6!IYYx͘'d&![8C鲲7 y4QtqPNޚsY2(8zsza -b>el7׬p!wl>GM~1M eUԦ#=?z܉CP6~ 9ub*bʱ2/S?懆)Rp52`SݠzX7nC~uw9+Z V,*ahi:@9h $d_=ǹ1-q[T 7fb \Sbw`T;rwj`mNEZ>- {Cc)Mūˡhqu4y" =3.[no5N)eരIGS䓢|+ /9߫'wft s[2rQ85EAͷrl\2yk< &bXKrxt ڋ7B,-kHJ**QoX$`#TJZz:a]"<RL4lr ڃ!z¤٭8V!ϜĎMoIঅAUU139˅& 7W2[\2KcZDwS e ! 
@Z>[1Wt,e6&F»<5;0 >ubŏ>`6.ܟs:rs:4""BvŊAƲ+zxy檆{.2s5e55QV)3)i/)meʼn#X;IʹwfI͚k쒑\2On)o83uiUgZˉd cgTil]|#)L@mH^fZ{c ;LLV"\>񹭿&64AhJ% ckRC ohPcFR4Q3@P*pM.M6FV[m3sٟdVyx^Ja86oFkS\sӿ}\ GqLKCh?75X_qP75$'ŭ%77ಳ\\4`kbo b#~x#|HV~~N~Ed7f춨,mz1Ggm]_Ѡo˰ysL.^YE*j`G>f^ojk$ X|e=([i/rC><~6_85M5Lܙqbڹt(4Ybc!MĘZ4rD4UJenef)p\Z/~[ӻ%_BIMEJ󼣲UXOJih q Ӫʵl_W [ qٚ`ڃH%+̒]2K"ݞn3SBda1 tF-'07-t7psR"D$ʳ%}*j1ĖS YqY4YN1bc26V5'8EJlThp¿t4,|[~a%35oޖVtFn)-ѓ!/\lpˣ*wG0S6+y?I}L%}' iʿW(WDjˑo/X.a*6/}V+$1t*ِf5/̼kՓDw= x&g?m7#սغXNG(Yncmu6rmaG Λ;7*NX?.nWsuʋ/qq/'Pט>i?wo.㣿ӟ}endstream endobj 292 0 obj 3066 endobj 296 0 obj <> stream x\[oIx̯1ʙ%$!HC6K@wo3c{7 !<3鮮5o'b'_}3qߞIϓ׫'S9N.7I'6EGsrٗ_aZoS74VxxcM=DZx`4Ge"eb~W:u]"m\iMw" o8;m?#g LEÙ[ѡ-HaaHcK&ZzZ^oZ[/V\t+:bwsKf82;BT"BƍvΧMhpOZ.^3QVyS(儣V$0Sܽ|aw IRh%ș&Cl='Uߤ) l4dZp" $o$@j, цZ;U~݆یkd p"E}۞޴m#% ʎe#[Kv~9\ n{8k.Y5_y%߷߬wHB3L.3^;f{\jLhCWYU87e]{j#2x+M ];,GBtW yAW1u_7omq#:m +9;ut^X5*#hwmi!~a{=Խ+d,aSPx(ӾA7a)\墪:|b} FX k}d%c5io+khds_j+l{E{jk#6+zoS^pFUWf 2.8SG`}5kaiqh<ڱ\՝IYݷD7/fNpϛ@!= f;xΟ=e.(aZ; !%P>s21|ݮNw , 614+0s3 iEZׄXn[/`M5Tp#7h643t2K(}R]f7y7YGI;&\Ҹ /= cqvpI}g eW\+V`l.J'ji`\z)@i2k n>ݪ}IgcizbpUx ]ړ ad#P\H 9C\jj~Ʞ0ejWªRVLAP:;i !l;Z*KJ)&L[E\U1OŇޤY5b> ۏ5ke1`eݘI8o ׁI)d} ?҂]$cٳ@X#'8n|}G4m?'w(&6N!agdǭX:336 Ȏ"7y ]zVǮ&)\ĸ_)spGr-fU7Zٹb,6-%_uTNՐqz-ڮ_"]IqޒG]{`f&=)djl ƸŊJc栛vn\!}"ƙ7='s 2o4;aSr^55E+ v-P;oq.GΓ iw=CJIQuu$5Y:arF4[ ߮=#A_|_Oø^z-؅0y]]d2f=&kL٘jW|,1R_j촎+'h4n &qe(h"!SGVhBݕUɴE ӸEb煴.Ļ5/lZx®?6uGˮ[v\|-0IyIpc$iIrlj`S}fԛD3f_?CL\L:vtˑ!yMVwr8Y+[c.wg^8셼yq]%,* Zikc<^igZN¶["Rڽrg9$4?GxLYNߥ&Rdt0esmԳ 0}NpKA*vF Xk/RXx(Bjsi#m5`evK ţݻ&0+aD; N2e8V΍z3“^lz@sIX+ݚppE ;`JyΚ9EϨC[~m:&}lǦ(i k aӹd J8ڃY KE2Ďc w%nfS&xYVsmL<0!2n줳S.m!S?띧GZ#ܰtS)_[UE`BF|d=W\GW? 
lbUZEW14_l}h|C}(DO,W)ʃAf"GD2:M(nMcsdafpV"t?465&' w5LIPL5j + F8GGɉ%*"YM4osix}g WiU<|} 0jvtw1;`FyQڏI9AM[壯r!y?C;jLdVDuCTmM[1B`=O^}c4w?NƭTj8wY~rLaKRJH?H{{/Y;M;y]H˳g?l]endstream endobj 297 0 obj 4610 endobj 301 0 obj <> stream x˒*Ko $CLHdž=8ߛs$t-}̌q j_nFa'鍷lw7xMOaaэ!x̍LꄍJJ.h{ 2f{6zg ,N85z=HSß߁ګ/^jkNóF6 i+{'[LjOOp: L1<2#-Wf0K3{ى4ez oftZ g+5UV;峏®lNʼnKx=AtD6aҏB w#` {GqjE_ӗ(bxp W01+w^:B01ֆ.0SZV*X J;_ry6k= `faIy5Ea3!4z_,GI…f8BK$s߂PQ./4l=z H"60 Qdgqq&xsv'(-ӂ:Ⳑ04`_stnl%z0܂K5_H;%:d ).0.œs`pCdgc?/_Np[90bWyGMSka촸V4J6HLJ17f%> *dscf+@ZL.%Q{3.3F4 i=x䳋<wxV~\1]mR8nb[%mIw V̢ge'u!:D!z&A=([FA+2V\?(Gs D.%E* ɛ*'|PyR֨e);n [(O]ۨ9B[*eDg>Puy@U~9ʓE^J >0Ė`[@. f?pdfNzm-^8Wo%AwI<{v;SN!Kx-x ZYD,%AbyÓ}=siAhe$Q\A=\~_z&$,DOkΘ`[[o Ef4TuZ,,a+a 5bLIL} Pc5$ Hd3&Ѳ~ 'e q.YOp>H!9qeݫƒ'3?Kv deIfr Ar{W ɬjА^ ?Gqb٢WAw2? Eø+ҦB?D}QY.逋RJ汯75qȀ(BqK!O{7ԓM;_c?,gm[s=(ffjp5mɳ> tC}04[DϚIx,}w6_7lըt6OB~3k⦓ޟUGk\$FuE VW\K_[aB"NȎ0z426zz25Ii'H*N;b`j2l;h\m+=r\ Y!<+bitT8 QJ+PjkAB"o&_K/nu Jr^A% JhՅi:S&x 8YvVZ׍Vߎ VwHLY5 85\RˌꙎ@h$,~7QtGOOͯNzX$v0 m5c3]3v{!M>ѻiBH[C[}$m3=G[z>lg$?D-Oe|k To{''e\>:Ru8 =/0WX`c3 MϹWXc/,֞zy֣mh`G5fW6ّ\D6$:agt ZioUh.%3RXyO}=v<4 Z^mP&˹qRv* 7sÊqס6YtMӽ$ adb]Yc߉1칺nuˋۡj*YmvKMS"'TgJԓrVÈ7Táxp PN$A qI 7; J枚rCX$=BJl!釛=)X&ȹ G]ve "ۚN@j &_Md<)'qz&iSpW&Ao+ ԾC 4ή 5dWǚԴK; H?梺=xd٣KDF§tYu;C!W5ЅG=o t3Z@:E |H}뵟S/H lHTD~[jXoƷ˝S٦1Fiu5SaoҲzYG>pMKj_|)>=hLu3v34w}^}WaR f,*ߎʝzAk[8B-0P2 v}d_<Yj]cLݲ|xrV`xˀmiMn83N*Ȍn m;s7lcvs7INW{\j3),gU֚oLrǤIE Fw3f_s1Qkr%Iz ?u^>nܔtcN`WY;hܽfܱh>U!Nsj]C.μZ$gJ?\8'zy!cs\l1:> stream x=I&WR W-h LO`8خn^{K[Zl П^%^[~w&}O_>x#u٣iϧ//{(C+>dNw2FX그gCRr0! xWvR6J8:6_$W6Y3]\sg0 2&O  bY=% ,M/ ]JvgtA}m:Á6>.%T:ef-{]pߝL.tjv?I嬳OŴB5V;+ܙ */A X \Y ͘YP H&Ov`k{_ @l '&n6MP1w̶y0~Q`rS|-p/~\99` ?"R0/6`~1%peNƎf[+M#f~M)AA\4@„dˉQ+͔0%JWRꕎBnpOC^D|¡}+~26gcdp egyKuW9M1~M'(@|?`vha Le`3ܔ3G3Dv]}$)wXһ)*`m'p)86d6-a6_/sJg.Y`OAf8&[QBD:`,!o3% Eh.d/3;~Ԁ"gb1]W*" :PeE߬tuK6Vy/qamD.{lۅ;78VIL) Ba4-\"O!9>/[}|LKg,\%"sVŢXA 3QSpS;$@&39%AY,23n,x8 ed#@"Yҵ"Q'oflIl08{7zPmyC hxL! 
nK}a,b5;Z.!fhǶv'cP})f]o se\.̸)ȍ]A'{e@}mj+ 3Y%8 &[{"ƉqЅҔB/%&PD7AAN?^%b0c׳|^p;> ={pUrLE6aOtĐ_MdP-AJaJʽ4 k} uwp&pDN%Đe=sl7'A L CEΜS}n5sm faduar[Y8^=g"FyӽÝ5PWCln.r*x^نːc= O"p2byG3X^$g;lp)קl oQma}K}#%Tn\z!Yj]!6@Ƕ<&1.byRWiLʃW%QB]>NL0@ˁLFf^*>m_<ώ,:ͭD̠; sVV]弃a7F35[N'(V>Eaad?va0Z+:֣ Q eڌ|0\"P{{0+!"l@=.V^Ko)T\xbBp Z<[-h>Ol=w(m==_Sp>(#sxATz;igeI,3R[5cg6zVc$y`>ގ?;;YǼ0sPvAVu쀴a5pٜEjr@Vkj @` QF#@7{7,wOAGHhecVl+3YXydK b@xa`lqhtF@~EK]ס,Jy5ӿ% >":RBpT}R+kSveqLlyr ݵm#5 T\Nx C>J>R/[@p/gSC$, t.ͭs8ޢJ&/2;'ոѓY7~뮟'I td$/@9 ȥ1.m%Sh0z!+G*>tVMy-* X,Ά@qn8`X0h?jTp X&][4~{vKj[a Vbr蜾UNyn=H3]p_3WG8-31`E Jrַc9٪ntE6e,:qT5p?9sh7+ :Qw hѧPG{㕍̕swFF hpRc.B&nJ0^ϕR.ƦG˪PwUMK6S t iEֈIz0Tmh!q0x֍8h7 $qĢA`7&'Ȯu0{]`A@f4.|g3Xb@*C5(e(,fyIOٞnk+`7* \޳r C$o`dI_M!{nT25;)A KTDu0s3x?is,Pڃ碰8f (6@XkuR"̆zP`b >=x{3dNUϿʗ&DkWJX ^hΏE=K"fbt`zN_IcHBPd5Iu 20a{n/uGtrܓV/`1쬜p*;#h1*u رk1kJn/0x6a$+8or/[ZS[Gq!NB&ԚCt4!ӥzX1Ab5"IّDl(!LUa&&@3vdiu~NxYzhXbe<,Ջۀi`l!B+3"g5wt/ J]~LYNwm|-ϩ6R*=ͩ%WbC&yKh~'#1oT@aϻR^,@p]Ey|nk f7~]f \lDOc̮ :%iPDhx*JsÎ;}G{bF& 7Dz ƛrc}\6\eJzZ Fn97kԭܢѕUqCb>ñ܇UZ͸4\3Tcv?ʸ<{Igy@Mtҕ+l6fԫerS>VUhL}X6j<ञRȋ&,; qJXTpvHNqY@Rִ ̞ψcUO )t6CfS>UpI&vS&6<]σ!Lda,>ܒ0BH&;^xv*=詬:eiĠD’Kx̍3 !k᷷$LG PBۡ9]bbT_`-0V[e2?'ftsq?qBoط-]OL&eQqKJ'lxA (a+-ʊso!8n[ƽFtݹe0RR qިFPǦ@F]M$9p9\Ӂ%j>;׵jߖ*Wxb.%z%j$:Xtv?Fח@ސak)TRA5*詵O1rsN\`0p.{H6pϳ:[u_LLؙѝ/<i^@oJ[ܮN"lcJmTʠk}Y0)a0[G,h' BHڢO=h% {ik¦ kH=j{b21] څ\u^ScHӵf.9dJǯDpxuSUCJ@^ı}UG԰3 fa%1{!v@3{<ښe77iÏR1!Ь6Q9eV?ү˖K&L_O̚}v`Whten lr@՚'ح2ljk$! 
B1F,lSZ={{ -X]ttt42)ߴ獮s\g{5sm a+/^&ۥ$=#&Ԥ.Tشl51U% WD Yv"[@-J~9F_jm4`lu&".H3A8Ξ}~ڎ yEZ V>?fC w,t"yU1/pvN7jKOgt1P"O{wj-ge ?D/4IR%2Ιo\l Dulr6[6RqUئYLZ" y" [kHWea&4cŜx~[ayqpwHbP:*;E7Mj!>Ua>MTS}gX|2DSi~tN8,j|Ֆ8Ž-Y1})P?#A"(|8Ĥk./OƞV ]"L R4 <|e߹L1]'=;J%Ä.V2,9[L یm]6R:g=ǟr_BޣJN"8 QDL/na#MT`kMzW;6?-LCo+y76; [,9nص~OegXεg5,#!#g]g!Wv;Ո4т=ۊ*6UoAcendstream endobj 307 0 obj 7691 endobj 311 0 obj <> stream x]Iqv_17v,2BI ˖H>X> (""㝙ݕY]կQ 𢧺\\*Oj ?{蟾/nH/ 4Hyw"4).~?'w&g> a@u"$7edϟ_^AK]rٝ~ϭ1r4:hO__^iژt9@?UQp,=g/pbW0='5yh_ڹӯ[a/$3LWU+X?K9+f݀r֧x#OJ9?Y8/BN/gpo;]^&=&wS8O3ޯ)%I'~}7Czr!onєiIE>#8dʏQS.=tg3jxб'5+v.1Е NS6&kMרc8i2 Nru1'-I0gڂM 9(ISfSӰĬLYZ(c= K~F+#.m,a Jhw]yyj]}wd 4ULMY$;Xo⼮,JkuΣr^):%P^z\g8Sv m5Y9ꢜ< J6ī6ʺ)#9e=/O#,wRR$l^`D¦9*t #pddf,ݐ/s7Br[QwNBbNEfޚQ<53ҝ5?㖤<E$ Z"l/3!Ɩ-*tȣh#Z(A\4mN ɵSϋqcvt 8.4VsH;*TVe.kSfouwZh֩ѫ,X@ XnaB6f!Tk3Қ wy+N ;0(Mާ dV܅ tymBIO;Ck>BC8Wkؠ]\haU@|bCiY1삪a2G@2|U}}zm{WJ0l׈ 5@@e&i GJn7bJ*TQf8EGR'x6pg4Ue%ktH\PMV6րp6po˄|`\ɭHL.;74{^-h`@xJ'fU8K\(zB^' S[8x !}̴^DGyݗb'"zemw46DB,wdQ* JS2+%PɁ%>2i``df[TߐCW|]ULb Sbhurr5`lXݛ҅3 Z1B ,2 DWÊHI"(;0OvZJfNuY8mc-P:zF/N9cl#Ǹ-)}d[X}l]dG` L$$/-݈> t\]a\Rl[0) #)ϲ׀HװL9r)b; K-M+a7N# `e"2Vܗ0 ӂXSP2ZĹyc XdB\HAho'[漳9f׵ҬuUL2ݖYb1n{иe!|k?{x\\p[qu =o(Ws `ĪK@TiGT&O%G(/~A ڸ-9"sǁЇI7ݫlUD&CKc柅6ō1l +`N^ Lf4sfAeiuiB妟iV\w|L#WG XI`L` ņu?롧IΆu?q%Y[1[J3{הU>̫$]K: HR9đ ڔQ$WLAYwm&~Kv7#|Osnb|\j%| o/ij[ghH] +Qmg?Q 5H0"I H`ꕎ\cX@H$f0& RX]٠p5F*{7CНVdb/AkO)4Y yJ܈aVNui}Rd5曟/3)Iq cV6&fmܲ7 ߔ% 1D;DF ر}#@fN0$X{ &hb/@"P.Hy]D5Q#4ht#\ܐ2NŌGsyBEǣ%zCKBs1rZBh~Ghskh[:J=x py0k(QdqFaxcaƘe`tt7Mib L *v6yfK:mV'lUց#WeGolRn$`v3O޳YѢ ش)h+CٞN=4h#}gvK2Ioۡ8Zfc,*+# lbyZ@pn>1z} { N18vG\$w({Iݔ0\L̐l7Jg h,,64/;XEܑYmb},cL.o6e]r2{trYd XY)LUx2 Қ"T@N!`H[d3R11Ͷ(Y}z>-jkS]A9ӎj"jXv떩X3,R#3W5"k 6&bSf&̼C}Ws0Us8< 7CWL 8oW Y9Vjg+Dvl$kJK -+m*F[ZYODeL-ҡd QU0P\&M 1ٚc~kӺOgZH\ӫcog=B5y>6gmP=+Y tKLX'j !7(o'O/pZ*!c9 Vo'8VۨcNitĻ$t,0O6V~~R@=%YgO/od-rYzD֐8^Kl^Gtw鱬[q ٢zZ GoIF,庨56PG=SaS s#(S4MFUҙ FUɱ*iYB@ޑm, O oG蕖~|~SSF Ұ*$(~|AcK0{**6x}$3Ҿ; jl[2}?) 
yRj=]6+=ws>P' [V̕P)MM&R,Al)8{m!qZ#s}h]]Io@4prn8ϻ8ؐxj*8 }~IvHT@s'HPpMcx|x?`g.;ffk,BL%ۅH ®/[ʮ>Xy t,-إɕMV?4F!]f5rGgaDC!Vw֡ǰ|[:3U2حjk=/w]&nn<(<~&=ǘ9Dd鑌xbxҹ1A_L#ࡣϵRG;FrCɣePa ̛j59Bbb ]h#c>/Ǖn"? m™A:PͰPִ7vy:#=[oS5 vfKZ( hA_4Ӗu__Fh[-v x<L|rr4xcwT P.l~ V&m>~v`?޵AKؙQYc-kY 6beپsDv1g]I:h% NBx˲teq6K B\ɝ#EyᩗX' r^g+s* c#fwRE#}9[@@%j EpF3@ڝ,oJ(LRVZٱ/wgV`e3Dخv\7aK=VB|pn+i6E7Lz~{ kM -cp,I׫l`.옝ܫoЬmXK,DiCz& ZkG*'evSܮ^=YJn*Ȳ;N{{޸ iОc87<>L?/QxdrcF"00 9`\>^g6]ErW֙(XدDΦ. HFP;U4wc;a"&~mkNK%Y~8ڥ(Ky7hLqzS:s)ȚPIwV{]ñHpƍ;Axb5. #k- }t`5r7_-PayJpi'T>3ާ-0#6a2ZʄtV=6@'g{?,:JKMýLfd I ʀӱjZ]6T7)J,O|ܮ zRWqλ~Á)1dHE-6?iC||##@< q3c0`lgq/ͶnDwyBݷ,EfiM*?֝|ک(q2g@|>UP#M MGXTrZ[;UsiNM33{; йI@}/0.Gu`LN[9<8JCZ)`&k>MkAr 'Ā9n5luV@hF:STCOB$ks$+3{=s|eG6]~b1nǨ(H%x 99ӤlkTQٟ-\}>#i\c{b hE/?/KV 84NQ}2,S1yme8\oj4DB@n) ~iЏը8kP$C FX?d Cqn&088Ny {ތ>p}P¡nlc.1v4tY*n g%JҟGus%ʢ"^_Pݗc q}Y|l@HlR>ĬJ?7Ho ol]-1SW1:Z?w0EYB{NK odER\թ3x&6{Gg5öI: Erѿ~endstream endobj 312 0 obj 7339 endobj 316 0 obj <> stream x][q#žy~WDv%9M_V'6qIiL;bcLfzB)ǥڅf!hkɯi W,vUM_5Xy^n.ߘ}'uUQ'X>$'2jZ;7f#-47>oɍqiOMs˗cm tWZy~)S9ǐ=&xSf~S)? 0+OKODC}|C̢6ӳ-{|=~=~-~؆֤g?z6s.}N7|h]ol",+^⣉Y&6s?ïg]{:٘ nk.(c-oeD89l7DƁ=\r}]>qL<'UΖiLMY]7:#f~GfI7plA}[Z9I!.M̛Z{Ff2å{ĩ~(?[`挪md&q BLR4zcA_ݐBYxJ;`V#a3RZ: 5(S @kE!e)6/hoIZ aCw-"+37+K,7*MZ}^6 37 -N. GP f- j謁QGjY|ң)`2@㖍\د=B5w w3ր3^c-Åܳ~o6 䰐WeK\hEF^n-6qqo2sA# `~(EoK=pfx5yb2DBg+N9̲ `z&5U ۟ T/7R00} (΃z=~K%f/iȠ, ,T+D y^׶ -a|8$ FP ,Aα(*4#2Or]k-a  5e`r;`3H|ƳE& Ǵ<qX0ۺ1,dȁEx\X7ỷ272mRF=wƵ\DPIS$95깬V*Ռ" u9$JZT2^dBs܊rAaꅃ.t_JBĸ6k /&zys01#y' ( dpcL`@1[WAH^`r{TńqَIұ q-`j,,lՍQyYy?2pqwڴSIpEg߉=Ȝ[bszwJU%g eº?'2-0x|}~bTLS1oiGrQ9L02?:9f& dɜds_ Sb\# -߇$Ilu sW5s[(vьPkq}Š}; ޚ8`b伉>c:gTΚ\"Q qG8=#)#*a}sj{~,aABdY:evJ/ r9ر7aI@gǪ'RjJ|t/Erڅyz)vk:k}O!a,Re!8F(@*}iG0aҺ?0.B(o/ I&m/*^8,V*O`nT$1XAw mW,,?vWK4B4VwG'.-'>x\7S[y-M(25\wϖ\ʩ( *-ؒX3-oCYUu;5`ʾZ`z>JRr0[Md[vѻen޶YhY>ahp*g5H_b٤T19\aʃzvTD.8`9yf;}̻QAm/w}&b7Ib%LG8Ki"6x[ o{Ly QmB^c}A-|Gb: e9e6Iζ- Bg`CR6N:/=krdd *ڏP>78]! 
NUZq@4)МA`23)!2 J`(xH+4Q*6 A[Tv)!ʎ!lgybQ8AX< 쮱q]tȹYndZ9Vv`ow%j.ȹ|Ymݎ8P#A$`*Jm.Yp_Ą$ؖmhJFvsAĠczo2011Ӡe3 T=Z4(7 lPeUL%FC=17.2$ϻ*tC5̇<5[z t-XS/tnr@/Ua˜~dMkln2X22X92|xYW>`j`6S  \]A~>AxɈf9okֵ[w.~] %(hA&(]nlNep+aQDc/<|[.>@htKւ.f$L2jTkHu+,;7Xƪj9^y>?Z'e::1!b X8 \=M2 G7$G>ȧ=]}5ю.vl84xk+ob(438ÛHh*Y@X)KPynA`*h"<ɏu8Uj Ξ[.>{vO{(<} ѩ:vnN CA zTdwLJ ;^&Fն!DksR%tt!U;0F5)8ycݢt%~:E]@v^ OM  "qgk]m > dY[i1PYj2֊c1&%%ke>L:Р(A4y~82 :Sma$e'vܺ4d<=r\(~AbK$P#LBsR !KĻ^6 XAeG)bWʁL`00=6 ~mńwnV,K*.[cgxVfFkCYJt{! Y6~749lʨX Ǥ"62p&yFXpjw1qA68Yܔo1 e0h+'BAgM{wPk+C65}z56dM#Edo>|s;+ʁ-Nnf;:}qr`Q**z"´!R^'^ϹƋ "lٕ2nGP ɻj[~ے [h_rۗ2hN5}LX0Tάxd-H}{ȋ/=?XtI[o8x 1{ixmj-|Y ]N&7MZCdktIiwzB^(MH> vۊ=vQ4!DžfQ ܀$d9((]`y?]X?cb MSp3F3&Xbܗbai..y|oQd |u$^iʫ\G0;{*\jlg(iϢaMBxJ97vţ /3A0!8-A 9sI{Szfu Uޗ!q; |X-O?.=X"{|jUgMAڮ;yuɼ nuWI  V> stream x=ێq@ƯؗgaA1`EyDRLDF˜%`ٞ_p}_y4wW>*/9?{T>WY]E]Ym<]LkXP߫%`.Puޙ쓜o`.'u>ӿ{cLS6>!V't: r}1o8#<`v[l`TgF 6Pb VA9_Ν~GӺ+|\o@V9LP!g}젬38Xo^$_?p.xz toK/ajO_~S ~MA+>`s1ӷ%<|L;<ZNs@yKu&n I[x˷زpʜ~4Ni D 8&cqBʀ_h=Ҥ.#0]@9/-dxkݺ;7 ^vٷn[ nL KP{9\)Dbv^GB06{&BEI#}OuPx@9ݯ 65c0v L.lVdeoluӎ˞7p1k]DSRӪȢ.BUU׍Ӿ_ǧ~[e>Sp>*a:{F#{Ȗ~UB bwq0 07g Tp^Poj| ϫjpjaR!;=kD䌼YRvj"$f~Ǻ㔀*%_[ <37p^&$;496avS @ϙ1E6E3` BBy]<p m6neC0 `&ᑪϤQc 97bT" 3r5lwaYwDu߾?z{ }0aɪXQzS6;2&gNцhV:ÉL?k8}-8bBjJ^enf%̸6G6D;nc击Pzrnǡ,kͽ, IsYl!,0DQ~ȧ8Kʴ`Y.D+xЎrӛV|Vc$<@[Y]MNl4J&)!O.]~ݥ~HoBpk$$-&YPN)F*X^q!eayYWH1TAj8o5PIW呬l\o-IsRb}DdcC1*FCÆ:`v }8V^do% o|_IDfq8崵x^h[0]vJ0Xq D&La}qݢbWo[GDE?BpP4WeECmZq:7b7/sCHBG[Y5:ܟBu}a \q +=1H'X>p|;orM+XcRñ*$ O j1;b `;bꎎƶP X& !XbLd]Y2,D+dcc#AmāFغCUYCw@J֪Nyu<8q3)#))bK ? Sx9He&Qy"((,|L_ٯ`NX+Jᨉ4I@_7l/#Iq?oWz`+@c329"{v2hHӫ$0ړ1B 9Lz &B.̬Y>#&5+:К5{p5Tگ_߮ a1Ncǭ^G\5TGE h=Vٽ{x>`!q$at$ۯ[8%2o"BjIBӺ00 q^2߇dA;%I?kuXWso e3 ϑ `1!lC'ŒS1Јb>xWtuj k8YŖ-'+*Ɇ:Y)%[ۋzbrw]^\yk؇NH籝6P.ʰ&Kf`Yna5V48.$\g'8# Ҝ=t9Y CO*heBՁjT*UK{N<F,΀d=D㭌̌|>FEj{zKgYx?g16vKwn^ջNqobzNRK_z"nź4͘VwTg!鉁 @zuMXrd"p`j7OX~U&.@bT,v(Rl50 kHDpnˎ}JcZ.5!〔(ޑD/*I8.VXR$6)$!U󫺧9;'Nx:R w rk0)AcM}nږa~f5qN Lϥ1Lk`r4v )14\Ros(F"J`ƓojxI$;*]폯=(ZeD3!pלJg'֑,"P'#? 
+$&6@M,9z1ЬZ#VR ^[Yv7Z)\Oi91K13pBuha"@EA,CdsIgCh[:jt%r}K GQ&^iD`#|k{2I,L.rc@+00ST-~˅xLerI.3sۣҦ-<5"Ksl  ~NdS;]JuβuV ۛ+ #y02"~AX(=ؔ55XՂHiqQJVԞ+ ?XxAIF6ضxk\W[VL"U9'!(T9n0s7,5 |5^;";:07q ڿ>m%| ;2nfzE Bdsގ ^i+@?*,hi6M[f8)|6m " Pa̔CL127/&SH' f+l>2ۖV;< _'y/9{#3 lܾʾ!&b/8xެ,^[fDDbsB_# [EĂ XP-їd߅1ټ.65nCV ;eAH!#{ٺ^s4Щ<ݝL fq76b!Ơ$I$cfZp%mȩ`#Mez/04O~>mVw1gl R c*EAj/ШETLb;b`V11rl AD+J4A]]g-=0Z{jS4Hn;`4Ixc#aeZ ~X]V1^Vki9t&]L{KQ_+;_ug7@/vX ]x vK3!+u`VFZƸ5KZa2G@hZ`*bQwB YalozaaqOjC6%rYPo%4ٱ7*n)z/LmRsbie%+JocM9,E mcA&\;)? oKi063%=ygg 6f%hUkؾ^+*^\アC1f?Q@s|Jjs~WO]b(,]7)3WI7nT{A 5@vůd(\poYiÃmVE^>M̪Nm"{p[LŸ4[ϡ#}A-&aFƄEH(6pq2BPlwmm{!>C=f蔀wW)@:oU!)4m>ij&H:\J+dpQMqj6Oe:4'#&MU}(DF`Q# a˺^WǵAOVYUK]e'+2fU>ގNۋA@y|j?9ݠEiH+E3SX]kh'qz&Κ]մ⍉&>L G2>ǑM6AGw`g)d2U{[p&![b6 X?q UmIB؄ȚeO;~;hC35&EF2ddX\#Q"%ު`a`oXlQ]ر&FHq\@KoqvȎ ί~aEHӰ߾vQ@q6ǩ(sR 3u@.#uSwn GjE3xoJj#)o9fxE+a*k[Η]WDjU߈Eӕݓt,ж~ZHŒЌt./_1E&𔡱 ?v='y}tؘ k`;Jlݘ  dwioav2%=e3 .ōw͔ڷa4Q:;NJSvxq|¯ N(s_ؖ_3\'?NsR8[wGb bQ[5 vU_KI#K#lmYաw'!bJװj{ڎ7gL~FqCѥL,lgAn2Eo񲧳?p[bk,$QU4P VΒ<#jY7>׏8;Bi/_Ƕb֐y=l|4i (t6X6$-? 
%&]c| l=Zb[ZY>qMU8xr=fhlڵ*!՞<%V{> stream x][%q8/ >g^/BeeٲɃ# ]f$e ,9rl8Ѓz{dXEovj;o M܍{f4HbJͅr KpQ.]TiP.\~.6˃u.5XWl]v?*ls &3|g},.5YԐF=s17SɜV1*ieʼ<3DѩŠ"9pӪu="L@s0`ܐ]Y$.~΢іyoŖͯd?z62f`E.kHW:J })$Alg:{UzVV:0}Ϳ2S.&K+ n%I2L6KoA20O:[O!}v°ϱ ;"Y-8`\95TQ)+iΌ'I2S9S6s`-Cx@k5L/ݼZ`!i܋K9 AX+YB2bp1w/YKrvSp/Ӎh%D_/ X IZD4oDL<b'F < ۲+l|#bŗU_Jm~j%f^vW 6j ch|kw@쎁Xmv0,-B ֯ b\hA iR3\ l1jpܾ`^#BLGMf=6 fKbQǖq6cE!"s:.h5LN]<$謀K 9Fvγ#Tc|EREI8&Aj2;`Ocp%݌c FJ+f_i%D2_;i Cs[νٜyTk̺_z?Y$YQWL<` mh@QV8`fv<HI5PwEF"C8ʣ1H(^) td$>]&Q@RX&Sw^:5G,\`ϓ$def*,@ HzO.zPcc.c=hvxq\ |CHަ!(Ftvss1"G3xDMoK[Jl2 pD6S׊d?dY@%32cyiTf.L5`IZ'~9 } Ŕ=&=_-MM2X ˏ'H5~8 L` \VJvj}OS0N Ұ =ϑ6"oTǣ‘*,}nl\[po.Q˼FY\YǢoڟ`JMڷ9VV><5ӒJ y*/- 9GXm(viGYM@NHG@:׃r2bb8[)CEL8Rl_9*vA8ԯYzl ԑ;& fw?~X:33DqY=ĊB,X(?nzG]_iZm1RB#26rYV-~(kV<3>2'DDSJk{$PF; Ci__PgAO0Cʀiل^w)+bsh qrkȯy>=~X~ 3yRfrztb)\%ܵuKL.,qkQZ*?_\qg_(/8ƴo =>kqcʺhtLY_ukDDQ{%}:x*k>femz^:c1 sy"ͯՐR@ߖ*6=5v_n& +lIcz̈`nOEZ{KVjX nO~68//QSDwmX^'=3)o_{ȅP B#*6zZd9SKcH`xQ 6")"y t:v%f?{L˂߱T I/ 㝊|<1@\$k*'DftP˼؜~U66?2`[ҙ06\8)c&4 vírjXA jX|yZ@|0/f Ȱr$Oh6OtgҀD=#2>q'v lldV$)6hE59I['[zP1=x;.&ͫ&q!$",/GvA!r3)lnejay,<N2(Vi]e.kw[WHgx[ 8l2sqPRfmgg%|Ҝk%Z:XC=)̡I6hv<خts9W2OhKN-ַTJZ5>66toOLԷcuQ%qڿ.ĮSaM>S3ɺVH Bm0H^+JD")%Eq{#0^P Նn+`c] TfV9?wxՇ:/0K=^JjMDzYDǐ,y Kt\S|4XLlCnC68 wWUj`=5pyb ,nsGVUśl] vIai>.ós|Wwm (]%ˁK }*sS$p`l s?!D+H{اh;zhSp{Q{m 薋VRAd=(K#w%dw8U)nu#87ו9>CA#9>VA|&J,׀g$@Mk:q z =465BrMCi{8W>-TM. 
b&;Mqu 䣩ZR]7#;߁o~n>c]9iV:%I@,D^Ƒ,oi~W.r݄@2^s)cBPz"- }:,;2^Z TކocIyFV+gĬs-o8Sd5Q JQ aA :(j$_YLb29ffkonǂ XiY,Zk>;lq)UJڊ&'NUfl]mqv7ScNdw6(Nչ?KxIhЉCYRdE 1-cPf3]Y,BЁrVB[ǣ@eT{ߗapCJ,bw`uk9gUկ^@5qn31{k 3$ +BQ@k;QаoddF`z)TeOQ$iaRU*% ֵi\dַZ: * WX ijW3kɨ0Ƹt) cs$tg}2]0,LʬWvI `{c0H{+JY@哰'Tݔ(р#8Swx~n8}fWQjyzxɠGn&kr顬OˌMTG"̪}ʙev%*E@ k0pA|C_]pv\dvYaqy{ƏCav'H+K_~ݐnoƀ;̑ ;:E N.Pc @T^3H(N :={+|HA]:< j1a]rxgUbPUnme_pYIazfzY~sgE(eendstream endobj 327 0 obj 5740 endobj 331 0 obj <> stream x=ɒq>W]b$QP@rf` Kuw H9p@Q{eR-R˷EB]~sÅ,|{0 ]%2(.6?F Zr i"$djȿ:H]rﭏ1szN9| X|/%tR1gs*ب]8 lT>|y bA`U!f W0=|"gZ;w uf̀/V@?3vVX !g}\pvTxa`,iVYul|su@w49Ûak&-R'K~^| |q'ҳ9U@Z`]O@s7$ܯi\1)z$Y$X0G.h*cm/feN)E :r 5M`Vkؕ=&w 8Om W<@ã x9=` W(M&$+M8u;\We] ! -_0_1`;ѣ ѠyBKfwȉotZL;+&0LHQ`È#s+lS=#>"+Z \[8s @(IwPZXQ)w!/ƅFnu[DRDF ʻQ`8*b@nY+,|BWZZ &+U@^@a6 o!IsHȋh (t]F%'{{p$#blFO]-s x+,X]0ZM<`k& Kiv77@LFüD;|oX{ N J%ٕw((E`L؀='N Gb/T$)M酘c}-fr8 rQ)@Ɇ^HW?b n:aqS*~Ұ8~rbvkN+G𧫢xU7r0g6CD[ tm!*O`Q$Ɖ$(m\5zb5Z&(e$}srμ$REˀQTP5M«5٩ e DXgqQgN.t BΪ(5+;/#oC߅N\Gna[)RˬêJZ.Y+Q cSp[H4ڇ B]aF3G\QA]+lm~nRpΤSxZZ jy {IH4 ;Ǒ8mj~!t`}6&̈́EA VщOT4#bw"ZGfS1 ɬZT0 Q@'IЛӑL}!F% m9Th*2cOzU^!3#W%JWB ф..eybɅ%d'}DW/Zuj#8}<-k`? Do  t$#Wm2iPjҊF3eMxCTXi':PTk{&[WyC7S1.qDbصQp7'2Ԫ+gx*w;kM}%%G 0׿$8Ϗ.<>E)j M qR@E\'!?is9$Z@En C!0ԈO'&` `~TňE[jDЄ.YtuЂx۷|T?i I"AV!|`|BTa}!!lkc3_ aXhϊwE69Dl X'#;Fޣ5m(()qtR%8:(bH'f"u*Gg:q`?5 'E5A%Y>^-" :p$~'VgU4L {x AMj3`gPW94_v6":â€Y?08cٚϣ 9P *n>~;Z@R˄$/@ MM/+($WE|V }콪ٷLR+%-~yu+o8B)dP5 qks<'BX,95THǰOvK챷KI;OEYfS`ؘO$B`cĜUT9_jd*Ӝ5q I, Uif[2T2T ppAjGb6yZF0]g_MW]67gTWz $f+cHݵl?3"?ZooH7|1IeHk60\3Mx̗3 ':7\rPGq@ٔDAgM"d uŪ\k⛷m6ȰaaFJdU}SnzD)T9N;A_NDȗg@A<: :(_wݣu+g˷+[o4ӏÊn_yؓwaƦ ЍB$*]!ٸ7Mu-}jZp +q?P6 Gm q}ڲjLF-yJk py Ol'ccjqTi`}/u9Flߊu`}n6%5) 1kFg8ciw(E'x`POȫ'\w!zq^4',l Z;X1&qy vH '*ۮT:6vmPq3N@qP昿U sk;h_%"7pm@)hf|C{R<B? 
͘jSlk}<Ԇܝ|!I#u4mN[' UޱcokK!xػVV[NF&ɱDuH'Wa:w;guH^X;TNAʮr?^{e# ] MULO(:yf-tt6QBE$ LwMΏnX~ӥD H5."_;l 82B{oX2"V2)=:x -OҚ+,!Xw4Mtx.茫J5lf56lFsjRX3 4qM [2nUtɹkt/ǩ^|n͖w VT~ε̒di"m_T6Y*ԃ4N{FE.Ð UW2Qx,T xJVRV]+Ty9m߫@@*M(evVm:QK1bSncO"l>$x6R!Jk۬D@wֲkjW@$$n. 6$|Xh ۋ? K%q6pg/2%yeN+re#MM鶻mdJo6!Ww njmǜ鄨Y?Iz/;cpqθhbǺ4k$bB$wN lJuYYǕMcU7Q ^6j.8{HgԎsѳ:n ])f; $թɕ70t@)pIP\z~nR`OpA7~ ÆF<N ,;~VtlxIgxM~3U?ƻu `d<:HV~ rw)k%Q (+s.=7Q9/slߖ. M 궛GRCB]6⃡"-QXfqT _ym%QBy$(Q bu*Wx!]Ym/\Ե@6V7PK$&WFi_ٙFendstream endobj 332 0 obj 6294 endobj 336 0 obj <> stream x=r\qUb_R^Gs/Җ$%)>خ$AE4s3眞bA\҃93==}7b?<[99$qr'0 @$p'/FY2n(6^Iyry;ZjwbR8#*u{:1i}JG_} m-zx|BS8qo[8ߏ&k~yͤ0'Jww|[YSJ = ˝XYriI[l,N޸yWH ߚ)VY+F]ۗ0hfu6R%"#d-0֪٠ ~'-x=Gl_!Vnow2IƐ! " |J@8Iwbc)//C?yf^ ƱQgKp).6w'#ީ+> ^4:6aB2lNq< V MHS MoXd3xN{,hII97IA/VXspz۫u1K\j> d&2UB$dz222m"%fLbl ;ꌉg5c@f卵U:!- (iŹ򌋌AD&.A8a&UEgЕ@ud`vـp4ȤnDzqy8k8w BF0)^R,`kA 8![Iƭ (Xl̸+Ҽ}Fx]X P ޙ}op 3&$ . ˡXƦ1sh#ő^dTO&aZ(|1#,f%X8IBsf.ӿ__ @ cHkWeLHܙP; LV@c|DNZ+ض8\O@E O(f8_1ӪW4-c^.S%AW(}uQ*:dW(/peƾ kA6/JYΎ<`i6;̚qJάyidl<#yo*-.Ra<&)M5ʃ_Nh[3y! x_}nL_gL#t"ڙ93h@"ZW0ěLI DSfMSekk6; ڬ1uHI}*\X_q t,sS&28YrP}m4i:~D'2!-κE7,)%ؖv\:_TRא\/\"kK` gh;Iߣvz:p@_g,VVH4 8%]|BR I'\cc%1N"R#9IG`3"$I͞g~NgrĆw}iR/R\OUԅ̀SoRt&¼?:Y?_ лY8?Ehu,WӉ@`YԐA hpxiu%^zk6q}rB{氲dO)L;/P4˜\&e6l ]E !#)P b/3 U^iu̒"nf dKkC }2@; zhe,ۨ4},-C'z O_K-E3q ÃYHkF&DC ;@a<(zCCa O H|> |NJ$ScAt& 1gg=)TCݻۥv^۞R&A-~YZ.h"ljKS=836}etb%X39 iKVhEhϷ{Zo!VqMs񁬞B?ม0=L ZTEÝ8MCV*,NIG| 7iX1^k2z7gL C&4 3h0Rj?HhP0? E >Ug1Ki"SV7 c% h&4 C$fRZ,C|Y\X٘8© \ʳAά8&Rz%i5Z#ohݜsfg>,HƂY`\k/<$ep^`p}Y3`͂JJ){ {)/.# "@R kN:#٬)қԫ@ &E଻IrUhx` \'r㝲Mj FrV`{-0Vϗ2Lua7D #c HBKSC8a;yYul>NapňPcg:+X@:bF_>)dആldK\p8+ =^0d'Zmٰ9Ϻ;^IW{G6RXix-Z`{.%im|.A\mڡǧl+d6Jt b`jh \Kj'FJFe;HbC6GZ8)E 3m%v*<@r{i,h.3WGM5`kZ 8*0) *LHH|N[fU{wLA^e*\(0*$+Prƨ|{+OEi 3HZZA :q%)$*ֶP@ֱhYArNj@llAUd̀b1Ƥpև1 VL V@a0%;pd} }KjJKEݴZl}6߬doyivmyewnf&b!NQ3J^twˠ:7Sxiw4%~ԍپnn);$Xw˶xE|8u2. 
2b]uPSU.M0Tr~u ae~ps- `,ޔ >}N;ڳ+@&͡Ch&#{?M`bz]vqoyM%8%e<1= ml4}f\hLRJƜ‘C# 2ޭ¥_H/bU# 4^Stp|00^67kՇ yq7 %7L5QgWU5;"A(?/2X/W+Rdw"o{ !JuV!}|HWR|,,^ЃXQaۦHwO=sDXc:X!=H@Q؃$HsQ/cvVJjӘok$Sο5T;YT\ҼnkPV`p]:; OoVrշ۪+X!sMorfdGK5˵_/ZaKB+U1ܸIG!NܸQ3q9JoR+>I٩IG m_~F>ر:~PձfRFK̡>Zhϻfp$3ZZ(un5N,~0w<-.˓͋ZeekfjB@y=/ K7뀷] c/.dd8hD_dm$pTۿ䦋d3r{=7j9}ύN)a/i9FgxίjL®~ӨH]-ԷGُv$Y/TQȜ 2_ߦǮ$`?1|jN?Cͭ_2db* RI)h[#Mً(Ctل)HO USBDhC}*ʂAߙiNb* w5yi)eg===F0+L}ݥ%E%h>+,$僑*'?HQNGL*?K>ȁei]_8|6X%ڐn> )a%W@p!2+f߱ifbOBhjC(R Oqآv 0H"x-PvJzSR^AWW Ys$u//Xy)n pNÙ6.0z5r&H>rR>9~WC!ctaIрQ"fw|no˙1dKZ]2PCQ)G˄TKVz!?yO#t9 cu/]"Tረ `c7m7ҩ'.b6 f~6?te)37{lgbqtm\V5VժO%jV bzHb|!n!꿨LW!23Pendstream endobj 337 0 obj 5938 endobj 341 0 obj <> stream xn%GQ_h $n!`[PvH߀g8fquuuuݫ_I*<':}q剦?_O^w4O9Nϟ4ӨҤ\:=yם?SV۸31_z *pr BEpQIn>?.݇nn{|o}9`:hwSiv1iGDgsGl.~9w>aǨAU{fw"[Dcu]s_Za |w][:/ blC9wq g%\NO9* $_L^/0EvַOW+CEزnW+0-Ȇ<9xz;q 񴌪azWa]l&~=~xp;CYR}ZjbMr_xTH Řtkc*9E8ĘvtbA.CA9ʹ5euf ғ :VX8X==^uۅy'% BC}vHBdӺ&[|ļ/ bqs6OTC}oUB f]@Zf$ `,-Tܲ5gx^ZKd|F;coWr0e;g P2qԧrKmŶ_jm18&٦f|k-ydElbxW3j~h/]F}Kݺ/cV|T*`cU_'uAo4>3vyρ &lb a, :pK)W306v{1pJ{4DuRҋO:x/-X)~DZ&5Җ#1f Yh c#&uA w j^Rd ډ!^KBHn pZJqyvgC-2y-C73A~Gendstream endobj 342 0 obj 4310 endobj 346 0 obj <> stream x<ێ%q@+˙`E1 X (Pl@qWxgWDZH7ua7l`&źWPPsx}L]~g~(t{5 Hбhgj)Xy[_VQEtq}?OR-ZmiR.8}2]nbilvwж:hO\^d'h`vgV!f{K;mOn1Q5lQ=nvLp3F.v'@&ݚ8,FïLĹ _.;E^`c.NnΧ80NuJ8e閐`MZXK8 .&}z@xB@ĦVzyo:`c_j&gYY 6S>Dv[擵Ѧ{tpt8W&E+ޟk6?]m$čh%mݿ\V0O 'R&1%+{7Zqp~O+ ʪM%txaQٟ^1V FP6!Y;ZتPoR9R|=R8uO; &On uFnOlDLGi:LWZùŸ7U2cb6Ir' * nkdT |!gstt& 򒔇ɨR,΁] ^  x?6fW OGbT&_Rjv BL+kn.L@zc Ā &R@Iz Xo7qC{[{/kw _^$ 6@% +̋2 ̕)3sZ W(`9,5~R^υ9c ĶN;▇מg8Ua1BgF/ ]C}oh^H+&mIp,0]ñ,5rGg"I #N \gKlnZV&I#:f9$Sv-GXD-Q&c]W>^!(\g: !V*5h|ngr.š&p(rKHӂhP ?'N46Eui8 d.]eW_ydbq((tӜ4!RL{;F\L*xy;E`eI1JJe; Ph&,DH VRs=HĉLoWN2&vJ !6'l7堽߃K\% cPVмNֆEqrN2U3/@6eKt@>t/lQ>!rdV,X$VD/Ÿk5@fT ~6{1N}ζSIwtBJpbSR!aI6G)}޽nU+[hlzS(vi_FS|ZDEGϪċ>v]yq}k`TEFvO86#6ύ(ٝtkVAU5Όfhxp,En{Kw5d\M]/uNĈ(iedUp$<"/exi57a:`a8^ʔ1ωqQQjf+)nH+)7 MxssHc(gQ1 6v k1R좄[^1fS})o0 f\ra0<4l'k~cmg0⻊6@r8+R7t+ 
]r0mk3J]E\TxԞɤcߦ1LA+S0xK`oJj*ۦ6&DApV*"`EkcK}g"g&`Ɠq)׵mmqUR, Q-kvBj= 3X~]>1Md8\˅>CVz9:M\YA'FF;\H uGה^@Y Y n,;} q'y2Ër34@z`A4G5 ZkT6 9tZN<4ړy<+P2ͣiR鬗)f^[w喳 ܢ=Ja9D@bDΜ2/0W,|qrl^WsX=e;8xcJw/NYZ5ḥ^=]~V!cpnF_umR6:3*Pcy}tԉB`䀣)yv[ +jXgR/c߲/%#UtV(SqèTTF%CC1QGPVYֵWf޹h P!c1,mO`ͳp={n$!AN. >TRH ]."ڤ0Sn<*0b.XۄL޸,\5`䃕(.aM,'&Z4 p7ЋSd-P%&hoʹ}j/Y Ig#5&VbʂiAN&>ks$3jp:}jjWD0c Ji]p­N=@ :2#8#k{h`gh VT+;!_y,/ϫaY.,d;X}|0N3KɃLX^ۙD% w B ]k&[%|ʻH }bķu8e@{G#z%a -ysVF;;+7`6]:lZe;6ctcp}Dlkh9H9g bN:e6OZj> +b9<--Eq9?_"w=Sc"H̐aOʛ+Q6K /^xtIOwwdHɴ'^ޏI>%a_r;+ i3buQ/6(⍣0\G'hQ50S0R)e= 䩥C<?/.zsTvM:% `4@@ɚIW~vt_M M&f|mRz,0W%Bdd^ٖA|5 . ]5⵽8#OZ-->߆\? 0dC ~S6*_I ;%|@έ7z?lrIb3 r汦zm͛2]'V>>0L>QΥt3sa쟝H5{ %ŞU0""" 3-[?RRd͍YG(D^W2.Tjf|8w+S`@x=`L8dqN rvb O$rO( Yaw$@ ȍyL;bS!0DS1Qɦ J|W3]}#/N{TKG׳ݞ@=G'I5x`ѫ mIn&^̺˩VЯ]|,:ĨrP+%`l_IDɯK/`KOOcm~~k+NХ~&zsm:zI#n׿OĭPqmoҀb_c~oko| U=fx7<!mц9\kKطum+g-o 7Ԧ3~}"ݐlw vj;M=(]qW7 嶶V{gJpQ}iTk}ipr|)h4͑h{Ŭ^+3"ѭ~0ccR\:yJG$nX=&#V8{^%:;tl(垮\0`م8vYyzgV)Ք{-b1?Jx :ȞIfǷ*\v*ߔ]V/gif vҬFHPg,MOC(7#``e+7kEB9XUv/ ֌7ñE(=" '쿠X@E`+i(P~xmleJ8Цr϶U%ru7f鐟"$++xn1w^rmzdz1` o^R]0{B=kms<شz"ʌ%͇V8lp=˴zrv/+Gmā@{ K(I"+j =fp 1kV M$Y4m .,2f裒_Dk}c,yV6nendstream endobj 347 0 obj 5552 endobj 351 0 obj <> stream xZ[S˜Xݺ8΅l;dӺQKY`!aU:-MK/vbӓ8yq]wtwΏޠQG'i!YFɯދQI * :1( 蝑 FY:5zjDrj^  R[p8c="കAA7=PiA@e~{$E0K|@-H P:(!/PjCd,IeٟՂRroO)Hh"k1p:`D$EyBrxCMq^2|VweM#`2;e2[\a0؏dw?뻲0zPf_HK,ЕFOeɓVi&G%eA73l}jOY3 S͑Ѻh-mN8$E Otiً2|ܳ?( KʸD~+L;I)n)̎֏5̏X6U0qZ!tMO-dPlqNqִH=tGn]d.`ʮ]Eh1l-\ ՍP]/nѭ@I:t9ͪEO} .n)R/LCh_ZQ?_*-}|жlҪ:f[eZȮ3OY)oOڡlMF[Tphu?8*[["g83V2v#[^Wu&.[~ A%ye e̘32OvsY ?kA?b+]5Vb0zcBi%',H5hGbizH'OjyiCAסyN9+dcL?)ԏ`I/Ç9 RGeiZ!(>}؝v~hDAFFc3҇nj1t&%M9XrL@z kQ+%֚*@TI a$;;=?uh}p`$ qF d=݇+ ̲ND3|+F  RzM\#7?țw2FY3%hf , - ]&JG>wÿc%}fs)T d0{p#zsLpж"~<%qA&yYE ̕`~D0Dž$MR` |]LJ=3eJ% G(PfI5$@[Y"i eG%_ IZRb25*|1h`,,pl\a#t 4xb>k5*2JE1>T>O-5o˄'Pp^GyC咩Cf1sJ(EBNiT\e^SYrAǪzBGgSUNZeO˘WRXjN.Ka,nRz[<@.b lWa9tX IJ**zE^m BƘ -h7e .Rg͔Nٕ2bLLJIUgn0 3[/\V. 
M/rFei{tbLn?8?/ U!?Mk4;̶ļY:H|#l KzDie;q >>cRPY\=>kJJHa:n]i)2;4 MP<{ٛ+uBw؃ c>0e~YK fμe64= ;s1Q^ϣb{aھ.6\sˎ6ޡ?sw {:nbӮ|x'蜪pKLy_o5X$QrDhZ~dpÏk˶xSYP5>"gY=xbzޕM+cc)RK¾^g+R Jn7Yb{C06|[Y`6j3a+8>XK#}$ϴDn]yIj)5VR׌X}G 1f+Qe\FgJfK1i!J!`v-Oք}yUl.s7])tSPiC!]Bχ?nt]vU?kslۖVmd RQ3U- 2v͘⠼7}gZ5'd}[ŶQrR_y҆+B(_r8\ 'r]-Q+>w&쩬# qsnܧe-b \3eTNyZAتYeVsT{-mˠsL˾:S*R{0j .?R+_+QȺx@BQ}]nBF*׹.}.\+uAwo_̒endstream endobj 352 0 obj 3005 endobj 356 0 obj <> stream xgɝlsH˕9~^aҊ//ڄex;K/?Ni0 1$Ƥm1d.'qך*Wy~%,7\rkh_KQ]^j C Bmw R~߅N9`F8G?ndnB6(C̆8RcX;-!Op{`M=xb8ު643uo4M\}€dl*%pi]anH@R2J9rc[ _fKTC zKZX#f ?LD!c^ԽD^ŵq BɺfJXQYqaiA&%J Vfb(@fDx}e?׌|brD@E)Fţ[ss~4i,;  ("%T(Gn xq5k|ibq0HrQc9goή8 \;q0>dVʨX|H4 xJek8euI1`~$fjl0Ae<`;U+#6'"<υOYh)a*lS@|FD )sTȴc @kq#W(jFt>G"o!OL< {}ڛ,( ?G4uxܑ|܃X<6 |@Y/@9 v6qOv7 N^ĐXfɅ} ͱh%4k2ɰ&ڈ 㪵$]X}"2) PzpF;D-0շq5M_ͧ|^K1MaFCBPX e6vGtǟ5)zĆ,kl< ;  f#.\U|`,YQCr\n ީ5hKɀy uǷ/Ahc$/̣⒙WS/5NtEt0:Mg*ij*=wU,AIU.Ji1"?A~ ע,&G1y[8~Ӱ]p!9y Q(.P7Qj] rkTAAbXˍgt%m49`A }^.eH̙;>\hHpdMB&NЦu9?AP< #y7b/ϡ֟[yby + ]35qO2Rb|ix9>{:͓vL3f9Kr,x5 \G%v.n5!WqdgOWQt้8Y^S/d|rbWQA+P6Yt!YddXbW0} `}~)9\ԼA],r]le=68R+xY@hAR7j/.BfkTC ]s4?ɀ@J[ke,z8ӧvC`Yg1{eXU SB@Z4I;(<'[|Lh5-㺶{;PkD֮&24˩:>ԪCVA]|[c$ȀxPqKO#fB+kH+J Lw>A':U~n'oceIpH`gbelἜwC]eF0A{qt{:{}8Ŋ{ꔅ5N]8~z\@^cfEi܂794'b+F$e8nyaecJÉs#k{v # 1f]l YZithD4Z#`n rK k\+|z ,jZלM#$ίӒ^k4yrÖ"D><)ƯbxYBbDޏہnkρ$yL4 m91Jc(~9 6.;`s.f]jcLcw_3/mK5Ps1ەz(P^as` -Cע|@ff v<$bd<:&[ٯڮ\;JRN bଶ֜[c^R#1ـ6^!DFKuP`gaxhpw ? 
ã Hz},}míU9%QXXn<~fRwInCpUm}ZvPma-Z*wz' CA qqb׶ʜ]í5A2tNp窥TqWR zjNbu\LR]{-HVJj)Xd#]KB 9;tݹk,hVO'eP@+i!RR^آ.V bZ*OPceR% D{єP~ǘgFoV<^ADYhݮDNh)`yq`C$\E$yZ͜q/Dt^'6B9Iv -iNY`$x1j~/A/YH"mjј^<{Z&o"Q/tdTc̈́VR@4T#e-Alu2d"_\⃘$ WϼaRJd>y̥Z^ē-mA5Sd-@SoNL˿]5QH-r d-f~Oo#otW"~08Xߓ2MAΫCaF~f?פ cQP5!RS^`jn <yѐL_6jQ}eQզt^6)g=˧5<HNi3|{vkm"Z,@y~n$3"}L~ 6J0ٙ<Z"%iz1]y&bl}s !d+3 x'ݎ =v֡;iz/F=ġ38[_ ojF t bbohЈ6mR\U("#Ze&9~MǏqkobJRQQDH3ڬ`#YRfİ@5y]Ofʐ4%>yǮUc&Dljv%oUmjK?7wZv -F6]_'ƾ;#!vrY {a{ʕA`DE>xB1"SR냡AVBl=&8d>\R85SrU 8P-8E1tED8{xSo'KBPIaւa3}2($# ֑>WM&1{[?wK|Ż;œ&sRHgz}2!_)m\z"醇># l?lL #@TIzgIc۬gr7&ʝ_mo'a9*a`fb/@"Q_ouC} \ ]ݨsu]}m}|WNw-34j6 +?gSQHoludLx2?Upx"nӷN0@k#>E宧@x0EUwp LoƷhM:_UXG|:eC)ǯ.։87<~ #W\??яcĀ=kV'{~ؓYD?6U|;r~ o"6aXXmNa'w7ewzr՗u< ,s7=)S)/t;@Dm~m]S r=IÆoп;eo rT֍O11 b :u7}d_2VH> 0::Q Q X$PD!p{JE"F  ;uYwi˙QԹ"9Z.I>wUŔLNIIՑbK>p4^6pw!.~} nՎ~ȃaz.!ԟ])quԘw!6lsZ\*!,j̤Ɛj4ONBǾWvMu!j9:!)U] CcpxV_`kq1!n|HLb8t;m!l|iL XfO:UW@otq#ʹ_T|5&;dxbc[>Z^ BVcNp}x-S֭\ iD;(Ù> stream xnET8_1R.5(SԾ l( p0V_o"@uGE0m Ur9$2pM#NQ,N{O`n6@m7 mGcup.jST QgG=TO^%H*z8ռFSbk}B&탺תB4Ւ+k aQ$Y 9z W0xFJraۛwT@= w{*vÅ  AZrx`{\y(Ѕ4iFB#,c瑜[-L1\,2C H3zs)zx.7;.NYٌ =A7[eeGYˑv Msli`-Egc,+Iv_F?bUqrQoO #~X^) nfVQ|Nw CϹuNDNfSUw ַש(c0ۛ^I8o}SnMYkzEr{UZ7f:lފ-jϛ/kk#*@Sa!J*``>PƭI3-1*NyьU}Xզļ*t8қy_UH3zBMӣP}Ycc mo36֓[Em!ʭHe3/ U3k"QKqP4\싦(Ow6B'5M^;iboWjFg[Jq O/cD C.@C`Ft |eLn`htY$'$ ;#?8GQ.ׇ('T! YdCHOJͻ}}gW'3aDmNp{-v$ɼN-'{Pȣ3?߇^kaPmUcO/{eѥtp4~g]\rʚ' $E݌ݜ*`N henfRYKR ([oh t 5*KYw3,a3W ҵ517#"ޯӭfwb9)IbIE^Ec1NSV hpֹ[BǶIR (An[99>Y #\v7~efɒЈȃpȃVmb{Zg9k!rG\xF5cz^_ČyErJ˸PmZڷp{ ZJe| C,X;R*LJBeq@0"lA8 JX↬[@i*]~iU')K\ P@̕ n+e\is$.zHT(2p[NdŜɖ]g*qh. 
Oŵ.7U 5JpnSAe\ݧ:aOHY5sdn *gC<4c?<+?^e+=Vyx^83~Uv;t 8ݳw5D@vͤ35t0'/'endstream endobj 362 0 obj 2006 endobj 366 0 obj <> stream x]۲q#o'3Y%vDDZi.'xEe!f0fʲTz иt zx.كEx/(O>{cj?|UoJgi}ָ6>|ك]~q.vuSJ^ܮT˛Fm_OӗO3k5lpG?J}S!6-koܖZˁZN 4&b:Mt34ʶugOBG͙6OIa>s(<Юʹ?hyne&ei9:|Umu!Ɓ㫹woNC*`X̐Am/ҌOqϲXk~$³ &؅ux£eK"z>/ Y zA/aO~By,^Ӊ';%tc'4vh />F:xa;'li$'ULpY+.%xReN\RjU\g)KaQtx ^Zg5q-&3exb"1 N昋{Q]IƚO) (uʰYt&to+7ev6/FF(OW@TB??;gBs]]=j@hn" knMa&WHCܼT5xa$GE`zƐEo']) ߂WiQF?9`sKb+E !$136%@X3^ t48B}LO! 06 aqxaOq''hDpV@ ,<)L.!vyj\@hp X+V6O{jKHil`"ABpJzG:J6cYFMNw%aL?H]T^#3xj-mX2ejL1 Gm zLP6kظm=n ;ݩge gV"ؾq4VCf! I+Ȗ+ՀV2ufDΗW<* vi jڑq-VMO(sR/e6-!hؑhj ?e)Zol9An7xpt2_4qRC!5I`TY$Hg-HfΐX]3$ә3}Ny:k}T9g/:_=s Imyl1YӎU=o,8i g^W{2v 8"ZpecBc=<6F٦@MtI#Xўp,l39!.Gv |' ׻CάnE z5<401ཨ2̲#IOOy :j[S*(x-[vKlHe!cȹ7+bwGw,@7DҜmv+NO8 YUܛibF@ī!y= 4,ySy$y &Y83\>+h ,+m򚰇Y@jxzl2dPLCU44 !.FS= 瑃$-f.;RϫzGk,}2DegtW>-j4)XAx!c*&G&tM"S勫.y{2jGmn@ܔqckO0flM(Xw 7 qctmn E}'\fal"idw0^|PnzCTWk_P0&ho>O_sN *=2`+L&~ܳP6fK#́h1T݌8<vbq#9̠?<0H[r"w>3l @x &рqd PȄ"3z JHR 3$YNI-!wpfb0drw "IѬU/vCt`9=4@h6@m%Dc̙_ikxŷAN; G r@̠}m5rz 6D1EtġӷSfCgQv `r~Hls'\?y6YTX#:&1N#݅7hBm?_0Ek* 3!*,Xgx1C󁡊eLXG!=e)soSD Z4Inj\sy~ͥtt kLwIs(_Uq2lI:ۗc ̃fךh9U+w5h1 v/ĕS@OF+W-N{9QLȗ~x(:s">`5 -|3f'OP!I ljuuxO1Ab{&p` @lD08mGb1D:Xܝlh46^2*Vc'g HC;keA |waGo s8ذ{"+sݣOPQH&n, CM(e ~-88S 6GIfdB}_.Lo,J0-dJr7~Ey1NEq 0paOq;~MFW$X'u9-2^5e8yRW#@ ?lֲ_M<[RJܐd^Jk\[*'bcݖ<r/1=)abHQl1}s Os><)Ǹ@Tf"qDG7KT   c 9dlry^.X:q˹v^$PD JAd-%,hA]?E/?[rؙ%@|O\wk`("$>I <'{C&)NJy84vM40[H(7:Aý*{=;x .!y?J:}9X/ Ƭzx.*iƪ|LH|mipg1j9roV=qM{t@#7n%/2>W @2Dr8$ۅͥY,whl~9t" `F %ZODY7w,\8?|Y Qt==p+ >1l"-k=o? ꬿMلx<#bL.^#I o|1')/ $,[dOʅ@*! :9N+Zn ]zHu*}΢ap jp`%rt.ԐQ8fY6ғ}9Jܝcn@!='0NSps\+Ϳrm^wU&#`+; .8vm*:<`܀E>k֦-[I֚hEpb>wJjOJdu s/Or)g$r׌ÕCa h ̣[B0j3yle7 F=YImۆt8)T0+e Urwˆsu`^qn X)G/A$,ZY@n:~F2 pk%VP8.ZZ̅_U(]1 Rd WW"eE'X-Ig}kº0m91Vz3>N=H!$C)B? IX/Y_3$_3LG^(#ލSc"p1)26Esso}Se)@8-JOn1?6꤭ %g"Ksuε8$9ac 0 KV_1)jnĉ7>ܳ;mCI.3J6AAqXƯ&p)`8g߹Cku^bk6]1c? 
SotnEN4caߓhY~u9.cn#vS3:r=Țfg7Q87ܤt ]d '͵j«Sy\pt73lXox}'"zxmsj^{{z"r(C;9,rj(~s9FIo>1I+/L/ڢ2,+EEXL/*%@' OM1#0[^^K 9cc+ŽC%ޡ*xٳFj]cM2i =Yų;m,9(0#s-~;D=8ݰ\O݁1=/-}j.::m90-{xjWu\i!=c@RTǂdy[|!0crnmxPWo٬wpdmEmY(}NnٔN?X/*"RbLo,g5ĸis9/Rpn5mĻsR#kpyqęe;0Ǭf4lΣiG2>* +@Je=({n]LDQ\ A-`Lendstream endobj 367 0 obj 7421 endobj 371 0 obj <> stream x]ےq%{Ng@wRbGV;8 RH U=B([\e$o=57#C%&B 4=c^43G_t%u9Dm/sWnuC{7+6iVð-4x R&"eMfͽ Ŀ)IlϋbMOڛ <يY{>92GWuDszxI|/U% F7ԄK8KA ;Kj^\I9(e[=W0H哋X.P\>5-smj(A~'Lh` A\G`\P”&[I$pb}jA I}Ȳ9mȃNGZm\"J&?uQ6ت5"IZmDHl:/> ?8ϗ:r=5jG+ƪ*=t%]+=K[-/4g} OWG1?Mp7;EViذ\ !_Ci!4"3yy2h KeQeP#9J9ͩ[e;gPZ`|I9G(%9}bc҈f&+\Gc|qhQzFӎD%3Z2[gOC}U,J/X+xUs,oz"]GړByXmT>f]dM\3gD^ߘݧ QϬ[1hc֠&}u4.5dPeLj]p1 @4!x议l|QŎCG SWL }Nqnś^3@1(CVy+[[k& )ϩV/^.X:e'X-$K%"Y#8q1Zu8?9H[S=PRj1A^:XFK ET|k5,HE\<*U[ytlKNrp gj 31[Z1$"~|uuW Ɍx\U䵘y,3 쬣xYT4ά8Fݯj0izZ qAL@{`߬M`k"jӘ2 j[ x\:z;,!f&]I 2 )o?)ޟ݋#EY+?CJV@_ҭO"g@lZ=-̾Ϸ{$]vAh4b1 MU<(@ Aͺk1^tPu`9@79r GsU (]lsR9 $fEOP+ I`TC"jU$4Dcy0"B*wJ^ cKv6[?5W,!\_Qy Ю/@@nPPujP21aL$ӯziWx,"qΔ%|w?ܝ]$r?XSU&фcRrW|C?INi(:^q^97]3t>bh8hB790cRLb&ŕד\6AMl:);D^]>ġ$JH'3'ړitPwwԲ -- ;ߔh8$N{ sAY~V3tޘyu:CS92Z+?.n*Zc6tN:#ܙ;%:Z6Ј.ty {hBCƟxdlC{25@6?swAx[eBWDXg]bDm!P;BF4;Z͗h3Ns89`(>0*cd8\Ms6tJ+U5/1 Vа+4 K*zUQRea'28&_gQW)~~2XlSA&n/"+*Z4NI;tsTa}|HҸU&)BO|UB>xl`0SoNM[%*V8NCYfE:qxÞ9x7 Z=2Ȏ=R䪜QoCEˑ(4"+GuJ56p,3 Q\l؎wnIY<=9rN:Fq_Ɓ|h[ɧH>ty=feC˝W';w5,yHv߮m+^C0h45˼ͅEyn:ͭ,κw,- y!cT1FQq_ޙ vx̻m٭;Ce *O,wyۏa[ UdVY/)h,qNƀ2@/"ғ@aMc⻅޸}1Z<ҺXio<=Z%YBAmUgh9aRY!43_ eb 67!*V݊q"`,bΏbmt*L2EbAWʕ:.0!VN8Ҭcpɴ6alC<3*%3>x,a4 Bԣֱ K|}6 nj^H~ggzS%> ogx9q bRp:M~Tt GkB<9Я6?#, #LT i[l} sfnGBUkmu@|Ip<QbYJT1+5tqC.b7xV7O"͋AL&Jq3>$ :Ej̤v /SKzbv Pp*Kz+5ѢOD?qTxGp"F#$0J;!n~6^ښa.]@køp#g (mO"Ў'[AjVn?q-2 v||SG{5bc8 ,<""phmcyT J$X.Ļ)C4NRabc и7h^D9bq3؄EZрӐҐZ#\?v:GbPsTMjaȓ!8b-MZ7zMdDouMpD*}@cɾPzP"I-Yu8_.;n`w?Y{=II( ?6oCQC&FR9-!5; q4JiD8[&lr&udu.  
n1h (|ŀcUk@ϼ80XS +,ݸ| ig ؕo7<oTeDg%/\9Օ:Cf6?EG|`9+we0vF""I>z+EGvhTZU:qkjwڞ7vAu(d~]kNن@bӣrJ~ 7y(U um/ٴv;5X~809pC!l,;[7ʏE?Nrk/i1a.9O>Թ \F^ A`,7KVo^+^+M`6zՌGa卌 VS/ u@acRV%ߤmƳ6s!Z7t$Ǿ,:̂ BSoONF{z`}37yZ ~W#fk WtZ,tASfm+OSX:åOYo4.b v,=As~oNendstream endobj 372 0 obj 7105 endobj 376 0 obj <> stream x]۲ő<3iwݫx6d x0B"n:a>s2WX"S]u̕ެYߏ|?֛}7?yG ҋEyUoJ7ބ5.7|ߧ?ݮ] =jR'wbs:kVŬQaۯQ;c ӣQ^i?~'Ħ%y-gnjՄ8S:;*7ӁJ7S0*i#gINἾE0J,Ooޙeۤ>-<6!7I:ţ%5 a}LCXc3=3!z:CZqפUzCcm \~rt@. &h"i +qXejA!÷Oy!Ӣ{Wma|F轎[j@vC.YCm"N[ =ou>nu Dԇ'~5M CaÀQ`'" Q|ï;bNx{W!>hݣGTŤI׍dB7Fut" .Ї-k &XnET)ߡ,z:Є3:7a㳩gJWJ,a z=Y וUo7Dݧǁ WʦrH[-wԪuCF]9]?=ήyz#moYJ1 eNzX|96,'guPSdT/vOH\ w )$mԖ!>hm#+Zr0P>"*}`}Ce]m-:dʴ]r z=6P)lb^\] ] C>0c@qao/d] * F#{BeB'(x+2lC`; 7YE*4@%I8[Rd1?ET2rߋz^-AAf]ƺyNE]*]Yc +N#6xQ ;U4"1U- yDX kkCc)w#^%X*Zw ^0vG)#pVy|_H}c$479f3hG%14,6jV7oUZ"?бǿ8^k@7yc`yW:=Ҁ$+J_睍Io~#CYBhO34fb{e(g25ֳ&38  ?+n+tw C'|=- i[I6ZXѴ$o˲l'LOWD{Dz<˃X44~a7{D*@F٠8ϠvI`0ܐ8?5Te=êH[h@Kac)_ֻX؊x <[6j>h|l"? ;ׅdq=IZş)YZAVڑZ$MZO;T͕օ] c!Yno-TGVOބ~`>(y%t䪍Y4 8P HzYE 1zR˒$+GUcO% HMH/4[#G )fxmJ|=:bQM<&{zO:?3Z,*<|!oiu/8d/8 ݡ,1d}BO Z5ֲA9:t(֙_ʲJ5jA:Kj:p0Xi?ƞz8hU3;ѐ%@=Ӓ+HyDtA|aAp-RJ)k'VḻͰg0.8 I!͂3^8)9tи{0܉cZ`AZWwq.!- #CكVuUNiv,.b8j٦ dC :Hλj1Hq:` #^P[) 7 7U j ʐ)K|@,=q 8g&t &!%;E~uCb^snw4#Iȇ D%׊NØ1{1zsEplXEX uB An9=]:.;=Z.C3{J\&4SBeĢ^ n-g xNV壳ȣ Sv cF uᣩՒ|UliC"WON>{ U׶9q~'qƽe" ~x-]d?WyeuݺGYXLΟS1XiSU}쑀|Ԉk3q- r$}GURأ`i~Y~ <_*<Jɩ.JHK?՛lxk6g$2KgK"Ąá cNLALt0 (BV&YΒB#ܳr V  :DejpG&A.M?֟v,- PC @8KJt%]oChwL/M\j4!H& 9LsRL1g2'M!`@\ '& Ѧ >mk:mO>'`e`>. x`'lJ Чa,DҜ@D~'e)>ɫn6Sd߰璤$ ՛e+ <֎kfG:l;#+K8:n" |'KUه `2RKZCЧ ]eM2ͱ7bӡYPt4=RVCqu|B I<~!):,!?<.=2Y5rY']Nq<dvYۤ$25t(sj-AR >ѓwK$ V)ן YvV@PO2T r#t6!SmpL)I[pHh6r8{QˇZyqxemeO;X]KRK2^OҸ3:T2{x[^1|ۚIn@=+ C% K؅_zpc`rZH2x лvNq[&gHY6ra'L}n m ǢӖHM&6jC)÷ ,3@5e~X(|h5qGa. 
u N_;U(rY8su*gNFQ 1_|IUax C"Zi 5orf]|GnA"d*aV] |%r?Xӕ7ul/6OQ y H[-]+xo**߸>oB|ύq<OJM܈]b3pH3L c=3)N$< (o!3|_uk,D,F*%r2 ^PĈ4Cac(aR^[nm8-ud9yxdkxE\\9|?4Vfn" 7EG.iG `$L[dW8z^~l<){[oW x&vdJ' A^|"؛@4zZu+,AVےZh1V|uX𫮉(Aڌe9mAmgw|Yh}78LtUI Mp;˔KaElQ/75!sZ38\["Τ;o)b2.̇/z*8z&i~{t-~8*Xw6ڳeaZO8nj.&V@f|$n+WXU'=kU/"[m%JY4!: z 〄0Q #G$HiWlNԱF萘E~8dgRt/f. Z2cb^ 勆唌Kp=&!?/񸺧:қAoQRKbDQ%+ &j1n"Q@EX3".| U".$۰vyPN(;QN,ӧMs'A"KMLt?|I(*hfzҵؐQ&pRq p>Raf2{i-2=_9*'(B)X|Eb2 )XU`ҹ"#KTq.p=sxCnt qp"c2UEBrsiV3 q$͎DJq8@Q, h\}٫}}Y#p{S$ܥr 0`1R:1 .O3#` Puu7+\X Y^2fyF#gM;ϠjWfuQؽ/IyK[nƨ^$$lXRFڀ 2oz?Ay*T,Y00L)OꜸ#-l _}'c\0ەATU$z[ z*J[jב0 $I'Y_Y]IzMsgO>X\Nd-d\z9y$7﷾}q1悳^GLJK(5 GpXuq/wa_Zș uA&e +D͏bɽy]k1H?:Sg$h=Y 8 @:7vdgߵnN6]t^7Y*sB s+/&SpAIg8PoZ)2].S2@@6ioft8 1B2|?s`U?b"Q=۳|/`$MNJi!T@}ZG+d}hcg Ⴓv(l$.2Z6OF~CO ]ǎY]>5뾮)1{ٍqGx$?-u@n^ &KC~mZ#I.8U R&ǕS3F:FHEIa ~.zmٵyK"+6+\# J'>6;C;A:;ICήnh;>B J1ͬ6JM7wR D9IgRNC {L!-͝G}e8ڎgr[= 3xumb2||؁ښE55XKڰjpembzJw?5kA?Sendstream endobj 377 0 obj 7236 endobj 381 0 obj <> stream x]ْq;Lo%HYa1– bc)~̬S۝ HмS]KVVɥv}g_?g恵gvO?}y ^%y׫OURWAEղZm8'kP)%EoWgנߪ?qwʇdOOhxzz}ZZjǗh3lZ5^ӣxbբwʬΟk=RGJ2fNa=}:F >߿;M:؅Oiqԅى -k kIӊt!s>֦tFsbhd6t ImI[Gdo 7H9z "F-4xp_^,^<fI:, 9o6H{1]+c- >>~8qgGV&jUK1O?Iehj4IO`6ƧI&BLMfKu c AtvC/8[qp4ހyFk:*eéUR;#F0N/~G !GoN<߃Cz?hi?9[b5Q!Q*ø꽞ȯZ2"-\TRxtC>'s:5kgDkيZ0KYt xP#L(/$o4AbZwrqcT(Ca20ՊL6o3%9=8/Em*ƌt.e%,:saUid5֚ue!LEMD I&@-m=8+h×َՍ q|yѲ"$JTGi&J8pĿe׿= qpy3126;/'0Z2chT$v 6m_o6@椎G}<q[;{|+B_Ra[2!z1 iy,LCГ}z RWx9IV>62 5rmiu0Tv6Vm*hG6݆*MR큰5fVpo7۲nMqC Ғ!yQ'gu^(i8##Kcŀ&v mAZc.$Qp-N%4?& bь#l`e![ڛ!$NNUP;.5| x;$;䵩*ox5D_d4nUC_@)Vk);XA7 Q^&f]y`b(: 12#< xglx+{p)Jt"-b4TqlJvf]vD$Ԭ l?~@*P!ϐ2[EÆ=?{[av.Zfkmb $YW׀B_|CZ쎺Gs电F m|? gFj}{M[p[x12 /]{ڹu_^s0dHjlh|tl#2x&o\Or(P@vt١ELgNuӴ^!Z u׿3Qc"NÙo>~I[|+GYH:MWP8Pn[YB9흿]^Bm&=GRHp4P{ JpB5>{<`b yV2E2*aIn;} {h_ٟz' lt(`'Fl Ùΰ}@d||&b UyU-ͷs ѵͻϱ[_ɜf=E% BZ! 
/!f+4m4I-?I5 ?Cעk|/||ڷwB=M蓣5{x./<3Sy}̎O+ڬѬgp4AI$́ȃB=S)q<( zݷz]PtmWBlAUn ovG)!XD Dע >.2Mh7&0 Nq;ۭ)S[@Vc@Gȑ;$ w6ctD#at>}_C xqzRѴ%쒈K֕L HV,յ=>@wJ~U99 VYOVhj&x1jE&I,CVTij:Ӯ 6y~'ABg%#I8 obpLD#r1Ur1v%X",mI8?x;r c#P*{{+c!)ĩ %"d,9J 彷q"o:f&NC?x dcv:(X'>2?IfO1`U]cuys ӯHO9?i6 cKak$bCe7HkCr'[H`F!=YG0Wqt ƲNj5aQ]7N5_krg1hW9c `۬ CQh?}pUnHesrS7$^r]V]R`^B y `+87}.(DSPzn'?o[7#sNuZG\k/am9u@}*3 @@s4hE8P>ۂDB;Rg,[@uQPmQ_A}[&b.mwXM ٳ3 2:S'7_hطG}2W@=h2ާ}O5"ow^pZj.Wg.op,4F x&=Y4P0cO^y"pn80 B'AO Tzdb2f֝@fLm (MB{G>~G$0/:U|\xm_^ 0]e80s5~/Cjx~!xo_3j#K<|d&WF]9J2u1‰ffz%4Ƴh^mi gUDF-3Hp{כU.sXtja[?+IIOcХtB)ĔJ3 6duA5_pDRBt ({׌P2Eu0Ǝ\0Ⱥ%9pI^ķ:aƑqGLTue19nv&]uwfKn6UDZRudtb+L>,i Nur54uǗ~"]*KE9ۢ`y'cuV2 өR5|L_ f~  _{E=t[ k΢ *q5n -ǐe@N@Hi3 B|zb`Z<ÇvTgu5%$h6B#g.ᖓsJp4A2p}4OkՊi4Qw:?4-ލ8*qSxy2ܤ];ά>..(mRK%ݢZMDٝ\v(혌Y~%nyWxZH΀nn8Vy3i(X@f V {=2JC̴079=YcQYwȚU]Ғ-2N.ĵ%܀Lb]TrZCn:/umivU![RFI-ѭ?gX)b'82RFWg Mָ̲VzC\󤸠\vJңqBug /m{\;݂=K=RޝV;CˣڄB%*\|7՘wu7l{N:@'B&إv˴*4 ]%2XwwHсS࢟S5jbP7YӧP^S qŦiy|3-*t8i!V0f,t¨ϬmZnU {:@O U@_3ge<ewOn`ciw6n:o'2Bu熔MH伩TN ^ʛi87AIw`E"'C*oRGi΂?-ڥlo8Kޞ+1IMg I80("С;V +դsQRX6Njwmgo ˈgteKcSr\-u^=Y{U5_Ҍ0-⣲?_a9u ںIhfKmۚEXa*!y.Yqďm?!Xu?/ IK L(P_o|jӁĽ7pdt_`W uqD*çu1zw*QਃJ-Ra?[ B(8,|DޖV4C5_j i)pîv刖Z*E7tluTU!`Uħ}.SG.FB->6$'->\0IƲYJf4'Tpr|=tl(T)}WzB ک.Z28e79ؓ Aݯ!Zje3FDKK ~j}'sꞫ<db0_H`r뤭LSZQ7RT+EUk= 6%Ȉ*e34Ӥ hK+a`^otB&囧@'kUٵD >5D'D';M9𜨪i@krGwGE"pHmKr$L `e>ʹYٰ0*n:'yҳXBOLWR+(:]y'[W#EҀP|KL ٢L].1$>w+"\bͯn'$?d寞 2%Bp*Bf_W-NR3Z^:6< ~̢t] jH-^2NJ-Je5 "bH7A u뫮cwHU#ׯnb Cg]L-$i _'|ۭXI>HU{`:J:V%4&@g$AԿ sO"9tW %V:=cwhh xR `2/fSi>X܎]?_<u\Jk_&떋ɝU> stream x]ɖq=+zv);(MBcR J?D_tDfV (p)CFlFOz/͋͋| o~u ЃM n? 62⌗xōa&<x(6#u~*)7oŦ*›pzPM SYc)wf4 O-T*3LlBoB8ib<'wrkdn~<oQIdKAvm"+:N'=V⼀7u6nGo=}i8o^'XF]&- u $MDoqj@Cn; &oFI+U|-M+Nt e.+X;n_;|:t?%ҥw#ep6 7CR՛ ?}f& hWVJ!O/<4okmMN4rm.龼R%Zv™Of! 
!x4y&CxzNNTtP_h&R'wܯWYp"U[Pi"8|ě%/lXEW(O!:py4f<0ڳ<Ɇš0 nhpzP3T֕{~{TEFhuӲrFHkFV szuxW?e1FZ7MpI N`S} A?uNxKjሇGr#G W7K Ij Rnmݯf$&1];n UGy:x[?<tAcs[S=ڙtgzȡ,-Z|U}@/x%u#4yv~,@,Fx‚T+AgIҎy l(h0RҭI捏 {@|URm#V8+4h:H nN&a%.C^1]Bt-h Y,%jbђ(z4HmY$d-rA[>3MJ^Ќ L7UZ$o)Ҷ-gNNL#RN*y~Js#?+" _yY"qeszT um ^5 B8N?KOE4;~-yP[9>RB1Ϲz*lO%`Z.A5n0 %oG,k7J=+5cSB 6] _FjC7H&4369B-Hُd)koUbL Wԋ?)zCQ-g`1&R`)РX_`^Wa0wl+Ӟ gF5WlrHnJa Zgk$sa`?A%k%$ FeHh77.KmAYm-lkF8fq+tu aS2ڌ*8&q{Օ6vdjZ)|@*w,x3;,U#|?qxY}Ve$tW+7hyo-ZI+T5쟛WdkK Y `G!'8[E[V:Vd.4ݸK3fW4/2 m[~T(L M"`}瀶l/^Q?Luͭ 2Hӵ'Y4S^[dBV=WA2D4N!'B^ƿ¸G@ѻy3}QD {x/7cBwBK߿4dIbխ= 8<KgQ/-@DCq54js\M:{!VdG!\[smiwUsp99 Ri WԬʨd:Ԩ: Gf}9VBP f`}AX0 &WAg\nlxNQ NH>36nA,RS8pCaoOuGgԦBox ,zA 0uo̳x 7)V!8׉^2*h4$H[ t9P h1!sMt) ,ޜ2 6>t~ư> NUz4F6#i!+Hikd7<釋!VV )p9{gnuy~*̠orذ¿Aェ&"LEв7YK \By)B1sxSө`g8yu-1@;F5I#-d4mdPM1NbAaE̚ ) v+Tp^e &n$InICyph iG-"m$rLi%!xN›n \^ui9{9 '-&~?p䁖f_ yјGp=(R-nQ,p /d n\̷SUh3;%4x}~^U38ҢFnA@Wg(wGa:˕W8폋s\GXE|f45*ȮLq!TeQ^-Z7HM%UYHCg 1l7#Clˑ "'뎻AuqVH#fMʏZGŀeqMbP#L ±{sN8Zd%\_L`Z-=`ʳNJdL2$-hHw(:X !?G;mv\uDux{\"TR<:Vl)[Y@_P1yya21~!gs|Wt1q]9h%bD z XU_{c0[ cY 6B-s.~*g<8.=z W4}HE!V==AJ> e] w,2^#N4W*4Di2}cʗAKLm-k^p[.Ö]=W1jp02M6PIUv7:pq|DQ ";xctbw*OAi͓(]q?CbX̤^a'\O +"+4Y+0 kwAOo\=&lrPS'ٻGBAg ȟ(Ӛ^I֐n2D|52` {$\ZdOS8SM0V 'WD¯N_L/+rGȇ:S eufK'gGM\( 88Esࢳ uC \Ne D^uP"ZcB3 &%#_(uP#3fd ~R/t'<˜ Ѓ8HVDǻ̐O4o.Eaq%RňI r)ny^JIitޢƁL~S( bjb#,ztB]-d U1r9,:^=#S}5@B}8حE߫3 eHbRma”XsCRjS66#rEt:ei&>}6jl4%Kh1NJұ˥ygpjevh]tO 'CipVZg#?j'v%T@j7j8( /`!qC*`ޜ|裚عy~n C'WXaRH])]d]E_a弘ե["G!*]B2$fTjͣǨ!1a4ly:Hϳw7R[gu3]yltG(nޠLOԂkJNDwԁMYBiqһ ;V(H`Lg#19.{.:H{&Mi}u!")-C-sNѥۈ6yUp*r`fA~q9Vh3[j)ϲGU ;D6ۢ~D K?R`Y{ZsQQdkЀ ;"?tuUĈcmS͋ Ⳉ[FVB~$sBxس8` C _g3c'+ًTޙQ.ُ` / -G\S -VbE_~8}#neĹ=-H'΄{0|$!'09(|#b[GleHi\>-_O%-T- PA}ڭOtNzrӊ3f͙E)c6KV䃪oYC߄gSHP77} Ly)='Y2 kz30r4IQA- ]ΐnNY-%$ϼ>SfVT.]XHsQb`X}>?^U.:SOLAm[K;֏+>+cLv%-l ~G)?954U*{&"]9ɚew76M sW MΧ%ׂB@S)$WL!h~%ǪU#ѡ7odR]z1xUx <%{:i}#5楠$R4ɂU?ycq4YKK+Q~yj񵩔!s=P%8|J4ubh|!be/W%X5ij;` LW>b54WY ]:ߜ{G\Hպ[Oƙިy=+e#Jڶ(0;qe?`+;YL: aɉ.GtNٕ2;yyZQuzcDJc(/Bc'h2DLVCt@{);Q7BP 
%Q3rvAO2Kfwcwx6KADZ_ޥ2bt+ 32?V J.ע_xL 'Wo$鳣)9--s4YN;6g\-Н|M3{me\Du|PHwlk(+wZݛ|!&eZ_)iGIݕsP y5@08v)ǟxendstream endobj 387 0 obj 7418 endobj 391 0 obj <> stream x][q#'-gS .[|7U,!΃I%bp;g\)=p44@J-Jߧ<߅oH?OjҒՓgURWAE7ղZm8_O>kP)ϿWKޮ:Sנnn o;Chxzv}ZZjk~4nu;[V4p~M7hutngZTt/yxr69}{zuW6n;g7&p#[f霏f%'>[Yt4'2F2*^ܽ&|c6z$M8fק۝ 5GWZ+êFw֭q*z v̗faO_ L0V^糤Mu7Frw2FYçL>Ԏ6ooՍDJyЯ̭y}n-6W&q=o| w!~q;{lg[\'2e-My,|~%gZW :M_8sʈ!2UiJt"MzKZhT%2z.K*A<6=[4AB:~#П~ƻL*BXd]ZEyrI;xq`fzȍjFa7^6x@MÙMtj!XԔITţ;~<2])߈,R&y?͇%Ldg ׇ~( ؽk)Oc]_+JYJi#t%n> [Y>5-'Ǯ^"۠dDBH=ѓ0 LcH~[h"-"L8Hp o,[);w#p%~dz6☐fDAfh!{~k!-iq3Vj!A0y5[i9v,87 ],SXu Lx3dmUO񹁅p_o_T-~`0zB"`_Q;{w kPBX6=†w5l(g Hk#jVOdM+q^mH7 T`8)pۀ0<)/ T0{ިM'< o<ƪd;…m1ttÆ>y4rw@Tۃ^p%M`-~9E;"ʰ*}?Z'b_78LӺ_2C+ia ZI=6 RE=vɺz?58pYj;whzmn#!qV۬_w`2HǗ)wB>LKĮK2 qf!DpZ=mXDwXu@O7~KzY tzj½!]¤K-f'b*Du;j6pyb7s`ޛ^ʏt@}) X/ɲr0$0*/&dJvl Nbyɻoude1x`s뒇n.% !>˥Wz۞z6J0:?\)"A pٺcn9>扏6I&E1q"!@[~^~~umDP|WmM#oKw=Z]^K5;PX)?Ơ,t0y[pQLHs)G `3HsDXfx[$ t#!NczIF#[TJQ RAE@tL@e2HSs/nck3<*cm5Q]c[X+Vq49DEƱ@B[޹\ F(qfǺ6'hƺ6FE 63_&mq+ 6]{_qiDzJʁx'3F}$Ex O1U]*҃ xVA # pdYo+ <<8[Q`+|!lݢ~srDsq`;ǁ\< WWKKw|>A6O4 mjuĴv1u 1iAA;[dƁ;# =G@c 4dքw|Z"@6zºwB]thSǺHj7f OIdݻ%n 3zGr}ůG}ݲEf~as~'\A$caɜҗ_)h1Th&3 '}W?~4{rHPޞ~%Ǯ3 XNČ`dP pDݮvq"6 VM'> î/FӶ09DxGJ0>x` a(F aO.iV7t42c/{on6V?,SP}uAsQ/Ρmu!4ԳAڙ#Ъgǁ~~l-s$I@>Ni>hg &Q"^ML ļ}tNt|A>JBt GKl>uRtu~ =bu}%P%_5E N<ՇvnoGD:dwhӗHG {|[tSIq64yJ2r}2 OFjCcO]@VlHHGHf*0i;%T5Lis?QR1Ib6IE&H 5! ddvVE+I]M9*2(݆AZJ7= 6!or(GpKuYNR"c9*BZ1^Y|,%qo`pEfG`yse\'lo^lK嫄|`f[l1>`Tw$[ жd[3}7wZ6Ss/^VB`pCyI5˙DÑfU5|FqnO_lJm3M } .UZӇ88s=h=zǞD:ˡs d(5M P!˹K:?c9p5yJOd 0! 
n cHINVG/lٕ/le1 Ҳy{_@LC|g4@pgXJZ#gEI9s֛6=ANC{^dhaa2ut48yF..IKӧ'rL,JϒC{-fmui!Q&':SG!UD5%b *k{uMHH)8=og4b&pmv&Y[y>!67($qNH#ա`q4( Wnt7 ʠ}85 P9/*jw8>%AńZ|qBDlKCFh]iُ$T#72t =%ͻ(wa&yDZqqƗS([L~|Co6~9ɕazoN;B֣Fܵ$!dR9}Z Lm%뼛{%%ji4mVX4] DDBk8q`x}.+ܖޔ$uP$u 4Lj߶w\J2#Yka7vi7P?&V&b%Hh7ߐ}8o߉Yjv-[2V26mb~xIv.fỎ=W.hTA%dxTQZ UH]RKc^vEgPFݮUb$Hnm<9FtPr և~t5/f:z\phw^v9V>AH-~Yy.g!5'`&40N<:ae$LVkӰQtxG,PgiSL1Ds9q:Y5O N'\Uyv„K^๴`I<`nwԹ)oɴlwNgSyڃu7նѐ|wSJx9Ff8I}ݝ yUL;-?Bao Jp2sJ|/ ⁃N'NNjs抍TZe0LтUGt{ݭ*AVq\kO%l7B`#Lq<5_nADXVoS@v0w/>(c;Һʴ^9I+\cǒgagn]a~K&uw1% bq`u\"h`@cM$s2 "Q[6sUYfq˕%>-:"zP E#1ȝP>~2SՁidB6i>nX~ko'm6q_4Vu ) jԍ!oh%kN:+t6 ܘ!Ǒdw !w$nLApKHyuA9=JЗEr!CB V~iKI!%rX8{36b]L;|wKhWkvd?q/a%b-NrvK>"8= Ă6!am 3Wq|G`nƥ/6*M,W[q-$hjKKLq9.sM(S)eY|rMݱ`7pۍcp2_MEQK鱸^ٔx$d`,̾xviHtj=^~_˘Q hX\F`x`+TԸ'wGF| eHty\tUFui+$=JgTz!~Ez0rC>%9m'T| tJ?[ɬ0v6AN{zJ5X"ouX-Hq'_EZ- m=E41-JHRMD`+uV9.\fB.d[sPF&`p؎j_ B+bƳSDY|l$|*#uUhjyH,o(5+\EXG~&=W2IlM&YGDϏg/6qTno?=x+n -՝ǠXrOeƬBYu];$ ~S0f XM@1D.ޗ lI)heݧl9/R)JBvZ[fi MS+㪇H؏]a)+.!89H,m? ߤ}۪4@_6 S5R")`?2,cT27Vo܉V8T5T;op+淜H}&Mȋg٥&"JC+wЙ-qjCXJYT7IKdL||kSy;,~{(ܙT͌) r!KpZȥ)uKL"}$őMiyPt~"SC(2vg*3"wE$=c]5|$Bp ۾#xr*N8D?.>_> stream x]ْq#;Lk$kDPa;–#  ).8DC̪> :zk̓ͺ?~n^?Zo^=#%)}͏RH/G뒢7V^ *zָ6<\~{.vu zJ`//oXt*`[Fm?Gӗ3k3lpOFy>L6-koyyƭfYM'?tJ?r'wVw:p_sj+jz4WF>PzqZV3*cDv7wMg2 6Co&xѴF_~+l^nqM{5lt]0cZuIԃ:|T12yCZu<~DJV o_VwRGy枿A{ړv}]zVyw[c A^' N-_$Z򢭧- 'SZ؆X”mҋrkگKL:)emYɱyZlπy3kpaYtsZH*:uotKJ6"4MQkź7?Iq9?r e=8CXJ# vQt}.o!,?,YKɜ>cG`Vd -#0˲h݋Opte)u'Yj袩D4<=ystYc?>4JrR{>v|˺_}=6wM*-S'<#OdQ ?{5}94B$3vCt g$?;:yuF/Z+=s {wyS%:|TnJָm Ye8mr#>l7f) ̻\a;]2 _:CgG<:h"p5C3'*tW>q`D@ aB^kIX++B7O{_qE !j?-˜tg.B{u\Kj:|.qU5>|qK|>&Zc;!O_r^d#<)& /۰lEG@$vL-ϙ*jL eOS|;*}㠶_fI}M }= OH!8Y4%4O'IO[鉼A?ߖfBſK4@Sv(+_@ToW7e9Ă0 @i&B˚X]mZ. cjYr.1 =9 QQ)h C@c]2Xm..?VCL-!nHS&! 
#C/wI4LVcP )+)6xY4:\@GLK.:\Q#f=z jY[x&*mp `ՉPIZ%)$Mn5 }Z9uՅ9"'{kH)x(<$֨o9BÓ@ ؠ[6`Р1| &W Ѷ\ 6,a%ɀT`Y|E#ė_u2o3M~>I`t Шc:z *ZOZ|ndW;o5lgŭQ๏xыIo#vGhIdFQ=B[uwrV;LyCĮ-SD2](:JCF;ӚTU>1b$w]7ǣVa׻9 Ž&;r7dۺ^٭d7`vH OE=l;}6"}RJކtt:yғ#8@Q O8}3s{x L>|;5&,.XD7^~ΆfD"vU`Q(H]D~4EV"p!f_23' &UfCi,xHae%C3R jV#yz[HcQ|™2Ƶo?KVul ރS.R;@-5 4 〶;/MXp[&|24e*HdW)FJX?f%+'%䍳v@< qw=F4pvkRRtfG RħNiEmO_L *DC%y1Vaxoy?暗Y5 2 -=B"| >\יj|Fp9NZ)|T7’l:暀̓m=Ӟd89_V9ij.ЁZFިRmiZ*;A =j[!B̄p@/Ek<@mh,4eUt'ւ8n[j\q$[7kvzaPߡ~A;t6 MMTUoNf)ܞT'\QY."¯97 &5h99+桮%(-|8:ƣ䘙rE*>kPjlGPZiɡuSf r4%L%ZS1LvQ|Pd׃&iZV!cҒ [[GXtf?ӄm ̀ [}:^tan ʫ9LⱩxxq&TӫRu: Sz2bt tH%1utf6 s>0.oئMy:,"%:[.P]h8|>sEkby*v n;%m8[TH-0Ʃ|&ȚEQ{`jsKsԱ'&Rųt=ˮ>PYm|=ݾmó\8`Zz i+ SkNH07/:E|VlGH~VbB"'﹩+WV6--2Mg%DyӻP,@gǹ?[](ݽ75$㕿棉1_1m#rE#rsv FIZi|-/]Lr thn9{+G+؊q?@|AD uˁY%-,1=K ~U=gj$Q8|#J˴qGFQ4a?4v[q ,-[&Du. =Dq0oh,M_Bl;Z뮟< ֱH,7cj{靡 CR0 &,>'K.VELMmqmڞXzK.ąf;cy]#|oUI*NMω,Q},W3ŊD>cne@^3O/gkGmrxOʞTBtz?mX1jT<-ΘK/2PLܾEU`TcE6~ؠJmU@&m*Z8F1ͩfW#}(Z8uWb*Xc@WýU{5#^۳Q~iY/C,32ױ?d4JEm&wg2Z[8Yn΂|㲸՞Oɢ]qP0o]uzGVtpx?PI) %a$j*˶g.1mzdl,ዶ!EFjJ`p$!<:֌$oGC8R##؅S6GjD$~8A#y8RN;yv$7ia $_8r8˭+$ >M)+OưN5;X|a;4`ݿP{Ѳ˕*A#`A +t $b"o HFjN7PyQٝ* BIQm~6u$x P5dqmr0[,q:Ok^)Q_wMdG_dU>~2o2w$j@~d;)TUKr^g6QO(qq`ڂcǦ3Tn77V|/=!qŋ-9?uW_/9lW> "~PV@6^GO&ſduNP'R#ZX 瓇́y)I&S1-Lo0{|YfXU| u\UD#t<]Ԫxˏk"|rh%JVdZBഒϔ; EWg+jQpKӸpR|_ 3e\|{Q6Us(@|>6.# M #Æv OU\ּc&\rCBsV I5{NAM{AEoo*_0A+f+?_mnwXDY@w i̥j6C +Lِ;턢>FzG) |Ndxdv;V(%{0Qsd BL@ؔ>/~d0a]}t'tG1q Q$_訾vsPޒt"i#|:V [lS>Ձ(@%,*KW/bT9˘( d]F 8Ig&ٲp.:SS['wХߗ]5pU[*y+m'ef12ty2N"qJEp^OȒ  *Y>.2L.Կ a4^֧Iq*Q"G)?K̜&]q׺Ѱ=$oH !Ϝ.(zhB0RYw:F#[/o~BZ!ZAȉg 4, u8>Ċ<0ohR.xq*gA^ ·ۥ)/Lm0ڶ;W(Jx2hǴ{j(uFknѝ~U{N%,_ޖ3aɟ{/ڔr2-/#wQ<1c~퐺S ç-iW {*7~ *D$-W+ȯG51Z>`FXa@c~A9@Hͬ su=yd|d%*olCZ۟jGqmrhК6sBЄ F%JoefOv m?KЖ 6f􂧂pi!#̳-Ȫ?bA KAm-yܰ5"x-Q?MYsVo7VڳP[f5Y[c &]@P^nWۆe:WqYꂊs,{hZ˨js%Qo8Fv\~)0-78-EF'l. 
D6L"TI"?E ,𾿨m =:Lrs'߯@XREf~{_RJԼiRj)Xd!,{񓧏~Cr<6bendstream endobj 397 0 obj 7247 endobj 401 0 obj <> stream x]ɲq_vAj, T$,&&) }̪SSk<9˕Xʿ߾7w;#?߾jr;Cyŕa&\=xy?OZ,FkGo=}^ҀB8^,18md3^ƫqvoWܝp>czTRxNOE-|ׯeX_GeUo3#u&Уzxb">: O?d ?qQg[aƮugw[%FjQn`9ǵׯd o]DJ^ϹW@XNr6{,œ9DLYPZ^ 6N45>25+KQۧ6N?s}T@2<1ƾ^Bvz6c0VVKxýEkS>L5,,^U6>UDmT1]w_enJ+:tw]Zet"fb*̪z\Y܉ӽ}DKGZ[G~|6/^4@كoyC~&=[zpJWv{7Tt 08ʓ$~̄lS)Aɽ*Щ<beZ"w̚H{+ im3WzKR{ُ>.uff0Ѳsz iO.Bkb׀.iZb{L.z=|3Cx犁s5Z׶dtYZњ]%+̭T<|3uTthD4fɳu?C\;0ao"oKt]+h~PP}k"܆kPG!c݆̀aRI\KI ~-lTEWuE>n ۝8e[7h''))>c͂H"f;$IiH4{ڂBc^A'zyk>P91kk)4Z"iwMؤ61I:4KC2YѠpL݈߃B@o]N":,#*2"4qf f |ZVoVu>-L>XK6q qF~5>yS# 7e88ˈ"Z aikM;w_}`xp!v3w*n5bD䈳:d[kb#&J+kC`v8Ӱr` 75S<["Ě;Lp^5Yϻ2su' ̯wQ UF;yA25~ge2UzN廞yUbs¤jtÀlΧ-NSya;E36!aKJ/s>fVMNUG)u*q*]0"n\el|cFf=ofl:l>,qþ֪zq;u!o.`&Sv!}R@ ?}ΑȔ[xˌ@ӿa>W{G[;=^ $U|{U:ɹŞ/5GwOC7P~%鷱Gm5&Xns gnSd+c+:3z\7=" Gמx8g+U5 }II4+7fz=W~l0;,ٷ;dK' l 8 M;R 5 \rbxhTϷ9zHDgݕ h)]͹ UD i7 fWF%Y7 hw]x8 ~wJ0'O6 U \ &sByxm:*b^>~nn qz B[##3"mQs@K"ذc@=*DLN`;DC;{<Ū3ƳܫqbZ:w8%#V Co_" V[P'ۚ|Xܓ"W~N;EBp 䮏<-F _зvp{ Cmσ( 6lQZH%H"ȈhftD-$}0fsl óHZ-g`s:>< 0XEbA yͭ&z 2 v.t'~_ w쭅R6;ԢÆ*  YԂiܷ`uӍisq?^oq{;H>h&9KL Ԛwbcygf5B4Mp%#r `dHE7&̑1=Gst[q$,s!Z8A晓Kq 5PAuڝ -+X8nanO dA&imp]E[MlEO5A X8iu|L mNY4~Rmk9h"@0v#\;,2+`''`O!B/Cyb SHm+艩P7 Ěx_wurto!%Р,]0M+!{?EGF_IHYm6,PcWvov}yޒ3W-Z2>3t`]l>[g"I'42֟ӹRq87UM&V~?& U]f.QuNh7VµylCY83QJ|6 %[fCy=|%z6F^Lڳ/'W8S%[@'N'l ~fJ+Lu%E6Av>[ $qSDbgmb?T088i$7D-Įc\_o۱G&,tD,K3Dnq(QG7e2:_[ʖ{CK-mLK!d L+2dj-k%|8MDqcWo~5\'gA` +%8ɪjZ4Y-W:j=@gM깧1l׀c|n6 :- (]dLR?ڢ.\bb )Hɚyp}n5Mpi>e%4)B H|JnJLRCmR 4 fLj?QqfFcl4x]If`@1ar˦ N֊(P&y cr*+ aH<'޴W!vZՔ"9EI`2h;x[X&πS [_@sG㬶.Qee^,L?m! qUl!M}Vr3M,)~Z(O[k%w:bPNz cxaYs&64I3^vBե'=-b6bІ2-FqANyZb7eu;cdĸ/I 3jmTht,W|~vG^w\ P?x:r6Gm6\-eWA`;hjゃ5㊟ 1 !>!IyD|i]2S} cU9 Ē0 $|9?e2 F`MW)Cɺu:**sIK.P^Jrk:Zh~T,AgAy6ų46=Q&dm$Z[LZjҹـ7M@.YˡII0qcI AEZiI~.aeIWqP2DdrP[#Ӷ.*.H),E3&? 
6U`J^UB_.7.m<%S?_`U&.Y &>t\еc d@h`IIL(,ӏ>,pA!ӟ.H}otbl1|x|%_?'UDp!m pÆ SgG$h"{͜QZ[~c#0,6>#I+ߠ5o9F` p a*Tg 9E mswKLWqE6ɌSh«ncws0W.C.:Ev19 -̮ClS0hVݫ 1) U;n0sN^I~.@6y5sx&_BnP}5%̱0Fm< 2&8م96Aph,yS8ByNA5UԙXA͘bp_J;*fpOm&鈀hpAvYq6Na^Eܳ'>]/KxWqW z;864|^pfOqVS3n1zՖvz\[oQ.\cNC4m&RMJ۾~% I>'WU# [ŀ^+Rh`^G4R*N{$T[IPI5}M>0A>Jc1c];O^et6σt~C'ȶk cv7՘<$%=WFDz>tt3MhdS_Y/Z;s3ˏ'>p=TyFQo&w4nj?堤PA_7 ^>dl P-O{H< pdup؍Bl%ȜRp&tw'Ph},t`:8ǩ/?yDRq98`AĩCutE-<Sp&Y7.p7ꧠz*b=HTL MKMJ;Uk̍ZT/Ae)`A+txp@%ۢ9&oJɨXn,CNKRa8.r0KU<ۋvluO* |6r*TprT77q pxwJ#qr,rcjV ubֺT)Rڍ.SzfrWoLUҥFוɋs(L$Ge~9u Am?H51[ǡhO|u[PW3ocO |%\'Qi5-L*L.IsψVhUh''r`S߰2gs!~V1ەBf]_8~X,~liI."ƒB>ea/&ϼt lv,0-hbRm_)sx9NnT 8{}g]ž>#i]lIZ8 2(ǹ/[{ײ5GCA{=b2$6ُqZ!&*(ۤی|JxSI ?N?(k_.ƸD㌐MT+1VtiC+%޷},Z븸]k"8k ŷ7s+(&c|8n\j@%^jKwK+TGdK~/ ? vR0&IK}ZqÅ6  9AhVnᠵ;*{ _2r]n&pz /UDxK6ƀ۫Ue$dlߊ5pM]ٚq9:6~ jU7TUiQj|1@7~Xc築=Ox0: \"&VSN@#"0V~P<к_&6_fꊐd{la cQ€N#lr#`OCPY\a HBweǗ \j8x|Y{ԝu;ˍ>{*Mµbc0sDL*N.qC _ٴEwX ?{peendstream endobj 402 0 obj 7020 endobj 406 0 obj <> stream x]ɲqw/|;ɖ,-Ka # $3z85,pѨ2T<,?~}~ܛZzAgٽyJr/gi sfޟ';;)J`O_ϓ<7Ujg>=άװ 4+gBl2yfM(ܟQ:~r'V:p_jI:F@]ٛ$`pVO/O0 !>CK4n]ڨ 6-%o= OW:NQHL:)$%&&}S?s}ɜ2{qrHdoR<]flKpӝFBHt!{zO[&Ayt}|iQӡ#$fb2v流4AR[O薾\Q#ɞEbzwz?4Xf7=a Fm]xTX,ܧE쌹췙CLCK蓾VH[і:'ls<F%jbd?O;,SnAbw{ D/Ec~l쑌k-`/mj#/C(fTtZ+0Pxjْuf|Qd׻`v_?'>ΛH4;~&cN_C:2 |֩$rz_-[_vyޝ(}^#'axano1o~,L|gOi-ViboP ""ĊF0xt_Yhr{Shf]WRaxN|>HDgNo^<3Rw&3&ve vOֵL*d~W&ZP[IP{/PʹԅDB= jo[' C< <}`Zzi@o~>X:<2 Dtq&  fdԍV&rZ ^Et>ߗ w6x[)+;GXsMFcjfBDrCUs֪Īfv vUyH4<VR-5GG}DU.)]˝~wLd' JՀ3<_S5`l@o?A9S>o2?7;SɀD'[YT lI!i4>o5 PXUN[!oLU0,Iˇټ݀!d6XӆqR^4GB [>9LD#{0)mh? Jw1z~"JD-u㭘 PAҍJ[h9jm?OUC֩;ꜗh{<%F!_ &m`> #u#[B[eh~,RZ[M Y`3Ư5"x ]mȀS?8[k_I+-Vv]嬞IN!֌/ tF. 
4_-41 hܾ#|?>̺ s zn&vgt' y)8Bz¯|=.Bz:[։m6%b%g?Oʆ m7+BzAJ1hxF-@?B1e C-3YJHi%cmd,$uT<0.qAEE$`qAٸ9/d[|l[\^cQRU L^ ~-п֟:g[7̫ ;*}Q ~Kɓ+m|Puigdm!D5B 7nMkR x@w"ĥ\ t%fc苳51"m6>9ʇ^,YaF|rdUYhhҙ YpgN*6#3x8Lâp5T8,j}ҏgf:I><>I_:qX0~|]6=9K3Y&}l-l6f̤=wZdp(}r(?Κbp[F %͊h&4Zj7kzГ wَ Cڦux'^Bpɰv IjWSVyONUr]Dl }m҈A AӅ_d 9sBNLX)]Km%͟scm) :եӜo᭎KjR]AvJ]ω Oy-!ɒ'o>} ;.'R.m Ңuj^XKNMT4px 7,{:3Sa?~kC[hW%7s+@7yB z1͑fN(dN;Hˀ `3ѷ XyS?Qم tY_3JGܽfkճT텕W_h^2瀟ģE쎙$B~=T22X4i:͏'$ Rvdʜ} ;FygB-go8zfz/ 0x&K8>{>Z[62;?wH#@|=Bxo>{40[yG?#po!xz3V@~p<KҚރ8yyv\oOz}!h{"foy LG˩ɸܳj .L>)ltXuy0퐜47C8Ҁt3*k8l" fRSG y#P^\SKL&7(JjhB9P1W`wb w,ckW<8MhVT6˵g/Xdgx>Q܃z j VV*a^=])W6nlcu,Sp~wS{AG7ޜZ992—DmPC:1 PIXf] Aaє1Iœ:3Cx:m*7LG39D:hg(dԨ%FFMz;TŮkA{X 3Y\Ӑ\:dyD@L:<([̫3af"3yC 5wZ6F.N!k@+iNSm[m 6#P_AGH%.@Cz;N[S^h Rv-pUHdi ] 'Bmj?. J'TiuR'pEW(-½X %rCqe 2%0S,kKez,G|C_jZfzp9M|]LpIF~Й}117X]k^RdчeI]`fT8\07GuU AnpF6J{Yg.{|t3!.Ne-b~il`Y.WlpZsNItW|M*˚~MlTe9&]WlH~NaN%Z<gz5 }l@Whwh"qMW0F :Mxbi=rSלj(ܔ˩><T^̪\sƭ6|}fa>b>煮t/+yaR'BdbP[7rۥOY-߯dT%UBT"o2D%ft :]w ق+"3"ըH%lFu;64fX znv& tA+ WH*7&zeWU/t(5GfkLSx"3\I_ZئE6啧Lkmd @֒Ff=}x7*=a_x˭_{uk۶VZ[b$G"qb75DnRq W(C6j؋6ghhԄ21 $^W1欂B<0i:@hvt6K}m'c[u։A_ϑӞ,X=gk%Bi':JZn/ ߋ/{kkGfWX: ٿ?5_,n.6fBȰ\"pc ߸:S+">zeWߙ##7#U}>YPRP5Pw603n KcI$n]oQQ|QjI4H^1]pH=+z^WcŗSY/SvpPzt,v xj=i늎|gٰ/PɄ:R'qY5(/~;ǵ!q9D=k0bdzFmMla>Y HDf@}nprdZk,wV-:x2j<.{s%$cit3}ۼ)} 4fncs9jO .Dž;tیԒK|[ZM<+6~28ۏ{AzSٽmӰ_-\gI-^_a}w )~wHFhjW YNETU}0!l1aK>+]e}F5EƟ9udX,Bhq ;(>/2ζtNK(2.Hϛ@D=̇V%QbTe2ή#lCJ;BJeSIҁF򞷨E@dOI2/*jç3ۆS;;:ɟ1w<͇q nd/D=e ޚ$^vEr>*KA"W`%/Հ;N#5钽LfrmfBH|A>X?UW`(&du@pvhMۇgWu͐%"m#l]bڹN`k}m nLc0W0>GHZb4_sd D.D[qI}Pq uW#/Oۚn pť?)1t>)[f]BýuPN#aB6d}a-]D8k*.U M{*L#y`\=7DRq?ijeUe Uמ-~gGFz?/0@;?bmendstream endobj 407 0 obj 7454 endobj 411 0 obj <> stream x]ْq#oഺ*i e¶3J>DtfUuZAR@Y]kV.'+WG?Mxţo?׿>ϞR8-ko?}T^T8qYm|GsWbQ&l?M뒢7VYxTz?-=[}HtC?Z۫u1֬!WW*.F~?N>άR tG?.m.kte^Jfٱ/;K1W:O'֓lE{AzQD8 ,WcЛ̜VY ZW锐CnmPC*Zi_Ol 1eDɯ\;ڭޙƘ~mִ8NR.A-#\.ONHe Vml27=G:x'Z0I[9ӓ QZpMί K0:Ox M1m3/z~,S~ko@b[ -ZiXz=oԷǹ㩾to7)fE^W@oώ|^zHF?-N48Xt9,Vk-_ep}w{T2!jLB5l&*);Z- Z*ǯDi3 
1MȂB)kX7c`Dʽuڔ!qbIx{m |/kELfB߶mC)@{ ܕ]ߟ` v|DqߛҝWtޓVpб$Q&|-r:b,uj86!eDGKj.=2eRN\?c=!O%,j"T[up3!_%EVh\6B D7<Zˠb9}>[Ӻ9.nD]N>JN/r _"EMynv!F{m.?q=8RC/̀˄q v/U$EGC#c;24򋏪14v[uZ"6IcZNrnRP4)8ѫ!X"k*1,ebڋq[;R'~v?-5O}8&ISpjFǗYΛf}5+TL<z ֓/+7ЀѝNı kzV=sʫoxM,ii%m?CtmM65p@ֽ݄&^Ha!!<]87XDBm p$/5.f&ts܉uEӓ"C ZӢ}c(^ۆ]*3H/&Ob U"|rS:?겍Q . 𖄂:nكX?M+3jXC~{ v` <_!FZ4gE4wae 5؟p"&0˻z|Q$y^q[x_.Y ?FK[$S4zY.Bgѫ*\cU5a6"H9ݎE&-#nUUZv[Le qU,"8c[z`Sʅ&Y42C0BcH˶-SCv|y{6 0h f JQ^Cy^nf:h!k,x1Mi8" ni [4_ 8Ż Y7?72 M6/?l|uIO;Ao?[j[h*X X$9 %ǝe4#i+)T֍B{- CJEY1j;'@0 d4[| YVyMD?lcUx $l`l{M@Eu*D&$!-y#!,VB+Ꞻ'G@Uݵ1/ފlژ>)4pßy ].:kAzLBE3-w<#oRhȅlciD@'M!n^S3M,ٶzl3ؼEї ioǐjw",=d~StR?@')[up(`3MfT3i}|w! c5Kץl44[Kq8f C;W ǽuB ؎|S]㆚:QO,f8s9 Еjŭ%30GV, ZϷV#=z J:}Grr5p٨䣠!1$mU$.f{ Fgi p@cx_Bm9-GX<i[9^hB&ԄBƾkC 7F [SYhcԴ]V%Ou{[D#<}UǕ *rc;LuD.IKsS,.0W>9KoCA>g-[Iq|uنm@<3O0P{`q(b{%YX-Vo|I4-wުk $mQ AYv6|XF:]iP44%PIM.6ց![qgkRF}9g ߃b밤%]DK[BHsZ"Z֑sު'-@>C4 {Ǖ_3{wds0 ? ڨ~{Z^LRD"6]W91A](Rg^&ǡKRvL5FP|~0><. |K[vK 7Jǥ,.aJ{YsdJx9%a$9d+Nril2a X^l{PHmh\M>sS8r4Fqacc!yvl-v ĪH+T-%#HqhE:ɝF+.}hEyRh.Uڭ3 G?a׍Il;Fjtg~yon]z^tqҡtȝ[}S;M2.ҭ_#nGrT`vurv {J^5҉S^edwt_<.y4 eߡyW1d@Ҩ;31q~I7DN?L7rgeZحܜ*?-(#.Jsx py0FìK<: O-$J/u!>-AϣI2xV%ʞgNK`QMP"'?;rM0/xu"t)O!ωV,YJ]n-,.&HDNԽl `gyBu{^513tw2HIC$9vJ@%Slcp x"26*y_J-7i\CHmNdYHxW2׸O[0^Į{[:¹|[c3|/7O$(FSo*4ʓӖQcl%&!ׂٰ=lǺ9f#}2pʼn:z?JG1嶧 jG42U#fxm_s YDrхZD lߕyQC 0"u+m][ 3EYǃumFQg9E ![D]F1H}Z23llw "F4f.S1ׄD{,a[as!{bxt.g@)cgruu➾tx}yr;XޝFa5^ X_Ra%R \n"aCB9hMH@##Ԭ/4p tv\U0a?9DX~6:o,tZd~Z5L \xSrBl ̪Z~a7WNx8-W3g9;vPwqSVLߚ XV a@O1BQʋe-eTn؆^ {9R7E&% 1tot%]DY?.Q$BFk^*70;T PU@߽[CZ7~Ú9,Ads`^^̭3k&A@aKp>oWU{۵,Rxuaཋy|(rpRޅpQ5 2Rذ95m%w=),Ɛr~v-lj&rbY\`t<5Ѣ+i٠#^m/Hmsvve4>޶ql3{%ɠe\>%ctkFq4جVʟolleQSWnkHOwntj85J$[lQg"GXkW  ~rՋ}!#=3>ZF_TR([$uFqB%+‘1S2!Ia+0O q6ev!/͂94 Lx1_o,5TšmAd#(Gov{嘃NF1ZË_WF&uhco sTf־NE'H J$`,]V jf=zB^ֺ)SQ-w[h:&%#i~-W6l,/:`_i}bC|/7D_HDq?w;3BلލrΙg Q'.-fdlЍKE$D}%$)=S XQ?bsp4AsoCkdR&0,?1o\jh92lJj+uy8̍*u*k[ʱ M;?6sFdjaULj6P0"'ǕKo0\64Dr@:sqcSmNb}lQA9N`cSfVqTO_y~!X5e% U _ɒlڝ'5I7X:}=JMQrKE[iOP3w!" 
Ye<"=RL}$Oƈgz\ia}2gWDɡڎ 'Oم{%!F*Kր +ԏ k?yC(q0*o#+\r.?._mQ}L1Q~[4`WtE9bJVG4GI5I_8|/?T:u'DZ/?+>?)Vd#$b|BD,9AW);ŕ1 ؐY*Qj"ogS;jIRK/ċ2eyԚ׭pdptRȘ1:Xv60zD3G cݎZؤwS"Ω9 Pul8{-UyOPƥB^d֝Õ?$2WMmEi uaZL^ٝ`OtԸf\ :|zendstream endobj 412 0 obj 7578 endobj 416 0 obj <> stream x[rʑO[)q}K9شS8FiEeQ-?! ' |wF}O<_]㫣xhBQ+.ԘSN]:/cYGFӣ )COun<9Uuޛ< DѪq3ifx9Kp5:hcy0Lpl9>ʎƖYmr79uL?5g4LIep.qf<~jmÝSc>+ONGšËz0 7~^ ?ai  iy}6f3:EcV ᗼ1X[Nir'N^l ǰ34|S)úr^\ eZHYLDtp+,f p"6 S23u^k9%;O^̪L&Jkx*?ON9Z=;fqUn; )NW"}M/Z 7^aa&sX&Daea4Tkb[M:ى6iLD3ތ&"#6 ZXZV("{mYpQHcPˢw46Xwl@&rN(reY3xsC+2ht64D:"BՅdx)8~MPD{zIZ.=$/+L+rl8 uo겇eMu;Yf['A>LPM{9гj~\ez=$<:GΑb\K>&@f>m8O'hyt~b$FExTF H+bG4{ywͤDgX*bdaw!e/D#~@$Vy Dh4C@pcG `qa>.#TVnhɂ\䵆חq6/ԓl񄠅=XLυPRTy82F3ZFY勔G|:_mBnOi~%Ǝ1<l[V%~s @ƾyu{s̭W>*& \{ML),vV . 6zp'+rex3rOdRJ453sE,GO?P4vr֗`DrM&9)E|9S(^)4- =@.Ԕ,YBޗ%ω4ptJ7u1Vf[DJm)1Q941rc ~MuwQ};E=E6tqvImS|z>]"~e 鮡ͦFmgA43M>u[Lj^<) o^ cn~o,z&ϫA?=PN*%Zz=<[VgPA~g"'X%<`6@ m]le׵mPWU熄 HLoHM}96<[Íš}UBx.CuWl[Z1,wA'U?MzvTf2 z~wb 6z[dW$_ $zvK"t#Ϧ,%ij ^.ǹ~ 7HS+']MK*,D'(qIPj.íe%#7QڗqLri{-}ހ[m-NtIR ĭZɄM-N(ߢdaFnYnPc >`@?[ވ4 K!nZ~.Hn5*9_30f. }*D3Jd5^thQ^eqRnCU^#WU "HZKzL7=⁡>mx~쪰COGzUWm,{U=ga;Z'^5bHÛ $_'KFw>xV}I5^7`6Oq%w̄C]C%Հ]/}Ld 28;xo '˜`̇@ޜsXuE+۾Jp*#G[}ěQZ x UQpD]Z ֬im-?[{^ݵ}g=/"2~d-%oǦڰbwQmNbfB^Cney\cۂe0HBd6;:>g+m{ (^mKNqfbSv7sU)w*aio>3Wh-zE|SaϫDNWy =OD7姙ӥq̀-(ds 7لciO]aN٫(=gyϥHಷ忭?'(Y̓:XѾ'MV!vNJw27z/+`P1Lۨ>( Q-]٘6㗚P8Go%S$; mʣ_ܛ`R:6 / 794xMU[/l6XtUϨ^x͒᲌p9TC{1mX,{˹$>6"_QqӅYYvhG Vm D߳^u o)J"XRPܨ%E!"B DPUثY?! 
?u{SNT Ҏ+3聱eW?oF3 N_hP nRp~|ފ9v(@^a)5Q9xlطD;8Bl dWN^e5&AlEa96$O5?emEd΍]< gGendstream endobj 417 0 obj 4201 endobj 4 0 obj <> /Contents 5 0 R >> endobj 18 0 obj <> /Contents 19 0 R >> endobj 23 0 obj <> /Contents 24 0 R >> endobj 28 0 obj <> /Contents 29 0 R >> endobj 33 0 obj <> /Contents 34 0 R >> endobj 42 0 obj <> /Contents 43 0 R >> endobj 47 0 obj <> /Contents 48 0 R >> endobj 52 0 obj <> /Contents 53 0 R >> endobj 57 0 obj <> /Contents 58 0 R >> endobj 62 0 obj <> /Contents 63 0 R >> endobj 67 0 obj <> /Contents 68 0 R >> endobj 72 0 obj <> /Contents 73 0 R >> endobj 77 0 obj <> /Contents 78 0 R >> endobj 84 0 obj <> /Contents 85 0 R >> endobj 89 0 obj <> /Contents 90 0 R >> endobj 94 0 obj <> /Contents 95 0 R >> endobj 99 0 obj <> /Contents 100 0 R >> endobj 104 0 obj <> /Contents 105 0 R >> endobj 109 0 obj <> /Contents 110 0 R >> endobj 114 0 obj <> /Contents 115 0 R >> endobj 119 0 obj <> /Contents 120 0 R >> endobj 124 0 obj <> /Contents 125 0 R >> endobj 129 0 obj <> /Contents 130 0 R >> endobj 134 0 obj <> /Contents 135 0 R >> endobj 139 0 obj <> /Contents 140 0 R >> endobj 144 0 obj <> /Contents 145 0 R >> endobj 149 0 obj <> /Contents 150 0 R >> endobj 154 0 obj <> /Contents 155 0 R >> endobj 159 0 obj <> /Contents 160 0 R >> endobj 164 0 obj <> /Contents 165 0 R >> endobj 172 0 obj <> /Contents 173 0 R >> endobj 187 0 obj <> /Contents 188 0 R >> endobj 192 0 obj <> /Contents 193 0 R >> endobj 197 0 obj <> /Contents 198 0 R >> endobj 206 0 obj <> /Contents 207 0 R >> endobj 211 0 obj <> /Contents 212 0 R >> endobj 216 0 obj <> /Contents 217 0 R >> endobj 223 0 obj <> /Contents 224 0 R >> endobj 229 0 obj <> /Contents 230 0 R >> endobj 234 0 obj <> /Contents 235 0 R >> endobj 239 0 obj <> /Contents 240 0 R >> endobj 244 0 obj <> /Contents 245 0 R >> endobj 249 0 obj <> /Contents 250 0 R >> endobj 267 0 obj <> /Contents 268 0 R >> endobj 275 0 obj <> /Contents 276 0 R >> endobj 280 0 obj <> /Contents 281 0 R >> endobj 285 0 obj <> /Contents 286 0 R >> 
endobj 290 0 obj <> /Contents 291 0 R >> endobj 295 0 obj <> /Contents 296 0 R >> endobj 300 0 obj <> /Contents 301 0 R >> endobj 305 0 obj <> /Contents 306 0 R >> endobj 310 0 obj <> /Contents 311 0 R >> endobj 315 0 obj <> /Contents 316 0 R >> endobj 320 0 obj <> /Contents 321 0 R >> endobj 325 0 obj <> /Contents 326 0 R >> endobj 330 0 obj <> /Contents 331 0 R >> endobj 335 0 obj <> /Contents 336 0 R >> endobj 340 0 obj <> /Contents 341 0 R >> endobj 345 0 obj <> /Contents 346 0 R >> endobj 350 0 obj <> /Contents 351 0 R >> endobj 355 0 obj <> /Contents 356 0 R >> endobj 360 0 obj <> /Contents 361 0 R >> endobj 365 0 obj <> /Contents 366 0 R >> endobj 370 0 obj <> /Contents 371 0 R >> endobj 375 0 obj <> /Contents 376 0 R >> endobj 380 0 obj <> /Contents 381 0 R >> endobj 385 0 obj <> /Contents 386 0 R >> endobj 390 0 obj <> /Contents 391 0 R >> endobj 395 0 obj <> /Contents 396 0 R >> endobj 400 0 obj <> /Contents 401 0 R >> endobj 405 0 obj <> /Contents 406 0 R >> endobj 410 0 obj <> /Contents 411 0 R >> endobj 415 0 obj <> /Contents 416 0 R >> endobj 3 0 obj << /Type /Pages /Kids [ 4 0 R 18 0 R 23 0 R 28 0 R 33 0 R 42 0 R 47 0 R 52 0 R 57 0 R 62 0 R 67 0 R 72 0 R 77 0 R 84 0 R 89 0 R 94 0 R 99 0 R 104 0 R 109 0 R 114 0 R 119 0 R 124 0 R 129 0 R 134 0 R 139 0 R 144 0 R 149 0 R 154 0 R 159 0 R 164 0 R 172 0 R 187 0 R 192 0 R 197 0 R 206 0 R 211 0 R 216 0 R 223 0 R 229 0 R 234 0 R 239 0 R 244 0 R 249 0 R 267 0 R 275 0 R 280 0 R 285 0 R 290 0 R 295 0 R 300 0 R 305 0 R 310 0 R 315 0 R 320 0 R 325 0 R 330 0 R 335 0 R 340 0 R 345 0 R 350 0 R 355 0 R 360 0 R 365 0 R 370 0 R 375 0 R 380 0 R 385 0 R 390 0 R 395 0 R 400 0 R 405 0 R 410 0 R 415 0 R ] /Count 73 >> endobj 1 0 obj <> endobj 7 0 obj <>endobj 16 0 obj <> endobj 17 0 obj <> endobj 21 0 obj <> endobj 22 0 obj <> endobj 26 0 obj <> endobj 27 0 obj <> endobj 31 0 obj <> endobj 32 0 obj <> endobj 40 0 obj <> endobj 41 0 obj <> endobj 45 0 obj <> endobj 46 0 obj <> endobj 50 0 obj <> endobj 51 0 obj <> endobj 55 0 
obj <> endobj 56 0 obj <> endobj 60 0 obj <> endobj 61 0 obj <> endobj 65 0 obj <> endobj 66 0 obj <> endobj 70 0 obj <> endobj 71 0 obj <> endobj 75 0 obj <> endobj 76 0 obj <> endobj 82 0 obj <> endobj 83 0 obj <> endobj 87 0 obj <> endobj 88 0 obj <> endobj 92 0 obj <> endobj 93 0 obj <> endobj 97 0 obj <> endobj 98 0 obj <> endobj 102 0 obj <> endobj 103 0 obj <> endobj 107 0 obj <> endobj 108 0 obj <> endobj 112 0 obj <> endobj 113 0 obj <> endobj 117 0 obj <> endobj 118 0 obj <> endobj 122 0 obj <> endobj 123 0 obj <> endobj 127 0 obj <> endobj 128 0 obj <> endobj 132 0 obj <> endobj 133 0 obj <> endobj 137 0 obj <> endobj 138 0 obj <> endobj 142 0 obj <> endobj 143 0 obj <> endobj 147 0 obj <> endobj 148 0 obj <> endobj 152 0 obj <> endobj 153 0 obj <> endobj 157 0 obj <> endobj 158 0 obj <> endobj 162 0 obj <> endobj 163 0 obj <> endobj 170 0 obj <> endobj 171 0 obj <> endobj 185 0 obj <> endobj 186 0 obj <> endobj 190 0 obj <> endobj 191 0 obj <> endobj 195 0 obj <> endobj 196 0 obj <> endobj 204 0 obj <> endobj 205 0 obj <> endobj 209 0 obj <> endobj 210 0 obj <> endobj 214 0 obj <> endobj 215 0 obj <> endobj 221 0 obj <> endobj 222 0 obj <> endobj 227 0 obj <> endobj 228 0 obj <> endobj 232 0 obj <> endobj 233 0 obj <> endobj 237 0 obj <> endobj 238 0 obj <> endobj 242 0 obj <> endobj 243 0 obj <> endobj 247 0 obj <> endobj 248 0 obj <> endobj 252 0 obj [/CalRGB <>]endobj 257 0 obj <>stream P scnrGRAYXYZ acspAPPL,descTcprt\wtptpbkptA2B0mluc enUS8Ghostscript Internal Profilemluc enUS>Copyright Artifex Software 2009XYZ ,XYZ mAB  Dtcurvcurvcurv,curvcurvcurv$&)<@EZ_h~,<MkxFDc N8c#Pec[MrcGh|Q l  k B   / . 7 o o ?   k\t9iDM4^JX?*$ |Q*y`J8) ! 
"n!$ `"%!U#&2"N$'O#J% (o$H&,)%J';*&O(N+'X)d-(c*}.F)q+/|*,0+-1,/33-0,4w.1Y50 27 1-38X2T493~6,;47iCBG@CpIrAeDJBF-LnDGMEZHOwFJ`QH KRIiM;TJNUL,P$WHMQXNSZPgT\ QV]SGW_kTY%aV2ZbW\=duY)]f*Z_bg\,`i]bk[_:d/m`enbSgqpcirtewjtCg lkvhnwjCoykq{ms7}{o't]pvBrxxo*t%z2u{w}ygcurv endstream endobj 258 0 obj [/ICCBased 257 0 R]endobj 263 0 obj <> endobj 264 0 obj <> endobj 265 0 obj <> endobj 256 0 obj <>/Length 3566>>stream xks۶E?ϴMVPӈz茰| ױRׯ_Y?5~_Ɵ~N`/9u>/_ 5kw=_>yLﭥѺ"DZs OKt Kh g  1Ӆ.5Gֈ#ʙn7R \{?\#׉.G0"9j Up]ƶK?V puw!tE,Q 1F%Η4^$h4ŹW?}(R'5HSKvS ZMcSւ K%-T4DE!T@%<E=hLc+cQ uIFH7IGUKE/u+$@=elG"iPxE]jTZ% 1QHL j5hM!S0"@2B""Kq溸οtQ1[ QoxIU}OݠU4L;1uGwj2.\jkkD Uv\jNB2XO_ygj(_tաRY7;%V(ET0qAT-H(_16ؤҎ ܛjNdDyKх]Sriyt{IG1f=߲ ˬR(riUR )LcSp=:e}< ULElip># _z?^Gizd4$Rw҅"W-4q nH/r񒌎fq?glqHL.9JFj) zTWts椡s-J\9O"">zjVQ`_uF4 Ԝ4DtDKj#xGMcDjl1F6^.R /]*r+W]x{^ ʥТSʓާDfF !9\J LOvIh9_aqeޝ>y ^+%#]1HS#]"WrsA^LWy|Gua%D ]jR]LCm·3RtmDvt"^KHQ3`. pօPGMqAіn43B ȣK@’^ -'>nM+yzB9̾M]m:J]Eu%2t]c>wntA =ySkWk"95]B!%$ En1&q.=8#/e؎.*W5DJQ]a.]c麨Ry;rFjߋn< Ԫ"BAD7҅Wy#.G݋'(cAGbNFb P[?V$c 1Ftoâ⊤!%.iH<_"\E4V٧BK#臘j7Ax32Nf1e{iD+p!6KēnsrS2ZX(zH,OT5_I=@i¾ T2R+* Kۅ>o͡ _2I%MR:4F'#b$1mHtRTLii4ԥo"W{{_^KĴ2%rO-W'q1JF.}2-!gj\ˮ.II՞ɽwF(uETj.̪ Ctajwڬ[k4GG^ ^RhEt8%#Ftyc\+e]^]D %4.At Kh]BD %4.At Kh]BD %4.At Kh]BD %4.At Kh]BD %4.At Kh]BD %4.At Kh]BD %4.At Kh]BD %4.At Kh]BD %4.At Kh]BD %4.At Kh]BD %4.At Kh]BD %4.At Kh]BD %4.At Kh]BD %4.At ֥zWmGDU}ސ.6jA#] \~%W1>2c]J%J.5ּ戂^Yz-m]88]|Dq.y-DE1׿KQT2"+qD ]^6cIQtGt!ƠKW\krܫ Ɣ ҅EF.p`픒vkaRcXR#1"5gօjףKICP!hT#gztI2:)MѥTt2NҐ15(́# ]`DQt!M=.qbJI:N.$ yMɨPtQ2rVu\44c4%Lwg^ߚs3:`[OIu:{.klw=oy2z'Vb 'hu\{z8ٗhX>"f7+<{h1G0.TtqEƖoTK$ۣu1ëK+ׅ ߂RG: eJLS36tN[aMnYEY땄:O^ʛEeX%W㍋6ϯބ=a:*H3].c^chJaÜ{&$n"*аƗƋKu1<7o\ *+oÔVvcj/6ΪۣVŻD=]ƲRAf7$=w lVItn2jqIuei&KL;i^/4gs-w~d}{8e:}x3+N!$Z{n]zJOw6(rig]vNZt Kx#Kh]B X endstream endobj 255 0 obj <>/Length 3931>>stream x휻; , `8o``c`"-^`NTg&9HeeWW5DDfK3dl99.ਗ7cI];YzӧҝG'C?ءX3t?shǓ9dޞtZ8I}>|paCHr#/oqYYt̎.߿?Ct%' 9;]'(+NHo'fݻ]wd!9s&:9hrz:5VT;vLm޾};{?.fd’ˀ|Rz2tʳ Jc\;y Bg#]v3]^~ ݪG%K>W* XNyG#1Eh<[Վ4fի]0%. 
fAB dc6MCHL"Й f^/_޾+i Hshu~K[y-Ū5~^F?؜Y?$ęmxvyݺ&8D,9gZEI)ƨӥ=| 򤪼UE!t]x9*9L]"]:i03˳gv>CҍNaef\+-2fZ(fd3ܹsjyi74-`crSTrZ'e`Z(GpsaFɓ齻`%:3:l9 tC4Ǐw}yf]DFYuȫbs… ffr3ˣG{3ӹ 1:4YZe=6++$ rE03[7za7V5 #$##'Ԙ.%G"fi(ϟ?gu61SJ3MҥKt6Srs,i+hF(++D- D|2&7ݍ6:Wp qQAJF[ҥ)ݗf]F,sՒK\A0vetݻ1&.Fu )tϴW}{hM.YtAi |^;wLo %+]򢚪d˪th!,r ($%ܼ.oMG&DŽ".DDɺ8M%v\ ]ȘJ.nݚ޾ mi+#t!(ƴKKuuYCKօ"]r>t03ܼy7!m1qѐ 5Q.rst1&oYѣiЂ1WE+꜏jƍqICnW۶:)R.Kmw -qy](_(UvOgt~z7w\hȋ(y%CI(y&c]KXw)-ީ˵k׶ǹ^0.R%oӵqѥKI@.Kt" \zK.E5QP.rZd+/=JlzK1Ѫ+Ñi:g/}rq]a8J6$]%XeKߠ]U4K%?V:o9իO?GJ\ Ĕ4i9%ȁ.љ-eVLغ׵k׺{!Ih%6D tH:Y\[BQt ?"EG驦׻!Rr!1vD~NC/]kQVbo]] 3h{j\wˍ7!DbJK2R NY.E.Pc_H͛ݍWH^!gi1] XкDD ʈR6'#֭[0JHxt!!xDNjMo".*u9H@$#\8!Eu}vwo:]U-AuюKmPhm6#gFBiDQ"DB3˝;wW"Qk4u!!AŘf]F EuT(V.ݻ>!( !GpEE.m4ƢtaC5:?Ѕt,jm"_{u&wEl,%6"9tX5P]-f/R ܿ x=.!gF:4ChQ2b]y% +i&"hٜJ}Q2"&eqTO+$UR:So$7za7L3k"wN=@.os6#vϥB3`rK%y]=zݛeNd29H=:%̲()QD_Bh_V{6;uyq7*$rtQǮ]p ^%h4Dz{cp^'OtCX(yqިT~zZ]Uu`HCDvup}i7` Y"Q!7)C1)Zlj߅1ġjK(qt]˳gϺ!}=E @%>}D@,t=vn#rE!Z$,D ϟ ' &[FBDD 'hC# =şBe.DvVX (Jb|v߅mKy-+?~T_Imn6fY u!hE43.Kd[P!Zi(DDh]b˺] ."[ICVEYR?@\aDę$#і.^Aދ"ю>D?DŘhPJшt8c,qiٴ&Nvc8 S]._A.Xꄈ"-:dM^ QXQ(hM$]rFі.o޼A]bvI12) Q~w.]9G5|E cW?-EnC ;*}V90ͤ+W%x}qw>o,ݎKu U +#KHщѻwt”+ŠoF'(1BFwHՐ`EPsL9ZFWw[u\WRE%^ i‰>(%e#̲0m3T-$#tHCѢKB#Q.IdYd`60$_5ڸůJ.k6VEfY6hU&"O$#iīT- ES]Z Q℈Lh-(djwq8 !OZ !-hgDyWWk.1:8BDDKF!6eO KhRY!i钧PϐЅ4.19%oW_hE;.zHQR%W(ZtQt.M'J]D9EK.&hmVIsB]2Za]XS˿֏ endstream endobj 254 0 obj <>/Length 3284>>stream x훽3?DD Hxހ & D"D]mW.;k*ϙ]~-rܹl .1^\|9W\9իWq8K.q;qf?Y>..8Z3*]0qxb{] w_H|Ս7r)W]vMl_o'~t0yfSD ]NĻ8.KMmZZnnJC1mhx*0?P '+U|w.1RZO}vԂlHV7FR1Bԉk@Ye]&$5ݹs$Dqܙ4H`Du;.[c"Xg艾NݻwVFZr#FPNdLHCl]rUs K DcfKOԅ**D~2q$J\G.S,TkJӫj)OC EC$ ;(H]e u#wZV3{A`}#I$û5iF P!I/lg=|v[\Q$ﶡqϟ?NWEw)6-3fѣ4 c;M&dcx<i/oSFE97GI*a#{qR011E! Gأya=q#ehP$Mq]|HC %6 jX"1!t?WEuAPGjLڀǏipW~ K%LI;6)OE}h1s WZR4SB b6`\.Ќ0KkGi?5.ns lx/#|9 %].,:D_| ]-x ip)tngeKXrxxt-;]N/%9 p]K! . G]&Bte tad]bv ]Av4&-!b,]PRR%rv. 
y>tqI,;ץ]n u+.(xd6hV]T匋s}, eDk]u)`]Lb XS.u1)`]Lb XS.u1)`]Lb XS.u1)`]Lb XS.u1)`]Lb XS.u1)`]Lb XS.u1)`]Lb XS.u1)`]Lb XS.u1)`]Lb XS.u1)`]Lb XS.u1)`]Lb XS.u1)`]Lb XS.u1)`]Lb XS.u1)`]Lb XSu Lccׁ8 .q*Dmfq;'f:P`@uq=>Yu o0.q(QP0‚]X~X)+P Q4.0&l.]hwK߻0]` zOpe3t i*D4#tq1in.D5]€. /(Jɝ.. 肕6.h9.ӅPh.\;]Khb- ]. ჻e].z1vW4...Fbe0Z..ئ1~Gh_Bv-IF 2 /0qDDq=+nLC_4`,n/Cst$] iMV"LYj=Xiײύcoo+Jg]Bk 6Rc'+qD JeѤ.tAY.6AtI颫$.+jXuAo ՐJtJ]L2F[Jd]E;H&fn¿a2WtkN=+ u2`{eѿ^h+tцWKR3t- [~uLrML2FC/COA#5_ZCbhj( Åv3)T +it雘cZn֜m}ug-Fe? n&KzMȟ6+xh1SI?L\CE[eEF3Ce+YtRuwQQx:Zj],GcW-[@14Bw$`BIѼQųWFQi?FЊhi'҆sJ%sS}UFЙNeٓjˣХuƴڔUse;cf1:u#UG-@ejRIja+m}OY=iS 'e1ӆ Nݶҕv.^e'fR8d:Zwq fHq33ݷ#+FzJi"{!RXdjQ9_h,69ySfΌ[yXS. endstream endobj 253 0 obj <>/Length 2681>>stream xrHD!k|>HU K#ps0DYUM uܡ \@t S3{[=moǡ[r itы-Ӑ?]u+pn!.Wzʕդh|YSr)rܝ)\% H9~UNՁȫWIud̞.ŕaFH CD)%lq;l,'ؒm^2-+(֥HSbZ]h)gOmC9{3#͐(_q.OsA%T~]'c͛'Vct P1D.:e:J1P!'眃$BGNҡ0tqACŢ|ʔ2G_)ٕw3ML}E]\fWd7M ͬKƔ% ņPׯ:)Ƹɹ]#gbveu $.nrK{+Qİ˗/GºxP*sOűZ쁴C>H!͆~z%ɓkR-3EECI]rE'6&vۺJY(u&eohI]-%QS6`zdH~H:yKdthn\-2 :Qv˃.t.ˉNۥD/[dƇEZ`7RFhWah}ޅt9okŭeX'Kn8outq+]lL~ ~"]t9J7.GPb.D}W BIz^Ϯ]M/ E,?ĕ_Э.eV.gY@t  4@h.]@t  4@h.]@t  4@h.]@t  4@h.]@t  4@h.]@t  4@h.]@t  4@h.]@t  4@h.]@t  4@h.]@t  4@h.]@t  4@h.]@t  4@h.]@t  4@h.]@t  4@h?epbV},3q~uwЭ(3]uq}(NEQх&P<]0EQ6.hy..;S{w'ٻ#L[^lݝEޅbt.F]v#hIWԻ8ZhNx}k)8ZԸq..FپdŮd%wE 볶tQ0(Fcw{Bs.Qt 6&+ܬD .z$?4eײr2Gƫ6atqh>=}nCL9QeȺR0\yPJ*Q9Bg\6Jc䁴D̕qT JXt(+]TV%5Mng}Ж.)]rJP]\&7]CK;o fcDNCE+u)0ŘlY\V+ǬKv0M̪+ſaȉ:{l{+5y&b2: uIc<'Y> endobj 272 0 obj <> endobj 273 0 obj <> endobj 274 0 obj <> endobj 278 0 obj <> endobj 279 0 obj <> endobj 283 0 obj <> endobj 284 0 obj <> endobj 288 0 obj <> endobj 289 0 obj <> endobj 293 0 obj <> endobj 294 0 obj <> endobj 298 0 obj <> endobj 299 0 obj <> endobj 303 0 obj <> endobj 304 0 obj <> endobj 308 0 obj <> endobj 309 0 obj <> endobj 313 0 obj <> endobj 314 0 obj <> endobj 318 0 obj <> endobj 319 0 obj <> endobj 323 0 obj <> endobj 324 0 obj <> endobj 328 0 obj <> endobj 329 0 obj <> endobj 333 0 obj <> endobj 334 0 obj <> endobj 338 0 obj 
<> endobj 339 0 obj <> endobj 343 0 obj <> endobj 344 0 obj <> endobj 348 0 obj <> endobj 349 0 obj <> endobj 353 0 obj <> endobj 354 0 obj <> endobj 358 0 obj <> endobj 359 0 obj <> endobj 363 0 obj <> endobj 364 0 obj <> endobj 368 0 obj <> endobj 369 0 obj <> endobj 373 0 obj <> endobj 374 0 obj <> endobj 378 0 obj <> endobj 379 0 obj <> endobj 383 0 obj <> endobj 384 0 obj <> endobj 388 0 obj <> endobj 389 0 obj <> endobj 393 0 obj <> endobj 394 0 obj <> endobj 398 0 obj <> endobj 399 0 obj <> endobj 403 0 obj <> endobj 404 0 obj <> endobj 408 0 obj <> endobj 409 0 obj <> endobj 413 0 obj <> endobj 414 0 obj <> endobj 418 0 obj <> endobj 419 0 obj <> endobj 438 0 obj <>stream x]1n0 EwB7l9v\%Cd ]Η4(K9Mˣ6#_T5> k\է72iqٹ=m(,߳\|ru1RiWjmbֶ!`mߓhK@QuDjGHui HsT==zܩDq@@U\&`A\EeHUὲj@$4+M R8Dω~//YP. endstream endobj 270 0 obj <> endobj 169 0 obj <> endobj 439 0 obj <> endobj 440 0 obj <>stream x]n0 yASZ!!_؅æ x4qPQ(li/vw8K^lpŦ\bhD\L6> endobj 441 0 obj <> endobj 226 0 obj <> endobj 12 0 obj <> endobj 442 0 obj <> endobj 219 0 obj <> endobj 10 0 obj <> endobj 443 0 obj <> endobj 202 0 obj <> endobj 8 0 obj <> endobj 444 0 obj <> endobj 200 0 obj <> endobj 445 0 obj <> endobj 183 0 obj <> endobj 446 0 obj <> endobj 38 0 obj <> endobj 447 0 obj <> endobj 181 0 obj <> endobj 448 0 obj <> endobj 449 0 obj <>stream x]n0D{} 6N"$2TX'"QE!DK<˭=Zo^?{Xߖ?Zއ9=?:kx1^;T?}XjB~uo+qbᅫP?vX~<k>ԏ8C]g ;T!K!+&aBMݙO <,Yd> endobj 450 0 obj <> endobj 179 0 obj <> endobj 451 0 obj <>stream x]1 EwN VKVU c""d Nҡ÷lWᅉtů8YDõUU`pS jy: j^`TnB׵썑 a4fI錐$el $el+K#5Ft%u{5P\<}[V endstream endobj 177 0 obj <> endobj 452 0 obj <> endobj 453 0 obj <>stream x]=n0 wB7#*.钡EL"3}$%O+9})ǚrrQnKVӭ[-_ߋ8q&GTMitI._8yϧqBW};y]M_1p]cۆMޣA$EaP{> endobj 454 0 obj <> endobj 167 0 obj <> endobj 455 0 obj <>stream x]1n0 EwB70N\%Cd _i^]Q%sܖ{ F̩jZ7a}pOo>dq qw{l5PX&etH4+561Gk0 w2PmMw T{ӎd:0t@50PmL d4{8jd͸U`z0 UFD4 Cu@"ϧ[83UHZ"}W^M9h endstream endobj 259 0 obj <> endobj 168 0 obj <> endobj 456 0 obj <> endobj 457 0 obj <>stream x];n0D{7cq 
HrZ,L\)R<[pXί7SfBLKz/̗5KpU}zs'+ן0ٗ-;ť WGkUQ3sx kD[jG@tP X;BֶVu$ Љ vTIGՙ N F b7tvht-Gk~jN]k1w5_ endstream endobj 261 0 obj <> endobj 80 0 obj <> endobj 458 0 obj <> endobj 271 0 obj <> endobj 420 0 obj <>stream xYy|յ?Y25d&3Lfɾ!$l( $H(bƅF TEJ0Cab b_m+.T[Z?עm̼ (ؾ~7]~{ιs@00皹%x"~qWdَohqO'Y+u%e+z&˚%(lG{{| +;".`;dutuߕ,а$ֻ#>>וVvG/qs -7@}gwl^IǺT'LcUp'ax؍|nm3E0 IX,x`)<@t[7_n,ȷ *p B(uć ^؇x1~DeŖSY0B] /s&6=N <@bb c -C ar &*0PPn0nD,L\vM'EdC#'[[4Σ.{Y2ow#6^"~4~2X7*X{ހ aPH%!7a#Od'9L |BΓZjn+L'Qn'w 6~$L 4Њ] ;j#p~H'P @AJlLtL/'%ECP5P+uҹv>cҙfs\d9Zzv: ekm垈[ظ M#m]9=Q;l[S)O͟+%ۚ&J%nVD*&݊ GQ9MC: e&vM@NVM&.c4 `e(ɔ)\I,[9J1۠“Y/%juz*<{a` !{ > (.ڃ67QQ?y`DeCBlt$]$TJ_FU7P%eL^$ĔO,/-a-f*efƌ=P_§cL.ɟy/Fbѓ|bc&)ӟ}M% *P)"j&] 6;UBH4jULzr鵮\Z]yVzL3J6^-\*T UUN;1%=9#5Paђ #2%H%"eD*eEBХGEJҜX>H*BP BJK0l0+hofP^ MDRGIW[\2<Ţ}G+%nYmi3 9y4_޻ؑ0o-N^,\FVyEsg}Ɇmc[ Nx/򼉜04ϰ>{/NGS9EgLeld sʜLO|_uS^II#]$rRV4WV4Ԯ4O|y] @+9YBnޖv5Y@o)椥C+0NxYKX7zެgy/3=ˏ{'.UƢ!aDGv OnoۛZI )'ڎfAHBOWsm}"ީM>#&8c9eow?xDsy}szś+%516Tr"*Y7;[\ŐTs(-7ܺC(Sun]2mKҲ5k$4J}!٨4݌ #yJ^VN38W'z05?S&2,T27E `7$<$F,GF$! 
fSE+;=i[8K_0[xIʼЁUc۔PM] ; ߫Pm,r`/P.IYn,ZzYݾny&!g Lత[6K re\U$BAS Bۖf XB (ֹ)(^ c!Ťc!=rrCJ )Q%)1g|Lҹ%Pᅅ0y'\FK7$I͔@ԥD>Xd9/yHn)B21)Ow5M5ݽd0ׇ|4Oپ3ls@9 npF̜oѦKUxYw%k` BMZv/x9mo@s(S8[rA 9A(,,bs/n*C 3k8=xGP@WxWcQ!2RB^]FY2<<ǪETQ!2l>h̞ D֪fܙ>md/ǑJ U~@*j*_ G \s;r> endobj 421 0 obj <>stream x5_LSW@+2;n;&pf 4B[Z s{[Z_o{)Y2sn1٢>n>,ezqn999|s~$Q_G$)?s̾;6R^'~ReYD@l+ N4u<[\]P6({X/gZE,HKtDQ?e :FK4BHEg ϲI:6ƅӠBLJchߙ;8:%ͧq6N `$B&+"UwI\J%7Lo6N*œE앆h c>!r<}-XRj^(KD1L \J^(~Q8޸&,Ι杠0팗w-YaI,dl_'} uqo|: 9ONcWEmT5?~c%E8#%мhzE|׿ Kqiл:NQ҄;op,i!Y5d&hA!̫[#[qɳK;3 vpμ,[2V+'E ZHOa_=,B R E -%Y&Y磝qsۏ#ǏQˢ\6(A7v\"C5~'k)nRMR?Nk38n˘@@JAsd͜?`j1`~r\v.82oV7 ]+)>O^ڥMOT z2LXv~(QM7]<mjlU!_(2)Zbo K$p1 Lh&X, J)yފbDVmv)N5^̴7DEЋ;ͨ?K b(m+MTS[C xFf{ endstream endobj 13 0 obj <> endobj 422 0 obj <>stream xyt[U#,47ɤBIHͩv'r-[-MV۲lqqO2!0Cz9嵼t)}>Ǐ11_ݡ!At\pK%A~g>O/8*bz`?$aW={S .Zľeg36WXΘabb1111c-%F8ceAz+ W [[KK1v01fp`H8`LgL`1#18cCHeıOrԄTL `2~ ']Y Cj̥N[$aI( @ rTB}Zv[_++ Q&3d&a hiUbb!f ,4€}DPhTLz τUb`#@q刡À7"cT29 S:O9t5dXGuQ PmAlE;&Fbr,EdIt]f.*B]qsI/,NC x9!j #@6Exµ2yat& .P:=$m'+JH[*l;w G[ҫ2*Sg3'_ɓY3|Hvģ\,Q'IBzlgJB*CI3a62ney'v0T` &{*8"tq5DXe-xpvt 5F ]$賟Ts4լfVh&v  U;jYZ>t:0Ǩ⿑\ZPA(fj2xr-W7]#\ "AySVc9ax =ׇ3=5(y9Ü(w5 Z)YXHqjMUk\| rlF {~Rc$L'-#z#pyK c&{" ",32PiWW|q;+v5ڛA,B#:|gG!'A:ZWhwQƳzXflĠjq'@\!#d r"4:T:!^ɮ\~BZd6wQR^W$tRO' X~i>̶[F42I,=ESKU:! 
ڐfS8=H 9PtEB^=@ FZA eT'G/s7^,tj}tceg2z Z <O٩:R3Y"SQj1wIp|^Y~4nZ;Hhpe7 f@X}j~(8Vh[F?/ߞ]ŏIX, +8V%TQŋZMN3ւ'.?gUED@mkO d,O i;+(2&cJ&` 9UP@Oo`ΪNnogWi4e c'f \=u-,Z-^jFȳw9_vhRvJRby5ˇ@5(e}7࿉r.(ysS-.@N{Hثʦ H8&)ú2W z"sI5 ʢ~y8&a~X[Q>u} [q>J D 夎,t˴Št56 ކq熒q퀨>\U*C-8UC(o\_݄y&@nUU _ߋ M2H`,#Rĺ-*s;=jSR׃¤7+3*o (ȝ 69Īq 7gl!C#→Ouh:,8Tu]sT0?,=9u\5D5kI^U֚k{[Z76q;ʷ]V%G:\UJChj6pNmҍP\_Ho␅hcp*>ݦ)$Ez"kÉİLTST MN@&.㔍1R[v2 H5/֒ڎ@-n82^`z@$׷})M_T>;W_}N5va0X'lo8!O)Hړ\{=iw!Ýʲ[۳ż)X'h~Ȑ1 !#B$I,Ac+~T'ז+AT/gYм]ޅ5jJJoCk6)pVr8[ypUO(~̭ ôU(3=Bn8B@m2' TGPr4'^μ^EVr6@Ԃq9y4Anh9/ZX&+sgJ,@A!,3$s6HE ySo niV%RD,qUj9@SJ֑n*ANңm-UvPciq3#t#J$դ, Sf 6Z,%^öR;ܓ}-]W,ӳr{"R-DXԒB{Fg*& Sg@%r)\#lӻɪBp]@ HJ='HE@އb@/u!kDޣ6ipKJw#Ͻ#i0!a070X uKlD(H52}~d;u?GPszs#OQ$tt `RӁ-q$ {Ѝ3n}ʥp }s`ҔĈPO8$}2d?Fsh%J]ρSs܁s\ 37qќ!0,o^7cׇؒ2/V} '^:W}'l/z?;lZm)ZBHA"U '}9#>%vps1\"AN;W֊wHęTT-Ri2{,՞T:4ޑ *ZIAʮq霠l<b[rm[O4!Sa/XK&*r#[ڍ *-0bɛ1WPfXn)3TȗiE2xAЁD3l;AOhfSqo> [Z/5^@E8};ې>sUm:P߇pSE,Ȝb=i:Cٜrrcy R@iUWy$avֽ}1oojjN/hnh(cU;<Iޅ/ $Td,tC{w5s*+}׻s1aݬHcoPR҇23p<3UjO46psn@G_b"VѾLQ,lsV|2`VA!OF9y Ch@HGFX`| ߿X]U +䫓yat*A ;(w -u&&qrdB"[4o%w^ b5Al/>˸}Khc*7 VcIcGq翱~&/Sth[4`\12m,'ARBԳb [Z/ 6ug{NíC~A!iE?n180MYPhyB@hI*Z`7WރgLnʞg8iiI"WnMSMcgM;lh0뮿792v=\kJ}Q1Ya`XdA}"؍_y>~qS$oE{ +BE' N  TAwzTn.MW^t w+G7Oy=]qhJ. 4@mX'@E玩qE,:YJ05m 3}t:&hF!Tj zlH"RK.1Ys*kJX4 %c(ia:̣F,4)i4Q=?z:HWJ N捵Ȫ֔5 4(qP? -*w̓qR`N[뾈,2lޤ?BiG7v}]]a"4^}o e NJIo0RJHPNxIƷ Y[&,9W }=![͵ϩe wd0XyP"I474I * (!2xM-KQ5AoA,ڻw-8P ~ᰠPx;sh3:5} :o-iWG(w"CƓBRRTpVߧ|9NWmh"}"$@%B1ԡG灝 aV!BrGӾNj1ebW:sm7&X87Pf^nd4kˢHলGgH_!9QqUm 9wH&uAb&͊r7]?k7L:? 
0Y+g.n٩LR bq< OHƑ|H} ?Sq>@=E6i7<Ƽp %FXm]P ᄱg\1m/"JvW2$)(@-R>eܥo$>MPc18;^ΔZ2 HIi ]ViTcwpPJ-/!c|xNWb=OT./<<=|c{O,HHUKpwͫ(Gu {VlزI)-R愊%KZ@?{0txgl /]=Wh;˺Uk}|BA-٪h2YK,QjRǃyMo;ҍ*$`io,t$^vJkfO{w8&2Hv/nBAg9T6ooR(:CObVlQERjJط=ztN@Scƶx=.Ry:Dv0,JJj4,OUeh+E#XUxsɱ=?'`H>i *oCpߛ4o3dbW/X2#h3Ggq{eؙέR^kl,:\xB9ﭻx?|v 3ǬF0)2[%]YJP߅l#̀$2LTH"?gō#NءE7Kǥe(7|v50>lO͐ Rb0l=§Q@0e}n ܞ~+th͠gu=p.ŗ=w^ŏ%82@ޱ׸3gbv߷&7oTso7#3h28LyΠ+/7  g=4o[[|ݷY{DqGoa Zs͜+Gw-g՛b} g޻x`i|ya;g.O:Xt&XM{c,:Ic{~`%RYV 2twc\Ƒޛ\^0s 4j<~8)4^Z)❅fk}SMS e7ϩRi6@Fx%n`#8\v:cP/K Gbj85W7@Wjo1ǀ?(@ tW˳r#6ћ _#R Rj= _X~߀y0&;rDXl-zۍu*Iw{Zǚi-n՛j/,)abD>?= `+^x;&Ei-{i< =;u1/Ṩ۔|._;¸;]s΍禪4j-m.!2PBYICԉ dAOʬ328@w3k\@\h_m8+,^IOR pWod2^u\ͲvM||@}/1!CIĠ)R7yXߙi<x endstream endobj 220 0 obj <> endobj 423 0 obj <>stream xcd`ab`dddu 21T~H3a!cO=nn }=P19(3=DA#YS\GR17(391O7$#57QOL-Tа())///K-/JQ(,PJ-N-*KMQp+QKMULD8)槤1000201012iӽǴ3~?OѕYr.޿˻9->wt ǟExCOC{yG+vO[%Wy&.d4};n9.> endobj 424 0 obj <>stream xZ XW$E22#R\jERXQA-C's&!vŽ (*o.vv?]O33gy{; 6~fcc;8::8L[\t\u8T[5W[H*À-0Ņti~rdkOFgSosh9>;=.^(Hl2IRrCJ\/)il|6T!R]7^yd4V-NJXtD(Ih\lmۣvD-\%K-=ܼ=ϙ/xæc[76ۊ=`.6l`\{ۉa0w9l71Ol{{sc "l1慽m–`S=9bFc1'l6[`՘===9`xǺm6a\˶_*7Erq}² dž^;5iͤ]'W=1Od|O},q@lsMJRx2 Ă=JZ^fH_ңE@^DZ6PAg9"h$OhvoV>iWٱ'55TvNrasOv!13-CU77:;E{e.1ӓc)l >k-\[x|.K"79۟|˃s gI8ۏTY??csεy32||j9wG`lkPڮSΆłBm}L-f^psE|NŜ37/Vv7ZAG@+}9Y̧Ռ \|ZCuljMj'\~Y}}FX:.Byc!&o#[f\Uy+ f}כ2Iy-84Xvgc }v m=sbE( <4 \llW!lndNM,# cU>ɩ,Bz;ϧTR~@Gvqn; !Zo(+߆/W oN %n!sNz79`Fw)p(URJ8TOcb팛6 NxE #*3;v#D>qt#e0asa-C9ѱEۜB@SCMW6ua/VJvQ}B ĀoPa6=0녑(SuZZ?6cWA(ceTRG?B+%:  KtwlVT )e:eKCS߈Pjdxn6Y"8tlxg)+)eVh0 i WFP$P"C'D4sn?CVd;B}WoujK|Z8혵;-)G5*Al ^A禝M; hpʏz^8/ 3WTTl36G\NH ~hrAQiAHs$3\rdN!S~Sa1tuWAt\4kj sKDLjOU7l>E] Rsr4Ȩrq984 gQi;~lH E\ȵGYGs$慇3's'B ;ZܣD=ڰ}PE1^X'ECTp"L s>_,Rр)4(ҪpN@eīpbKMII4yEY0b2`+&Npd",XtjexF 8!MOAn2 yWkXA5sw12(]/b3m ^@)~kIuL%7 KJ[_PIq aE`]m)6vdZ,-QiM-o9 xnuG}- 2IRʜh--Ro\]^ssPV  '\߇_k>.+G} e%=,3Oa\^ecj5R,NRT:Amfޱd|˒g\p# Gɼ\y>'5 Lqog|5[ |(~Qxd Zjt5w^phQL>^<8=sX;g!W=oS0~*QYIXVt@[ŔWIi::;i[O\bNjQQyٍ~ >uNPRg|D޺1NV3xfNh㧛k'Q. 
ujs]$̜4#[mwwޑϏ,'S %yzC觊)QMqoJgH[w?JNQdZDkV,IٴH W gl+0"!?=^>,v Gs ?\),n8] h?-Pz[A(g`əJ o' `XJ]S9p4f3^յl$Bn\\"tD!+`<I.9wy Oxp^=sR.q26"C󇃃{O]w DE)ӢYW_[Za56zP]Ю.stI9Ҩ6QJ%(SՑBt E`{aL2v.\X| y|?ųR֚rЄf|ȸAȄ?~#&8ӑx^F.ެeiJ_Iw9gB-l5H[yrxI'Pǟ[kV +B`Hpf99Ԙ$=_qZvåHhHlm' CҘd@#ِk>tYEdSs]jUlTN~~ 1N퐂p◐$Eքe*Sܹ)C^q׎ÓM(/ù!侠2_i=LXF_!}{FW\]^jr..Ÿ©NY:nsᤅ@- Qy!}+8-ΜprJYd_^33+,^i+[H| n^_9CIUk4_ڸ} G UmX=jYg[5S']0+0Pܾ1g2"k> (1 Z?J汉 GPjT dDrӹ_(cqFji`*7U"Gۤ1sa?HCr4CE^uQ{\t[)ʋF}FNh`_5лO! >4̔~ÏJ" TΆ6y^1@ue.,ބPr@4(,fP!p h5uZsvi G%laU9d)h)B˗%--AUu#κ C> 4ar5깼 o 'fw\ɾ9\ENҘ;cm}Ε4+`…)'SC[SJ 8qZ߁?^s~ȭhnJt0bu´J -%u@W^jvyECsR}f}X H2jvv##l5*zYOojfp>u뺈 '*/8xnƃ{ʇNu,),s*)k8`,k˪pl '9nԗ,KL54* EF{`.ݪ l ψ@ (\Y:G'v,Yw΃"cuKK1 ڮy̯bԦ-*M)7욵z7g8 XfYO3 o>p:mywwXfgG.(F`b)n=w@<}kp4`l8YYR׮m=3339,ul=ڂ ~$ߋl>ݛ}}pf#b98|=a8=pՁA21 @m*u}Npk$䥮Lϝ-E:mc={AP\d@ܬ_/z{I}sk蜳=gp>\`ՙdL?/ u;T^-?NZ$as:xus;kEp\{hSy5Of5RmVr:vg.8}gۤ\;IŊ7U~֛ #nxdu;zr7bE9 \ŭe"lU%FsECZ%ԀEuBq խawX*v;HyW2;f@&&PKj \u  ȥ=Cjk=iyBia9[ :7XcWD4x-$ẢRpF-N!l5qR~WceJgǽ#ITb6P!7>yU5#3#ٽUYԽwl3@}^yVi V( k@aa^^f:໥KƱĘ}a^g0k)PCF1q Q0J"g8ax,Tj #4< V m2)j` oƨfg{L֬ tB;4-CxGI_{g=).= ]&~~#8v7"&eTɑs#WoQvSe6 Np\qKOUkCE_\`zzԱ:J.[D 6-]uE"[LΗԽNVT#ܳI/եUv/up|v*2(lS&H,N+Ke|Ь! oV/8,$zJmBDA[k-B^Zknp .߻= +e=7͇uk֬Z̭ S-#,[-¡džvvq$ _E[ endstream endobj 203 0 obj <> endobj 425 0 obj <>stream xcCMR6$-  WTQmqCopyright (c) 1997, 2009 American Mathematical Society (), with Reserved Font Name CMR6.CMR6Computer Modern2wOf}I|:}O˪16-X~_ȱЋ=:D\BEKmlGvCoa  7 ޜ (% endstream endobj 9 0 obj <> endobj 426 0 obj <>stream xz \ &3J-q$KRVkuGQEeqa C%9IHaT6W\nmmJUko j{ޯ~19>+++[;7{{95,ē;bc޲FLsi F/?;M˂7F?FcUajxl̙VEKE-Z$ =KBEShqpXx8T:[M,I"߀`hͮN׈"Z#Gz6GyxštoX(wXO4 ,T2[\"I8[>% GH$(@" }D0Q@wp;=*G! 
jsD* W9>]S+ EaOwϤT+e|$qx^r*8!$o>$T=r@*?ḃ.Dxfˀ\R# `o`)% ̆oѐB[tcTԇ'1&ۻPoS34Y W!\@ԉ-+gޙ#CFg; Q =r\U@0@R'~`c2 [2j O*gCEAS?DP%Kb4!\ѩR6m1(mAE (Ȁ(EĐ`)<7 [3:@ꐒaRz@}{H-vՎP@"" ![Y`` iQI|)ɛZ.y _E֥(o`\*l:s ׊{2i>tl]D996ۑ9 y7Gg4"TG  }Mno@x mC1M&5ldh4ed4M7~3-a\ Õh<"wp8xW G-<OkYBuc;(XM 4K2v+MYsE} G1Ъ:v6n Mͧ^M{H-&˃]|6LF h, : ecq9auЪwk˻ܸ1f)h, ]3B/>• Hvk+|AZ]i"@ )0WlGe.bh|;=426`xj|硽kLuF_l_ ZQ~ q! uy%umeGYIhm*>a旎xі7-3:O*8\NF_uUҗjqxhcc18trOZ 3EwR-sJO@أHqd>fvU8xG=M/L47MOpQTL1%Zg5j{^,vjhZpIgۏ֗';ssȔ8 A ёۜz@_FCVQ"M鏍AԡlFf?{%)цY,lmjwI4pTy?(hwG X0:0E&Ei1zPj&էUچ~9^xYjf ~tqs әs+p.r~Ik"V&8\(3eHOJ ZHxԜnJ!H>Άmu xk:sGE pׁja}>:zp= xG>aGgY^Pm_iq|4>rN'/ ͋h4ZRx)͈"ʕ$cD+_.eA|nN&;\\c;ZVrx)ITC:+cȈ]~Loԓdu!/H2+.êo -7L[,#KsRB~$ؠ O_]oomhM=@MNy)H-=-."a6XZ\j;PkDh\X1zϤdp.is@ $YUN Tf{Ƨ*Pv!MvU)T* #4=& +]l~:aO%qz CGO]3J*Yf)UV+S %Ơ>b e/d\ɲ2 eߗVkKCq ;f u$ @R9CkLhХm`[&꽾{iw싲"CycjoLWS+@G~w'&TcOG)tϰ-ɒ19sfLMo: A9]e| I:eL쁋ohuweayR ar99bx-jzZkCv 9&@6Pr<; h8yQFs6eMVӧ̸^,lp mJ7+OSp *8.T| ˿@}$^<7IȥVܾr :Jk @TJT~~P@wvEAMC= J+sBkj߭)WŢYh Gp @). E =id| @'! O IG2"8Ce{I:sȫ"j1j{řD+MGvYoHHOՖGW9aO%X؎[-f,)Ӯmt1D~U8Ñ>:x@%BF|֕NAksJvJvWavm;aP7;,z|7 v%yI`y׮iK6S3e8Jz{y()-7x{ΐg?A܏v@;% ^dgUz񲎺Φvpitҵm:=sƍ rH׽dD!Κa ؐy0X:!#6#@m@t1_ 3R>~9铙c^yCmSw3-XuH8h"CR|! 
MA .{cyy 4sy~E/X}ܧOt;.jcar_9L_pe lcƦ`l6Գ9+ *]2aFrL/&hr3|}szQYRmyjk9Ϯ%H+kNSV@x% \4RdXĆq%a +_Ck;z66o5"2w3_ӼКښ:s9 KVUB5fō{q9 ;paspWCٍiMt~(-[7iG:2.s8,/2neTƞSe1 eMa5B&4k7ghKKuhKԆm ,&\[A9x{$*60Y*M&hbt^7)/thxxXhm.&21]&[UI4XhmZ Q䯵C{E1[v+M5`}ÛV5cK6 m|{.@@=͞π@q*I - j@M_KJA ҿ[kJ_'xf>*9g!pj32ԋ>'ф5Nm$tqYĺt~vkяjygWwwZSC4G7Ǿ/9(ol,re Zf`H1;rQ^"p75݂+EyE&m=D(Tdb[$qc UA*HTERF"Z?GL*[cRS6$_Px{ڹQ@ pb5lzFϞh۵^O^mlӧ¾LmssC qK]u_&Kp\98Jg"<%RNgi :Po0-|}1B4RDENP?G0QgfA7zCh.V2.|U0 @1V3X [E:TuV[ʸ;0PU֍D?4MkAVX5(b:fsCL&-k>| .l59Kt@gܷ7۫"T*cO7[Γ;JnsЯ:Tjpt\}יqAkĺ;5+޿~Rܔ ™>AFrӦ/3Xޛ5#+$)W\5wt:p:'@qaZvՊykcNC:OG,P 4S_Geٙ|F^,9(f*2ݵzpv "uŵ&IkIR½sb ՝Dpo!wkj aL } ~ɩ]%NaNտ} ; MV;||ڷdfW̮!ޢТ[oC}+I/&<6;Ur=AˣN&Zݟq-ey%Š0T[\.lt@qZ>GAW}V7Lvn RP%dN\ Ke{QFvf l[ ~eֵOcK![wg hԊnu)W|~7B-tvp?މ{@;7#ԥkΓK ~h2݁%Z?ukO剮b(ч">3s*F,'J>=+r+H#e* b+K"Cٱ.מr6}H ~_@ϊlWLp_Y15A!緾z(l 1 ?oøHAԶ߹H9m4sGÊN:m7[ǴLH8HY75RVhm3 WÝ}t<.cGbu[f;5/ TBse7LV>>v5܋KYg{qݽW=|BJmܵ}m.;Bzi>qR@Zˁ  p'Wn5@,(>1.%* G FWt?ƌ5_l`'xtdI /gC'&@2HɖQe5;%ہ:[P\XʔKR3+O3bYkYNo]!^~ߚo-߬e?gx%gXh^DOwџ~ .ܮ^#V@T=g^F?f;GQ}b]SC'!0ސ],~urҏ߯^dZg\> RF]vF{֪2xb "v<UkTWuPudȓ'uZ&m[x-SR}]pĶHqO7\up|m"!޲\u(9qV' vęB%g6\)>āwEWr5߂8nD>IC+\fC \67xMxnP/+zi[i $-{7v k4ױ>S2 1 %L\2孻8F.o?Ƭ+5#ppo_sFBUhC<+MݻSFGou.ri4 _:pEk,G Qd-0bnZ]&+$ai+>=B sWBL1&?w8g?04+CZ,~XmQ3KLZ7a[_9@_b1 =a?t͙ T@? 
De"& ]fF.Ojh^#H3%EmHk`:זjAOnצc;2I.`U ,b~ҧO"6ʐhwvVVٻ({!lwUhCf_O/(H~t VŸ9ģOV_;'],\8h&dYkⶼ?0rdq(_^ endstream endobj 201 0 obj <> endobj 427 0 obj <>stream xOQZGlsc< j4& Lh)bNV,H1.lL\VV¸[992EQ1reϝ')rDζ1yX[jq1}(2S7>LCC=rXrz)Y)Ga0վP(>~Fwy7g,?N[>Gco.!deRe 2I5AS:^j+Crڲȉa4r*|UH@ٜaڹaȤ@l Kxf#%j 3Ʃ  xQa=?e34xsH'I#m_YK+EBU]ۚM{"MۗrY 4:/i(b u\EMK@EZ"_JgsT( 4ʘFC*)ؒ˨P*vꢮnOWOl/ 畼ƢJ"dl8poGjB鶙*E_oZwW6Uz endstream endobj 184 0 obj <> endobj 428 0 obj <>stream xUOMLAhC(T.XClLH$1'2n]ۂ-M- Eت5i%p ^ɋ&^<.~=]B%'Tg> endobj 429 0 obj <>stream xYXWQY {# ("vmU4)Җ],*vXn$11M9sȽ"?>egN} |Q!1 G-G$܀.@LqƊ85-Ֆ~xØܣ^[lop%˝F9'&6%N6A6nڴi<^- HGFɣFrYZ,\)YrlS- &D*Be x,<&N S$(bGeXy/'cXy\">)eq 0YBL߇D'bbx['ć)bdD_wv'+,& MKVD 9!rY">628SDHWDGO)GDžEMV1'hbZcy"!^>ݫ2BO^AܢW̎}8xτy6/HY#_xߺ#E-0q)S >t玕3;:12`/Mi|:A &ӈ!/1XL K?‘XJ ' 'bL,'F+Hb%1E"܉jb.1 8b1OL bLxSb*ч ;/aO8+?!"Hb !%("`D7bѝp!L†I77% H,'YtuŻ=K?*?ҬZ|`q6EQQѮݺ~L7nuobAqff֞={۷-[/ BAs>wt ?vAvl?>Cvwo3`̀<4@bf$A6Ms A=<{ký,tda3kB_r"StcZ]yU< 'J(BAaky%"FW砣jXe͢d/XƻngJ$ռF#zH&?׼i ,#zf%iضk`C߸=2DQ(fi, xD$/^@dY 4X@~~,Afh @c`yG}Lzvc^7+yqzE8Ԕ{04Efdfsjr !kڊlm*<R2vivVPUP*.,TNWSkS [u`X3z %TԢ]Q@ˏzL0kyS-;-a:Y7Q=|G G]\T~\uv|i<{_ٶD t YX΂ (DI, ]ڬEKZhAX=%f@~=}嚭+ΜٸzZPmil(`I`<ԊVv| 'X3ux͝}^nOq7mu=ϳý2J*MebMQF~W~@=8n., {DFumWv\*4?#9G?vR/(yLymc/7G¥g8ʾZnfPaՀE$h&L\<14lj$@dA;Q߳rNAB!R s0Iq AN"rθzg򐙌,d sZz͈ 7o^sO\Q<߶ްȷȆu~WS+Ӈ8MHwy䩐*d_nm`AvB{֬6Ia"$tlEBW+{ß#$M/Dg3d as}ͭF ~#F"{1͛uMg]͵+u٪L-.Z`i2L5k,6`O4"m[@O'A+I"v$hԊVOY̍KԒ8.2'8D\395btEpޚs#kѧ 3S\ dE3{q-JӉ7ba Q,|ی!qt'GL1z;X;(X'Ah %%H4 \\P^j+jW f3Ξ5p5 g8@v _AMܼ\~v7@q'7-x[=jV9kq&Z4E}44 1 'vhnu "P4p$#"GA1AkOt8~}7X^|<}C;'dQ[Wg0<`lgD/Fk6ꖰIoMH1Ho槄Y>1gD_;::{:ֲ=||FB^"s{{/b>m>$O5M X Mp#P+g ,!ο<'8gE{Tptq(44>c/dt8(£57S;KQBLo/Z0cSgV\35L6AE7M<)@DRZ}CTOl1xM<70*: `ya ̞w3enQקfWjvfZ4dôTqw=p42hgVzjpWkoWO={|5pQCG y{in1j8V :Ƕ,i2Q ɶ54w4_ S?p|'I p%+:!Hhv[ېrq_̴ ޼!;9vk{ô~%F:4!7ww~R[Rg5Yn"AᅦǺdl1Zi4^Y6sxqE9h:c>.|L"DQ>|DׂI#sZCÙ`C@+$WK)M gٙ%K.ܷ޵a #0?zҞzF&SC% &\-P`XM5FB17Z ]'ºo8Y@%mȂ &U?.VZ5tӋSI֖&c}uO"pe"[WEISp_B/x+8&,8IN+\ yl3gy3_èJՀV*3*3R1'2N ~z{͕} >, k BkLPY^+`@+)t Ekİ&aX߮'|BlK'n[]Z`&* 
e#[%qZjFUnR Kʬڴ/9tڗXma_+V0fg]>r!isa.]h, tzWjyr0|}6WSX (Vg0FTyy,cE9ϑ ѷx7Xwi!2vs݋]&-X&_x q,k MlA2 R)Unuel-|ь4]w7{W1uϾRي9pyœ7þ u5+~^>]g-:{ {t%/1J뗞}7!nYr ']\ (T/VAJIviNi^Q.vL2+y77<L6Ɉhʫ{?cstY)G&jOx3Sv2%JPX[.f-c?ѯOVø!8duNajJKnoˋ ,{L*H'P(GӠ2mmt3L"7œB8TrmQm5LnancI9:n@< ):,*&<|O!FOڳ]aL6p jwo1Л[ ":),zc7߂ O^ ym;vg_ϼQ-~SzNJZyG js;ʹK>\[K]p"xcܠSkH*XAIn$Uw‰;W/FFדe~E,Yoc XPX3B׃P<-0.ܼރ[< _  X#?t ōNuW01,,xfP ])-:JSbZ'ml2r LmqiQq)/}HsЦ҂P4%e%%EPʔ•ѕjJ_2-T K,8#fh?/x.*VQ]-3pU<_h5>RV[#߲ޘ@RvtlFZ r,]T#ݽWWV*!.pQlu;jk4e@Kuټ-mܒ||uA}A>)4j].h=y_yEE%2l2Mqi'Zr^XlRƺ.a+pCFml=ΰzASX\Px^ށ6j/,j ]^Yʬ\.Ǒo]eCڷWESfg1I)!A`%X[{(xEpWO+]#VPEu#VYZtţMi>΋FGTEymZBJ5uiR^+} !+m&_  :4,r#Kʀ\WJC2Oxx/C?xތ[ ggvêh8OP}r x<$ʄvlLFJHUzi/H܇w>5IeQ 21\Ji!,8@sUR͟ TXllU}:y@a(GͭՕU%Y\\fAr"h -Az=[GИU',ƋϨD"91196nFl.v+1*Bݴ4(W <>."?0GGV]]?~рuڊ*Uirt}INHAڲް_Ot`\\Uu"U"Fl"Zo&`axH1}dSܠ4`B< 1%']; .S&=uY.Mψ)Rʪ4+_ G|<cFa\fNM-Kpү`܋ٛLEBU.H2u9m,-p~"m9 K+(ֹԼt2'3> endobj 430 0 obj <>stream x}Luna62wKe1c `V HNڎJ9hG_ii(6y˜am4!2FCEcwm<7ϓ|}2 _y 2q$]֔^J9(c= [P&tj3qsyocqX zY*)-}uoRD[ :3UcIFSoYu=Z^+.lE:Sscm(۵X=un3یL4WV9C[ͧiVa`<İCaLغ\&L<]ѯ;2TVq$ȤCqGZ>= N"hRMES!C`V3}ŕ@0!w}yd6]vR.uy\/4omHI )M?@ؓRF9i89()ohO::C#B%#GX7ߴ~6OPoW ٷ˥m'po$UȠډ/NdX˭rQ}I pB+/m:y!A@!}N`[j&6-yģ-n8Pn.k,*vܺDo.;t&B~d#Ʉ4 CÃV8 ~МI%/SUޛ1Z[ UvBB/O2 dUY]*aoK抔3) u mC?H({LjIx1N0w)2OrTT{iɮ endstream endobj 37 0 obj <> endobj 431 0 obj <>stream xX xSeN bAHνEԂEitɾ{6Mt(;B, 8Ìl:gtg>iww=;J\jżoهǰ?rҎ8Fc'܋:&ɼ|~a8pbDQ%M:3aނg%<5wӋSS VHSREŒEYRd92ɔ' 3Ϝ ˖f%K/I/.KOKx@:%?=a4'GK,̗JӋVfeIRvKS)钒‚y<o΋_*ܒ(Yrtyi,E[:}MFRf y-~nL-M%M={7o /72o.4Y:nH Qet(uyZNZe &[ jk=EwщӜi=% N; xdZ˂à!7Ѳ* ~o~M#!Ӏrhj8s ޠ!8 ڂntp7űPG>`3" #jdCΦ'Έ3hm:zYI.J+@Ml:tMw{@k4Wro\mh4z"T݉}h oKu4MdiUI`"4^z&$Cw)shr7t C(UsD6m#gnx~nE@12cPy!0Ԕz(429RuʹX9Z hd;Ez;9j3e 55jf1i&EkrB; qehe\~k@G-g̶ϗw.ָq`<u^%9td@-s$z%e< Ji!  
E4nNy !А=& >뺿\V\&BYj (di⬢stYmAD›I;~fGq빛*004nQ{ uģ׽kSOp8V> 7Ϋ65^9"ޞW)t2=IsE|3cnK9pB(_3a?g?dhhՅrU2sκ:敗 po|=b#}"36m¾o}q՛FƉ=-; &ssg\~AʝҕsUEn $ts8buP}Vgo7Jwb]Ƣ`,߹awE![Nw #u$1J8}L>B'E.^YhC QᓷD:j'fm⋯{ ǒY=VM@ui/UY);,cj)uW&}i#i@|5DMk&Ix$NlG2 " -̣%Y,MT5zm\RTYY'V]îptw哲"gߑn_y,OxiVu5qJ.%ﱙ-gc5NMRK<SbǛ*w-[fs772ϖ[}wxRSS`;3VRI5ucQvzA2T[yo n1A5Ҝ[dOѸV7J > dQ~\zV]wd{R!NM;k476Zf\4 $G8$t^eP;\p]YZs9. DҲ5ffe߀ЁQ7=DE޾6gbRB9t(75`N+-47vxm$;ˆ}$tj5֪@\ClSfY?w8鸊z.?xT!>zr o =%>vfIDRΈ*C-Ӹn[i5~? "TfryqUB4v:zeTtKE3 a}?%d=73Q͝M^4?6Tad%Qǵ; X6$1$ ^|{B`fS4i֪"{&7~M1#4њ>e(|aRwMk/XʞRI=c*!){|"ju\EѽW&hP$-.)XT;܆&ZBDš zc' WH%"pZj6&\ht ͭ@xc6[EJQ6 jȘ?҆Oc+;UȤ,@$~IV%oo鉐M|'5^nd+}!{pyڞEk;:88qVߌryMՈ.ݳ0Y\"^IġcȐh |?0nt0Mz;uhOS;DF*OJF~h*>(:>[0!`~ϔ=Zu(RpÕB Alt<:!u Ișc[ˁЫF$-QMf  :fBMOB]>U%&6سQgS A= 4\ e)m O?߭ܘtwC J wn ->Q `rq }ǝMP26A5պ;ڴz3̿"|(;]>GMA$qAQgTnz+؀y<>ޡrRČ\sΡv Cx 4jVuzp0 =Z ;ڹ ъdD{rdŅp6+`KՋYkrr(zdۂ]SІƂƒ6,4g0;E.fɎK^OhVh6WȉqX=/"|qod-D8MNdH> endobj 432 0 obj <>stream x-_HSq?['kQDCI#>|˶jޟAGjLcZ )SX=8 ?  ! m8|8#[ z}ݞ9i3ו`{ff|TJaqӫFP HI4xHB΅-#'Ӡi!\'THLhkCC,zkH,D[_%j rX!iEhU4S{-nlBHDBQi 4Ns/f,;ϰ%H:ĥS|\][y)_kE] Ai$r>bKMY~[Cr| ]:$Z9+gI8F rsf*2WWOLbg{ 8vͲߴ*īE%/LaX+DZWCT}g;[innsv/ƇGaVgku9X~9X Uinyq{x0pkb_6:$k Pn<4,2;-=/:mo l> endstream endobj 178 0 obj <> endobj 433 0 obj <>stream xURmLSW^014MH :3*~j̲B TEԂ|P-XB揍ۀ]L6dldf[s˩dqyޓ<>KS8ibp2-qZ_5&5UV=Ϋ^$ٗrvԷt]r|uYkD!2Mؘ5]v.K.(*U8਴E_#LǓae8\EJufl vQc23_^4WfJbZ¢ESPĥdsZޥ̝jӬ(s;v'EK4ЈtM')5ULM=7ڮj‘BH*(B#j #b 8y';&!~7㔤ZCt' $.ُSxxỸd>EҸ3ИAe'؍ 8y,l)&? I/c[eӯʆXŔe=\W^8 r#r} 9Fmd? "#e|WɛLLL~V85Nuɸ.. D4SIbPE+XB'0žX >hDRlՓI\t^1xžaB \E 5w*BD>Riwr k'i9':#8m01qŸb֑ EbSyGOogVCNY`0eF7IͳVC"ߧi^u8VMEX5?e3㞳 {jœ`˹hU$)Ek`DoG?}ޥ\I4<`DR;4_{Vy ZAzPb$gn*Yt:;c6;8<9ݜ\_?QF? 
IZ8H< yba-/i:a=yZq!)(梛Y8{/ׇӮ- !4@аo4{׎>#T!̫۔5Wn:7$«)=bp}ͽM'V}W"0&bx S˦뗩mdV@= endstream endobj 176 0 obj <> endobj 434 0 obj <>stream xX xuO EЬ(WpYPQWK =hKϤiݤ9hӤ *t`E@Q\dFyfiuuq4ix}Wج wl6w٪U/-|~ }[8q"dq kBاӐ~**5ͮkКb(oNܼK,أ.{LXYZ\XX/, 4JonD9sV@fvjV:XL,A)5l*ȱnp;ܶF֨7JfPP@ȭ ~hvj^ 16[eL6sWIBS3&{z!D1KgD*X%vBhK85V й`74kieBUc(tXNhCt6vX@ 댭!->3>7?D?ओ(C2sZfIAWmJjf5 "~\yu6 6၁&[S۱n4&`=ʍBAMiU>a+HO`f_K#V0T~k4*֋wI13|IWah⃘D5mCo/ <%dW4 -K۪3X,:t3@,>fwC=z0F./{߷Π52h*(7Ub[ѐJc~CCMtѬfأø!]Ȃef|lh6E)'}2ERs[Ln8va2l\ua^A9m޴'蠽`wfTd<&>@pxIf;~+0=ɍ+,̳=&!)w=I%ۏ~8>ܜ{e0K/ z/ɾt }~4-7Trj>rE;% *Q+1 -ӱxB)-m04WԻ\ 3ڐg+݂dۀ/m;R_|dBjaƞ!чps<-ƍ-̠`[hjbrF;olmݗ2B؋F)-U .G@wU@_ ר.FUT-Ä87;=g/hKted"M ~I;LLѨ NdR?{b7њht>w^lođҽWd <:$\Z4D˜J\QH F8&zXn-(AeS;|-$:e~iMl@x Bٽg[{ܓ[0 ՜gd|@=j5f'0>N+6{5AdQdHi a"eBE,\q-w.O`hјX>\=tͳ~q2#*, @OꢬvGZ7LVݾ?|}ؑSd? Ezm%4}`ÞJťU5% "I<>wvЃ =ݣZlԠ]yfPl1MP@Iޚ4: 鳼=''>k'SUE6@IPpVm)xTz>*y4b`u&N;(Iq]n!Lݧz'أpH0qZ[LO3Jqw?HocqZ? ˻VXV>^VaCAm?ѿW T6hf*4yژJ('2{D.Aލ{P.f0ůAw???sl8.7>33yXvj2Yh)*`C8X,={li$!Ay'y} yE}, Hظ endstream endobj 260 0 obj <> endobj 435 0 obj <>stream x9y|յo%LfLfLf&JBBB2 -"$ B‹JYkY4Cq£>RkkE_}-o&Px{=w9ܳ;0 ),Yվ:Yg%=d:^*YCTZoY[ پ-XE IDtZsz 4s8ݮU/m$J2VyWORt^9!pr~pK=D8 تs54䘚YNãAm ^p L("y#߀e`;=YVnE|-Cw Uu8 4Vu p}n#xo~ _?,#I@4 ;vb!#9tJMp'l$ 3 ^`vp/+̢];^JSJ{:"Tͬepb?$lvnzK:\K\.c 6mq;{u6^f3+g=l;^b?ẹܓܯ/: {EXGlS'?Ƞ4@30vq x^KgXs(5 io[! %{Dj a7F7nl!FBod4I%xFoۼȴޮF:#:uLm_eB8}טqp[ڨi^ QXЭ > AG/np #M˔#V\C0H4%mj} fҴ졅-|h Iv;}am756h#¼!g7.ܖ!GȑQh NQž>Ydu~g~MyWJgĻ$RAgl^RE[ngjp$sm+4Կmq-C>/7[[D PQ ѯ }]>{QyX"e(Bj u+rdOl 6,]T[LjH.H)"٧d";>x39G&ORb)Gx9t3C:sku7($ F1+=2{;Dx?z{T=`?5%2^HLHܽ* L~=wlϟ5owe ;)E%wYz:te{gd1=һ^_XȲPgPo7ᙆ;ִ^֦ki;5Qh;VܡժժH3=No{73?2E-6Fq56MD3.ʑ}ׅA,KhS#JYQYrɷb휱fjjvVͥ%&7KS U`?@h:d2y^ehdt8. 
'H0<׎JKbRocg#Ne %Bo2D& 8[ !cQEQb!#rZ-2T KKNһ՚*_?hִr4|HO}rWc$EKS\c i0-X=Tѭ6ך&K˲KrhԢ!UVfYk,Rޙ!aYӣ#amVXc&ZelH"[KKZӔ2>T(el}+;pW|}p>qkSON!^_#^%r eˌ e!o͞l8"-y9U'P=/R3 fN^` p0/~>J/Ze*F],o2vkVj٬peRmԻkYQn͋5,o6J f\Q).1wVuYܼκ\`crp'mF/&tl9qj"eSJ\4Z4Tlե%SB9Y?V*`P kQPɂLۦv kU+UN//X9 $Zrbeń/K%_dN}Y@P-*'aM w(]e:JHUGk}n_i+k&kO߹ f{ )R..?;o+{vteqCzjZϥyՆ72`xwvnlc8vJ)n.CȰftk64Y7.݄VW^.!ф('?Ҍ4Rjp "9]zJR4v1< 'Ū 6 tUQZ,WE*P.sk??dt f~~Kho{z?$dP^w M4L7,0f~RFn)M6PVd5]CoУ˙ykƕ3˭k/2Ed =jm֌ʐKWWVu;W԰;t43cg9dKiroËy 'Dl''f Yjf LHc|TFFԸMn-ȂPAy^ RhptY;E"%7+~X9zeNiz0] 8|2Xt0fIc|fy=Փ!iW? VnhŞ}JjF~f#'ڂ}ܑ3rjc=ctz @Dn:d#3hL}P$MZٙÜұԟ;0&T"`Uzͪz&J=d}6f7>1øS\3KՅQ~A1Q%0?ieݕ'd4ez- ˲})}^_Ov'/@AG/Eif\vk('U-~JiW\4wKIP;=t{V9R'h`tŎ^N3%AjxV/jT#W< YwG =I++6S[Ů75K%ː" - I d\ :]xt W&ꭋl7.bX)hU% WUM@/KwMMSz~qwLeBe{^j~ g%|.*Yp6E]˦79o~wa!]/00"iF>ӫmhКd`5XS9Ǹ/x9Z}&YA'LldP~CkiJ35~ bom Wm/g/o#}@l#n MZ^0Mlq9,9=ے$`Ђf])is:sUV߼66ӕ^_[=؊I6~bZ+L~r/ **TS!VB=:A@T@0?&ROh\ӭ+ǨW?5$ J W`1]Ce&< (o" P @lTO?:<~{j UPl˯H T\W6 endstream endobj 262 0 obj <> endobj 436 0 obj <>stream x8kxUs{ͥi&ͽI&izIoZPR.PRl Zԭ [V@eUp],6 >]]u}~7h͛{r9ss̙7@ z97+c@pD-YӑL.~}Rղ֖`VHCFk{;j]K%z{6"pellLJޱ~8w z !֏6zKK,aJRъ28.τp,a8Ob!u  68J|HBܱDǞ ?UHl_R=Ȧ^]vlvl0GIf>Jg~ ߐp5{+PZa5d| ?=:EIdB[y\)$D$d+F!:n Qc`#J(?EjdL's&V_P,^.v'eQ%lIlcv0cc9oM7bb 5S up4i;`Oq j4sb+JI!RGn"Hyr2˟)*ʨΣʹhcaBf9ZIu'M"RVZ=}YLǝJw:i6l2RFTes,C \Ս1z\55RՄM 48T}㜰1>q \3D(G刐s#Xk#<kq< q_pU0it]CUQtu ͑FaִՈ4#6Ba*Ncܡ9s燪,NáGnN'<\ZH$ִ͍h~iZZU6Z*z:bZ6WIt^ӣ؛5ρ Ɇ1&$WT%mqUjݼ AsT sx'7qmO4wjT-u&?KsJwSlgՏ H;f aǒ&.dvZ&%p>  h3;̹g4{]e*\㊪!,roܬ|˱;@Fqilw =n+Hw#mgtJ ]uؗD#օ g͙ ; X@pIG4z< 5ld_ML5Bc}+mƺ 8Q+n0AkYg ;?cqb1 %l7BH5LrڋmyވHkm*=EAjj4[;q2@vA5i?( ߤPMf;B djnlW"~9ap0U(z`tVT*Q)EQ"Cv8(D^"%dGDV"Ha:tX-a!%qJ&̊L"FUV0;.sQwyyY!91 5x!Imt.[wvNZhTI HxfD;i2 C2feIpRV>[TnP Q1B!CfS+m+=RJK ZVo!..xm~+ʄ2Neg->d$Ӕ_2%vr\{a6#R4ZCA G#[!P?;Y2Q&%N0Y93&R'm';$?@2R 'L!*Lxד%38̜%2g-?p}eʥ89DΙ3Ֆ!i r[-loeݩn+g+ʛlrfo_L= 9^tр6&a\x#8j5X±ݣQk)jtK`4 x@WyHev@4 ƅ nDEL#9KF]Aa >?Dr{|c7 7wO?O̙爋~-sW|3 %Kϱ (zVW?th3X V4M~e/1$YHMr`V%i|MVUTuEg28_Ct\Lu/Qpy Q;0ՠ],E/*5IQڶ(-W2OoG;-eB;C.~c 
֖f3d}~>EPdR}ttQ,ﳙZb$s\4&í<4#(TNHV)ܶtZ|xZ't7jHxg[T>V3~e=e|:䝲͠)[R2~tڽWnʉV=|vX-{OHGwc'y֣KbNI*|Ji#~"|#gsX9=Zvz\Δ"jq|ج"p)}A=9KX1~-Xj1#i#YN)ΨqF,͠V7L``]OB'YD3K)_ .o"BZ/9b_͜k&{ټ?ѹ沥IwyKcNJ0"^~qz_jD<'~N9YmFAi*LɤdE՛̖Ook(Rd74Cb\brD@LEΨ=K!C[?aܺC(F҄tCc/>]I.~ GoїH AuU}^Vd^e*Uk4gjӗlre<<0nǓ3X<T` 3mLËD%jԒĠqWF%?=ҹ>t,:x|n2#,Gs/?$z;t},>3-qV;gLޚqwYrO;[twz0\֟f`5sudEq;=If_Rߙd*5 ϙgS^CyGf Tv~S~x45bqyRJH:=)ۻ;$znǩ;ALB̦1$t'8UIW$LNrY'~3#HXĔjM䀉4%t6n.yRDO3v# v"ZK.}Sdӝ)߱-Yw+mY!sv ;ҳf<- [N VgRly9~ևҿ-b[ޡ$DHEͦ k(@'3v2WRM&35[uD=z!B3\j=Rx?G_O]{[GCRn.gu>g~ـ-N,EԙRLL^}ASɁOs蹌шi_S*fX}KGG/ъ% 61iOX&~Q ˷~1/ y JU~fJ;7TXA%9C(,V,f7r{ӡؙ܏MY'ȼ'S6Gh$1IuG]K /u4oN2p9V.JLjG"TwX;駣vUVa(6 ujD=knwM}Zayǜ|k9A|)]l;EF03ѕټ+}LQBSeЭdf9.bt%~S$/ILA3Q)Y' jO9^C)Moc1N J TaOS $̪ Wǁ%ҴOeeWKgےܛWݽ% yOm솁{-ŶVl&ch]t8ǀT:;˾#$nC\a F@$N̛ endstream endobj 81 0 obj <> endobj 437 0 obj <>stream xmT PSg< :ޛi*U.XE" Zk!B$ H /AJH  (*Zu;s]ut[Wk{tfv:ϜϜ<>{Q4MK"c!/ܿsܿz<#s@4`?|tv^EudNn&=M/H +BCW,/ RiSXVRj'SjsÖ,tʬM;AtZSi Tl|2K%U]+Ui9*Mv*S(/'-s߯Z0DMT Kͤ~CI(FM|*_L)5@y)&D=C;$Hq&b'E ?_Ia_cqd&CRg:vl d *HI# =7F.ΒG fOq݃L|m%DWisR!L* h9iiGPu`諍z(c[ښ:e=JN٫N],5#P٢r3!|p#wJG> A`8Q[$g YKP ˷FmpyS4]zΔ^k4|OKw!˔<_'3X}pIc/)o(;p/Pf{k޲ Dž~? 
%Ֆ~`N~(3oޗgH7i,<*- 3)ڹ!ɽAdntvRKjq?זQp79Rý wd^^qg~Å_ CtoDg>%Ԏ4ImU`dK`1*>@ $6쬴 L9T9iT6Xk涁(8ԤvF!.Og_"D@KV LobR!q[(e?"x?ύ;;jx'"NPCrPm_Bsha{ `£Via75[l(eϰne֟9FfuZ0 W^i9KH\ V)n'Fǀ5YjkUb-m> ^eD+`U3x_&_o_и_>Vw zǡ+TUWWQ?^YD$|"ee:"xԉ c1=_앴BN%_IJfV=(]2^gF6%AľUV*N;cps۞;ّ4rMM΢G \ƱqzjQǂ +Y=:{Cg=3٣Pb[†eJmh3uDf*Sp> oL/%"C05/^Uԥ49 \ftXooWF7G-tcFb2$*ׁ -ez D\W `6ɟf+ SYmgB?%w?d>5=/˷#1.!K]gp+l>||m>ݜ& endstream endobj 459 0 obj <>stream 2018-01-09T12:27:26+01:00 2018-01-09T12:27:26+01:00 dvips(k) 5.997 Copyright 2017 Radical Eye Software Timbl_6.4_Manual.dvi endstream endobj 2 0 obj <>endobj xref 0 460 0000000000 65535 f 0000439943 00000 n 0000549425 00000 n 0000439323 00000 n 0000427207 00000 n 0000000015 00000 n 0000002320 00000 n 0000440009 00000 n 0000466905 00000 n 0000501058 00000 n 0000466078 00000 n 0000492389 00000 n 0000465178 00000 n 0000481623 00000 n 0000464491 00000 n 0000479990 00000 n 0000440050 00000 n 0000440080 00000 n 0000427367 00000 n 0000002340 00000 n 0000002515 00000 n 0000440143 00000 n 0000440173 00000 n 0000427529 00000 n 0000002535 00000 n 0000004019 00000 n 0000440203 00000 n 0000440233 00000 n 0000427691 00000 n 0000004040 00000 n 0000006483 00000 n 0000440274 00000 n 0000440304 00000 n 0000427853 00000 n 0000006504 00000 n 0000014143 00000 n 0000470152 00000 n 0000521544 00000 n 0000468526 00000 n 0000512649 00000 n 0000440345 00000 n 0000440375 00000 n 0000428015 00000 n 0000014164 00000 n 0000018545 00000 n 0000440438 00000 n 0000440468 00000 n 0000428177 00000 n 0000018566 00000 n 0000022016 00000 n 0000440531 00000 n 0000440561 00000 n 0000428339 00000 n 0000022037 00000 n 0000022378 00000 n 0000440635 00000 n 0000440665 00000 n 0000428501 00000 n 0000022398 00000 n 0000026092 00000 n 0000440695 00000 n 0000440725 00000 n 0000428663 00000 n 0000026113 00000 n 0000030526 00000 n 0000440777 00000 n 0000440807 00000 n 0000428825 00000 n 0000030547 00000 n 0000034778 00000 n 0000440859 00000 n 0000440889 00000 n 0000428987 00000 n 0000034799 00000 n 
0000039690 00000 n 0000440963 00000 n 0000440993 00000 n 0000429149 00000 n 0000039711 00000 n 0000046764 00000 n 0000474042 00000 n 0000545763 00000 n 0000441056 00000 n 0000441086 00000 n 0000429311 00000 n 0000046785 00000 n 0000052460 00000 n 0000441171 00000 n 0000441201 00000 n 0000429473 00000 n 0000052481 00000 n 0000059541 00000 n 0000441286 00000 n 0000441316 00000 n 0000429635 00000 n 0000059562 00000 n 0000066428 00000 n 0000441390 00000 n 0000441420 00000 n 0000429797 00000 n 0000066449 00000 n 0000072585 00000 n 0000441494 00000 n 0000441525 00000 n 0000429962 00000 n 0000072607 00000 n 0000072896 00000 n 0000441589 00000 n 0000441620 00000 n 0000430128 00000 n 0000072917 00000 n 0000079130 00000 n 0000441651 00000 n 0000441682 00000 n 0000430294 00000 n 0000079152 00000 n 0000086497 00000 n 0000441735 00000 n 0000441766 00000 n 0000430460 00000 n 0000086519 00000 n 0000091158 00000 n 0000441830 00000 n 0000441861 00000 n 0000430626 00000 n 0000091180 00000 n 0000096020 00000 n 0000441914 00000 n 0000441945 00000 n 0000430792 00000 n 0000096042 00000 n 0000103385 00000 n 0000441998 00000 n 0000442029 00000 n 0000430958 00000 n 0000103407 00000 n 0000110324 00000 n 0000442115 00000 n 0000442146 00000 n 0000431124 00000 n 0000110346 00000 n 0000115228 00000 n 0000442243 00000 n 0000442274 00000 n 0000431290 00000 n 0000115250 00000 n 0000120778 00000 n 0000442327 00000 n 0000442358 00000 n 0000431456 00000 n 0000120800 00000 n 0000122402 00000 n 0000442411 00000 n 0000442442 00000 n 0000431622 00000 n 0000122424 00000 n 0000122770 00000 n 0000442484 00000 n 0000442515 00000 n 0000431788 00000 n 0000122791 00000 n 0000127738 00000 n 0000442546 00000 n 0000442577 00000 n 0000431954 00000 n 0000127760 00000 n 0000133183 00000 n 0000472755 00000 n 0000473357 00000 n 0000464063 00000 n 0000442641 00000 n 0000442672 00000 n 0000432120 00000 n 0000133205 00000 n 0000141007 00000 n 0000472201 00000 n 0000530578 00000 n 0000471222 00000 n 0000529016 00000 n 
0000470779 00000 n 0000528115 00000 n 0000469193 00000 n 0000520264 00000 n 0000468064 00000 n 0000511752 00000 n 0000442775 00000 n 0000442806 00000 n 0000432286 00000 n 0000141029 00000 n 0000148504 00000 n 0000442957 00000 n 0000442988 00000 n 0000432452 00000 n 0000148526 00000 n 0000156066 00000 n 0000443128 00000 n 0000443159 00000 n 0000432618 00000 n 0000156088 00000 n 0000164285 00000 n 0000467734 00000 n 0000510863 00000 n 0000466750 00000 n 0000500396 00000 n 0000443273 00000 n 0000443304 00000 n 0000432784 00000 n 0000164307 00000 n 0000171604 00000 n 0000443455 00000 n 0000443486 00000 n 0000432950 00000 n 0000171626 00000 n 0000179028 00000 n 0000443600 00000 n 0000443631 00000 n 0000433116 00000 n 0000179050 00000 n 0000186654 00000 n 0000465923 00000 n 0000491769 00000 n 0000443767 00000 n 0000443798 00000 n 0000433282 00000 n 0000186676 00000 n 0000195102 00000 n 0000465112 00000 n 0000443951 00000 n 0000443982 00000 n 0000433448 00000 n 0000195124 00000 n 0000202424 00000 n 0000444070 00000 n 0000444101 00000 n 0000433614 00000 n 0000202446 00000 n 0000211614 00000 n 0000444200 00000 n 0000444231 00000 n 0000433780 00000 n 0000211636 00000 n 0000220013 00000 n 0000444317 00000 n 0000444348 00000 n 0000433946 00000 n 0000220035 00000 n 0000228590 00000 n 0000444412 00000 n 0000444443 00000 n 0000434112 00000 n 0000228612 00000 n 0000234486 00000 n 0000444529 00000 n 0000457676 00000 n 0000454191 00000 n 0000450059 00000 n 0000446292 00000 n 0000444688 00000 n 0000446103 00000 n 0000473141 00000 n 0000535024 00000 n 0000473818 00000 n 0000540223 00000 n 0000446139 00000 n 0000446187 00000 n 0000446218 00000 n 0000460558 00000 n 0000434323 00000 n 0000234508 00000 n 0000245707 00000 n 0000463843 00000 n 0000474521 00000 n 0000460687 00000 n 0000460735 00000 n 0000460766 00000 n 0000434509 00000 n 0000245730 00000 n 0000253294 00000 n 0000460867 00000 n 0000460898 00000 n 0000434675 00000 n 0000253316 00000 n 0000260622 00000 n 0000460951 00000 n 
0000460982 00000 n 0000434841 00000 n 0000260644 00000 n 0000268582 00000 n 0000461024 00000 n 0000461055 00000 n 0000435007 00000 n 0000268604 00000 n 0000271744 00000 n 0000461108 00000 n 0000461139 00000 n 0000435173 00000 n 0000271766 00000 n 0000276450 00000 n 0000461203 00000 n 0000461234 00000 n 0000435339 00000 n 0000276472 00000 n 0000281724 00000 n 0000461309 00000 n 0000461340 00000 n 0000435505 00000 n 0000281746 00000 n 0000289511 00000 n 0000461439 00000 n 0000461470 00000 n 0000435671 00000 n 0000289533 00000 n 0000296946 00000 n 0000461545 00000 n 0000461576 00000 n 0000435837 00000 n 0000296968 00000 n 0000302913 00000 n 0000461662 00000 n 0000461693 00000 n 0000436003 00000 n 0000302935 00000 n 0000309164 00000 n 0000461757 00000 n 0000461788 00000 n 0000436169 00000 n 0000309186 00000 n 0000315000 00000 n 0000461863 00000 n 0000461894 00000 n 0000436335 00000 n 0000315022 00000 n 0000321390 00000 n 0000462004 00000 n 0000462035 00000 n 0000436501 00000 n 0000321412 00000 n 0000327424 00000 n 0000462099 00000 n 0000462130 00000 n 0000436667 00000 n 0000327446 00000 n 0000331830 00000 n 0000462194 00000 n 0000462225 00000 n 0000436833 00000 n 0000331852 00000 n 0000337478 00000 n 0000462300 00000 n 0000462331 00000 n 0000436999 00000 n 0000337500 00000 n 0000340579 00000 n 0000462395 00000 n 0000462426 00000 n 0000437165 00000 n 0000340601 00000 n 0000346398 00000 n 0000462479 00000 n 0000462510 00000 n 0000437331 00000 n 0000346420 00000 n 0000348500 00000 n 0000462574 00000 n 0000462605 00000 n 0000437497 00000 n 0000348522 00000 n 0000356017 00000 n 0000462647 00000 n 0000462678 00000 n 0000437663 00000 n 0000356039 00000 n 0000363218 00000 n 0000462742 00000 n 0000462773 00000 n 0000437829 00000 n 0000363240 00000 n 0000370550 00000 n 0000462815 00000 n 0000462846 00000 n 0000437995 00000 n 0000370572 00000 n 0000378179 00000 n 0000462899 00000 n 0000462930 00000 n 0000438161 00000 n 0000378201 00000 n 0000385693 00000 n 0000462972 00000 n 
0000463003 00000 n 0000438327 00000 n 0000385715 00000 n 0000393205 00000 n 0000463056 00000 n 0000463087 00000 n 0000438493 00000 n 0000393227 00000 n 0000400548 00000 n 0000463140 00000 n 0000463171 00000 n 0000438659 00000 n 0000400570 00000 n 0000407664 00000 n 0000463213 00000 n 0000463244 00000 n 0000438825 00000 n 0000407686 00000 n 0000415214 00000 n 0000463286 00000 n 0000463317 00000 n 0000438991 00000 n 0000415236 00000 n 0000422888 00000 n 0000463359 00000 n 0000463390 00000 n 0000439157 00000 n 0000422910 00000 n 0000427185 00000 n 0000463432 00000 n 0000463463 00000 n 0000474738 00000 n 0000480295 00000 n 0000482159 00000 n 0000491980 00000 n 0000492852 00000 n 0000500607 00000 n 0000501683 00000 n 0000511080 00000 n 0000511997 00000 n 0000513267 00000 n 0000520486 00000 n 0000521894 00000 n 0000528348 00000 n 0000529334 00000 n 0000530864 00000 n 0000535242 00000 n 0000540448 00000 n 0000546049 00000 n 0000463516 00000 n 0000464148 00000 n 0000464207 00000 n 0000464914 00000 n 0000465822 00000 n 0000466666 00000 n 0000467599 00000 n 0000467978 00000 n 0000468401 00000 n 0000469087 00000 n 0000469538 00000 n 0000469625 00000 n 0000470630 00000 n 0000470976 00000 n 0000471579 00000 n 0000471790 00000 n 0000472645 00000 n 0000472822 00000 n 0000473443 00000 n 0000473502 00000 n 0000474434 00000 n 0000547959 00000 n trailer << /Size 460 /Root 1 0 R /Info 2 0 R /ID [] >> startxref 549640 %%EOF LanguageMachines-timbl-642727d/docs/texfiles/000077500000000000000000000000001451477526200210635ustar00rootroot00000000000000LanguageMachines-timbl-642727d/docs/texfiles/Timbl_6.3_API.tex000066400000000000000000001733001451477526200237370ustar00rootroot00000000000000% TiMBL 6.3 API \documentclass{report} \usepackage{epsf} \usepackage{a4wide} \usepackage{palatino} \usepackage{fullname} \usepackage{url} \newcommand{\chisq}{{$ \chi^2 $}} \author{Ko van der Sloot\\ \ \\ Induction of Linguistic Knowledge\\ Computational Linguistics\\ Tilburg University \\ \ \\ P.O. 
Box 90153, NL-5000 LE, Tilburg, The Netherlands \\ URL: http://ilk.uvt.nl} \title{{\huge TiMBL: Tilburg Memory-Based Learner} \\ \vspace*{0.5cm} {\bf version 6.3} \\ \vspace*{0.5cm}{\huge API Reference Guide}\\ \vspace*{1cm} {\it ILK Technical Report -- ILK 10-03}} %better paragraph indentation \parindent 0pt \parskip 9pt \begin{document} \maketitle \tableofcontents \chapter*{Preface} This is a brief description of the TimblAPI class, the application programming interface to the Timbl\footnote{\url{http://ilk.uvt.nl/timbl}} software package, and its main functions. For an introduction into Timbl, consult the Timbl Reference Guide \cite{Daelemans+10}. Although most of the API can be traced in the {\tt TimblAPI.h} file, the reverse is not true; some functions {\tt TimblAPI.h} are still ``work in progress'' and some others are artefacts to simplify the implementation of the TiMBL main program\footnote{Timbl.cxx is therefore {\em not} a good example of how to use the API.}. To learn more about using the API, you should study programs such as {\tt classify.cxx}, {\tt tse.cxx}, and the examples given in this manual, which can all be found in the {\tt demos} directory of this distribution. As you can readily gather from these examples, the basic thing you need to do to get access to the TimblAPI functions is to include {\tt TimblAPI.h} in the program, and to include {\tt libTimbl.a} in your linking path. {\bf Important note}: The described functions return a result (mostly a bool) to indicate succes or failure. To simplify the examples, we ignore these return values. 
This is, of course, bad practice, to be avoided in real life programming.\footnote{as stated by commandment 6 of ``The Ten Commandments for C Programmers''' by Henry Spencer: If a function be advertised to return an error code in the event of difficulties, thou shalt check for that code, yea, even though the checks triple the size of thy code and produce aches in thy typing fingers, for if thou thinkest ``it cannot happen to me'', the gods shall surely punish thee for thy arrogance.} {\bf Warning}: Although the TiMBL internals perform some sanity checking, it is quite possible to combine API functions such that some undetermined state is reached, or even a conflict arises. The effect of the {\tt SetOptions()} function, for instance, might be quite surprising. If you have created your own program with the API it might be wise to test against well-know data to see if the results make sense. \chapter{Changes} \label{changes} \section{From version 6.2 to 6.3} No changes to the API are made for this release. This Manual is made up to date (preserving the beta-state). \section{From version 6.1 to 6.2} In version 6.2, some additional functions were added to the API: {\tt matchDepth()}, {\tt matchedAtLeaf()}, {\tt WriteMatrices()}, {\tt GetMatrices()} and {\tt ShowStatistics()}. These reflect the additional functionality of Timbl 6.2. The API is still experimental, and contains more functions than described in this manual. Using these `undocumented' features is, as usual, unwise. \section{From version 5.1 to 6.1} The major change in 6.0 is the introduction of the {\tt neighborSet} class, with some special Classify functions. We added Classify functions that deliver pointers into Timbl's internal data. This is fast, but dangerous. Also, a {\tt WriteInstanceBaseXml()} function is added, which comes in handy when you want to know more about the instance base. Two more examples demonstrating neighborSets and such are added in Appendix B. 
From version 6.0 to 6.1, the API has not changed. \section{From version 5.0 to 5.1} The API is quite stable at the moment. Most TiMBL changes did not affect the API. The only real API change is in the {\tt GetWeights()} function. (see the section on Storing and retrieving intermediate results). A few options were added to Timbl, influencing the table in Appendix A. We have also changed and enhanced the examples in Appendix B. \chapter{Quick-start} \section{Setting up an experiment} There is just one way to start a TiMBL experiment, which is to call the TimblAPI constructor: \begin{footnotesize} \begin{verbatim} TimblAPI( const std::string& args, const std::string& name ="" ); \end{verbatim} \end{footnotesize} args is used as a "command line" and is parsed for all kind of options which are used to create the right kind of experiment with the desired settings for metric, weighting etc. If something is wrong with the settings, {\em no}\/ object is created. The most important option is {\tt -a} to set the kind of algorithm, e.g. {\tt -a IB1} to invoke an IB1 experiment or {\tt -a IGTREE} to invoke an IGTREE experiment. A list of possible options is give in Appendix A. The optional name can be useful if you have multiple experiments. In case of warnings or errors, this name is appended to the message. For example: \begin{footnotesize} \begin{verbatim} TimblAPI *My_Experiment = new TimblAPI( "-a IGTREE +vDI+DB", "test1" ); \end{verbatim} \end{footnotesize} {\tt My\_Experiment} is created as an IGTREE experiment with the name "test1", and the verbosity is set to DI+DB, meaning that the output will contain DIstance and DistriBution information. 
The counterpart to creation is the {\tt \~{ }TimblAPI()} destructor, which is called when you delete an experiment: \begin{footnotesize} \begin{verbatim} delete My_Experiment; \end{verbatim} \end{footnotesize} \section{Running an experiment} Assuming that we have appropriate datafiles (such as the example files {\tt dimin.train} and {\tt dimin.test} in the TiMBL package), we can get started right away with the functions {\tt Learn()} and {\tt Test()}. \subsection{Training} \begin{footnotesize} \begin{verbatim} bool Learn( const std::string& f ); \end{verbatim} \end{footnotesize} This function takes a file with name 'f', and gathers information such as: number of features, number and frequency of feature values and the same for class names. After that, these data are used to calculate a lot of statistical information, which will be used for testing. Finally, an InstanceBase is created, tuned to the current algorithm. \subsection{Testing} \begin{footnotesize} \begin{verbatim} bool Test( const std::string& in, const std::string& out, const std::string& perc = "" ); \end{verbatim} \end{footnotesize} Test a file given by 'in' and write results to 'out'. If 'perc' is not empty, then a percentage score is written to file 'perc'. For example: \begin{footnotesize} \begin{verbatim} My_Experiment->Learn( "dimin.train" ); My_Experiment->Test( "dimin.test", "my_first_test" ); \end{verbatim} \end{footnotesize} An InstanceBase will be created from dimin.train, then dimin.test is tested against that InstanceBase and output is written to my\_first\_test. \subsection{Special cases of {\tt Learn()} and {\tt Test()}} There are special cases where {\tt Learn()} behaves differently: \begin{itemize} \item When the algorithm is IB2, {\tt Learn()} will automatically take the first $n$ lines of f (set with the {\tt -b n} option) to bootstrap itself, and then the rest of f for IB2-learning. After Learning IB2, you can use {\tt Test()} as usual. 
\item When the algorithm is CV, {\tt Learn()} is not defined, and all work is done in a special version of {\tt Test()}. 'f' is assumed to give the name of a file, which, on separate lines, gives the names of the files to be cross-validated. Also, if {\em featureWeights}\/ or {\em probabilities}\/ are read from user-defined datafiles, a special {\tt CVprepare()} function must be called, to make the weighting, weightFilename and probabilityFileName known to the {\tt Test()} function. See Appendix B for a complete CV example (program {\tt api\_test3}).
%TODO: add an example with CVprepare!
\end{itemize}
\section{More about settings}
After an experiment is set up with the TimblAPI constructor, many options can be changed "on the fly" with:
\begin{footnotesize}
\begin{verbatim}
bool SetOptions( const std::string& opts );
\end{verbatim}
\end{footnotesize}
Here, `opts' is interpreted as a list of option settings, just like in the TimblAPI constructor. When an error in the opts string is found, {\tt SetOptions()} returns false. Whether any options are really set or changed in that case is undefined. Note that a few options can only be set {\em once}\/ when creating the experiment, most notably the algorithm. Any attempt to change these options will result in a failure. See Appendix A for all valid options and information about the possibility to change them within a running experiment. Note: {\tt SetOptions()} is lazy; changes are cached until the moment they are really needed, so you can do several {\tt SetOptions()} calls with even different values for the same option. Only the last one seen will be used for running the experiment. To see which options are in effect, you can use the calls {\tt ShowOptions()} and {\tt ShowSettings()}.
\begin{footnotesize}
\begin{verbatim}
bool ShowOptions( std::ostream& );
\end{verbatim}
\end{footnotesize}
Shows all options with their possible and current values.
\begin{footnotesize}
\begin{verbatim}
bool ShowSettings( std::ostream& );
\end{verbatim}
\end{footnotesize}
Shows all options and their current values. For example:
\begin{footnotesize}
\begin{verbatim}
My_Experiment->SetOptions( "-w2 -m:M" );
My_Experiment->SetOptions( "-w3 -v:DB" );
My_Experiment->ShowSettings( cout )
\end{verbatim}
\end{footnotesize}
See Appendix B (program {\tt api\_test1}) for the output.
\section{Storing and retrieving intermediate results}
To speed up testing, or to manipulate what is happening internally, we can store and retrieve several important parts of our experiment: The InstanceBase, the FeatureWeights, the ProbabilityArrays and the ValueDistance Matrices. Saving is done with:
\begin{footnotesize}
\begin{verbatim}
bool WriteInstanceBase( const std::string& f );
bool SaveWeights( const std::string& f );
bool WriteArrays( const std::string& f );
bool WriteMatrices( const std::string& f );
\end{verbatim}
\end{footnotesize}
Retrieve with their counterparts:
\begin{footnotesize}
\begin{verbatim}
bool GetInstanceBase( const std::string& f );
bool GetWeights( const std::string& f, Weighting w );
bool GetArrays( const std::string& f );
bool GetMatrices( const std::string& f );
\end{verbatim}
\end{footnotesize}
All use `f' as a filename for storing/retrieving. {\tt GetWeights} needs information to decide {\em which}\/ weighting to retrieve. Weighting is defined as the enumerated type:
\begin{footnotesize}
\begin{verbatim}
enum Weighting { UNKNOWN_W, UD, NW, GR, IG, X2, SV };
\end{verbatim}
\end{footnotesize}
Some notes:
\begin{enumerate}
\item The InstanceBase is stored in an internal format, with or without hashing, depending on the {\tt -H} option. The format is described in the TiMBL manual. Remember that it is a bad idea to edit this file in any way.
\item {\tt GetWeights()} can be used to override the weights that {\tt Learn()} calculated. {\tt UNKNOWN\_W} should not be used.
\item The Probability arrays are described in the TiMBL manual.
They can be manipulated to tune the MVDM similarity metric. \end{enumerate} If you like you may dump the Instancebase in an XML format. No Retrieve function is available for this format. \begin{footnotesize} \begin{verbatim} bool WriteInstanceBaseXml( const std::string& f ); \end{verbatim} \end{footnotesize} \chapter{Classify functions} \section{Classify functions: Elementary} After an experiment is trained with {\tt Learn()}, we do not have to use {\tt Test()} to do bulk-testing on a file. We can create our own tests with the {\tt Classify} functions: \begin{footnotesize} \begin{verbatim} bool Classify( const std::string& Line, std::string& result ); bool Classify( const std::string& Line, std::string& result, double& distance ); bool Classify( const std::string& Line, std::string& result, std::string& Distrib, double& distance ); \end{verbatim} \end{footnotesize} Results are stored in 'result' (the assigned class). 'distance' will get the calculated distance, and 'Distrib' the distribution at 'distance' which is used to calculate 'result'. Distrib will be a string like ``\{ NP 2, PP 6 \}''. It is up to you to parse and interpret this. (In this case: There were 8 classes assigned at 'distance', 2 NP's and 6 PP's, giving a 'result' of ``PP''.) If you want to perform analyses on these distributions, it might be a good idea to read the next section about the other range of Classify() functions. A main disadvantage compared to using {\tt Test()} is that {\tt Test()} is optimized. {\tt Classify()} has to test for sanity of its input and also whether a {\tt SetOptions()} has been performed. This slows down the process. A good example of the use of {\tt Classify()} is the {\tt classify.cxx} program in the TiMBL Distribution. 
Depending on the Algorithm and Verbosity setting, it may be possible to get some extra information on the details of each classification using:
\begin{footnotesize}
\begin{verbatim}
const bool ShowBestNeighbors( std::ostream& os, bool distr ) const;
\end{verbatim}
\end{footnotesize}
Provided that the option {\tt +v n} or {\tt +v k} is set and we use IB1 or IB2, output is produced similar to what we see in the TiMBL program. When 'distr' is true, their distributions are also displayed. Bear in mind: The {\tt +vn} option is expensive in time and memory and does not work for IGTREE, TRIBL, and TRIBL2. Two other functions provide the results as given by the {\tt +vmd} verbosity option:
\begin{footnotesize}
\begin{verbatim}
size_t matchDepth() const;
bool matchedAtLeaf() const;
\end{verbatim}
\end{footnotesize}
The first returns the matching Depth in the InstanceBase; the second flags whether it was a Leaf or a Non-Terminal Node.
\section{Classify functions: Advanced}
A faster, but more dangerous version of Classify is also available. It is faster because it returns pointers into Timbl's internal datastructures. It is dangerous because it returns pointers into Timbl's internal datastructures (using 'const' pointers, so it is fortunately difficult to really damage Timbl)
\begin{footnotesize}
\begin{verbatim}
const TargetValue *Classify( const std::string& );
const TargetValue *Classify( const std::string&, const ValueDistribution *& );
const TargetValue *Classify( const std::string&, double& );
const TargetValue *Classify( const std::string&, const ValueDistribution *&, double& );
\end{verbatim}
\end{footnotesize}
A ValueDistribution is a list-like object (but it is not a real list!) that contains TargetValues objects and weights. It is the result of combining all nearest neighbors and applying the desired weightings. Timbl chooses a best TargetValue from this ValueDistribution and the Classify functions return that as their main result.
{\bf Important}: Because these functions return pointers into Timbl's internal representation, the results are only valid until the next Classify function is called (or the experiment is deleted). Both the TargetValue and ValueDistribution objects have output operators defined, so you can print them. TargetValue also has a {\tt Name()} function, which returns a std::string so you can collect results. ValueDistribution has an iterator-like interface which makes it possible to walk through the Distribution. An iterator on a {\tt ValueDistribution *vd} is created like this: \begin{footnotesize} \begin{verbatim} ValueDistribution::dist_iterator it=vd->begin(); \end{verbatim} \end{footnotesize} Unfortunately, the iterator cannot be printed or used directly. It walks through a map-like structure with pairs of values, of which only the {\tt second} part is of interest to you. You may print it, or extract its {\tt Value()} (which happens to be a TargetValue pointer) or extract its {\tt Weight()}, which is a {\tt double}. Like this: \begin{footnotesize} \begin{verbatim} while ( it != vd->end() ){ cout << it->second << " has a value: "; cout << it->second->Value() << " an a weight of " << it->second->Weight() << endl; ++it; } \end{verbatim} \end{footnotesize} Printing {\tt it->second} is the same as printing the TargetValue plus its Weight. In the {\em demos}\/ directory you will find a complete example in api\_test6. {\bf Warning}: it is possible to search the Timbl code for the internal representation of the TargetValue and ValueDistribution objects, but please DON'T DO THAT. The representation might change between Timbl versions. 
\section{Classify functions: neighborSets}
A more flexible way of classifying is to use one of these functions:
\begin{footnotesize}
\begin{verbatim}
const neighborSet *classifyNS( const std::string& );
bool classifyNS( const std::string&, neighborSet& );
\end{verbatim}
\end{footnotesize}
The first function will classify an instance and return a pointer to a {\tt neighborSet} object. This object may be seen as a container which holds both distances and distributions up to a certain depth, (which is {\em at least}\/ the number of neighbors (-k option) that was used for the classifying task.) It is a const object, so you cannot directly manipulate its internals, but there are some functions defined to get useful information out of the neighborSet. Important: The neighborSet {\em will be overwritten}\/ on the next call to any of the classify functions. Be sure to get all the results out before that happens. To make life easy, a second variant can be used, which fills a neighborSet object that you provide (the same could be achieved by a copy of the result of the first function). {\bf Note}: NeighborSets can be large, and copying therefore expensive, so you should only do this if you really have to.
\subsection{How to get results from a neighborSet}
No metric functions (such as exponential decay and the like) are performed on the neighborSet. You are free to insert your own metrics, or use Timbl's built-in metrics.
\begin{footnotesize}
\begin{verbatim}
double getDistance( size_t n ) const;
double bestDistance() const;
const ValueDistribution *getDistribution( size_t n ) const;
ValueDistribution *bestDistribution( const decayStruct * ds=0, size_t n=0 ) const ;
\end{verbatim}
\end{footnotesize}
{\tt getDistance( n )} will return the distance of the neighbor(s) at n. {\tt bestDistance()} is simply {\tt getDistance(0)}. {\tt getDistribution( n )} will return the distribution of neighbor(s) at n.
{\tt bestDistribution()} will return the Weighted distribution calculated using the first n elements in the container and a metric specified by the {\tt decayStruct}. The default n=0, means: use the whole container. An empty decay struct means zeroDecay. The returned ValueDistribution object is handed to you, and you are responsible for deleting it after using it (see the previous section for more details about ValueDistributions). A decayStruct is one of:
\begin{footnotesize}
\begin{verbatim}
class zeroDecay();
class invLinDecay();
class invDistDecay();
class expDecay( double alpha );
class expDecay( double alpha, double beta );
\end{verbatim}
\end{footnotesize}
For example, to get a ValueDistribution from a neighborSet {\tt nb}, using 3 neighbors and exponential decay with alpha=0.3, you can do:
\begin{footnotesize}
\begin{verbatim}
decayStruct *dc = new expDecay(0.3);
ValueDistribution *vd = nb->bestDistribution( dc, 3 );
\end{verbatim}
\end{footnotesize}
\subsection{Useful operations on neighborSet objects}
You can print neighborSet objects:
\begin{footnotesize}
\begin{verbatim}
std::ostream& operator<<( std::ostream&, const neighborSet& );
std::ostream& operator<<( std::ostream&, const neighborSet * );
\end{verbatim}
\end{footnotesize}
You may create a neighborSet yourself, and assign and delete them:
\begin{footnotesize}
\begin{verbatim}
neighborSet();
neighborSet( const neighborSet& );
neighborSet& operator=( const neighborSet& );
~neighborSet();
\end{verbatim}
\end{footnotesize}
If you create a neighborSet, you might want to reserve space for it, to avoid needless reallocations.
Also it can be cleared, and you can ask the size (just like with normal containers): \begin{footnotesize} \begin{verbatim} void reserve( size_t ); void clear(); size_t size() const; \end{verbatim} \end{footnotesize} Two neighborSets can be merged: \begin{footnotesize} \begin{verbatim} void merge( const neighborSet& ); \end{verbatim} \end{footnotesize} A neighborSet can be truncated at a certain level. This is useful after merging neighborSets. Merging sets with depth k and n will result in a set with a depth somewhere within the range $[max(k,n), k+n]$. \begin{footnotesize} \begin{verbatim} void truncate( size_t ); \end{verbatim} \end{footnotesize} \chapter{Advanced Functions} \section{Modifying the InstanceBase} The instanceBase can be modified with the functions: \begin{footnotesize} \begin{verbatim} bool Increment( const std::string& Line ); bool Decrement( const std::string& Line ); \end{verbatim} \end{footnotesize} These functions add an Instance (as described by Line) to the InstanceBase, or remove it. This can only be done for IB1-like experiments (IB1, IB2, CV and LOO), and enforces a lot of statistical recalculations. More sophisticated are: \begin{footnotesize} \begin{verbatim} bool Expand( const std::string& File ); bool Remove( const std::string& File ); \end{verbatim} \end{footnotesize} which use the contents of File to do a bulk of Increments or Decrements, and recalculate afterwards. \section{Getting more information out of Timbl} There are a few convenience functions to get extra information on TiMBL and its behaviour: \begin{footnotesize} \begin{verbatim} bool WriteNamesFile( const std::string& f ); \end{verbatim} \end{footnotesize} Create a file which resembles a C4.5 namesfile. \begin{footnotesize} \begin{verbatim} Algorithm Algo() \end{verbatim} \end{footnotesize} Give the current algorithm as a type enum Algorithm. 
First, the declaration of the Algorithm type: \begin{footnotesize} \begin{verbatim} enum Algorithm { UNKNOWN_ALG, IB1, IB2, IGTREE, TRIBL, TRIBL2, LOO, CV }; \end{verbatim} \end{footnotesize} This can be printed with the helper function: \begin{footnotesize} \begin{verbatim} const std::string to_string( const Algorithm ) \end{verbatim} \end{footnotesize} \begin{footnotesize} \begin{verbatim} Weighting CurrentWeighting() \end{verbatim} \end{footnotesize} Gives the current weighting as a type enum Weighting. Declaration of Weighting: \begin{footnotesize} \begin{verbatim} enum Weighting { UNKNOWN_W, UD, NW, GR, IG, X2, SV }; \end{verbatim} \end{footnotesize} This can be printed with the helper function: \begin{footnotesize} \begin{verbatim} const std::string to_string( const Weighting ) \end{verbatim} \end{footnotesize} \begin{footnotesize} \begin{verbatim} Weighting CurrentWeightings( std::vector& v ) \end{verbatim} \end{footnotesize} Returns the current weighting as a type enum Weighting and also a vector v with all the current values of this weighting. \begin{footnotesize} \begin{verbatim} std::string& ExpName() \end{verbatim} \end{footnotesize} Returns the value of 'name' given at the construction of the experiment \begin{footnotesize} \begin{verbatim} static std::string VersionInfo( bool full = false ) \end{verbatim} \end{footnotesize} Returns a string containing the Version number, the Revision and the Revision string of the current API implementation. If full is true, also information about the date and time of compilation is included. \chapter{Server mode} \label{Using TiMBL as a Server} \begin{footnotesize} \begin{verbatim} bool StartServer( const int port, const int max_c ); \end{verbatim} \end{footnotesize} Starts a TimblServer on 'port' with maximally 'max\_c' concurrent connections to it. Starting a server makes sense only after the experiment is trained. 
\clearpage \chapter{Annotated example programs} \subsection{example 1, {\tt api\_test1.cxx}} \begin{footnotesize} \begin{verbatim} #include "TimblAPI.h" int main(){ TimblAPI My_Experiment( "-a IGTREE +vDI+DB+F", "test1" ); My_Experiment.SetOptions( "-w3 -vDB" ); My_Experiment.ShowSettings( std::cout ); My_Experiment.Learn( "dimin.train" ); My_Experiment.Test( "dimin.test", "my_first_test.out" ); My_Experiment.SetOptions( "-mM" ); My_Experiment.Test( "dimin.test", "my_first_test.out" ); } \end{verbatim} \end{footnotesize} Output: \begin{footnotesize} \begin{verbatim} Current Experiment Settings : FLENGTH : 0 MAXBESTS : 500 TRIBL_OFFSET : 0 INPUTFORMAT : Unknown TREE_ORDER : Unknown ALL_WEIGHTS : false WEIGHTING : x2 [Note 1] BIN_SIZE : 20 IB2_OFFSET : 0 KEEP_DISTRIBUTIONS : false DO_SLOPPY_LOO : false TARGET_POS : 18446744073709551615 DO_SILLY : false DO_DIVERSIFY : false DECAY : Z SEED : -1 BEAM_SIZE : 0 DECAYPARAM_A : 1.00000 DECAYPARAM_B : 1.00000 NORMALISATION : None NORMFACTOR : 1.00000 EXEMPLAR_WEIGHTS : false IGNORE_EXEMPLAR_WEIGHTS : true NO_EXEMPLAR_WEIGHTS_TEST : true VERBOSITY : F+DI [Note 2] EXACT_MATCH : false HASHED_TREE : true GLOBAL_METRIC : O METRICS : MVD_LIMIT : 1 NEIGHBORS : 1 PROGRESS : 100000 CLIP_FACTOR : 10 Examine datafile 'dimin.train' gave the following results: Number of Features: 12 InputFormat : C4.5 -test1-Phase 1: Reading Datafile: dimin.train -test1-Start: 0 @ Mon May 31 11:03:34 2010 -test1-Finished: 2999 @ Mon May 31 11:03:34 2010 -test1-Calculating Entropy Mon May 31 11:03:34 2010 Lines of data : 2999 DB Entropy : 1.6178929 Number of Classes : 5 Feats Vals X-square Variance InfoGain GainRatio 1 3 128.41828 0.021410184 0.030971064 0.024891536 2 50 364.75812 0.030406645 0.060860038 0.027552191 3 19 212.29804 0.017697402 0.039562857 0.018676787 4 37 449.83823 0.037499019 0.052541227 0.052620750 5 3 288.87218 0.048161417 0.074523225 0.047699231 6 61 415.64113 0.034648310 0.10604433 0.024471911 7 20 501.33465 0.041791818 0.12348668 
0.034953203 8 69 367.66021 0.030648567 0.097198760 0.043983864 9 2 169.36962 0.056475363 0.045752381 0.046816705 10 64 914.61906 0.076243669 0.21388759 0.042844587 11 18 2807.0418 0.23399815 0.66970458 0.18507018 12 43 7160.3682 0.59689631 1.2780762 0.32537181 Feature Permutation based on Chi-Squared : < 12, 11, 10, 7, 4, 6, 8, 2, 5, 3, 9, 1 > -test1-Phase 2: Building index on Datafile: dimin.train -test1-Start: 0 @ Mon May 31 11:03:34 2010 -test1-Finished: 2999 @ Mon May 31 11:03:34 2010 -test1- Phase 3: Learning from Datafile: dimin.train -test1-Start: 0 @ Mon May 31 11:03:34 2010 -test1-Finished: 2999 @ Mon May 31 11:03:34 2010 Size of InstanceBase = 148 Nodes, (5920 bytes), 99.61 % compression Examine datafile 'dimin.test' gave the following results: Number of Features: 12 InputFormat : C4.5 Starting to test, Testfile: dimin.test Writing output in: my_first_test.out Algorithm : IGTree Weighting : Chi-square Feature 1 : 128.418283576224439 Feature 2 : 364.758115277811896 Feature 3 : 212.298037236345095 Feature 4 : 449.838231470681876 Feature 5 : 288.872176256387263 Feature 6 : 415.641126446691771 Feature 7 : 501.334653478280984 Feature 8 : 367.660212489714240 Feature 9 : 169.369615106487458 Feature 10 : 914.619058199288816 Feature 11 : 2807.041753278295346 Feature 12 : 7160.368151902808677 -test1-Tested: 1 @ Mon May 31 11:03:34 2010 -test1-Tested: 2 @ Mon May 31 11:03:34 2010 -test1-Tested: 3 @ Mon May 31 11:03:34 2010 -test1-Tested: 4 @ Mon May 31 11:03:34 2010 -test1-Tested: 5 @ Mon May 31 11:03:34 2010 -test1-Tested: 6 @ Mon May 31 11:03:34 2010 -test1-Tested: 7 @ Mon May 31 11:03:34 2010 -test1-Tested: 8 @ Mon May 31 11:03:34 2010 -test1-Tested: 9 @ Mon May 31 11:03:34 2010 -test1-Tested: 10 @ Mon May 31 11:03:34 2010 -test1-Tested: 100 @ Mon May 31 11:03:34 2010 -test1-Ready: 950 @ Mon May 31 11:03:34 2010 Seconds taken: 0.1331 (7135.13 p/s) overall accuracy: 0.962105 (914/950) Examine datafile 'dimin.test' gave the following results: Number of Features: 12 
InputFormat : C4.5 Warning:-test1-Metric must be Overlap for IGTree test. [Note 3] \end{verbatim} \end{footnotesize} Notes: \begin{enumerate} \item The {\tt -w2} of the first {\tt SetOptions()} is overruled with {\tt -w3} from the second {\tt SetOptions()}, resulting in a weighting of 3 or Chi-Square. \item The first {\tt SetOptions()} sets the verbosity with {\tt +F+DI+DB}. The second {\tt SetOptions()}, however, sets the verbosity with {\tt -vDB}, and the resulting verbosity is therefore {\tt F+DI}. \item Due to the second {\tt SetOptions()}, the default metric is set to MVDM --- this is however not applicable to IGTREE. This raises a warning when we start to test. \end{enumerate} Result in my\_first\_test.out (first 20 lines): \begin{footnotesize} \begin{verbatim} =,=,=,=,=,=,=,=,+,p,e,=,T,T 6619.8512628162 =,=,=,=,+,k,u,=,-,bl,u,m,E,P 2396.8557978603 +,m,I,=,-,d,A,G,-,d,},t,J,J 6619.8512628162 -,t,@,=,-,l,|,=,-,G,@,n,T,T 6619.8512628162 -,=,I,n,-,str,y,=,+,m,E,nt,J,J 6619.8512628162 =,=,=,=,=,=,=,=,+,br,L,t,J,J 6619.8512628162 =,=,=,=,+,zw,A,=,-,m,@,r,T,T 6619.8512628162 =,=,=,=,-,f,u,=,+,dr,a,l,T,T 6619.8512628162 =,=,=,=,=,=,=,=,+,l,e,w,T,T 13780.219414719 =,=,=,=,+,tr,K,N,-,k,a,rt,J,J 6619.8512628162 =,=,=,=,+,=,o,=,-,p,u,=,T,T 3812.8095095379 =,=,=,=,=,=,=,=,+,l,A,m,E,E 3812.8095095379 =,=,=,=,=,=,=,=,+,l,A,p,J,J 6619.8512628162 =,=,=,=,=,=,=,=,+,sx,E,lm,P,P 6619.8512628162 +,l,a,=,-,d,@,=,-,k,A,st,J,J 6619.8512628162 -,s,i,=,-,f,E,r,-,st,O,k,J,J 6619.8512628162 =,=,=,=,=,=,=,=,+,sp,a,n,T,T 6619.8512628162 =,=,=,=,=,=,=,=,+,st,o,t,J,J 6619.8512628162 =,=,=,=,+,sp,a,r,-,b,u,k,J,J 6619.8512628162 +,h,I,N,-,k,@,l,-,bl,O,k,J,J 6619.8512628162 \end{verbatim} \end{footnotesize} \clearpage \subsection{example 2, {\tt api\_test2.cxx}} This demonstrates IB2 learning. 
Our example program: \begin{footnotesize} \begin{verbatim} #include "TimblAPI.h" int main(){ TimblAPI *My_Experiment = new TimblAPI( "-a IB2 +vF+DI+DB" , "test2" ); My_Experiment->SetOptions( "-b100" ); My_Experiment->ShowSettings( std::cout ); My_Experiment->Learn( "dimin.train" ); My_Experiment->Test( "dimin.test", "my_second_test.out" ); delete My_Experiment; exit(1); } \end{verbatim} \end{footnotesize} We create an experiment for the IB2 algorithm, with the {\tt -b} option set to 100, so the first 100 lines of {\tt dimin.train} will be used to bootstrap the learning, as we can see from the output: \begin{footnotesize} \begin{verbatim} Current Experiment Settings : FLENGTH : 0 MAXBESTS : 500 TRIBL_OFFSET : 0 INPUTFORMAT : Unknown TREE_ORDER : G/V ALL_WEIGHTS : false WEIGHTING : gr BIN_SIZE : 20 IB2_OFFSET : 100 KEEP_DISTRIBUTIONS : false DO_SLOPPY_LOO : false TARGET_POS : 4294967295 DO_SILLY : false DO_DIVERSIFY : false DECAY : Z SEED : -1 BEAM_SIZE : 0 DECAYPARAM_A : 1.00000 DECAYPARAM_B : 1.00000 NORMALISATION : None NORM_FACTOR : 1.00000 EXEMPLAR_WEIGHTS : false IGNORE_EXEMPLAR_WEIGHTS : true NO_EXEMPLAR_WEIGHTS_TEST : true VERBOSITY : F+DI+DB EXACT_MATCH : false HASHED_TREE : true GLOBAL_METRIC : O METRICS : MVD_LIMIT : 1 NEIGHBORS : 1 PROGRESS : 100000 CLIP_FACTOR : 10 Examine datafile 'dimin.train' gave the following results: Number of Features: 12 InputFormat : C4.5 -test2-Phase 1: Reading Datafile: dimin.train -test2-Start: 0 @ Mon May 31 11:03:34 2010 -test2-Finished: 2999 @ Mon May 31 11:03:34 2010 -test2-Calculating Entropy Mon May 31 11:03:34 2010 Lines of data : 2999 [Note 1] DB Entropy : 1.6178929 Number of Classes : 5 Feats Vals InfoGain GainRatio 1 3 0.030971064 0.024891536 2 50 0.060860038 0.027552191 3 19 0.039562857 0.018676787 4 37 0.052541227 0.052620750 5 3 0.074523225 0.047699231 6 61 0.10604433 0.024471911 7 20 0.12348668 0.034953203 8 69 0.097198760 0.043983864 9 2 0.045752381 0.046816705 10 64 0.21388759 0.042844587 11 18 0.66970458 
0.18507018 12 43 1.2780762 0.32537181 Feature Permutation based on GainRatio/Values : < 9, 5, 11, 1, 12, 7, 4, 3, 10, 8, 2, 6 > -test2-Phase 2: Learning from Datafile: dimin.train -test2-Start: 0 @ Mon May 31 11:03:34 2010 -test2-Finished: 100 @ Mon May 31 11:03:34 2010 Size of InstanceBase = 954 Nodes, (38160 bytes), 26.62 % compression -test2-Phase 2: Appending from Datafile: dimin.train (starting at line 101) -test2-Start: 101 @ Mon May 31 11:03:34 2010 -test2-Learning: 101 @ Mon May 31 11:03:34 2010 added:0 -test2-Learning: 102 @ Mon May 31 11:03:34 2010 added:0 -test2-Learning: 103 @ Mon May 31 11:03:34 2010 added:0 -test2-Learning: 104 @ Mon May 31 11:03:34 2010 added:0 -test2-Learning: 105 @ Mon May 31 11:03:34 2010 added:0 -test2-Learning: 106 @ Mon May 31 11:03:34 2010 added:0 -test2-Learning: 107 @ Mon May 31 11:03:34 2010 added:0 -test2-Learning: 108 @ Mon May 31 11:03:34 2010 added:0 -test2-Learning: 109 @ Mon May 31 11:03:34 2010 added:0 -test2-Learning: 110 @ Mon May 31 11:03:34 2010 added:0 -test2-Learning: 200 @ Mon May 31 11:03:34 2010 added:9 -test2-Learning: 1100 @ Mon May 31 11:03:34 2010 added:66 -test2-Finished: 2999 @ Mon May 31 11:03:35 2010 in total added 173 new entries [Note 2] Size of InstanceBase = 2232 Nodes, (89280 bytes), 32.40 % compression DB Entropy : 1.61789286 Number of Classes : 5 Feats Vals InfoGain GainRatio 1 3 0.03097106 0.02489154 2 50 0.06086004 0.02755219 3 19 0.03956286 0.01867679 4 37 0.05254123 0.05262075 5 3 0.07452322 0.04769923 6 61 0.10604433 0.02447191 7 20 0.12348668 0.03495320 8 69 0.09719876 0.04398386 9 2 0.04575238 0.04681670 10 64 0.21388759 0.04284459 11 18 0.66970458 0.18507018 12 43 1.27807625 0.32537181 Examine datafile 'dimin.test' gave the following results: Number of Features: 12 InputFormat : C4.5 Starting to test, Testfile: dimin.test Writing output in: my_second_test.out Algorithm : IB2 Global metric : Overlap Deviant Feature Metrics:(none) Weighting : GainRatio Feature 1 : 0.026241147173103 
Feature 2 : 0.030918769841214 Feature 3 : 0.021445836516602 Feature 4 : 0.056561885447060 Feature 5 : 0.048311436541460 Feature 6 : 0.027043360641622 Feature 7 : 0.037453180788027 Feature 8 : 0.044999091421718 Feature 9 : 0.048992032381874 Feature 10 : 0.044544230779268 Feature 11 : 0.185449683494634 Feature 12 : 0.324719540921155 -test2-Tested: 1 @ Mon May 31 11:03:35 2010 -test2-Tested: 2 @ Mon May 31 11:03:35 2010 -test2-Tested: 3 @ Mon May 31 11:03:35 2010 -test2-Tested: 4 @ Mon May 31 11:03:35 2010 -test2-Tested: 5 @ Mon May 31 11:03:35 2010 -test2-Tested: 6 @ Mon May 31 11:03:35 2010 -test2-Tested: 7 @ Mon May 31 11:03:35 2010 -test2-Tested: 8 @ Mon May 31 11:03:35 2010 -test2-Tested: 9 @ Mon May 31 11:03:35 2010 -test2-Tested: 10 @ Mon May 31 11:03:35 2010 -test2-Tested: 100 @ Mon May 31 11:03:35 2010 -test2-Ready: 950 @ Mon May 31 11:03:35 2010 Seconds taken: 0.0456 (20826.48 p/s) overall accuracy: 0.941053 (894/950), of which 15 exact matches [Note 3] There were 43 ties of which 32 (74.42%) were correctly resolved \end{verbatim} \end{footnotesize} Notes: \begin{enumerate} \item IB2 is bootstrapped with 100 lines, but for the statistics all 2999 lines are used. \item As we see here, 173 entries from the input file had a mismatch, and were therefore entered in the Instancebase. \item We see that IB2 scores 94.11 \%, compared to 96.21 \% for IGTREE in our first example. For this data, IB2 is not a good algorithm. However, it saves a lot of space, and is faster than IB1. Yet, IGTREE is both faster and better. Had we used IB1, the score would have been 96.84 \%. \end{enumerate} \clearpage \subsection{example 3, {\tt api\_test3.cxx}} This demonstrates Cross Validation. 
Let's try the following program: \begin{footnotesize} \begin{verbatim} #include "TimblAPI.h" using Timbl::TimblAPI; int main(){ TimblAPI *My_Experiment = new TimblAPI( "-t cross_validate" ); My_Experiment->Test( "cross_val.test" ); delete My_Experiment; exit(0); } \end{verbatim} \end{footnotesize} This program creates an experiment, which defaults to IB1 and because of the special option ``-t cross\_validate'' will start a CrossValidation experiment.\\ Learn() is not possible now. We must use a special form of Test(). ``cross\_val.test'' is a file with the following content: \begin{footnotesize} \begin{verbatim} small_1.train small_2.train small_3.train small_4.train small_5.train \end{verbatim} \end{footnotesize} All these files contain an equal part of a bigger dataset, and My\_Experiment will run a CrossValidation test between these files. Note that output filenames are generated and that you cannot influence that. The output of this program is: \begin{footnotesize} \begin{verbatim} Starting Cross validation test on files: small_1.train small_2.train small_3.train small_4.train small_5.train Examine datafile 'small_1.train' gave the following results: Number of Features: 8 InputFormat : C4.5 Starting to test, Testfile: small_1.train Writing output in: small_1.train.cv Algorithm : CV Global metric : Overlap Deviant Feature Metrics:(none) Weighting : GainRatio Tested: 1 @ Mon May 31 11:03:35 2010 Tested: 2 @ Mon May 31 11:03:35 2010 Tested: 3 @ Mon May 31 11:03:35 2010 Tested: 4 @ Mon May 31 11:03:35 2010 Tested: 5 @ Mon May 31 11:03:35 2010 Tested: 6 @ Mon May 31 11:03:35 2010 Tested: 7 @ Mon May 31 11:03:35 2010 Tested: 8 @ Mon May 31 11:03:35 2010 Tested: 9 @ Mon May 31 11:03:35 2010 Tested: 10 @ Mon May 31 11:03:35 2010 Ready: 10 @ Mon May 31 11:03:35 2010 Seconds taken: 0.0006 (16207.46 p/s) overall accuracy: 0.800000 (8/10) Examine datafile 'small_2.train' gave the following results: Number of Features: 8 InputFormat : C4.5 Starting to test, Testfile: 
small_2.train Writing output in: small_2.train.cv Algorithm : CV Global metric : Overlap Deviant Feature Metrics:(none) Weighting : GainRatio Tested: 1 @ Mon May 31 11:03:35 2010 Tested: 2 @ Mon May 31 11:03:35 2010 Tested: 3 @ Mon May 31 11:03:35 2010 Tested: 4 @ Mon May 31 11:03:35 2010 Tested: 5 @ Mon May 31 11:03:35 2010 Tested: 6 @ Mon May 31 11:03:35 2010 Tested: 7 @ Mon May 31 11:03:35 2010 Tested: 8 @ Mon May 31 11:03:35 2010 Tested: 9 @ Mon May 31 11:03:35 2010 Tested: 10 @ Mon May 31 11:03:35 2010 Ready: 10 @ Mon May 31 11:03:35 2010 Seconds taken: 0.0005 (19646.37 p/s) overall accuracy: 0.800000 (8/10) Examine datafile 'small_3.train' gave the following results: Number of Features: 8 InputFormat : C4.5 Starting to test, Testfile: small_3.train Writing output in: small_3.train.cv Algorithm : CV Global metric : Overlap Deviant Feature Metrics:(none) Weighting : GainRatio Tested: 1 @ Mon May 31 11:03:35 2010 Tested: 2 @ Mon May 31 11:03:35 2010 Tested: 3 @ Mon May 31 11:03:35 2010 Tested: 4 @ Mon May 31 11:03:35 2010 Tested: 5 @ Mon May 31 11:03:35 2010 Tested: 6 @ Mon May 31 11:03:35 2010 Tested: 7 @ Mon May 31 11:03:35 2010 Tested: 8 @ Mon May 31 11:03:35 2010 Tested: 9 @ Mon May 31 11:03:35 2010 Tested: 10 @ Mon May 31 11:03:35 2010 Ready: 10 @ Mon May 31 11:03:35 2010 Seconds taken: 0.0005 (20202.02 p/s) overall accuracy: 0.900000 (9/10) Examine datafile 'small_4.train' gave the following results: Number of Features: 8 InputFormat : C4.5 Starting to test, Testfile: small_4.train Writing output in: small_4.train.cv Algorithm : CV Global metric : Overlap Deviant Feature Metrics:(none) Weighting : GainRatio Tested: 1 @ Mon May 31 11:03:35 2010 Tested: 2 @ Mon May 31 11:03:35 2010 Tested: 3 @ Mon May 31 11:03:35 2010 Tested: 4 @ Mon May 31 11:03:35 2010 Tested: 5 @ Mon May 31 11:03:35 2010 Tested: 6 @ Mon May 31 11:03:35 2010 Tested: 7 @ Mon May 31 11:03:35 2010 Tested: 8 @ Mon May 31 11:03:35 2010 Tested: 9 @ Mon May 31 11:03:35 2010 Tested: 10 @ Mon May 
31 11:03:35 2010 Ready: 10 @ Mon May 31 11:03:35 2010 Seconds taken: 0.0005 (19880.72 p/s) overall accuracy: 0.800000 (8/10) Examine datafile 'small_5.train' gave the following results: Number of Features: 8 InputFormat : C4.5 Starting to test, Testfile: small_5.train Writing output in: small_5.train.cv Algorithm : CV Global metric : Overlap Deviant Feature Metrics:(none) Weighting : GainRatio Tested: 1 @ Mon May 31 11:03:35 2010 Tested: 2 @ Mon May 31 11:03:35 2010 Tested: 3 @ Mon May 31 11:03:35 2010 Tested: 4 @ Mon May 31 11:03:35 2010 Tested: 5 @ Mon May 31 11:03:35 2010 Tested: 6 @ Mon May 31 11:03:35 2010 Tested: 7 @ Mon May 31 11:03:35 2010 Tested: 8 @ Mon May 31 11:03:35 2010 Ready: 8 @ Mon May 31 11:03:35 2010 Seconds taken: 0.0004 (19093.08 p/s) overall accuracy: 1.000000 (8/8) \end{verbatim} \end{footnotesize} What has happened here? \begin{enumerate} \item TiMBL trained itself with inputfiles small\_2.train through small\_5.train (in fact using the {\tt Expand()} API call). \item Then TiMBL tested small\_1.train against the InstanceBase. \item Next, small\_2.train is removed from the database (API call {\tt Remove()} ) and small\_1.train is added. \item Then small\_2.train is tested against the InstanceBase. \item And so forth with small\_3.train $\ldots$ \end{enumerate} \clearpage \subsection{example 4, {\tt api\_test4.cxx}} This program demonstrates adding and deleting of the InstanceBase. It also proves that weights are (re)calculated correctly each time (which also explains why this is a time-consuming thing to do). After running this program, wg.1.wgt should be equal to wg.5.wgt and wg.2.wgt equal to wg.4.wgt. It is also important to note that while we do not use a weighting of X2 or SV here, only the ``simple'' weights are calculated and stored. 
Further, arr.1.arr should be equal to arr.5.arr and arr.2.arr should be equal to arr.4.arr First the program: \begin{footnotesize} \begin{verbatim} #include #include "TimblAPI.h" int main(){ TimblAPI *My_Experiment = new TimblAPI( "-a IB1 +vDI+DB +mM" , "test4" ); My_Experiment->ShowSettings( std::cout ); My_Experiment->Learn( "dimin.train" ); My_Experiment->Test( "dimin.test", "inc1.out" ); My_Experiment->SaveWeights( "wg.1.wgt" ); My_Experiment->WriteArrays( "arr.1.arr" ); My_Experiment->Increment( "=,=,=,=,+,k,e,=,-,r,@,l,T" ); My_Experiment->Test( "dimin.test", "inc2.out" ); My_Experiment->SaveWeights( "wg.2.wgt" ); My_Experiment->WriteArrays( "arr.2.arr" ); My_Experiment->Increment( "+,zw,A,rt,-,k,O,p,-,n,O,n,E" ); My_Experiment->Test( "dimin.test", "inc3.out" ); My_Experiment->SaveWeights( "wg.3.wgt" ); My_Experiment->WriteArrays( "arr.3.arr" ); My_Experiment->Decrement( "+,zw,A,rt,-,k,O,p,-,n,O,n,E" ); My_Experiment->Test( "dimin.test", "inc4.out" ); My_Experiment->SaveWeights( "wg.4.wgt" ); My_Experiment->WriteArrays( "arr.4.arr" ); My_Experiment->Decrement( "=,=,=,=,+,k,e,=,-,r,@,l,T" ); My_Experiment->Test( "dimin.test", "inc5.out" ); My_Experiment->SaveWeights( "wg.5.wgt" ); My_Experiment->WriteArrays( "arr.5.arr" ); delete My_Experiment; exit(1); } \end{verbatim} \end{footnotesize} This produces the following output: \begin{footnotesize} \begin{verbatim} Current Experiment Settings : FLENGTH : 0 MAXBESTS : 500 TRIBL_OFFSET : 0 IG_THRESHOLD : 1000 INPUTFORMAT : Unknown TREE_ORDER : G/V ALL_WEIGHTS : false WEIGHTING : gr BIN_SIZE : 20 IB2_OFFSET : 0 KEEP_DISTRIBUTIONS : false DO_SLOPPY_LOO : false TARGET_POS : 18446744073709551615 DO_SILLY : false DO_DIVERSIFY : false DECAY : Z SEED : -1 BEAM_SIZE : 0 DECAYPARAM_A : 1.00000 DECAYPARAM_B : 1.00000 NORMALISATION : None NORM_FACTOR : 1.00000 EXEMPLAR_WEIGHTS : false IGNORE_EXEMPLAR_WEIGHTS : true NO_EXEMPLAR_WEIGHTS_TEST : true VERBOSITY : DI+DB EXACT_MATCH : false HASHED_TREE : true GLOBAL_METRIC : M 
METRICS : MVD_LIMIT : 1 NEIGHBORS : 1 PROGRESS : 100000 CLIP_FACTOR : 10 Examine datafile 'dimin.train' gave the following results: Number of Features: 12 InputFormat : C4.5 -test4-Phase 1: Reading Datafile: dimin.train -test4-Start: 0 @ Mon May 31 11:03:35 2010 -test4-Finished: 2999 @ Mon May 31 11:03:35 2010 -test4-Calculating Entropy Mon May 31 11:03:35 2010 Feature Permutation based on GainRatio/Values : < 9, 5, 11, 1, 12, 7, 4, 3, 10, 8, 2, 6 > -test4-Phase 2: Learning from Datafile: dimin.train -test4-Start: 0 @ Mon May 31 11:03:35 2010 -test4-Finished: 2999 @ Mon May 31 11:03:35 2010 Size of InstanceBase = 19231 Nodes, (769240 bytes), 49.77 % compression Examine datafile 'dimin.test' gave the following results: Number of Features: 12 InputFormat : C4.5 Starting to test, Testfile: dimin.test Writing output in: inc1.out Algorithm : IB1 Global metric : Value Difference, Prestored matrix Deviant Feature Metrics:(none) Size of value-matrix[1] = 168 Bytes Size of value-matrix[2] = 968 Bytes Size of value-matrix[3] = 968 Bytes Size of value-matrix[4] = 168 Bytes Size of value-matrix[5] = 168 Bytes Size of value-matrix[6] = 1904 Bytes Size of value-matrix[7] = 1904 Bytes Size of value-matrix[8] = 504 Bytes Size of value-matrix[9] = 104 Bytes Size of value-matrix[10] = 2904 Bytes Size of value-matrix[11] = 1728 Bytes Size of value-matrix[12] = 1248 Bytes Total Size of value-matrices 12736 Bytes Weighting : GainRatio -test4-Tested: 1 @ Mon May 31 11:03:35 2010 -test4-Tested: 2 @ Mon May 31 11:03:35 2010 -test4-Tested: 3 @ Mon May 31 11:03:35 2010 -test4-Tested: 4 @ Mon May 31 11:03:35 2010 -test4-Tested: 5 @ Mon May 31 11:03:35 2010 -test4-Tested: 6 @ Mon May 31 11:03:35 2010 -test4-Tested: 7 @ Mon May 31 11:03:35 2010 -test4-Tested: 8 @ Mon May 31 11:03:35 2010 -test4-Tested: 9 @ Mon May 31 11:03:35 2010 -test4-Tested: 10 @ Mon May 31 11:03:35 2010 -test4-Tested: 100 @ Mon May 31 11:03:35 2010 -test4-Ready: 950 @ Mon May 31 11:03:35 2010 Seconds taken: 0.0791 
(12003.74 p/s) overall accuracy: 0.964211 (916/950), of which 62 exact matches There were 6 ties of which 6 (100.00%) were correctly resolved -test4-Saving Weights in wg.1.wgt -test4-Saving Probability Arrays in arr.1.arr Examine datafile 'dimin.test' gave the following results: Number of Features: 12 InputFormat : C4.5 Starting to test, Testfile: dimin.test Writing output in: inc2.out Algorithm : IB1 Global metric : Value Difference, Prestored matrix Deviant Feature Metrics:(none) Size of value-matrix[1] = 168 Bytes Size of value-matrix[2] = 968 Bytes Size of value-matrix[3] = 968 Bytes Size of value-matrix[4] = 168 Bytes Size of value-matrix[5] = 168 Bytes Size of value-matrix[6] = 1904 Bytes Size of value-matrix[7] = 1904 Bytes Size of value-matrix[8] = 504 Bytes Size of value-matrix[9] = 104 Bytes Size of value-matrix[10] = 2904 Bytes Size of value-matrix[11] = 1728 Bytes Size of value-matrix[12] = 1248 Bytes Total Size of value-matrices 12736 Bytes Weighting : GainRatio -test4-Tested: 1 @ Mon May 31 11:03:35 2010 -test4-Tested: 2 @ Mon May 31 11:03:35 2010 -test4-Tested: 3 @ Mon May 31 11:03:35 2010 -test4-Tested: 4 @ Mon May 31 11:03:35 2010 -test4-Tested: 5 @ Mon May 31 11:03:35 2010 -test4-Tested: 6 @ Mon May 31 11:03:35 2010 -test4-Tested: 7 @ Mon May 31 11:03:35 2010 -test4-Tested: 8 @ Mon May 31 11:03:35 2010 -test4-Tested: 9 @ Mon May 31 11:03:35 2010 -test4-Tested: 10 @ Mon May 31 11:03:35 2010 -test4-Tested: 100 @ Mon May 31 11:03:35 2010 -test4-Ready: 950 @ Mon May 31 11:03:35 2010 Seconds taken: 0.0866 (10965.92 p/s) overall accuracy: 0.964211 (916/950), of which 62 exact matches There were 6 ties of which 6 (100.00%) were correctly resolved -test4-Saving Weights in wg.2.wgt -test4-Saving Probability Arrays in arr.2.arr Examine datafile 'dimin.test' gave the following results: Number of Features: 12 InputFormat : C4.5 Starting to test, Testfile: dimin.test Writing output in: inc3.out Algorithm : IB1 Global metric : Value Difference, Prestored matrix 
Deviant Feature Metrics:(none) Size of value-matrix[1] = 168 Bytes Size of value-matrix[2] = 968 Bytes Size of value-matrix[3] = 968 Bytes Size of value-matrix[4] = 168 Bytes Size of value-matrix[5] = 168 Bytes Size of value-matrix[6] = 1904 Bytes Size of value-matrix[7] = 1904 Bytes Size of value-matrix[8] = 504 Bytes Size of value-matrix[9] = 104 Bytes Size of value-matrix[10] = 2904 Bytes Size of value-matrix[11] = 1728 Bytes Size of value-matrix[12] = 1248 Bytes Total Size of value-matrices 12736 Bytes Weighting : GainRatio -test4-Tested: 1 @ Mon May 31 11:03:35 2010 -test4-Tested: 2 @ Mon May 31 11:03:35 2010 -test4-Tested: 3 @ Mon May 31 11:03:35 2010 -test4-Tested: 4 @ Mon May 31 11:03:35 2010 -test4-Tested: 5 @ Mon May 31 11:03:35 2010 -test4-Tested: 6 @ Mon May 31 11:03:35 2010 -test4-Tested: 7 @ Mon May 31 11:03:35 2010 -test4-Tested: 8 @ Mon May 31 11:03:35 2010 -test4-Tested: 9 @ Mon May 31 11:03:35 2010 -test4-Tested: 10 @ Mon May 31 11:03:35 2010 -test4-Tested: 100 @ Mon May 31 11:03:35 2010 -test4-Ready: 950 @ Mon May 31 11:03:35 2010 Seconds taken: 0.0740 (12844.09 p/s) overall accuracy: 0.964211 (916/950), of which 62 exact matches There were 6 ties of which 6 (100.00%) were correctly resolved -test4-Saving Weights in wg.3.wgt -test4-Saving Probability Arrays in arr.3.arr Examine datafile 'dimin.test' gave the following results: Number of Features: 12 InputFormat : C4.5 Starting to test, Testfile: dimin.test Writing output in: inc4.out Algorithm : IB1 Global metric : Value Difference, Prestored matrix Deviant Feature Metrics:(none) Size of value-matrix[1] = 168 Bytes Size of value-matrix[2] = 968 Bytes Size of value-matrix[3] = 968 Bytes Size of value-matrix[4] = 168 Bytes Size of value-matrix[5] = 168 Bytes Size of value-matrix[6] = 1904 Bytes Size of value-matrix[7] = 1904 Bytes Size of value-matrix[8] = 504 Bytes Size of value-matrix[9] = 104 Bytes Size of value-matrix[10] = 2904 Bytes Size of value-matrix[11] = 1728 Bytes Size of 
value-matrix[12] = 1248 Bytes Total Size of value-matrices 12736 Bytes Weighting : GainRatio -test4-Tested: 1 @ Mon May 31 11:03:36 2010 -test4-Tested: 2 @ Mon May 31 11:03:36 2010 -test4-Tested: 3 @ Mon May 31 11:03:36 2010 -test4-Tested: 4 @ Mon May 31 11:03:36 2010 -test4-Tested: 5 @ Mon May 31 11:03:36 2010 -test4-Tested: 6 @ Mon May 31 11:03:36 2010 -test4-Tested: 7 @ Mon May 31 11:03:36 2010 -test4-Tested: 8 @ Mon May 31 11:03:36 2010 -test4-Tested: 9 @ Mon May 31 11:03:36 2010 -test4-Tested: 10 @ Mon May 31 11:03:36 2010 -test4-Tested: 100 @ Mon May 31 11:03:36 2010 -test4-Ready: 950 @ Mon May 31 11:03:36 2010 Seconds taken: 0.0727 (13075.49 p/s) overall accuracy: 0.964211 (916/950), of which 62 exact matches There were 6 ties of which 6 (100.00%) were correctly resolved -test4-Saving Weights in wg.4.wgt -test4-Saving Probability Arrays in arr.4.arr Examine datafile 'dimin.test' gave the following results: Number of Features: 12 InputFormat : C4.5 Starting to test, Testfile: dimin.test Writing output in: inc5.out Algorithm : IB1 Global metric : Value Difference, Prestored matrix Deviant Feature Metrics:(none) Size of value-matrix[1] = 168 Bytes Size of value-matrix[2] = 968 Bytes Size of value-matrix[3] = 968 Bytes Size of value-matrix[4] = 168 Bytes Size of value-matrix[5] = 168 Bytes Size of value-matrix[6] = 1904 Bytes Size of value-matrix[7] = 1904 Bytes Size of value-matrix[8] = 504 Bytes Size of value-matrix[9] = 104 Bytes Size of value-matrix[10] = 2904 Bytes Size of value-matrix[11] = 1728 Bytes Size of value-matrix[12] = 1248 Bytes Total Size of value-matrices 12736 Bytes Weighting : GainRatio -test4-Tested: 1 @ Mon May 31 11:03:36 2010 -test4-Tested: 2 @ Mon May 31 11:03:36 2010 -test4-Tested: 3 @ Mon May 31 11:03:36 2010 -test4-Tested: 4 @ Mon May 31 11:03:36 2010 -test4-Tested: 5 @ Mon May 31 11:03:36 2010 -test4-Tested: 6 @ Mon May 31 11:03:36 2010 -test4-Tested: 7 @ Mon May 31 11:03:36 2010 -test4-Tested: 8 @ Mon May 31 11:03:36 2010 
-test4-Tested: 9 @ Mon May 31 11:03:36 2010 -test4-Tested: 10 @ Mon May 31 11:03:36 2010 -test4-Tested: 100 @ Mon May 31 11:03:36 2010 -test4-Ready: 950 @ Mon May 31 11:03:36 2010 Seconds taken: 0.0732 (12975.31 p/s) overall accuracy: 0.964211 (916/950), of which 62 exact matches There were 6 ties of which 6 (100.00%) were correctly resolved -test4-Saving Weights in wg.5.wgt -test4-Saving Probability Arrays in arr.5.arr \end{verbatim} \end{footnotesize} \clearpage \subsection{example 5, {\tt api\_test5.cxx}} This program demonstrates the use of neighborSets to classify and store results. It also demonstrates some neighborSet basics. \begin{footnotesize} \begin{verbatim} #include #include #include "TimblAPI.h" using std::endl; using std::cout; using std::string; using namespace Timbl; int main(){ TimblAPI *My_Experiment = new TimblAPI( "-a IB1 +vDI+DB+n +mM +k4 " , "test5" ); My_Experiment->Learn( "dimin.train" ); { string line = "=,=,=,=,+,k,e,=,-,r,@,l,T"; const neighborSet *neighbours1 = My_Experiment->classifyNS( line ); if ( neighbours1 ){ cout << "Classify OK on " << line << endl; cout << neighbours1; } else cout << "Classify failed on " << line << endl; neighborSet neighbours2; line = "+,zw,A,rt,-,k,O,p,-,n,O,n,E"; if ( My_Experiment->classifyNS( line, neighbours2 ) ){ cout << "Classify OK on " << line << endl; cout << neighbours2; } else cout << "Classify failed on " << line << endl; line = "+,z,O,n,-,d,A,xs,-,=,A,rm,P"; const neighborSet *neighbours3 = My_Experiment->classifyNS( line ); if ( neighbours3 ){ cout << "Classify OK on " << line << endl; cout << neighbours3; } else cout << "Classify failed on " << line << endl; neighborSet uit2; { neighborSet uit; uit.setShowDistance(true); uit.setShowDistribution(true); cout << " before first merge " << endl; cout << uit; uit.merge( *neighbours1 ); cout << " after first merge " << endl; cout << uit; uit.merge( *neighbours3 ); cout << " after second merge " << endl; cout << uit; uit.merge( neighbours2 ); cout << 
" after third merge " << endl; cout << uit; uit.truncate( 3 ); cout << " after truncate " << endl; cout << uit; cout << " test assignment" << endl; uit2 = *neighbours1; } cout << "assignment result: " << endl; cout << uit2; { cout << " test copy construction" << endl; neighborSet uit(uit2); cout << "result: " << endl; cout << uit; } cout << "almost done!" << endl; } delete My_Experiment; cout << "done!" << endl; } \end{verbatim} \end{footnotesize} Its expected output is (without further comment): \begin{footnotesize} \begin{verbatim} Examine datafile 'dimin.train' gave the following results: Number of Features: 12 InputFormat : C4.5 -test5-Phase 1: Reading Datafile: dimin.train -test5-Start: 0 @ Mon May 31 11:03:36 2010 -test5-Finished: 2999 @ Mon May 31 11:03:36 2010 -test5-Calculating Entropy Mon May 31 11:03:36 2010 Feature Permutation based on GainRatio/Values : < 9, 5, 11, 1, 12, 7, 4, 3, 10, 8, 2, 6 > -test5-Phase 2: Learning from Datafile: dimin.train -test5-Start: 0 @ Mon May 31 11:03:36 2010 -test5-Finished: 2999 @ Mon May 31 11:03:36 2010 Size of InstanceBase = 19231 Nodes, (769240 bytes), 49.77 % compression Classify OK on =,=,=,=,+,k,e,=,-,r,@,l,T # k=1 { T 1.00000 } 0.0000000000000 # k=2 { T 1.00000 } 0.0031862902473388 # k=3 { T 1.00000 } 0.0034182315118303 # k=4 { T 1.00000 } 0.0037433772844615 Classify OK on +,zw,A,rt,-,k,O,p,-,n,O,n,E # k=1 { E 1.00000 } 0.0000000000000 # k=2 { E 1.00000 } 0.056667880327190 # k=3 { E 1.00000 } 0.062552636617742 # k=4 { E 1.00000 } 0.064423860361889 Classify OK on +,z,O,n,-,d,A,xs,-,=,A,rm,P # k=1 { P 1.00000 } 0.059729836255170 # k=2 { P 1.00000 } 0.087740769132651 # k=3 { P 1.00000 } 0.088442788919723 # k=4 { P 1.00000 } 0.097058649951429 before first merge after first merge # k=1 { P 1.00000 } 0.059729836255170 # k=2 { P 1.00000 } 0.087740769132651 # k=3 { P 1.00000 } 0.088442788919723 # k=4 { P 1.00000 } 0.097058649951429 after second merge # k=1 { P 2.00000 } 0.059729836255170 # k=2 { P 2.00000 } 
0.087740769132651 # k=3 { P 2.00000 } 0.088442788919723 # k=4 { P 2.00000 } 0.097058649951429 after third merge # k=1 { E 1.00000 } 0.0000000000000 # k=2 { E 1.00000 } 0.056667880327190 # k=3 { P 2.00000 } 0.059729836255170 # k=4 { E 1.00000 } 0.062552636617742 # k=5 { E 1.00000 } 0.064423860361889 # k=6 { P 2.00000 } 0.087740769132651 # k=7 { P 2.00000 } 0.088442788919723 # k=8 { P 2.00000 } 0.097058649951429 after truncate # k=1 { E 1.00000 } 0.0000000000000 # k=2 { E 1.00000 } 0.056667880327190 # k=3 { P 2.00000 } 0.059729836255170 test assignment assignment result: # k=1 { P 1.00000 } 0.059729836255170 # k=2 { P 1.00000 } 0.087740769132651 # k=3 { P 1.00000 } 0.088442788919723 # k=4 { P 1.00000 } 0.097058649951429 test copy construction result: # k=1 { P 1.00000 } 0.059729836255170 # k=2 { P 1.00000 } 0.087740769132651 # k=3 { P 1.00000 } 0.088442788919723 # k=4 { P 1.00000 } 0.097058649951429 almost done! done! \end{verbatim} \end{footnotesize} \clearpage \subsection{example 6, {\tt api\_test6.cxx}} This program demonstrates the use of ValueDistributions, TargetValues and neighborSets for classification. 
\begin{footnotesize} \begin{verbatim} #include #include "TimblAPI.h" using std::cout; using std::endl; using namespace Timbl; int main(){ TimblAPI My_Experiment( "-a IB1 +vDI+DB -k3", "test6" ); My_Experiment.Learn( "dimin.train" ); const ValueDistribution *vd; const TargetValue *tv = My_Experiment.Classify( "-,=,O,m,+,h,K,=,-,n,I,N,K", vd ); cout << "resulting target: " << tv << endl; cout << "resulting Distribution: " << vd << endl; ValueDistribution::dist_iterator it=vd->begin(); while ( it != vd->end() ){ cout << it->second << " OR "; cout << it->second->Value() << " " << it->second->Weight() << endl; ++it; } cout << "the same with neighborSets" << endl; const neighborSet *nb = My_Experiment.classifyNS( "-,=,O,m,+,h,K,=,-,n,I,N,K" ); ValueDistribution *vd2 = nb->bestDistribution(); cout << "default answer " << vd2 << endl; decayStruct *dc = new expDecay(0.3); delete vd2; vd2 = nb->bestDistribution( dc ); delete dc; cout << "with exponenial decay, alpha = 0.3 " << vd2 << endl; delete vd2; } \end{verbatim} \end{footnotesize} This is the output produced: \begin{footnotesize} \begin{verbatim} Examine datafile 'dimin.train' gave the following results: Number of Features: 12 InputFormat : C4.5 -test6-Phase 1: Reading Datafile: dimin.train -test6-Start: 0 @ Mon May 31 11:03:36 2010 -test6-Finished: 2999 @ Mon May 31 11:03:36 2010 -test6-Calculating Entropy Mon May 31 11:03:36 2010 Feature Permutation based on GainRatio/Values : < 9, 5, 11, 1, 12, 7, 4, 3, 10, 8, 2, 6 > -test6-Phase 2: Learning from Datafile: dimin.train -test6-Start: 0 @ Mon May 31 11:03:36 2010 -test6-Finished: 2999 @ Mon May 31 11:03:36 2010 Size of InstanceBase = 19231 Nodes, (769240 bytes), 49.77 % compression resulting target: K resulting Distribution: { E 1.00000, K 7.00000 } E 1 OR E 1 K 7 OR K 7 the same with neighborSets default answer { E 1.00000, K 7.00000 } with exponenial decay, alpha = 0.3 { E 0.971556, K 6.69810 } \end{verbatim} \end{footnotesize} \end{document} 
LanguageMachines-timbl-642727d/docs/texfiles/Timbl_6.3_Manual.tex000066400000000000000000004776231451477526200245620ustar00rootroot00000000000000% TiMBL 6.3 manual \documentclass{report} \usepackage{epsf} \usepackage{epsfig} \usepackage{a4wide} \usepackage{palatino} \usepackage{fullname} \usepackage{url} \newcommand{\chisq}{{$ \chi^2 $}} \author{Walter Daelemans* \and Jakub Zavrel*$\dagger$ \and Ko van der Sloot \and Antal van den Bosch\\ \ \\ Induction of Linguistic Knowledge Research Group\\ Tilburg centre for Cognition and Communication \\ Tilburg University \\ \\ (*) CLiPS - Computational Linguistics Group\\ Department of Linguistics \\ University of Antwerp\\ \\ ($\dagger$) Textkernel B.V.\\ \\ P.O. Box 90153, NL-5000 LE, Tilburg, The Netherlands \\ URL: http://ilk.uvt.nl\thanks{This document is available from http://ilk.uvt.nl/downloads/pub/papers/ilk.1001.pdf. All rights reserved Induction of Linguistic Knowledge, Tilburg University and CLiPS, University of Antwerp.}} \title{{\huge TiMBL: Tilburg Memory-Based Learner} \\ \vspace*{0.5cm} {\bf version 6.3} \\ \vspace*{0.5cm}{\huge Reference Guide}\\ \vspace*{1cm} {\it ILK Technical Report -- ILK 10-01}} %better paragraph indentation \parindent 0pt \parskip 9pt \begin{document} \pagenumbering{roman} \maketitle \tableofcontents \chapter*{Preface} Memory-Based Learning ({\sc mbl}) is an elegantly simple and robust machine-learning method applicable to a wide range of tasks in Natural Language Processing (NLP). In our research group at Tilburg University, we have been working since the end of the 1980s on the development of Memory-Based Learning techniques and algorithms. The foundations are bundled in \namecite{Daelemans+05}. Section~\ref{furtherreading} provides a historical overview of work on the application of {\sc mbl} in NLP. 
With the establishment of the ILK (Induction of Linguistic Knowledge) research group in 1997, and with the increasing use of {\sc mbl} at the CNTS (now CLiPS) research group of the University of Antwerp, the need for a well-coded and uniform tool for our main algorithms became more urgent. TiMBL was the result of combining ideas from a number of different {\sc mbl} implementations, cleaning up the interface, and using a whole bag of tricks to make it more efficient. We think it has become a useful tool for NLP research, and, for that matter, for many other domains where classification tasks are learned from examples, so we started to release the software in 1999. With the release of the sixth version of TiMBL we moved to releasing our software under the GPL license, for anyone to use under the conditions stated in the license. Memory-Based Learning is a direct descendant of the classical $k$-Nearest Neighbor ($k$-NN) approach to classification, which has become known as a powerful pattern classification algorithm for numeric data. In typical NLP learning tasks, however, the focus is on discrete data, very large numbers of examples, and many attributes of differing relevance. Moreover, classification speed is a critical issue in any realistic application of Memory-Based Learning. These constraints demand non-trivial data-structures and speedup optimizations for the core $k$-NN classifier. Our approach has resulted in an architecture which compresses the typical flat file organization found in straightforward $k$-NN implementations, into a decision-tree structure. While the decision tree can be used to retrieve the exact $k$-nearest neighbors (as happens in the {\sc ib1} algorithm within TiMBL), it can also be deterministically traversed as in a decision-tree classifier (the method adopted by the {\sc igtree} algorithm). We believe that our optimizations make TiMBL one of the fastest discrete $k$-NN implementations around. 
The main effort in the development and maintenance of this software was and continues to be invested by Ko van der Sloot. The code started as a rewrite of {\tt nibl}, a piece of software developed by Peter Berck from a Common Lisp implementation by Walter Daelemans of {\sc ib1-ig}. Some of the index optimizations in TiMBL are due to Jakub Zavrel. The code has benefited substantially from trial, error and scrutiny by all past and present members of the ILK and CLiPS (formerly CNTS) groups in Tilburg and Antwerp. We are furthermore indebted to Ton Weijters of Eindhoven Technical University for his inspirational early work on $k$-NN and for his involvements in {\sc igtree}. Our sincere thanks go to the many users of TiMBL who have contributed to it immensely by giving us feedback and reporting bugs, and to the two organisations that have supported and enabled its development: NWO, the Netherlands Organization for Scientific Research, and the School of Humanities of Tilburg University. NWO funding has spanned three subsequent periods. From 1997 until 2001 development was part of the ``Induction of Linguistic Knowledge'' research programme, partially funded by the Netherlands Organization for Scientific Research (NWO) and Tilburg University. Between 2001 and 2006 it was funded as part of the ``Memory Models of Language'' research project under the NWO {\em Vernieuwingsimpuls}\/ programme, and since 2006 it is funded as part of the ``Implicit Linguistics'' research project under the NWO Vici programme. The current release (version 6.3) succeeds major release 6.2. The most significant change is that all {\em server}-related functionality is moved to a separate TimblServer package. An elaborate description of the changes from version 1.0 up to 6.3 can be found in Chapter~\ref{changes}. Although all new features have been tested for some time in our research groups, the software may still contain bugs and inconsistencies in some places. 
We would appreciate it if you would send bug reports, ideas about enhancements of the software and the manual, and any other comments you might have, to {\tt Timbl@uvt.nl}. This reference guide is structured as follows. In Chapter~\ref{license} you can find the terms of the license according to which you are allowed to use TiMBL. The subsequent chapter gives some instructions on how to install the TiMBL package on your computer. Chapter~\ref{changes} lists the changes that have taken place up to the current version. Next, Chapter~\ref{tutorial} offers a quick-start tutorial for readers who want to get to work with TiMBL right away. The tutorial describes, step-by-step, a case study with a sample data set (included with the software) representing the linguistic domain of predicting the diminutive inflection of Dutch nouns. Readers who are interested in the theoretical and technical details of Memory-Based Learning and of this implementation can refer to Chapter~\ref{algorithms}. Chapter~\ref{reference} provides full reference to the command line options of TiMBL and supported file formats. \chapter{GNU General Public License} \label{license} \pagenumbering{arabic} TiMBL is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. TiMBL is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with TiMBL. If not, see $<$http://www.gnu.org/licenses/$>$. In publication of research that makes use of TiMBL 6.3, a citation should be given of: {\em ``Walter Daelemans, Jakub Zavrel, Ko van der Sloot, and Antal van den Bosch (2009). 
TiMBL: Tilburg Memory Based Learner, version 6.3, Reference Guide. ILK Technical Report 10-01 Available from \\ {\tt http://ilk.uvt.nl/downloads/pub/papers/ilk1001.pdf}''} For information about commercial licenses for TiMBL 6.3, contact {\tt Timbl@uvt.nl}, or send your request in writing to: Prof. dr.~Walter Daelemans\\ CLiPS - Language Technology Group\\ Dept. of Linguistics \\ University of Antwerp\\ Prinsstraat 13, L-203, B-2000 Antwerp \\ Belgium \pagestyle{headings} \chapter{Installation} \vspace{-1cm} You can get the TiMBL package as a gzipped tar archive from: {\tt http://ilk.uvt.nl/timbl} Following the links from that page, you can download the file {\tt timbl-6.3.tar.gz}. This file contains the complete source code (C++) for the TiMBL program, a few sample data sets, the license, and documentation. The installation should be relatively straightforward on most UNIX systems. To install the package on your computer, unzip the downloaded file ({\tt >} is the command line prompt): {\tt > tar xfz timbl-6.3.0.tar.gz} This will make a directory {\tt timbl-6.3.0} under your current directory. Alternatively you can do: {\tt > gunzip timbl-6.3.0.tar.gz} and unpack the tar archive: {\tt > tar xf timbl-6.3.0.tar} Go to the timbl-6.3.0 directory, and configure the package by typing {\tt > cd timbl-6.3.0} \\ {\tt > ./configure --prefix=} If you do not use the {\tt --prefix} option, TiMBL will try to install itself in the directory {\tt /usr/local/}. If you do not have {\tt root} access you can specify a different installation location such as {\tt \$HOME/install} It is not obligatory to install TiMBL, but if you plan to install TiMBL-based extensions such as TimblServer\footnote{\url{http://ilk.uvt.nl/timbl}}, Mbt\footnote{\url{http://ilk.uvt.nl/mbt}}, Dimbl\footnote{\url{http://ilk.uvt.nl/dimbl}}, or Tadpole\footnote{\url{http://ilk.uvt.nl/tadpole}}, or you want to build your own extensions using the TiMBL API, installing is the best choice. 
After {\tt configure} you can build TiMBL: {\tt > make} and (as recommended) install: {\tt > make install } If the process was completed successfully, you should now have an executable file named {\tt Timbl} in the directory {\tt /bin}, and a static library {\tt libTimbl.a} in the directory {\tt /lib}. Additionally, several demo programs named {\tt api\_test*}, {\tt classify} and {\tt tse} are created in the {\tt ./demos} subdirectory. Within the {\tt } directory a subdirectory is also created: {\tt share/doc/timbl} where the TiMBL 6.3 documentation can be found, and which in turn contains a subdirectory {\tt examples} with example data files. Some of these data sets are used in the Quick Start Section~\ref{tutorial} of this document; other data and source files are referred to in the API documentation. The latter, along with a pdf version of this document, can also be found in the {\tt doc} directory. Note that the API documentation is a beta-state document. TiMBL should now be ready for use. If you want to run the examples and demos from this manual, you should act as follows: \begin{itemize} \item Be sure to add {\tt /bin} to your PATH. In many shells something like {\tt > export PATH=\$PATH:/bin } will do. \item copy all the files from {\tt /share/doc/timbl/examples} to some working location. (By default, TiMBL writes its results to the directory where it finds the data.) \item and test: {\tt cd} to the working location, and then {\tt Timbl -f dimin.train -t dimin.test} \end{itemize} If you did not install TiMBL, the executable can be found in the {\tt src} directory of the build. The demo files can be found in the {\tt demo} directory. The e-mail address for problems with the installation, bug reports, comments and questions is {\tt Timbl@uvt.nl}. \chapter{Changes} \label{changes} This chapter gives a brief overview of the changes from all previously released versions (1.0 up to 6.3) for users already familiar with the program. 
\section{From version 6.2 to 6.3} \begin{itemize} \item All server-related functionality is removed from Timbl. A new TimblServer package is available which provides the same interface as Timbl up to version 6.3, but also adds some extra features, such as running multiple but separate experiments on one TCP port. See the TimblServer package for more details. \item Starting with Timbl 6.3 we support installable packages for Debian and Ubuntu (.deb), RedHat (.rpm) and MacOSX (Fink)\footnote{\url{http://ilk.uvt.nl/timbl-packages}}. \item Some bugs and inconsistencies have been fixed. \end{itemize} \section{From version 6.1 to 6.2} Version 6.2 differs from 6.1 in a great number of internal changes aimed at making the code better maintainable and extendible, in some minor bug fixes, and in the following more prominent changes: \begin{itemize} \item A new distance metric, the Dice coefficient, has been added; the metric can be set with {\tt -mDC}. Analogous to the Levenshtein ({\tt -mL}) metric, the Dice coefficient operates at the feature value level; it computes the overlap in character bigrams of two value strings. \item Value difference matrices, as used by the {\sc mvdm} and Jeffrey divergence distance metrics, can now be written to file, and read into TiMBL, allowing for user-defined value difference metrics to be used. The new command line options are {\tt --matrixout=} and {\tt --matrixin=}. \item The {\sc IGTree} algorithm has been optimized beyond the improvements introduced in version 6.0. With very large training sets, {\sc IGTree} was reported to be exponentially slower in the later stages of training. Trees are now built in near-linear time. \end{itemize} Finally, besides minor bug fixes, a great number of internal changes were made to make the code better maintainable and extendible. \section{From version 6.0 to 6.1} Version 6.1 differs from 6.0 mainly in the changed configuration. It is now based on autotools and is delivered as an installable package. 
Some bugs have been fixed as well. \section{From version 5.1 to 6.0} Version 6.0 differs from 5.1 firstly in terms of internal changes aimed at increasing classification speed and lowering memory usage, of which the most prominent are \begin{itemize} \item The {\sc IGTree} algorithm has been optimized. Learning has been made more memory-lean, while classification has been optimized so that it is now orders of magnitude faster than before on most data sets. \item {\sc Mvdm} matrices are partly prestored; only the {\sc mvdm} values of pairs of frequent values are precomputed. The threshold frequency $n$ can now be determined with {\tt -c n}. This way memory can be traded for speed, up to a point. The default value 10 remains the recommended one. \end{itemize} Also, two metrics and several verbosity options and other command-line switches are added: \begin{itemize} \item Two distance metrics are added: {\tt -mC} sets the Cosine metric, and {\tt -mL} sets the Levenshtein metric. The latter metric operates at the feature-value level, and thus offers an alternative to the all-or-nothing Overlap metric for string-valued features. \item Class distribution output generated with {\tt +v db} can be normalized so that they add to $1.0$, with the additional {\tt -G} option (or {\tt -G0}). As a simple smoothing option, with {\tt -G1:double} all class votes are incremented by {\tt double} before normalization. For example, {\tt -G1:1} (or {\tt -G1} for short) is ``add one''-smoothing; {\tt -G1:0.5} adds $0.5$ to all class votes. \item With {\tt -Beam=n} (from version 6.2 onwards: {\tt --Beam=n}), where $n$ is an integer, the {\tt +v db} output is constrained to the $n$ classes receiving the highest votes. This special limit is useful in cases in which the {\tt +v db} output, typically used for further processing, generates far too much output in its default unconstrained setting. 
\item Class distributions are not stored on non-terminal nodes with {\sc IGTree} and {\sc tribl} by default. To revert this default, e.g. to be able to use {\tt +v db} with {\sc IGTree}, the setting {\tt +D} can be used. \item With {\tt -T n}, the user can specify that the $n$th column in the training set of labeled examples contains the label to be predicted, while all other columns represent the input features. By default, the last column is assumed to contain the class labels. \item After classification, TiMBL reports its classification speed at microsecond precision instead of in seconds. \item The verbosity option {\tt +v md} displays the level at which a classification was made by {\sc IGTree} ({\tt -a1}), and whether the class label was obtained from a leaf node or an end node. \item With {\tt -X [file]}, TiMBL dumps its internal TiMBL tree into a file containing an XML tree. This option is analogous to {\tt -I [file]}, which prints a TiMBL tree in TiMBL's proprietary format, the difference being that the latter format can be read into TiMBL again. \item Several minor bugs have been resolved. \end{itemize} \section{From version 5.0 to 5.1} Version 5.1 adds speed and memory improvements that are notable with datasets that have very large amounts of examples, features, feature values, or classes (and, especially, combinations of those). Previous versions exhibited exponential slowdown in some worst cases; this has been largely countered. On the outside, TiMBL has been updated in the following aspects: \begin{itemize} \item TiMBL offers extended performance reporting: next to accuracy it reports on micro and macro-averages of F-score and AUC (area under the ROC-curve) with {\tt +v as}. Optionally, it also shows each individual class' precision, recall (or true positive rate), and false positive rate with {\tt +v cs}. 
\item TiMBL always uses gain ratio feature weighting as the default case, if not specified by the user, also with the {\sc mvdm} and Jeffrey Divergence similarity metrics. \item Two additional feature orderings for the internal TiMBL trees are added, {\tt -TGxE} and {\tt -TIxE} (gain ratio $\times$ entropy and information gain $\times$ entropy, respectively) to potentially tackle the problem of unbalanced trees. \item Bugs in leave-one-out testing with numeric features and with exemplar weighting were fixed. \end{itemize} \section{From version 4.3 to 5.0} Version 5.0 is the conclusion of a number of recodings (mostly involving more generic treatment of variables to improve robustness, but also the removal of inverted indexing on the internal tree representation) that have changed the internals of TiMBL considerably. On the outside, TiMBL displays the following new characteristics: \begin{itemize} \item Next to the Overlap, {\sc mvdm}, and Numeric distance functions, TiMBL now features the Jeffrey divergence distance function and the Dot-product distance function. \item The exponential-decay distance weighting function can be set using a second parameter, which can change the shape of the function from normal exponential to bell-shaped. \item In addition to the ``binary'' format, TiMBL can now read a more generic sparse data format. This format allows instances to be coded by tuples of $<$ feature number, feature value $>$ where the value can be symbolic or numeric rather than only binary. \item Tree files generated by TiMBL versions 1.*, 2.* and 3.* are no longer supported. \item The command line interface has had the following additions, including the ones reflecting the above changes: \begin{itemize} \item {\tt -m J} activates the Jeffrey divergence distance metric. \item {\tt -m D} activates the Dot-product distance metric. \item {\tt -d ED::} (without whitespace) sets the $\alpha$ and new $\beta$ parameters. 
If unspecified, as in {\tt -d ED:} or the older (deprecated) {\tt -d ED }, $\beta$ is set to $1.0$. \item {\tt -F Sparse} declares that training and test files are in the sparse $<$ feature number, feature value $>$ tuple-format described in more detail in section~\ref{commandline}. \item {\tt +v k} is a new verbosity option that prints all class distributions per $k$-nearest distance per classified instance in the output file. It works analogous to the {\tt +v n} option, but does not print the neighbors themselves. \end{itemize} \end{itemize} \section{From version 3.0 to 4.3} As the last upgrade of the version 4 strain, version 4.3 added some command line functionality and internal code changes to version 4.2. Minor progressive changes from 4.0 to 4.3 are found at the bottom of this list and are marked as such. \begin{itemize} \item Distance weighting of the $k$ nearest neighbors. This classical exemplar weighting scheme \cite{Dudani76} allows closer nearest neighbors in the $k$ to have a more prominent vote in classification. TiMBL incorporates linear, inversed, and exponential distance weighting. \item Incremental edited memory-based learning with {\sc ib2} \cite{Aha+91}. This incremental version of {\sc ib1} adds instances to memory only when those instances are misclassified by the then-current set of instances in memory. \item Frequency-filtered {\sc mvdm} distance metric. The option, which is not selected by default, is an add-on of the {\sc mvdm} metric, that backs off from the {\sc mvdm} metric to the Overlap distance function whenever one or both in a pair of matched values occurs fewer times in the training material than a user-determined threshold. \item {\sc tribl2}. The {\sc tribl2} algorithm has been implemented as an additional trade-off between {\sc igtree} and {\sc ib1}. In contrast to {\sc tribl}, {\sc tribl2} uses no threshold parameter. \item Exemplar weighting. 
TiMBL can read additional numeric exemplar weights (generated externally) when reading a data file, and use these weights during neighbor distance computation in $k$-NN classification. \item Cross-validation testing. Analogous to the leave-one-out testing option, with cross-validation testing it is possible to let TiMBL run systematic tests on different values of parameters, without completely re-initializing the classifier in every fold of the validation experiment. \item The number of concurrent connections to a TiMBL server has been restricted, but can be set to different values. \item The command line interface has had several additions reflecting the above changes, plus one extra verbosity option: \begin{itemize} \item the {\tt -d metriccode} option sets the distance weighting metric. Three metrics are available: inverse distance (code ID), inverse linear (IL), and exponential decay (ED, which takes an extra argument $a$, without whitespace, determining the factor of the exponential function). By default, no distance weighting is used (code Z). See Chapter~\ref{algorithms} for descriptions. \item the {\tt -L n} option sets the frequency threshold in the optional switch (backoff) from {\sc mvdm} or Jeffrey divergence to Overlap; whenever in an {\sc mvdm} or Jeffrey divergence distance computation one or both of a pair of values occur fewer than {\tt n} times, Overlap is used rather than the {\sc mvdm} metric. The default value for {\tt n} is 1 (no switching). \item the {\tt -a 3} or {\tt -a IB2} switch invokes the {\sc ib2} algorithm. This algorithm expects to have the {\tt -b} switch set. \item the {\tt -b n} option sets the number ($n$) of lines counting from the top of the training set file, which form the bootstrap set of memorized instances to which {\sc ib2} will start adding instances incrementally. \item the {\tt -a 4} or {\tt -a TRIBL2} switch invokes the {\sc tribl2} algorithm. 
\item the {\tt -C n} switch (default: {\tt n} set to 10) restricts the number of concurrent connections to a TiMBL server (cf. the {\tt -S} switch). \item the {\tt +v/-v} option has {\tt cm} as a new optional argument; it returns the confusion matrix, obtained after testing, between predicted and actual classes in the test data. \end{itemize} \item The ``programmer's reference'' or API section has been separated from this manual. This new API, describing the underlying structure of TiMBL, is available as a separate document in the TiMBL software distribution. \item Two bugs relating to a type of sparse data problem have been resolved. The first involved leave-one-out experiments on data sets with features that have values that occur only once in the training data. The second bug occurred with the use of the {\tt -F Binary} option with the same type of data. \item {\bf [4.1]} Exemplar weights are stored in the TiMBL-tree. \item {\bf [4.1]} The core representation of TiMBL-trees has been modified, causing no changes at the surface except that the {\sc tribl} variant uses less memory. \item {\bf [4.2]} Feature value and class information in the internal TiMBL tree is hashed, by default, except with binary features. Hashing can be explicitly set on or off through the flag {\tt +H} or {\tt -H}. \item {\bf [4.2]} The discretization of numeric features, used for computing feature weights, has changed from linear binning between minimum and maximum values, to equal-content binning. \item {\bf [4.2]} Tie resolution between equal class distributions in the nearest neighbors set is resolved by first expanding the $k$ by one value. If the tie persists after the enlargement of the nearest neighbor set, the original tie resolution method is applied. \item {\bf [4.3]} Internal changes in the code (with no effect on learning and classification functionality) have been implemented with respect to namespaces. 
\item {\bf [4.3]} A progress marker (one dot per 10 seconds) in computationally intensive operations on the internal representation of the instance base (e.g. pruning {\sc igtree}s) is added in TiMBL's screen output. \item A number of bugs have been fixed, notably to handle erroneous input more robustly. \end{itemize} \section{From version 2.0 to 3.0} \begin{itemize} \item Server functionality. Apart from the standard processing of test items from a file, alternatively you can now specify a portnumber with {\tt -S portnumber} to open a socket and send commands for classification of test patterns or change of parameters to it. A sample client program is included in the distribution. This allows fast response times when small amounts of test material are presented at various intervals. It also opens the possibility of having large numbers of ``classification agents'' cooperate in real time, or of classification of the same data with different parameters. \item Leave-one-out testing. To get an estimate of the classification error, without setting aside part of one's data as a test set, one can now test by ``leave-one-out'' ({\tt -t leave\_one\_out}), in effect testing on every case once, while training on the rest of the cases, without completely re-initializing the classifier for every test case. \item Support for sparse binary features. For tasks with large numbers of sparse binary features, TiMBL now allows for an input format which lists only the ``active'' features, avoiding the listing of the many (zero-valued) features for each case. This format is described in Section~\ref{binaryformat}. \item Additional feature weighting metrics. We have added chi-squared and shared variance measures as weighting schemes. These weighting metrics are sometimes more robust to large numbers of feature values and other forms of data sparseness. \item Different metrics (Overlap, {\sc mvdm} or Numeric) can be applied to different features. 
\item The command line interface has slightly been cleaned up, and re-organized: \begin{itemize} \item The {\tt -m metricnumber} switch to choose metrics has been replaced by the use of a specification string following {\tt -m}. E.g.~you can specify to use {\sc mvdm} as the default metric, but use Overlap on features 5-7,9, Numeric on feature 1, and ignore feature 10 ({\tt -m M:O5-7,9:N1:I10}). \item All of the output needed for analysing the matching of nearest neighbors has been moved to the verbosity setting. \item Verbosity levels and some other options can be switched on {\tt +v} and off {\tt -v}, even between different classification actions. \item Because of the large amount of verbosity levels, the {\tt +v} option takes mnemonic abbreviations as arguments instead of numeric verbosity levels. Although the old (numeric) format is still supported, its use is not encouraged as it will disappear in future versions. \end{itemize} \item Because of significant optimizations in the nearest neighbor search, the default is no longer to use inverted indexes. These can however still be turned on by using the {\tt +-} switch on the command line. \item You can now choose the output filename or have it generated by TiMBL on the basis of the test filename and the parameters. \item You can use TiMBL in a pipeline of commands by specifying '-' as either input, output or both. \item Several problems with the display of nearest neighbors in the output have been fixed. \item The API has been adapted a bit to allow more practical use of it. \end{itemize} \section{From version 1.0 to 2.0} \begin{itemize} \item We have added a new algorithm: {\sc tribl}, a hybrid between the fast {\sc igtree} algorithm and real nearest neighbor search (for more details, see~\ref{tribl}, or~\namecite{Daelemans+97d}). 
This algorithm is invoked with the {\tt -a 2} switch and requires the specification of a so-called {\sc tribl}-offset, the feature where {\sc igtree} stops and case bases are stored under the leaves of the constructed tree. \item Support for numeric features. Although the package has retained its focus on discrete features, it can now also process numeric features, scale them, and compute feature weights on them. You specify which features are numeric with the {\tt -N} option on the command line. \item The organization of the code is much more object-oriented than in version 1.0. %The main benefit of this is that: \item A Memory-Based Learning API is made available. You can define Memory-Based classification objects in your own C++ programs and access all of the functionality of TiMBL by linking to the TiMBL library. \item It has become easier to examine the way decisions are made from nearest neighbors, because several verbosity-levels allow you to dump similarity values ({\tt -D}), distributions ({\tt -v 16}), and nearest neighbor sets ({\tt -v 32}) to the output file. The {\tt -d} option for writing the distributions no longer exists. \item Better support for the manipulation of {\sc mvdm} matrices. Using the {\tt -U} and {\tt -u} options it is now possible to respectively save and read back value difference matrices (see Section~\ref{mvdmformat}). \item Both ``pre-stored'' and ``regular'' {\sc mvdm} experiments now generate filenames with ``{\tt mvd}'' in the suffix. This used to be ``{\tt pvd}'' and ``{\tt mvd}'' respectively. \item a number of minor bugs have been fixed. \end{itemize} \chapter{Quick-start Tutorial} \label{tutorial} This quick-start tutorial is meant to get you started with TiMBL right away. 
We discuss how to format the data of a task to serve as training examples, which choices can be made during the construction of the classifier, how various choices can be evaluated in terms of their generalization accuracy, and various other practical issues. The reader who is interested in more background information on TiMBL implementation issues and a formal description of Memory-Based Learning, is advised to read Chapter~\ref{algorithms}. Memory-Based Learning ({\sc mbl}) is based on the idea that intelligent behavior can be obtained by analogical reasoning, rather than by the application of abstract {\em mental rules} as in rule induction and rule-based processing. In particular, {\sc mbl} is founded in the hypothesis that the extrapolation of behavior from stored representations of earlier experience to new situations, based on the similarity of the old and the new situation, is of key importance. {\sc mbl} algorithms take a set of examples (fixed-length patterns of feature-values and their associated class) as input, and produce a {\em classifier} which can classify new, previously unseen, input patterns. Although TiMBL was designed with linguistic classification tasks in mind, it can in principle be applied to any kind of classification task with symbolic or numeric features and discrete (non-continuous) classes for which training data is available. As an example task for this tutorial we go through the application of TiMBL to the prediction of Dutch diminutive suffixes. The necessary data sets are included in the TiMBL distribution, so you can replicate the examples given below on your own system. \section{Data} The operation of TiMBL will be illustrated below by means of a real natural language processing task: prediction of the diminutive suffix form in Dutch~\cite{Daelemans+97b}. In Dutch, a noun can receive a diminutive suffix to indicate {\em small size} literally or metaphorically attributed to the referent of the noun; e.g. 
{\em mannetje} means {\em little man}. Diminutives are formed by a productive morphological rule which attaches a form of the Germanic suffix {\em -tje} to the singular base form of a noun. The suffix shows variation in its form (Table \ref{variation}). The task we consider here is to predict which suffix form is chosen for previously unseen nouns on the basis of their form. \begin{table}[ht] \begin{center} \begin{tabular}{l|l|l} Noun & Form & Suffix \\ \noalign{\smallskip} \hline \noalign{\smallskip} huis (house) & huisje & {\em -je} \\ man (man) & mannetje & {\em -etje\/} \\ raam (window) & raampje & {\em -pje\/} \\ woning (house) & woninkje & {\em -kje\/} \\ baan (job) & baantje & {\em -tje\/} \\ \end{tabular} \caption{Allomorphic variation in Dutch diminutives.}\label{variation} \end{center} \end{table} For these experiments, we collect a representation of nouns in terms of their syllable structure as training material\footnote{These words were collected from the {\sc celex} lexical database~\cite{Baayen+93}.}. For each of the last three syllables of the noun, four different features are collected: whether the syllable is stressed or not (values - or +), the string of consonants before the vocalic part of the syllable (i.e. its onset), its vocalic part (nucleus), and its post-vocalic part (coda). Whenever a feature value is not present (e.g. a syllable does not have an onset, or the noun has fewer than three syllables), the value `=' is used. The class to be predicted is either E ({\em -etje}), T ({\em -tje}), J ({\em -je}), K ({\em -kje}), or P ({\em -pje}). Some examples are given below (the word in the rightmost column is only provided for convenience and is not used). The values of the syllabic content features are given in phonetic notation. 
\begin{table}[ht] \begin{center} \begin{tabular}{cccccccccccc|l|l|l} + & b & i & = & - & z & @ & = & - & m & A & nt & J & {\em biezenmand} \\ = & = & = & = & = & = & = & = & + & b & I & x & E & {\em big}\\ = & = & = & = & + & b & K & = & - & b & a & n & T & {\em bijbaan}\\ = & = & = & = & + & b & K & = & - & b & @ & l & T & {\em bijbel}\\ \end{tabular} \end{center} \end{table} Our goal is to use TiMBL in order to train a classifier that can predict the class of new, previously unseen words as correctly as possible, given a set of training examples that are described by the features given above. Because the basis of classification in TiMBL is the storage of all training examples in memory, a test of the classifier's accuracy must be done on a separate test set. We will call these datasets {\tt dimin.train} and {\tt dimin.test}, respectively. The training set {\tt dimin.train} contains 2999 words and the test set contains 950 words, none of which are present in the training set. Although a single train/test partition suffices here for the purposes of explanation, it does not factor out the bias of choosing this particular split. Unless the test set is sufficiently large, a more reliable generalization accuracy measurement is used in real experiments, e.g.~10-fold cross-validation~\cite{Weiss+91}. This means that 10 separate experiments are performed, and in each ``fold'' 90\% of the data is used for training and 10\% for testing, in such a way that each instance is used as a test item exactly once. Another reliable way of testing the real error of a classifier is leave-one-out~\cite{Weiss+91}. In this approach, every data item in turn is selected once as a test item, and the classifier is trained on all remaining items. Accuracy of the classifier is then the number of data items correctly predicted. With the option {\tt -t leave\_one\_out}, this testing methodology is used by TiMBL. 
We will use this option in the tutorial on the file {\tt dimin.data}, the union of {\tt dimin.train} and {\tt dimin.test}. \section{Using TiMBL} Different formats are allowed for training and test data files. TiMBL is able to guess the type of format in most cases. We will use comma-separated values here, with the class as the last value. This format is called C4.5 format in TiMBL because it is the same as that used in Quinlan's well-known C4.5 program for induction of decision trees~\cite{Quinlan93}. See Section~\ref{fileformats} for more information about this and other file formats. An experiment is started by executing TiMBL with the two files ({\tt dimin.train} and {\tt dimin.test}) as arguments (``$>$'' is the command line prompt): {\footnotesize \begin{verbatim} > Timbl -f dimin.train -t dimin.test \end{verbatim} } Upon completion, a new file has been created with name {\small\tt dimin.test.IB1.O.gr.k1.out}, which is identical to the input test file except that an extra comma-separated column is added with the class predicted by TiMBL. The name of the file provides information about the {\sc mbl} algorithms and metrics used in the experiment (the default values in this case). We will describe these shortly. Apart from the result file, information about the operation of the algorithm is also sent to the standard output. It is therefore advisable to redirect the output to a file in order to make a log of the results. {\footnotesize \begin{verbatim} > Timbl -f dimin.train -t dimin.test > dimin-exp1 \end{verbatim} } The defaults used in this case work reasonably well for most problems. We will now provide a point by point explanation of what goes on in the output. %\vspace{0.5cm} %\rule{\textwidth}{0.5mm} {\footnotesize \begin{verbatim} TiMBL 6.3.0 (c) ILK 1998 - 2010. 
Tilburg Memory Based Learner Induction of Linguistic Knowledge Research Group, Tilburg University CLiPS Computational Linguistics Group, University of Antwerp Mon Oct 19 21:30:00 2009 Examine datafile 'dimin.train' gave the following results: Number of Features: 12 InputFormat : C4.5 \end{verbatim} } %\rule{\textwidth}{0.5mm} %\vspace{0.5cm} TiMBL has detected 12 features and the C4.5 input format (comma-separated features, class at the end). %\rule{\textwidth}{0.5mm} {\footnotesize \begin{verbatim} Phase 1: Reading Datafile: dimin.train Start: 0 @ Mon Oct 19 21:30:00 2009 Finished: 2999 @ Mon Oct 19 21:30:00 2009 Calculating Entropy Mon Oct 19 21:30:00 2009 Lines of data : 2999 DB Entropy : 1.6178929 Number of Classes : 5 Feats Vals InfoGain GainRatio 1 3 0.030971064 0.024891536 2 50 0.060860038 0.027552191 3 19 0.039562857 0.018676787 4 37 0.052541227 0.052620750 5 3 0.074523225 0.047699231 6 61 0.10604433 0.024471911 7 20 0.12348668 0.034953203 8 69 0.097198760 0.043983864 9 2 0.045752381 0.046816705 10 64 0.21388759 0.042844587 11 18 0.66970458 0.18507018 12 43 1.2780762 0.32537181 Feature Permutation based on GainRatio/Values : < 9, 5, 11, 1, 12, 7, 4, 3, 10, 8, 2, 6 > \end{verbatim} } %\rule{\textwidth}{0.5mm} %\vspace{0.5cm} Phase 1 is the training data analysis phase. Time stamps for start and end of analysis are provided. Some preliminary analysis of the training data is done: number of training items, number of classes, entropy of the training data. For each feature, the number of values, and four variants of an information-theoretic measure of feature relevance are given. These are used both for memory organization during training and for feature relevance weighting during testing (see Chapter~\ref{algorithms}). Finally, an ordering (permutation) of the features is given. This ordering is used for building the tree-index to the case-base. 
%\vspace{0.5cm} %\rule{\textwidth}{0.5mm} {\footnotesize \begin{verbatim} Phase 2: Learning from Datafile: dimin.train Start: 0 @ Mon Oct 19 21:30:00 2009 Finished: 2999 @ Mon Oct 19 21:30:00 2009 Size of InstanceBase = 19231 Nodes, (769240 bytes), 49.77 % compression Examine datafile 'dimin.test' gave the following results: Number of Features: 12 InputFormat : C4.5 \end{verbatim} } %\rule{\textwidth}{0.5mm} %\vspace{0.5cm} Phase 2 is the learning phase: all training items are stored in an efficient way in memory for use during testing. Again timing information (real time) is provided, as well as information about the size of the data structure representing the stored examples and the amount of compression achieved. %\vspace{0.5cm} %\rule{\textwidth}{0.5mm} {\footnotesize \begin{verbatim} Starting to test, Testfile: dimin.test Writing output in: dimin.test.IB1.O.gr.k1.out Algorithm : IB1 Global metric : Overlap Deviant Feature Metrics:(none) Weighting : GainRatio Feature 1 : 0.024891535617620 Feature 2 : 0.027552191321752 Feature 3 : 0.018676787182524 Feature 4 : 0.052620750282779 Feature 5 : 0.047699230752236 Feature 6 : 0.024471910753751 Feature 7 : 0.034953203413051 Feature 8 : 0.043983864437713 Feature 9 : 0.046816704745507 Feature 10 : 0.042844587034556 Feature 11 : 0.185070180760327 Feature 12 : 0.325371814230901 Tested: 1 @ Mon Oct 19 21:30:00 2009 Tested: 2 @ Mon Oct 19 21:30:00 2009 Tested: 3 @ Mon Oct 19 21:30:00 2009 Tested: 4 @ Mon Oct 19 21:30:00 2009 Tested: 5 @ Mon Oct 19 21:30:00 2009 Tested: 6 @ Mon Oct 19 21:30:00 2009 Tested: 7 @ Mon Oct 19 21:30:00 2009 Tested: 8 @ Mon Oct 19 21:30:00 2009 Tested: 9 @ Mon Oct 19 21:30:00 2009 Tested: 10 @ Mon Oct 19 21:30:00 2009 Tested: 100 @ Mon Oct 19 21:30:00 2009 Ready: 950 @ Mon Oct 19 21:30:00 2009 Seconds taken: 0.0650 (14609.99 p/s) overall accuracy: 0.968421 (920/950), of which 39 exact matches There were 5 ties of which 5 (100.00%) were correctly resolved \end{verbatim} } %\rule{\textwidth}{0.5mm} 
%\vspace{0.5cm} In Phase 3, the trained classifier is applied to the test set. Because we have not specified which algorithm to use, the default settings are used ({\sc ib1} with information-theoretic feature weighting). This algorithm computes the similarity between a test item and each training item in terms of {\em weighted overlap}: the total difference between two patterns is the sum of the relevance weights of those features which are not equal. The class for the test item is decided on the basis of the least distant item(s) in memory. To compute relevance, Gain Ratio is used (an information-theoretic measure, see Section~\ref{infogain}). Time stamps indicate the progress of the testing phase. Finally, accuracy on the test set is logged, and the number of exact matches\footnote{An exact match in this experiment can occur when two different nouns have the same feature-value representation.} and ties (two or more classes are equally frequent in the nearest neighbor set). In this experiment, the diminutive suffix form of 96.8\% of the new words was correctly predicted. Train and test set overlap in 39 items, and the algorithm had to break five ties, all of which were broken correctly. The meaning of the output file names can be explained now:\\ {\tt dimin.test.IB1.O.gr.k1.out} means output file ({\tt .out}) for {\tt dimin.test} with algorithm {\sc mbl} (={\sc ib1}), similarity computed as {\em weighted overlap} ({\tt .O}), relevance weights computed with {\em gain ratio} ({\tt .gr}), and number of most similar memory patterns on which the output class was based equal to 1 ({\tt .k1}). \section{Algorithms and metrics} A precise discussion of the different algorithms and metrics implemented in TiMBL is given in Chapter~\ref{algorithms}. We will discuss the effect of the most important ones on our data set. A first choice in algorithms is between using {\sc ib1} and {\sc igtree}. 
In the trade-off between generalization accuracy and efficiency, {\sc ib1} usually, but not always, leads to more accuracy at the cost of more memory and slower computation, whereas {\sc igtree} is a fast heuristic approximation of {\sc ib1}, but sometimes less accurate. The {\sc igtree} algorithm is used when {\tt -a 1} is given on the command line, whereas the {\sc ib1} algorithm used above (the default) would have been specified explicitly by {\tt -a 0}. {\footnotesize \begin{verbatim} > Timbl -a1 -f dimin.train -t dimin.test \end{verbatim}} We see that {\sc igtree} performs only slightly worse (96.6\%) than {\sc ib1} (96.8\%) for this train-test partitioning of the data --- it uses less memory and is faster, however. When using the {\sc ib1} algorithm, there is a choice of metrics for influencing the definition of similarity. With {\em weighted overlap}, each feature is assigned a weight, determining its relevance in solving the task. With the {\em modified value difference metric} ({\sc mvdm}), each pair of values of a particular feature is assigned a value difference. The intuition here is that in our diminutive problem, for example, the codas $n$ and $m$ should be regarded as being more similar than $n$ and $p$. These pair-wise differences are computed for each pair of values in each feature (see Section~\ref{mvdm}). Selection between weighted overlap and {\sc mvdm} is done by means of the {\tt -mM} parameter. The following selects {\sc mvdm}, whereas {\tt -mO} ({\em weighted overlap}) is the default. {\footnotesize \begin{verbatim} > Timbl -mM -f dimin.train -t dimin.test \end{verbatim} } Especially when using {\sc mvdm}, but also in other cases, it may be useful to extrapolate not just from the most similar example in memory, which is the default, but from several. This can be achieved by using the $-k$ parameter followed by the wanted number of nearest neighbors. 
E.g., the following applies {\sc ib1} with the {\sc mvdm} metric, with extrapolation from the 5 nearest neighbors. {\footnotesize \begin{verbatim} > Timbl -mM -k5 -f dimin.train -t dimin.test \end{verbatim} } Whenever more than one nearest neighbor is taken into account for extrapolation, it may be useful to weigh the influence of the neighbors on the final decision as a function of their distance from the test item. Several possible implementations of this distance function are provided. E.g., the following provides inverse distance: {\footnotesize \begin{verbatim} > Timbl -mM -k5 -dID -f dimin.train -t dimin.test \end{verbatim} } Within the {\sc ib1} {\em weighted overlap}\/ option, the default feature weighting method is gain ratio. Other feature relevance weighting methods are available as well. By setting the parameter {\tt -w} to 0, an {\em unweighted overlap}\/ definition of similarity is created where each feature is considered equally relevant. In that case, similarity reduces to the number of equal values in the same position in the two patterns being compared. As an alternative weighting, users can provide their own weights by using the {\tt -w} parameter with a filename in which the feature weights are stored (see Section~\ref{weightformat} for a description of the format of the weights file). 
\begin{table} \begin{center} \begin{tabular}{l|rrrr} & no weight & gain & information & chi \\ & (overlap) & ratio & gain & squared \\ \noalign{\smallskip} \hline \noalign{\smallskip} Overlap, $-k1$ & 86.4 & 96.8 & 96.7 & 96.7 \\ Overlap, $-k3$ & 73.1 & 96.4 & 96.8 & 96.9 \\ Overlap, $-k5$ & 62.6 & 95.4 & 96.1 & 96.1 \\ \hline \noalign{\smallskip} {\sc mvdm}, $-k1$ & 95.8 & 96.4 & 96.2 & 96.3 \\ {\sc mvdm}, $-k3$ & 97.3 & 97.6 & 97.6 & 97.6 \\ {\sc mvdm}, $-k5$ & {\bf 97.8} & 97.7 & 97.7 & 97.7 \\ \hline \noalign{\smallskip} \end{tabular} \caption{Some results for diminutive prediction.} \label{diminresults} \end{center} \end{table} Table \ref{diminresults} shows a small matrix indicating the effect of distance metric (Overlap versus {\sc mvdm}) and weighting method choice on generalization accuracy, using the same training and test set as before, and increasing $k$ from 1 to 3 and 5. While increasing $k$ leads to a deterioration of generalization accuracy with the Overlap function, it leads to improvements with {\sc mvdm}. Another clear contrast is that the absence of feature weighting leads to the lowest scores with the Overlap function, and the highest score with {\sc mvdm} and $k=5$. Given that TiMBL offers several more hyperparameters than only $k$, the distance metric, and the feature weighting metric, it should be obvious that even with a single training and test set experiment, a large experimental matrix can be explored. Unfortunately, the location of the cell with the highest number in this matrix cannot be predicted upfront. It is therefore useful to try out a large set of reasonable combinations of options by cross-validation on the training data to achieve best results with {\sc mbl} \cite{VandenBosch04b}. The option {\tt -t @f} where {\tt f} is the name of a file, allows you to predefine various combinations of options to be tested and test them without having the training stages repeated each time. See Chapter \ref{commandline}. 
\section{More options} Several input and output options exist to make life easier while experimenting. See Chapter~\ref{commandline} for a detailed description of these options. One especially useful option for testing linguistic hypotheses is the ignore option, which allows you to skip certain features when computing similarity. E.g. if we want to test the hypothesis that only the rime (nucleus and coda) and the stress of the last syllable are actually relevant in determining the form of the diminutive suffix, we can execute the following with the previously best parameter settings to disregard all but the fourth-last and the last two features. As a result we get an accuracy of 97.1\%. {\footnotesize \begin{verbatim} > Timbl -mM:I1-8,10 -k5 -w0 -f dimin.train -t dimin.test \end{verbatim} } The {\tt +/-v} (verbosity) option allows you to control the amount of information that is generated in the output, ranging from nearly nothing ({\tt +v s}) to a lot ({\tt +v as+cs+di+db+n+k}). Specific verbosity settings exist for dumping option settings ({\tt +v o}), feature relevance weights (default), value-class conditional probabilities ({\tt +v p}), exact matches ({\tt +v e}), distributions ({\tt +v db}), a confusion matrix ({\tt +v cm}), advanced statistics besides accuracy: micro-average and macro-average F-score and AUC ({\tt +v as}), per-class advanced statistics ({\tt +v cs}), the nearest neighbors on which decisions are based ({\tt +v n}), just the class distributions per $k$-nearest distance per classified instance ({\tt +v k}), or the distances to the nearest neighbor ({\tt +v di}). E.g. the following command results in an output file with distributions. {\footnotesize \begin{verbatim} > Timbl +v db -f dimin.train -t dimin.test \end{verbatim} } The resulting output file {\tt dimin.test.IB1.O.gr.k1.out} contains lines like the following.
{\footnotesize \begin{verbatim} +,t,L,=,-,m,@,=,-,l,I,N,E,E { E 1.00000 } =,=,=,=,=,=,=,=,+,pr,O,p,J,J { E 3.00000, J 12.0000 } =,=,=,=,=,=,=,=,+,w,e,t,J,J { J 2.00000 } =,=,=,=,+,t,L,n,-,h,L,s,J,J { J 1.00000 } =,=,=,=,=,=,=,=,+,t,L,n,T,T { T 1.00000 } =,=,=,=,=,=,=,=,+,z,o,m,P,P { P 3.00000 } +,d,a,=,-,m,@,s,-,kr,A,ns,J,J { J 1.00000 } =,=,=,=,+,=,a,rd,-,m,A,n,E,E { E 2.00000 } =,=,=,=,=,=,=,=,+,f,M,n,T,T { T 43.0000, E 20.0000 } -,d,u,=,-,k,@,=,-,m,A,nt,J,J { J 1.00000 } \end{verbatim} } This information can e.g. be used to assign a certainty to a decision of the classifier, or to make available a second-best back-off option. Another verbosity option, {\tt +v di}, displays the distance to the nearest neighbor: {\footnotesize \begin{verbatim} > Timbl +v di -f dimin.train -t dimin.test +,l,a,=,-,d,@,=,-,k,A,st,J,J 0.070701 -,s,i,=,-,f,E,r,-,st,O,k,J,J 0.000000 =,=,=,=,=,=,=,=,+,sp,a,n,T,T 0.042845 =,=,=,=,=,=,=,=,+,st,o,t,J,J 0.042845 =,=,=,=,+,sp,a,r,-,b,u,k,J,J 0.024472 +,h,I,N,-,k,@,l,-,bl,O,k,J,J 0.147489 -,m,e,=,-,d,A,l,+,j,O,n,E,E 0.182421 -,sn,u,=,-,p,@,=,+,r,K,=,T,T 0.046229 =,=,=,=,=,=,=,=,+,sp,A,N,E,E 0.042845 +,k,a,=,-,k,@,=,-,n,E,st,J,J 0.114685 \end{verbatim} } This can be used to study how very similar instances (low distance) and less similar patterns (higher distance) are used in the process of generalization. The listing of nearest neighbors is useful for the analysis of the behavior of a classifier. It can be used to interpret why particular decisions or errors occur. 
{\footnotesize \begin{verbatim} > Timbl +v n+k -mM -k3 -w0 -f dimin.train -t dimin.test +,m,I,=,-,d,A,G,-,d,},t,J,J { J 3.00000 } # k=1, 1 Neighbor(s) at distance: 0.99179269134432 # +,p,a,=,-,t,@,rs,-,f,A,t,{ J 1.00000 } # k=2, 1 Neighbor(s) at distance: 0.99458957262696 # +,h,o,=,-,n,@,G,-,b,A,k,{ J 1.00000 } # k=3, 1 Neighbor(s) at distance: 1.0088291749842 # +,h,E,r,-,d,@,rs,-,t,A,s,{ J 1.00000 } -,t,@,=,-,l,|,=,-,G,@,n,T,T { T 3.00000 } # k=1, 1 Neighbor(s) at distance: 0.33024081383366 # -,x,@,=,+,h,|,=,-,G,@,n,{ T 1.00000 } # k=2, 1 Neighbor(s) at distance: 0.49144604610567 # -,d,@,r,-,w,a,=,-,G,@,n,{ T 1.00000 } # k=3, 1 Neighbor(s) at distance: 0.56944572926932 # -,st,@,=,-,l,I,=,-,N,@,=,{ T 1.00000 } \end{verbatim} } A confusion matrix, printed when the {\tt +v cm} option is selected, can bring to light specific errors of the classifier that would not be apparent from the overall accuracy. Applied to the diminutive data, the following confusion matrix is computed and printed: {\footnotesize \begin{verbatim} > Timbl +v cm -f dimin.train -t dimin.test Confusion Matrix: T E J P K ----------------------------------- T | 453 0 2 0 0 E | 0 87 4 1 8 J | 1 4 347 0 0 P | 0 3 0 24 0 K | 0 7 0 0 9 -*- | 0 0 0 0 0 \end{verbatim} } The confusion matrix associates the class predicted by TiMBL (vertically) with the real class of the test items given (horizontally). All cells outside the diagonal contain errors of one class being mistaken for another. For example, the K class ({\em -kje}) is mispredicted seven times as class E ({\em -etje}). (The bottom line, labeled with {\tt -*-}, would contain aggregate counts of classes occurring in the test data that did not occur in the training data. In the diminutive data this does not occur.)
In general, a confusion matrix allows a more fine-grained analysis of experimental results and better experimental designs (some parameter settings may work for some classes but not for others, or some may improve recall, and others precision, e.g.). From such a matrix, not only accuracy can be derived, but also a number of additional metrics that have become popular in machine learning, information retrieval, and subsequently also in computational linguistics: {\em recall}, {\em precision}, and their harmonic mean {\em F-score}, as well as {\em true positive rate}, {\em false positive rate}, and their joint measure {\em AUC} in ROC space. The details of these advanced statistics are given in Section~\ref{advancedstats}. They can be reported by TiMBL using the {\tt +v as} and {\tt +v cs} verbosity options: {\footnotesize \begin{verbatim} > Timbl +v as+cs -f dimin.train -t dimin.test Scores per Value Class: class | TP FP TN FN precision recall(TPR) FPR F-score AUC T | 453 1 494 2 0.99780 0.99560 0.00202 0.99670 0.99679 E | 87 14 836 13 0.86139 0.87000 0.01647 0.86567 0.92676 J | 347 6 592 5 0.98300 0.98580 0.01003 0.98440 0.98788 P | 24 1 922 3 0.96000 0.88889 0.00108 0.92308 0.94390 K | 9 8 926 7 0.52941 0.56250 0.00857 0.54545 0.77697 F-Score beta=1, microav: 0.968123 F-Score beta=1, macroav: 0.863060 AUC, microav: 0.980729 AUC, macroav: 0.926462 overall accuracy: 0.968421 (920/950), of which 39 exact matches There were 5 ties of which 5 (100.00%) were correctly resolved \end{verbatim} } We hope that this tutorial has made it clear that, once you have coded your data in fixed-length feature-value patterns, it should be relatively straightforward to get the first results using TiMBL. You can then experiment with different metrics and algorithms to try and further improve your results. \chapter{Memory-based learning algorithms} \label{algorithms} TiMBL is a program implementing several memory-based learning algorithms. 
All implemented algorithms have in common that they store some representation of the training set explicitly in memory. During testing, new cases are classified by extrapolation from the most similar stored cases. The main differences among the algorithms incorporated in TiMBL lie in: \begin{itemize} \item The definition of {\em similarity}, \item The way the instances are stored in memory, and \item The way the search through memory is conducted. \end{itemize} In this chapter, various choices for these issues are described. We start in Section~\ref{mbl} with a formal description of the basic memory-based learning algorithm, i.e.~a nearest neighbor search. We then introduce different distance metrics, such as Information Gain weighting, which allows us to deal with features of differing importance, and the Modified Value Difference metric, which allows us to make a graded guess of the match between two different symbolic values, and describe the standard versus three distance-weighted versions of the class voting mechanism of the nearest neighbor classifier. In Section~\ref{indexing}, we give a description of various algorithmic optimizations for nearest neighbor search. Sections~\ref{igtree} to~\ref{ib2} describe three variants of the standard nearest neighbor classifier implemented within TiMBL, that optimize some intrinsic property of the standard algorithm. First, in Section~\ref{igtree}, we describe {\sc igtree}, which replaces the exact nearest neighbor search with a very fast heuristic that exploits the difference in importance between features. Second, in Section~\ref{tribl}, we describe the {\sc tribl} algorithm, which is a hybrid between {\sc igtree} and nearest neighbor search. Third, Section~\ref{ib2} describes the {\sc ib2} algorithm, which incrementally and selectively adds instances to memory during learning. 
The chapter is concluded by Section~\ref{furtherreading}, which provides an overview of further reading into theory and applications of memory-based learning to natural language processing tasks. \section{Memory-based learning} \label{mbl} Memory-based learning is founded on the hypothesis that performance in cognitive tasks is based on reasoning on the basis of similarity of new situations to {\em stored representations of earlier experiences}, rather than on the application of {\em mental rules}\/ abstracted from earlier experiences (as in rule induction and rule-based processing). The approach has surfaced in different contexts using a variety of alternative names such as similarity-based, example-based, exemplar-based, analogical, case-based, in\-stance-based, and lazy learning~\cite{Stanfill+86,Aha+91,Cost+93,Kolodner93,Aha97a}. Historically, memory-based learning algorithms are descendants of the $k$-nearest neighbor (henceforth $k$-{\sc nn}) algorithm \cite{Cover+67,Devijver+82,Aha+91}. An {\sc mbl} system, visualized schematically in Figure~\ref{mbl-method}, contains two components: a {\em learning component}\/ which is memory-based (from which {\sc mbl} borrows its name), and a {\em performance component}\/ which is similarity-based. The learning component of {\sc mbl} is memory-based as it involves adding training instances to memory (the {\em instance base} or case base); it is sometimes referred to as `lazy' as memory storage is done without abstraction or restructuring. An instance consists of a fixed-length vector of $n$ feature-value pairs, and an information field containing the classification of that particular feature-value vector. In the performance component of an {\sc mbl} system, the product of the learning component is used as a basis for mapping input to output; this usually takes the form of performing classification. During classification, a previously unseen test example is presented to the system. 
The similarity between the new instance $X$ and all examples $Y$ in memory is computed using some {\em distance metric} $\Delta(X,Y)$. The extrapolation is done by assigning the most frequent category within the found set of most similar example(s) (the $k$-nearest neighbors) as the category of the new test example. In case of a tie among categories, a tie breaking resolution method is used. This method is described in subsection~\ref{tiebreaking}. \begin{figure}[htb] \begin{center} \leavevmode \epsfxsize=8cm \epsffile{mble-method.eps} \caption{General architecture of an {\sc mbl} system. } \label{mbl-method} \end{center} \end{figure} \subsection{The Overlap metric} \label{overlap} The most basic metric that works for patterns with symbolic features is the {\bf Overlap metric}\footnote{This metric is also referred to as Hamming distance, Manhattan metric, city-block distance, or L1 metric.} given in Equations~\ref{distance} and~\ref{overlapeq}; where $\Delta(X,Y)$ is the distance between instances $X$ and $Y$, represented by $n$ features, and $\delta$ is the distance per feature. The distance between two patterns is simply the sum of the differences between the features. The $k$-{\sc nn} algorithm with this metric is called {\sc ib1} \cite{Aha+91}. \begin{equation} \Delta(X,Y) = \sum_{i=1}^{n} \delta(x_{i},y_{i}) \label{distance} \end{equation} where: \begin{equation} \delta(x_{i}, y_{i}) = \left\{ \begin{array}{ll} abs(\frac{x_{i}-y_{i}}{max_{i}-min_{i}}) & \mbox{if numeric, else}\\ 0 & \mbox{if $x_{i} = y_{i}$}\\ 1 & \mbox{if $x_{i} \neq y_{i}$}\\ \end{array} \right. \label{overlapeq} \end{equation} The major difference with the {\sc ib1} algorithm originally proposed by \cite{Aha+91}, is that in our version the value of $k$ refers to $k$-nearest {\em distances}\/ rather than $k$-nearest examples. With $k=1$, for instance, TiMBL's nearest neighbor set can contain several instances that are equally distant to the test instance. 
Arguably, our $k$-NN kernel could therefore be called $k$-nearest distances classification. Another difference with the original {\sc ib1} as well as with other implementations such as $k$-NN in the {\sc weka} machine learning toolkit \cite{Witten+99} is the way in which ties are resolved in choosing the majority category among the set of nearest neighbors. Since this method is independent of the distance function we discuss this issue separately in subsection~\ref{tiebreaking}. \paragraph{Variations on Overlap: Levenshtein and Dice coefficient metrics} The Overlap metric is all-or-nothing. For measuring the similarity between numeric or atomically symbolic values this may suffice, but there are cases (such as in natural language processing) in which string-valued feature values occur that can mismatch with other string values in a meaningfully graded way. For example, the value pair ``bathe'' and ``bathes'' only differs in one letter; counting them as more similar than ``bathe'' and ``rumour'', for example, may be useful for the classification task at hand. We implemented two additional metrics, Levenshtein distance and the Dice coefficient, that each provide a graded similarity score between pairs of strings. {\bf Levenshtein} distance is a classic {\em edit distance}\/ metric \cite{Levenshtein66} that counts the number of insertions, deletions, and substitutions to transform the one string into the other. In our (dynamic programming) implementation the three operations count equally heavily. The {\bf Dice} coefficient computes the overlap between the occurrences of character bigrams in two strings as in Equation~\ref{dice}, where $n_{x_{i} \cap y_{i}}$ is the number of character bigrams (uniquely) occurring both in string value $x_{i}$ and in string value $y_{i}$ (and where $i$ is the index of the feature as introduced in Equation~\ref{distance})\footnote{Strings of length one are not handled by Dice; we back off to Overlap in these cases.}.
The equation subtracts the similarity from 1, because we assume $\delta$ to produce a distance, not a similarity. \begin{equation} \delta(x_{i}, y_{i}) = 1 - \frac{2 n_{x_{i} \cap y_{i}}}{n_{x_{i}} + n_{y_{i}}} \label{dice} \end{equation} \subsection{Information-gain and gain ratio feature weighting} \label{infogain} The distance metric in Equation~\ref{overlapeq} straightforwardly counts the number of (mis)matching feature-values in both patterns. In the absence of information about feature relevance, this is a reasonable choice. Otherwise, we can add domain knowledge bias to weight or select different features (see e.g.~\namecite{Cardie96} for an application of linguistic bias in a language processing task), or look at the behavior of features in the set of examples used for training. We can compute statistics about the relevance of features by looking at which features are good predictors of the class labels. Information Theory gives us a useful tool for measuring feature relevance in this way~\cite{Quinlan86,Quinlan93}. {\bf Information Gain} (IG) weighting looks at each feature in isolation, and measures how much information it contributes to our knowledge of the correct class label. The Information Gain of feature $i$ is measured by computing the difference in uncertainty (i.e.\ entropy) between the situations without and with knowledge of the value of that feature (Equation~\ref{IGgain}). \begin{equation} w_{i} = H(C) - \sum_{v \in V_{i}} P(v) \times H(C|v) \label{IGgain} \end{equation} Where $C$ is the set of class labels, $H(C) = - \sum_{c \in C} P(c) \log_{2} P(c)$ is the entropy of the class labels, and $V_{i}$ is the set of values for feature $i$. The probabilities are estimated from relative frequencies in the training set. For numeric features, an intermediate step needs to be taken to apply the symbol-based computation of IG. All real values of a numeric feature are temporarily discretized into a number (the default is 20) of intervals.
Instances are ranked on their real value, and then spread evenly over the intervals; each interval contains the same number of instances (i.e., by default, $1/20$th of the total amount of instances). Instances in each of these intervals are then used in the IG computation as all having the same unordered, symbolic value per group. Note again that this discretization is only temporary; it is not used in the computation of the distance metric. It is important to realize that the IG weight is really a probability-weighted average of the informativity of the different values of the feature. On the one hand, this pre-empts the consideration of values with low frequency but high informativity. Such values ``disappear'' in the average. On the other hand, this also makes the IG weight very robust to estimation problems. Each parameter (weight) is estimated on the whole data set. Information Gain, however, tends to overestimate the relevance of features with large numbers of values. Imagine a data set of hospital patients, where one of the available features is a unique ``patient ID number''. This feature will have very high Information Gain, but it does not give any generalization to new instances. To normalize Information Gain for features with different numbers of values, Quinlan~\cite{Quinlan93} has introduced a normalized version, called {\bf Gain Ratio}, which is Information Gain divided by $si(i)$ (split info), the entropy of the feature-values (Equation~\ref{splitinfo}). \begin{equation} w_{i} = \frac{H(C) - \sum_{v \in V_{i}} P(v) \times H(C|v)}{si(i)} \label{IGgainratio} \end{equation} \begin{equation} si(i) = - \sum_{v \in V_{i}} P(v) \log_{2} P(v) \label{splitinfo} \end{equation} The resulting Gain Ratio values can then be used as weights $w_{f}$ in the weighted distance metric (Equation~\ref{distancew})\footnote{In a generic use IG refers both to Information Gain and to Gain Ratio throughout this manual. 
In specifying parameters for the software, the distinction between both needs to be made, because they often result in different behavior.}. The $k$-{\sc nn} algorithm with this metric is called {\sc ib1-ig} \cite{Daelemans+92b}. \begin{equation} \Delta(X,Y) = \sum_{i=1}^{n}\ w_{i} \ \delta(x_{i},y_{i}) \label{distancew} \end{equation} The possibility of automatically determining the relevance of features implies that many different and possibly irrelevant features can be added to the feature set. This is a very convenient methodology if domain knowledge does not constrain the choice enough beforehand, or if we wish to measure the importance of various information sources experimentally. However, because IG values are computed for each feature independently, this is not necessarily the best strategy. Sometimes better results can be obtained by leaving features out than by letting them in with a low weight. Very redundant features can also be challenging for {\sc ib1-ig}, because IG will overestimate their joint relevance. Imagine an informative feature which is duplicated. This results in an overestimation of IG weight by a factor two, and can lead to accuracy loss, because the doubled feature will dominate the distance metric. \subsection{Chi-squared and shared variance feature weighting} \label{chisquared} Unfortunately, as~\namecite{White+94} have shown, the Gain Ratio measure still has an unwanted bias towards features with more values. The reason for this is that the Gain Ratio statistic is not corrected for the number of degrees of freedom of the contingency table of classes and values. \namecite{White+94} proposed a feature selection measure based on the chi-squared statistic, as values of this statistic can be compared across conditions with different numbers of degrees of freedom. The \chisq statistic is computed from the same contingency table as the Information Gain measure by the following formula (Equation~\ref{chisq-eq}). 
\begin{equation} \chi^{2} = \sum_{i} \sum_{j} \frac{(E_{ij} - O_{ij})^{2}} {E_{ij}} \label{chisq-eq} \end{equation} where $O_{ij}$ is the observed number of cases with value $v_{i}$ in class $c_{j}$, i.e.~$O_{ij} = n_{ij}$, and $E_{ij}$ is the expected number of cases which should be in cell ($v_{i}$, $c_{j}$) in the contingency table, if the null hypothesis (of no predictive association between feature and class) is true (Equation~\ref{chisq-expect-eq}). Let $n_{.j}$ denote the marginal for class $j$ (i.e.~the sum over column $j$ of the table), $n_{i.}$ the marginal for value $i$, and $n_{..}$ the total number of cases (i.e.~the sum of all the cells of the contingency table). \begin{equation} E_{ij} = \frac{n_{.j} n_{i.}}{n_{..}} \label{chisq-expect-eq} \end{equation} The \chisq statistic is well approximated by the chi-square distribution with $\nu = (m-1)(n-1)$ degrees of freedom, where $m$ is the number of values and $n$ is the number of classes. We can then either use the \chisq values as feature weights in Equation~\ref{distancew}, or we can explicitly correct for the degrees of freedom by using the {\bf Shared Variance} measure (Equation~\ref{shared-variance-eq}). \begin{equation} SV_{i} = \frac{ \chi^2_{i}}{N \times ( min(|C|,|V_{i}|)-1 ) } \label{shared-variance-eq} \end{equation} Where $|C|$ and $|V_{i}|$ are the number of classes and the number of values of feature $i$, respectively, and $N$ is the number of instances\footnote{Note that with two classes, the shared variance weights of all features are simply divided by $N$, and will not be different from \chisq weights.}. We will refer to these variations of {\sc mbl} as {\sc ib1-\chisq} and {\sc ib1-sv}. One should keep in mind, that the correspondence to the chi-square distribution generally becomes poor if the expected frequencies in the contingency table cells become small. 
A common recommendation is that the \chisq test cannot be trusted when more than $20\%$ of the expected frequencies are less than $5$, or any are less than $1$. Chi-squared and shared variance weights of {\em numeric}\/ features are computed via a discretization preprocessing step (also used with computing IG and GR weights). Values are first discretized into a number (20 by default) of equally-spaced intervals between the minimum and maximum values of the feature. These groups are then used as discrete values in computing chi-squared and shared variance weights. \subsection{Modified value difference and Jeffrey divergence metrics} \label{mvdm} It should be stressed that the choice of representation for instances in {\sc mbl} remains the key factor determining the strength of the approach. The features and categories in NLP tasks are usually represented by symbolic labels. The metrics that have been described so far, i.e.~Overlap and IG Overlap, are limited to exact match between feature-values. This means that all values of a feature are seen as equally dissimilar. However, if we think of an imaginary task in e.g.~the phonetic domain, we might want to use the information that 'b' and 'p' are more similar than 'b' and 'a'. For this purpose a metric was defined by \namecite{Stanfill+86} and further refined by \namecite{Cost+93}. It is called the (Modified) Value Difference Metric ({\sc mvdm}; Equation~\ref{MVDMeq}), and it is a method to determine the similarity of the values of a feature by looking at co-occurrence of values with target classes. For the distance between two values $v_{1},\ v_{2}$ of a feature, we compute the difference of the conditional distribution of the classes $C_{i}$ for these values. 
\begin{equation} \delta(v_{1}, v_{2}) = \sum_{i=1}^{n} \left| P(C_{i}|v_{1}) - P(C_{i}|v_{2}) \right| \label{MVDMeq} \end{equation} For computational efficiency, all pairwise $\delta(v_{1}, v_{2})$ values can be pre-comput\-ed before the actual nearest neighbor search starts. Although the {\sc mvdm} metric does not explicitly compute feature relevance, an implicit feature weighting effect is present. If features are very informative, their conditional class probabilities will on average be very skewed towards a particular class. This implies that on average the $\delta(v_{1}, v_{2})$ will be large. For uninformative features, on the other hand, the conditional class probabilities will be pretty uniform, so that on average the $\delta(v_{1}, v_{2})$ will be very small. {\sc mvdm} differs considerably from Overlap based metrics in its composition of the nearest neighbor sets. Overlap causes an abundance of ties in nearest neighbor position. For example, if the nearest neighbor is at a distance of one mismatch from the test instance, then the nearest neighbor set will contain the entire partition of the training set that matches all the other features but contains {\em any} value for the mismatching feature (see~\namecite{Zavrel+97} for a more detailed discussion). With the {\sc mvdm} metric, however, the nearest neighbor set will either contain patterns which have the value with the lowest $\delta(v_{1}, v_{2})$ in the mismatching position, or {\sc mvdm} will select a totally different nearest neighbor which has less exactly matching features, but a smaller distance in the mismatching features. In sum, this means that the nearest neighbor set is usually much smaller for {\sc mvdm} at the same value of $k$. In NLP tasks we have found it very useful to experiment with values of $k$ larger than one for {\sc mvdm}, because this re-introduces some of the beneficial smoothing effects associated with large nearest neighbor sets. 
One cautionary note about this metric is connected with data sparsity. In many practical applications, we are confronted with a very limited set of examples, with values occurring only a few times or once in the whole data set. If two such values occur with the same class, {\sc mvdm} will regard them as identical, and if they occur with two different classes their distance will be maximal. In cases of such extreme behaviour on the basis of low-frequency evidence, it may be safer to back off to the Overlap metric, where only an exact value match yields zero distance. TiMBL offers this back-off from {\sc mvdm} to Overlap through a frequency threshold that switches from the {\sc mvdm} to the Overlap metric when one or both of a pair of matched values occurs fewer times in the learning material than this threshold. Jeffrey divergence, offered as a close neighbor alternative to {\sc mvdm}, is a statistical dissimilarity metric that can be used to compute the distance between class distributions of two values of the same feature. Functionally it is quite similar to {\sc mvdm}. It is best known for its application as a distance function in unsupervised vector space models, e.g. in image retrieval, where it is applied to histogram vectors. While {\sc mvdm} computes a straightforward geometrical distance between two class distribution vectors, Jeffrey divergence introduces a logarithm term, as seen in Equation~\ref{jd}. Jeffrey divergence is a symmetric variant of Kullback-Leibler distance; the $m$ term given in Equation~\ref{jdm} is used for this purpose. \begin{equation} \delta(v_{1}, v_{2}) = \sum_{i=1}^{n} \left( P(C_{i}|v_{1}) \log \frac{P(C_{i}|v_{1})}{m} + P(C_{i}|v_{2}) \log \frac{P(C_{i}|v_{2})}{m} \right) \label{jd} \end{equation} \begin{equation} m = \frac{P(C_{i}|v_{1}) + P(C_{i}|v_{2})}{2} \label{jdm} \end{equation} Compared to {\sc mvdm}, Jeffrey divergence assigns relatively larger distances to value pairs of which the class distributions are more orthogonal.
In other words, it assigns more prominence to zero probabilities, which in the case of sparse data (e.g., with Zipfian distributions of values) are generally better estimations than non-zero probabilities. This makes Jeffrey divergence in principle more robust than {\sc mvdm} with respect to sparse data. As with {\sc mvdm}, TiMBL offers an optional frequency-thresholded back-off from Jeffrey divergence to the Overlap metric to further remedy some negative effects due to data sparseness. \subsection{Dot-product and cosine metrics} \label{dotproduct} When features have numeric or binary values, TiMBL can also compute the distance between two instances via the dot product (or inner product) of their feature-value vectors. The dot product (which is higher with better matches) is subsequently inverted into a distance by subtracting it from the maximum dot product attainable, i.e. that on an exact match. In Equation~\ref{doteq} this maximal dot product is referred to as $dot_{max}$. \begin{equation} \label{doteq} \Delta(X,Y) = dot_{max} - \sum_{i=1}^{n} w_{i} x_{i} y_{i} \end{equation} As with the other distance metrics incorporated in TiMBL, we include the feature weight $w_{i}$ in the metric. When no weighting is set ({\tt -w 0}), all weights are set to $1.0$, and equation~\ref{doteq} reduces to the normal unweighted dot product. The dot-product metric is typically used with binary vectors or sparse vectors in general. When either $x_{i}$ or $y_{i}$ has a zero value, that value pair is not counted in the dot product. Consequently, the significant deviation from the Overlap metric is that matching values that both have a zero value do not count here, whereas they count as much as any other value match in the Overlap metric. A commonly used variant of the dot product metric, e.g. in information retrieval, is the cosine metric, which corrects for large differences in the length of the instance vectors.
The cosine metric divides the dot product metric by the product of the length of the two vectors. As with the dot product, TiMBL converts the cosine metric similarity to a distance by subtracting it from a $cos_{max}$ term that is larger than the maximal cosine similarity, as given in Equation~\ref{coseq}. Again, feature weighting is included in the formula: \begin{equation} \label{coseq} \Delta(X,Y) = cos_{max} - \frac{\sum_{i=1}^{n} w_{i} x_{i} y_{i}}{\sqrt{\sum_{i=1}^{n} w_{i} x_{i}^2 \sum_{i=1}^{n} w_{i} y_{i}^2}} \end{equation} Due to its internal tree structure, TiMBL is not particularly suited to handle feature vectors with thousands or more features. Many features cause very deep and usually very unbalanced trees, from which retrieval can be rather inefficient (especially when there is little variance in the feature weights). Other internal data structures such as inverted indices are typically more suited to these types of vector spaces. For now, inverted indices are not implemented in TiMBL. \subsection{Distance-weighted class voting} \label{distweightvote} The most straightforward method for letting the $k$ nearest neighbors vote on the class of a new case is the {\em majority voting} method, in which the vote of each neighbor receives equal weight, and the class with the highest number of votes is chosen (or in case of a tie, some tie resolution is performed, cf. Subsection~\ref{tiebreaking}). We can see the voting process of the $k$-NN classifier as an attempt to make an optimal class decision, given an estimate of the conditional class probabilities in a local region of the data space. The radius of this region is determined by the distance of the $k$-furthest neighbor. Sometimes, if $k$ is small, and the data is very sparse, or the class labels are noisy, the ``local'' estimate is very unreliable. As it turns out in experimental work, using a larger value of $k$ can often lead to higher accuracy. 
The reason for this is that in densely populated regions, with larger $k$ the local estimates become more reliable, because they are "smoother". However, when the majority voting method is used, smoothing can easily become oversmoothing in sparser regions of the same data set. The reason for this is that the radius of the $k$-NN region can become extended far beyond the local neighborhood of the query point, but the far neighbors will receive equal influence as the close neighbors. This can result in classification errors that could easily have been avoided if the measure of influence would somehow be correlated with the measure of similarity. To remedy this, we have implemented three types of distance weighted voting functions in TiMBL. A voting rule in which the votes of different members of the nearest neighbor set are weighted by a function of their distance to the query, was first proposed by Dudani~\shortcite{Dudani76}. In this scheme, henceforth referred to as IL (for inverse-linear), a neighbor with smaller distance is weighted more heavily than one with a greater distance: the nearest neighbor gets a weight of 1, the furthest neighbor a weight of 0 and the other weights are scaled linearly to the interval in between (\namecite{Dudani76}, Equation 1.). \begin{equation} \label{dudani_eq} w_{j}= \left \{ \begin{array}{ll} \frac{d_{k} - d_{j}}{d_{k} - d_{1}} & \mbox{if $d_{k} \not= d_{1}$ } \\ 1 & \mbox{if $d_{k} = d_{1}$}\\ \end{array} \right. \end{equation} Where $d_{j}$ is the distance to the query of the $j$'th nearest neighbor, $d_{1}$ the distance of the nearest neighbor, and $d_{k}$ of the furthest ($k$'th) neighbor. Dudani (\namecite{Dudani76}, eq.2,3) further proposed the {\em inverse distance weight} (henceforth ID). In Equation~\ref{inverseweight} a small constant is usually added to the denominator to avoid division by zero~\cite{Wettschereck94}. 
\begin{equation} \label{inverseweight} w_{j}= \frac{1}{d_{j} + \epsilon} \end{equation} Another weighting function considered here is based on the work of \namecite{Shepard87}, who argues for a universal perceptual law which states that the relevance of a previous stimulus for the generalization to a new stimulus is an exponentially decreasing function of its distance in a psychological space (henceforth ED). This gives the weighted voting function of Equation~\ref{expdecayweight}, where $\alpha$ and $\beta$ are constants determining the slope and the power of the exponential decay function. \begin{equation} \label{expdecayweight} w_{j}= e^{-\alpha d_{j}^\beta} \end{equation} Note that in Equations~\ref{inverseweight} and~\ref{expdecayweight} the weight of the nearest and furthest neighbors and the slope between them depend on their absolute distance to the query. This assumes that the relationship between absolute distance and the relevance gradient is fixed over different datasets. This assumption is generally false; even within the same dataset, different feature weighting metrics can cause very different absolute distances. Figure~\ref{dist-weight-fig} visualises a part of the curves of ID and ED, the latter with a few varied settings of $\alpha$ and $\beta$. Generally, both distance weighting functions assign highly differing weights for close neighbors, and less differing weights for more distant neighbors. ID assigns very high votes (distance weights) to nearest neighbors at distances approaching 0.0 -- in effect it assigns absolute preference to exact matches. In contrast, all ED variants have a vote of 1.0 for exact matches, and have a shallower curve than the ID curve for higher distances. Higher values of $\alpha$ in the ED function assign relatively higher weights to exact matches.
When $\beta$ is set to larger values than $1.0$, the ED curve becomes bell-shaped, effectively assigning relatively less different weights between exact-matching neighbors and near-exact matching instances. \begin{figure}[htb] \begin{center} \leavevmode \epsfxsize=0.8\columnwidth \epsffile{distanceweight-ided.eps} \caption{Visualisation of the Inverse Distance weighting function (ID) and three variants of the Exponential Decay distance weighting function (ED) with varying settings of $\alpha$ (a) and $\beta$ (b). } \label{dist-weight-fig} \end{center} \end{figure} Following Dudani's proposal, the benefits of weighted voting for $k$-NN have been discussed widely, e.g. \cite{Bailey+78,Morin+81,MacLeod+87}, but mostly from an analytical perspective. With the popularity of Instance-Based Learning applications, these issues have gained a more practical importance. In his thesis on $k$-NN classifiers, \namecite{Wettschereck94} cites Dudani, but proceeds to work with Equation~\ref{inverseweight}. He tested this function on a large number of datasets and found weak evidence for performance increase over majority voting. An empirical comparison of the discussed weighted voting methods in~\cite{Zavrel97} has shown that weighted voting indeed often outperforms unweighted voting, and that Dudani's original method (Equation~\ref{dudani_eq}) mostly outperforms the other two methods. From that set of experiments, it also seems that Dudani's method shows its optimal performance at much larger values of $k$ than the other voting methods.
The procedure for breaking this tie in the $k$-NN classifier in TiMBL is as follows. First, the value of the $k$ parameter is incremented by $1$, and the additional nearest neighbors at this new $k$th distance are added to the current nearest neighbor set ($k$ is subsequently reset to its user-specified value). If the tie in the class distribution persists, then the class label is selected with the highest overall occurrence in the training set. If that is also equal, then the first class is taken that was encountered when reading the training instance file. Optionally, TiMBL can be set to avoid ties by making a {\em random}\/ choice of a classification from a class distribution in a nearest-neighbor set, weighted by the distribution of the classes in the set. \subsection{Exemplar weighting} \label{exemplar} Exemplar weighting in memory-based learning captures the intuition that some instances are better (more reliable, more typical, more regular) nearest neighbors than others. Classification accuracy could benefit from giving these instances some priority in the $k$-NN classification process. This idea has been explored in the context of on the one hand classification \cite{Salzberg90,Zhang92}, and on the other hand editing bad instances from memory \cite{Aha+91}. \namecite{Salzberg90}, as a classic example, uses {\em class-prediction strength}: the ratio of the number of times the instance type is a nearest neighbor of another instance with the same class and the number of times that the instance type is the nearest neighbor of another instance type regardless of the class. Another example is {\em typicality}\/ as used by \namecite{Zhang92}. Exemplar weights could in principle be used either as weights in the class voting (as distance weights, cf. Subsection~\ref{distweightvote}), or as weights in the distance metric (eq.~\ref{distancew}). TiMBL supports only the latter type, and in this respect exemplar weighting is not an intrinsic part of TiMBL. 
TiMBL does not compute exemplar weighting metrics itself, but only allows users to specify preprocessed exemplar weights with the {\tt -s} input option. Subsequently, when the distance between a test instance and a memory instance is computed, TiMBL uses the memory instance's weight as follows, where $\Delta^{E}(X,Y)$ is the exemplar-weighted distance between instances $X$ and $Y$, and $ew_{X}$ is the exemplar weight of memory instance $X$: \begin{equation} \Delta^{E}(X,Y) = \frac{\Delta(X,Y)}{ew_{X} + \epsilon} \label{exweight} \end{equation} $\epsilon$ is the smallest non-zero number, and is used to avoid division by zero. Exemplar weights approaching zero yield very large distances; relatively higher values yield relatively smaller distances. Note that when a training instance occurs more than once in a training set, TiMBL expects it to have the same example weight with all occurrences; TiMBL cannot handle different example weights for the same instance type. TiMBL produces a warning ({\em Warning: deviating exemplar weight in line \#$\ldots$}), and uses the first weight found for the instance. \section{Indexing optimizations} \label{indexing} The discussion of the algorithm and the metrics in the section above is based on a naive implementation of nearest neighbor search: a flat array of instances which is searched from beginning to end while computing the similarity of the test instance with each training instance. Such an implementation, unfortunately, reveals the flip side of the lazy learning coin. Although learning is very cheap: just storing the instances in memory, the computational price of classification can become very high for large data sets. The computational cost is proportional to $N$, the number of instances in the training set, times $f$, the number of features. In our current implementation of {\sc ib1} we use tree-based indexing to alleviate these costs. 
\subsection{Tree-based indexing} The tree-based memory indexing operation replaces the flat array by a tree structure. Instances are stored in the tree as paths from a root node to a leaf, the arcs of the path are the consecutive feature-values, and the leaf node contains a {\em distribution}\/ of classes, i.e.~a count of how many times which class occurs with this pattern of feature-values.
% (see Figure~\ref{example2}).
Due to this storage structure, instances with identical feature-values are collapsed into a single path, and only their separate class information needs to be stored in the distribution at the leaf node. Many different {\bf tokens} of a particular {\bf instance type} share one path from the root to a leaf node. Moreover, instances which share a prefix of feature-values, also share a partial path. This reduces storage space (although at the cost of some book-keeping overhead) and has two implications for nearest neighbor search efficiency.
%\begin{figure}[htb]
% \begin{center}
% \leavevmode
% \epsfxsize=0.8\columnwidth
% \epsffile{example2.eps}\
% \caption{A tree-structured storage of the instance
% base from figure~\ref{example1}. An exact match for
% the test is in this case directly found by a top down
% traversal of the tree (grey path). If there is no
% exact match, all paths are interpreted as instances
% and the distances are computed. The order of the
% features in this tree is based on Gain Ratio.
% }
% \label{example2}
% \end{center}
%\end{figure}
First, the tree can be searched top-down very quickly for {\em exact matches}. When $k=1$, an exact match ($\Delta(X,Y)=0$) can never be beaten, so then it is possible to omit any further distance computations. The shortcut is built into TiMBL, but by default it is not used with $k>1$. TiMBL does, however, offer the possibility to use the shortcut at any value of $k$, with the command line switch ({\tt +x}).
Using it can speed up classification radically for some types of data, but with $k>1$, the shortcut is not guaranteed to give the same performance (for better or for worse) as classification without it. Second, the distance computation for the nearest neighbor search can re-use partial results for paths which share prefixes. This re-use of partial results is in the direction from the root to the leaves of the tree. When we have proceeded to a certain level of the tree, we know how much similarity (Equation~\ref{overlapeq}) can still contribute to the overall distance (Equation~\ref{distance}), and discard whole branches of the tree which will never be able to rise above the partial similarity of the current least similar nearest neighbor. By doing the search depth first\footnote{Suggested by Gert Durieux.}, the similarity threshold quickly gets initialized to a good value, so that large parts of the search space can be pruned\footnote{With the special command line setting {\tt --silly=true} this tree search shortcut is switched off; as the name of the setting suggests, this is not recommended, except for explicit speed comparisons.}. Disregarding this last constraint on search, the number of feature-value comparisons is equal to the number of arcs in the tree. Thus if we can find an ordering of the features which produces more overlap between partial paths, and hence a smaller tree, we can gain both space and time improvements. An ordering which was found to produce small trees for many of our NLP data sets is Gain Ratio divided by the number of feature-values (this is the default setting). Through the {\tt -T} command line switch, however, the user is allowed to experiment with different orderings. Note that different orderings may only affect classification speed, not the actual classifications. 
\section{IGTree} \label{igtree} Using Information Gain rather than unweighted Overlap distance to define similarity in {\sc ib1} improves its performance on several {\sc nlp} tasks \cite{Daelemans+92b,VandenBosch+93,VandenBosch97}. The positive effect of Information Gain on performance prompted us to develop an alternative approach in which the instance memory is restructured in such a way that it contains the same information as before, but in a compressed decision tree structure. We call this algorithm {\sc igtree}~\cite{Daelemans+97} %(see Figure~\ref{example3} %for an illustration). In this structure, similar to the tree-structured instance base described above, instances are stored as paths of connected nodes which contain classification information. Nodes are connected via arcs denoting feature values. Information Gain is used to determine the order in which instance feature-values are added as arcs to the tree. The reasoning behind this compression is that when the computation of information gain points to one feature clearly being the most important in classification, search can be restricted to matching a test instance to those memory instances that have the same feature-value as the test instance at that feature. Instead of indexing all memory instances only once on this feature, the instance memory can then be optimized further by examining the second most important feature, followed by the third most important feature, etc. Again, compression is obtained as similar instances share partial paths. %\begin{figure}[htb] % \begin{center} % \leavevmode % \epsfxsize=0.7\columnwidth % \epsffile{example3.eps} % \caption{A pruned {\sc igtree} for the instance base % of Figure~\ref{example1}. The classification for % the test instance is found by top down search of the % tree, and returning the class label (default) of the % node after the last matching feature-value (arc). 
Note % that this tree is essentially a compressed version of % the tree in Figure~\ref{example2}. % } % \label{example3} % \end{center} %\end{figure} Because {\sc igtree} makes a heuristic approximation of nearest neighbor search by a top down traversal of the tree in the order of feature relevance, we no longer need to store all the paths. The idea is that it is not necessary to fully store those feature-values of the instance that have lower Information Gain than those features which already fully disambiguate the instance classification. Apart from compressing all training instances in the tree structure, the {\sc igtree} algorithm also stores with each non-terminal node information concerning the {\em most probable} or {\em default} classification given the path thus far, according to the bookkeeping information maintained by the tree construction algorithm. This extra information is essential when processing unknown test instances. Processing an unknown input involves traversing the tree (i.e., matching all feature-values of the test instance with arcs in the order of the overall feature Information Gain), and either retrieving a classification when a leaf is reached (i.e., an exact match was found), or using the default classification on the last matching non-terminal node if an exact match fails. In sum, it can be said that in the trade-off between computation during learning and computation during classification, the {\sc igtree} approach chooses to invest more time in organizing the instance base using Information Gain and compression, to obtain simplified and faster processing during classification, as compared to {\sc ib1} and {\sc ib1-ig}. The generalization accuracy of {\sc igtree} is usually comparable to that of {\sc ib1-ig}; often slightly worse, but sometimes even better. 
The two causes for {\sc IGTree}'s surprisingly good accuracies attained with dramatically faster classification are that (i) most 'unseen' instances contain large parts that fully match stored parts of training instances, and (ii) the probabilistic information stored at non-terminal nodes (i.e., the default classifications) still produces strong `best guesses' when exact matching fails. The difference between the top-down traversal of the tree and precise nearest neighbor search becomes more pronounced when the differences in informativity between features are small. In such a case a slightly different weighting would have produced a switch in the ordering and a completely different tree. The result can be a considerable change in classification outcomes, and hence also in accuracy. However, we have found in our work on NLP datasets that when the goal is to obtain a very fast classifier for processing large amounts of text, the tradeoff between a somewhat lower accuracy against stellar speed increases can be very attractive. It should be noted that by design, {\sc igtree} is not suited for numeric features, as it does not use some type of discretization. If present in data, numbers will simply be treated as literal strings by {\sc igtree}. Moreover, one should realize that the success of {\sc igtree} is determined by a good judgement of feature relevance ordering. Hence {\sc igtree} is not to be used with e.g. ``no weights'' ({\tt -w 0}). Also, setting the {\tt -k} parameter obviously has no effect on {\sc igtree} performance. \section{The TRIBL and TRIBL2 hybrids} \label{tribl} The application of {\sc igtree} on a number of common machine-learning datasets suggested that it is not applicable to problems where the relevance of the predictive features cannot be ordered in a straightforward way, e.g.~if the differences in Information Gain are only very small. In those cases, {\sc ib1-ig} or even {\sc ib1} tend to perform significantly better than {\sc igtree}. 
For this reason we have designed {\sc tribl} \cite{Daelemans+97d} and {\sc tribl2} as hybrid combinations of {\sc igtree} and {\sc ib1}. Both algorithms aim to exploit the trade-off between (i) optimization of search speed (as in {\sc igtree}), and (ii) maximal generalization accuracy. They do that by splitting the classification of new instances into a quick decision-tree ({\sc igtree}) traversal based on the first (most important and most class-disambiguating) features, followed by a slow but relatively accurate $k$-NN ({\sc ib1}) classification based on the remaining less important features. The difference between {\sc tribl} and {\sc tribl2} is that the former algorithm fixes the point in the feature ordering where {\sc igtree} is succeeded by {\sc ib1}, while {\sc tribl2} determines this switching point automatically per classification. We briefly describe both variants. For {\sc tribl}, a parameter ({\tt -q}) determines the switching point in the feature ordering from {\sc igtree} to {\sc ib1}. A heuristic that we have used with some success is based on {\em average feature information gain}; when the Information Gain of a feature exceeds the average Information Gain of all features $+$ one standard deviation of the average, then the feature is used for constructing an {\sc igtree}, including the computation of defaults on nodes. When the Information Gain of a feature is below this threshold, and the node is still ambiguous, tree construction halts and the leaf nodes at that point represent case bases containing subsets of the original training set. During search, the normal {\sc igtree} search algorithm is used, until the case-base nodes are reached, in which case regular {\sc ib1} nearest neighbor search is used on this sub-case-base. {\sc tribl2} does not employ a fixed switching point. Rather, during the classification of an instance it continues to use {\sc igtree} as long as it finds matching feature values in the weighting-governed feature ordering. 
Only when it finds a mismatch does it revert to {\sc ib1} classification on all remaining features. The reasoning behind this mismatch-based switching is that it offers a fairly optimal minimisation of the use of {\sc ib1}; it is only invoked when mismatching occurs, which is the typical point at which {\sc ib1} can improve on decision-tree-style classification, which does not consider the other potentially matching features in the ordering \cite{Daelemans+99}. \section{IB2: Incremental editing} \label{ib2} In memory-based learning it seems sensible to keep any instance in memory that plays a (potentially) positive role in the correct classification of other instances. Alternatively, when it plays no role at all, or when it is disruptive for classification, it may be a good idea to discard, or {\em edit}, it from memory. On top of not harming or even improving generalization performance, the editing of instances from memory could also alleviate the practical processing burden of the $k$-NN classifier kernel, since it would have fewer instances to compare new instances to. This potential double pay-off spawned a distinct line of work on editing in the $k$-NN classifier quite early, starting with \namecite{Hart68} and \namecite{Wilson72}. TiMBL offers an implementation of one particular editing algorithm called {\sc ib2} \cite{Aha+91}, an extension to the basic {\sc ib1} algorithm introduced in the same article. {\sc ib2} implements an incremental editing strategy. Starting from a seed memory filled with a certain (usually small) number of labeled training instances, {\sc ib2} adds instances incrementally to memory only when they are {\em misclassified}\/ by the $k$-NN classifier on the basis of the instances in memory at that point. 
These instances are added, since they are assumed to be representatives of a part of the complete instance space in which they themselves and potentially more nearest-neighbor instances have a particular class different from the class of neighboring instances already in memory. The economical idea behind {\sc ib2} is that this way typically only instances on the boundaries of such areas are stored, and not the insides of the areas; the classification of instances that would be positioned well inside such areas is assumed to be safeguarded by the memorized boundary instances surrounding it. Although {\sc ib2} may optimize storage considerably, its strategy to store all misclassified instances incrementally makes {\sc ib2} sensitive to noise \cite{Aha+91}. It is also as yet unclear what the effect of the size of the seed is. \section{Advanced evaluation metrics} \label{advancedstats} Aside from accuracy (the percentage of correctly classified test instances), TiMBL offers some more evaluation metrics that have become common in information retrieval and machine learning in general, namely precision, recall, and F-score; ROC-space (with dimensions true positive rate and false positive rate); and AUC. We describe these metrics in more detail here. \begin{figure} \begin{center} \epsfig{file=pos-neg.eps, width=0.5\textwidth} \vspace*{-0.5cm} \end{center} \caption{Class-specific confusion matrix containing the basic counts used in the advanced performance metrics. \label{confmat}} \end{figure} Figure~\ref{confmat} displays the general confusion matrix\index{confusion matrix} for one class $C$, splitting all classifications on a test set into four cells. The TP or true positives cell contains a count of examples that have class $C$ and are predicted to have this class correctly by the classifier. The FP or false positives cell contains a count of examples of a different class that the classifier incorrectly classified as $C$. 
The FN or false negatives cell contains examples of class $C$ for which the classifier predicted a different class label than $C$. The TN or true negatives cell contains a count of examples of a different class than $C$ that the classifier correctly did not classify as $C$. On the basis of these four numbers and the total number of positive examples $P=TP+FN$ and negative examples $N=FP+TN$, we can compute the following performance measures: \begin{description} \item[Precision]\index{precision} $= \frac{TP}{TP+FP}$, or the proportional number of times the classifier has correctly made the decision that some instance has class $C$. \item[Recall or True Positive Rate (TPR)]\index{TPR}\index{recall}\index{true positive rate} $= \frac{TP}{P}$, or the proportional number of times an example with class $C$ in the test data has indeed been classified as class $C$ by the classifier. \item[False Positive Rate (FPR)]\index{FPR}\index{false positive rate} $= \frac{FP}{N}$, or the proportional number of times an example with a different class than $C$ in the test data has been classified as class $C$ by the classifier. \item[F-score]\index{F-score} $= \frac{2 \times precision \times recall}{precision + recall}$, or the harmonic mean\index{harmonic mean} of precision and recall \cite{VanRijsbergen79}, is a commonly used metric to summarize precision and recall in one measure. The left part of Figure~\ref{spaces} shows F-score isolines in the two-dimensional space of recall (x-axis) and precision (y-axis). The curvature of the isolines is caused by the harmonic aspect of the formula (in contrast, the normal mean has straight isolines orthogonal to the $x=y$ diagonal), which penalizes large differences between precision and recall. The isolines could be likened to height isolines in a map, where the peak of the hill is at the upper right corner of the space. 
\item [AUC]\index{AUC}\index{area under the curve} or {\em area under the curve}\/ in the so-called ROC\index{ROC space} or {\em receiver operator characteristics}\/\index{receiver operator characteristics} space \cite{Egan75,Swets+00}, is the surface of the grey area in the right graph of Figure~\ref{spaces}. The ROC space is defined by the two dimensions FPR (false positive rate, x-axis) and TPR (true positive rate, or recall, y-axis). The difference with F-score is that it does not make use of the statistically unreliable precision metric; rather, it takes into account all cells of the matrix in Figure~\ref{confmat} including the TN (true negative) cell (for a more detailed description and arguments for using ROC analysis, cf. \cite{Fawcett04}). Its ``peak'' is in the upper left corner, at a FPR of zero and a TPR of 1. Rather than using the harmonic mean, it is common to report on the AUC, area under the classifier's TPR-FPR curve, where in the case of a discrete-output classifier such as {\sc TiMBL} this can be taken to mean the two lines connecting the experiment's TPR and FPR to the $(0,0)$ coordinate and the $(1,1)$ coordinate, respectively; the AUC is then the grey area between these points and coordinate $(1,0)$. \end{description} While these advanced statistics can be computed per class, they can also be averaged to produce a single outcome for a full test set. Common methods for averaging F-scores and AUC scores are micro-averaging and macro-averaging. In micro-averaging, each class' F-score or AUC is weighted proportionally to the frequency of the class in the test set. A macro-average sums the F-scores or AUCs and divides the total by the number of classes in the training set. In computing these averages, TiMBL bases itself on the classes in the training set. When a class does not re-occur in test material, it can have no recall, but it can have precision, hence it is always incorporated in averages. 
A class that occurs in test material but not in training material can never be predicted correctly, and is never included in averages. \begin{figure} \begin{center} \begin{minipage}[t]{0.53\textwidth} \epsfig{file=fspace.eps, width=\textwidth} \end{minipage}\hfill \begin{minipage}[t]{0.47\textwidth} \epsfig{file=roc-auc.eps, width=\textwidth} \end{minipage} \end{center} \caption{Precision--recall space with F-score isolines (left), and ROC space with an experimental outcome marked by the dot, and the outcome's AUC, the shaded surface between the dot and coordinates $(0,0)$, $(1,0)$, and $(1,1)$ (right). \label{spaces}} \end{figure} \section{NLP applications of TiMBL} \label{furtherreading} This section provides a brief historical overview of work, performed in the Tilburg and Antwerp groups and by others, with the application of {\sc mbl}-type algorithms to NLP tasks. For more historical background, see \cite{Daelemans+05}. The Tilburg and Antwerp groups have published a number of articles containing descriptions of the algorithms and specialised metrics collected in TiMBL, usually demonstrating their functioning using NLP tasks. The {\sc ib1-ig} algorithm was first introduced in \cite{Daelemans+92b} in the context of a comparison of memory-based approaches with error-back\-propagation learning for a hyphenation task. Predecessor versions of {\sc igtree} can be found in \cite{Daelemans+93c,VandenBosch+93} where they are applied to grapheme-to-phoneme conversion. See \cite{Daelemans+97} for a description and review of {\sc igtree} and {\sc ib1-ig}. {\sc tribl} is described in \cite{Daelemans+97d}. Experiments with distance-weighted class voting are described in \cite{Zavrel97}. Aspects of using binary-valued (unpacked multi-valued) features are discussed in \cite{VandenBosch+00}. Comparisons between memory-based learning and editing variants are reported in \cite{VandenBosch99,Daelemans+99}. 
A hybrid of TiMBL and the {\sc ripper} rule-induction algorithm \cite{Cohen95} is described in \cite{VandenBosch00,VandenBosch04}. Using TiMBL as a classifier combination method is discussed in \cite{Halteren+01}. \namecite{Raaijmakers00} describes an extension of TiMBL with error-correcting output codes. \namecite{Hendrickx+04} report on an experiment to import maximum-entropy matrices to replace {\sc mvdm} matrices, improving over the maximum-entropy classifier. \namecite{VandenBosch04} presents a search algorithm to find optimal combinations of parameter settings automatically, given a labeled training set of examples, showing large gains over the default settings (also of other machine learning algorithms). Parallelization of TiMBL, through splitting either the training set or the test set in $n$ pieces in shared-memory multi-processor architectures, is explored in \cite{VandenBosch+07b}. The memory-based algorithms implemented in the TiMBL package have been targeted to a large range of Natural Language Processing tasks. Examples of applications in the morpho-phonological and speech areas are hyphenation and syllabification \cite{Daelemans+92b}; classifying phonemes in speech \cite{Kocsor+00}; assignment of word stress \cite{Daelemans+94}; grapheme-to-phoneme conversion \cite{VandenBosch+93,Daelemans+96,Canisius+06}; diminutive formation \cite{Daelemans+98a}; and morphological analysis \cite{VandenBosch+96,VandenBosch+99,Canisius+06}. Although these examples are applied mostly to Germanic languages (English, Dutch, and German), applications to other languages with more complicated writing systems or morphologies, or with limited resources, have also been presented: for example, letter-phoneme conversion in Scottish Gaelic \cite{Wolters+97}, morphological analysis of Arabic \cite{Marsi+05}, or diacritic restoration in languages with a diacritic-rich writing system \cite{Mihalcea02,DePauw+07}. 
At the syntactic sentence level TiMBL has been applied to part of speech tagging \cite{Daelemans+96b,Zavrel+99,Halteren+01}; PP-attachment \cite{Zavrel+97b}; subcategorization \cite{Buchholz98}; phrase chunking \cite{Veenstra98,Sang+99}; shallow parsing \cite{Daelemans+99a,Buchholz+99,Yeh00}; clause identification \cite{Orasan00,Sang01}; detecting the scope of negation markers \cite{Morante+08c}; sentence-boundary detection \cite{Stevenson+00}; predicting the order of prenominal adjectives for generation \cite{Malouf00} and article generation \cite{Minnen+00}; and, beyond the sentence level, to anaphora resolution \cite{Preiss02,Mitkov+02,Hoste05}. More recently, memory-based learning has been integrated as a classifier engine in more complicated dependency parsing systems \cite{Nivre+04,Sagae+05,Canisius+06b}, or dependency parsing in combination with semantic role labeling \cite{Morante+09b}. Memory-based learning has been applied successfully to lexical semantics, in particular to word sense disambiguation \cite{Veenstra+00,Stevenson+99,Kokkinakis00,Mihalcea02,Hoste+02,DeCadt+04}, but also to other lexical semantic tasks such as determining noun countability \cite{Baldwin+03}, animacy \cite{Orasan+01}, and semantic relations within noun compounds \cite{Kim+06b,Nastase+06}. On the textual level, TiMBL has been used for information extraction \cite{Zavrel+00b,Zavrel+03,Ahn06}, text classification \cite{Spitters00}, question classification \cite{Garcia+06,Dridan+07}, spam filtering \cite{Androutsopoulos+00}, named-entity recognition \cite{Buchholz+00,Hendrickx+03,DeMeulder+03,Sporleder+06b,Leveling+06}, and error detection in textual databases \cite{Sporleder+06}. 
In the field of discourse and dialogue modeling, TiMBL has been used for shallow semantic analysis of speech-recognised utterances \cite{Gustafson+99,Krahmer+01,VandenBosch+01,Lendvai+02a,Lendvai+03}, in disfluency detection in transcribed spontaneous speech \cite{Lendvai+03c}, and in classifying ellipsis in dialogue \cite{Fernandez+04}. Relations to statistical language processing, in particular the interesting equivalence relations with back-off smoothing in probabilistic classifiers, are discussed in \cite{Zavrel+97}. Relations between classification-based word prediction and statistical language modeling are identified in \cite{VandenBosch05,VandenBosch06}. In machine translation, $k$-nearest neighbor classification offers a conceptual bridge between example-based machine translation (EBMT) and statistical MT. Pure memory-based approaches are described in \cite{VandenBosch+07,Canisius+09,VandenBosch+09,VanGompel+09}; hybrids with statistical machine translation are presented in \cite{Stroppa+07,Haque+09}. The first dissertation-length study devoted to the approach is \cite{VandenBosch97}, in which the approach is compared to alternative learning methods for NLP tasks related to English word pronunciation (stress assignment, syllabification, morphological analysis, alignment, grapheme-to-phoneme conversion). TiMBL is also central in the Ph.D. theses of \namecite{Buchholz02}, \namecite{Lendvai04}, \namecite{Hendrickx05} \namecite{Hoste05}, \namecite{Keuleers08}, and \namecite{Canisius09}. In 1999 a special issue of the {\em Journal for Experimental and Theoretical Artificial Intelligence} (Vol.~11(3), edited by Walter Daelemans) was devoted to Memory-Based Language Processing. The introduction to this special issue discusses the inspiration sources and alternative developments related to the memory-based approach taken in TiMBL \cite{Daelemans99b}. 
Whereas most work using TiMBL has been oriented towards natural language engineering applications, the linguistic and psycholinguistic relevance of memory-based learning is another focus of research in Antwerp, Tilburg and elsewhere. Work in this area has been done on stress assignment in Dutch \cite{Daelemans+94,Gillis+00}, reading aloud \cite{VandenBosch+00b}, phonological bootstrapping \cite{Durieux+00}, the prediction of linking morphemes in Dutch \cite{Krott+01}, morphology \cite{Eddington00,Eddington03}, and the Dutch plural inflection \cite{Keuleers+07}. A comparison to other analogical methods for linguistics is provided in \cite{Daelemans+97f,Daelemans02}. \ \\ {\it All Tilburg/Antwerp papers referred to in this section, as well as more recent papers, are available in electronic form from the {\sc ILK} home page: {\tt http://ilk.uvt.nl} and the {\sc CLiPS} home page: \\ {\tt http://www.clips.ua.ac.be/}.} \chapter{Software usage and options} \label{reference} \section{Command line options} \label{commandline} The user interacts with TiMBL through the use of command line arguments. When you have installed TiMBL successfully, and you type {\tt Timbl} at the command line without any further arguments, it will print an overview of the most basic command line options. {\footnotesize \begin{verbatim} TiMBL 6.3.0 (c) ILK 1998 - 2010. Tilburg Memory Based Learner Induction of Linguistic Knowledge Research Group, Tilburg University CLiPS Computational Linguistics Group, University of Antwerp Mon Oct 19 22:33:13 2009 usage: Timbl -f data-file {-t test-file} or see: Timbl -h for all possible options \end{verbatim} } If you are satisfied with all of the default settings, you can proceed with just these basics: \begin{description} \item {\tt -f } : supplies the name of the file with the training items. \item {\tt -t } : supplies the name of the file with the test items. \item {\tt -h} : prints a glossary of all available command line options. 
\end{description} The presence of a training file will make TiMBL pass through the first two phases of its cycle. In the first phase it examines the contents of the training file, and computes a number of statistics on it (feature weights etc.). In the second phase the instances from the training file are stored in memory. If no test file is specified, the program exits, possibly writing some of the results of learning to files (see below). If there is a test file, the selected classifier, trained on the present training data, is applied to it, and the results are written to a file the name of which is a combination of the name of the test file and a code representing the chosen algorithm settings. TiMBL then reports the percentage of correctly classified test items. The default settings for the classification phase are: a Memory-Based Learner, with Gain Ratio feature weighting, with $k=1$, and with optimizations for speedy search. If you need to change the settings, because you want to use a different type of classifier, or because you need to make a trade-off between speed and memory-use, then you can use the options that are shown using {\tt -h}. The sections below provide a reference to the use of these command line arguments, and they are roughly ordered by the type of action that the option has effect on. Note that some options (listed with ``{\tt +/-}'') can be turned on ({\tt +}) or off ({\tt -}). \subsection{Algorithm and metric selection} \begin{description} \item {\tt -a or } : determines the classification algorithm. Possible values are: \begin{description} \item {\tt 0} or {\tt IB1} -- the {\sc ib1} ($k$-NN) algorithm (default). See Sections~\ref{mbl} and~\ref{indexing}. \item {\tt 1} or {\tt IGTREE} -- {\sc igtree}, decision-tree-based optimization. See Section~\ref{igtree}. \item {\tt 2} or {\tt TRIBL} -- {\sc tribl}, a hybrid of {\sc ib1} and {\sc igtree}. See Section~\ref{tribl}. 
\item {\tt 3} or {\tt IB2} -- {\sc ib2}, incremental edited memory-based learning. See Section~\ref{ib2}. \item {\tt 4} or {\tt TRIBL2} -- {\sc tribl2}, a non-parametric version of {\sc tribl}. See Section~\ref{tribl}. \end{description} \item {\tt -m $<$string$>$} : determines which distance metrics are used for each feature. The format of this string is as follows:\\ {\tt GlobalMetric:MetricRange:MetricRange}\\ where {\tt GlobalMetric} is used for all features except for the ones that are assigned other metrics by following the restrictions given by {\tt :MetricRange}. A range can be written using commas for lists, and hyphens for intervals. The metric code can be one of the following nine: \begin{itemize} \item {\tt O} -- Overlap (default; see Subsection~\ref{overlap}) \item {\tt M} -- Modified value difference ({\sc mvdm}; see Subsection~\ref{mvdm}) \item {\tt J} -- Jeffrey divergence (see Subsection~\ref{mvdm}) \item {\tt D} -- Dot product (see Subsection~\ref{dotproduct}) \item {\tt C} -- Cosine (see Subsection~\ref{dotproduct}) \item {\tt N} -- Numeric (all features are numeric; see Subsection~\ref{overlap}) \item {\tt L} -- Levenshtein (see Subsection~\ref{overlap}) \item {\tt DC} -- Dice coefficient (see Subsection~\ref{overlap}) \item {\tt I} -- Ignore (ignore specified features) \end{itemize} For example, {\tt -mO:N3:I2,5-7} sets the global metric to overlap, declares the third feature to be numeric, and ignores features 2 and 5, 6, and 7. Ignore {\em can}\/ be the global metric; it must be followed by a {\tt MetricRange} string with metric {\tt O}, {\tt M}, {\tt J}, {\tt D}, or {\tt N} specifying in the range which features are {\em not}\/ ignored. \item {\tt -w $<$n$>$} : chooses between feature-weighting possibilities. The weights are used in the metric of {\sc ib1} and in the ordering of the {\sc igtree}. Possible values are: \begin{description} \item n=0 -- No weighting, i.e. all features have the same importance (weight = 1). 
\item n=1 -- Gain Ratio weighting (default). See section~\ref{infogain}. \item n=2 -- Information Gain weighting. See section~\ref{infogain}. \item n=3 -- Chi-squared ($\chi^2$) weighting. See section~\ref{chisquared}. \item n=4 -- Shared variance weighting. See section~\ref{chisquared}. \item n=$<$filename$>$:$<$number$>$ or n=$<$filename$>$ -- Instead of the five weight settings above we can supply a filename to the {\tt -w} option. This causes TiMBL to read this file and use its contents as weights. If only $<$filename$>$ is given as an argument, the file is supposed to contain one list of feature weights for all features. The $<$filename$>$:$<$number$>$ option assumes that a weights file generated by TiMBL with the {\tt -W} option (and possibly edited by the user) is read back in; the number refers to one of the five numbers above. See section~\ref{weightformat} for a description of the format of weights files. \end{description} \item {\tt -k } : number of nearest neighbors used for extrapolation. Only applicable in conjunction with {\sc ib1} ({\tt -a 0}), {\sc tribl} ({\tt -a 2}), {\sc tribl2} ({\tt -a 4}) and {\sc ib2} ({\tt -a 3}). The default is 1. Especially with the {\sc mvdm} metric it is often useful to determine a good value larger than 1 for this parameter (usually an odd number, to avoid ties). Note that due to ties (instances with exactly the same similarity to the test instance) the number of instances used to extrapolate might in fact be much larger than this parameter. \item {\tt -d } : The type of class voting weights that are used for extrapolation from the nearest neighbor set. {\tt val} can be one of: \begin{itemize} \item {\tt Z} : normal majority voting; all neighbors have equal weight (default). \item {\tt ID}: Inverse Distance weighting. See Section~\ref{distweightvote}, Equation~\ref{dudani_eq}. \item {\tt IL}: Inverse Linear weighting. See Section~\ref{distweightvote}, Equation~\ref{inverseweight}. 
\item {\tt ED::}: Exponential Decay weighting with decay parameters {\tt a} ($\alpha$) and {\tt b} ($\beta$). No spaces are allowed in the string. Parameter {\tt b} can be left unspecified: {\tt ED:} assumes $\beta=1$. The syntax used in previous TiMBL versions ({\tt ED}) is still supported but deprecated. See Section~\ref{distweightvote}, Equation~\ref{expdecayweight}. \end{itemize} \item {\tt -L } : frequency threshold for switching from the {\sc mvdm} or Jeffrey Divergence to the Overlap distance metric. The default is 1 (never switch). When in a pair of matched values one or both values occur less frequently than {\tt n} times in the learning material, TiMBL switches from {\sc mvdm} or Jeffrey Divergence to Overlap. Higher values of {\tt n} force TiMBL to use the Overlap metric more. Only applicable in conjunction with the {\sc mvdm} ({\tt -mM}) and Jeffrey divergence ({\tt -mJ}) distance metrics. \item {\tt -b } : determines n ($\geq 1$), the number of instances, to be taken from the top of the training file, to act as the bootstrap set of memorized instances before {\sc ib2} starts adding new instances. Only applicable in conjunction with {\sc ib2} ({\tt -a 3}). \item {\tt -q } : {\tt n} is the {\sc tribl} offset, the index number of the feature (counting from 1) after which {\sc tribl} should switch from {\sc igtree} to {\sc ib1}. Only applicable in conjunction with {\sc tribl} ({\tt -a 2}). \item {\tt -R } : Resolve ties in the classifier randomly, using a random generator with seed n. {\tt -R } causes the classification to be based on a random pick (with seed n) of a category according to the probability distribution in the nearest neighbor set. By default, {\tt -R} is not used, but rather the deterministic tie resolution scheme described in Subsection~\ref{overlap}. \item {\tt -t $<$@file$>$} : If the filename given after {\tt -t} starts with '{\tt @}', TiMBL will read commands for testing from {\tt file}. 
This file should contain one set of instructions per line. On each line new values can be set for the following command line options: {\tt -B -d -e -k -L -M -o -p -Q -R -t -u +/-v -w +/-x +/-\%}. It is compulsory that each line in {\tt file} contains a {\tt -t $<$file$>$} argument to specify the name of the test file. \item {\tt -t $<$file$>$} : the string {\tt $<$file$>$} is the literal name of the file with the test items. \item {\tt -t leave\_one\_out} : No test file is read, but testing is done on each pattern of the training file, by treating each pattern of the training file in turn as a test case (and the whole remainder of the file as training cases). Only applicable in conjunction with {\sc ib1} ({\tt -a0}). \item {\tt -t cross\_validate} : An $n$-fold cross-validation experiment is performed on the basis of $n$ files (e.g. $1/n$ partitionings of an original data file). The names of these $n$ files need to be in a text file (one name per line) which is given as argument of {\tt -f}. In each fold $f=1 \ldots n$, file number $f$ is taken as test set, and the remaining $n-1$ files are concatenated to form the training set. Only applicable in conjunction with {\sc ib1} ({\tt -a0}). \end{description} \subsection{Input options} \begin{description} \item {\tt -f $<$file$>$} : the string {\tt $<$file$>$} is the literal name of the file with the training items, or (in conjunction with {\tt -t cross\_validate}) the file containing the names of the cross-validation files. \item {\tt -F $<$format$>$} : Force TiMBL to interpret the training and test file as a specific data format. Possible values for this parameter are: {\tt Compact, C4.5, ARFF, Columns, Sparse, Binary} (case-insensitive). The default is that TiMBL guesses the format from the contents of the first line of the data file. ARFF is not automatically detected. See section~\ref{dataformats} for description of the data formats and the guessing rules. The {\tt Compact} format cannot be used with numeric features. \item {\tt -l $<$n$>$} : Feature length. 
Only applicable with the Compact data format; {\tt } is the number of characters used for each feature-value and category symbol. \item {\tt -i } : Skip the first two training phases: instead of processing a training file, read a previously saved (see {\tt -I} option) instance-base or {\sc igtree} from the file {\tt treefile}. See section~\ref{treeformat} for the format of this file. \item {\tt --matrixin=} : Read value distance metrics (such as {\sc mvdm} or Jeffrey divergence matrices written to file with {\tt --matrixout=} : Replace the automatically computed value-class probability matrix with the matrices provided in this file. \item {\tt -P } : Specify a path to read the data files from. This path is ignored if the name of the data file already contains path information. \item {\tt -s}: Use the whitespace-delimited exemplar weights, given after each training instance in the training file {\tt }, during classification. {\tt } may contain exemplar weights, but they are not used in classification. If the test file does not have an exemplar weights column, you must specify {\tt -s1}. Exemplar weights can also be ignored (in both training and test files) by specifying {\tt -s0}. \end{description} \subsection{Output options} \begin{description} \item {\tt -I } : After phase two of learning, save the resulting tree-based representation of the instance-base or {\sc igtree} in a file. This file can later be read back in using the {\tt -i} option (see above). For {\sc igtree} this also automatically saves the current weights into {\tt treefile.wgt} unless this is overridden by {\tt -W}. See section~\ref{treeformat} for a description of the resulting file's format. \item {\tt --matrixout=} : Store calculated {\sc mvdm} or Jeffrey divergence distance metrics in file {\tt filename}. \item {\tt -X } : instead of the proprietary file format written with the {\tt -I} switch, {\tt -X} writes the TiMBL tree into an XML tree in {\tt }. 
This XML file cannot be read back into TiMBL. \item {\tt -W } : Save the currently used feature-weights in a file. \item {\tt -U } : Write the automatically computed value-class probability matrix to this file. \item {\tt -n } : Save the feature-value and target category symbols in a C4.5 style ``names file'' with the name {\tt }. Take caution of the fact that TiMBL does not mind creating a file with ',' '.' '$|$' and ':' values in features; C4.5 will produce errors on this. \item {\tt -p } : Indicate progress during training and testing after every n processed patterns. The default setting is 100,000. \item {\tt -e } : During testing, compute and print an estimate on how long it will take to classify n test patterns. Off by default. \item {\tt +/-v } : Verbosity Level; determines how much information is written to the output during a run. Unless indicated otherwise, this information is written to standard error. The use of {\tt +} turns a given verbosity level {\bf on}, whereas {\tt -} turns it {\bf off} (only useable in non-commandline contexts, such as client/server communication or {\tt -t @testcommandfile}). This parameter can take on the following values (case-insensitive): \begin{description} \item {\tt s}: work silently (turns off all set verbosity levels). \item {\tt o}: show all options set. \item {\tt f}: show calculated feature weights. (default) \item {\tt p}: show {\sc mvdm} matrices. \item {\tt e}: show exact matches. \item {\tt as}: show overall advanced statistics (micro and macro averages of F-score and AUC). \item {\tt cm}: show confusion matrix between actual and predicted classes. \item {\tt cs}: show per-class statistics (precision, recall, true positive rate, false positive rate, F-score, AUC). \item {\tt di}: add the distance of the nearest neighbor to the output file. \item {\tt db}: add class distribution in the nearest neighbor set to the output file. 
\item {\tt md}: add matching depth and node type (N for non-ending node, L for leaf) to output file. \item {\tt k}: add a summary of class distribution information of all nearest neighbors to the output file (sets {\tt -x}) \item {\tt n}: add nearest neighbors to the output file (sets {\tt -x}) \item {\tt b}: provide branching statistics of the internal tree, overall and per level. \end{description} You may combine levels using '{\tt +}' e.g. {\tt +v p+db} or {\tt -v o+di}. \item {\tt -G }: Normalize class distributions generated by {\tt +v db}. \begin{description} \item {\tt 0 (zero)}: Normalize distributions so that they add up to 1.0 \item {\tt 1:}: Smooth by adding floating-point $f$ to all class votes (e.g. {\tt -G1:1} performs add-one smoothing). \end{description} \item {\tt --Beam=}: Limit the number of returned classes and class votes returned by {\tt +v db} to $n$. Default is infinity (no limit). \item {\tt +/- \%} : Write the percentage of correctly classified test instances, the number of correctly classified instances, and the total number of classified instances (one number per line, three lines in total) to a file with the same name as the output file, but with the suffix ``{\tt .\%}''. \item {\tt -o $<$filename$>$} : Write the test output to filename. Useful for different runs with the same settings on the same testfile, where the default output file name would normally be the same. \item {\tt -O $<$path$>$} : Write all output to the path given here. The default is to write all output to the directory where the test file is located. \item {\tt -V} : Show the TiMBL version number. \end{description} \subsection{Internal representation options} \begin{description} \item {\tt -N } : (maximum) number of features. Obligatory for Sparse and Binary formats. When larger than a pre-defined constant (default 2500), N needs to be set explicitly for all algorithms.
\item {\tt +/- x} : turns the shortcut search for exact matches on or off in {\sc ib1} (and {\sc ib2}, {\sc tribl}, and {\sc tribl2}). The default is to be off ({\tt -x}). Turning it on makes {\sc ib1} generally faster, but with $k>1$ the shortcut produces different results from a genuine $k$ nearest neighbors search, since absolute preference is given to the exact match. \item {\tt -M } : Set the maximum number of nearest neighbors printed using the {\tt +vn} verbosity option. By default this is set to 500, but when you are interested in the contents of really large nearest neighbor sets (which is possible with large $k$ or large data sets with few features), {\tt n} can be increased up to 100,000. \item {\tt +/- H} : Turn on/off hashing of feature values and class labels in TiMBL trees. Hashing is done by default, but with short (e.g. one-character) feature values and/or classes less memory is used when hashing is set off. \item {\tt -B } : Number of bins used for discretizing numeric data (only used for computing feature weights). \item {\tt -c } : Clipping (threshold) frequency for prestoring {\sc mvdm} matrices. Cells in the matrix are only stored if both feature values occur more than {\tt } times. \item {\tt -T } : Set the ordering of the TiMBL tree (with {\sc ib1} and {\sc ib2}), i.e., rank the features according to the metric identified by {\tt }. The default ordering is {\tt G/V} (according to gain ratio divided by the number of values), but some orderings may produce faster classification. Note that different orderings do {\em not}\/ change the classification behavior of {\sc ib1} and {\sc ib2}. 
{\tt } can take the following values: \begin{description} \item {\tt DO}: no ordering (the ordering of the features in the data file is taken) \item {\tt GRO}: gain ratio (eq.~\ref{IGgainratio}) \item {\tt IGO}: information gain (eq.~\ref{IGgain}) \item {\tt 1/V}: $1/V$, where $V$ is the number of values \item {\tt G/V}: gain ratio divided by the number of values \item {\tt I/V}: information gain divided by the number of values \item {\tt X2O}: \chisq \ (eq.~\ref{chisq-eq}) \item {\tt X/V}: \chisq \ divided by the number of values \item {\tt SVO}: shared variance (eq.~\ref{shared-variance-eq}) \item {\tt S/V}: shared variance divided by the number of values \item {\tt GxE}: gain ratio $\times si$, where $si$ is the split info of the feature (eq.~\ref{splitinfo}) \item {\tt IxE}: information gain $\times si$ \item {\tt 1/S}: $1/si$ \end{description} \end{description} \subsection{Hidden options} The commandline interface to TiMBL contains several hidden options that have been built in over time for particular reasons. Some have survived over time, and although their use is not for the faint-hearted, some may offer interesting functionalities. A small list of disclosed hidden options follows. \begin{description} \item {\tt --sloppy=\{true|false\}}: in combination with leave-one-out (LOO) testing, this option turns off all weight recomputation. By default, leaving one training example out causes all feature weights, value-class matrices, and derived metrics such as {\sc mvdm} to be recomputed, because strictly the example-specific statistics should be absent when it is held out and classified. {\tt --sloppy} skips this, causing a significant speedup, and usually slightly better LOO scores. Use only if your experimental method allows it. Default value is {\tt false}. \item {\tt --silly=\{true|false\}}: set to {\tt true}, switches off the optimized nearest-neighbor search in {\sc ib1} and {\sc tribl}.
This causes TiMBL to compare all feature values of a test instance to full paths in the TiMBL tree. This causes TiMBL to slow down dramatically on most datasets. Setting is available to enable testing the effect of optimized search. Default value is {\tt false}. \item {\tt --Diversify}: modifies all feature weights by subtracting the smallest weight (plus $\epsilon$) from all weights. The smallest weight thus becomes $\epsilon$. This modification ``diversifies'' the feature weights, and was introduced to enhance the effect of {\sc Dimbl}, the multi-CPU variant of TiMBL\footnote{For {\sc Dimbl}, see \url{http://ilk.uvt.nl/dimbl}}. \end{description} \section{File formats} \label{fileformats} This section describes the format of the input and output files used by TiMBL. Where possible, the format is illustrated using the classical ``objects'' data set, which consists of 12 instances of 5 different everyday objects (nut, screw, key, pen, scissors), described by 3 discrete features (size, shape, and number of holes). \subsection{Data files} \label{dataformats} The training and test sets for the learner consist of descriptions of instances in terms of a fixed number of feature-values. TiMBL supports a number of different formats for the instances, but they all have in common that the files should contain one instance per line. The number of instances is determined automatically, and the format of each instance is inferred from the format of the first line in the training set. The last feature of the instance is assumed to be the target category. Should the guess of the format by TiMBL turn out to be wrong, you can force it to interpret the data as a particular format by using the {\tt -F} option. Note that TiMBL, by default, will interpret features as having {\em symbolic, discrete values}. Unless you specify explicitly that certain features are numeric, using the {\tt -m} option, TiMBL will interpret numbers as just another string of characters.
If a feature is numeric, its values will be scaled to the interval [0,1] for purposes of distance computation (see Equation~\ref{overlapeq}). The computation of feature weights will be based on a discretization of the feature. Once TiMBL has determined the input format, it will skip and complain about all lines in the input which do not respect this format (e.g.~have a different number of feature-values with respect to that format). During testing, TiMBL writes the classifications of the test set to an output file. The format of this output file is by default the same as the input format, with the addition of the predicted category being appended after the correct category. If we turn on higher levels of verbosity, the output files will also contain distributions, distances and nearest neighbor sets. \subsubsection{Column format} \label{comlumnformat} The {\bf column format} uses white space as the separator between features. White space is defined as a sequence of one or more spaces or tab characters. Every instance of white space is interpreted as a feature separator, so it is not possible to have feature-values containing white space. The column format is auto-detected when an instance of white space is detected on the first line {\em before a comma has been encountered}. The example data set looks like this in the column format: \begin{footnotesize} \begin{verbatim} small compact 1 nut small long none screw small long 1 key small compact 1 nut large long 1 key small compact none screw small compact 1 nut large long none pen large long 2 scissors large long 1 pen large other 2 scissors small other 2 key \end{verbatim} \end{footnotesize} \subsubsection{C4.5 format} \label{c45format} This format is a derivative of the format that is used by the well-known C4.5 decision tree learning program~\cite{Quinlan93}. The separator between the features is a comma, and the category (viz. 
the last feature on the line) is followed by a period (although this is not mandatory: TiMBL is robust to missing periods)\footnote{The periods after the category are not reproduced in the output}. White space within the line is taken literally, so the pattern {\tt a,\ b\ c,d} will be interpreted as {\tt `a',`\ b\ c',`d'}. An exception is the class label, which should not contain any whitespace. When using this format, especially with linguistic data sets or with data sets containing floating point numbers, one should take special care that commas do not occur as feature values and that periods do not occur within the category. Note that TiMBL's C4.5 format does not require a so-called {\em namesfile}. However, TiMBL can produce such a file for C4.5 with the {\tt -n} option. The C4.5 format is auto-detected when a comma is detected on the first line {\em before any white space has been encountered}. The example data set looks like this in the C4.5 format: \begin{footnotesize} \begin{verbatim} small,compact,1,nut. small,long,none,screw. small,long,1,key. small,compact,1,nut. large,long,1,key. small,compact,none,screw. small,compact,1,nut. large,long,none,pen. large,long,2,scissors. large,long,1,pen. large,other,2,scissors. small,other,2,key. \end{verbatim} \end{footnotesize} \subsubsection{ARFF format} \label{arffformat} ARFF is a format that is used by the WEKA machine learning workbench~\cite{Garner95,Witten+99}\footnote{WEKA is available from the Waikato University Department of Computer Science, \url{http://www.cs.waikato.ac.nz/~ml/weka}}. Although TiMBL at present does {\em not}\/ entirely follow the ARFF specification, it still tries to do as well as it can in reading this format. The ARFF format is {\em not}\/ autodetected, and needs to be specified on the commandline with {\tt -F ARFF}. In ARFF data, the actual data are preceded by information on feature types, feature names, and names of values in case of symbolic features.
TiMBL ignores all of these lines, and starts reading data from after the {\tt @data} statement until the end of the file. Feature-values are supposed to be separated by commas; white space is deleted entirely, so the pattern {\tt a, b c,d} will be interpreted as {\tt `a',`bc',`d'}. There should be no whitespace in class labels. We hope to include better support for the ARFF format in future releases. \begin{footnotesize} \begin{verbatim} % There are 4 attributes. % There are 12 instances. % Attribute information: Ints Reals Enum Miss % 'size' 0 0 12 0 % 'shape' 0 0 12 0 % 'n_holes' 9 0 3 0 % 'class.' 0 0 12 0 @relation 'example.data' @attribute 'size' { small, large} @attribute 'shape' { compact, long, other} @attribute 'n_holes' { 1, none, 2} @attribute 'class.' { nut., screw., key., pen., scissors.} @data small,compact,1,nut. small,long,none,screw. small,long,1,key. small,compact,1,nut. large,long,1,key. small,compact,none,screw. small,compact,1,nut. large,long,none,pen. large,long,2,scissors. large,long,1,pen. large,other,2,scissors. small,other,2,key. \end{verbatim} \end{footnotesize} \subsubsection{Compact format} \label{compactformat} The compact format is especially useful when dealing with very large data files. Because this format does not use any feature separators, file-size is reduced considerably in some cases. The price of this is that all features and class labels must be of equal length (in characters) and TiMBL needs to know beforehand what this length is. You must tell TiMBL by using the {\tt -l} option. The compact format is auto-detected when neither of the other formats applies. 
The same example data set might look like this in the compact format (with two characters per feature): \begin{footnotesize} \begin{verbatim} smco1_nu smlonosc smlo1_ke smco1_nu lalo1_ke smconosc smco1_nu lalonope lalo2_sc lalo1_pe laot2_sc smot2_ke \end{verbatim} \end{footnotesize} \subsubsection{Sparse format} The sparse format is relevant for data with features of which a significant portion of the values is $0.0$ (numeric), $0$ (binary), or some ``null'' symbolic value. Storing only the non-null values typically takes less space on disk. Consider, for example, a data set in text classification with 10,000 features each representing the tf*idf weight of a term. It would be uneconomical to store instances as long lines of \begin{footnotesize} \begin{verbatim} 0.02, 0.0, 0.0, 0.0, 0.54, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ... , 0.01,sports \end{verbatim} \end{footnotesize} The Sparse format allows one to store such an instance as \begin{footnotesize} \begin{verbatim} (1,0.02)(5,0.54)...(10000,0.01)sports \end{verbatim} \end{footnotesize} That is, a sequence of ($index$,$value$) expressions between parentheses each indicating that the feature number $index$ has value $value$, with the class label at the end, directly following the last parenthesis. The feature index is assumed to start at 1. In case of symbolic values, whitespace included in the parentheses is considered significant (i.e., part of the values). A case with only null values can be represented as either {\tt class} or {\tt ,class}. This option must be specified by the user ({\tt -F Sparse}); it is not guessed from the data. It must also be accompanied by a user declaration of the number of features ({\tt -N }). \subsubsection{Sparse Binary format} \label{binaryformat} The sparse binary format, a simplified version of the Sparse format, is especially useful when dealing with large numbers of two-valued (binary) features, of which each case only has a very few active ones, such as e.g. in text categorization.
Thus instead of representing a case as: \begin{footnotesize} \begin{verbatim} 1,0,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,1,small. \end{verbatim} \end{footnotesize} We can represent it as: \begin{footnotesize} \begin{verbatim} 1,8,11,12,18,small. \end{verbatim} \end{footnotesize} This format allows one to specify only the index numbers of the active features (indexes start at one), while implicitly assuming that the value for all the remaining features is zero. Because each case has a different number of active features, we must specify in some other way what the actual number of features is. This must be done using the {\tt -N} option. As the format is very similar to numeric features, it must always be user-declared using {\tt -F Binary}. The last feature of a line is always interpreted as being the category string. A case with only zeroes can be represented as either `{\tt class} or {\tt ,class}. \subsection{Weight files} \label{weightformat} The feature weights used for computing similarities and for the internal organization of the memory base can be saved to a file {\tt -W}. These files can be read back into TiMBL with {\tt -w $<$filename$>$:$<$weight number$>$}, where the weight number refers to the five options in TiMBL. It is also possible to change these files manually before reading them in -- and additionally it is also possible to write a file from scratch and read this into TiMBL. This allows the experimenter to handcraft feature weights. The generic format for the weights file is as follows. The weights file may contain comments on lines that start with a \# character. The other lines contain the number of the feature followed by its numeric weight. An example of such a file is provided below. The numbering of the weights starts with 1. In this example, the data set has three features. \begin{footnotesize} \begin{verbatim} # DB Entropy: 2.29248 # Classes: 5 # Lines of data: 12 # Fea. 
Weight 1 0.765709 2 0.614222 3 0.73584 \end{verbatim} \end{footnotesize} Weight files written by TiMBL are of the same format, but write all weights in a concatenation, separated by \# lines that carry the abbreviated name of the weight (nw, gr, ig, x2, sv). The following example illustrates this format (which can be edited manually, as long as the same number of lines is kept): \begin{footnotesize} \begin{verbatim} # DB Entropy: 1.61789 # Classes: 5 # Lines of data: 2999 # nw # Fea. Weight 1 1 2 1 3 1 # # gr # Fea. Weight 1 0.0428445870345557 2 0.185070180760327 3 0.325371814230901 # # ig # Fea. Weight 1 0.213887591411729 2 0.669704582861074 3 1.27807624584789 # # sv # Fea. Weight 1 0.0762436694064095 2 0.233998145488354 3 0.596896311429044 # # x2 # Fea. Weight 1 914.619058199289 2 2807.0417532783 3 7160.36815190281 # \end{verbatim} \end{footnotesize} \subsection{Value difference files} \label{mvdmformat} Using the {\sc mvdm} metric, it can sometimes be interesting to inspect the matrix of conditional class probabilities from Equation~\ref{MVDMeq}. By using the {\tt -U} option, we can write the computed matrix to a file. This way we can see which values are considered to be similar by the metric. For each feature a row vector is given for each value, of the conditional probabilities of all the classes (columns) given that value. \begin{footnotesize} \begin{verbatim} targets A, B, C, D, E. 
feature # 1 Matrix: small 0.429 0.286 0.286 0.000 0.000 large 0.000 0.000 0.200 0.400 0.400 feature # 2 Matrix: compact 0.750 0.250 0.000 0.000 0.000 long 0.000 0.167 0.333 0.333 0.167 other 0.000 0.000 0.500 0.000 0.500 feature # 3 Matrix: 1 0.500 0.000 0.333 0.167 0.000 none 0.000 0.667 0.000 0.333 0.000 2 0.000 0.000 0.333 0.000 0.667 \end{verbatim} \end{footnotesize} As long as this format is observed, the file can be modified (manually or by substituting some other vector-based representations for the values), and the new matrix can be read in and used with the {\sc mvdm} metric. \subsection{Tree files} \label{treeformat} Although the learning phase in TiMBL is relatively fast, it can be useful to store the internal representation of the data set both for later usage and for faster subsequent learning. In TiMBL, the data set is stored internally in a tree structure (see Section~\ref{indexing}). When using {\sc ib1}, this tree representation contains all the training cases as full paths in the tree. When using {\sc igtree}, unambiguous paths in the tree are pruned before it is used for classification or written to file; on the same data, {\sc igtree} trees are usually considerably smaller than {\sc ib1} trees. In either tree type, the arcs represent feature values and nodes contain class distribution information. The features are in the same order throughout the tree. This order is either determined by memory-size considerations in {\sc ib1}, or by feature relevance in {\sc igtree}. It can explicitly be manipulated using the {\tt -T} option. We strongly advise to refrain from manually editing the tree file. However, the syntax of the tree file is as follows. 
First a header consisting of information about the status of the tree, the feature-ordering (the permutation from the order in the data file to the order in the tree), and the presence of numeric features is provided\footnote{Although in this header each line starts with '\#', these lines cannot be seen as comment lines.}. Subsequently, unless hashing has been set off explicitly ({\tt -H}), a legenda is given of numeric hash codes for the class names (one unique integer per class) and feature value names (one unique integer per value). Subsequently, the tree's nodes and arcs are given in a proprietary non-indented bracket notation. Starting from the root node, each node is denoted by an opening parenthesis ``('', followed by an integer coding the default class. After this, there is the class distribution list, within curly braces ``\{ \}'', containing a non-empty list of category codes followed by integer counts. After this comes an optional comma-separated list of arcs to child nodes, within ``[ ]'' brackets. An arc is labeled with a coded feature value. The node that the arc leads to again has a class distribution, and any number of child nodes pointed to by arcs. The {\sc ib1} tree constructed from our example data set looks as follows: \begin{footnotesize} \begin{verbatim} # Status: complete # Permutation: < 1, 3, 2 > # Numeric: . # Version 4 (Hashed) # Classes 1 nut 2 screw 3 key 4 pen 5 scissors Features 1 small 2 compact 3 1 4 long 5 none 6 large 7 2 8 other (1{ 1 3, 2 2, 3 3, 4 2, 5 2 }[1(1[3(1[2(1{ 1 3 }) ,4(3{ 3 1 }) ] ) ,5(2[2(2{ 2 1 }) ,4(2{ 2 1 }) ] ) ,7(3[8(3{ 3 1 }) ] ) ] ) ,6(4[3(3[4(3{ 3 1, 4 1 }) ] ) ,5(4[4(4{ 4 1 }) ] ) ,7(5[4(5{ 5 1 }) ,8(5{ 5 1 }) ] ) ] ) ] ) \end{verbatim} \end{footnotesize} The corresponding compressed {\sc igtree} version is considerably smaller. \begin{footnotesize} \begin{verbatim} # Status: pruned # Permutation: < 1, 3, 2 > # Numeric: . 
# Version 4 (Hashed) # Classes 1 nut 2 screw 3 key 4 pen 5 scissors Features 1 small 2 compact 3 1 4 long 5 none 6 large 7 2 8 other (1{ 1 3, 2 2, 3 3, 4 2, 5 2 }[1(1{ 1 3, 2 2, 3 2 }[3(1{ 1 3, 3 1 }[4(3{ 3 1 }) ] ) ,5(2{ 2 2 }) ,7(3{ 3 1 }) ] ) ,6(4{ 3 1, 4 2, 5 2 }[3(3{ 3 1, 4 1 }) ,7(5{ 5 2 }) ] ) ] ) \end{verbatim} \end{footnotesize} TiMBL tree files generated by versions 1.0 to 3.0 of TiMBL, which do not contain hashed class and value names, are no longer recognized in current TiMBL versions. Backward compatibility to trees generated by versions 1.0 to 3.0 is preserved in TiMBL version 4 up to release 4.3.1. \clearpage \bibliographystyle{fullname} \bibliography{../../ilkbib/ilk} \end{document} LanguageMachines-timbl-642727d/docs/texfiles/Timbl_6.4_Manual.tex000066400000000000000000005153771451477526200245620ustar00rootroot00000000000000% TiMBL 6.4 manual \documentclass{book} \usepackage{epsf} \usepackage{epsfig} \usepackage{a4wide} \usepackage{palatino} \usepackage{fullname} \usepackage{url} \newcommand{\chisq}{{$ \chi^2 $}} \author{Walter Daelemans$*$ \and Jakub Zavrel$\dagger$ \and Ko van der Sloot$\dagger\dagger$\\ \and Antal van den Bosch$\dagger\dagger$\\ \ \\ (*) CLiPS - Computational Linguistics Group\\ Department of Linguistics \\ University of Antwerp\\ \\ ($\dagger$) Textkernel B.V.\\ \\ % (**) Induction of Linguistic Knowledge Research Group\\ % Tilburg center for Cognition and Communication \\ % Tilburg University \\ \\ ($\dagger\dagger$) Centre for Language Studies \\ Radboud University Nijmegen \\ \\ URL: http://ilk.uvt.nl\thanks{This document is available from http://ilk.uvt.nl/downloads/pub/papers/ilk.XXXX.pdf. 
All rights reserved Centre for Language Studies, Nijmegen \ Induction of Linguistic Knowledge, Tilburg University and CLiPS, University of Antwerp.}} \title{{\huge TiMBL: Tilburg Memory-Based Learner} \\ \vspace*{0.5cm} {\bf version 6.4} \\ \vspace*{0.5cm}{\huge Reference Guide}\\ \vspace*{1cm} {\it ILK Technical Report -- ILK 11-01}} %better paragraph indentation \parindent 0pt \parskip 9pt \begin{document} \pagenumbering{roman} \maketitle \tableofcontents \chapter*{Preface} Memory-Based Learning ({\sc mbl}) is an elegantly simple and robust machine-learning method applicable to a wide range of tasks in Natural Language Processing (NLP). Starting from our research group at Tilburg University, we have been working since the end of the 1980s on the development of Memory-Based Learning techniques and algorithms. The foundations are bundled in \namecite{Daelemans+05}. Section~\ref{furtherreading} provides a historical overview of work on the application of {\sc mbl} in NLP. With the establishment of the ILK (Induction of Linguistic Knowledge) research group in 1997, and with the increasing use of {\sc mbl} at the CNTS (now CLiPS) research group of the University of Antwerp, the need for a well-coded and uniform tool for our main algorithms became more urgent. TiMBL was the result of combining ideas from a number of different {\sc mbl} implementations, cleaning up the interface, and using a whole bag of tricks to make it more efficient. We think it has become a useful tool for NLP research, and, for that matter, for many other domains where classification tasks are learned from examples, so we started to release the software in 1999. With the release of the sixth version of TiMBL we moved to releasing our software under the GPL license, for anyone to use, adapt and improve under the conditions stated in the license. 
Memory-Based Learning is a direct descendant of the classical $k$-Nearest Neighbor ($k$-NN) approach to classification, which has become known as a powerful pattern classification algorithm for numeric data. In typical NLP learning tasks, however, the focus is on discrete data, very large numbers of examples, and many attributes of differing relevance. Moreover, classification speed is a critical issue in any realistic application of Memory-Based Learning. These constraints demand non-trivial data structures and speedup optimizations for the core $k$-NN classifier. Our approach has resulted in an architecture which compresses the typical flat file organization found in straightforward $k$-NN implementations, into a decision-tree structure. While the decision tree can be used to retrieve the exact $k$-nearest neighbors (as happens in the {\sc ib1} algorithm within TiMBL), it can also be deterministically traversed as in a decision-tree classifier (the method adopted by the {\sc IGTree} algorithm). We believe that our optimizations make TiMBL one of the fastest discrete $k$-NN implementations around. The main effort in the development and maintenance of this software was and continues to be invested by Ko van der Sloot. The code started as a rewrite of {\tt nibl}, a piece of software developed by Peter Berck from a Common Lisp implementation by Walter Daelemans of {\sc ib1-ig}. Some of the index optimizations in TiMBL are due to Jakub Zavrel. The code has benefited substantially from trial, error and scrutiny by all past and present members of the ILK and CLiPS (formerly CNTS) groups in Tilburg and Antwerp. We are furthermore indebted to Ton Weijters of Eindhoven Technical University for his inspirational early work on $k$-NN and for his ideas on {\sc IGTree}. 
Our sincere thanks go to the many users of TiMBL who have contributed to it immensely by giving us feedback and reporting bugs, and to the two organisations that have supported and enabled its development: NWO, the Netherlands Organization for Scientific Research, and the School of Humanities of Tilburg University. NWO funding has spanned three subsequent periods totalling fourteen years. From 1997 until 2001 development was part of the ``Induction of Linguistic Knowledge'' research project funded by NWO's {\em Pionier}\/ programme. Between 2001 and 2006 it was funded as part of the ``Memory Models of Language'' research project under the NWO {\em Vernieuwingsimpuls}\/ programme, and between 2006 and 2011 as part of the ``Implicit Linguistics'' research project under the NWO Vici programme. The current release (version 6.4) coincides with a release of a Debian Science package for TiMBL. Its most significant change is a multiprocessor option that ``clones'' TiMBL internally so that it can classify a test set on $n$ available processors. An elaborate description of the changes from version 1.0 up to 6.4 can be found in Chapter~\ref{changes}. Although all new features have been tested for some time in our research groups, the software may still contain bugs and inconsistencies in some places. We would appreciate it if you would send bug reports, ideas about enhancements of the software and the manual, and any other comments you might have, to {\tt Timbl@uvt.nl}. This reference guide is structured as follows. In Chapter~\ref{license} you can find the terms of the open source license according to which you are allowed to use TiMBL. The subsequent chapter gives some instructions on how to install the TiMBL package on your computer. Chapter~\ref{changes} lists the changes that have taken place up to the current version. Next, Chapter~\ref{tutorial} offers a quick-start tutorial for readers who want to get to work with TiMBL right away. 
The tutorial describes, step-by-step, a case study with a sample data set (included with the software) representing the linguistic domain of predicting the diminutive inflection of Dutch nouns. Readers who are interested in the theoretical and technical details of Memory-Based Learning and of this implementation can refer to Chapter~\ref{algorithms}. Chapter~\ref{reference} provides full reference to the command line options of TiMBL and supported file formats. \chapter{GNU General Public License} \label{license} \pagenumbering{arabic} TiMBL is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. TiMBL is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with TiMBL. If not, see $<$http://www.gnu.org/licenses/$>$. In publication of research that makes use of TiMBL 6.4, a citation should be given of: {\em ``Walter Daelemans, Jakub Zavrel, Ko van der Sloot, and Antal van den Bosch (2012). TiMBL: Tilburg Memory Based Learner, version 6.4, Reference Guide. ILK Technical Report XX-XX Available from \\ {\tt http://ilk.uvt.nl/downloads/pub/papers/ilkXXXX.pdf}''} For information about commercial licenses for TiMBL 6.4, contact {\tt Timbl@uvt.nl}, or send your request in writing to: Prof. dr.~Walter Daelemans\\ CLiPS - Language Technology Group\\ Dept. 
of Linguistics \\ University of Antwerp\\ Prinsstraat 13, L-203, B-2000 Antwerp \\ Belgium \pagestyle{headings} \chapter{Installation} \vspace{-1cm} You can get the TiMBL package as a gzipped tar archive from: {\tt http://ilk.uvt.nl/timbl} Following the links from that page, you can download the file {\tt timbl-latest.tar.gz}. This file contains the complete source code (C++) for the TiMBL program, a few sample data sets, the license, and documentation. The installation should be relatively straightforward on most UNIX systems. To install the package on your computer, unzip the downloaded file ({\tt >} is the command line prompt): {\tt > tar xfz timbl-latest.tar.gz} This will make a directory {\tt timbl-6.4} under your current directory. (The numbering may be higher; the current guide refers to version 6.4). Alternatively you can do: {\tt > gunzip timbl-6.4.tar.gz} and unpack the tar archive: {\tt > tar xf timbl-6.4.tar} Go to the timbl-6.4 directory, and configure the package by typing {\tt > cd timbl-6.4} \\ {\tt > ./configure --prefix=} If you do not use the {\tt --prefix} option, TiMBL will try to install itself in the directory {\tt /usr/local/}. If you do not have {\tt root} access you can specify a different installation location such as {\tt \$HOME/install}. It is not obligatory to install TiMBL, but if you plan to install TiMBL-based extensions such as TimblServer\footnote{\url{http://ilk.uvt.nl/timbl}}, Mbt\footnote{\url{http://ilk.uvt.nl/mbt}}, or Frog\footnote{\url{http://ilk.uvt.nl/frog}}, or you want to build your own extensions using the TiMBL API, installing is the best choice. After {\tt configure} you can build TiMBL: {\tt > make} and (as recommended) install: {\tt > make install } If the process was completed successfully, you should now have an executable file named {\tt timbl} (low caps) in the directory {\tt /bin}, and a static library {\tt libTimbl.a} in the directory {\tt /lib}. 
Additionally, several demo programs named {\tt api\_test*}, {\tt classify} and {\tt tse} are created in the {\tt ./demos} subdirectory. Within the {\tt } directory a subdirectory is also created: {\tt share/doc/timbl} where the TiMBL 6.4 documentation can be found, and which in turn contains a subdirectory {\tt examples} with example data files. Some of these data sets are used in the Quick Start Section~\ref{tutorial} of this document; other data and source files are referred to in the API documentation. The latter, along with a pdf version of this document, can also be found in the {\tt doc} directory. Note that the API documentation is a beta-state document. TiMBL should now be ready for use. If you want to run the examples and demos from this manual, you should act as follows: \begin{itemize} \item Be sure to add {\tt /bin} to your PATH. In many shells something like {\tt > export PATH=\$PATH:/bin } will do. \item copy all the files from {\tt /share/doc/timbl/examples} to some working location. (By default, TiMBL writes its results to the directory where it finds the data.) \item and test: {\tt cd} to the working location, and then {\tt timbl -f dimin.train -t dimin.test} \end{itemize} If you did not install TiMBL, the executable can be found in the {\tt src} directory of the build. The demo files can be found in the {\tt demo} directory. The email address for problems with the installation, bug reports, comments and questions is {\tt Timbl@uvt.nl}. \chapter{Changes} \label{changes} This chapter gives a brief overview of the changes from all previously released versions (1.0 up to 6.4) for users already familiar with the program. \section{From version 6.3 to 6.4} \begin{itemize} \item With {\tt --clones=}, TiMBL can be told to classify a test set using $n$ processors in parallel. This option will speed up classification near-linearly with respect to $n$, although with larger $n$ the overhead causes the speedup to be increasingly sub-linear. 
\item All server-related functionality is removed from TiMBL. A new TimblServer package is available which provides the same interface as Timbl up to version 6.3, but also adds some extra features, such as running multiple but separate experiments on one TCP port. See the TimblServer package for more details.
\end{itemize} \section{From version 6.1 to 6.2} Version 6.2 differs from 6.1 in a great number of internal changes aimed at making the code better maintainable and extendible, in some minor bug fixes, and in the following more prominent changes: \begin{itemize} \item A new distance metric, the Dice coefficient, has been added; the metric can be set with {\tt -mDC}. Analogous to the Levenshtein ({\tt -mL}) metric, the Dice coefficient operates at the feature value level; it computes the overlap in character bigrams of two value strings. \item Value difference matrices, as used by the {\sc mvdm} and Jeffrey divergence distance metrics, can now be written to file, and read into TiMBL, allowing for user-defined value difference metrics to be used. The new command line options are {\tt --matrixout=} and {\tt --matrixin=}. \item The {\sc IGTree} algorithm has been optimized beyond the improvements introduced in version 6.0. With very large training sets, {\sc IGTree} was reported to be exponentially slower in the later stages of training. Trees are now built in near-linear time. \end{itemize} Finally, besides minor bug fixes, a great number of internal changes were made to make the code better maintainable and extendible. \section{From version 6.0 to 6.1} Version 6.1 differs from 6.0 mainly in the changed configuration. It is now based on autotools and is delivered as an installable package. Some bugs have been fixed as well. \section{From version 5.1 to 6.0} Version 6.0 differs from 5.1 firstly in terms of internal changes aimed at increasing classification speed and lowering memory usage, of which the most prominent are \begin{itemize} \item The {\sc IGTree} algorithm has been optimized. Learning has been made more memory-lean, while classification has been optimized so that it is now orders of magnitude faster than before on most data sets. \item {\sc Mvdm} matrices are partly prestored; only the {\sc mvdm} values of pairs of frequent values are precomputed. 
\item Class distribution output generated with {\tt +v db} can be normalized so that the distributions add to $1.0$, with the additional {\tt -G} option (or {\tt -G0}).
\item The verbosity option {\tt +v md} displays the level at which a classification was made by {\sc IGTree} ({\tt -a1}), and whether the class label was obtained from a leaf node or an end node. \item With {\tt -X [file]}, TiMBL dumps its internal TiMBL tree into a file containing an XML tree. This option is analogous to {\tt -I [file]}, which prints a TiMBL tree in TiMBL's proprietary format, the difference being that the latter format can be read into TiMBL again. \item Several minor bugs have been resolved. \end{itemize} \section{From version 5.0 to 5.1} Version 5.1 adds speed and memory improvements that are notable with datasets that have very large amounts of examples, features, feature values, or classes (and, especially, combinations of those). Previous versions exhibited exponential slowdown in some worst cases; this has been largely countered. On the outside, TiMBL has been updated in the following aspects: \begin{itemize} \item TiMBL offers extended performance reporting: next to accuracy it reports on micro and macro-averages of F-score and AUC (area under the ROC-curve) with {\tt +v as}. Optionally, it also shows each individual class' precision, recall (or true positive rate), and false positive rate with {\tt +v cs}. \item TiMBL always uses gain ratio feature weighting as the default case, if not specified by the user, also with the {\sc mvdm} and Jeffrey Divergence similarity metrics. \item Two additional feature orderings for the internal TiMBL trees are added, {\tt -TGxE} and {\tt -TIxE} (gain ratio $\times$ entropy and information gain $\times$ entropy, respectively) to potentially tackle the problem of unbalanced trees. \item Bugs in leave-one-out testing with numeric features and with exemplar weighting were fixed. 
\end{itemize} \section{From version 4.3 to 5.0} Version 5.0 is the conclusion of a number of recodings (mostly involving more generic treatment of variables to improve robustness, but also the removal of inverted indexing on the internal tree representation) that have changed the internals of TiMBL considerably. On the outside, TiMBL displays the following new characteristics: \begin{itemize} \item Next to the Overlap, {\sc mvdm}, and Numeric distance functions, TiMBL now features the Jeffrey divergence distance function and the Dot-product distance function. \item The exponential-decay distance weighting function can be set using a second parameter, which can change the shape of the function from normal exponential to bell-shaped. \item In addition to the ``binary'' format, TiMBL can now read a more generic sparse data format. This format allows instances to be coded by tuples of $<$ feature number, feature value $>$ where the value can be symbolic or numeric rather than only binary. \item Tree files generated by TiMBL versions 1.*, 2.* and 3.* are no longer supported. \item The command line interface has had the following additions, including the ones reflecting the above changes: \begin{itemize} \item {\tt -mJ} activates the Jeffrey divergence distance metric. \item {\tt -mD} activates the Dot-product distance metric. \item {\tt -dED::} (without whitespace) sets the $\alpha$ and new $\beta$ parameters. If unspecified, as in {\tt -d ED:} or the older (deprecated) {\tt -d ED }, $\beta$ is set to $1.0$. \item {\tt -F Sparse} declares that training and test files are in the sparse $<$ feature number, feature value $>$ tuple-format described in more detail in section~\ref{commandline}. \item {\tt +v k} is a new verbosity option that prints all class distributions per $k$-nearest distance per classified instance in the output file. It works analogous to the {\tt +v n} option, but does not print the neighbors themselves. 
\end{itemize} \end{itemize} \section{From version 3.0 to 4.3} As the last upgrade of the version 4 strain, version 4.3 added some command line functionality and internal code changes to version 4.2. Minor progressive changes from 4.0 to 4.3 are found at the bottom of this list and are marked as such. \begin{itemize} \item Distance weighting of the $k$ nearest neighbors. This classical exemplar weighting scheme \cite{Dudani76} allows closer nearest neighbors in the $k$ to have a more prominent vote in classification. TiMBL incorporates linear, inversed, and exponential distance weighting. \item Incremental edited memory-based learning with {\sc ib2} \cite{Aha+91}. This incremental version of {\sc ib1} adds instances to memory only when those instances are misclassified by the then-current set of instances in memory. \item Frequency-filtered {\sc mvdm} distance metric. The option, which is not selected by default, is an add-on of the {\sc mvdm} metric, that backs off from the {\sc mvdm} metric to the Overlap distance function whenever one or both in a pair of matched values occurs fewer times in the training material than a user-determined threshold. \item {\sc tribl2}. The {\sc tribl2} algorithm has been implemented as an additional trade-off between {\sc IGTree} and {\sc ib1}. In contrast to {\sc tribl}, {\sc tribl2} uses no threshold parameter. \item Exemplar weighting. TiMBL can read additional numeric exemplar weights (generated externally) when reading a data file, and use these weights during neighbor distance computation in $k$-NN classification. \item Cross-validation testing. Analogous to the leave-one-out testing option, with cross-validation testing it is possible to let TiMBL run systematic tests on different values of parameters, without completely re-initializing the classifier in every fold of the validation experiment. \item The number of concurrent connections to a TiMBL server has been restricted, but can be set to different values. 
\item The command line interface has had several additions reflecting the above changes, plus one extra verbosity option: \begin{itemize} \item the {\tt -d metriccode} option sets the distance weighting metric. Three metrics are available: inverse distance (code ID), inverse linear (IL), and exponential decay (ED, which takes an extra argument $a$, without whitespace, determining the factor of the exponential function). By default, no distance weighting is used (code Z). See Chapter~\ref{algorithms} for descriptions. \item the {\tt -L n} option sets the frequency threshold in the optional switch (backoff) from {\sc mvdm} or Jeffrey divergence to Overlap; whenever in an {\sc mvdm} or Jeffrey divergence distance computation one or both of a pair of values occur fewer than {\tt n} times, Overlap is used rather than the {\sc mvdm} metric. The default value for {\tt n} is 1 (no switching). \item the {\tt -a 3} or {\tt -a IB2} switch invokes the {\sc ib2} algorithm. This algorithm expects to have the {\tt -b} switch set. \item the {\tt -b n} option sets the number ($n$) of lines counting from the top of the training set file, which form the bootstrap set of memorized instances to which {\sc ib2} will start adding instances incrementally. \item the {\tt -a 4} or {\tt -a TRIBL2} switch invokes the {\sc tribl2} algorithm. \item the {\tt -C n} switch (default: {\tt n} set to 10) restricts the number of concurrent connections to a TiMBL server (cf. the {\tt -S} switch). \item the {\tt +v/-v} option has {\tt cm} as a new optional argument; it returns the confusion matrix, obtained after testing, between predicted and actual classes in the test data. \end{itemize} \item The ``programmer's reference'' or API section has been separated from this manual. This new API, describing the underlying structure of TiMBL, is available as a separate document in the TiMBL software distribution. \item Two bugs relating to a type of sparse data problem have been resolved. 
The first involved leave-one-out experiments on data sets with features that have values that occur only once in the training data. The second bug occurred with the use of the {\tt -F Binary} option with the same type of data. \item {\bf [4.1]} Exemplar weights are stored in the TiMBL-tree. \item {\bf [4.1]} The core representation of TiMBL-trees has been modified, causing no changes at the surface except that the {\sc tribl} variant uses less memory. \item {\bf [4.2]} Feature value and class information in the internal TiMBL tree is hashed, by default, except with binary features. Hashing can be explicitly set on or off through the flag {\tt +H} or {\tt -H}. \item {\bf [4.2]} The discretization of numeric features, used for computing feature weights, has changed from linear binning between minimum and maximum values, to equal-content binning. \item {\bf [4.2]} Tie resolution between equal class distributions in the nearest neighbors set is resolved by first expanding the $k$ by one value. If the tie persists after the enlargement of the nearest neighbor set, the original tie resolution method is applied. \item {\bf [4.3]} Internal changes in the code (with no effect on learning and classification functionality) have been implemented with respect to namespaces. \item {\bf [4.3]} A progress marker (one dot per 10 seconds) in computationally intensive operations on the internal representation of the instance base (e.g. pruning {\sc IGTree}s) is added in TiMBL's screen output. \item A number of bugs have been fixed, notably to handle erroneous input more robustly. \end{itemize} \section{From version 2.0 to 3.0} \begin{itemize} \item Server functionality. Apart from the standard processing of test items from a file, alternatively you can now specify a portnumber with {\tt -S portnumber} to open a socket and send commands for classification of test patterns or change of parameters to it. A sample client program is included in the distribution. 
It also opens the possibility of having large numbers of ``classification agents'' cooperate in real time, or of classification of the same data with different parameters.
Although the old (numeric) format is still supported, its use is not encouraged as it will disappear in future versions.
\item It has become easier to examine the way decisions are made from nearest neighbors, because several verbosity-levels allow you to dump similarity values ({\tt -D}), distributions ({\tt -v 16}), and nearest neighbor sets ({\tt -v 32}) to the output file. The {\tt -d} option for writing the distributions no longer exists. \item Better support for the manipulation of {\sc mvdm} matrices. Using the {\tt -U} and {\tt -u} options it is now possible to respectively save and read back value difference matrices (see Section~\ref{mvdmformat}). \item Both ``pre-stored'' and ``regular'' {\sc mvdm} experiments now generate filenames with ``{\tt mvd}'' in the suffix. This used to be ``{\tt pvd}'' and ``{\tt mvd}'' respectively. \item a number of minor bugs have been fixed. \end{itemize} \chapter{Quick-start Tutorial} \label{tutorial} This quick-start tutorial is meant to get you started with TiMBL right away. We discuss how to format the data of a task to serve as training examples, which choices can be made during the construction of the classifier, how various choices can be evaluated in terms of their generalization accuracy, and various other practical issues. The reader who is interested in more background information on TiMBL implementation issues and a formal description of Memory-Based Learning, is advised to read Chapter~\ref{algorithms}. Memory-Based Learning ({\sc mbl}) is based on the idea that intelligent behavior can be obtained by analogical reasoning, rather than by the application of abstract {\em mental rules} as in rule induction and rule-based processing. In particular, {\sc mbl} is founded in the hypothesis that the extrapolation of behavior from stored representations of earlier experience to new situations, based on the similarity of the old and the new situation, is of key importance. 
{\sc mbl} algorithms take a set of examples (fixed-length patterns of feature-values and their associated class) as input, and produce a {\em classifier} which can classify new, previously unseen, input patterns. Although TiMBL was designed with linguistic classification tasks in mind, it can in principle be applied to any kind of classification task with symbolic or numeric features and discrete (non-continuous) classes for which training data is available. As an example task for this tutorial we go through the application of TiMBL to the prediction of Dutch diminutive suffixes. The necessary data sets are included in the TiMBL distribution, so you can replicate the examples given below on your own system. \section{Data} The operation of TiMBL will be illustrated below by means of a real natural language processing task: prediction of the diminutive suffix form in Dutch~\cite{Daelemans+97b}. In Dutch, a noun can receive a diminutive suffix to indicate {\em small size} literally or metaphorically attributed to the referent of the noun; e.g. {\em mannetje} means {\em little man}. Diminutives are formed by a productive morphological rule which attaches a form of the Germanic suffix {\em -tje} to the singular base form of a noun. The suffix shows variation in its form (Table \ref{variation}). The task we consider here is to predict which suffix form is chosen for previously unseen nouns on the basis of their form. 
For these experiments, we collect a representation of nouns in terms of their syllable structure as training material\footnote{These words were collected from the {\sc celex} lexical database~\cite{Baayen+93}.}.
\begin{table}[ht] \begin{center} \begin{tabular}{cccccccccccc|l|l|l} + & b & i & = & - & z & @ & = & - & m & A & nt & J & {\em biezenmand} \\ = & = & = & = & = & = & = & = & + & b & I & x & E & {\em big}\\ = & = & = & = & + & b & K & = & - & b & a & n & T & {\em bijbaan}\\ = & = & = & = & + & b & K & = & - & b & @ & l & T & {\em bijbel}\\ \end{tabular} \end{center} \end{table} Our goal is to use TiMBL in order to train a classifier that can predict the class of new, previously unseen words as correctly as possible, given a set of training examples that are described by the features given above. Because the basis of classification in TiMBL is the storage of all training examples in memory, a test of the classifier's accuracy must be done on a separate test set. We will call these datasets {\tt dimin.train} and {\tt dimin.test}, respectively. The training set {\tt dimin.train} contains 2999 words and the test set contains 950 words, none of which are present in the training set. Although a single train/test partition suffices here for the purposes of explanation, it does not factor out the bias of choosing this particular split. Unless the test set is sufficiently large, a more reliable generalization accuracy measurement is used in real experiments, e.g.~10-fold cross-validation~\cite{Weiss+91}. This means that 10 separate experiments are performed, and in each ``fold'' 90\% of the data is used for training and 10\% for testing, in such a way that each instance is used as a test item exactly once. Another reliable way of testing the real error of a classifier is leave-one-out~\cite{Weiss+91}. In this approach, every data item in turn is selected once as a test item, and the classifier is trained on all remaining items. Accuracy of the classifier is then the number of data items correctly predicted. With the option {\tt -t leave\_one\_out}, this testing methodology is used by TiMBL. 
We will use this option in the tutorial on the file {\tt dimin.data}, the union of {\tt dimin.train} and {\tt dimin.test}. \section{Using TiMBL} Different formats are allowed for training and test data files. TiMBL is able to guess the type of format in most cases. We will use comma-separated values here, with the class as the final value. This format is called C4.5 format in TiMBL because it is the same as that used in Quinlan's well-known C4.5 program for learning decision trees~\cite{Quinlan93}. See Section~\ref{fileformats} for more information about this and other file formats. An experiment is started by executing TiMBL with the two files ({\tt dimin.train} and {\tt dimin.test}) as arguments (``$>$'' is the command line prompt): {\footnotesize \begin{verbatim} > timbl -f dimin.train -t dimin.test \end{verbatim} } Upon completion, a new file has been created with name {\small\tt dimin.test.IB1.O.gr.k1.out}, which is identical to the input test file except that an extra comma-separated column is added with the class predicted by TiMBL. The name of the file provides information about the {\sc mbl} algorithms and metrics used in the experiment (the default values in this case). We will describe these shortly. Apart from the result file, information about the operation of the algorithm is also sent to the standard output. It is therefore advisable to redirect the output to a file in order to make a log of the results. {\footnotesize \begin{verbatim} > timbl -f dimin.train -t dimin.test > dimin-exp1 \end{verbatim} } The defaults used in this case work reasonably well for most problems. We will now provide a point by point explanation of what goes on in the output. %\vspace{0.5cm} %\rule{\textwidth}{0.5mm} {\footnotesize \begin{verbatim} TiMBL 6.4.2 (c) ILK 1998 - 2011. 
Tilburg Memory Based Learner Induction of Linguistic Knowledge Research Group, Tilburg University CLiPS Computational Linguistics Group, University of Antwerp Mon Apr 23 15:43:26 2012 Examine datafile 'dimin.train' gave the following results: Number of Features: 12 InputFormat : C4.5 \end{verbatim} } %\rule{\textwidth}{0.5mm} %\vspace{0.5cm} TiMBL has detected 12 features and the C4.5 input format (comma-separated features, class at the end). %\rule{\textwidth}{0.5mm} {\footnotesize \begin{verbatim} Phase 1: Reading Datafile: dimin.train Start: 0 @ Mon Oct 19 21:30:00 2009 Finished: 2999 @ Mon Oct 19 21:30:00 2009 Calculating Entropy Mon Oct 19 21:30:00 2009 Lines of data : 2999 DB Entropy : 1.6178929 Number of Classes : 5 Feats Vals InfoGain GainRatio 1 3 0.030971064 0.024891536 2 50 0.060860038 0.027552191 3 19 0.039562857 0.018676787 4 37 0.052541227 0.052620750 5 3 0.074523225 0.047699231 6 61 0.10604433 0.024471911 7 20 0.12348668 0.034953203 8 69 0.097198760 0.043983864 9 2 0.045752381 0.046816705 10 64 0.21388759 0.042844587 11 18 0.66970458 0.18507018 12 43 1.2780762 0.32537181 Feature Permutation based on GainRatio/Values : < 9, 5, 11, 1, 12, 7, 4, 3, 10, 8, 2, 6 > \end{verbatim} } %\rule{\textwidth}{0.5mm} %\vspace{0.5cm} Phase 1 is the training data analysis phase. Time stamps for start and end of analysis are provided. Some preliminary analysis of the training data is done: number of training items, number of classes, entropy of the training data. For each feature, the number of values, and four variants of an information-theoretic measure of feature relevance are given. These are used both for memory organization during training and for feature relevance weighting during testing (see Chapter~\ref{algorithms}). Finally, an ordering (permutation) of the features is given. This ordering is used for building the tree-index to the case-base. 
%\vspace{0.5cm} %\rule{\textwidth}{0.5mm} {\footnotesize \begin{verbatim} Phase 2: Learning from Datafile: dimin.train Start: 0 @ Mon Oct 19 21:30:00 2009 Finished: 2999 @ Mon Oct 19 21:30:00 2009 Size of InstanceBase = 19231 Nodes, (769240 bytes), 49.77 % compression Examine datafile 'dimin.test' gave the following results: Number of Features: 12 InputFormat : C4.5 \end{verbatim} } %\rule{\textwidth}{0.5mm} %\vspace{0.5cm} Phase 2 is the learning phase: all training items are stored in an efficient way in memory for use during testing. Again timing information (real time) is provided, as well as information about the size of the data structure representing the stored examples and the amount of compression achieved. %\vspace{0.5cm} %\rule{\textwidth}{0.5mm} {\footnotesize \begin{verbatim} Starting to test, Testfile: dimin.test Writing output in: dimin.test.IB1.O.gr.k1.out Algorithm : IB1 Global metric : Overlap Deviant Feature Metrics:(none) Weighting : GainRatio Feature 1 : 0.024891535617620 Feature 2 : 0.027552191321752 Feature 3 : 0.018676787182524 Feature 4 : 0.052620750282779 Feature 5 : 0.047699230752236 Feature 6 : 0.024471910753751 Feature 7 : 0.034953203413051 Feature 8 : 0.043983864437713 Feature 9 : 0.046816704745507 Feature 10 : 0.042844587034556 Feature 11 : 0.185070180760327 Feature 12 : 0.325371814230901 Tested: 1 @ Fri Dec 24 20:27:07 2010 Tested: 2 @ Fri Dec 24 20:27:07 2010 Tested: 3 @ Fri Dec 24 20:27:07 2010 Tested: 4 @ Fri Dec 24 20:27:07 2010 Tested: 5 @ Fri Dec 24 20:27:07 2010 Tested: 6 @ Fri Dec 24 20:27:07 2010 Tested: 7 @ Fri Dec 24 20:27:07 2010 Tested: 8 @ Fri Dec 24 20:27:07 2010 Tested: 9 @ Fri Dec 24 20:27:07 2010 Tested: 10 @ Fri Dec 24 20:27:07 2010 Tested: 100 @ Fri Dec 24 20:27:07 2010 Ready: 950 @ Fri Dec 24 20:27:07 2010 Seconds taken: 0.0678 (14003.54 p/s) overall accuracy: 0.967368 (919/950), of which 39 exact matches There were 5 ties of which 4 (80.00%) were correctly resolved \end{verbatim} } %\rule{\textwidth}{0.5mm} 
%\vspace{0.5cm} In Phase 3, the trained classifier is applied to the test set. Because we have not specified which algorithm to use, the default settings are used ({\sc ib1} with information-theoretic feature weighting). This algorithm computes the similarity between a test item and each training item in terms of {\em weighted overlap}: the total difference between two patterns is the sum of the relevance weights of those features which are not equal. The class for the test item is decided on the basis of the least distant item(s) in memory. To compute relevance, Gain Ratio is used (an information-theoretic measure, see Section~\ref{infogain}). Time stamps indicate the progress of the testing phase. Finally, accuracy on the test set is logged, and the number of exact matches\footnote{An exact match in this experiment can occur when two different nouns have the same feature-value representation.} and ties (two or more classes are equally frequent in the nearest neighbor set). In this experiment, the diminutive suffix form of 96.7\% of the new words was correctly predicted. Train and test set overlap in 39 items, and the algorithm had to break five ties, four of which were broken correctly. The meaning of the output file names can be explained now:\\ {\tt dimin.test.IB1.O.gr.k1.out} means output file ({\tt .out}) for {\tt dimin.test} with algorithm {\sc mbl} (={\sc ib1}), similarity computed as {\em weighted overlap} ({\tt .O}), relevance weights computed with {\em gain ratio} ({\tt .gr}), and number of most similar memory patterns on which the output class was based equal to 1 ({\tt .k1}). \section{Algorithms and metrics} A precise discussion of the different algorithms and metrics implemented in TiMBL is given in Chapter~\ref{algorithms}. We will discuss the effect of the most important ones on our data set. A first choice in algorithms is between using {\sc ib1} and {\sc igtree}. 
In the trade-off between generalization accuracy and efficiency, {\sc ib1} usually, but not always, leads to more accuracy at the cost of more memory and slower computation, whereas {\sc igtree} is a fast heuristic approximation of {\sc ib1}, but sometimes less accurate. The {\sc IGTree} algorithm is used when {\tt -a 1} is given on the command line, whereas the {\sc ib1} algorithm used above (the default) would have been specified explicitly by {\tt -a 0}. {\footnotesize \begin{verbatim} > timbl -a1 -f dimin.train -t dimin.test \end{verbatim}} We see that {\sc IGTree} performs only slightly worse (96.6\%) than {\sc ib1} (96.8\%) for this train-test partitioning of the data --- it uses less memory and is faster, however. When using the {\sc ib1} algorithm, there is a choice of metrics for influencing the definition of similarity. With {\em weighted overlap}, each feature is assigned a weight, determining its relevance in solving the task. With the {\em modified value difference metric} ({\sc mvdm}), each pair of values of a particular feature is assigned a value difference. The intuition here is that in our diminutive problem, for example, the codas $n$ and $m$ should be regarded as being more similar than $n$ and $p$. These pair-wise differences are computed for each pair of values in each feature (see Section~\ref{mvdm}). Selection between weighted overlap and {\sc mvdm} is done by means of the {\tt -mM} parameter. The following selects {\sc mvdm}, whereas {\tt -mO} ({\em weighted overlap}) is the default. {\footnotesize \begin{verbatim} > timbl -mM -f dimin.train -t dimin.test \end{verbatim} } Especially when using {\sc mvdm}, but also in other cases, it may be useful to extrapolate not just from the most similar example in memory, which is the default, but from several. This can be achieved by using the $-k$ parameter followed by the wanted number of nearest neighbors. 
E.g., the following applies {\sc ib1} with the {\sc mvdm} metric, with extrapolation from the 5 nearest neighbors. {\footnotesize \begin{verbatim} > timbl -mM -k5 -f dimin.train -t dimin.test \end{verbatim} } Whenever more than one nearest neighbor is taken into account for extrapolation, it may be useful to weigh the influence of the neighbors on the final decision as a function of their distance from the test item. Several possible implementations of this distance function are provided. E.g., the following provides inverse distance: {\footnotesize \begin{verbatim} > timbl -mM -k5 -dID -f dimin.train -t dimin.test \end{verbatim} } Within the {\sc ib1} {\em weighted overlap}\/ option, the default feature weighting method is gain ratio. Other feature relevance weighting methods are available as well. By setting the parameter {\tt -w} to 0, an {\em unweighted overlap}\/ definition of similarity is created where each feature is considered equally relevant. In that case, similarity reduces to the number of equal values in the same position in the two patterns being compared. As an alternative weighting, users can provide their own weights by using the {\tt -w} parameter with a filename in which the feature weights are stored (see Section~\ref{weightformat} for a description of the format of the weights file). 
\begin{table} \begin{center} \begin{tabular}{l|rrrr} & no weight & gain & information & chi \\ & (overlap) & ratio & gain & squared \\ \noalign{\smallskip} \hline \noalign{\smallskip} Overlap, $-k1$ & 86.4 & 96.7 & 96.6 & 96.6 \\ Overlap, $-k3$ & 73.1 & 96.4 & 96.8 & 96.9 \\ Overlap, $-k5$ & 62.6 & 95.4 & 96.1 & 96.1 \\ \hline \noalign{\smallskip} {\sc mvdm}, $-k1$ & 95.8 & 96.3 & 96.1 & 96.2 \\ {\sc mvdm}, $-k3$ & 97.4 & 97.6 & 97.6 & 97.6 \\ {\sc mvdm}, $-k5$ & {\bf 97.8} & 97.7 & 97.7 & 97.7 \\ \hline \noalign{\smallskip} \end{tabular} \caption{Some results for diminutive prediction.} \label{diminresults} \end{center} \end{table} Table \ref{diminresults} shows a small matrix indicating the effect of distance metric (Overlap versus {\sc mvdm}) and weighting method choice on generalization accuracy, using the same training and test set as before, and increasing $k$ from 1 to 3 and 5. While increasing $k$ leads to a deterioration of generalization accuracy with the Overlap function, it leads to improvements with {\sc mvdm}. Another clear contrast is that the absence of feature weighting leads to the lowest scores with the Overlap function, and the highest score with {\sc mvdm} and $k=5$. Given that TiMBL offers several more hyperparameters than only $k$, the distance metric, and the feature weighting metric, it should be obvious that even with a single training and test set experiment, a large experimental matrix can be explored. Unfortunately, the location of the cell with the highest number in this matrix cannot be predicted upfront. It is therefore useful to try out a large set of reasonable combinations of options by cross-validation on the training data to achieve best results with {\sc mbl} \cite{VandenBosch04b}. The option {\tt -t @f} where {\tt f} is the name of a file, allows you to predefine various combinations of options to be tested and test them without having the training stages repeated each time. See Chapter \ref{commandline}. 
\section{More options} Several input and output options exist to make life easier while experimenting. See Chapter~\ref{commandline} for a detailed description of these options. One especially useful option for testing linguistic hypotheses is the ignore option, which allows you to skip certain features when computing similarity. E.g. if we want to test the hypothesis that only the rime (nucleus and coda) and the stress of the last syllable are actually relevant in determining the form of the diminutive suffix, we can execute the following with the previously best parameter settings to disregard all but the fourth-last and the last two features. As a result we get an accuracy of 97.1\%. {\footnotesize \begin{verbatim} > timbl -mM:I1-8,10 -k5 -w0 -f dimin.train -t dimin.test \end{verbatim} } The {\tt +/-v} (verbosity) option allows you to control the amount of information that is generated in the output, ranging from nearly nothing ({\tt +v s}) to a lot ({\tt +v as+cs+di+db+n+k}). Specific verbosity settings exist for dumping option settings ({\tt +v o}), feature relevance weights (default), value-class conditional probabilities ({\tt +v p}), exact matches ({\tt +v e}), distributions ({\tt +v db}), a confusion matrix ({\tt +v cm}), advanced statistics besides accuracy: micro-average and macro-average F-score and AUC ({\tt +v as}), per-class advanced statistics ({\tt +v cs}), the nearest neighbors on which decision are based ({\tt +v n}), just the class distributions per $k$-nearest distance per classified instance ({\tt +v k}), or the distances to the nearest neighbor ({\tt +v di}). E.g. the following command results in an output file with distributions. {\footnotesize \begin{verbatim} > timbl +v db -f dimin.train -t dimin.test \end{verbatim} } The resulting output file {\tt dimin.test.IB1.O.gr.k1.out} contains lines like the following. 
{\footnotesize \begin{verbatim} +,t,L,=,-,m,@,=,-,l,I,N,E,E { E 1.00000 } =,=,=,=,=,=,=,=,+,pr,O,p,J,J { E 3.00000, J 12.0000 } =,=,=,=,=,=,=,=,+,w,e,t,J,J { J 2.00000 } =,=,=,=,+,t,L,n,-,h,L,s,J,J { J 1.00000 } =,=,=,=,=,=,=,=,+,t,L,n,T,T { T 1.00000 } =,=,=,=,=,=,=,=,+,z,o,m,P,P { P 3.00000 } +,d,a,=,-,m,@,s,-,kr,A,ns,J,J { J 1.00000 } =,=,=,=,+,=,a,rd,-,m,A,n,E,E { E 2.00000 } =,=,=,=,=,=,=,=,+,f,M,n,T,T { T 43.0000, E 20.0000 } -,d,u,=,-,k,@,=,-,m,A,nt,J,J { J 1.00000 } \end{verbatim} } This information can e.g. be used to assign a certainty to a decision of the classifier, or to make available a second-best back-off option. Another verbosity option, {\tt +v di}, displays the distance to the nearest neighbor: {\footnotesize \begin{verbatim} > timbl +v di -f dimin.train -t dimin.test +,l,a,=,-,d,@,=,-,k,A,st,J,J 0.070701 -,s,i,=,-,f,E,r,-,st,O,k,J,J 0.000000 =,=,=,=,=,=,=,=,+,sp,a,n,T,T 0.042845 =,=,=,=,=,=,=,=,+,st,o,t,J,J 0.042845 =,=,=,=,+,sp,a,r,-,b,u,k,J,J 0.024472 +,h,I,N,-,k,@,l,-,bl,O,k,J,J 0.147489 -,m,e,=,-,d,A,l,+,j,O,n,E,E 0.182421 -,sn,u,=,-,p,@,=,+,r,K,=,T,T 0.046229 =,=,=,=,=,=,=,=,+,sp,A,N,E,E 0.042845 +,k,a,=,-,k,@,=,-,n,E,st,J,J 0.114685 \end{verbatim} } This can be used to study how very similar instances (low distance) and less similar patterns (higher distance) are used in the process of generalization. The listing of nearest neighbors is useful for the analysis of the behavior of a classifier. It can be used to interpret why particular decisions or errors occur. 
{\footnotesize \begin{verbatim} > timbl +v n+k -mM -k3 -w0 -f dimin.train -t dimin.test +,m,I,=,-,d,A,G,-,d,},t,J,J { J 3.00000 } # k=1, 1 Neighbor(s) at distance: 0.99179269134432 # +,p,a,=,-,t,@,rs,-,f,A,t,{ J 1.00000 } # k=2, 1 Neighbor(s) at distance: 0.99458957262696 # +,h,o,=,-,n,@,G,-,b,A,k,{ J 1.00000 } # k=3, 1 Neighbor(s) at distance: 1.0088291749842 # +,h,E,r,-,d,@,rs,-,t,A,s,{ J 1.00000 } -,t,@,=,-,l,|,=,-,G,@,n,T,T { T 3.00000 } # k=1, 1 Neighbor(s) at distance: 0.33024081383366 # -,x,@,=,+,h,|,=,-,G,@,n,{ T 1.00000 } # k=2, 1 Neighbor(s) at distance: 0.49144604610567 # -,d,@,r,-,w,a,=,-,G,@,n,{ T 1.00000 } # k=3, 1 Neighbor(s) at distance: 0.56944572926932 # -,st,@,=,-,l,I,=,-,N,@,=,{ T 1.00000 } \end{verbatim} } A confusion matrix, printed when the {\tt +v cm} option is selected, can bring to light specific errors of the classifier that would not be apparent from the overall accuracy. Applied to the diminutive data, the following confusion matrix is computed and printed: {\footnotesize \begin{verbatim} > timbl +v cm -f dimin.train -t dimin.test Confusion Matrix: T E J P K ----------------------------------- T | 453 0 2 0 0 E | 0 87 4 1 8 J | 1 5 346 0 0 P | 0 3 0 24 0 K | 0 7 0 0 9 -*- | 0 0 0 0 0 \end{verbatim} } The confusion matrix associates the class predicted by TiMBL (vertically) with the real class of the test items given (horizontally). All cells outside the diagonal contain errors of one class being mistaken for another. For example, the K class ({\em -kje}) is mispredicted seven times as class E ({\em -etje}). (The bottom line, labeled with {\tt -*-}, would contain aggregate counts of classes occuring in the test data that did not occur in the training data. In the diminutive data this does not occur.) 
In general, a confusion matrix allows a more fine-grained analysis of experimental results and better experimental designs (some parameter settings may work for some classes but not for others, or some may improve recall, and others precision, e.g.). From such a matrix, not only accuracy can be derived, but also a number of additional metrics that have become popular in machine learning, information retrieval, and subsequently also in computational linguistics: {\em recall}, {\em precision}, and their harmonic mean {\em F-score}, as well as {\em true positive rate}, {\em false positive rate}, and their joint measure {\em AUC} in ROC space. The details of these advanced statistics are given in Section~\ref{advancedstats}. They can be reported by TiMBL using the {\tt +v as} and {\tt +v cs} verbosity options: {\footnotesize \begin{verbatim} > timbl +v as+cs -f dimin.train -t dimin.test Scores per Value Class: class | TP FP TN FN precision recall(TPR) FPR F-score AUC T | 453 1 494 2 0.99780 0.99560 0.00202 0.99670 0.99679 E | 87 15 835 13 0.85294 0.87000 0.01765 0.86139 0.92618 J | 346 6 592 6 0.98295 0.98295 0.01003 0.98295 0.98646 P | 24 1 922 3 0.96000 0.88889 0.00108 0.92308 0.94390 K | 9 8 926 7 0.52941 0.56250 0.00857 0.54545 0.77697 F-Score beta=1, microav: 0.967160 F-Score beta=1, macroav: 0.861914 AUC, microav: 0.980138 AUC, macroav: 0.926060 overall accuracy: 0.967368 (919/950), of which 39 exact matches There were 5 ties of which 4 (80.00%) were correctly resolved \end{verbatim} } We hope that this tutorial has made it clear that, once you have coded your data in fixed-length feature-value patterns, it should be relatively straightforward to get the first results using TiMBL. You can then experiment with different metrics and algorithms to try and further improve your results. \chapter{Memory-based learning algorithms} \label{algorithms} TiMBL is a program implementing several memory-based learning algorithms. 
All implemented algorithms have in common that they store some representation of the training set explicitly in memory. During testing, new cases are classified by extrapolation from the most similar stored cases. The main differences among the algorithms incorporated in TiMBL lie in: \begin{itemize} \item The definition of {\em similarity}, \item The way the instances are stored in memory, and \item The way the search through memory is conducted. \end{itemize} In this chapter, various choices for these issues are described. We start in Section~\ref{mbl} with a formal description of the basic memory-based learning algorithm, i.e.~a nearest neighbor search. We then introduce different distance metrics, such as Information Gain weighting, which allows us to deal with features of differing importance, and the Modified Value Difference metric, which allows us to make a graded guess of the match between two different symbolic values, and describe the standard versus three distance-weighted versions of the class voting mechanism of the nearest neighbor classifier. In Section~\ref{indexing}, we give a description of various algorithmic optimizations for nearest neighbor search. Sections~\ref{igtree} to~\ref{ib2} describe three variants of the standard nearest neighbor classifier implemented within TiMBL, that optimize some intrinsic property of the standard algorithm. First, in Section~\ref{igtree}, we describe {\sc IGTree}, which replaces the exact nearest neighbor search with a very fast heuristic that exploits the difference in importance between features. Second, in Section~\ref{tribl}, we describe the {\sc tribl} algorithm, which is a hybrid between {\sc IGTree} and nearest neighbor search. Third, Section~\ref{ib2} describes the {\sc ib2} algorithm, which incrementally and selectively adds instances to memory during learning. 
The chapter is concluded by Section~\ref{furtherreading}, which provides an overview of further reading into theory and applications of memory-based learning to natural language processing tasks. \section{Memory-based learning} \label{mbl} Memory-based learning is founded on the hypothesis that performance in cognitive tasks is based on reasoning on the basis of similarity of new situations to {\em stored representations of earlier experiences}, rather than on the application of {\em mental rules}\/ abstracted from earlier experiences (as in rule induction and rule-based processing). The approach has surfaced in different contexts using a variety of alternative names such as similarity-based, example-based, exemplar-based, analogical, case-based, in\-stance-based, and lazy learning~\cite{Stanfill+86,Aha+91,Cost+93,Kolodner93,Aha97a}. Historically, memory-based learning algorithms are descendants of the $k$-nearest neighbor (henceforth $k$-{\sc nn}) algorithm \cite{Cover+67,Devijver+82,Aha+91}. An {\sc mbl} system, visualized schematically in Figure~\ref{mbl-method}, contains two components: a {\em learning component}\/ which is memory-based (from which {\sc mbl} borrows its name), and a {\em performance component}\/ which is similarity-based. The learning component of {\sc mbl} is memory-based as it involves adding training instances to memory (the {\em instance base} or case base); it is sometimes referred to as `lazy' as memory storage is done without abstraction or restructuring. An instance consists of a fixed-length vector of $n$ feature-value pairs, and an information field containing the classification of that particular feature-value vector. In the performance component of an {\sc mbl} system, the product of the learning component is used as a basis for mapping input to output; this usually takes the form of performing classification. During classification, a previously unseen test example is presented to the system. 
The similarity between the new instance $X$ and all examples $Y$ in memory is computed using some {\em distance metric} $\Delta(X,Y)$. The extrapolation is done by assigning the most frequent category within the found set of most similar example(s) (the $k$-nearest neighbors) as the category of the new test example. In case of a tie among categories, a tie breaking resolution method is used. This method is described in subsection~\ref{tiebreaking}. \begin{figure}[htb] \begin{center} \leavevmode \epsfxsize=8cm \epsffile{mble-method.eps} \caption{General architecture of an {\sc mbl} system. } \label{mbl-method} \end{center} \end{figure} \subsection{The Overlap metric} \label{overlap} The most basic metric that works for patterns with symbolic features is the {\bf Overlap metric}\footnote{This metric is also referred to as Hamming distance, Manhattan metric, city-block distance, or L1 metric.} given in Equations~\ref{distance} and~\ref{overlapeq}; where $\Delta(X,Y)$ is the distance between instances $X$ and $Y$, represented by $n$ features, and $\delta$ is the distance per feature. The distance between two patterns is simply the sum of the differences between the features. The $k$-{\sc nn} algorithm with this metric is called {\sc ib1} \cite{Aha+91}. \begin{equation} \Delta(X,Y) = \sum_{i=1}^{n} \delta(x_{i},y_{i}) \label{distance} \end{equation} where: \begin{equation} \delta(x_{i}, y_{i}) = \left\{ \begin{array}{ll} abs(\frac{x_{i}-y_{i}}{max_{i}-min_{i}}) & \mbox{if numeric, else}\\ 0 & \mbox{if $x_{i} = y_{i}$}\\ 1 & \mbox{if $x_{i} \neq y_{i}$}\\ \end{array} \right. \label{overlapeq} \end{equation} The major difference with the {\sc ib1} algorithm originally proposed by \cite{Aha+91}, is that in our version the value of $k$ refers to $k$-nearest {\em distances}\/ rather than $k$-nearest examples. With $k=1$, for instance, TiMBL's nearest neighbor set can contain several instances that are equally distant to the test instance. 
Arguably, our $k$-NN kernel could therefore be called $k$-nearest distances classification. Another difference with the original {\sc ib1} as well as with other implementations such as $k$-NN in the {\sc weka} machine learning toolkit \cite{Witten+99} is the way in which ties are resolved in choosing the majority category among the set of nearest neighbors. Since this method is independent of the distance function we discuss this issue separately in subsection~\ref{tiebreaking}. \paragraph{Variations on Overlap: Levenshtein and Dice coefficient metrics} The Overlap metric is all-or-nothing. For measuring the similarity between numeric or atomically symbolic values this may suffice, but there are cases (such as in natural language processing) in which string-valued feature values occur that can mismatch with other string values in a meaningfully graded way. For example, the value pair ``bathe'' and ``bathes'' only differs in one letter; counting them as more similar than ``bathe'' and ``rumour'', for example, may be useful for the classification task at hand. We implemented two additional metrics, Levenshtein distance and the Dice coefficient, that each provide a graded similarity score between pairs of strings. {\bf Levenshtein} distance is a classic {\em edit distance}\/ metric \cite{Levenshtein66} that counts the number of insertions, deletions, and substitutions to transform the one string into the other. In our (dynamic programming) implementation the three operations count equally heavily. The {\bf Dice} coefficient computes the overlap between the occurrences of character bigrams in two strings as in Equation~\ref{dice}, where $n_{x_{i} \cap y_{i}}$ is the number of character bigrams (uniquely) occurring both in string value $x_{i}$ and in string value $y_{i}$ (and where $i$ is the index of the feature as introduced in Equation~\ref{distance})\footnote{Strings of length one are not handled by Dice; we back off to Overlap in these cases.}.
The equation subtracts the similarity from 1, because we assume $\delta$ to produce a distance, not a similarity. \begin{equation} \delta(x_{i}, y_{i}) = 1 - \frac{2 n_{x_{i} \cap y_{i}}}{n_{x_{i}} + n_{y_{i}}} \label{dice} \end{equation} \subsection{Information-gain and gain ratio feature weighting} \label{infogain} The distance metric in Equation~\ref{overlapeq} straightforwardly counts the number of (mis)matching feature-values in both patterns. In the absence of information about feature relevance, this is a reasonable choice. Otherwise, we can add domain knowledge bias to weight or select different features (see e.g.~\namecite{Cardie96} for an application of linguistic bias in a language processing task), or look at the behavior of features in the set of examples used for training. We can compute statistics about the relevance of features by looking at which features are good predictors of the class labels. Information Theory gives us a useful tool for measuring feature relevance in this way~\cite{Quinlan86,Quinlan93}. {\bf Information Gain} (IG) weighting looks at each feature in isolation, and measures how much information it contributes to our knowledge of the correct class label. The Information Gain of feature $i$ is measured by computing the difference in uncertainty (i.e.\ entropy) between the situations without and with knowledge of the value of that feature (Equation~\ref{IGgain}). \begin{equation} w_{i} = H(C) - \sum_{v \in V_{i}} P(v) \times H(C|v) \label{IGgain} \end{equation} Where $C$ is the set of class labels, $H(C) = - \sum_{c \in C} P(c) \log_{2} P(c)$ is the entropy of the class labels, $V_{i}$ is the set of values for feature $i$, and $H(C|v)$ is the conditional entropy of the subset of the training examples that have value $v$ on feature $i$. The probabilities are estimated from relative frequencies in the training set. For numeric features, an intermediate step needs to be taken to apply the symbol-based computation of IG. 
All real values of a numeric feature are temporarily discretized into a number (the default is 20) of intervals. Instances are ranked on their real value, and then spread evenly over the intervals; each interval contains the same number of instances (i.e., by default, $1/20$th of the total amount of instances). Instances in each of these intervals are then used in the IG computation as all having the same unordered, symbolic value per group. Note again that this discretization is only temporary; it is not used in the computation of the distance metric. It is important to realize that the IG weight is really a probability-weighted average of the informativity of the different values of the feature. On the one hand, this pre-empts the consideration of values with low frequency but high informativity. Such values ``disappear'' in the average. On the other hand, this also makes the IG weight very robust to estimation problems. Each parameter (weight) is estimated on the whole data set. Information Gain, however, tends to overestimate the relevance of features with large numbers of values. Imagine a data set of hospital patients, where one of the available features is a unique ``patient ID number''. This feature will have very high Information Gain, but it does not give any generalization to new instances. To normalize Information Gain for features with different numbers of values, Quinlan~\cite{Quinlan93} has introduced a normalized version, called {\bf Gain Ratio}, which is Information Gain divided by $si(i)$ (split info), the entropy of the feature-values (Equation~\ref{splitinfo}).
\begin{equation} w_{i} = \frac{H(C) - \sum_{v \in V_{i}} P(v) \times H(C|v)}{si(i)} \label{IGgainratio} \end{equation} \begin{equation} si(i) = - \sum_{v \in V_{i}} P(v) \log_{2} P(v) \label{splitinfo} \end{equation} The resulting Gain Ratio values can then be used as weights $w_{f}$ in the weighted distance metric (Equation~\ref{distancew})\footnote{In a generic use IG refers both to Information Gain and to Gain Ratio throughout this manual. In specifying parameters for the software, the distinction between both needs to be made, because they often result in different behavior.}. The $k$-{\sc nn} algorithm with this metric is called {\sc ib1-ig} \cite{Daelemans+92b}. \begin{equation} \Delta(X,Y) = \sum_{i=1}^{n}\ w_{i} \ \delta(x_{i},y_{i}) \label{distancew} \end{equation} The possibility of automatically determining the relevance of features implies that many different and possibly irrelevant features can be added to the feature set. This is a very convenient methodology if domain knowledge does not constrain the choice enough beforehand, or if we wish to measure the importance of various information sources experimentally. However, because IG values are computed for each feature independently, this is not necessarily the best strategy. Sometimes better results can be obtained by leaving features out than by letting them in with a low weight. Very redundant features can also be challenging for {\sc ib1-ig}, because IG will overestimate their joint relevance. Imagine an informative feature which is duplicated. This results in an overestimation of IG weight by a factor two, and can lead to accuracy loss, because the doubled feature will dominate the distance metric. \subsection{Chi-squared and shared variance feature weighting} \label{chisquared} Unfortunately, as~\namecite{White+94} have shown, the Gain Ratio measure still has an unwanted bias towards features with more values. 
The reason for this is that the Gain Ratio statistic is not corrected for the number of degrees of freedom of the contingency table of classes and values. \namecite{White+94} proposed a feature selection measure based on the chi-squared statistic, as values of this statistic can be compared across conditions with different numbers of degrees of freedom. The \chisq statistic is computed from the same contingency table as the Information Gain measure by the following formula (Equation~\ref{chisq-eq}). \begin{equation} \chi^{2} = \sum_{i} \sum_{j} \frac{(E_{ij} - O_{ij})^{2}} {E_{ij}} \label{chisq-eq} \end{equation} where $O_{ij}$ is the observed number of cases with value $v_{i}$ in class $c_{j}$, i.e.~$O_{ij} = n_{ij}$, and $E_{ij}$ is the expected number of cases which should be in cell ($v_{i}$, $c_{j}$) in the contingency table, if the null hypothesis (of no predictive association between feature and class) is true (Equation~\ref{chisq-expect-eq}). Let $n_{.j}$ denote the marginal for class $j$ (i.e.~the sum over column $j$ of the table), $n_{i.}$ the marginal for value $i$, and $n_{..}$ the total number of cases (i.e.~the sum of all the cells of the contingency table). \begin{equation} E_{ij} = \frac{n_{.j} n_{i.}}{n_{..}} \label{chisq-expect-eq} \end{equation} The \chisq statistic is well approximated by the chi-square distribution with $\nu = (m-1)(n-1)$ degrees of freedom, where $m$ is the number of values and $n$ is the number of classes. We can then either use the \chisq values as feature weights in Equation~\ref{distancew}, or we can explicitly correct for the degrees of freedom by using the {\bf Shared Variance} measure (Equation~\ref{shared-variance-eq}). 
\begin{equation} SV_{i} = \frac{ \chi^2_{i}}{N \times ( min(|C|,|V_{i}|)-1 ) } \label{shared-variance-eq} \end{equation} Where $|C|$ and $|V_{i}|$ are the number of classes and the number of values of feature $i$, respectively, and $N$ is the number of instances\footnote{Note that with two classes, the shared variance weights of all features are simply divided by $N$, and will not be different from \chisq weights.}. We will refer to these variations of {\sc mbl} as {\sc ib1-\chisq} and {\sc ib1-sv}. One should keep in mind, that the correspondence to the chi-square distribution generally becomes poor if the expected frequencies in the contingency table cells become small. A common recommendation is that the \chisq test cannot be trusted when more than $20\%$ of the expected frequencies are less than $5$, or any are less than $1$. Chi-squared and shared variance weights of {\em numeric}\/ features are computed via a discretization preprocessing step (also used with computing IG and GR weights). Values are first discretized into a number (20 by default) of equally-spaced intervals between the minimum and maximum values of the feature. These groups are then used as discrete values in computing chi-squared and shared variance weights. \subsection{Modified value difference, Jensen-Shannon divergence, and Jeffrey divergence metrics} \label{mvdm} The choice of representation for instances in {\sc mbl} remains the key factor determining the strength of the approach. The features and categories in NLP tasks are usually represented by symbolic labels. The metrics that have been described so far, i.e.~Overlap and IG Overlap, are limited to counting exact matches between feature values. This means that all values of a feature are seen as equally dissimilar. However, if we think of an imaginary task in e.g.~the phonetic domain, we might want to use the information that 'b' and 'p' are more similar than 'b' and 'a'. 
For this purpose a metric was defined by \namecite{Stanfill+86} and further refined by \namecite{Cost+93}. It is called the (Modified) Value Difference Metric ({\sc mvdm}; Equation~\ref{MVDMeq}), and it is a method to determine the similarity of the values of a feature by looking at co-occurrence of values with target classes. For the distance between two values $v_{1},\ v_{2}$ of a feature, we compute the difference of the conditional distribution of the classes $C_{i}$ for these values. \begin{equation} \delta(v_{1}, v_{2}) = \sum_{i=1}^{n} \left| P(C_{i}|v_{1}) - P(C_{i}|v_{2}) \right| \label{MVDMeq} \end{equation} For computational efficiency, all pairwise $\delta(v_{1}, v_{2})$ values can be precomputed before the actual nearest neighbor search starts. Although the {\sc mvdm} metric does not explicitly compute feature relevance, an implicit feature weighting effect is present. If features are very informative, their conditional class probabilities will often be skewed towards a particular class. This implies that on average the $\delta(v_{1}, v_{2})$ will be large. For uninformative features, on the other hand, the conditional class probabilities will tend to be closer to the overall class distribution, so that on average the $\delta(v_{1}, v_{2})$ will be very small. {\sc mvdm} differs considerably from Overlap based metrics in its composition of the nearest neighbor sets. Overlap causes an abundance of ties in nearest neighbor position. For example, if the nearest neighbor is at a distance of one mismatch from the test instance, then the nearest neighbor set will contain the entire partition of the training set that matches all the other features but contains {\em any} value for the mismatching feature (see~\namecite{Zavrel+97} for a more detailed discussion). 
With the {\sc mvdm} metric, however, the nearest neighbor set will either contain patterns which have the value with the lowest $\delta(v_{1}, v_{2})$ in the mismatching position, or {\sc mvdm} will select a totally different nearest neighbor which has less exactly matching features, but a smaller distance in the mismatching features. In sum, this means that the nearest neighbor set is usually much smaller for {\sc mvdm} at the same value of $k$. In NLP tasks we have found it useful to experiment with values of $k$ larger than one for {\sc mvdm}, because this re-introduces some of the beneficial smoothing effects associated with large nearest neighbor sets. One cautionary note about this metric is connected with data sparsity. In many practical applications we are confronted with a very limited set of examples, with values occuring only a few times or once in the whole data set. If two such values occur with the same class, {\sc mvdm} will regard them as identical, and if they occur with two different classes their distance will be maximal. In cases of such extreme behaviour on the basis of low-frequency evidence, it may be safer to back off to the Overlap metric, where only an exact value match yields zero distance. TiMBL offers this back-off from {\sc mvdm} to Overlap through a frequency threshold, that switches from the {\sc mvdm} to the Overlap metric when one or both of a pair of matched values occurs fewer times in the learning material than this threshold. Jensen-Shannon divergence and Jeffrey divergence are offered as related, but more complex alternatives to {\sc mvdm}. They are both statistical dissimilarity metrics that can be used to compute the distance between class distributions of two values of the same feature. 
Functionally they are quite similar to {\sc mvdm} as well as to Kullback-Leibler (KL) divergence; Jeffrey divergence and Jensen-Shannon divergence are both symmetric versions of KL divergence, and symmetry is what we need for estimating value difference. First, Jeffrey divergence is best known for its application as a distance function in unsupervised vector space models, e.g. in image retrieval, where it is applied to histogram vectors. While {\sc mvdm} computes a straightforward geometrical distance between two class distribution vectors, Jeffrey divergence introduces a logarithm term, as seen in Equation~\ref{jd}. Jeffrey divergence is a symmetric variant of Kullback-Leibner distance; the $m$ term given in Equation~\ref{jdm} is used for this purpose. \begin{equation} \delta(v_{1}, v_{2}) = \sum_{i=1}^{n} ( P(C_{i}|v_{1}) log \frac{P(C_{i}|v_{1})}{m} + P(C_{i}|v_{2}) log \frac{P(C_{i}|v_{2})}{m} ) \label{jd} \end{equation} \begin{equation} m = \frac{P(C_{i}|v_{1}) + P(C{i}|v_{2})}{2} \label{jdm} \end{equation} Compared to {\sc mvdm}, Jeffrey divergence assigns relatively larger distances to value pairs of which the class distributions are more orthogonal. In other words, it assigns more prominence to zero probabilities, which in the case of sparse data (e.g, with Zipfian distributions of values) are generally better estimations than non-zero probabilities. This makes Jeffrey divergence in principle more robust than {\sc mvdm} with respect to sparse data. As with {\sc mvdm}, TiMBL offers an optional frequency-thresholded back-off from Jeffrey and Jensen-Shannon divergence to the Overlap metric to further remedy some negative effects due to data sparseness. \subsection{Dot-product and cosine metrics} \label{dotproduct} When features have numeric or binary values, TiMBL can also compute the distance between two instances via the dot product (or inner product) of their feature-value vectors. 
The dot product (which is higher with better matches) is subsequently converted into a distance by subtracting it from the maximum dot product attainable, i.e. that on an exact match.
Many features cause very deep and usually very unbalanced trees, from which retrieval can be rather inefficient (especially when there is little variance in the feature weights). Other internal data structures such as inverted indices are typically more suited to these types of vector spaces. For now, inverted indices are not implemented in TiMBL. \subsection{Distance-weighted class voting} \label{distweightvote} The most straightforward method for letting the $k$ nearest neighbors vote on the class of a new case is the {\em majority voting} method, in which the vote of each neighbor receives equal weight, and the class with the highest number of votes is chosen (or in case of a tie, some tie resolution is performed, cf. Subsection~\ref{tiebreaking}). We can see the voting process of the $k$-NN classifier as an attempt to make an optimal class decision, given an estimate of the conditional class probabilities in a local region of the data space. The radius of this region is determined by the distance of the $k$-furthest neighbor. Sometimes, if $k$ is small, and the data is very sparse, or the class labels are noisy, the ``local'' estimate is very unreliable. As it turns out in experimental work, using a larger value of $k$ can often lead to higher accuracy. The reason for this is that in densely populated regions, with larger $k$ the local estimates become more reliable, because they are "smoother". However, when the majority voting method is used, smoothing can easily become oversmoothing in sparser regions of the same data set. The reason for this is that the radius of the $k$-NN region can become extended far beyond the local neighborhood of the query point, but the far neighbors will receive equal influence as the close neighbors. This can result in classification errors that could easily have been avoided if the measure of influence would somehow be correlated with the measure of similarity. 
To remedy this, we have implemented three types of distance weighted voting functions in TiMBL. A voting rule in which the votes of different members of the nearest neighbor set are weighted by a function of their distance to the query, was first proposed by Dudani~\shortcite{Dudani76}. In this scheme, henceforth referred to as IL (for inverse-linear), a neighbor with smaller distance is weighted more heavily than one with a greater distance: the nearest neighbor gets a weight of 1, the furthest neighbor a weight of 0 and the other weights are scaled linearly to the interval in between (\namecite{Dudani76}, Equation~\ref{dudani_eq}.). \begin{equation} \label{dudani_eq} w_{j}= \left \{ \begin{array}{ll} \frac{d_{k} - d_{j}}{d_{k} - d_{1}} & \mbox{if $d_{k} \not= d_{1}$ } \\ 1 & \mbox{if $d_{k} = d_{1}$}\\ \end{array} \right. \end{equation} Where $d_{j}$ is the distance to the query of the $j$'th nearest neighbor, $d_{1}$ the distance of the nearest neighbor, and $d_{k}$ of the furthest ($k$'th) neighbor. Dudani (\namecite{Dudani76}, eq. 2 and 3) further proposed the {\em inverse distance weight} (henceforth ID). In Equation~\ref{inverseweight} a small constant is usually added to the denominator to avoid division by zero~\cite{Wettschereck94}. \begin{equation} \label{inverseweight} w_{j}= \left \{ \begin{array}{ll} \frac{1}{d_{j} + \epsilon} \\ \end{array} \right. \end{equation} Another weighting function considered here is based on the work of \namecite{Shepard87}, who argues for a universal perceptual law which states that the relevance of a previous stimulus for the generalization to a new stimulus is an exponentially decreasing function of its distance in a psychological space (henceforth ED). This gives the weighed voting function of Equation~\ref{expdecayweight}, where $\alpha$ and $\beta$ are constants determining the slope and the power of the exponential decay function. 
\begin{equation} \label{expdecayweight} w_{j}= e^{-\alpha d_{j}^\beta} \end{equation} Note that in Equations~\ref{inverseweight} and ~\ref{expdecayweight} the weight of the nearest and furthest neighbors and the slope between them depend on their absolute distance to the query. This assumes that the relationship between absolute distance and the relevance gradient is fixed over different datasets. This assumption is generally false; even within the same dataset, different feature weighting metrics can cause very different absolute distances. Figure~\ref{dist-weight-fig} visualises a part of the curves of ID and ED, the latter with a few varied settings of $\alpha$ and $\beta$. Generally, both distance weighting functions assign highly differing weights for close neighbors, and less differing weights for more distant neighbors. ID assigns very high votes (distance weights) to nearest neighbors at distances approaching 0.0 - in effect it assigns absolute preference to exact matches. In contrast, all ED variants have a vote of 1.0 for exact matches, and have a shallower curve than the ID curve for higher distances. Higher values of $\alpha$ in the ED function assign relatively higher weights to exact matches. When $\beta$ is set to larger values than $1.0$, the ED curve becomes bell-shaped, effectively assigning relatively less different weights between exact-matching neighbors and near-exact matching instances. \begin{figure}[htb] \begin{center} \leavevmode \epsfxsize=0.8\columnwidth \epsffile{distanceweight-ided.eps} \caption{Visualisation of the Inverse Distance weighting function (IL) and three variants of the Exponential Decay distance weighting function (ED) varying settings of $\alpha$ (1) and $\beta$ (b). } \label{dist-weight-fig} \end{center} \end{figure} Following Dudani's proposal, the benefits of weighted voting for $k$-NN have been discussed widely, e.g. \cite{Bailey+78,Morin+81,MacLeod+87}, but mostly from an analytical perspective. 
With the popularity of Instance-Based Learning applications, these issues have gained a more practical importance. In his thesis on $k$-NN classifiers, \namecite{Wettschereck94} cites Dudani, but proceeds to work with Equation~\ref{inverseweight}. He tested this function on a large amount of datasets and found weak evidence for performance increase over majority voting. An empirical comparison of the discussed weighted voting methods in~\cite{Zavrel97} has shown that weighted voting indeed often outperforms unweighted voting, and that Dudani's original method (Equation~\ref{dudani_eq}) mostly outperforms the other two methods. From that set of experiments, it also seems that Dudani's method shows its optimal performance at much larger values of $k$ than the other voting methods. \subsection{Tie breaking} \label{tiebreaking} Thus far we have described the last step of $k$-NN classification as taking the majority category among the set of nearest neighbors, where their vote is either unweighted or weighted by their distance (subsection~\ref{distweightvote}). Especially in case of unweighted voting, ties can occur; e.g.\ of a set of ten nearest neighbors, five vote for class $A$, and the other five for $B$. The procedure for breaking this tie in the $k$-NN classifier in TiMBL is as follows. First, the value of the $k$ parameter is incremented by $1$, and the additional nearest neighbors at this new $k$th distance are added to the current nearest neighbor set ($k$ is subsequently reset to its user-specified value). If the tie in the class distribution persists, then the class label is selected with the highest overall occurrence in the training set. If that is also equal, then the first class is taken that was encountered when reading the training instance file. Optionally, TiMBL can be set to avoid ties by making a {\em random}\/ choice of a classification from a class distribution in a nearest-neighbor set, weighted by the distribution of the classes in the set. 
\subsection{Exemplar weighting} \label{exemplar} Exemplar weighting in memory-based learning captures the intuition that some instances are better (more reliable, more typical, more regular) nearest neighbors than others. Classification accuracy could benefit from giving these instances some priority in the $k$-NN classification process. This idea has been explored in the context of on the one hand classification \cite{Salzberg90,Zhang92}, and on the other hand editing bad instances from memory \cite{Aha+91}. \namecite{Salzberg90}, as a classic example, uses {\em class-prediction strength}: the ratio of the number of times the instance type is a nearest neighbor of another instance with the same class and the number of times that the instance type is the nearest neighbor of another instance type regardless of the class. Another example is {\em typicality}\/ as used by \namecite{Zhang92}. Exemplar weights could in principle be used either as weights in the class voting (as distance weights, cf. Subsection~\ref{distweightvote}), or as weights in the distance metric (eq.~\ref{distancew}). TiMBL supports only the latter type, and in this respect exemplar weighting is not an intrinsic part of TiMBL. TiMBL does not compute exemplar weighting metrics itself, but only allows users to specify preprocessed exemplar weights with the {\tt -s} input option. Subsequently, when the distance between a test instance and a memory instance is computed, TiMBL uses the memory instance's weight as follows, where $\Delta^{E}(X,Y)$ is the exemplar-weighted distance between instances $X$ and $Y$, and $ew_{X}$ is the exemplar weight of memory instance $X$: \begin{equation} \Delta^{E}(X,Y) = \frac{\Delta(X,Y)}{ew_{X} + \epsilon} \label{exweight} \end{equation} $\epsilon$ is the smallest non-zero number, and is used to avoid division by zero. Exemplar weights approaching zero yield very large distances; relatively higher values yield relatively smaller distances. 
Note that when a training instance occurs more than once in a training set, TiMBL expects it to have the same example weight with all occurrences; TiMBL cannot handle different example weights for the same instance type. TiMBL produces a warning ({\em Warning: deviating exemplar weight in line \#$\ldots$}), and uses the first weight found for the instance. \section{Indexing optimizations} \label{indexing} The discussion of the algorithm and the metrics in the section above is based on a naive implementation of nearest neighbor search: a flat array of instances which is searched from beginning to end while computing the similarity of the test instance with each training instance. Such an implementation, unfortunately, reveals the flip side of the lazy learning coin. Although learning is very cheap: just storing the instances in memory, the computational price of classification can become very high for large data sets. The computational cost is proportional to $N$, the number of instances in the training set, times $f$, the number of features. In our current implementation of {\sc ib1} we use tree-based indexing to alleviate these costs. \subsection{Tree-based indexing} The tree-based memory indexing operation replaces the flat array by a tree structure. Instances are stored in the tree as paths from a root node to a leaf, the arcs of the path are the consecutive feature-values, and the leaf node contains a {\em distribution}\/ of classes, i.e.~a count of how many times which class occurs with this pattern of feature-values. % (see Figure~\ref{example2}). Due to this storage structure, instances with identical feature-values are collapsed into a single path, and only their separate class information needs to be stored in the distribution at the leaf node. Many different {\bf tokens} of a particular {\bf instance type} share one path from the root to a leaf node. Moreover, instances which share a prefix of feature-values, also share a partial path. 
This reduces storage space (although at the cost of some book-keeping overhead) and has two implications for nearest neighbor search efficiency. %\begin{figure}[htb] % \begin{center} % \leavevmode % \epsfxsize=0.8\columnwidth % \epsffile{example2.eps}\ % \caption{A tree-structured storage of the instance % base from figure~\ref{example1}. An exact match for % the test is in this case directly found by a top down % traversal of the tree (grey path). If there is no % exact match, all paths are interpreted as instances % and the distances are computed. The order of the % features in this tree is based on Gain Ratio. % } % \label{example2} % \end{center} %\end{figure} First, the tree can be searched top-down very quickly for {\em exact matches}. When $k=1$, an exact match ($\Delta(X,Y)=0$) can never be beaten, so then it is possible to omit any further distance computations. The shortcut is built into TiMBL, but by default it is not used with $k>1$. TiMBL does, however, offer the possibility to use the shortcut at any value of $k$, with the command line switch ({\tt +x}. Using it can speed up classification radically for some types of data, but with $k>1$, the shortcut is not guaranteed to give the same performance (for better or for worse) as classification without it. Second, the distance computation for the nearest neighbor search can re-use partial results for paths which share prefixes. This re-use of partial results is in the direction from the root to the leaves of the tree. When we have proceeded to a certain level of the tree, we know how much similarity (Equation~\ref{overlapeq}) can still contribute to the overall distance (Equation~\ref{distance}), and discard whole branches of the tree which will never be able to rise above the partial similarity of the current least similar nearest neighbor. 
By doing the search depth first\footnote{Suggested by Gert Durieux.}, the similarity threshold quickly gets initialized to a good value, so that large parts of the search space can be pruned\footnote{With the special command line setting {\tt --silly=true} this tree search shortcut is switched off; as the name of the setting suggests, this is not recommended, except for explicit speed comparisons.}. Disregarding this last constraint on search, the number of feature-value comparisons is equal to the number of arcs in the tree. Thus if we can find an ordering of the features which produces more overlap between partial paths, and hence a smaller tree, we can gain both space and time improvements. An ordering which was found to produce small trees for many of our NLP data sets is Gain Ratio divided by the number of feature-values (this is the default setting). Through the {\tt --Treeorder=} command line switch, however, the user is allowed to experiment with different orderings. Note that different orderings may only affect classification speed, not the actual classifications. \section{IGTree} \label{igtree} Using Information Gain rather than unweighted Overlap distance to define similarity in {\sc ib1} improves its performance on several {\sc nlp} tasks \cite{Daelemans+92b,VandenBosch+93,VandenBosch97}. The positive effect of Information Gain on performance prompted us to develop an alternative approach in which the instance memory is restructured in such a way that it contains the same information as before, but in a compressed decision tree structure. We call this algorithm {\sc IGTree}~\cite{Daelemans+97} %(see Figure~\ref{example3} %for an illustration). In this structure, similar to the tree-structured instance base described above, instances are stored as paths of connected nodes which contain classification information. Nodes are connected via arcs denoting feature values. 
Information Gain is used to determine the order in which instance feature-values are added as arcs to the tree. The reasoning behind this compression is that when the computation of information gain points to one feature clearly being the most important in classification, search can be restricted to matching a test instance to those memory instances that have the same feature-value as the test instance at that feature. Instead of indexing all memory instances only once on this feature, the instance memory can then be optimized further by examining the second most important feature, followed by the third most important feature, etc. Again, compression is obtained as similar instances share partial paths. %\begin{figure}[htb] % \begin{center} % \leavevmode % \epsfxsize=0.7\columnwidth % \epsffile{example3.eps} % \caption{A pruned {\sc IGTree} for the instance base % of Figure~\ref{example1}. The classification for % the test instance is found by top down search of the % tree, and returning the class label (default) of the % node after the last matching feature-value (arc). Note % that this tree is essentially a compressed version of % the tree in Figure~\ref{example2}. % } % \label{example3} % \end{center} %\end{figure} Because {\sc IGTree} makes a heuristic approximation of nearest neighbor search by a top down traversal of the tree in the order of feature relevance, we no longer need to store all the paths. The idea is that it is not necessary to fully store those feature-values of the instance that have lower Information Gain than those features which already fully disambiguate the instance classification. Apart from compressing all training instances in the tree structure, the {\sc IGTree} algorithm also stores with each non-terminal node information concerning the {\em most probable} or {\em default} classification given the path thus far, according to the bookkeeping information maintained by the tree construction algorithm. 
This extra information is essential when processing unknown test instances. Processing an unknown input involves traversing the tree (i.e., matching all feature-values of the test instance with arcs in the order of the overall feature Information Gain), and either retrieving a classification when a leaf is reached (i.e., an exact match was found), or using the default classification on the last matching non-terminal node if an exact match fails. In sum, it can be said that in the trade-off between computation during learning and computation during classification, the {\sc igtree} approach chooses to invest more time in organizing the instance base using Information Gain and compression, to obtain simplified and faster processing during classification, as compared to {\sc ib1} and {\sc ib1-ig}. The generalization accuracy of {\sc IGTree} is usually comparable to that of {\sc ib1-ig}; often slightly worse, but sometimes even better. The two causes for {\sc IGTree}'s surprisingly good accuracies attained with dramatically faster classification are that (i) most 'unseen' instances contain large parts that fully match stored parts of training instances, and (ii) the probabilistic information stored at non-terminal nodes (i.e., the default classifications) still produces strong `best guesses' when exact matching fails. The difference between the top-down traversal of the tree and precise nearest neighbor search becomes more pronounced when the differences in informativity between features are small. In such a case a slightly different weighting would have produced a switch in the ordering and a completely different tree. The result can be a considerable change in classification outcomes, and hence also in accuracy. However, we have found in our work on NLP datasets that when the goal is to obtain a very fast classifier for processing large amounts of text, the tradeoff between a somewhat lower accuracy against stellar speed increases can be very attractive. 
It should be noted that by design, {\sc IGTree} is not suited for numeric features, as it does not apply any form of discretization.
A heuristic that we have used with some success is based on {\em average feature information gain}; when the Information Gain of a feature exceeds the average Information Gain of all features $+$ one standard deviation of the average, then the feature is used for constructing an {\sc IGTree}, including the computation of defaults on nodes. When the Information Gain of a feature is below this threshold, and the node is still ambiguous, tree construction halts and the leaf nodes at that point represent case bases containing subsets of the original training set. During search, the normal {\sc IGTree} search algorithm is used, until the case-base nodes are reached, in which case regular {\sc ib1} nearest neighbor search is used on this sub-case-base. {\sc tribl2} does not employ a fixed switching point. Rather, during the classification of an instance it continues to use {\sc IGTree} as long as it finds matching feature values in the weighting-governed feature ordering. Only when it finds a mismatch it reverts to {\sc ib1} classification on all remaining features. The reasoning behind this mismatch-based switching is that it offers a fairly optimal minimalisation of the use of {\sc ib1}; it is only invoked when mismatching occurs, which is the typical point in which {\sc ib1} can improve on decision-tree-style classification, which does not consider the other potentially matching features in the ordering \cite{Daelemans+99}. A final note: as with {\sc IGTree}, it does not make sense to use {\sc tribl} and {\sc tribl2} without feature weighting, so do not combine {\sc tribl} or {\sc tribl2} with ({\tt -w 0}). \section{IB2: Incremental editing} \label{ib2} In memory-based learning it seems sensible to keep any instance in memory that plays a (potentially) positive role in the correct classification of other instances. Alternatively, when it plays no role at all, or when it is disruptive for classification, it may be a good idea to discard, or {\em edit} it from memory. 
On top of not harming or even improving generalization performance, the editing of instances from memory could also alleviate the practical processing burden of the $k$-NN classifier kernel, since it would have less instances to compare new instances to. This potential double pay-off spawned a distinct line of work on editing in the $k$-NN classifier quite early \namecite{Hart68} and \namecite{Wilson72}. TiMBL offers an implementation of one particular editing algorithm called {\sc ib2} \cite{Aha+91}, an extension to the basic {\sc ib1} algorithm introduced in the same article. {\sc ib2} implements an incremental editing strategy. Starting from a seed memory filled with a certain (usually small) number of labeled training instances, {\sc ib2} adds instances incrementally to memory only when they are {\em misclassified}\/ by the $k$-NN classifier on the basis of the instances in memory at that point. These instances are added, since they are assumed to be representatives of a part of the complete instance space in which they themselves and potentially more nearest-neighbor instances have a particular class different from the class of neigboring instances already in memory. The economical idea behind {\sc ib2} is that this way typically only instances on the boundaries of such areas are stored, and not the insides of the areas; the classification of instances that would be positioned well inside such areas is assumed to be safeguarded by the memorized boundary instances surrounding it. Although the {\sc ib2} may optimize storage considerably, its strategy to store all misclassified instances incrementally makes {\sc ib2} sensitive to noise \cite{Aha+91}. It is also yet unclear what the effect is of the size of the seed. 
\section{Advanced evaluation metrics} \label{advancedstats} Aside from accuracy (the percentage of correctly classified test instances), TiMBL offers some more evaluation metrics that have become common in information retrieval and machine learning in general, namely precision, recall, and F-score, and ROC-space (with dimensions true positive rate and false positive rate), and AUC. We describe these metrics in more detail here. \begin{figure} \begin{center} \epsfig{file=pos-neg.eps, width=0.5\textwidth} \vspace*{-0.5cm} \end{center} \caption{Class-specific confusion matrix containing the basic counts used in the advanced performance metrics. \label{confmat}} \end{figure} Figure~\ref{confmat} displays the general confusion matrix\index{confusion matrix} for one class $C$, splitting all classifications on a test set into four cells. The TP or true positives cell contains a count of examples that have class $C$ and are predicted to have this class correctly by the classifier. The FP or false positives cell contains a count of examples of a different class that the classifier incorrectly classified as $C$. The FN or false negatives cell contains examples of class $C$ for which the classifier predicted a different class label than $C$. On the basis of these four numbers and the total number of positive examples $P=TP+FN$ and negative examples $N=FP+TN$, we can compute the following performance measures: \begin{description} \item[Precision]\index{precision} $= \frac{TP}{TP+FP}$, or the proportional number of times the classifier has correctly made the decision that some instance has class $C$. \item[Recall or True Positive Rate (TPR)]\index{TPR}\index{recall}\index{true positive rate} $= \frac{TP}{P}$, or the proportional number of times an example with class $C$ in the test data has indeed been classified as class $C$ by the classifier. 
\item[False Positive Rate (FPR)]\index{FPR}\index{false positive rate} $= \frac{FP}{N}$, or the proportional number of times an example with a different class than $C$ in the test data has been classified as class $C$ by the classifier. \item[F-score]\index{F-score} $= \frac{2 \times precision \times recall}{precision + recall}$, or the harmonic mean\index{harmonic mean} of precision and recall \cite{VanRijsbergen79}, is a commonly used metric to summarize precision and recall in one measure. The left part of Figure~\ref{spaces} shows F-score isolines in the two-dimensional space of recall (x-axis) and precision (y-axis). The curvature of the isolines is caused by the harmonic aspect of the formula (in contrast, the normal mean has straight isolines orthogonal to the $x=y$ diagonal), which penalizes large differences between precision and recall. The isolines could be likened to height isolines in a map, where the peak of the hill is at the upper right corner of the space. \item [AUC]\index{AUC}\index{area under the curve} or {\em area under the curve}\/ in the so-called ROC\index{ROC space} or {\em receiver operator characteristics}\/\index{receiver operator characteristics} space \cite{Egan75,Swets+00}, is the surface of the grey area in the right graph of Figure~\ref{spaces}. The ROC space is defined by the two dimensions FPR (false positive rate, x-axis) and TPR (true positive rate, or recall, y-axis). The difference with F-score is that it does not make use of the statistically unreliable precision metric; rather, it takes into account all cells of the matrix in Figure~\ref{confmat} including the TN (true negative) cell (for a more detailed description and arguments for using ROC analysis, cf. \cite{Fawcett04}). Its ``peak'' is in the upper left corner, at a FPR of zero and a TPR of 1. 
Rather than using the harmonic mean, it is common to report on the AUC, area under the classifier's TPR-FPR curve, where in the case of a discrete-output classifier such as {\sc TiMBL} this can be taken to mean the two lines connecting the experiment's TPR and FPR to the $(0,0)$ coordinate and the $(1,1)$ coordinate, respectively; the AUC is then the grey area between these points and coordinate $(1,0)$. \end{description} While these advanced statistics can be computed per class, they can also be averaged to produce a single outcome for a full test set. Common methods for averaging F-scores and AUC scores are micro-averaging and macro-averaging. In micro-averaging, each class' F-score or AUC is weighted proportionally to the frequency of the class in the test set. A macro-average sums the F-scores or AUCs and divides the total by the number of classes in the training set. In computing these averages, TiMBL bases itself on the classes in the training set. When a class does not re-occur in test material, it can have no recall, but it can have precision, hence it is always incorporated in averages. A class that occurs in test material but not in training material can never be predicted correctly, and is never included in averages. \begin{figure} \begin{center} \begin{minipage}[t]{0.53\textwidth} \epsfig{file=fspace.eps, width=\textwidth} \end{minipage}\hfill \begin{minipage}[t]{0.47\textwidth} \epsfig{file=roc-auc.eps, width=\textwidth} \end{minipage} \end{center} \caption{Precision--recall space with F-score isolines (left), and ROC space with an experimental outcome marked by the dot, and the outcome's AUC, the shaded surface between the dot and coordinates $(0,0)$, $(1,0)$, and $(1,1)$ (right). \label{spaces}} \end{figure} \section{Applications of TiMBL} \label{furtherreading} This section provides a brief historical overview of work that has used TiMBL as a tool. 
Much of this work concerns tasks in natural language processing, but TiMBL has been applied in other domains as well. For historical background predating the 1990s, see \cite{Daelemans+05}. \subsubsection{Algorithmic development} As the original user and developer groups, the Tilburg and Antwerp groups have published a number of papers and articles containing descriptions of the algorithms and specialised metrics collected in TiMBL, usually demonstrating their functioning using NLP tasks. The {\sc ib1-ig} algorithm was first introduced in \cite{Daelemans+92b} in the context of a comparison of memory-based approaches with error-back\-propagation learning for a hyphenation task. Predecessor versions of {\sc IGTree} can be found in \cite{Daelemans+93c,VandenBosch+93} where they are applied to grapheme-to-phoneme conversion. See \cite{Daelemans+97} for a description and review of {\sc IGTree} and {\sc ib1-ig}. {\sc tribl} is described in \cite{Daelemans+97d}. Experiments with distance-weighted class voting are described in \cite{Zavrel97}. Aspects of using binary-valued (unpacked multi-valued) features are discussed in \cite{VandenBosch+00}. \namecite{Raaijmakers00} describes an extension of TiMBL with error-correcting output codes. \namecite{Hendrickx+04} report on an experiment to import maximum-entropy matrices to replace {\sc mvdm} matrices, improving over the maximum-entropy classifier. Comparisons between memory-based learning and editing variants are reported in \cite{VandenBosch99,Daelemans+99}. A hybrid of TiMBL and the {\sc ripper} rule-induction algorithm \cite{Cohen95} is described in \cite{VandenBosch00,VandenBosch04}. Using TiMBL as a classifier combination method is discussed in \cite{Halteren+01}. 
\namecite{VandenBosch04} presents a search algorithm to find optimal combinations of parameter settings automatically, given a labeled training set of examples, showing large gains over the default settings (also of other machine learning algorithms). Parallelization of TiMBL, through splitting either the training set or the test set in $n$ pieces in shared-memory multi-processor architectures, is explored in \cite{VandenBosch+07b}. \subsubsection{Applications in morpho-phonology} The memory-based algorithms implemented in the TiMBL package have been targeted to a large range of Natural Language Processing tasks. Examples of applications in the {\bf morpho-phonological} area are hyphenation and syllabification \cite{Daelemans+92b}; classifying phonemes in speech \cite{Kocsor+00}; assignment of word stress \cite{Daelemans+94}; grapheme-to-phoneme conversion \cite{VandenBosch+93,Daelemans+96,Canisius+06}; diminutive formation \cite{Daelemans+98a}; and morphological analysis \cite{VandenBosch+96,VandenBosch+99,Canisius+06}. Although these examples are applied mostly to Germanic languages (English, Dutch, and German), applications to other languages with more complicated writing systems or morphologies, or with limited resources, have also been presented: for example, letter-phoneme conversion in Scottish Gaelic \cite{Wolters+97}, morphological analysis of Arabic \cite{Marsi+05}, or diacritic restoration in languages with a diacritic-rich writing system \cite{Mihalcea02,DePauw+07}. \subsubsection{Linguistic and psycholinguistic modelling} Whereas most work using TiMBL has been oriented towards natural language engineering applications, the {\bf linguistic} and {\bf psycholinguistic} relevance of memory-based learning is another focus of research in Antwerp, Tilburg and elsewhere. 
Work in this area has been done on stress assignment in Dutch simplex words \cite{Daelemans+94,Gillis+00} and English compounds \cite{Plag+07}, reading aloud \cite{VandenBosch+00b}, phonological bootstrapping \cite{Durieux+00}, the prediction of linking morphemes in Dutch \cite{Krott+01}, morphology \cite{Eddington00,Eddington03}, and the Dutch plural inflection \cite{Keuleers+07}. \namecite{Vandekerckhove+13} use TiMBL to develop a language bigram model and use this to explain behavior of patients with a specific impairment that prohibits them to assess adjective orderings as normal or less felicitous. A comparison to other analogical methods for linguistics is provided in \cite{Daelemans+97f,Daelemans02}. \namecite{VandenBosch+13b} offer links to work in psychology on models of human memory, in particular to episodic memory and fast memory access with global matching models \cite{Clark+96}. \subsubsection{Applications in syntax and semantics} At the {\bf syntactic} sentence level TiMBL has been applied to Part-of-Speech tagging \cite{Daelemans+96b,Zavrel+99,Halteren+01}; PP-attachment \cite{Zavrel+97b}; subcategorization \cite{Buchholz98}; phrase chunking \cite{Veenstra98,Sang+99}; shallow parsing of English \cite{Daelemans+99a,Buchholz+99,Yeh00} and Arabic \cite{Azmi+10}; clause identification \cite{Orasan00,Sang01}; detecting the scope of negation markers and hedge cues \cite{Morante+08c,Morante+09c}; sentence-boundary detection \cite{Stevenson+00}; and, beyond the sentence level, to co-reference resolution \cite{Preiss02,Mitkov+02,Hoste05,Klenner+08,Wunsch+09,Zhekova+11}. Memory-based learning has been integrated as a classifier engine in more complex dependency parsing systems \cite{Nivre+04,Sagae+05,Canisius+06b}, and dependency parsing in combination with semantic role labeling \cite{Morante+09b}. 
\subsubsection{Other applications} TiMBL has been involved in several subtasks of {\bf natural language generation}: predicting the order of prenominal adjectives for generation \cite{Malouf00}, article generation \cite{Minnen+00}; preposition generation \cite{Lee+08b}; generating referring expressions \cite{Hendrickx+08}; and ranking paraphrases of noun compounds \cite{Wubben10}. Memory-based learning has been applied successfully to {\bf lexical semantics}, in particular to word sense disambiguation \cite{Veenstra+00,Stevenson+99,Kokkinakis00,Mihalcea02,Hoste+02,DeCadt+04}, but also in other lexical semantic tasks such as determining noun countability \cite{Baldwin+03}, animacy \cite{Orasan+01}, semantic relations within noun compounds \cite{Kim+06b,Nastase+06}, and cross-linguistic word sense disambiguation, a subtask of machine translation \cite{VanGompel10,VanGompel+13,VanGompel+14}. As a tool for {\bf text mining}, TiMBL has been used for named-entity recognition \cite{Buchholz+00,Hendrickx+03,DeMeulder+03,Sporleder+06b,Leveling+06,VandenBosch+13c}, information extraction \cite{Zavrel+00b,Zavrel+03,Ahn06}, event extraction \cite{Morante+09d}, text classification \cite{Spitters00}, question classification \cite{Garcia+06,Dridan+07}, spam filtering \cite{Androutsopoulos+00}, and authorship attribution \cite{Luyckx+08,Kestemont+09}. In the field of {\bf discourse and dialogue}, TiMBL has been used for dialogue act classification and shallow semantic analysis of speech-recognised utterances \cite{Gustafson+99,Krahmer+01,VandenBosch+01,Lendvai+02a,Lendvai+03}, in disfluency detection in transcribed spontaneous speech \cite{Lendvai+03c}, in classifying ellipsis in dialogue \cite{Fernandez+04}, and in classifying errors in answers to reading comprehension tasks \cite{Bailey+08}. Relations to {\bf statistical language modeling}, in particular the interesting equivalence relations with back-off smoothing in probabilistic classifiers, are discussed in \cite{Zavrel+97}. 
Relations between classification-based word prediction and statistical language modeling are identified in \cite{VandenBosch05,VandenBosch06,Stehouwer+09b}. Next-word prediction with TiMBL has been specifically applied to {\bf word completion} \cite{VandenBosch+08,VandenBosch11,Verberne+12,Stoop+14}. TiMBL has been shown to be a useful {\bf error detector} and corrector, such as in the context of error detection in textual databases \cite{Sporleder+06}, in detecting dependency parsing annotation errors \cite{Dickinson09}, in confusible disambiguation \cite{Stehouwer+09}, and in preposition and determiner errors \cite{VandenBosch+12,VandenBosch+13}. In {\bf machine translation}, $k$-nearest neighbor classification offers a conceptual bridge between example-based machine translation (EBMT) and statistical MT. Pure memory-based approaches are described in \cite{VandenBosch+07,Canisius+09,VandenBosch+09,VanGompel+09}; hybrids with statistical machine translation are presented in \cite{Stroppa+07,Haque+09,Haque+10}; sub-sentential paraphrasing using pivot translations is described in \cite{Max09}. Although most of the applications of TiMBL are in the natural language processing and computational linguistics areas, it is occasionally used in other related domains as well. Examples are the classification of newborns' cries \cite{Feier+14}, scaffolded learning by robots \cite{Saunders+06}, or the detection of clinically important micro-organisms through an electronic nose \cite{Moens+06}. \subsubsection{Dissertations and special issue} The first dissertation-length study devoted to the approach was \cite{VandenBosch97}, in which the approach is compared to alternative learning methods for NLP tasks related to English word pronunciation (stress assignment, syllabification, morphological analysis, alignment, grapheme-to-phoneme conversion). TiMBL is also central in the Ph.D. 
theses of \namecite{Buchholz02}, \namecite{Lendvai04}, \namecite{Hendrickx05}, \namecite{Hoste05}, \namecite{Keuleers08}, \namecite{Canisius09}, and \namecite{Vandekerckhove13}. In 1999 a special issue of the {\em Journal for Experimental and Theoretical Artificial Intelligence} (Vol.~11(3), edited by Walter Daelemans) was devoted to Memory-Based Language Processing. The introduction to this special issue discusses the inspiration sources and alternative developments related to the memory-based approach taken in TiMBL \cite{Daelemans99b}. \ \\ {\it All Tilburg/Antwerp papers referred to in this section, as well as more recent papers, are available in electronic form from the {\sc ILK} home page: {\tt http://ilk.uvt.nl} and the {\sc CLiPS} home page: \\ {\tt http://www.clips.ua.ac.be/}.} \chapter{Software usage and options} \label{reference} \section{Command line options} \label{commandline} The user interacts with TiMBL through the use of command line arguments. When you have installed TiMBL successfully, and you type {\tt timbl} at the command line without any further arguments, it will print an overview of the most basic command line options. {\footnotesize \begin{verbatim} TiMBL 6.4.2 (c) ILK 1998 - 2012. Tilburg Memory Based Learner Induction of Linguistic Knowledge Research Group, Tilburg University CLiPS Computational Linguistics Group, University of Antwerp Mon Apr 23 15:43:26 2012 usage: timbl -f data-file {-t test-file} or see: timbl -h for all possible options \end{verbatim} } If you are satisfied with all of the default settings, you can proceed with just these basics: \begin{description} \item {\tt -f } : supplies the name of the file with the training items. \item {\tt -t } : supplies the name of the file with the test items. \item {\tt -h} : prints a glossary of all available command line options. \end{description} The presence of a training file will make TiMBL pass through the first two phases of its cycle. 
In the first phase it examines the contents of the training file, and computes a number of statistics on it (feature weights etc.). In the second phase the instances from the training file are stored in memory. If no test file is specified, the program exits, possibly writing some of the results of learning to files (see below). If there is a test file, the selected classifier, trained on the present training data, is applied to it, and the results are written to a file the name of which is a combination of the name of the test file and a code representing the chosen algorithm settings. TiMBL then reports the percentage of correctly classified test items. The default settings for the classification phase are: a Memory-Based Learner, with Gain Ratio feature weighting, with $k=1$, and with optimizations for speedy search. If you need to change the settings, because you want to use a different type of classifier, or because you need to make a trade-off between speed and memory-use, then you can use the options that are shown using {\tt -h}. The sections below provide a reference to the use of these command line arguments, and they are roughly ordered by the type of action that the option has effect on. Note that some options (listed with ``{\tt +/-}'') can be turned on ({\tt +}) or off ({\tt -}). \subsection{Algorithm and metric selection} \begin{description} \item {\tt -a or } : determines the classification algorithm. Possible values are: \begin{description} \item {\tt 0} or {\tt IB1} -- the {\sc ib1} ($k$-NN) algorithm (default). See Sections~\ref{mbl} and~\ref{indexing}. \item {\tt 1} or {\tt IGTREE} -- {\sc IGTree}, decision-tree-based optimization. See Section~\ref{igtree}. \item {\tt 2} or {\tt TRIBL} -- {\sc tribl}, a hybrid of {\sc ib1} and {\sc IGTree}. See Section~\ref{tribl}. \item {\tt 3} or {\tt IB2} -- {\sc ib2}, incremental edited memory-based learning. See Section~\ref{ib2}. 
\item {\tt 4} or {\tt TRIBL2} -- {\sc tribl2}, a non-parametric version of {\sc tribl}. See Section~\ref{tribl}. \end{description} \item {\tt -m } : determines which distance metrics are used for each feature. The format of this string is as follows:\\ {\tt GlobalMetric:MetricRange:MetricRange}\\ Where {\tt GlobalMetric} is used for all features except for the ones that are assigned other metrics by following the restrictions given by {\tt :MetricRange}. A range can be written using commas for lists, and hyphens for intervals. The metric code can be one of the following: \begin{itemize} \item {\tt O} -- Overlap (default; see Subsection~\ref{overlap}) \item {\tt M} -- Modified value difference ({\sc mvdm}; see Subsection~\ref{mvdm}) \item {\tt J} -- Jeffrey divergence (see Subsection~\ref{mvdm}) \item {\tt S} -- Jensen-Shannon divergence (see Subsection~\ref{mvdm}) \item {\tt D} -- Dot product (see Subsection~\ref{dotproduct}) \item {\tt C} -- Cosine distance (see Subsection~\ref{dotproduct}) \item {\tt N} -- Numeric (for numeric features; see Subsection~\ref{overlap}) \item {\tt E} -- Euclidean distance (for numeric features; see Subsection~\ref{overlap}) \item {\tt L} -- Levenshtein (see Subsection~\ref{overlap}) \item {\tt DC} -- Dice coefficient (see Subsection~\ref{overlap}) \item {\tt I} -- Ignore (ignore specified features) \end{itemize} For example, {\tt -mO:N3:I2,5-7} sets the global metric to overlap, declares the third feature to be numeric, and ignores features 2 and 5, 6, and 7. Ignore {\em can}\/ be the global metric; it must be followed by a {\tt MetricRange} string with metric {\tt O}, {\tt M}, {\tt J}, {\tt D}, or {\tt N} specifying in the range which features are {\em not}\/ ignored. \item {\tt -w } : chooses between feature-weighting possibilities. The weights are used in the metric of {\sc ib1} and in the ordering of the {\sc IGTree}. Possible values are: \begin{description} \item {\tt -w0} or {\tt -w nw} -- No weighting, i.e. 
all features have the same importance (weight = 1). \item {\tt -w1} or {\tt -w gr} -- Gain Ratio weighting (default). See section~\ref{infogain}. \item {\tt -w2} or {\tt -w ig} -- Information Gain weighting. See section~\ref{infogain}. \item {\tt -w3} or {\tt -w x2} -- Chi-squared ($\chi^2$) weighting. See section~\ref{chisquared}. \item {\tt -w4} or {\tt -w sv} -- Shared Variance weighting. See section~\ref{chisquared}. \item {\tt -w5} or {\tt -w sd} -- Standard Deviation weighting. \item n=$<$filename$>$:$<$number$>$ or n=$<$filename$>$ -- Instead of the five weight settings above we can supply a filename to the {\tt -w} option. This causes TiMBL to read this file and use its contents as weights. If only $<$filename$>$ is given as an argument, the file is supposed to contain one list of feature weights for all features. The $<$filename$>$:$<$number$>$ option assumes that a weights file generated by TiMBL with the {\tt -W} option (and possibly edited by the user) is read back in; the number refers to one of the five numbers above. See section~\ref{weightformat} for a description of the format of weights files. \end{description} \item {\tt -k } : number of nearest neighbors used for extrapolation. Only applicable in conjunction with {\sc ib1} ({\tt -a 0}), {\sc tribl} ({\tt -a 2}), {\sc tribl2} ({\tt -a 4}) and {\sc ib2} ({\tt -a 3}). The default is 1. Especially with the {\sc mvdm} metric it is often useful to determine a good value larger than 1 for this parameter (usually an odd number, to avoid ties). Note that due to ties (instances with exactly the same similarity to the test instance) the number of instances used to extrapolate might in fact be much larger than this parameter. \item {\tt -d } : The type of class voting weights that are used for extrapolation from the nearest neighbor set. {\tt val} can be one of: \begin{itemize} \item {\tt Z} : normal majority voting; all neighbors have equal weight (default). \item {\tt ID} : Inverse Distance weighting. 
See Section~\ref{distweightvote}, Equation~\ref{inverseweight}. \item {\tt IL} : Inverse Linear weighting. See Section~\ref{distweightvote}, Equation~\ref{dudani_eq}. \item {\tt ED::} : Exponential Decay weighting with decay parameters {\tt a} ($\alpha$) and {\tt b} ($\beta$). No spaces are allowed in the string. Parameter {\tt b} can be left unspecified: {\tt ED:} assumes $\beta=1$. The syntax used in previous TiMBL versions ({\tt ED}) is still supported but deprecated. See Section~\ref{distweightvote}, Equation~\ref{expdecayweight}. \end{itemize} \item {\tt -L } : frequency threshold for switching from the {\sc mvdm} or Jeffrey Divergence to the Overlap distance metric. The default is 1 (never switch). When in a pair of matched values one or both values occur less frequently than {\tt n} times in the learning material, TiMBL switches from {\sc mvdm} or Jeffrey Divergence to Overlap. Higher values of {\tt n} force TiMBL to use the Overlap metric more. Only applicable in conjunction with the {\sc mvdm} ({\tt -mM}) and Jeffrey divergence ({\tt -mJ}) or Jensen-Shannon divergence ({\tt -mS}) distance metrics. \item {\tt -b } : determines n ($\geq 1$), the number of instances, to be taken from the top of the training file, to act as the bootstrap set of memorized instances before {\sc ib2} starts adding new instances. Only applicable in conjunction with {\sc ib2} ({\tt -a 3}). \item {\tt -q } : {\tt n} is the {\sc tribl} offset, the index number of the feature (counting from 1) after which {\sc tribl} should switch from {\sc IGTree} to {\sc ib1}. Only applicable in conjunction with {\sc tribl} ({\tt -a 2}). \item {\tt -R } : Resolve ties in the classifier randomly, using a random generator with seed n. {\tt -R } causes the classification to be based on a random pick (with seed n) of a category according to the probability distribution in the nearest neighbor set. 
By default, {\tt -R} is not used, but rather the deterministic tie resolution scheme described in Subsection~\ref{overlap}. \item {\tt -t $<$@file$>$} : If the filename given after {\tt -t} starts with '{\tt @}', TiMBL will read commands for testing from {\tt file}. This file should contain one set of instructions per line. On each line new values can be set for the following command line options: {\tt -B -d -e -k -L -M -o -p -Q -R -t -u +/-v -w +/-x +/-\%}. It is compulsory that each line in {\tt file} contains a {\tt -t } argument to specify the name of the test file. \item {\tt -t } : the string {\tt } is the literal name of the file with the test items. \item {\tt -t leave\_one\_out} : No test file is read, but testing is done on each pattern of the training file, by treating each pattern of the training file in turn as a test case (and the whole remainder of the file as training cases). Only applicable in conjunction with {\sc ib1} ({\tt -a0}). \item {\tt -t cross\_validate} : An $n$-fold cross-validation experiment is performed on the basis of $n$ files (e.g. $1/n$ partitionings of an original data file). The names of these $n$ files need to be in a text file (one name per line) which is given as argument of {\tt -f}. In each fold $f=1 \ldots n$, file number $f$ is taken as test set, and the remaining $n-1$ files are concatenated to form the training set. Only applicable in conjunction with {\sc ib1} ({\tt -a0}). \item {\tt -T } : The $n$th column in the training set of labeled examples contains the label to be predicted, while all other columns represent the input features. By default, the last column is assumed to contain the class labels. \end{description} \subsection{Input options} \begin{description} \item {\tt -f } : the string {\tt } is the literal name of the file with the training items, or (in conjunction with {\tt -t cross\_validate}, the file containing the names of the cross-validation files. 
\item {\tt -F } : Force TiMBL to interpret the training and test file as a specific data format. Possible values for this parameter are: {\tt Compact, C4.5, ARFF, Columns, Sparse, Binary} (case-insensitive). The default is that TiMBL guesses the format from the contents of the first line of the data file. ARFF is not automatically detected. See section~\ref{dataformats} for description of the data formats and the guessing rules. The {\tt Compact} format cannot be used with numeric features. \item {\tt -l } : Feature length. Only applicable with the Compact data format; {\tt } is the number of characters used for each feature-value and category symbol. \item {\tt -i } : Skip the first two training phases: instead of processing a training file, read a previously saved (see {\tt -I} option) instance-base or {\sc IGTree} from the file {\tt treefile}. See section~\ref{treeformat} for the format of this file. \item {\tt --matrixin=} : Read value distance metrics (such as {\sc mvdm} or Jeffrey divergence matrices written to file with {\tt --matrixout=} : Replace the automatically computed value-class probability matrix with the matrices provided in this file. \item {\tt -P } : Specify a path to read the data files from. This path is ignored if the name of the data file already contains path information. \item {\tt -s} : Use the whitespace-delimited exemplar weights, given after each training instance in the training file {\tt }, during classification. {\tt } may contain exemplar weights, but they are not used in classification. If the test file does not have an exemplar weights column, you must specify {\tt -s1}. Exemplar weights can also be ignored (in both training and test files) by specifying {\tt -s0}. Does not work simultaneously with {\tt --occurrences}. 
\item {\tt --occurrences=\{train|test|both\}} : Use the whitespace-delimited integers, given after each instance in a training or test file (or both when training and test actions are invoked at once), as representing the number of occurrences that instance should be counted as. By default these counts are not given, in which case TiMBL counts every instance as a single token. Does not work simultaneously with {\tt -s}. \end{description} \subsection{Output options} \begin{description} \item {\tt -I } : After phase two of learning, save the resulting tree-based representation of the instance-base or {\sc igtree} in a file. This file can later be read back in using the {\tt -i} option (see above). For {\sc IGTree} this also automatically saves the current weights into {\tt treefile.wgt} unless this is overridden by {\tt -W}. See section~\ref{treeformat} for a description of the resulting file's format. \item {\tt --matrixout=} : Store calculated {\sc mvdm} or Jeffrey divergence distance metrics in file {\tt filename}. \item {\tt -X } : instead of the proprietary file format written with the {\tt -I} switch, {\tt -X} writes the TiMBL tree into an XML tree in {\tt }. This XML file cannot be read back into TiMBL. \item {\tt -W } : Save the currently used feature-weights in a file. \item {\tt -U } : Write the automatically computed value-class probability matrix to this file. \item {\tt -n } : Save the feature-value and target category symbols in a C4.5 style ``names file'' with the name {\tt }. Be aware that TiMBL does not mind creating a file with ',' '.' '$|$' and ':' values in features; C4.5 will produce errors on this. \item {\tt -p } : Indicate progress during training and testing after every n processed patterns. The default setting is 100,000. \item {\tt -e } : During testing, compute and print an estimate on how long it will take to classify n test patterns. Off by default. 
\item {\tt +/-v } : Verbosity Level; determines how much information is written to the output during a run. Unless indicated otherwise, this information is written to standard error. The use of {\tt +} turns a given verbosity level {\bf on}, whereas {\tt -} turns it {\bf off} (only useable in non-commandline contexts, such as client/server communication or {\tt -t @testcommandfile}). This parameter can take on the following values (case-insensitive): \begin{description} \item {\tt s} : work silently (turns off all set verbosity levels). \item {\tt o} : show all options set. \item {\tt b}: show node/branch count and branching factor. \item {\tt f} : show calculated feature weights. (default) \item {\tt p} : show {\sc mvdm} matrices. \item {\tt e} : show exact matches. \item {\tt as} : show overall advanced statistics (micro and macro averages of F-score and AUC). \item {\tt cm} : show confusion matrix between actual and predicted classes. \item {\tt cs} : show per-class statistics (precision, recall, true positive rate, false positive rate, F-score, AUC). \item {\tt di} : add the distance of the nearest neighbor to the output file. \item {\tt db} : add class distribution in the nearest neighbor set to the output file. \item {\tt md} : add matching depth and node type (N for non-ending node, L for leaf) to output file. \item {\tt k} : add a summary of class distribution information of all nearest neighbors to the output file (sets {\tt -x}) \item {\tt n} : add nearest neighbors to the output file (sets {\tt -x}) \end{description} You may combine levels using '{\tt +}' e.g. {\tt +v p+db} or {\tt -v o+di}. \item {\tt -G } : Normalize class distributions generated by {\tt +v db}. \begin{description} \item {\tt 0 (zero)} : Normalize distributions so that they add up to 1.0 \item {\tt 1:} : Smooth by adding floating-point $f$ to all class votes (e.g. {\tt -G1:1} performs add-one smoothing). 
\end{description} \item {\tt --Beam=} : Limit the number of returned classes and class votes returned by {\tt +v db} to $n$. Default is infinity (no limit). \item {\tt +/- \%} : Write the percentage of correctly classified test instances, the number of correctly classified instances, and the total number of classified instances (one number per line, three lines in total) to a file with the same name as the output file, but with the suffix ``{\tt .\%}''. \item {\tt -o $<$filename$>$} : Write the test output to filename. Useful for different runs with the same settings on the same testfile, where the default output file name would normally be the same. \item {\tt -O $<$path$>$} : Write all output to the path given here. The default is to write all output to the directory where the test file is located. \item {\tt -V} : Show the TiMBL version number. \end{description} \subsection{Internal representation options} \begin{description} \item {\tt -N } : (maximum) number of features. Obligatory for Sparse and Binary formats. When larger than a pre-defined constant (default 2500), N needs to be set explicitly for all algorithms. \item {\tt +/-x} : turns the shortcut search for exact matches on or off in {\sc ib1} (and {\sc ib2}, {\sc tribl}, and {\sc tribl2}). The default is off ({\tt -x}). Turning it on makes {\sc ib1} generally faster, but with $k>1$ the shortcut produces different results from a genuine $k$ nearest neighbors search, since absolute preference is given to the exact match. \item {\tt -M } : Set the maximum number of nearest neighbors printed using the {\tt +vn} verbosity option. By default this is set to 500, but when you are interested in the contents of really large nearest neighbor sets (which is possible with large $k$ or large data sets with few features), {\tt n} can be increased up to 100,000. \item {\tt +/-H} : Turn on/off hashing of feature values and class labels in TiMBL trees. Hashing is done by default, but with short (e.g. 
one-character) feature values and/or classes less memory is used when hashing is set off. \item {\tt +/-D} : Store class distributions on all nodes in the TiMBL tree. Default is off ({\tt -D}). Setting {\tt +D} costs memory, and is only necessary when using IGTree in combination with the {\tt +v db} verbosity setting (which prints class distributions in the classifier output). \item {\tt -B } : Number of bins used for discretizing numeric data (only used for computing feature weights). \item {\tt -c } : Clipping (threshold) frequency for prestoring {\sc mvdm} matrices. Cells in the matrix are only stored if both feature values occur more than {\tt } times. \item {\tt --clones=} : Classify a test set using $n$ processors in parallel. \item {\tt --Treeorder=} : Set the ordering of the TiMBL tree (with {\sc ib1} and {\sc ib2}), i.e., rank the features according to the metric identified by {\tt }. The default ordering is {\tt G/V} (according to gain ratio divided by the number of values), but some orderings may produce faster classification. Note that different orderings do {\em not}\/ change the classification behavior of {\sc ib1} and {\sc ib2}. 
{\tt } can take the following values: \begin{description} \item {\tt DO} : no ordering (the ordering of the features in the data file is taken) \item {\tt GRO} : gain ratio (eq.~\ref{IGgainratio}) \item {\tt IGO} : information gain (eq.~\ref{IGgain}) \item {\tt 1/V} : $1/V$, where $V$ is the number of values \item {\tt G/V} : gain ratio divided by the number of values \item {\tt I/V} : information gain divided by the number of values \item {\tt X2O} : \chisq \ (eq.~\ref{chisq-eq}) \item {\tt X/V} : \chisq \ divided by the number of values \item {\tt SVO} : shared variance (eq.~\ref{shared-variance-eq}) \item {\tt S/V} : shared variance divided by the number of values \item {\tt GxE} : gain ratio $\times si$, where $si$ is the split info of the feature (eq.~\ref{splitinfo}) \item {\tt IxE} : information gain $\times si$ \item {\tt 1/S} : $1/si$ \end{description} \end{description} \subsection{Hidden options} The commandline interface to TiMBL contains several hidden options that have been built in over time for particular reasons. Some have survived over time, and although their use is not for the faint-hearted, some may offer interesting functionalities. A small list of disclosed hidden options follows. \begin{description} \item {\tt --sloppy=\{true|false\}}: in combination with leave-one-out (LOO) testing, this option turns off all weight recomputation. By default, leaving out one training example out causes all feature weights, value-class matrices, and derived metrics such as {\sc mvdm} to be recomputed, because strictly the example-specific statistics should be absent when it is held out and classified. {\tt --sloppy} skips this, causing a significant speedup, and usually slightly better LOO scores. Use only if your experimental method allows it. Default value is {\tt false}. \item {\tt --silly=\{true|false\}}: set to {\tt true}, switches off the optimized nearest-neighbor search in {\sc ib1} and {\sc tribl}. 
This causes TiMBL to compare all feature values of a test instance to full paths in the TiMBL tree. This causes TiMBL to slow down dramatically on most datasets. Setting is available to enable testing the effect of optimized search. Default value is {\tt false}. \item{\tt --Diversify}: modifies all features weights by subtracting the smallest weight (plus $\epsilon$) from all weights. The smallest weight thus becomes $\epsilon$. This modification ``diversifies'' the feature weights, and was introduced to enhance the effect of {\sc Dimbl}, the multi-CPU variant of TiMBL\footnote{For {\sc Dimbl}, see \url{http://ilk.uvt.nl/dimbl}}. \end{description} \section{File formats} \label{fileformats} This section describes the format of the input and output files used by TiMBL. Where possible, the format is illustrated using the classical ``objects'' data set, which consists of 12 instances of 5 different everyday objects (nut, screw, key, pen, scissors), described by 3 discrete features (size, shape, and number of holes). \subsection{Data files} \label{dataformats} The training and test sets for the learner consist of descriptions of instances in terms of a fixed number of feature-values. TiMBL supports a number of different formats for the instances, but they all have in common that the files should contain one instance per line. The number of instances is determined automatically, and the format of each instance is inferred from the format of the first line in the training set. The last feature of the instance is assumed to be the target category\footnote{unless the {\tt -s} or {\tt --occurrences} options are used, which both assume a final column with numbers; the class is then the before-last feature.}. Should the guess of the format by TiMBL turn out to be wrong, you can force it to interpret the data as a particular format by using the {\tt -F} option. Note that TiMBL, by default, will interpret features as having {\em symbolic, discrete values}. 
Unless you specify explicitly that certain features are numeric, using the {\tt -m} option, TiMBL will interpret numbers as just another string of characters. If a feature is numeric, its values will be scaled to the interval [0,1] for purposes of distance computation (see Equation~\ref{overlapeq}). The computation of feature weights will be based on a discretization of the feature. Once TiMBL has determined the input format, it will skip and complain about all lines in the input which do not respect this format (e.g.~have a different number of feature-values with respect to that format). During testing, TiMBL writes the classifications of the test set to an output file. The format of this output file is by default the same as the input format, with the addition of the predicted category being appended after the correct category. If we turn on higher levels of verbosity, the output files will also contain distributions, distances and nearest neighbor sets. \subsubsection{Column format} \label{comlumnformat} The {\bf column format} uses white space as the separator between features. White space is defined as a sequence of one or more spaces or tab characters. Every instance of white space is interpreted as a feature separator, so it is not possible to have feature-values containing white space. The column format is auto-detected when an instance of white space is detected on the first line before a comma has been encountered. The example data set looks like this in the column format: \begin{footnotesize} \begin{verbatim} small compact 1 nut small long none screw small long 1 key small compact 1 nut large long 1 key small compact none screw small compact 1 nut large long none pen large long 2 scissors large long 1 pen large other 2 scissors small other 2 key \end{verbatim} \end{footnotesize} \subsubsection{C4.5 format} \label{c45format} This format is a derivative of the format that is used by the well-known C4.5 decision tree learning program~\cite{Quinlan93}. 
The separator between the features is a comma, and the category (viz. the last feature on the line) is followed by a period (although this is not mandatory: TiMBL is robust to missing periods)\footnote{The periods after the category are not reproduced in the output}. White space within the line is taken literally, so the pattern {\tt a,\ b\ c,d} will be interpreted as {\tt `a',`\ b\ c',`d'}. An exception is the class label, which should not contain any whitespace. When using this format, especially with linguistic data sets or with data sets containing floating point numbers, one should take special care that commas do not occur as feature values and that periods do not occur within the category. Note that TiMBL's C4.5 format does not require a so called {\em namesfile}. However, TiMBL can produce such a file for C4.5 with the {\tt -n} option. The C4.5 format is auto-detected when a comma is detected on the first line before any white space has been encountered. The example data set looks like this in the C4.5 format: \begin{footnotesize} \begin{verbatim} small,compact,1,nut. small,long,none,screw. small,long,1,key. small,compact,1,nut. large,long,1,key. small,compact,none,screw. small,compact,1,nut. large,long,none,pen. large,long,2,scissors. large,long,1,pen. large,other,2,scissors. small,other,2,key. \end{verbatim} \end{footnotesize} \subsubsection{ARFF format} \label{arffformat} ARFF is a format that is used by the WEKA machine learning workbench~\cite{Garner95,Witten+99}\footnote{WEKA is available from the Waikato University Department of Computer Science, \url{http://www.cs.waikato.ac.nz/~ml/weka}}. Although TiMBL at present does not entirely follow the ARFF specification, it still tries to do as well as it can in reading this format. The ARFF format is not autodetected, and needs to be specified on the commanline with {\tt -F ARFF}. 
In ARFF data, the actual data are preceded by information on feature types,
The same example data set might look like this in the compact format with two characters per feature:
Thus instead of representing a case as: \begin{footnotesize} \begin{verbatim} 1,0,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,1,small. \end{verbatim} \end{footnotesize} We can represent it as: \begin{footnotesize} \begin{verbatim} 1,8,11,12,18,small. \end{verbatim} \end{footnotesize} This format allows one to specify only the index numbers of the active features (indices start at one), while implicitly assuming that the value for all the remaining features is zero. Because each case has a different number of active features, we must specify in some other way what the actual number of features is. This must be done using the {\tt -N} option. As the format is very similar to numeric features, it must always be user-declared using {\tt -F Binary}. The last feature of a line is always interpreted as being the category string. A case with only zeroes can be represented as either `{\tt class} or {\tt ,class}. \subsection{Weight files} \label{weightformat} The feature weights used for computing similarities and for the internal organization of the memory base can be saved to a file {\tt -W}. These files can be read back into TiMBL with {\tt -w $<$filename$>$:$<$weight number$>$}, where the weight number refers to the five options in TiMBL. It is also possible to change these files manually before reading them in -- and additionally it is also possible to write a file from scratch and read this into TiMBL. This allows the experimenter to handcraft feature weights. The generic format for the weights file is as follows. The weights file may contain comments on lines that start with a \# character. The other lines contain the number of the feature followed by its numeric weight. An example of such a file is provided below. The numbering of the weights starts with 1. In this example, the data set has three features. \begin{footnotesize} \begin{verbatim} # DB Entropy: 2.29248 # Classes: 5 # Lines of data: 12 # Fea. 
Weight 1 0.765709 2 0.614222 3 0.73584 \end{verbatim} \end{footnotesize} Weight files written by TiMBL are of the same format, but write all weights in a concatenation, separated by \# lines that carry the abbreviated name of the weight (nw, gr, ig, x2, sv). The following example illustrates this format (which can be edited manually, as long as the same number of lines is kept): \begin{footnotesize} \begin{verbatim} # DB Entropy: 1.61789 # Classes: 5 # Lines of data: 2999 # nw # Fea. Weight 1 1 2 1 3 1 # # gr # Fea. Weight 1 0.0428445870345557 2 0.185070180760327 3 0.325371814230901 # # ig # Fea. Weight 1 0.213887591411729 2 0.669704582861074 3 1.27807624584789 # # sv # Fea. Weight 1 0.0762436694064095 2 0.233998145488354 3 0.596896311429044 # # x2 # Fea. Weight 1 914.619058199289 2 2807.0417532783 3 7160.36815190281 # \end{verbatim} \end{footnotesize} \subsection{Value difference files} \label{mvdmformat} Using the {\sc mvdm} metric, it can sometimes be interesting to inspect the matrix of conditional class probabilities from Equation~\ref{MVDMeq}. By using the {\tt -U} option, we can write the computed matrix to a file. This way we can see which values are considered to be similar by the metric. For each feature a row vector is given for each value, of the conditional probabilities of all the classes (columns) given that value. \begin{footnotesize} \begin{verbatim} targets A, B, C, D, E. 
feature # 1 Matrix: small 0.429 0.286 0.286 0.000 0.000 large 0.000 0.000 0.200 0.400 0.400 feature # 2 Matrix: compact 0.750 0.250 0.000 0.000 0.000 long 0.000 0.167 0.333 0.333 0.167 other 0.000 0.000 0.500 0.000 0.500 feature # 3 Matrix: 1 0.500 0.000 0.333 0.167 0.000 none 0.000 0.667 0.000 0.333 0.000 2 0.000 0.000 0.333 0.000 0.667 \end{verbatim} \end{footnotesize} As long as this format is observed, the file can be modified (manually or by substituting other vector-based representations for the values), and the new matrix can be read in and used with the {\sc mvdm} metric with the {\tt -u} option. \subsection{Tree files} \label{treeformat} Although the learning phase in TiMBL is relatively fast, it can be useful to store the internal representation of the data set both for later usage and for faster subsequent learning. In TiMBL, the data set is stored internally in a tree structure (see Section~\ref{indexing}). When using {\sc ib1}, this tree representation contains all the training cases as full paths in the tree. When using {\sc IGTree}, unambiguous paths in the tree are pruned before it is used for classification or written to file; on the same data, {\sc IGTree} trees are usually considerably smaller than {\sc ib1} trees. In either tree type, the arcs represent feature values and nodes contain class distribution information. The features are in the same order throughout the tree. This order is either determined by memory-size considerations in {\sc ib1}, or by feature relevance in {\sc IGTree}. It can explicitly be manipulated using the {\tt -T} option. We strongly advise to refrain from manually editing the tree file. However, the syntax of the tree file is as follows. 
First a header consisting of information about the status of the tree, the feature-ordering (the permutation from the order in the data file to the order in the tree), and the presence of numeric features is provided\footnote{Although in this header each line starts with '\#', these lines cannot be seen as comment lines.}. Subsequently, unless hashing has been set off explicitly ({\tt -H}), a legenda is given of numeric hash codes for the class names (one unique integer per class) and feature value names (one unique integer per value). Subsequently, the tree's nodes and arcs are given in a proprietary non-indented bracket notation. Starting from the root node, each node is denoted by an opening parenthesis ``('', followed by an integer coding the default class. After this, there is the class distribution list, within curly braces ``\{ \}'', containing a non-empty list of category codes followed by integer counts. After this comes an optional comma-separated list of arcs to child nodes, within ``[ ]'' brackets. An arc is labeled with a coded feature value. The node that the arc leads to again has a class distribution, and any number of child nodes pointed to by arcs. The {\sc ib1} tree constructed from our example data set looks as follows: \begin{footnotesize} \begin{verbatim} # Status: complete # Permutation: < 1, 3, 2 > # Numeric: . # Version 4 (Hashed) # Classes 1 nut 2 screw 3 key 4 pen 5 scissors Features 1 small 2 compact 3 1 4 long 5 none 6 large 7 2 8 other (1{ 1 3, 2 2, 3 3, 4 2, 5 2 }[1(1[3(1[2(1{ 1 3 }) ,4(3{ 3 1 }) ] ) ,5(2[2(2{ 2 1 }) ,4(2{ 2 1 }) ] ) ,7(3[8(3{ 3 1 }) ] ) ] ) ,6(4[3(3[4(3{ 3 1, 4 1 }) ] ) ,5(4[4(4{ 4 1 }) ] ) ,7(5[4(5{ 5 1 }) ,8(5{ 5 1 }) ] ) ] ) ] ) \end{verbatim} \end{footnotesize} The corresponding compressed {\sc IGTree} version is considerably smaller. \begin{footnotesize} \begin{verbatim} # Status: pruned # Permutation: < 1, 3, 2 > # Numeric: . 
# Version 4 (Hashed) # Classes 1 nut 2 screw 3 key 4 pen 5 scissors Features 1 small 2 compact 3 1 4 long 5 none 6 large 7 2 8 other (1{ 1 3, 2 2, 3 3, 4 2, 5 2 }[1(1{ 1 3, 2 2, 3 2 }[3(1{ 1 3, 3 1 }[4(3{ 3 1 }) ] ) ,5(2{ 2 2 }) ,7(3{ 3 1 }) ] ) ,6(4{ 3 1, 4 2, 5 2 }[3(3{ 3 1, 4 1 }) ,7(5{ 5 2 }) ] ) ] ) \end{verbatim} \end{footnotesize} TiMBL tree files generated by versions 1.0 to 3.0 of TiMBL, which do not contain hashed class and value names, are no longer recognized in current TiMBL versions. Backward compatibility to trees generated by versions 1.0 to 3.0 is preserved in TiMBL version 4 up to release 4.3.1. \clearpage \bibliographystyle{fullname} \bibliography{ilk} \end{document} LanguageMachines-timbl-642727d/docs/texfiles/distanceweight-ided.eps000077500000000000000000000354521451477526200255150ustar00rootroot00000000000000%!PS-Adobe-2.0 EPSF-2.0 %%Title: distanceweight-ided.eps %%Creator: gnuplot 3.7 patchlevel 2 %%CreationDate: Fri Sep 12 14:55:25 2003 %%DocumentFonts: (atend) %%BoundingBox: 50 50 410 302 %%Orientation: Portrait %%EndComments /gnudict 256 dict def gnudict begin /Color false def /Solid false def /gnulinewidth 5.000 def /userlinewidth gnulinewidth def /vshift -73 def /dl {10 mul} def /hpt_ 31.5 def /vpt_ 31.5 def /hpt hpt_ def /vpt vpt_ def /M {moveto} bind def /L {lineto} bind def /R {rmoveto} bind def /V {rlineto} bind def /vpt2 vpt 2 mul def /hpt2 hpt 2 mul def /Lshow { currentpoint stroke M 0 vshift R show } def /Rshow { currentpoint stroke M dup stringwidth pop neg vshift R show } def /Cshow { currentpoint stroke M dup stringwidth pop -2 div vshift R show } def /UP { dup vpt_ mul /vpt exch def hpt_ mul /hpt exch def /hpt2 hpt 2 mul def /vpt2 vpt 2 mul def } def /DL { Color {setrgbcolor Solid {pop []} if 0 setdash } {pop pop pop Solid {pop []} if 0 setdash} ifelse } def /BL { stroke userlinewidth 2 mul setlinewidth } def /AL { stroke userlinewidth 2 div setlinewidth } def /UL { dup gnulinewidth mul /userlinewidth exch def dup 1 lt {pop 
1} if 10 mul /udl exch def } def /PL { stroke userlinewidth setlinewidth } def /LTb { BL [] 0 0 0 DL } def /LTa { AL [1 udl mul 2 udl mul] 0 setdash 0 0 0 setrgbcolor } def /LT0 { PL [] 1 0 0 DL } def /LT1 { PL [4 dl 2 dl] 0 1 0 DL } def /LT2 { PL [2 dl 3 dl] 0 0 1 DL } def /LT3 { PL [1 dl 1.5 dl] 1 0 1 DL } def /LT4 { PL [5 dl 2 dl 1 dl 2 dl] 0 1 1 DL } def /LT5 { PL [4 dl 3 dl 1 dl 3 dl] 1 1 0 DL } def /LT6 { PL [2 dl 2 dl 2 dl 4 dl] 0 0 0 DL } def /LT7 { PL [2 dl 2 dl 2 dl 2 dl 2 dl 4 dl] 1 0.3 0 DL } def /LT8 { PL [2 dl 2 dl 2 dl 2 dl 2 dl 2 dl 2 dl 4 dl] 0.5 0.5 0.5 DL } def /Pnt { stroke [] 0 setdash gsave 1 setlinecap M 0 0 V stroke grestore } def /Dia { stroke [] 0 setdash 2 copy vpt add M hpt neg vpt neg V hpt vpt neg V hpt vpt V hpt neg vpt V closepath stroke Pnt } def /Pls { stroke [] 0 setdash vpt sub M 0 vpt2 V currentpoint stroke M hpt neg vpt neg R hpt2 0 V stroke } def /Box { stroke [] 0 setdash 2 copy exch hpt sub exch vpt add M 0 vpt2 neg V hpt2 0 V 0 vpt2 V hpt2 neg 0 V closepath stroke Pnt } def /Crs { stroke [] 0 setdash exch hpt sub exch vpt add M hpt2 vpt2 neg V currentpoint stroke M hpt2 neg 0 R hpt2 vpt2 V stroke } def /TriU { stroke [] 0 setdash 2 copy vpt 1.12 mul add M hpt neg vpt -1.62 mul V hpt 2 mul 0 V hpt neg vpt 1.62 mul V closepath stroke Pnt } def /Star { 2 copy Pls Crs } def /BoxF { stroke [] 0 setdash exch hpt sub exch vpt add M 0 vpt2 neg V hpt2 0 V 0 vpt2 V hpt2 neg 0 V closepath fill } def /TriUF { stroke [] 0 setdash vpt 1.12 mul add M hpt neg vpt -1.62 mul V hpt 2 mul 0 V hpt neg vpt 1.62 mul V closepath fill } def /TriD { stroke [] 0 setdash 2 copy vpt 1.12 mul sub M hpt neg vpt 1.62 mul V hpt 2 mul 0 V hpt neg vpt -1.62 mul V closepath stroke Pnt } def /TriDF { stroke [] 0 setdash vpt 1.12 mul sub M hpt neg vpt 1.62 mul V hpt 2 mul 0 V hpt neg vpt -1.62 mul V closepath fill} def /DiaF { stroke [] 0 setdash vpt add M hpt neg vpt neg V hpt vpt neg V hpt vpt V hpt neg vpt V closepath fill } def /Pent { stroke [] 0 setdash 2 
copy gsave translate 0 hpt M 4 {72 rotate 0 hpt L} repeat closepath stroke grestore Pnt } def /PentF { stroke [] 0 setdash gsave translate 0 hpt M 4 {72 rotate 0 hpt L} repeat closepath fill grestore } def /Circle { stroke [] 0 setdash 2 copy hpt 0 360 arc stroke Pnt } def /CircleF { stroke [] 0 setdash hpt 0 360 arc fill } def /C0 { BL [] 0 setdash 2 copy moveto vpt 90 450 arc } bind def /C1 { BL [] 0 setdash 2 copy moveto 2 copy vpt 0 90 arc closepath fill vpt 0 360 arc closepath } bind def /C2 { BL [] 0 setdash 2 copy moveto 2 copy vpt 90 180 arc closepath fill vpt 0 360 arc closepath } bind def /C3 { BL [] 0 setdash 2 copy moveto 2 copy vpt 0 180 arc closepath fill vpt 0 360 arc closepath } bind def /C4 { BL [] 0 setdash 2 copy moveto 2 copy vpt 180 270 arc closepath fill vpt 0 360 arc closepath } bind def /C5 { BL [] 0 setdash 2 copy moveto 2 copy vpt 0 90 arc 2 copy moveto 2 copy vpt 180 270 arc closepath fill vpt 0 360 arc } bind def /C6 { BL [] 0 setdash 2 copy moveto 2 copy vpt 90 270 arc closepath fill vpt 0 360 arc closepath } bind def /C7 { BL [] 0 setdash 2 copy moveto 2 copy vpt 0 270 arc closepath fill vpt 0 360 arc closepath } bind def /C8 { BL [] 0 setdash 2 copy moveto 2 copy vpt 270 360 arc closepath fill vpt 0 360 arc closepath } bind def /C9 { BL [] 0 setdash 2 copy moveto 2 copy vpt 270 450 arc closepath fill vpt 0 360 arc closepath } bind def /C10 { BL [] 0 setdash 2 copy 2 copy moveto vpt 270 360 arc closepath fill 2 copy moveto 2 copy vpt 90 180 arc closepath fill vpt 0 360 arc closepath } bind def /C11 { BL [] 0 setdash 2 copy moveto 2 copy vpt 0 180 arc closepath fill 2 copy moveto 2 copy vpt 270 360 arc closepath fill vpt 0 360 arc closepath } bind def /C12 { BL [] 0 setdash 2 copy moveto 2 copy vpt 180 360 arc closepath fill vpt 0 360 arc closepath } bind def /C13 { BL [] 0 setdash 2 copy moveto 2 copy vpt 0 90 arc closepath fill 2 copy moveto 2 copy vpt 180 360 arc closepath fill vpt 0 360 arc closepath } bind def /C14 { BL [] 0 
setdash 2 copy moveto 2 copy vpt 90 360 arc closepath fill vpt 0 360 arc } bind def /C15 { BL [] 0 setdash 2 copy vpt 0 360 arc closepath fill vpt 0 360 arc closepath } bind def /Rec { newpath 4 2 roll moveto 1 index 0 rlineto 0 exch rlineto neg 0 rlineto closepath } bind def /Square { dup Rec } bind def /Bsquare { vpt sub exch vpt sub exch vpt2 Square } bind def /S0 { BL [] 0 setdash 2 copy moveto 0 vpt rlineto BL Bsquare } bind def /S1 { BL [] 0 setdash 2 copy vpt Square fill Bsquare } bind def /S2 { BL [] 0 setdash 2 copy exch vpt sub exch vpt Square fill Bsquare } bind def /S3 { BL [] 0 setdash 2 copy exch vpt sub exch vpt2 vpt Rec fill Bsquare } bind def /S4 { BL [] 0 setdash 2 copy exch vpt sub exch vpt sub vpt Square fill Bsquare } bind def /S5 { BL [] 0 setdash 2 copy 2 copy vpt Square fill exch vpt sub exch vpt sub vpt Square fill Bsquare } bind def /S6 { BL [] 0 setdash 2 copy exch vpt sub exch vpt sub vpt vpt2 Rec fill Bsquare } bind def /S7 { BL [] 0 setdash 2 copy exch vpt sub exch vpt sub vpt vpt2 Rec fill 2 copy vpt Square fill Bsquare } bind def /S8 { BL [] 0 setdash 2 copy vpt sub vpt Square fill Bsquare } bind def /S9 { BL [] 0 setdash 2 copy vpt sub vpt vpt2 Rec fill Bsquare } bind def /S10 { BL [] 0 setdash 2 copy vpt sub vpt Square fill 2 copy exch vpt sub exch vpt Square fill Bsquare } bind def /S11 { BL [] 0 setdash 2 copy vpt sub vpt Square fill 2 copy exch vpt sub exch vpt2 vpt Rec fill Bsquare } bind def /S12 { BL [] 0 setdash 2 copy exch vpt sub exch vpt sub vpt2 vpt Rec fill Bsquare } bind def /S13 { BL [] 0 setdash 2 copy exch vpt sub exch vpt sub vpt2 vpt Rec fill 2 copy vpt Square fill Bsquare } bind def /S14 { BL [] 0 setdash 2 copy exch vpt sub exch vpt sub vpt2 vpt Rec fill 2 copy exch vpt sub exch vpt Square fill Bsquare } bind def /S15 { BL [] 0 setdash 2 copy Bsquare fill Bsquare } bind def /D0 { gsave translate 45 rotate 0 0 S0 stroke grestore } bind def /D1 { gsave translate 45 rotate 0 0 S1 stroke grestore } bind def /D2 { 
gsave translate 45 rotate 0 0 S2 stroke grestore } bind def /D3 { gsave translate 45 rotate 0 0 S3 stroke grestore } bind def /D4 { gsave translate 45 rotate 0 0 S4 stroke grestore } bind def /D5 { gsave translate 45 rotate 0 0 S5 stroke grestore } bind def /D6 { gsave translate 45 rotate 0 0 S6 stroke grestore } bind def /D7 { gsave translate 45 rotate 0 0 S7 stroke grestore } bind def /D8 { gsave translate 45 rotate 0 0 S8 stroke grestore } bind def /D9 { gsave translate 45 rotate 0 0 S9 stroke grestore } bind def /D10 { gsave translate 45 rotate 0 0 S10 stroke grestore } bind def /D11 { gsave translate 45 rotate 0 0 S11 stroke grestore } bind def /D12 { gsave translate 45 rotate 0 0 S12 stroke grestore } bind def /D13 { gsave translate 45 rotate 0 0 S13 stroke grestore } bind def /D14 { gsave translate 45 rotate 0 0 S14 stroke grestore } bind def /D15 { gsave translate 45 rotate 0 0 S15 stroke grestore } bind def /DiaE { stroke [] 0 setdash vpt add M hpt neg vpt neg V hpt vpt neg V hpt vpt V hpt neg vpt V closepath stroke } def /BoxE { stroke [] 0 setdash exch hpt sub exch vpt add M 0 vpt2 neg V hpt2 0 V 0 vpt2 V hpt2 neg 0 V closepath stroke } def /TriUE { stroke [] 0 setdash vpt 1.12 mul add M hpt neg vpt -1.62 mul V hpt 2 mul 0 V hpt neg vpt 1.62 mul V closepath stroke } def /TriDE { stroke [] 0 setdash vpt 1.12 mul sub M hpt neg vpt 1.62 mul V hpt 2 mul 0 V hpt neg vpt -1.62 mul V closepath stroke } def /PentE { stroke [] 0 setdash gsave translate 0 hpt M 4 {72 rotate 0 hpt L} repeat closepath stroke grestore } def /CircE { stroke [] 0 setdash hpt 0 360 arc stroke } def /Opaque { gsave closepath 1 setgray fill grestore 0 setgray closepath } def /DiaW { stroke [] 0 setdash vpt add M hpt neg vpt neg V hpt vpt neg V hpt vpt V hpt neg vpt V Opaque stroke } def /BoxW { stroke [] 0 setdash exch hpt sub exch vpt add M 0 vpt2 neg V hpt2 0 V 0 vpt2 V hpt2 neg 0 V Opaque stroke } def /TriUW { stroke [] 0 setdash vpt 1.12 mul add M hpt neg vpt -1.62 mul V hpt 2 mul 0 V 
hpt neg vpt 1.62 mul V Opaque stroke } def /TriDW { stroke [] 0 setdash vpt 1.12 mul sub M hpt neg vpt 1.62 mul V hpt 2 mul 0 V hpt neg vpt -1.62 mul V Opaque stroke } def /PentW { stroke [] 0 setdash gsave translate 0 hpt M 4 {72 rotate 0 hpt L} repeat Opaque stroke grestore } def /CircW { stroke [] 0 setdash hpt 0 360 arc Opaque stroke } def /BoxFill { gsave Rec 1 setgray fill grestore } def /Symbol-Oblique /Symbol findfont [1 0 .167 1 0 0] makefont dup length dict begin {1 index /FID eq {pop pop} {def} ifelse} forall currentdict end definefont end %%EndProlog gnudict begin gsave 50 50 translate 0.050 0.050 scale 0 setgray newpath (Helvetica) findfont 220 scalefont setfont 1.000 UL LTb 1053 723 M -63 0 V 6037 0 R 63 0 V 858 723 M (0.0) Rshow 1053 1223 M -63 0 V 6037 0 R 63 0 V -6232 0 R (0.2) Rshow 1053 1724 M -63 0 V 6037 0 R 63 0 V -6232 0 R (0.4) Rshow 1053 2224 M -63 0 V 6037 0 R 63 0 V -6232 0 R (0.6) Rshow 1053 2725 M -63 0 V 6037 0 R 63 0 V -6232 0 R (0.8) Rshow 1053 3225 M -63 0 V 6037 0 R 63 0 V -6232 0 R (1.0) Rshow 1053 3726 M -63 0 V 6037 0 R 63 0 V -6232 0 R (1.2) Rshow 1053 4226 M -63 0 V 6037 0 R 63 0 V -6232 0 R (1.4) Rshow 1053 4727 M -63 0 V 6037 0 R 63 0 V -6232 0 R (1.6) Rshow 1053 723 M 0 -63 V 0 4317 R 0 63 V 0 -4600 R ( 0) Cshow 2049 723 M 0 -63 V 0 4317 R 0 63 V 0 -4600 R ( 0.5) Cshow 3044 723 M 0 -63 V 0 4317 R 0 63 V 0 -4600 R ( 1) Cshow 4040 723 M 0 -63 V 0 4317 R 0 63 V 0 -4600 R ( 1.5) Cshow 5036 723 M 0 -63 V 0 4317 R 0 63 V 0 -4600 R ( 2) Cshow 6031 723 M 0 -63 V 0 4317 R 0 63 V 0 -4600 R ( 2.5) Cshow 7027 723 M 0 -63 V 0 4317 R 0 63 V 0 -4600 R ( 3) Cshow 1.000 UL LTb 1053 723 M 5974 0 V 0 4254 V -5974 0 V 0 -4254 V 220 2850 M currentpoint gsave translate 90 rotate 0 0 M (vote) Cshow grestore 4040 110 M (distance) Cshow 1.000 UL LT0 6040 4804 M (ID) Rshow 6172 4804 M 591 0 V 2225 4977 M 35 -125 V 60 -197 V 61 -178 V 60 -164 V 60 -149 V 61 -138 V 60 -127 V 60 -118 V 61 -109 V 60 -101 V 60 -95 V 61 -89 V 60 -83 V 60 -79 V 61 -73 V 60 
-70 V 60 -65 V 61 -62 V 60 -59 V 60 -56 V 61 -53 V 60 -50 V 60 -48 V 61 -46 V 60 -43 V 60 -42 V 61 -40 V 60 -38 V 60 -37 V 61 -35 V 60 -33 V 61 -33 V 60 -31 V 60 -30 V 61 -29 V 60 -28 V 60 -26 V 61 -26 V 60 -25 V 60 -24 V 61 -24 V 60 -22 V 60 -22 V 61 -21 V 60 -21 V 60 -20 V 61 -19 V 60 -18 V 60 -19 V 61 -17 V 60 -17 V 60 -17 V 61 -16 V 60 -16 V 60 -15 V 61 -15 V 60 -14 V 60 -15 V 61 -13 V 60 -14 V 60 -13 V 61 -13 V 60 -12 V 61 -12 V 60 -12 V 60 -11 V 61 -12 V 60 -11 V 60 -11 V 61 -10 V 60 -10 V 60 -11 V 61 -9 V 60 -10 V 60 -10 V 61 -9 V 60 -9 V 60 -9 V 61 -8 V 60 -9 V 1.000 UL LT1 6040 4584 M (ED a=1,b=1) Rshow 6172 4584 M 591 0 V 1053 3225 M 60 -74 V 61 -73 V 60 -70 V 60 -68 V 61 -66 V 60 -65 V 60 -62 V 61 -60 V 60 -59 V 60 -57 V 61 -55 V 60 -54 V 60 -51 V 61 -51 V 60 -49 V 60 -47 V 61 -46 V 60 -45 V 61 -43 V 60 -42 V 60 -41 V 61 -39 V 60 -39 V 60 -37 V 61 -36 V 60 -35 V 60 -34 V 61 -33 V 60 -32 V 60 -31 V 61 -30 V 60 -29 V 60 -28 V 61 -28 V 60 -27 V 60 -25 V 61 -26 V 60 -24 V 60 -23 V 61 -23 V 60 -23 V 60 -21 V 61 -21 V 60 -20 V 60 -20 V 61 -19 V 60 -19 V 60 -18 V 61 -17 V 60 -17 V 61 -16 V 60 -16 V 60 -16 V 61 -15 V 60 -14 V 60 -14 V 61 -14 V 60 -13 V 60 -13 V 61 -13 V 60 -12 V 60 -12 V 61 -11 V 60 -11 V 60 -11 V 61 -10 V 60 -10 V 60 -10 V 61 -10 V 60 -9 V 60 -9 V 61 -9 V 60 -8 V 60 -8 V 61 -8 V 60 -8 V 60 -7 V 61 -8 V 60 -7 V 60 -6 V 61 -7 V 60 -6 V 61 -7 V 60 -6 V 60 -6 V 61 -5 V 60 -6 V 60 -5 V 61 -5 V 60 -5 V 60 -5 V 61 -5 V 60 -5 V 60 -4 V 61 -4 V 60 -5 V 60 -4 V 61 -4 V 60 -3 V 1.000 UL LT2 6040 4364 M (ED a=2,b=1) Rshow 6172 4364 M 591 0 V 1053 3225 M 60 -147 V 61 -138 V 60 -131 V 60 -122 V 61 -116 V 60 -109 V 60 -102 V 61 -96 V 60 -91 V 60 -85 V 61 -80 V 60 -76 V 60 -71 V 61 -67 V 60 -63 V 60 -59 V 61 -56 V 60 -52 V 61 -50 V 60 -46 V 60 -44 V 61 -41 V 60 -39 V 60 -37 V 61 -34 V 60 -32 V 60 -31 V 61 -28 V 60 -27 V 60 -26 V 61 -24 V 60 -22 V 60 -21 V 61 -20 V 60 -19 V 60 -18 V 61 -16 V 60 -16 V 60 -15 V 61 -13 V 60 -13 V 60 -13 V 61 -11 V 60 -11 V 60 -10 
V 61 -10 V 60 -9 V 60 -9 V 61 -8 V 60 -7 V 61 -7 V 60 -7 V 60 -6 V 61 -6 V 60 -6 V 60 -5 V 61 -5 V 60 -5 V 60 -4 V 61 -4 V 60 -4 V 60 -4 V 61 -3 V 60 -3 V 60 -3 V 61 -3 V 60 -3 V 60 -2 V 61 -3 V 60 -2 V 60 -2 V 61 -2 V 60 -2 V 60 -2 V 61 -1 V 60 -2 V 60 -1 V 61 -2 V 60 -1 V 60 -1 V 61 -2 V 60 -1 V 61 -1 V 60 -1 V 60 -1 V 61 0 V 60 -1 V 60 -1 V 61 -1 V 60 0 V 60 -1 V 61 -1 V 60 0 V 60 -1 V 61 0 V 60 -1 V 60 0 V 61 0 V 60 -1 V 1.000 UL LT3 6040 4144 M (ED a=1,b=2) Rshow 6172 4144 M 591 0 V 1053 3225 M 60 -2 V 61 -7 V 60 -11 V 60 -16 V 61 -20 V 60 -25 V 60 -29 V 61 -32 V 60 -37 V 60 -40 V 61 -44 V 60 -47 V 60 -49 V 61 -53 V 60 -55 V 60 -57 V 61 -59 V 60 -61 V 61 -62 V 60 -63 V 60 -64 V 61 -65 V 60 -64 V 60 -66 V 61 -64 V 60 -65 V 60 -64 V 61 -63 V 60 -62 V 60 -61 V 61 -60 V 60 -58 V 60 -56 V 61 -55 V 60 -54 V 60 -51 V 61 -49 V 60 -48 V 60 -45 V 61 -43 V 60 -41 V 60 -40 V 61 -37 V 60 -35 V 60 -33 V 61 -32 V 60 -29 V 60 -27 V 61 -26 V 60 -24 V 61 -22 V 60 -21 V 60 -19 V 61 -18 V 60 -16 V 60 -15 V 61 -14 V 60 -13 V 60 -12 V 61 -10 V 60 -10 V 60 -9 V 61 -8 V 60 -7 V 60 -6 V 61 -6 V 60 -5 V 60 -5 V 61 -4 V 60 -4 V 60 -4 V 61 -3 V 60 -2 V 60 -3 V 61 -2 V 60 -2 V 60 -1 V 61 -2 V 60 -1 V 60 -1 V 61 -1 V 60 -1 V 61 -1 V 60 0 V 60 -1 V 61 0 V 60 -1 V 60 0 V 61 0 V 60 -1 V 60 0 V 61 0 V 60 0 V 60 0 V 61 0 V 60 0 V 60 -1 V 61 0 V 60 0 V stroke grestore end showpage %%Trailer %%DocumentFonts: Helvetica LanguageMachines-timbl-642727d/docs/texfiles/fspace.eps000077500000000000000000000464451451477526200230550ustar00rootroot00000000000000%!PS-Adobe-2.0 EPSF-2.0 %%Title: fspace.eps %%Creator: gnuplot 3.8j patchlevel 0 %%CreationDate: Wed Dec 29 12:18:14 2004 %%DocumentFonts: (atend) %%BoundingBox: 50 50 410 339 %%Orientation: Portrait %%EndComments /gnudict 256 dict def gnudict begin /Color false def /Solid false def /gnulinewidth 5.000 def /userlinewidth gnulinewidth def /vshift -73 def /dl {10.0 mul} def /hpt_ 31.5 def /vpt_ 31.5 def /hpt hpt_ def /vpt vpt_ def /Rounded false def /M 
{moveto} bind def /L {lineto} bind def /R {rmoveto} bind def /V {rlineto} bind def /N {newpath moveto} bind def /C {setrgbcolor} bind def /f {rlineto fill} bind def /vpt2 vpt 2 mul def /hpt2 hpt 2 mul def /Lshow { currentpoint stroke M 0 vshift R show } def /Rshow { currentpoint stroke M dup stringwidth pop neg vshift R show } def /Cshow { currentpoint stroke M dup stringwidth pop -2 div vshift R show } def /UP { dup vpt_ mul /vpt exch def hpt_ mul /hpt exch def /hpt2 hpt 2 mul def /vpt2 vpt 2 mul def } def /DL { Color {setrgbcolor Solid {pop []} if 0 setdash } {pop pop pop 0 setgray Solid {pop []} if 0 setdash} ifelse } def /BL { stroke userlinewidth 2 mul setlinewidth Rounded { 1 setlinejoin 1 setlinecap } if } def /AL { stroke userlinewidth 2 div setlinewidth Rounded { 1 setlinejoin 1 setlinecap } if } def /UL { dup gnulinewidth mul /userlinewidth exch def dup 1 lt {pop 1} if 10 mul /udl exch def } def /PL { stroke userlinewidth setlinewidth Rounded { 1 setlinejoin 1 setlinecap } if } def /LTb { BL [] 0 0 0 DL } def /LTa { AL [1 udl mul 2 udl mul] 0 setdash 0 0 0 setrgbcolor } def /LT0 { PL [] 1 0 0 DL } def /LT1 { PL [4 dl 2 dl] 0 1 0 DL } def /LT2 { PL [2 dl 3 dl] 0 0 1 DL } def /LT3 { PL [1 dl 1.5 dl] 1 0 1 DL } def /LT4 { PL [5 dl 2 dl 1 dl 2 dl] 0 1 1 DL } def /LT5 { PL [4 dl 3 dl 1 dl 3 dl] 1 1 0 DL } def /LT6 { PL [2 dl 2 dl 2 dl 4 dl] 0 0 0 DL } def /LT7 { PL [2 dl 2 dl 2 dl 2 dl 2 dl 4 dl] 1 0.3 0 DL } def /LT8 { PL [2 dl 2 dl 2 dl 2 dl 2 dl 2 dl 2 dl 4 dl] 0.5 0.5 0.5 DL } def /Pnt { stroke [] 0 setdash gsave 1 setlinecap M 0 0 V stroke grestore } def /Dia { stroke [] 0 setdash 2 copy vpt add M hpt neg vpt neg V hpt vpt neg V hpt vpt V hpt neg vpt V closepath stroke Pnt } def /Pls { stroke [] 0 setdash vpt sub M 0 vpt2 V currentpoint stroke M hpt neg vpt neg R hpt2 0 V stroke } def /Box { stroke [] 0 setdash 2 copy exch hpt sub exch vpt add M 0 vpt2 neg V hpt2 0 V 0 vpt2 V hpt2 neg 0 V closepath stroke Pnt } def /Crs { stroke [] 0 setdash exch hpt sub 
exch vpt add M hpt2 vpt2 neg V currentpoint stroke M hpt2 neg 0 R hpt2 vpt2 V stroke } def /TriU { stroke [] 0 setdash 2 copy vpt 1.12 mul add M hpt neg vpt -1.62 mul V hpt 2 mul 0 V hpt neg vpt 1.62 mul V closepath stroke Pnt } def /Star { 2 copy Pls Crs } def /BoxF { stroke [] 0 setdash exch hpt sub exch vpt add M 0 vpt2 neg V hpt2 0 V 0 vpt2 V hpt2 neg 0 V closepath fill } def /TriUF { stroke [] 0 setdash vpt 1.12 mul add M hpt neg vpt -1.62 mul V hpt 2 mul 0 V hpt neg vpt 1.62 mul V closepath fill } def /TriD { stroke [] 0 setdash 2 copy vpt 1.12 mul sub M hpt neg vpt 1.62 mul V hpt 2 mul 0 V hpt neg vpt -1.62 mul V closepath stroke Pnt } def /TriDF { stroke [] 0 setdash vpt 1.12 mul sub M hpt neg vpt 1.62 mul V hpt 2 mul 0 V hpt neg vpt -1.62 mul V closepath fill} def /DiaF { stroke [] 0 setdash vpt add M hpt neg vpt neg V hpt vpt neg V hpt vpt V hpt neg vpt V closepath fill } def /Pent { stroke [] 0 setdash 2 copy gsave translate 0 hpt M 4 {72 rotate 0 hpt L} repeat closepath stroke grestore Pnt } def /PentF { stroke [] 0 setdash gsave translate 0 hpt M 4 {72 rotate 0 hpt L} repeat closepath fill grestore } def /Circle { stroke [] 0 setdash 2 copy hpt 0 360 arc stroke Pnt } def /CircleF { stroke [] 0 setdash hpt 0 360 arc fill } def /C0 { BL [] 0 setdash 2 copy moveto vpt 90 450 arc } bind def /C1 { BL [] 0 setdash 2 copy moveto 2 copy vpt 0 90 arc closepath fill vpt 0 360 arc closepath } bind def /C2 { BL [] 0 setdash 2 copy moveto 2 copy vpt 90 180 arc closepath fill vpt 0 360 arc closepath } bind def /C3 { BL [] 0 setdash 2 copy moveto 2 copy vpt 0 180 arc closepath fill vpt 0 360 arc closepath } bind def /C4 { BL [] 0 setdash 2 copy moveto 2 copy vpt 180 270 arc closepath fill vpt 0 360 arc closepath } bind def /C5 { BL [] 0 setdash 2 copy moveto 2 copy vpt 0 90 arc 2 copy moveto 2 copy vpt 180 270 arc closepath fill vpt 0 360 arc } bind def /C6 { BL [] 0 setdash 2 copy moveto 2 copy vpt 90 270 arc closepath fill vpt 0 360 arc closepath } bind def /C7 { 
BL [] 0 setdash 2 copy moveto 2 copy vpt 0 270 arc closepath fill vpt 0 360 arc closepath } bind def /C8 { BL [] 0 setdash 2 copy moveto 2 copy vpt 270 360 arc closepath fill vpt 0 360 arc closepath } bind def /C9 { BL [] 0 setdash 2 copy moveto 2 copy vpt 270 450 arc closepath fill vpt 0 360 arc closepath } bind def /C10 { BL [] 0 setdash 2 copy 2 copy moveto vpt 270 360 arc closepath fill 2 copy moveto 2 copy vpt 90 180 arc closepath fill vpt 0 360 arc closepath } bind def /C11 { BL [] 0 setdash 2 copy moveto 2 copy vpt 0 180 arc closepath fill 2 copy moveto 2 copy vpt 270 360 arc closepath fill vpt 0 360 arc closepath } bind def /C12 { BL [] 0 setdash 2 copy moveto 2 copy vpt 180 360 arc closepath fill vpt 0 360 arc closepath } bind def /C13 { BL [] 0 setdash 2 copy moveto 2 copy vpt 0 90 arc closepath fill 2 copy moveto 2 copy vpt 180 360 arc closepath fill vpt 0 360 arc closepath } bind def /C14 { BL [] 0 setdash 2 copy moveto 2 copy vpt 90 360 arc closepath fill vpt 0 360 arc } bind def /C15 { BL [] 0 setdash 2 copy vpt 0 360 arc closepath fill vpt 0 360 arc closepath } bind def /Rec { newpath 4 2 roll moveto 1 index 0 rlineto 0 exch rlineto neg 0 rlineto closepath } bind def /Square { dup Rec } bind def /Bsquare { vpt sub exch vpt sub exch vpt2 Square } bind def /S0 { BL [] 0 setdash 2 copy moveto 0 vpt rlineto BL Bsquare } bind def /S1 { BL [] 0 setdash 2 copy vpt Square fill Bsquare } bind def /S2 { BL [] 0 setdash 2 copy exch vpt sub exch vpt Square fill Bsquare } bind def /S3 { BL [] 0 setdash 2 copy exch vpt sub exch vpt2 vpt Rec fill Bsquare } bind def /S4 { BL [] 0 setdash 2 copy exch vpt sub exch vpt sub vpt Square fill Bsquare } bind def /S5 { BL [] 0 setdash 2 copy 2 copy vpt Square fill exch vpt sub exch vpt sub vpt Square fill Bsquare } bind def /S6 { BL [] 0 setdash 2 copy exch vpt sub exch vpt sub vpt vpt2 Rec fill Bsquare } bind def /S7 { BL [] 0 setdash 2 copy exch vpt sub exch vpt sub vpt vpt2 Rec fill 2 copy vpt Square fill Bsquare } bind 
def /S8 { BL [] 0 setdash 2 copy vpt sub vpt Square fill Bsquare } bind def /S9 { BL [] 0 setdash 2 copy vpt sub vpt vpt2 Rec fill Bsquare } bind def /S10 { BL [] 0 setdash 2 copy vpt sub vpt Square fill 2 copy exch vpt sub exch vpt Square fill Bsquare } bind def /S11 { BL [] 0 setdash 2 copy vpt sub vpt Square fill 2 copy exch vpt sub exch vpt2 vpt Rec fill Bsquare } bind def /S12 { BL [] 0 setdash 2 copy exch vpt sub exch vpt sub vpt2 vpt Rec fill Bsquare } bind def /S13 { BL [] 0 setdash 2 copy exch vpt sub exch vpt sub vpt2 vpt Rec fill 2 copy vpt Square fill Bsquare } bind def /S14 { BL [] 0 setdash 2 copy exch vpt sub exch vpt sub vpt2 vpt Rec fill 2 copy exch vpt sub exch vpt Square fill Bsquare } bind def /S15 { BL [] 0 setdash 2 copy Bsquare fill Bsquare } bind def /D0 { gsave translate 45 rotate 0 0 S0 stroke grestore } bind def /D1 { gsave translate 45 rotate 0 0 S1 stroke grestore } bind def /D2 { gsave translate 45 rotate 0 0 S2 stroke grestore } bind def /D3 { gsave translate 45 rotate 0 0 S3 stroke grestore } bind def /D4 { gsave translate 45 rotate 0 0 S4 stroke grestore } bind def /D5 { gsave translate 45 rotate 0 0 S5 stroke grestore } bind def /D6 { gsave translate 45 rotate 0 0 S6 stroke grestore } bind def /D7 { gsave translate 45 rotate 0 0 S7 stroke grestore } bind def /D8 { gsave translate 45 rotate 0 0 S8 stroke grestore } bind def /D9 { gsave translate 45 rotate 0 0 S9 stroke grestore } bind def /D10 { gsave translate 45 rotate 0 0 S10 stroke grestore } bind def /D11 { gsave translate 45 rotate 0 0 S11 stroke grestore } bind def /D12 { gsave translate 45 rotate 0 0 S12 stroke grestore } bind def /D13 { gsave translate 45 rotate 0 0 S13 stroke grestore } bind def /D14 { gsave translate 45 rotate 0 0 S14 stroke grestore } bind def /D15 { gsave translate 45 rotate 0 0 S15 stroke grestore } bind def /DiaE { stroke [] 0 setdash vpt add M hpt neg vpt neg V hpt vpt neg V hpt vpt V hpt neg vpt V closepath stroke } def /BoxE { stroke [] 0 setdash 
exch hpt sub exch vpt add M 0 vpt2 neg V hpt2 0 V 0 vpt2 V hpt2 neg 0 V closepath stroke } def /TriUE { stroke [] 0 setdash vpt 1.12 mul add M hpt neg vpt -1.62 mul V hpt 2 mul 0 V hpt neg vpt 1.62 mul V closepath stroke } def /TriDE { stroke [] 0 setdash vpt 1.12 mul sub M hpt neg vpt 1.62 mul V hpt 2 mul 0 V hpt neg vpt -1.62 mul V closepath stroke } def /PentE { stroke [] 0 setdash gsave translate 0 hpt M 4 {72 rotate 0 hpt L} repeat closepath stroke grestore } def /CircE { stroke [] 0 setdash hpt 0 360 arc stroke } def /Opaque { gsave closepath 1 setgray fill grestore 0 setgray closepath } def /DiaW { stroke [] 0 setdash vpt add M hpt neg vpt neg V hpt vpt neg V hpt vpt V hpt neg vpt V Opaque stroke } def /BoxW { stroke [] 0 setdash exch hpt sub exch vpt add M 0 vpt2 neg V hpt2 0 V 0 vpt2 V hpt2 neg 0 V Opaque stroke } def /TriUW { stroke [] 0 setdash vpt 1.12 mul add M hpt neg vpt -1.62 mul V hpt 2 mul 0 V hpt neg vpt 1.62 mul V Opaque stroke } def /TriDW { stroke [] 0 setdash vpt 1.12 mul sub M hpt neg vpt 1.62 mul V hpt 2 mul 0 V hpt neg vpt -1.62 mul V Opaque stroke } def /PentW { stroke [] 0 setdash gsave translate 0 hpt M 4 {72 rotate 0 hpt L} repeat Opaque stroke grestore } def /CircW { stroke [] 0 setdash hpt 0 360 arc Opaque stroke } def /BoxFill { gsave Rec 1 setgray fill grestore } def /BoxColFill { gsave Rec /Fillden exch def currentrgbcolor /ColB exch def /ColG exch def /ColR exch def /ColR ColR Fillden mul Fillden sub 1 add def /ColG ColG Fillden mul Fillden sub 1 add def /ColB ColB Fillden mul Fillden sub 1 add def ColR ColG ColB setrgbcolor fill grestore } def % % PostScript Level 1 Pattern Fill routine % Usage: x y w h s a XX PatternFill % x,y = lower left corner of box to be filled % w,h = width and height of box % a = angle in degrees between lines and x-axis % XX = 0/1 for no/yes cross-hatch % /PatternFill { gsave /PFa [ 9 2 roll ] def PFa 0 get PFa 2 get 2 div add PFa 1 get PFa 3 get 2 div add translate PFa 2 get -2 div PFa 3 get -2 div PFa 
2 get PFa 3 get Rec gsave 1 setgray fill grestore clip currentlinewidth 0.5 mul setlinewidth /PFs PFa 2 get dup mul PFa 3 get dup mul add sqrt def 0 0 M PFa 5 get rotate PFs -2 div dup translate 0 1 PFs PFa 4 get div 1 add floor cvi { PFa 4 get mul 0 M 0 PFs V } for 0 PFa 6 get ne { 0 1 PFs PFa 4 get div 1 add floor cvi { PFa 4 get mul 0 2 1 roll M PFs 0 V } for } if stroke grestore } def % /Symbol-Oblique /Symbol findfont [1 0 .167 1 0 0] makefont dup length dict begin {1 index /FID eq {pop pop} {def} ifelse} forall currentdict end definefont pop end %%EndProlog gnudict begin gsave 50 50 translate 0.050 0.050 scale 0 setgray newpath (Helvetica) findfont 220 scalefont setfont 1.000 UL LTb 1185 723 M -63 0 V 5050 0 R 63 0 V 990 723 M gsave 0 setgray ( 0) Rshow grestore 1.000 UL LTb 1185 1725 M -63 0 V 5050 0 R 63 0 V -5245 0 R gsave 0 setgray ( 0.2) Rshow grestore 1.000 UL LTb 1185 2727 M -63 0 V 5050 0 R 63 0 V -5245 0 R gsave 0 setgray ( 0.4) Rshow grestore 1.000 UL LTb 1185 3729 M -63 0 V 5050 0 R 63 0 V -5245 0 R gsave 0 setgray ( 0.6) Rshow grestore 1.000 UL LTb 1185 4731 M -63 0 V 5050 0 R 63 0 V -5245 0 R gsave 0 setgray ( 0.8) Rshow grestore 1.000 UL LTb 1185 5733 M -63 0 V 5050 0 R 63 0 V -5245 0 R gsave 0 setgray ( 1) Rshow grestore 1.000 UL LTb 1185 723 M 0 -63 V 0 5073 R 0 63 V 0 -5356 R gsave 0 setgray ( 0) Cshow grestore 1.000 UL LTb 2182 723 M 0 -63 V 0 5073 R 0 63 V 0 -5356 R gsave 0 setgray ( 0.2) Cshow grestore 1.000 UL LTb 3180 723 M 0 -63 V 0 5073 R 0 63 V 0 -5356 R gsave 0 setgray ( 0.4) Cshow grestore 1.000 UL LTb 4177 723 M 0 -63 V 0 5073 R 0 63 V 0 -5356 R gsave 0 setgray ( 0.6) Cshow grestore 1.000 UL LTb 5175 723 M 0 -63 V 0 5073 R 0 63 V 0 -5356 R gsave 0 setgray ( 0.8) Cshow grestore 1.000 UL LTb 6172 723 M 0 -63 V 0 5073 R 0 63 V 0 -5356 R gsave 0 setgray ( 1) Cshow grestore 1.000 UL LTb 1.000 UL LTb 1185 723 M 4987 0 V 0 5010 V -4987 0 V 0 -5010 V LTb 220 3228 M gsave 0 setgray currentpoint gsave translate 90 rotate 0 0 M (precision) 
Cshow grestore grestore LTb 3678 110 M gsave 0 setgray (recall) Cshow grestore 1.000 UP LTb 5823 5182 M gsave 0 setgray (0.9) Lshow grestore LTb 5324 4681 M gsave 0 setgray (0.8) Lshow grestore LTb 4826 4180 M gsave 0 setgray (0.7) Lshow grestore LTb 4327 3679 M gsave 0 setgray (0.6) Lshow grestore LTb 3828 3178 M gsave 0 setgray (0.5) Lshow grestore LTb 3329 2677 M gsave 0 setgray (0.4) Lshow grestore LTb 2831 2176 M gsave 0 setgray (0.3) Lshow grestore LTb 2382 1675 M gsave 0 setgray (0.2) Lshow grestore LTb 1933 1224 M gsave 0 setgray (0.1) Lshow grestore 1.000 UL LT0 1185 723 M 50 51 V 51 50 V 50 51 V 50 50 V 51 51 V 50 51 V 51 50 V 50 51 V 50 50 V 51 51 V 50 51 V 50 50 V 51 51 V 50 50 V 51 51 V 50 51 V 50 50 V 51 51 V 50 51 V 50 50 V 51 51 V 50 50 V 51 51 V 50 51 V 50 50 V 51 51 V 50 50 V 50 51 V 51 51 V 50 50 V 51 51 V 50 50 V 50 51 V 51 51 V 50 50 V 50 51 V 51 50 V 50 51 V 51 51 V 50 50 V 50 51 V 51 50 V 50 51 V 50 51 V 51 50 V 50 51 V 51 50 V 50 51 V 50 51 V 51 50 V 50 51 V 50 51 V 51 50 V 50 51 V 51 50 V 50 51 V 50 51 V 51 50 V 50 51 V 50 50 V 51 51 V 50 51 V 51 50 V 50 51 V 50 50 V 51 51 V 50 51 V 50 50 V 51 51 V 50 50 V 51 51 V 50 51 V 50 50 V 51 51 V 50 50 V 50 51 V 51 51 V 50 50 V 51 51 V 50 50 V 50 51 V 51 51 V 50 50 V 50 51 V 51 51 V 50 50 V 51 51 V 50 50 V 50 51 V 51 51 V 50 50 V 50 51 V 51 50 V 50 51 V 51 51 V 50 50 V 50 51 V 51 50 V 50 51 V 1.000 UL LT0 1185 723 M 5265 5733 M 51 -74 V 50 -69 V 50 -67 V 51 -62 V 50 -60 V 51 -57 V 50 -55 V 50 -52 V 51 -50 V 50 -47 V 50 -46 V 51 -43 V 50 -42 V 51 -41 V 50 -38 V 50 -38 V 51 -35 V 50 -35 V 1.000 UL LT0 1185 723 M 4510 5733 M 50 -110 V 50 -102 V 51 -95 V 50 -89 V 51 -83 V 50 -78 V 50 -73 V 51 -69 V 50 -65 V 50 -62 V 51 -58 V 50 -55 V 51 -53 V 50 -50 V 50 -47 V 51 -45 V 50 -43 V 50 -42 V 51 -39 V 50 -38 V 51 -36 V 50 -34 V 50 -34 V 51 -31 V 50 -31 V 50 -30 V 51 -28 V 50 -27 V 51 -27 V 50 -25 V 50 -24 V 51 -24 V 50 -23 V 1.000 UL LT0 1185 723 M 3871 5733 M 34 -117 V 51 -154 V 50 -140 V 50 -127 V 51 -116 V 
50 -107 V 50 -99 V 51 -91 V 50 -84 V 51 -78 V 50 -73 V 50 -69 V 51 -63 V 50 -60 V 50 -57 V 51 -53 V 50 -50 V 51 -47 V 50 -45 V 50 -42 V 51 -40 V 50 -39 V 50 -36 V 51 -35 V 50 -33 V 51 -32 V 50 -30 V 50 -29 V 51 -27 V 50 -27 V 50 -25 V 51 -25 V 50 -23 V 51 -23 V 50 -22 V 50 -21 V 51 -20 V 50 -19 V 50 -19 V 51 -18 V 50 -18 V 51 -17 V 50 -16 V 50 -16 V 51 -15 V 50 -15 V 1.000 UL LT0 1185 723 M 3323 5733 M 28 -151 V 50 -234 V 51 -204 V 50 -179 V 51 -159 V 50 -141 V 50 -126 V 51 -114 V 50 -103 V 50 -94 V 51 -86 V 50 -79 V 51 -73 V 50 -67 V 50 -62 V 51 -58 V 50 -53 V 50 -51 V 51 -47 V 50 -44 V 51 -41 V 50 -40 V 50 -36 V 51 -35 V 50 -33 V 50 -31 V 51 -30 V 50 -28 V 51 -27 V 50 -26 V 50 -24 V 51 -23 V 50 -23 V 50 -21 V 51 -20 V 50 -20 V 51 -19 V 50 -18 V 50 -17 V 51 -16 V 50 -16 V 50 -16 V 51 -15 V 50 -14 V 51 -14 V 50 -13 V 50 -13 V 51 -13 V 50 -12 V 50 -11 V 51 -12 V 50 -11 V 51 -10 V 50 -11 V 50 -10 V 51 -9 V 50 -10 V 1.000 UL LT0 1185 723 M 2847 5733 M 51 -406 V 50 -327 V 50 -269 V 51 -225 V 50 -191 V 51 -164 V 50 -143 V 50 -125 V 51 -110 V 50 -99 V 50 -88 V 51 -80 V 50 -72 V 51 -65 V 50 -60 V 50 -55 V 51 -51 V 50 -47 V 50 -43 V 51 -40 V 50 -38 V 51 -35 V 50 -33 V 50 -30 V 51 -29 V 50 -27 V 50 -26 V 51 -24 V 50 -23 V 51 -22 V 50 -21 V 50 -19 V 51 -19 V 50 -18 V 50 -17 V 51 -16 V 50 -15 V 51 -15 V 50 -14 V 50 -14 V 51 -13 V 50 -13 V 50 -12 V 51 -11 V 50 -11 V 51 -11 V 50 -10 V 50 -10 V 51 -10 V 50 -9 V 50 -9 V 51 -9 V 50 -8 V 51 -9 V 50 -7 V 50 -8 V 51 -7 V 50 -8 V 50 -7 V 51 -6 V 50 -7 V 51 -6 V 50 -6 V 50 -6 V 51 -6 V 50 -6 V 1.000 UL LT0 1185 723 M 2434 5733 M 10 -193 V 51 -615 V 50 -445 V 50 -336 V 51 -263 V 50 -211 V 51 -174 V 50 -145 V 50 -123 V 51 -106 V 50 -92 V 50 -80 V 51 -72 V 50 -63 V 51 -57 V 50 -51 V 50 -46 V 51 -42 V 50 -39 V 50 -35 V 51 -33 V 50 -30 V 51 -28 V 50 -25 V 50 -25 V 51 -22 V 50 -21 V 50 -20 V 51 -18 V 50 -18 V 51 -16 V 50 -16 V 50 -15 V 51 -14 V 50 -13 V 50 -12 V 51 -12 V 50 -12 V 51 -11 V 50 -10 V 50 -10 V 51 -10 V 50 -9 V 50 -8 V 51 -9 V 50 
-8 V 51 -8 V 50 -7 V 50 -7 V 51 -7 V 50 -7 V 50 -6 V 51 -6 V 50 -6 V 51 -6 V 50 -5 V 50 -6 V 51 -5 V 50 -5 V 50 -5 V 51 -5 V 50 -4 V 51 -5 V 50 -4 V 50 -4 V 51 -4 V 50 -4 V 50 -4 V 51 -4 V 50 -4 V 51 -3 V 50 -4 V 50 -3 V 51 -3 V 50 -3 V 1.000 UL LT0 1185 723 M 885 5010 R 22 -716 V 50 -853 V 50 -523 V 51 -352 V 50 -254 V 51 -191 V 50 -150 V 50 -120 V 51 -99 V 50 -82 V 50 -70 V 51 -60 V 50 -52 V 51 -45 V 50 -41 V 50 -36 V 51 -32 V 50 -29 V 50 -26 V 51 -24 V 50 -21 V 51 -20 V 50 -19 V 50 -17 V 51 -15 V 50 -15 V 50 -14 V 51 -12 V 50 -12 V 51 -11 V 50 -11 V 50 -10 V 51 -9 V 50 -9 V 50 -8 V 51 -8 V 50 -7 V 51 -8 V 50 -6 V 50 -7 V 51 -6 V 50 -6 V 50 -5 V 51 -6 V 50 -5 V 51 -5 V 50 -4 V 50 -5 V 51 -4 V 50 -5 V 50 -4 V 51 -3 V 50 -4 V 51 -4 V 50 -3 V 50 -4 V 51 -3 V 50 -3 V 50 -3 V 51 -3 V 50 -3 V 51 -3 V 50 -2 V 50 -3 V 51 -2 V 50 -3 V 50 -2 V 51 -2 V 50 -3 V 51 -2 V 50 -2 V 50 -2 V 51 -2 V 50 -2 V 50 -2 V 51 -2 V 50 -2 V 51 -1 V 50 -2 V 50 -2 V 51 -1 V 50 -2 V 1.000 UL LT0 1185 723 M 554 5010 R 50 -2147 V 51 -762 V 50 -390 V 51 -237 V 50 -160 V 50 -114 V 51 -87 V 50 -67 V 50 -54 V 51 -44 V 50 -37 V 51 -31 V 50 -27 V 50 -24 V 51 -20 V 50 -18 V 50 -16 V 51 -14 V 50 -13 V 51 -12 V 50 -11 V 50 -9 V 51 -9 V 50 -8 V 50 -8 V 51 -7 V 50 -6 V 51 -7 V 50 -5 V 50 -6 V 51 -4 V 50 -5 V 50 -5 V 51 -4 V 50 -4 V 51 -3 V 50 -4 V 50 -3 V 51 -3 V 50 -3 V 50 -3 V 51 -3 V 50 -3 V 51 -2 V 50 -2 V 50 -3 V 51 -2 V 50 -2 V 50 -2 V 51 -2 V 50 -2 V 51 -2 V 50 -1 V 50 -2 V 51 -2 V 50 -1 V 50 -2 V 51 -1 V 50 -1 V 51 -2 V 50 -1 V 50 -1 V 51 -2 V 50 -1 V 50 -1 V 51 -1 V 50 -1 V 51 -1 V 50 -1 V 50 -1 V 51 -1 V 50 -1 V 50 -1 V 51 -1 V 50 -1 V 51 -1 V 50 0 V 50 -1 V 51 -1 V 50 -1 V 50 -1 V 51 0 V 50 -1 V 51 -1 V 50 0 V 50 -1 V 51 -1 V 50 0 V 1.000 UL LT0 1185 723 M 295 5010 R 7 -3579 V 51 -576 V 50 -198 V 50 -100 V 51 -61 V 50 -41 V 50 -29 V 51 -21 V 50 -17 V 51 -14 V 50 -11 V 50 -10 V 51 -7 V 50 -7 V 50 -6 V 51 -5 V 50 -5 V 51 -4 V 50 -3 V 50 -4 V 51 -3 V 50 -2 V 50 -3 V 51 -2 V 50 -2 V 51 -2 V 50 -2 V 
50 -1 V 51 -2 V 50 -1 V 50 -2 V 51 -1 V 50 -1 V 51 -1 V 50 -1 V 50 -1 V 51 -1 V 50 -1 V 50 -1 V 51 -1 V 50 0 V 51 -1 V 50 -1 V 50 0 V 51 -1 V 50 -1 V 50 0 V 51 -1 V 50 0 V 51 -1 V 50 0 V 50 -1 V 51 0 V 50 -1 V 50 0 V 51 0 V 50 -1 V 51 0 V 50 -1 V 50 0 V 51 0 V 50 -1 V 50 0 V 51 0 V 50 0 V 51 -1 V 50 0 V 50 0 V 51 -1 V 50 0 V 50 0 V 51 0 V 50 -1 V 51 0 V 50 0 V 50 0 V 51 0 V 50 -1 V 50 0 V 51 0 V 50 0 V 51 0 V 50 -1 V 50 0 V 51 0 V 50 0 V 50 0 V 51 0 V 50 -1 V 51 0 V 50 0 V 50 0 V 51 0 V 50 0 V 1.000 UL LTb 1185 723 M 4987 0 V 0 5010 V -4987 0 V 0 -5010 V 1.000 UP stroke grestore end showpage %%Trailer %%DocumentFonts: Helvetica LanguageMachines-timbl-642727d/docs/texfiles/fullname.bst000077500000000000000000000606611451477526200234140ustar00rootroot00000000000000%%% fullname.bst %%% %%% Stuart M. Shieber %%% Mon Mar 30 17:23:36 EST 1992 %%% Modification of BibTeX style file "aaai-named" by Peter %%% Patel-Schneider %%% Intended for use with the style file "fullname.sty" % BibTeX `aaai-named' style file for BibTeX version 0.99c, LaTeX version 2.09 % This version was made by modifying the master file made by % Oren Patashnik (PATASHNIK@SCORE.STANFORD.EDU) % Copyright (C) 1985, all rights reserved. % Modifications Copyright 1988, Peter F. Patel-Schneider % Copying of this file is authorized only if either % (1) you make absolutely no changes to your copy, including name, or % (2) if you do make changes, you name it something other than % btxbst.doc, plain.bst, unsrt.bst, alpha.bst, and abbrv.bst. % This restriction helps ensure that all standard styles are identical. % There are undoubtably bugs in this style. If you make bug fixes, % improvements, etc. please let me know. My e-mail address is: % pfps@spar.slb.com % Citation format: [author-last-name, year] % [author-last-name and author-last-name, year] % [author-last-name {\em et al.}, year] % % Reference list ordering: alphabetical by author or whatever passes % for author in the absence of one. 
% % This BibTeX style has support for short (year only) citations. This % is done by having the citations actually look like % \citename{name-info, }year % The LaTeX style has to have the following % \let\@internalcite\cite % \def\cite{\def\citename##1{##1}\@internalcite} % \def\shortcite{\def\citename##1{}\@internalcite} % \def\@biblabel#1{\def\citename##1{##1}[#1]\hfill} % which makes \shortcite the macro for short citations. %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% % Changes made by SMS for thesis style % no emphasis on "et al." % "Ph.D." includes periods (not "PhD") % moved year to immediately after author's name % Changes made by SMS for compatability with MIT Press style % inverted first and last names for first author %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% ENTRY { address author booktitle chapter edition editor howpublished institution journal key month note number organization pages publisher school series title type volume year } {} { label extra.label sort.label } INTEGERS { output.state before.all mid.sentence after.sentence after.block } FUNCTION {init.state.consts} { #0 'before.all := #1 'mid.sentence := #2 'after.sentence := #3 'after.block := } STRINGS { s t } FUNCTION {output.nonnull} { 's := output.state mid.sentence = { ", " * write$ } { output.state after.block = { add.period$ write$ newline$ "\newblock " write$ } { output.state before.all = 'write$ { add.period$ " " * write$ } if$ } if$ mid.sentence 'output.state := } if$ s } FUNCTION {output} { duplicate$ empty$ 'pop$ 'output.nonnull if$ } FUNCTION {output.check} { 't := duplicate$ empty$ { pop$ "empty " t * " in " * cite$ * warning$ } 'output.nonnull if$ } FUNCTION {output.bibitem} { newline$ "\bibitem[" write$ label write$ "]{" write$ cite$ write$ "}" write$ newline$ "" before.all 'output.state := } FUNCTION {fin.entry} { add.period$ write$ newline$ } FUNCTION {new.block} { output.state before.all = 'skip$ { after.block 
'output.state := } if$ } FUNCTION {new.sentence} { output.state after.block = 'skip$ { output.state before.all = 'skip$ { after.sentence 'output.state := } if$ } if$ } FUNCTION {not} { { #0 } { #1 } if$ } FUNCTION {and} { 'skip$ { pop$ #0 } if$ } FUNCTION {or} { { pop$ #1 } 'skip$ if$ } FUNCTION {new.block.checka} { empty$ 'skip$ 'new.block if$ } FUNCTION {new.block.checkb} { empty$ swap$ empty$ and 'skip$ 'new.block if$ } FUNCTION {new.sentence.checka} { empty$ 'skip$ 'new.sentence if$ } FUNCTION {new.sentence.checkb} { empty$ swap$ empty$ and 'skip$ 'new.sentence if$ } FUNCTION {field.or.null} { duplicate$ empty$ { pop$ "" } 'skip$ if$ } FUNCTION {emphasize} { duplicate$ empty$ { pop$ "" } { "{\em " swap$ * "}" * } if$ } INTEGERS { nameptr namesleft numnames } %%% This function reverses the first name in the author list, e.g., %%% Dewey, John, Thomas Cheatham, and Elias Howe. %%% For editor names, no reversal is appropriate, so %%% format.names.unreversed is used. FUNCTION {format.names} { 's := #1 'nameptr := s num.names$ 'numnames := numnames 'namesleft := { namesleft #0 > } { s nameptr nameptr #1 > { "{ff~}{vv~}{ll}{, jj}" } { "{vv~}{ll}{, jj}{, ff}" } if$ format.name$ 't := nameptr #1 > { namesleft #1 > { ", " * t * } { numnames #2 > { "," * } 'skip$ if$ t "others" = { " et~al." * } { " and " * t * } if$ } if$ } 't if$ nameptr #1 + 'nameptr := namesleft #1 - 'namesleft := } while$ } FUNCTION {format.authors} { author empty$ { "" } { author format.names } if$ } FUNCTION {format.names.unreversed} { 's := #1 'nameptr := s num.names$ 'numnames := numnames 'namesleft := { namesleft #0 > } { s nameptr "{ff~}{vv~}{ll}{, jj}" format.name$ 't := nameptr #1 > { namesleft #1 > { ", " * t * } { numnames #2 > { "," * } 'skip$ if$ t "others" = { " et~al." 
* } { " and " * t * } if$ } if$ } 't if$ nameptr #1 + 'nameptr := namesleft #1 - 'namesleft := } while$ } FUNCTION {format.editors} { editor empty$ { "" } { editor format.names editor num.names$ #1 > { ", editors" * } { ", editor" * } if$ } if$ } FUNCTION {format.editors.unreversed} { editor empty$ { "" } { editor format.names.unreversed editor num.names$ #1 > { ", editors" * } { ", editor" * } if$ } if$ } FUNCTION {format.title} { title empty$ { "" } { title "t" change.case$ } if$ } FUNCTION {n.dashify} { 't := "" { t empty$ not } { t #1 #1 substring$ "-" = { t #1 #2 substring$ "--" = not { "--" * t #2 global.max$ substring$ 't := } { { t #1 #1 substring$ "-" = } { "-" * t #2 global.max$ substring$ 't := } while$ } if$ } { t #1 #1 substring$ * t #2 global.max$ substring$ 't := } if$ } while$ } FUNCTION {format.date} { year empty$ { month empty$ { "" } { "there's a month but no year in " cite$ * warning$ month } if$ } { month empty$ { "" } { month } if$ } if$ } FUNCTION {format.btitle} { title emphasize } FUNCTION {tie.or.space.connect} { duplicate$ text.length$ #3 < { "~" } { " " } if$ swap$ * * } FUNCTION {either.or.check} { empty$ 'pop$ { "can't use both " swap$ * " fields in " * cite$ * warning$ } if$ } FUNCTION {format.bvolume} { volume empty$ { "" } { "volume" volume tie.or.space.connect series empty$ 'skip$ { " of " * series emphasize * } if$ "volume and number" number either.or.check } if$ } FUNCTION {format.number.series} { volume empty$ { number empty$ { series field.or.null } { output.state mid.sentence = { "number" } { "Number" } if$ number tie.or.space.connect series empty$ { "there's a number but no series in " cite$ * warning$ } { " in " * series * } if$ } if$ } { "" } if$ } FUNCTION {format.edition} { edition empty$ { "" } { output.state mid.sentence = { edition "l" change.case$ " edition" * } { edition "t" change.case$ " edition" * } if$ } if$ } INTEGERS { multiresult } FUNCTION {multi.page.check} { 't := #0 'multiresult := { multiresult not t 
empty$ not and } { t #1 #1 substring$ duplicate$ "-" = swap$ duplicate$ "," = swap$ "+" = or or { #1 'multiresult := } { t #2 global.max$ substring$ 't := } if$ } while$ multiresult } FUNCTION {format.pages} { pages empty$ { "" } { pages multi.page.check { "pages" pages n.dashify tie.or.space.connect } { "page" pages tie.or.space.connect } if$ } if$ } FUNCTION {format.year.label} { year extra.label * } FUNCTION {format.vol.num.pages} { volume field.or.null number empty$ 'skip$ { "(" number * ")" * * volume empty$ { "there's a number but no volume in " cite$ * warning$ } 'skip$ if$ } if$ pages empty$ 'skip$ { duplicate$ empty$ { pop$ format.pages } { ":" * pages n.dashify * } if$ } if$ } FUNCTION {format.chapter.pages} { chapter empty$ 'format.pages { type empty$ { "chapter" } { type "l" change.case$ } if$ chapter tie.or.space.connect pages empty$ 'skip$ { ", " * format.pages * } if$ } if$ } FUNCTION {format.in.ed.booktitle} { booktitle empty$ { "" } { editor empty$ { "In " booktitle emphasize * } { "In " format.editors.unreversed * ", " * booktitle emphasize * } if$ } if$ } FUNCTION {empty.misc.check} { author empty$ title empty$ howpublished empty$ month empty$ year empty$ note empty$ and and and and and key empty$ not and { "all relevant fields are empty in " cite$ * warning$ } 'skip$ if$ } FUNCTION {format.thesis.type} { type empty$ 'skip$ { pop$ type "t" change.case$ } if$ } FUNCTION {format.tr.number} { type empty$ { "Technical Report" } 'type if$ number empty$ { "t" change.case$ } { number tie.or.space.connect } if$ } FUNCTION {format.article.crossref} { key empty$ { journal empty$ { "need key or journal for " cite$ * " to crossref " * crossref * warning$ "" } { "In {\em " journal * "\/}" * } if$ } { "In " key * } if$ " \cite{" * crossref * "}" * } FUNCTION {format.crossref.editor} { editor #1 "{vv~}{ll}" format.name$ editor num.names$ duplicate$ #2 > { pop$ " et~al." * } { #2 < 'skip$ { editor #2 "{ff }{vv }{ll}{ jj}" format.name$ "others" = { " et~al." 
* } { " and " * editor #2 "{vv~}{ll}" format.name$ * } if$ } if$ } if$ } FUNCTION {format.book.crossref} { volume empty$ { "empty volume in " cite$ * "'s crossref of " * crossref * warning$ "In " } { "Volume" volume tie.or.space.connect " of " * } if$ editor empty$ editor field.or.null author field.or.null = or { key empty$ { series empty$ { "need editor, key, or series for " cite$ * " to crossref " * crossref * warning$ "" * } { "{\em " * series * "\/}" * } if$ } { key * } if$ } { format.crossref.editor * } if$ " \cite{" * crossref * "}" * } FUNCTION {format.incoll.inproc.crossref} { editor empty$ editor field.or.null author field.or.null = or { key empty$ { booktitle empty$ { "need editor, key, or booktitle for " cite$ * " to crossref " * crossref * warning$ "" } { "In {\em " booktitle * "\/}" * } if$ } { "In " key * } if$ } { "In " format.crossref.editor * } if$ " \cite{" * crossref * "}" * } FUNCTION {article} { output.bibitem format.authors "author" output.check new.block format.year.label "year" output.check new.block format.title "title" output.check new.block crossref missing$ { journal emphasize "journal" output.check format.vol.num.pages output format.date output } { format.article.crossref output.nonnull format.pages output } if$ new.block note output fin.entry } FUNCTION {book} { output.bibitem author empty$ { format.editors "author and editor" output.check } { format.authors output.nonnull crossref missing$ { "author and editor" editor either.or.check } 'skip$ if$ } if$ new.block format.year.label "year" output.check new.block format.btitle "title" output.check crossref missing$ { format.bvolume output new.block format.number.series output new.sentence publisher "publisher" output.check address output } { new.block format.book.crossref output.nonnull } if$ format.edition output format.date output new.block note output fin.entry } FUNCTION {booklet} { output.bibitem format.authors output new.block format.year.label "year" output.check new.block 
format.title "title" output.check howpublished address new.block.checkb howpublished output address output format.date output new.block note output fin.entry } FUNCTION {inbook} { output.bibitem author empty$ { format.editors "author and editor" output.check } { format.authors output.nonnull crossref missing$ { "author and editor" editor either.or.check } 'skip$ if$ } if$ format.year.label "year" output.check new.block format.btitle "title" output.check crossref missing$ { format.bvolume output format.chapter.pages "chapter and pages" output.check new.block format.number.series output new.sentence publisher "publisher" output.check address output } { format.chapter.pages "chapter and pages" output.check new.block format.book.crossref output.nonnull } if$ format.edition output format.date output new.block note output fin.entry } FUNCTION {incollection} { output.bibitem format.authors "author" output.check new.block format.year.label "year" output.check new.block format.title "title" output.check new.block crossref missing$ { format.in.ed.booktitle "booktitle" output.check format.bvolume output format.number.series output new.sentence publisher "publisher" output.check address output format.edition output format.date output format.chapter.pages output } { format.incoll.inproc.crossref output.nonnull format.chapter.pages output } if$ new.block note output fin.entry } FUNCTION {inproceedings} { output.bibitem format.authors "author" output.check new.block format.year.label "year" output.check new.block format.title "title" output.check new.block crossref missing$ { format.in.ed.booktitle "booktitle" output.check format.bvolume output format.number.series output format.pages output address empty$ { organization publisher new.sentence.checkb organization output publisher output format.date output } { address output.nonnull format.date output new.sentence organization output publisher output } if$ } { format.incoll.inproc.crossref output.nonnull format.pages output } if$ 
new.block note output fin.entry } FUNCTION {conference} { inproceedings } FUNCTION {manual} { output.bibitem author empty$ { organization empty$ 'skip$ { organization output.nonnull address output } if$ } { format.authors output.nonnull } if$ format.year.label "year" output.check new.block new.block format.btitle "title" output.check author empty$ { organization empty$ { address new.block.checka address output } 'skip$ if$ } { organization address new.block.checkb organization output address output } if$ format.edition output format.date output new.block note output fin.entry } FUNCTION {mastersthesis} { output.bibitem format.authors "author" output.check new.block format.year.label "year" output.check new.block format.title "title" output.check new.block "Master's thesis" format.thesis.type output.nonnull school "school" output.check address output format.date output new.block note output fin.entry } FUNCTION {misc} { output.bibitem format.authors output new.block format.year.label output new.block title howpublished new.block.checkb format.title output howpublished new.block.checka howpublished output format.date output new.block note output fin.entry empty.misc.check } FUNCTION {phdthesis} { output.bibitem format.authors "author" output.check new.block format.year.label "year" output.check new.block format.btitle "title" output.check new.block "{Ph.D.} thesis" format.thesis.type output.nonnull school "school" output.check address output format.date output new.block note output fin.entry } FUNCTION {proceedings} { output.bibitem editor empty$ { organization output } { format.editors output.nonnull } if$ new.block format.year.label "year" output.check new.block format.btitle "title" output.check format.bvolume output format.number.series output address empty$ { editor empty$ { publisher new.sentence.checka } { organization publisher new.sentence.checkb organization output } if$ publisher output format.date output } { address output.nonnull format.date output 
new.sentence editor empty$ 'skip$ { organization output } if$ publisher output } if$ new.block note output fin.entry } FUNCTION {techreport} { output.bibitem format.authors "author" output.check new.block format.year.label "year" output.check new.block format.title "title" output.check new.block format.tr.number output.nonnull institution "institution" output.check address output format.date output new.block note output fin.entry } FUNCTION {unpublished} { output.bibitem format.authors "author" output.check new.block format.year.label "year" output.check new.block format.title "title" output.check new.block note "note" output.check format.date output fin.entry } FUNCTION {default.type} { misc } MACRO {jan} {"January"} MACRO {feb} {"February"} MACRO {mar} {"March"} MACRO {apr} {"April"} MACRO {may} {"May"} MACRO {jun} {"June"} MACRO {jul} {"July"} MACRO {aug} {"August"} MACRO {sep} {"September"} MACRO {oct} {"October"} MACRO {nov} {"November"} MACRO {dec} {"December"} MACRO {acmcs} {"ACM Computing Surveys"} MACRO {acta} {"Acta Informatica"} MACRO {cacm} {"Communications of the ACM"} MACRO {ibmjrd} {"IBM Journal of Research and Development"} MACRO {ibmsj} {"IBM Systems Journal"} MACRO {ieeese} {"IEEE Transactions on Software Engineering"} MACRO {ieeetc} {"IEEE Transactions on Computers"} MACRO {ieeetcad} {"IEEE Transactions on Computer-Aided Design of Integrated Circuits"} MACRO {ipl} {"Information Processing Letters"} MACRO {jacm} {"Journal of the ACM"} MACRO {jcss} {"Journal of Computer and System Sciences"} MACRO {scp} {"Science of Computer Programming"} MACRO {sicomp} {"SIAM Journal on Computing"} MACRO {tocs} {"ACM Transactions on Computer Systems"} MACRO {tods} {"ACM Transactions on Database Systems"} MACRO {tog} {"ACM Transactions on Graphics"} MACRO {toms} {"ACM Transactions on Mathematical Software"} MACRO {toois} {"ACM Transactions on Office Information Systems"} MACRO {toplas} {"ACM Transactions on Programming Languages and Systems"} MACRO {tcs} 
{"Theoretical Computer Science"} READ FUNCTION {sortify} { purify$ "l" change.case$ } INTEGERS { len } FUNCTION {chop.word} { 's := 'len := s #1 len substring$ = { s len #1 + global.max$ substring$ } 's if$ } INTEGERS { et.al.char.used } FUNCTION {initialize.et.al.char.used} { #0 'et.al.char.used := } EXECUTE {initialize.et.al.char.used} FUNCTION {format.lab.names} { 's := s num.names$ 'numnames := numnames #1 = { s #1 "{vv }{ll}" format.name$ } { numnames #2 = { s #1 "{vv }{ll }and " format.name$ s #2 "{vv }{ll}" format.name$ * } { numnames #3 = { s #1 "{vv }{ll}, " format.name$ s #2 "{vv }{ll}, and " format.name$ * s #3 "{vv }{ll}" format.name$ * } { s #1 "{vv }{ll }\bgroup et al.\egroup " format.name$ } if$ } if$ } if$ } FUNCTION {author.key.label} { author empty$ { key empty$ { cite$ #1 #3 substring$ } { key #3 text.prefix$ } if$ } { author format.lab.names } if$ } FUNCTION {author.editor.key.label} { author empty$ { editor empty$ { key empty$ { cite$ #1 #3 substring$ } { key #3 text.prefix$ } if$ } { editor format.lab.names } if$ } { author format.lab.names } if$ } FUNCTION {author.key.organization.label} { author empty$ { key empty$ { organization empty$ { cite$ #1 #3 substring$ } { "The " #4 organization chop.word #3 text.prefix$ } if$ } { key #3 text.prefix$ } if$ } { author format.lab.names } if$ } FUNCTION {editor.key.organization.label} { editor empty$ { key empty$ { organization empty$ { cite$ #1 #3 substring$ } { "The " #4 organization chop.word #3 text.prefix$ } if$ } { key #3 text.prefix$ } if$ } { editor format.lab.names } if$ } FUNCTION {calc.label} { type$ "book" = type$ "inbook" = or 'author.editor.key.label { type$ "proceedings" = 'editor.key.organization.label { type$ "manual" = 'author.key.organization.label 'author.key.label if$ } if$ } if$ duplicate$ "\protect\citename{" swap$ * "}" * year field.or.null purify$ * 'label := year field.or.null purify$ * sortify 'sort.label := } FUNCTION {sort.format.names} { 's := #1 'nameptr := "" s 
num.names$ 'numnames := numnames 'namesleft := { namesleft #0 > } { nameptr #1 > { " " * } 'skip$ if$ s nameptr "{vv{ } }{ll{ }}{ ff{ }}{ jj{ }}" format.name$ 't := nameptr numnames = t "others" = and { "et al" * } { t sortify * } if$ nameptr #1 + 'nameptr := namesleft #1 - 'namesleft := } while$ } FUNCTION {sort.format.title} { 't := "A " #2 "An " #3 "The " #4 t chop.word chop.word chop.word sortify #1 global.max$ substring$ } FUNCTION {author.sort} { author empty$ { key empty$ { "to sort, need author or key in " cite$ * warning$ "" } { key sortify } if$ } { author sort.format.names } if$ } FUNCTION {author.editor.sort} { author empty$ { editor empty$ { key empty$ { "to sort, need author, editor, or key in " cite$ * warning$ "" } { key sortify } if$ } { editor sort.format.names } if$ } { author sort.format.names } if$ } FUNCTION {author.organization.sort} { author empty$ { organization empty$ { key empty$ { "to sort, need author, organization, or key in " cite$ * warning$ "" } { key sortify } if$ } { "The " #4 organization chop.word sortify } if$ } { author sort.format.names } if$ } FUNCTION {editor.organization.sort} { editor empty$ { organization empty$ { key empty$ { "to sort, need editor, organization, or key in " cite$ * warning$ "" } { key sortify } if$ } { "The " #4 organization chop.word sortify } if$ } { editor sort.format.names } if$ } FUNCTION {presort} { " " calc.label sort.label pop$ type$ "book" = type$ "inbook" = or 'author.editor.sort { type$ "proceedings" = 'editor.organization.sort { type$ "manual" = 'author.organization.sort 'author.sort if$ } if$ } if$ * " " * year field.or.null sortify * " " * title field.or.null sort.format.title * #1 entry.max$ substring$ 'sort.key$ := } ITERATE {presort} SORT STRINGS { longest.label last.sort.label next.extra } INTEGERS { longest.label.width last.extra.num } FUNCTION {initialize.longest.label} { "" 'longest.label := #0 int.to.chr$ 'last.sort.label := "" 'next.extra := #0 'longest.label.width := #0 
'last.extra.num := } FUNCTION {forward.pass} { last.sort.label sort.label = { last.extra.num #1 + 'last.extra.num := last.extra.num int.to.chr$ 'extra.label := } { "a" chr.to.int$ 'last.extra.num := "" 'extra.label := sort.label 'last.sort.label := } if$ } FUNCTION {reverse.pass} { next.extra "b" = { "a" 'extra.label := } 'skip$ if$ label extra.label * 'label := label width$ longest.label.width > { label 'longest.label := label width$ 'longest.label.width := } 'skip$ if$ extra.label 'next.extra := } EXECUTE {initialize.longest.label} ITERATE {forward.pass} REVERSE {reverse.pass} FUNCTION {begin.bib} { et.al.char.used { "\newcommand{\etalchar}[1]{$^{#1}$}" write$ newline$ } 'skip$ if$ preamble$ empty$ 'skip$ { preamble$ write$ newline$ } if$ "\begin{thebibliography}{" "}" * write$ newline$ } EXECUTE {begin.bib} EXECUTE {init.state.consts} ITERATE {call.type$} FUNCTION {end.bib} { newline$ "\end{thebibliography}" write$ newline$ } EXECUTE {end.bib} LanguageMachines-timbl-642727d/docs/texfiles/fullname.sty000077500000000000000000000042621451477526200234360ustar00rootroot00000000000000%%% fullname.sty %%% %%% Stuart M. Shieber %%% Mon Mar 30 17:23:36 EST 1992 %%% Modifications to the citation macros intended to be used with the %%% fullname.bst style. %%% Some material taken from Peter Patel-Schneider's AAAI style for %%% use in conjunction with AAAI-named bibliography style. 
%%% %%% Citation forms: %%% %%% Macro Output format %%% ----------- ----------------------------------------- %%% \cite: (Dewey, 1988) %%% (Dewey, 1988, page 15) %%% (Dewey, 1988; Cheatham, 1987; Howe, 1903) %%% \shortcite: (1988) %%% (1988, page 15) %%% \namecite: Dewey (1988) %%% Dewey (1988, page 15) % don't box citations, add space between multiple citations, separate with ; \def\@citex[#1]#2{\if@filesw\immediate\write\@auxout{\string\citation{#2}}\fi \def\@citea{}\@cite{\@for\@citeb:=#2\do {\@citea\def\@citea{; }\@ifundefined {b@\@citeb}{{\bf ?}\@warning {Citation `\@citeb' on page \thepage \space undefined}}% {\csname b@\@citeb\endcsname}}}{#1}} % Allow short (name-less) citations, when used in % conjunction with a bibliography style that creates labels like % \citename{, } \let\@internalcite\cite \def\cite{\def\citename##1{##1, }(\@internalcite} \def\shortcite{\def\citename##1{}(\@internalcite} \def\namecite{\def\citename##1{##1 (}\@internalcite} \def\@biblabel#1{\def\citename##1{##1}[#1]\hfill} %%% More changes made by SMS (originals in latex.tex) % Use parentheses instead of square brackets in the text. \def\@cite#1#2{{#1\if@tempswa , #2\fi})} % Don't put a label in the bibliography at all. Just use the unlabeled format % instead. 
\def\thebibliography#1{\section*{References\@mkboth {References}{References}}\list {}{\setlength{\labelwidth}{0pt}\setlength{\leftmargin}{\parindent} \setlength{\itemindent}{-\parindent}} \def\newblock{\hskip .11em plus .33em minus -.07em} \sloppy\clubpenalty4000\widowpenalty4000 \sfcode`\.=1000\relax} \let\endthebibliography=\endlist \def\@lbibitem[#1]#2{\item[]\if@filesw { \def\protect##1{\string ##1\space}\immediate \write\@auxout{\string\bibcite{#2}{#1}}\fi\ignorespaces}} \def\@bibitem#1{\item\if@filesw \immediate\write\@auxout {\string\bibcite{#1}{\the\c@enumi}}\fi\ignorespaces} LanguageMachines-timbl-642727d/docs/texfiles/ilk.bib000077500000000000000000030747641451477526200223500ustar00rootroot00000000000000%% This BibTeX bibliography file was created using BibDesk. %% http://bibdesk.sourceforge.net/ %% Created for Antal at 2015-06-08 12:07:42 +0200 %% Saved with string encoding Unicode (UTF-8) @string{aaai = {Proceedings of AAAI}} @string{ac = {{IEEE} Transactions on Automatic Control}} @string{acl = {Proceedings of the Annual Meeting of the ACL}} @string{acl00 = {Proceedings of the 38th Annual Meeting of the Association for Computational Linguistics}} @string{acl01 = {Proceedings of the 39th Annual Meeting of the Association for Computational Linguistics}} @string{acl02 = {Proceedings of the 40th Annual Meeting of the Association for Computational Linguistics}} @string{acl03 = {Proceedings of the 41th Annual Meeting of the Association for Computational Linguistics}} @string{acl09 = {Proceedings of the 47th Annual Meeting of the Association for Computational Linguistics}} @string{acl10 = {Proceedings of the 48th Annual Meeting of the Association for Computational Linguistics}} @string{acl87 = {Proceedings of the 25th Annual Meeting of the Association for Computational Linguistics}} @string{acl88 = {Proceedings of the 26th Annual Meeting of the Association for Computational Linguistics}} @string{acl89 = {Proceedings of the 27th Annual Meeting of the 
Association for Computational Linguistics}} @string{acl90 = {Proceedings of the 28th Annual Meeting of the Association for Computational Linguistics}} @string{acl91 = {Proceedings of the 29th Annual Meeting of the Association for Computational Linguistics}} @string{acl92 = {Proceedings of the 30th Annual Meeting of the Association for Computational Linguistics}} @string{acl93 = {Proceedings of the 31st Annual Meeting of the Association for Computational Linguistics}} @string{acl94 = {Proceedings of the 32nd Annual Meeting of the Association for Computational Linguistics}} @string{acl95 = {Proceedings of the 33rd Annual Meeting of the Association for Computational Linguistics}} @string{acl96 = {Proceedings of the 34th Annual Meeting of the Association for Computational Linguistics}} @string{acl99 = {Proceedings of the 37th Annual Meeting of the Association for Computational Linguistics}} @string{ai = {Artificial Intelligence}} @string{anlp = {Proceedings of ACL Conference on Applied Natural Language Processing}} @string{arpa = {Proceedings of the ARPA Workshop on Human Language Technology}} @string{benjamin = {Benjamin/Cummings Publishing Company, Inc.}} @string{cacm = {Communications of the ACM}} @string{cl = {Computational Linguistics}} @string{coling = {Proceedings of COLING}} @string{coling/acl98 = {Proceedings of the 36th Annual Meeting of the Association for Computational Linguistics and 17th International Conference on Computational Linguistics, {Montr\'{e}al}, Quebec, Canada}} @string{coling00 = {Proceedings of the 18th International Conference on Computational Linguistics, COLING-2000}} @string{coling88 = {Proceedings of the 12th International Conference on Computational Linguistics, COLING-88}} @string{coling90 = {Proceedings of the 13th International Conference on Computational Linguistics, COLING-90}} @string{coling92 = {Proceedings of the 14th International Conference on Computational Linguistics, COLING-92}} @string{coling94 = {Proceedings of the 15th 
International Conference on Computational Linguistics, COLING-94}} @string{coling96 = {Proceedings of the 16th International Conference on Computational Linguistics, COLING-96, Copenhagen, Denmark}} @string{conll = {Computational Natural Language Learning}} @string{csl = {Computer Speech and Language}} @string{cup = {Cambridge University Press}} @string{cvpr = {Proceedings of the {IEEE} Conference on Computer Vision and Pattern Recognition}} @string{darpa = {Proceedings of the DARPA Workshop on Speech and Natural Language}} @string{eacl/acl97 = {Proceedings of the 35th Annual Meeting of the Association for Computational Linguistics and the 8th Annual Meeting of the European Chapter of the Association for Computational Linguistics}} @string{ecml = {Proceedings of the European Conference on Machine Learning}} @string{focs = {IEEE Symposium of Foundation of Computer Science}} @string{icassp = {Proceedings of ICASSP}} @string{iccv = {Proceedings of the International Conference on Computer Vision}} @string{icml = {Proceedings of the International Conference on Machine Learning}} @string{icra = {Proceedings of the International Conference on Robotics and Automation}} @string{ijcai = {Proceedings of the International Joint Conference on Artificial Intelligence}} @string{ijcnn = {Proceedings of the International Joint Conference on Neural Networks}} @string{ijcv = {International Journal of Computer Vision}} @string{infctrl = {Information and Control}} @string{ipw = {Proceedings of the International Parsing Workshop}} @string{iros = {Proceedings of the {IEEE/RSJ} International Workshop on Intelligent Robots and Systems}} @string{it = {IEEE Transactions on information theory}} @string{jacm = {Journal of the ACM}} @string{jnle = {Journal for Natural Language Engineering}} @string{mit = {The MIT Press}} @string{ml = {Machine Learning}} @string{r&a = {{IEEE} Journal of Robotics and Automation}} @string{sigir = {Proceedings of International Conference on Research and Development 
in Information Retrieval, SIGIR}} @string{sijad = {SIAM Journal of Algebraic and Discrete Methods}} @string{sijc = {SIAM Journal of Computing}} @string{smc = {{IEEE} Transactions on Systems, Man, and Cybernetics}} @string{stoc = {ACM Symposium of the Theory of Computing}} @string{tassp = {IEEE Transactions on Acoustics, speech, and Signal Processing}} @string{tcs = {Theoretical Computer Science}} @string{tmi = {Proceedings of the International Conference on Theoretical and Methodological Issues in Machine Translation}} @string{uwoed = {Proceedings of the Annual Conference of the UW Center for the New OED and Text Research}} @string{wvlc95 = {Proceedings of the 3rd ACL/SIGDAT Workshop on Very Large Corpora, Cambridge, Massachusetts, USA}} @string{wvlc96 = {Proceedings of the 4th ACL/SIGDAT Workshop on Very Large Corpora, Copenhagen, Denmark}} @string{wvlc97 = {Proceedings of the 5th ACL/SIGDAT Workshop on Very Large Corpora, Beijing, China, and Hong Kong}} @string{wvlc98 = {Proceedings of the 6th ACL/SIGDAT Workshop on Very Large Corpora, {Montr'{e}al}, Quebec, Canada}} @inproceedings{Brousseau+95, Author = {J. Brousseau and C. Drouin and G. F. Foster and P. Isabelle and R. Kuhn and Y. Normandin and P. Plamondon}, Booktitle = {Proceedings of Eurospeech-1995}, Date-Added = {2015-06-06 12:36:39 +0000}, Date-Modified = {2015-06-06 12:37:47 +0000}, Title = {French speech recognition in an automatic dictation system for translators: the transtalk project.}, Year = {1995}} @inproceedings{Fougeron+01, Author = {C. Fougeron and J.-P. Goldman and A. Dart and L. Gu{\'e}lat and C. Jeager}, Booktitle = {Actes de TALN}, Date-Added = {2015-06-06 10:26:44 +0000}, Date-Modified = {2015-06-06 10:27:46 +0000}, Pages = {173--182}, Title = {Influence de facteurs stylistiques, syntaxiques et lexicaux sur la r{\'e}alisation de la liaison en fran{\c{c}}ais}, Year = {2001}} @inproceedings{Pontes+10b, Author = {J. de Jesus Aguiar Pontes and S. 
Furui}, Booktitle = {Proceedings of Interspeech-2010}, Date-Added = {2015-06-06 10:24:01 +0000}, Date-Modified = {2015-06-06 10:25:15 +0000}, Pages = {186--189}, Title = {Modeling liaison in {F}rench by using decision trees}, Year = {2010}} @article{Pontes+10, Author = {J. de Jesus Aguiar Pontes and and S. Furui}, Date-Added = {2015-06-06 10:22:52 +0000}, Date-Modified = {2015-06-06 10:23:31 +0000}, Journal = {Speech Communication}, Number = {10}, Pages = {847--862}, Title = {Predicting the phonetic realizations of word-final consonants in context--A challenge for French grapheme-to-phoneme converters}, Volume = {52}, Year = {2010}} @inproceedings{Karsdorp+15c, Author = {F. Karsdorp and M. Kestemont and C. Sch\"{o}ch and A. {Van den Bosch}}, Booktitle = {Proceedings of the 6th Workshop on Computational Models of Narrative (CMN-2015)}, Date-Added = {2015-05-31 13:01:59 +0000}, Date-Modified = {2015-05-31 13:03:48 +0000}, Editor = {M. A. Finlayson and B. Miller and A. Lieto and R. Ronfard}, Pages = {89--107}, Title = {The love equation: Computational modeling of romantic relationships in French classical drama}, Year = {2015}} @inproceedings{Karsdorp+15b, Author = {F. Karsdorp and M. {Van der Meulen} and T. Meder and A. {Van den Bosch}}, Booktitle = {Proceedings of the 6th Workshop on Computational Models of Narrative (CMN-2015)}, Date-Added = {2015-05-31 13:00:08 +0000}, Date-Modified = {2015-05-31 13:04:27 +0000}, Editor = {M. A. Finlayson and B. Miller and A. Lieto and R. Ronfard}, Pages = {82--97}, Title = {Animacy detection in stories}, Year = {2015}} @inproceedings{Parisien+10, Author = {C. Parisien and S. Stevenson}, Booktitle = {Proceedings of the 32nd Annual Meeting of the Cognitive Science Society}, Date-Added = {2015-05-03 19:57:33 +0000}, Date-Modified = {2015-05-03 19:58:03 +0000}, Title = {Learning verb alternations in a usage-based {B}ayesian model}, Year = {2010}} @article{Baker79, Author = {C. L. 
Baker}, Date-Added = {2015-05-03 19:48:59 +0000}, Date-Modified = {2015-05-03 19:49:34 +0000}, Journal = {Linguistic Inquiry}, Number = {4}, Pages = {533--581}, Title = {Syntactic theory and the projection problem}, Volume = {10}, Year = {1979}} @article{Perfors+10, Author = {A. Perfors and J. Tenenbaum and E. Wonnacott}, Date-Added = {2015-05-03 19:35:45 +0000}, Date-Modified = {2015-05-03 19:36:29 +0000}, Journal = {Journal of Child Language}, Number = {3}, Pages = {607--642}, Publisher = {Cambridge University Press}, Title = {Variability, negative evidence, and the acquisition of verb argument constructions}, Volume = {37}, Year = {2010}} @inproceedings{Villavicencio+13, Address = {Sofia, Bulgaria}, Author = {A. Villavicencio and M. Idiart and R. Berwick and I. Malioutov}, Booktitle = {Proceedings of the 51st Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)}, Date-Added = {2015-05-03 19:33:12 +0000}, Date-Modified = {2015-05-03 19:33:47 +0000}, Month = {August}, Pages = {1321--1330}, Publisher = {Association for Computational Linguistics}, Title = {Language Acquisition and Probabilistic Models: keeping it simple}, Url = {http://www.aclweb.org/anthology/P13-1130}, Year = {2013}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/P13-1130}} @article{Verberne+14, Author = {S. Verberne and E. D'hondt and M. Marx and A. {Van den Bosch}}, Date-Added = {2015-05-02 19:25:43 +0000}, Date-Modified = {2015-05-02 19:26:41 +0000}, Journal = {Information Processing \& Management}, Number = {4}, Pages = {554--567}, Title = {Automatic thematic classification of election manifestos}, Volume = {50}, Year = {2014}} @article{VandenBosch14, Author = {A. 
{Van den Bosch}}, Date-Added = {2015-05-02 19:24:42 +0000}, Date-Modified = {2015-05-02 19:25:28 +0000}, Journal = {Machine Translation}, Pages = {57--60}, Title = {Book review: Peter Spyns and Jan Odijk (eds): Essential speech and language technology for Dutch: results by the STEVIN programme}, Volume = {28}, Year = {2014}} @inproceedings{Zervanou+14, Address = {Heidelberg}, Author = {K. Zervanou and M. D\"{u}ring and I. Hendrickx and A. {Van den Bosch}}, Booktitle = {Social Informatics}, Date-Added = {2015-05-02 19:22:48 +0000}, Date-Modified = {2015-05-02 19:24:30 +0000}, Editor = {A. Nadamoto, A. Jatowt, A. Wierzbicki, and J. L. Leidner}, Keywords = {strikes}, Pages = {120--133}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Documenting social unrest: Detecting strikes in historical daily newspapers}, Volume = {8359}, Year = {2014}} @incollection{VanGompel+14c, Address = {Ljouwert, Friesland}, Author = {M. {Van Gompel} and A. {Van den Bosch} and A. Dijkstra}, Booktitle = {Philologia Frisica anno 2012}, Date-Added = {2015-05-02 13:00:29 +0000}, Date-Modified = {2015-05-02 13:01:49 +0000}, Pages = {287--296}, Publisher = {Fryske Akademy}, Title = {Oersetter: Frisian-Dutch statistical machine translation}, Year = {2014}} @inproceedings{VanGompel+14b, Address = {Dublin, Ireland}, Author = {M. {Van Gompel} and I. Hendrickx and A. {Van den Bosch} and E. Lefever and V. Hoste}, Booktitle = {Proceedings of the 8th International Workshop on Semantic Evaluation}, Date-Added = {2015-05-02 12:58:49 +0000}, Date-Modified = {2015-05-02 13:00:22 +0000}, Pages = {36--44}, Title = {SemEval 2014 Task 5 - L2 Writing Assistant}, Year = {2014}} @incollection{During+14, Address = {Berlin}, Author = {M. D\"{u}ring and A. {Van den Bosch}}, Booktitle = {Text Mining: From Ontology Learning to Automated Text Processing Applications}, Chapter = {10}, Date-Added = {2015-05-02 12:56:44 +0000}, Date-Modified = {2015-05-02 12:57:47 +0000}, Editor = {C. 
Biemann and A. Mehler}, Publisher = {Springer}, Title = {Multi-perspective event detection in texts documenting the 1944 Battle of Arnhem}, Year = {2014}} @article{Kunneman+14c, Author = {F. Kunneman and A. H\"{u}rriyeto\u{g}lu and N. Oostdijk and A. {Van den Bosch}}, Date-Added = {2015-05-02 12:55:30 +0000}, Date-Modified = {2015-05-02 12:56:34 +0000}, Journal = {Computational Linguistics in the Netherlands Journal}, Pages = {39--52}, Title = {Timely identification of event start dates from Twitter}, Volume = {4}, Year = {2014}} @article{Tellings+14, Author = {A. Tellings and M. Hulsbosch and A. Vermeer and A. {Van den Bosch}}, Date-Added = {2015-05-02 12:54:43 +0000}, Date-Modified = {2015-05-02 12:55:20 +0000}, Journal = {Computational Linguistics in the Netherlands Journal}, Pages = {191--208}, Title = {BasiLex: An 11.5 million words corpus of Dutch texts written for children}, Volume = {4}, Year = {2014}} @article{PanderMaat+14, Author = {H. {Pander Maat} and R. Kraf. and A. {Van den Bosch} and N. Dekker and M. {Van Gompel} and S. Kleijn and T. Sanders and K. {Van der Sloot}}, Date-Added = {2015-05-02 12:53:35 +0000}, Date-Modified = {2015-05-02 12:54:38 +0000}, Journal = {Computational Linguistics in the Netherlands Journal}, Pages = {53--74}, Title = {T-Scan: A new tool for analyzing Dutch text}, Volume = {4}, Year = {2014}} @article{Stoop+14b, Author = {W. Stoop and A. {Van den Bosch}}, Date-Added = {2015-05-02 12:51:15 +0000}, Date-Modified = {2015-05-02 12:53:14 +0000}, Journal = {Dutch Journal for Applied Linguistics}, Keywords = {idiolect, sociolect, word completion, augmentative technology}, Number = {2}, Pages = {136--153}, Title = {Improving word prediction for augmentative communication by using idiolects and sociolects}, Volume = {3}, Year = {2014}} @incollection{VanderBeek+15, Address = {New York, NY}, Author = {L. {Van der Beek} and A. 
{Van den Bosch}}, Booktitle = {The Routledge Encyclopedia of Translation Technology}, Chapter = {21}, Date-Added = {2015-05-02 12:31:53 +0000}, Date-Modified = {2015-05-02 12:33:15 +0000}, Editor = {S.-W. Chan}, Pages = {352--363}, Publisher = {Routledge}, Title = {Translation technology in the Netherlands and Belgium}, Year = {2015}} @article{Karsdorp+15, Author = {F. Karsdorp and M. {Van der Meulen} and T. Meder and A. {Van den Bosch}}, Date-Added = {2015-05-02 12:30:22 +0000}, Date-Modified = {2015-05-02 12:31:44 +0000}, Journal = {Folklore}, Keywords = {motifs, folktales, tunes and tales}, Number = {1}, Pages = {37--52}, Title = {MOMFER: A search engine of Thompson's Motif-Index of Folk Literature}, Volume = {126}, Year = {2015}} @inproceedings{Koolen+15, Author = {M. Koolen and T. Bogers and A. {Van Den Bosch} and J. Kamps}, Bibsource = {dblp computer science bibliography, http://dblp.org}, Biburl = {http://dblp.uni-trier.de/rec/bib/conf/ecir/KoolenBBK15}, Booktitle = {Advances in Information Retrieval - 37th European Conference on {IR} Research, {ECIR} 2015, Vienna, Austria, March 29 - April 2, 2015. Proceedings}, Crossref = {DBLP:conf/ecir/2015}, Date-Added = {2015-05-02 12:28:40 +0000}, Date-Modified = {2015-05-02 12:34:24 +0000}, Doi = {10.1007/978-3-319-16354-3_19}, Pages = {184--196}, Timestamp = {Tue, 17 Mar 2015 15:14:29 +0100}, Title = {Looking for Books in Social Media: An Analysis of Complex Search Requests}, Url = {http://dx.doi.org/10.1007/978-3-319-16354-3_19}, Year = {2015}, Bdsk-Url-1 = {http://dx.doi.org/10.1007/978-3-319-16354-3_19}} @inproceedings{Kunneman+14b, Author = {F. Kunneman and A. {Van den Bosch}}, Booktitle = {Proceedings of the 26th Benelux Conference on Artificial Intelligence}, Date-Added = {2015-04-18 17:09:25 +0000}, Date-Modified = {2015-04-18 17:10:24 +0000}, Editor = {F. Grootjen and M. Otworowska and J. 
Kwisthout}, Pages = {65--72}, Title = {Event detection in Twitter: A machine-learning approach based on term pivoting}, Year = {2014}} @article{Vandekerckhove+15, Author = {B. Vandekerckhove and D. Sandra and W. Daelemans}, Date-Added = {2015-03-08 21:48:04 +0000}, Date-Modified = {2015-03-08 21:50:26 +0000}, Doi = {10.1080/23273798.2015.1016977}, Journal = {Language, Cognition and Neuroscience}, Title = {Effects of online abstraction on adjective order preferences}, Year = {2015}, Bdsk-Url-1 = {http://dx.doi.org/10.1080/23273798.2015.1016977}} @inproceedings{VandenBosch+98, Address = {Sydney, Australia}, Author = {A. {Van den Bosch} and A. Weijters and W. Daelemans}, Booktitle = {Proceedings of NeMLaP3/CoNLL98}, Date-Added = {2015-02-22 16:46:21 +0000}, Date-Modified = {2015-02-22 16:48:44 +0000}, Editor = {D. Powers}, Pages = {185--194}, Title = {Modularity in inductively-learned word pronunciation systems}, Year = {1998}} @article{Bloem+13, Attachments = {http://www.clinjournal.org/sites/default/files/06-Bloem-Bouma-CLIN2013.pdf}, Author = {J. Bloem and G. Bouma}, Date-Added = {2015-01-26 15:16:35 +0000}, Date-Modified = {2015-01-26 15:16:53 +0000}, Issn = {2211-4009}, Journal = {Computational Linguistics in the Netherlands Journal}, Month = {12/2013}, Pages = {82-102}, Title = {Automatic animacy classification for {D}utch}, Volume = {3}, Year = {2013}} @inproceedings{Ovrelid09, Address = {Athens, Greece}, Author = {{\O}vrelid, Lilja}, Booktitle = {Proceedings of the 12th Conference of the European Chapter of the ACL (EACL 2009)}, Date-Added = {2015-01-26 15:16:22 +0000}, Date-Modified = {2015-01-26 15:16:22 +0000}, Month = {March}, Pages = {630--638}, Publisher = {Association for Computational Linguistics}, Title = {Empirical Evaluations of Animacy Annotation}, Url = {http://www.aclweb.org/anthology/E09-1072}, Year = {2009}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/E09-1072}} @article{Sakkis+03, Author = {G. Sakkis and I. Androutsopoulos and C. 
Spyropoulos}, Date-Added = {2015-01-26 15:15:41 +0000}, Date-Modified = {2015-01-26 15:15:41 +0000}, Journal = {Information Retrieval}, Pages = {49--73}, Title = {A Memory-Based Approach to Anti-Spam Filtering for Mailing Lists}, Volume = {6}, Year = {2003}} @incollection{Abate+14, Author = {M. Abate and Y. Assabie}, Booktitle = {Advances in Natural Language Processing}, Date-Added = {2015-01-26 15:15:31 +0000}, Date-Modified = {2015-01-26 15:15:31 +0000}, Pages = {1--13}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Development of Amharic Morphological Analyzer Using Memory-Based Learning}, Volume = {8686}, Year = {2014}} @phdthesis{Millett06, Author = {R. P. Millett}, Date-Added = {2015-01-26 15:13:42 +0000}, Date-Modified = {2015-01-26 15:13:42 +0000}, School = {Brigham Young University. Department of Linguistics and English Language}, Title = {Automatic holistic scoring of ESL essays using linguistic maturity attributes}, Year = {2006}} @incollection{Gaillat+14, Address = {Amsterdam/New York}, Author = {T. Gaillat and P. S{\'e}billot and N. Ballier}, Booktitle = {Recent Advances in Corpus Linguistics-Developing and Exploiting Corpora}, Date-Added = {2015-01-26 15:13:05 +0000}, Date-Modified = {2015-01-26 15:13:05 +0000}, Editor = {L. Vandelanotte and K. Davidse and C. Gentens and D. Kimps}, Pages = {309--324}, Publisher = {Rodopi}, Title = {Automated classification of unexpected uses of {\em this} and {\em that} in a learner corpus of English}, Year = {2014}} @inproceedings{Silva+14, Address = {Dublin, Ireland}, Author = {E. Silva-Schlenker and S. Jimenez and J. 
Baquero}, Booktitle = {Proceedings of the 8th International Workshop on Semantic Evaluation (SemEval 2014)}, Date-Added = {2015-01-26 15:12:57 +0000}, Date-Modified = {2015-01-26 15:12:57 +0000}, Month = {August}, Pages = {743--747}, Publisher = {Association for Computational Linguistics and Dublin City University}, Title = {UNAL-NLP: Cross-Lingual Phrase Sense Disambiguation with Syntactic Dependency Trees}, Url = {http://www.aclweb.org/anthology/S14-2132}, Year = {2014}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/S14-2132}} @article{Way09, Author = {A. Way}, Date-Added = {2015-01-26 15:11:02 +0000}, Date-Modified = {2015-01-26 15:11:02 +0000}, Editor = {W. Daelemans and V. Hoste}, Journal = {Journal of Translation and Interpreting Studies: Special Issue on Evaluation of Translation Technology}, Pages = {17--41}, Publisher = {Academic and Scientific Publishers}, Title = {A Critique of Statistical Machine Translation}, Volume = {8}, Year = {2009}} @techreport{Cohen+13, Address = {The Hague, The Netherlands}, Author = {M. J. Cohen and G. J. M. Brink and O. M. J. Adang and J. A. G. M. Dijk and T. Boeschoten}, Date-Added = {2014-10-12 09:24:37 +0000}, Date-Modified = {2014-10-12 09:29:10 +0000}, Institution = {Ministerie van Veiligheid en Justitie}, Title = {Twee werelden: You only live once}, Year = {2013}} @article{Moens+06, Author = {M. Moens and A. Smet and B. Naudts and J. Verhoeven and M. Ieven and P. Jorens and H. J. Geise and F. Blockhuys}, Date-Added = {2014-07-17 19:19:29 +0000}, Date-Modified = {2014-07-17 19:20:25 +0000}, Journal = {Letters in applied microbiology}, Number = {2}, Pages = {121--126}, Publisher = {Wiley Online Library}, Title = {Fast identification of ten clinically important micro-organisms using an electronic nose}, Volume = {42}, Year = {2006}} @inproceedings{Saunders+06, Author = {J. Saunders and C. L. Nehaniv and K. 
Dautenhahn}, Booktitle = {Proceedings of the 1st ACM SIGCHI/SIGART conference on Human-robot interaction}, Date-Added = {2014-07-17 19:09:33 +0000}, Date-Modified = {2014-07-17 19:10:03 +0000}, Organization = {ACM}, Pages = {118--125}, Title = {Teaching robots by moulding behavior and scaffolding the environment}, Year = {2006}} @inproceedings{Feier+14, Author = {F. Feier and I. Enatescu and C. Ilie and I. Silea}, Booktitle = {Optimization of Electrical and Electronic Equipment (OPTIM), 2014 International Conference on}, Date-Added = {2014-07-15 11:57:43 +0000}, Date-Modified = {2014-07-15 11:58:17 +0000}, Organization = {IEEE}, Pages = {880--885}, Title = {Newborns' cry analysis classification using signal processing and data mining}, Year = {2014}} @inproceedings{VanGompel+14, Address = {Baltimore, Maryland}, Author = {M. {Van Gompel} and A. {Van den Bosch}}, Booktitle = {Proceedings of the 52nd Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)}, Date-Added = {2014-07-15 11:40:10 +0000}, Date-Modified = {2014-07-15 11:40:45 +0000}, Month = {June}, Pages = {871--880}, Publisher = {Association for Computational Linguistics}, Title = {Translation Assistance by Translation of {L1} Fragments in an {L2} Context}, Url = {http://www.aclweb.org/anthology/P14-1082}, Year = {2014}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/P14-1082}} @phdthesis{Vandekerckhove13, Author = {B. Vandekerckhove}, Date-Added = {2014-07-15 11:33:47 +0000}, Date-Modified = {2014-07-15 11:34:44 +0000}, Keywords = {clips}, School = {University of Antwerp}, Title = {Exemplar-based generalisation at the interface between syntax and semantics}, Year = {2013}} @article{Clark+96, Author = {S. E. Clark and S. D. 
Gronlund}, Date-Added = {2014-07-15 11:26:55 +0000}, Date-Modified = {2014-07-15 11:27:41 +0000}, Journal = {Psychonomic Bulletin \& Review}, Number = {1}, Pages = {37--60}, Title = {Global matching models of recognition memory: How the models match the data}, Volume = {3}, Year = {1996}} @article{Vandekerckhove+13, Author = {B. Vandekerckhove and D. Sandra and W. Daelemans}, Date-Added = {2014-07-15 11:17:35 +0000}, Date-Modified = {2014-07-15 11:28:06 +0000}, Journal = {Journal of Neurolinguistics}, Number = {1}, Pages = {46--72}, Title = {Selective impairment of adjective order constraints as overeager abstraction: An elaboration on Kemmerer et al.(2009)}, Volume = {26}, Year = {2013}} @inproceedings{Underwood+13, Author = {T. Underwood and M. L. Black and L. Auvil and B. Capitanu}, Booktitle = {Proceedings of the 2013 IEEE International Conference on Big Data}, Date-Added = {2014-06-22 15:01:13 +0000}, Date-Modified = {2014-06-22 15:02:10 +0000}, Organization = {IEEE}, Pages = {95--103}, Title = {Mapping mutable genres in structurally complex volumes}, Year = {2013}} @article{Brin+98, Author = {S. Brin and L. Page}, Date-Added = {2014-06-20 12:49:10 +0000}, Date-Modified = {2014-06-20 12:49:36 +0000}, Journal = {Computer networks and ISDN systems}, Number = {1}, Pages = {107--117}, Title = {The anatomy of a large-scale hypertextual Web search engine}, Volume = {30}, Year = {1998}} @article{Briun, Date-Added = {2014-06-20 12:48:21 +0000}, Date-Modified = {2014-06-20 12:49:05 +0000}} @inproceedings{Hall+08, Address = {Stroudsburg, PA, USA}, Author = {D. Hall and D. Jurafsky and C. Manning}, Booktitle = {Proceedings of the Conference on Empirical Methods in Natural Language Processing}, Date-Added = {2014-06-20 12:45:53 +0000}, Date-Modified = {2014-06-20 12:46:51 +0000}, Pages = {363--371}, Publisher = {ACL}, Title = {Studying the history of ideas using topic models}, Year = {2008}} @inproceedings{Byrne+10, Address = {Williamsburg, VA}, Author = {K. Byrne and E. 
Klein}, Booktitle = {Proceedings of Computer Applications and Quantitative Methods in Archaeology}, Date-Added = {2014-06-20 12:42:42 +0000}, Date-Modified = {2014-06-20 12:44:31 +0000}, Title = {Automatic Extraction of Archaeological Events from Text}, Year = {2010}} @book{Cornelius95, Address = {New York, NY}, Author = {R. Cornelius}, Date-Added = {2014-06-20 12:39:21 +0000}, Date-Modified = {2014-06-21 08:04:54 +0000}, Edition = {1st}, Publisher = {Simon \& Schuster}, Title = {A bridge too far}, Year = {1995}} @book{Gerritsen+06, Address = {Groesbeek, the Netherlands}, Author = {S. Gerritsen and W. Lenders}, Date-Added = {2014-06-20 12:35:28 +0000}, Date-Modified = {2014-06-20 12:36:35 +0000}, Publisher = {Nationaal Bevrijdingsmuseum 1944-1945}, Title = {Verhalen Die Blijven: Beleefde Geschiedenis in de Grensregio}, Year = {2006}} @book{Stacey67, Author = {C. P. Stacey}, Date-Added = {2014-06-20 11:07:56 +0000}, Date-Modified = {2014-06-20 20:48:37 +0000}, Publisher = {R. Duhamel, Queen's Printer}, Title = {Official History of the {C}anadian {A}rmy in the {S}econd {W}orld {W}ar: The victory campaign: The operations in {N}orthwest {E}urope, 1944--45}, Year = {1967}} @book{Kershaw90, Author = {R. Kershaw}, Date-Added = {2014-06-20 11:04:47 +0000}, Date-Modified = {2014-06-20 20:49:18 +0000}, Publisher = {Crowood}, Title = {It never snows in {S}eptember: The {G}erman view of {M}arket-{G}arden and the {B}attle of {A}rnhem, {S}eptember 1944}, Year = {1990}} @book{Bennett08, Address = {Great Britain}, Author = {D. Bennett}, Date-Added = {2014-06-20 11:01:30 +0000}, Date-Modified = {2014-06-21 08:01:49 +0000}, Publisher = {Casemate}, Title = {Magnificent disaster: The Failure of {M}arket {G}arden, the {A}rnhem {O}peration, {S}eptember 1944}, Year = {2008}} @book{Henke96, Author = {K. D. 
Henke}, Date-Added = {2014-06-19 22:10:56 +0000}, Date-Modified = {2014-06-21 08:01:02 +0000}, Publisher = {Oldenbourg Verlag}, Title = {Die amerikanische Besetzung Deutschlands}, Year = {1996}} @unpublished{McCallum02, Author = {Andrew Kachites McCallum}, Date-Added = {2014-06-19 22:00:11 +0000}, Date-Modified = {2014-06-19 22:00:18 +0000}, Note = {http://mallet.cs.umass.edu}, Title = {MALLET: A Machine Learning for Language Toolkit}, Year = 2002} @book{Jockers13, Author = {M. L. Jockers}, Date-Added = {2014-06-19 21:54:35 +0000}, Date-Modified = {2014-06-19 21:54:55 +0000}, Publisher = {University of Illinois Press}, Title = {Macroanalysis: Digital methods and literary history}, Year = {2013}} @article{Sporleder10, Author = {C. Sporleder}, Date-Added = {2014-06-19 21:27:15 +0000}, Date-Modified = {2014-06-19 21:28:11 +0000}, Journal = {Language and Linguistics Compass}, Number = {9}, Pages = {750--768}, Title = {Natural Language Processing for Cultural Heritage Domains}, Volume = {4}, Year = {2010}} @incollection{Duering14, Address = {Wiesbaden}, Author = {M. D\"{u}ring}, Booktitle = {Visualisierung Sozialer Netzwerke}, Date-Added = {2014-06-19 21:18:57 +0000}, Date-Modified = {2014-06-19 21:22:30 +0000}, Editor = {R. Haussling}, Publisher = {VS Verlag f{\"u}r Sozialwissenschaften}, Title = {Netzwerkvisualisierungen in den {G}eschichtswissenschaften zwischen explorativer {Q}uellenanalyse und der {S}uggestionskraft des {B}ildes}, Year = {2014}} @inproceedings{Wubben+14, Address = {Reykjavik, Iceland}, Author = {S. Wubben and A. {Van Den Bosch} and E. 
Krahmer}, Booktitle = {Proceedings of the Ninth International Conference on Language Resources and Evaluation (LREC'14)}, Date = {26-31}, Date-Added = {2014-05-28 07:29:29 +0000}, Date-Modified = {2015-05-02 12:34:38 +0000}, Isbn = {978-2-9517408-8-4}, Language = {english}, Month = {May}, Publisher = {European Language Resources Association (ELRA)}, Title = {Creating and Using Large Monolingual Parallel Corpora for Sentential Paraphrase Generation}, Year = {2014}} @inproceedings{Hurriyetoglu+14, Address = {Gothenburg, Sweden}, Author = {A. H\"{u}rriyeto\u{g}lu and N. Oostdijk and A. {Van den Bosch}}, Booktitle = {Proceedings of the 5th Workshop on Language Analysis for Social Media (LASM)}, Date-Added = {2014-04-02 10:02:40 +0000}, Date-Modified = {2015-05-02 12:34:06 +0000}, Month = {April}, Pages = {8--16}, Publisher = {Association for Computational Linguistics}, Title = {Estimating Time to Event from Tweets Using Temporal Expressions}, Url = {http://www.aclweb.org/anthology/W14-1302}, Year = {2014}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/W14-1302}} @inproceedings{Stoop+14, Address = {Gothenburg, Sweden}, Author = {W. Stoop and A. {Van den Bosch}}, Booktitle = {Proceedings of the 14th Conference of the European Chapter of the Association for Computational Linguistics}, Date-Added = {2014-04-02 10:02:02 +0000}, Date-Modified = {2015-02-26 23:02:23 +0000}, Month = {April}, Pages = {318--327}, Publisher = {Association for Computational Linguistics}, Title = {Using idiolects and sociolects to improve word prediction}, Url = {http://www.aclweb.org/anthology/E14-1034}, Year = {2014}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/E14-1034}} @inproceedings{Kunneman+14, Address = {Gothenburg, Sweden}, Author = {F. Kunneman and C. Liebrecht and A. 
{Van den Bosch}}, Booktitle = {Proceedings of the 5th Workshop on Language Analysis for Social Media (LASM)}, Date-Added = {2014-04-02 10:00:22 +0000}, Date-Modified = {2015-05-02 12:35:01 +0000}, Month = {April}, Pages = {26--34}, Publisher = {Association for Computational Linguistics}, Title = {The (Un)Predictability of Emotional Hashtags in Twitter}, Url = {http://www.aclweb.org/anthology/W14-1304}, Year = {2014}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/W14-1304}} @inproceedings{lee2008correcting, Author = {Lee, John and Seneff, Stephanie}, Booktitle = {ACL}, Date-Added = {2014-03-12 22:20:11 +0000}, Date-Modified = {2014-03-12 22:20:11 +0000}, Pages = {174--182}, Title = {Correcting Misuse of Verb Forms.}, Year = {2008}} @inproceedings{Foster+09, Author = {J. Foster and {\O}. Andersen}, Booktitle = {Proceedings of the Fourth Workshop on Innovative Use of {NLP} for Building Educational Applications}, Date-Added = {2014-03-12 22:09:46 +0000}, Date-Modified = {2014-03-12 22:11:12 +0000}, Organization = {Association for Computational Linguistics}, Pages = {82--90}, Title = {GenERRate: generating errors for use in grammatical error detection}, Year = {2009}} @inproceedings{Sjobergh+05, Author = {J. Sj{\"o}bergh and O. Knutsson}, Booktitle = {Proceeding of the International Conference Recent Advances in Natural Language Processing}, Date-Added = {2014-03-12 22:05:14 +0000}, Date-Modified = {2014-03-12 22:06:50 +0000}, Title = {Faking errors to avoid making errors: Very weakly supervised learning for error detection in writing}, Year = {2005}} @inproceedings{Rozovskaya+11, Author = {A. Rozovskaya and D. 
Roth}, Booktitle = {Proceedings of the 49th Annual Meeting of the Association for Computational Linguistics: Human Language Technologies-Volume 1}, Date-Added = {2014-03-12 22:01:21 +0000}, Date-Modified = {2014-03-12 22:02:05 +0000}, Organization = {Association for Computational Linguistics}, Pages = {924--933}, Title = {Algorithm selection and model adaptation for {ESL} correction tasks}, Year = {2011}} @inproceedings{Langlais+00, Author = {P. Langlais and G. Foster and G. Lapalme}, Booktitle = {Proceedings of the 2000 NAACL-ANLP Workshop on Embedded machine translation systems-Volume 5}, Date-Added = {2014-02-12 21:39:52 +0000}, Date-Modified = {2014-02-12 21:40:27 +0000}, Organization = {Association for Computational Linguistics}, Pages = {46--51}, Title = {TransType: A computer-aided translation typing system}, Year = {2000}} @article{Goodman01, Author = {Goodman, J.}, Date-Added = {2014-02-12 21:37:26 +0000}, Date-Modified = {2014-02-12 21:37:47 +0000}, Journal = {Computer Speech \& Language}, Number = {4}, Pages = {403--434}, Publisher = {Elsevier}, Title = {A bit of progress in language modeling}, Volume = {15}, Year = {2001}} @article{Ehsan+13, Author = {Ehsan, N. and Faili, H.}, Date-Added = {2014-01-05 18:41:28 +0000}, Date-Modified = {2014-01-05 18:41:56 +0000}, Journal = {Software: Practice and Experience}, Number = {2}, Pages = {187--206}, Title = {Grammatical and context-sensitive error correction using a statistical machine translation framework}, Volume = {43}, Year = {2013}} @inproceedings{Brockett+06, Author = {Brockett, C. and Dolan, W. B. 
and Gamon, M.}, Booktitle = {Proceedings of the 21st International Conference on Computational Linguistics and the 44th annual meeting of the Association for Computational Linguistics}, Date-Added = {2014-01-05 18:39:43 +0000}, Date-Modified = {2014-01-05 18:40:07 +0000}, Organization = {Association for Computational Linguistics}, Pages = {249--256}, Title = {Correcting ESL errors using phrasal SMT techniques}, Year = {2006}} @inproceedings{Yuan+13, Address = {Sofia, Bulgaria}, Author = {Yuan, Z. and Felice, M.}, Booktitle = {Proceedings of the Seventeenth Conference on Computational Natural Language Learning: Shared Task}, Date-Added = {2014-01-05 18:37:05 +0000}, Date-Modified = {2014-01-05 18:40:22 +0000}, Month = {August}, Pages = {52--61}, Publisher = {Association for Computational Linguistics}, Title = {Constrained Grammatical Error Correction using Statistical Machine Translation}, Url = {http://www.aclweb.org/anthology/W13-3607}, Year = {2013}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/W13-3607}} @inproceedings{Ng+13, Address = {Sofia, Bulgaria}, Author = {Ng, H. T. and Wu, S. M. and Wu, Y. and Hadiwinoto, C. and Tetreault, J.}, Booktitle = {Proceedings of the Seventeenth Conference on Computational Natural Language Learning: Shared Task}, Date-Added = {2014-01-05 18:19:01 +0000}, Date-Modified = {2014-01-06 20:33:42 +0000}, Month = {August}, Pages = {1--12}, Publisher = {Association for Computational Linguistics}, Title = {The {CoNLL}-2013 Shared Task on Grammatical Error Correction}, Url = {http://www.aclweb.org/anthology/W13-3601}, Year = {2013}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/W13-3601}} @inproceedings{Karsdorp+12b, Address = {Lisbon, Portugal}, Author = {F. Karsdorp and P. {Van Kranenburg} and T. Meder and A. 
{Van den Bosch}}, Booktitle = {Proceedings of the Second Workshop on Annotation of Corpora for Research in the Humanities (ACRH-2)}, Date-Added = {2013-12-21 16:30:42 +0000}, Date-Modified = {2013-12-21 16:32:46 +0000}, Editor = {F. Mambrini and M. Passarotti and C. Sporleder}, Pages = {39--50}, Publisher = {Edi{\c{c}}{\~o}es Colibri}, Title = {Casting a spell: Identification and ranking of actors in folktales}, Year = {2012}} @article{VandenBosch+12c, Author = {A. {Van den Bosch} and R. Morante and S. Canisius}, Date-Added = {2013-12-21 16:27:49 +0000}, Date-Modified = {2013-12-21 16:29:03 +0000}, Journal = {Computational Linguistics in the Netherlands Journal}, Pages = {97--117}, Title = {Joint learning of dependency parsing and semantic role labeling}, Volume = {2}, Year = {2013}, Bdsk-Url-1 = {http://www.clinjournal.org/sites/default/files/6VanDenBosch2012_0.pdf}} @article{Berendsen+13, Author = {R. Berendsen and M. {De Rijke} and K. Balog and T. Bogers and A. {Van den Bosch}}, Date-Added = {2013-12-21 16:25:41 +0000}, Date-Modified = {2013-12-21 16:26:50 +0000}, Journal = {Journal of the American Society for Information Science and Technology}, Number = {10}, Pages = {2024--2044}, Title = {On the assessment of expertise profiles}, Volume = {64}, Year = {2013}, Bdsk-Url-1 = {http://onlinelibrary.wiley.com/doi/10.1002/asi.22908/pdf}} @inproceedings{Karsdorp+13, Address = {Nijmegen, The Netherlands}, Author = {F. Karsdorp and A. {Van den Bosch}}, Booktitle = {Proceedings of the 22nd Annual Belgian-Dutch Conference on Machine Learning}, Date-Added = {2013-12-21 16:24:08 +0000}, Date-Modified = {2013-12-21 16:24:58 +0000}, Pages = {41--49}, Title = {Identifying motifs in folktales using topic models}, Year = {2013}} @inproceedings{VandenBosch+13c, Address = {Rio de Janeiro}, Author = {A. {Van den Bosch} and T. 
Bogers}, Booktitle = {\#MSM2013 Workshop Concept Extraction Challenge Proceedings}, Date-Added = {2013-12-21 16:22:15 +0000}, Date-Modified = {2014-07-15 11:54:02 +0000}, Editor = {A. Cano and M. Rowe and M. Stankovic and A.-S. Dadzie}, Pages = {40--43}, Title = {Memory-based named entity recognition in tweets}, Year = {2013}} @inproceedings{Tops+13, Address = {Delft, The Netherlands}, Author = {H. Tops and A. {Van den Bosch} and F. Kunneman}, Booktitle = {Proceedings of the 25th Benelux Artificial Intelligence Conference}, Date-Added = {2013-12-21 16:20:22 +0000}, Date-Modified = {2013-12-21 16:21:56 +0000}, Editor = {K. Hindriks and M. de Weerdt and B. van Riemsdijk and M. Warnier}, Title = {Predicting time-to-event from Twitter messages}, Year = {2013}, Bdsk-Url-1 = {http://bnaic2013.tudelft.nl/proceedings/papers/paper_45.pdf}} @article{Sang+13, Abstract = {As data sets keep growing, computational linguists are experiencing more big data problems: challenging demands on storage and processing caused by very large data sets. An example of this is dealing with social media data: including metadata, the messages of the social media site Twitter in 2012 comprise more than 250 terabytes of structured text. Handling data volumes like this requires parallel computing architectures with appropriate software tools. In this paper we present our experiences in working with such a big data set, a collection of two billion Dutch tweets. We show how we collected and stored the data. Next we deal with searching in the data using the Hadoop framework and visualizing search results. In order to determine the usefulness of this tweet analysis resource, we have performed three case studies based on the data: relating word frequency to real-life events, finding words related to a topic, and gathering information about conversations. The three case studies are presented in this paper. 
Access to this current and expanding tweet data set is offered via the website twiqs.nl.}, Attachments = {http://clinjournal.org/sites/default/files/08-TjongKimSang-vandenBosch-CLIN2013.pdf}, Author = {E. {Tjong Kim Sang} and A. {Van den Bosch}}, Date-Added = {2013-12-21 16:16:17 +0000}, Date-Modified = {2013-12-21 16:18:12 +0000}, Issn = {2211-4009}, Journal = {Computational Linguistics in the Netherlands Journal}, Month = {12/2013}, Pages = {121-134}, Title = {Dealing with big data: The case of Twitter}, Volume = {3}, Year = {2013}, Bdsk-Url-1 = {http://clinjournal.org/sites/default/files/08-TjongKimSang-vandenBosch-CLIN2013.pdf}} @inproceedings{Hendrickx+13, Address = {Sofia, Bulgaria}, Author = {I. Hendrickx and M. D{\"u}ring and K. Zervanou and A. {Van Den Bosch}}, Booktitle = {Proceedings of the Third Workshop on Annotation of Corpora for Research in the Humanities (ACRH-3)}, Date-Added = {2013-12-16 12:57:13 +0000}, Date-Modified = {2013-12-16 12:59:00 +0000}, Isbn = {978-954-91700-5-4}, Keywords = {isher, strikes, iish, ehumanities, digital humanities, computational history}, Month = {12th December 2013}, Opteditor = {F. Mambrini, M. Passarotti and C. Sporleder}, Pages = {25--36}, Publisher = {The Institute of Information and Communication Technologies, Bulgarian Academy of Scienes}, Title = {Searching and Finding Strikes in the {N}ew {Y}ork {T}imes}, Year = {2013}} @article{Bresnan+08, Author = {J. Bresnan and J. Hay}, Date-Added = {2013-12-08 23:02:27 +0000}, Date-Modified = {2013-12-08 23:03:18 +0000}, Journal = {Lingua}, Number = {2}, Pages = {245--259}, Title = {Gradient grammar: An effect of animacy on the syntax of {\it give} in {N}ew {Z}ealand and {A}merican {E}nglish}, Volume = {118}, Year = {2008}} @book{Milroy+03, Author = {L. Milroy and M. 
Gordon}, Date-Added = {2013-12-08 22:44:41 +0000}, Date-Modified = {2013-12-08 22:45:25 +0000}, Publisher = {Wiley}, Title = {Sociolinguistics: Method and interpretation}, Year = {2003}} @url{Davies13, Author = {M. Davies}, Date-Added = {2013-12-08 21:10:19 +0000}, Date-Modified = {2013-12-08 23:16:59 +0000}, Lastchecked = {December 8, 2013}, Title = {{Corpus of Global Web-Based English}: 1.9 billion words from speakers in 20 countries}, Url = {http://corpus2.byu.edu/glowbe/}, Year = {2013--}} @article{Campbell+01, Author = {A. Campbell and M. Tomasello}, Date-Added = {2013-11-30 21:36:52 +0000}, Date-Modified = {2013-11-30 21:37:20 +0000}, Journal = {Applied Psycholinguistics}, Number = {2}, Pages = {253--267}, Title = {The acquisition of English dative constructions}, Volume = {22}, Year = {2001}} @book{Harrell01, Author = {F. E. Harrell}, Date-Added = {2013-11-29 19:14:34 +0000}, Date-Modified = {2013-11-30 21:40:57 +0000}, Publisher = {Springer}, Title = {Regression modeling strategies: with applications to linear models, logistic regression, and survival analysis}, Year = {2001}} @article{Mollin09, Author = {S. Mollin}, Date-Added = {2013-11-27 08:23:26 +0000}, Date-Modified = {2013-11-27 08:23:37 +0000}, Journal = {Journal of Corpus Linguistics}, Pages = {367--392}, Title = {``I entirely understand'' is a {B}lairism: The methodology of identifying idiolectal collocations}, Volume = {14 (3)}, Year = {2009}} @article{Louwerse04, Author = {M. M. Louwerse}, Date-Added = {2013-11-27 08:22:42 +0000}, Date-Modified = {2013-11-27 08:22:50 +0000}, Journal = {Computers and the Humanities}, Number = {2}, Pages = {207--221}, Title = {Semantic Variation in Idiolect and Sociolect: Corpus Linguistic Evidence from Literary Texts}, Volume = {38}, Year = {2004}} @book{MacWhinney00, Address = {Mahwah, NJ}, Author = {B. 
MacWhinney}, Date-Added = {2013-11-27 06:53:09 +0000}, Date-Modified = {2013-11-27 06:55:05 +0000}, Publisher = {Lawrence Erlbaum}, Series = {The {CHILDES} project: Tools for analyzing talk}, Title = {The database}, Volume = {2}, Year = {2000}} @incollection{Bresnan+07, Address = {Amsterdam, The Netherlands}, Author = {J. Bresnan and A. Cueni and T. Nikitina and R. H. Baayen}, Booktitle = {Cognitive foundations of interpretation}, Date-Added = {2013-11-09 21:56:02 +0000}, Date-Modified = {2013-11-09 21:57:57 +0000}, Editor = {G. Bouma and I. Kr\"{a}mer and J. Zwarts}, Keywords = {dative alternation}, Pages = {69--94}, Publisher = {Royal Netherlands Academy of Arts and Sciences}, Title = {Predicting the dative alternation}, Year = {2007}} @article{DeMarneffe+12, Author = {M.-C. {De Marneffe} and S. Grimm and I. Arnon and S. Kirby and J. Bresnan}, Date-Added = {2013-11-09 21:39:25 +0000}, Date-Modified = {2013-11-30 21:48:12 +0000}, Journal = {Language and Cognitive Processes}, Keywords = {dative alternation; CHILDES}, Number = {1}, Pages = {25--61}, Title = {A statistical model of the grammatical choices in child production of dative sentences}, Volume = {27}, Year = {2012}} @inproceedings{Strik+02, Address = {Denver, CO.}, Author = {H. Strik and W. Daelemans and D. Binnenpoorte and J. Sturm and F. {De Vriend} and C. Cucchiarini}, Booktitle = {Proceedings of ICSLP}, Date-Added = {2013-11-04 11:22:52 +0000}, Date-Modified = {2013-12-15 13:11:31 +0000}, Pages = {1549--1552}, Title = {Dutch {HLT} resources: from {BLARK} to priority lists}, Year = {2002}} @inproceedings{Krauwer03, Author = {S. Krauwer}, Booktitle = {Proceedings of the International Workshop on Speech and Computer (SPECOM-2003)}, Date-Added = {2013-11-04 11:16:51 +0000}, Date-Modified = {2013-12-15 13:12:53 +0000}, Pages = {8--15}, Title = {The {B}asic {L}anguage {R}esource {K}it ({BLARK}) as the first milestone for the Language Resources Roadmap}, Year = {2003}} @article{Bouma+01, Author = {G. 
Bouma and G. {Van Noord} and R. Malouf}, Date-Added = {2013-10-06 09:02:11 +0000}, Date-Modified = {2013-10-06 09:02:48 +0000}, Journal = {Language and Computers}, Number = {1}, Pages = {45--59}, Publisher = {Rodopi}, Title = {Alpino: Wide-coverage computational analysis of Dutch}, Volume = {37}, Year = {2001}} @book{Haeseryn+97, Address = {Groningen, The Netherlands}, Author = {W. Haeseryn and K. Romijn and G. Geerts and J. {De Rooij} and M. {Van der Toorn}}, Date-Added = {2013-08-18 19:55:03 +0000}, Date-Modified = {2013-08-18 20:05:47 +0000}, Keywords = {Dutch syntax}, Publisher = {Martinus Nijhoff}, Title = {{Algemene Nederlandse Spraakkunst}}, Volume = {2}, Year = {1997}} @techreport{Burnard00, Address = {Oxford, UK}, Author = {L. Burnard}, Date-Added = {2013-08-13 15:02:13 +0000}, Date-Modified = {2015-02-26 23:02:53 +0000}, Institution = {Oxford University}, Publisher = {Oxford University Computing Services}, Title = {Reference Guide for the British National Corpus (World Edition)}, Year = {2000}} @article{VandenBosch+13b, Author = {A. {Van den Bosch} and W. Daelemans}, Date-Added = {2013-08-07 20:10:18 +0000}, Date-Modified = {2013-11-30 21:47:51 +0000}, Journal = {Language and Speech}, Number = {3}, Pages = {308--326}, Title = {Implicit Schemata and Categories in Memory-based Language Processing}, Volume = {56}, Year = {2013}} @article{VanderWouden09, Author = {T. {Van der Wouden}}, Date-Added = {2013-08-07 08:54:10 +0000}, Date-Modified = {2013-08-07 09:25:16 +0000}, Journal = {Nederlandse Taalkunde}, Number = {14}, Pages = {300-306}, Title = {Er staat op de laatste plaats van het middenveld een onbepaalde zelfstandignaamwoordgroep}, Year = {2009}} @article{Grondelaers+02, Author = {S. Grondelaers and M. Brysbaert and D. Speelman and D. 
Geeraerts}, Date-Added = {2013-08-07 08:18:09 +0000}, Date-Modified = {2013-08-07 08:19:55 +0000}, Journal = {{Gramma/TTT Tijdschrift voor Taalkunde}}, Keywords = {syntactic variation}, Pages = {1--22}, Title = {Er als accessibility marker: On- en offline evidentie voor een procedurele duiding van presentatieve zinnen}, Volume = {9}, Year = {2002}} @article{Grondelaers+09, Author = {S. Grondelaers and D. Speelman and D. Drieghe and M. Brysbaert and D. Geeraerts}, Date-Added = {2013-08-07 08:14:37 +0000}, Date-Modified = {2013-08-07 08:15:44 +0000}, Journal = {Acta psychologica}, Keywords = {syntactic variation}, Number = {2}, Pages = {153--160}, Publisher = {Elsevier}, Title = {Introducing a new entity into discourse: Comprehension and production evidence for the status of Dutch {\it er} ``there'' as a higher-level expectancy monitor}, Volume = {130}, Year = {2009}} @phdthesis{Theijssen12, Author = {D. Theijssen}, Date-Added = {2013-08-07 08:08:22 +0000}, Date-Modified = {2013-08-07 08:09:23 +0000}, Keywords = {dative alternation}, Month = {June}, School = {Radboud University Nijmegen}, Title = {Making choices: Modelling the English dative alternation}, Year = {2012}} @incollection{Grondelaers+08, Address = {Berlin}, Author = {S. Grondelaers and D. Speelman and D. Geeraerts}, Booktitle = {Cognitive sociolinguistics: Language variation, cultural models, social systems}, Date-Added = {2013-08-07 07:55:11 +0000}, Date-Modified = {2013-08-07 07:57:27 +0000}, Editor = {G. Kristiansen and R. Dirven}, Keywords = {syntactic variation, corpus linguistics, sociolinguistics}, Pages = {153--204}, Publisher = {Mouton de Gruyter}, Title = {National variation in the use of {\it er} "there": Regional and diachronic constraints on cognitive explanations}, Year = {2008}} @incollection{Grondelaers+07, Address = {Amsterdam/Philadelphia}, Author = {S. Grondelaers and D. Geeraerts and D. 
Speelman}, Booktitle = {Methods in cognitive linguistics}, Date-Added = {2013-08-07 07:52:29 +0000}, Date-Modified = {2013-08-07 07:54:47 +0000}, Editor = {M. Gonzales-Marques and I. Mittelberg and S. Coulson and M.J. Spivey}, Keywords = {corpus linguistics, syntactic variation}, Pages = {149-169}, Publisher = {John Benjamins}, Title = {A case for cognitive corpus linguistics}, Year = {2007}} @inproceedings{Wubben+13, Address = {Sofia, Bulgaria}, Author = {S. Wubben and E. Krahmer and A. {Van den Bosch}}, Booktitle = {Proceedings of the 7th Workshop on Language Technology for Cultural Heritage, Social Sciences, and Humanities}, Date-Added = {2013-08-06 10:12:56 +0000}, Date-Modified = {2013-08-07 07:50:49 +0000}, Keywords = {vi, language transformation, machine translation, Middle Dutch}, Month = {August}, Pages = {11--19}, Publisher = {Association for Computational Linguistics}, Title = {Using character overlap to improve language transformation}, Url = {http://www.aclweb.org/anthology/W13-2702}, Year = {2013}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/W13-2702}} @inproceedings{VandenBosch+13, Address = {Sofia, Bulgaria}, Author = {A. {Van den Bosch} and P. Berck}, Booktitle = {Proceedings of the Seventeenth Conference on Computational Natural Language Learning: Shared Task}, Date-Added = {2013-08-06 10:12:07 +0000}, Date-Modified = {2013-08-07 07:49:53 +0000}, Keywords = {vi, spelling correction, grammatical error correction, memory-based language modeling}, Month = {August}, Pages = {102--108}, Publisher = {Association for Computational Linguistics}, Title = {Memory-based grammatical error correction}, Url = {http://www.aclweb.org/anthology/W13-3614}, Year = {2013}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/W13-3614}} @inproceedings{Liebrecht+13, Address = {Atlanta, Georgia}, Author = {C. Liebrecht and F. Kunneman and A. 
{Van den Bosch}}, Booktitle = {Proceedings of the 4th Workshop on Computational Approaches to Subjectivity, Sentiment and Social Media Analysis}, Date-Added = {2013-06-17 19:19:18 +0000}, Date-Modified = {2013-09-30 22:08:43 +0000}, Month = {June}, Pages = {29--37}, Publisher = {Association for Computational Linguistics}, Title = {The perfect solution for detecting sarcasm in tweets \#not}, Url = {http://www.aclweb.org/anthology/W13-1605}, Year = {2013}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/W13-1605}} @inproceedings{VanGompel+13, Address = {Atlanta, Georgia, USA}, Author = {M. {Van Gompel} and A. {Van den Bosch}}, Booktitle = {Second Joint Conference on Lexical and Computational Semantics (*SEM), Volume 2: Proceedings of the Seventh International Workshop on Semantic Evaluation (SemEval 2013)}, Date-Added = {2013-06-17 19:17:11 +0000}, Date-Modified = {2013-06-17 19:17:43 +0000}, Month = {June}, Pages = {183--187}, Publisher = {Association for Computational Linguistics}, Title = {WSD2: Parameter optimisation for Memory-based Cross-Lingual Word-Sense Disambiguation}, Url = {http://www.aclweb.org/anthology/S13-2033}, Year = {2013}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/S13-2033}} @inproceedings{Hurriyetoglu+13, Author = {A. H\"urriyetoglu and F. Kunneman and A. {Van den Bosch}}, Crossref = {DIR-2013}, Date-Added = {2013-05-27 21:47:57 +0000}, Date-Modified = {2013-05-27 21:49:55 +0000}, Keywords = {adnext twitter}, Pages = {20--23}, Title = {Estimating the Time between Twitter Messages and Future Events}, Url = {http://ceur-ws.org/Vol-986/paper_23.pdf}, Bdsk-Url-1 = {http://ceur-ws.org/Vol-986/paper_23.pdf}} @inproceedings{Sanders+13, Author = {E. Sanders and A. 
{Van den Bosch}}, Crossref = {DIR-2013}, Date-Added = {2013-05-27 21:45:27 +0000}, Date-Modified = {2013-05-27 21:46:34 +0000}, Keywords = {twitter, elections, adnext, politics}, Pages = {68--71}, Title = {Relating Political Party Mentions on Twitter with Polls and Election Results}, Url = {http://ceur-ws.org/Vol-986/paper_9.pdf}, Bdsk-Url-1 = {http://ceur-ws.org/Vol-986/paper_9.pdf}} @proceedings{Eickhoff+13, Address = {Aachen}, Booktitle = {Proceedings of the 13th Dutch-Belgian Workshop on Information Retrieval (DIR)}, Date-Added = {2013-05-27 21:44:36 +0000}, Date-Modified = {2013-05-27 21:45:00 +0000}, Editor = {C. Eickhoff and A. P. de Vries}, Eventdate = {2013-04-26}, Issn = {1613-0073}, Number = 986, Series = {CEUR Workshop Proceedings}, Title = {Proceedings of the 13th Dutch-Belgian Workshop on Information Retrieval (DIR)}, Url = {http://ceur-ws.org/Vol-986/}, Venue = {Delft, The Netherlands}, Year = 2013, Bdsk-Url-1 = {http://ceur-ws.org/Vol-986/}} @inproceedings{Steinberger+11, Author = {Steinberger, Ralf and Pouliquen, Bruno and Kabadjov, Mijail and Belyaeva, Jenya and van der Goot, Erik}, Booktitle = {Proceedings of the 8th International Conference `Recent Advances in Natural Language Processing'}, Date-Added = {2013-03-20 00:40:57 +0000}, Date-Modified = {2013-03-20 00:41:05 +0000}, Pages = {104--110}, Title = {JRC-Names: A freely available, highly multilingual named entity resource}, Year = {2011}} @inproceedings{Li+12, Author = {Li, Chenliang and Weng, Jianshu and He, Qi and Yao, Yuxia and Datta, Anwitaman and Sun, Aixin and Lee, Bu-Sung}, Booktitle = {Proceedings of the 35th international ACM SIGIR conference on Research and development in information retrieval}, Date-Added = {2013-03-20 00:11:51 +0000}, Date-Modified = {2013-03-20 00:11:59 +0000}, Organization = {ACM}, Pages = {721--730}, Title = {Twiner: Named entity recognition in targeted twitter stream}, Year = {2012}} @inproceedings{Ritter+11, Author = {Ritter, Alan and Clark, Sam and Etzioni, 
Oren and others}, Booktitle = {Proceedings of the Conference on Empirical Methods in Natural Language Processing}, Date-Added = {2013-03-20 00:11:04 +0000}, Date-Modified = {2013-03-20 00:11:13 +0000}, Organization = {Association for Computational Linguistics}, Pages = {1524--1534}, Title = {Named entity recognition in tweets: an experimental study}, Year = {2011}} @article{Liu+13, Author = {Liu, Xiaohua and Wei, Furu and Zhang, Shaodian and Zhou, Ming}, Date-Added = {2013-03-20 00:09:51 +0000}, Date-Modified = {2013-03-20 00:10:08 +0000}, Journal = {ACM Transactions on Intelligent Systems and Technology (TIST)}, Number = {1}, Pages = {3}, Publisher = {ACM}, Title = {Named entity recognition for tweets}, Volume = {4}, Year = {2013}} @article{Attardo00, Author = {S. Attardo}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Journal = {Journal of Pragmatics}, Number = {6}, Pages = {793--826}, Title = {Irony as relevant inappropriateness}, Volume = {32}, Year = {2000}} @article{Attardo00b, Author = {S. Attardo}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Journal = {RASK}, Pages = {3--20}, Title = {Irony markers and functions: Towards a goal-oriented theory of irony and its processing}, Volume = {12}, Year = {2000}} @incollection{Attardo07, Address = {New York, NY}, Author = {S. Attardo}, Booktitle = {Irony in language and thought: A cognitive science reader}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Editor = {R. W. Gibbs and R. W. Gibbs Jr. and H. Colston}, Pages = {135--170}, Publisher = {Lawrence Erlbaum}, Title = {Irony as relevant inappropriateness}, Year = {2007}} @article{Attardo+03, Author = {Attardo, S. and Eisterhold, J. and Hay, J. 
and Poggi, I.}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Journal = {Humor}, Number = {2}, Pages = {243--260}, Title = {Visual markers of irony and sarcasm}, Volume = {16}, Year = {2003}} @incollection{Brown80, Address = {Washington, DC}, Author = {Brown, R. L.}, Booktitle = {Language use and the uses of language}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Editor = {R. W. Shuy and A. Shnukal}, Pages = {111--127}, Publisher = {Georgetown University Press}, Title = {The pragmatics of verbal irony}, Year = {1980}} @article{Bryant+05, Author = {Bryant, G. A. and Tree, J. E. Fox}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Journal = {Language and Speech}, Number = {3}, Pages = {257--277}, Publisher = {SAGE Publications}, Title = {Is there an ironic tone of voice?}, Volume = {48}, Year = {2005}} @book{Burgers10, Address = {Nijmegen, The Netherlands}, Author = {Burgers, C. F.}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Publisher = {Ipskamp}, Title = {Verbal irony: Use and effects in written discourse}, Year = {2010}} @article{Colston+04, Author = {Colston, H. L. and Lee, S. Y.}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Journal = {Metaphor and Symbol}, Number = {4}, Pages = {289--306}, Title = {Gender differences in verbal irony use}, Volume = {19}, Year = {2004}} @article{Ducharme94, Author = {Ducharme, L. J.}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Journal = {Symbolic interaction}, Number = {1}, Pages = {51--62}, Title = {Sarcasm and interactional politics}, Volume = {17}, Year = {1994}} @article{Gibbs86, Author = {Gibbs, R. 
W.}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Journal = {Journal of Experimental Psychology: General}, Number = {1}, Pages = {3}, Title = {On the psycholinguistics of sarcasm}, Volume = {115}, Year = {1986}} @incollection{Gibbs07, Address = {New York, NY}, Author = {R. W. Gibbs}, Booktitle = {Irony in language and thougt: A cognitive science reader}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Editor = {R. W. Gibbs and R. W. Gibbs Jr. and H. Colston}, Pages = {173--200}, Publisher = {Lawrence Erlbaum}, Title = {On the psycholinguistics of sarcasm}, Year = {2007}} @incollection{Gibbs07b, Address = {New York, NY}, Author = {R. W. Gibbs}, Booktitle = {Irony in language and thougt: A cognitive science reader}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Editor = {R. W. Gibbs and R. W. Gibbs Jr. and H. Colston}, Pages = {339--360}, Publisher = {Lawrence Erlbaum}, Title = {Irony in talk among friends}, Year = {2007}} @incollection{Gibbs+07, Address = {New York, NY}, Author = {R. W. Gibbs and H. Colston}, Booktitle = {Irony in language and thougt: A cognitive science reader}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Editor = {R. W. Gibbs and R. W. Gibbs Jr. and H. Colston}, Pages = {581--595}, Publisher = {Lawrence Erlbaum}, Title = {Irony as persuasive communication}, Year = {2007}} @incollection{Gibbs+05, Address = {New York, NY}, Author = {R. W. Gibbs and C. Izett}, Booktitle = {Figurative language comprehension: Social and cultural influences}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Editor = {H. Colston and A. Katz}, Pages = {131--151}, Publisher = {Lawrence Erlbaum}, Title = {Irony as persuasive communication}, Year = {2005}} @article{Gibbs+91, Author = {Gibbs, R. W. 
and O'Brien, J.}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Journal = {Journal of pragmatics}, Number = {6}, Pages = {523--530}, Title = {Psychological aspects of irony understanding}, Volume = {16}, Year = {1991}} @article{Giora95, Author = {Giora, R.}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Journal = {Discourse processes}, Number = {2}, Pages = {239--264}, Title = {On irony and negation}, Volume = {19}, Year = {1995}} @book{Giora03, Author = {Giora, R.}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Publisher = {Oxford University Press}, Title = {On our mind: Salience, context, and figurative language}, Year = {2003}} @incollection{Grice75, Address = {New York, NY}, Author = {H. Grice}, Booktitle = {Speech acts: Syntax and semantics}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Editor = {P. Cole and J. Morgan}, Pages = {41--58}, Publisher = {Academic Press}, Title = {Logic and conversation}, Year = {1975}} @incollection{Grice78, Address = {New York, NY}, Author = {H. Grice}, Booktitle = {Pragmatics: syntax and semantics}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Editor = {P. Cole}, Pages = {113--127}, Publisher = {Academic Press}, Title = {Further notes on logic and conversation}, Year = {1978}} @incollection{Hamamoto98, Address = {Amsterdam, The Netherlands}, Author = {H. Hamamoto}, Booktitle = {Relevance theory: Applications and implications}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Editor = {R. Carston and S. 
Uchida}, Pages = {257--270}, Publisher = {John Benjamins}, Title = {Irony from a cognitive perspective}, Year = {1998}} @book{Jahandarie99, Author = {Jahandarie, K.}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Publisher = {Greenwood Publishing Group}, Title = {Spoken and written discourse: A multi-disciplinary perspective}, Year = {1999}} @article{Katz+04, Author = {Katz, A. N. and Blasko, D. G. and Kazmerski, V. A.}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Journal = {Current Directions in Psychological Science}, Number = {5}, Pages = {186--189}, Title = {Saying what you don't mean social influences on sarcastic language processing}, Volume = {13}, Year = {2004}} @article{Kotthoff03, Author = {Kotthoff, H.}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Journal = {Journal of Pragmatics}, Number = {9}, Pages = {1387--1411}, Title = {Responding to irony in different contexts: On cognition in conversation}, Volume = {35}, Year = {2003}} @article{Kreuz+93, Author = {Kreuz, R. J. and Roberts, R. M.}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Journal = {Poetics}, Number = {1}, Pages = {151--169}, Title = {The empirical study of figurative language in literature}, Volume = {22}, Year = {1993}} @article{Kreuz+95, Author = {Kreuz, R. J. and Roberts, R. M.}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Journal = {Metaphor and symbol}, Number = {1}, Pages = {21--31}, Title = {Two cues for verbal irony: Hyperbole and the ironic tone of voice}, Volume = {10}, Year = {1995}} @incollection{Kreuz+96, Address = {Norwood, NJ}, Author = {Kreuz, R. and Roberts, R. and Johnson, B. and Bertus, E.}, Booktitle = {Empirical approaches to literature and aesthetics}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Editor = {R. 
Kreuz and M. MacNealy}, Pages = {83--97}, Publisher = {Ablex}, Title = {Figurative language occurrence and co-occurrence in contemporary literature}, Year = {1996}} @incollection{Kumon-Nakamura+07, Address = {New York, NY}, Author = {Kumon-Nakamura, S. and Glucksberg, S. and Brown, M.}, Booktitle = {Irony in language and thought: A cognitive science reader}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Editor = {R. W. Gibbs and R. W. Gibbs Jr. and H. Colston}, Pages = {57--95}, Publisher = {Lawrence Erlbaum}, Title = {How about another piece of pie: The allusional pretense theory of discourse irony}, Year = {2007}} @article{Leigh94, Author = {Leigh, J. H.}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Journal = {Journal of Advertising}, Pages = {17--33}, Title = {The use of figures of speech in print ad headlines}, Year = {1994}} @article{Livnat04, Author = {Livnat, Zohar}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Journal = {Pragmatics \& Cognition}, Number = {1}, Pages = {57--70}, Title = {On verbal irony, meta-linguistic knowledge and echoic interpretation}, Volume = {12}, Year = {2004}} @book{Mizzau84, Address = {Milan, Italy}, Author = {M. Mizzau}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Publisher = {Feltrinelli}, Title = {L'ironia: la contraddizione consentita}, Year = {1984}} @book{Muecke69, Author = {Muecke, D. C.}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Publisher = {Oxford Univ Press}, Title = {The compass of irony}, Year = {1969}} @article{Muecke78, Author = {Muecke, D. 
C.}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Journal = {Poetics}, Number = {4}, Pages = {363--375}, Title = {Irony markers}, Volume = {7}, Year = {1978}} @article{Partington07, Author = {Partington, A.}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Journal = {Journal of Pragmatics}, Number = {9}, Pages = {1547--1569}, Title = {Irony and reversal of evaluation}, Volume = {39}, Year = {2007}} @incollection{Pexman05, Address = {New York, NY}, Author = {P. Pexman}, Booktitle = {Figurative language comprehension: Social and cultural influences}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Editor = {H. Colston and A. Katz}, Pages = {131--151}, Publisher = {Lawrence Erlbaum}, Title = {Social factors in the interpretation of verbal irony: The role of speaker and listener characteristics}, Year = {2005}} @article{Reyes+12, Author = {Reyes, A. and Rosso, P. and Veale, T.}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Journal = {Language Resources and Evaluation}, Pages = {1--30}, Title = {A multidimensional approach for detecting irony in Twitter}, Year = {2012}} @article{Rockwell03, Author = {Rockwell, P.}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Journal = {Perceptual and motor skills}, Number = {1}, Pages = {251--256}, Title = {Empathy and the expression and recognition of sarcasm by close relations or strangers}, Volume = {97}, Year = {2003}} @article{Rockwell07, Author = {Rockwell, P.}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Journal = {Journal of psycholinguistic research}, Number = {5}, Pages = {361--369}, Title = {Vocal features of conversational sarcasm: A comparison of methods}, Volume = {36}, Year = {2007}} @incollection{Seto98, Address = {Amsterdam, The Netherlands}, Author = {Seto, K.-i.}, Booktitle 
= {Relevance theory: Applications and implications}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Editor = {R. Carston and S. Uchida}, Pages = {239--255}, Publisher = {John Benjamins}, Title = {On non-echoic irony}, Year = {1998}} @book{Sperber+95, Address = {Oxford, UK}, Author = {D. Sperber and D. Wilson}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Edition = {2nd}, Publisher = {Blackwell Publishers}, Title = {Relevance: Communication and cognition}, Year = {1995}} @incollection{Srinarawat05, Address = {Amsterdam, The Netherlands}, Author = {Srinarawat, D.}, Booktitle = {Broadening the horizon of linguistic politeness}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Editor = {R. Lakoff and S. Ide}, Pages = {175--193}, Publisher = {John Benjamins}, Title = {Indirectness as a politeness strategy of {T}hai speakers}, Year = {2005}} @inproceedings{Tsur+10, Author = {Tsur, Oren and Davidov, Dmitry and Rappoport, Ari}, Booktitle = {Proceedings of the Fourth International AAAI Conference on Weblogs and Social Media}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Pages = {162--169}, Title = {ICWSM--A great catchy name: Semi-supervised recognition of sarcastic sentences in online product reviews}, Year = {2010}} @article{VanMulken+12, Author = {M. {Van Mulken} and P. J. Schellens}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Journal = {Tijdschrift voor taalbeheersing}, Number = {1}, Pages = {26--53}, Title = {Over loodzware bassen en wapperende broekspijpen. Gebruik en perceptie van taalintensiverende stijlmiddelen}, Volume = {34}, Year = {2012}} @incollection{Wilson+07, Address = {New York, NY}, Author = {D. Wilson and D. 
Sperber}, Booktitle = {Irony in language and thought: A cognitive science reader}, Date-Added = {2013-03-11 19:50:15 +0000}, Date-Modified = {2013-03-11 19:50:15 +0000}, Editor = {R. W. Gibbs and R. W. Gibbs Jr. and H. Colston}, Pages = {35--55}, Publisher = {Lawrence Erlbaum}, Title = {On verbal irony}, Year = {2007}} @book{Paul20, Address = {Halle, Germany}, Author = {H. Paul}, Date-Added = {2012-11-29 20:22:12 +0000}, Date-Modified = {2012-11-29 21:00:31 +0000}, Edition = {Fifth}, Keywords = {neogrammarians, linguistics}, Publisher = {Max Niemeyer}, Title = {Prinzipien der Sprachgeschichte}, Year = {1920}} @article{EuropeanParliamentCouncil01, Author = {{The European Parliament and the Council}}, Date-Added = {2012-11-28 20:13:30 +0000}, Date-Modified = {2012-11-28 20:20:24 +0000}, Journal = {Official Journal of the European Union}, Keywords = {European Union}, Month = {May}, Pages = {43--48}, Title = {{Regulation (EC) No 1049/2001 of the European Parliament and of the Council regarding public access to European Parliament, Council and Commission documents}}, Volume = {145}, Year = {2001}} @book{Sinninghe33, Address = {Zutphen, The Netherlands}, Author = {J.R.W. Sinninghe}, Date-Added = {2012-11-18 12:53:16 +0000}, Date-Modified = {2012-11-18 12:53:44 +0000}, Publisher = {Thieme}, Title = {Noord-Brabantsch sagenboek}, Year = {1933}} @book{Dijkstra+84, Address = {Groningen, The Netherlands}, Author = {T. Dijkstra and G. Kempen}, Date-Added = {2012-11-18 12:49:06 +0000}, Date-Modified = {2012-11-18 12:50:17 +0000}, Publisher = {Wolters-Noordhoff}, Title = {Taal in uitvoering}, Year = {1984}} @book{Odijk12, Author = {J. 
Odijk}, Date-Added = {2012-11-18 12:11:56 +0000}, Date-Modified = {2012-11-18 12:17:05 +0000}, Isbn = {978-3-642-25977-7}, Note = {Available online at \url{http://www.meta-net.eu/whitepapers}}, Publisher = {Springer}, Series = {META-NET White Paper Series}, Title = {{Het Nederlands in het Digitale Tijdperk -- The Dutch Language in the Digital Age}}, Year = {2012}} @article{Bannard+08, Author = {C. Bannard and D. Matthews}, Date-Added = {2012-11-18 12:06:25 +0000}, Date-Modified = {2012-11-18 12:09:30 +0000}, Journal = {Psychological Science}, Keywords = {psycholinguistics, frequency effects, ngrams}, Number = {3}, Pages = {241--248}, Title = {Stored word sequences in language learning: The effect of familiarity on children's repetition of four-word combinations}, Volume = {19}, Year = {2008}} @book{VanPamelen12, Author = {F. {Van Pamelen}}, Date-Added = {2012-11-18 11:58:28 +0000}, Date-Modified = {2012-11-18 12:16:34 +0000}, Publisher = {De Fontein}, Title = {De zin van de ommezijde}, Year = {2012}} @article{VandeCamp+12, Author = {M. {Van de Camp} and A. {Van den Bosch}}, Date-Added = {2012-10-28 10:32:25 +0000}, Date-Modified = {2012-10-28 10:33:42 +0000}, Journal = {Decision Support Systems}, Keywords = {hitime, cls, lst, social networks, social history}, Number = {4}, Pages = {761--769}, Title = {The socialist network}, Volume = {53}, Year = {2012}} @proceedings{VandenBosch+12b, Address = {New Brunswick, NJ}, Date-Added = {2012-10-28 10:30:32 +0000}, Date-Modified = {2012-10-28 10:31:48 +0000}, Editor = {A. {Van den Bosch} and H. Shatkay}, Keywords = {amicus, dssd, scientific discourse}, Publisher = {ACL}, Title = {Proceedings of the ACL 2012 workshop on Detecting Structure in Scholarly Discourse (DSSD-2012)}, Year = {2012}} @proceedings{Zervanou+12, Address = {New Brunswick, NJ}, Date-Added = {2012-10-28 10:29:01 +0000}, Date-Modified = {2012-10-28 10:30:27 +0000}, Editor = {K. Zervanou and A. 
{Van den Bosch}}, Keywords = {latech, isher, cls, lst}, Publisher = {ACL}, Title = {Proceedings of the EACL 2012 workshop on Language Technology for Cultural Heritage, Social Sciences, and Humanities (LaTeCH-2012)}, Year = {2012}} @inproceedings{Verberne+12, Address = {New Brunswick, NJ}, Author = {S. Verberne and A. {Van den Bosch} and H. Strik and L. Boves}, Booktitle = {Proceedings of the 13th Conference of the European Chapter of the Association for Computational Linguistics}, Date-Added = {2012-10-28 10:27:13 +0000}, Date-Modified = {2012-10-28 10:28:54 +0000}, Keywords = {cls, lst, predictive text entry, text completion, wopr}, Pages = {561--569}, Publisher = {ACL}, Title = {The effect of domain and text type on text prediction quality}, Year = {2012}} @incollection{Mos+12, Address = {Berlin, Germany}, Author = {M. Mos and A. {Van den Bosch} and P. Berck}, Booktitle = {Frequency Effects in Language Learning and Processing}, Date-Added = {2012-10-28 10:24:35 +0000}, Date-Modified = {2012-10-28 10:35:08 +0000}, Editor = {S. Gries and D. Divjak}, Keywords = {vici, cls, lst, language modeling, human language processing, perplexity}, Pages = {207--240}, Publisher = {Mouton De Gruyter}, Title = {The predictive value of word-level perplexity in human sentence processing: A case study on fixed adjective--preposition constructions in Dutch}, Volume = {1}, Year = {2012}} @inproceedings{Karsdorp+12, Address = {Istanbul, Turkey}, Author = {F. Karsdorp and P. {Van Kranenburg} and T. Meder and D. Trieschnigg and A. {Van den Bosch}}, Booktitle = {Proceedings of the 2012 Computational Models of Narrative Workshop}, Date-Added = {2012-10-28 10:22:34 +0000}, Date-Modified = {2012-10-28 10:24:19 +0000}, Keywords = {tunes and tales, cls, lst, folktales, motifs}, Pages = {22--26}, Title = {In search of an appropriate abstraction level for motif annotations}, Year = {2012}} @inproceedings{Vossen+12, Address = {Istanbul, Turkey}, Author = {P. Vossen and A. G\"{o}r\"{o}g and R. 
Izquierdo and A. {Van den Bosch}}, Booktitle = {Proceedings of the Eighth International Conference on Language Resources and Evaluation}, Date-Added = {2012-10-28 10:20:48 +0000}, Date-Modified = {2012-10-28 10:22:21 +0000}, Keywords = {dutchsemcor, cls, lst}, Pages = {584--589}, Title = {{DutchSemCor}: Targeting the ideal sense-tagged corpus}, Year = {2012}} @inproceedings{VandenBosch+12, Address = {New Brunswick, NJ}, Author = {A. {Van den Bosch} and P. Berck}, Booktitle = {Proceedings of the 7th Workshop on the Innovative Use of NLP for Building Educational Applications}, Date-Added = {2012-10-28 10:18:21 +0000}, Date-Modified = {2012-10-28 10:35:37 +0000}, Keywords = {vici, cls, lst, grammatical error correction}, Pages = {289--294}, Publisher = {ACL}, Title = {Memory-based text correction for preposition and determiner errors}, Year = {2012}} @inproceedings{Wubben+12, Address = {New Brunswick, NJ}, Author = {S. Wubben and E. Krahmer and A. {Van den Bosch}}, Booktitle = {Proceedings of the 50th Annual Meeting of the Association for Computational Linguistics}, Date-Added = {2012-10-28 10:16:52 +0000}, Date-Modified = {2012-10-28 10:20:37 +0000}, Keywords = {ilk, memphix, sentence simplification, monolingual machine translation}, Organization = {ACL}, Pages = {1015--1024}, Title = {Sentence simplification by monolingual machine translation}, Year = {2012}} @inproceedings{Kunneman+12, Address = {Maastricht, The Netherlands}, Author = {F. Kunneman and A. {Van den Bosch}}, Booktitle = {Proceedings of the 24th Benelux Conference on Artificial Intelligence}, Date-Added = {2012-10-28 10:13:40 +0000}, Date-Modified = {2012-10-28 10:15:33 +0000}, Editor = {N. Roos and M. Winands and J. Uiterwijk}, Keywords = {cls, lst, adnext}, Pages = {147--154}, Title = {Leveraging unscheduled event prediction through mining scheduled event tweets}, Year = {2012}} @techreport{Asur+10, Author = {Asur, S. and Huberman, B. 
A.}, Date-Added = {2012-06-14 21:22:10 +0000}, Date-Modified = {2012-06-15 07:52:48 +0000}, Institution = {HP Labs}, Title = {{Predicting the Future with Social Media}}, Year = {2010}} @inproceedings{Ritterman+09, Author = {Ritterman, J. and Osborne, M. and Klein, E.}, Booktitle = {1st International Workshop on Mining Social Media}, Date-Added = {2012-06-14 21:21:44 +0000}, Date-Modified = {2012-06-14 21:21:44 +0000}, Title = {{Using Prediction Markets and Twitter to Predict a Swine Flu Pandemic}}, Year = {2009}} @inproceedings{Radinsky+12, Author = {Radinsky, K. and Davidovich, S. and Markovitch, S.}, Booktitle = {Proceedings of the 21st International Conference on the World Wide Web}, Date-Added = {2012-06-14 21:20:46 +0000}, Date-Modified = {2012-06-14 21:21:30 +0000}, Title = {{Learning causality for news event prediction}}, Year = {2012}} @inproceedings{Lanagan+11, Author = {Lanagan, J. and Smeaton, A. F.}, Booktitle = {Fifth International AAAI Conference on Weblogs and Social Media}, Citeulike-Article-Id = {10745556}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Posted-At = {2012-06-05 22:06:11}, Priority = {0}, Title = {{Using Twitter to Detect and Tag Important Events in Live Sports}}, Year = {2011}} @inproceedings{Sajnani+11, Author = {Sajnani, H. and Javanmardi, S. and McDonald, David W. and Lopes, Cristina V.}, Booktitle = {Analyzing Microtext: Papers from the 2011 AAAI Workshop}, Citeulike-Article-Id = {10745543}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Posted-At = {2012-06-05 22:02:36}, Priority = {2}, Title = {{Multi-Label Classification of Short Text: A Study on Wikipedia Barnstars}}, Year = {2011}} @inproceedings{Abel+12, Author = {Abel, F. and Hauff, C. and Houben, G. and Tao, K. 
and Stronkman, R.}, Booktitle = {Proceedings of the 23rd ACM Conference on Hypertext and Social Media, HT 2012}, Citeulike-Article-Id = {10745539}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Posted-At = {2012-06-05 21:59:30}, Priority = {2}, Title = {{Semantics + Filtering + Search = Twitcident, Exploring Information in Social Web Streams}}, Year = {2012}} @inproceedings{Kapanipathi+11, Author = {Kapanipathi, P. and Thomas, C. and Mendes, Pablo N. and Sheth, A.}, Booktitle = {Proceedings of Manufacturing \& Service Operations Management (MSOM), 2011}, Citeulike-Article-Id = {10745537}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Posted-At = {2012-06-05 21:55:17}, Priority = {2}, Title = {{Continuous Semantics: Dynamically Following Events}}, Year = {2011}} @inproceedings{Becker+11, Author = {Becker, H. and Chen, F. and Iter, D. and Naaman, M. and Gravano, L.}, Booktitle = {Proceedings of the Fifth International AAAI Conference on Weblogs and Social Media (ICWSM '11)}, Citeulike-Article-Id = {10745535}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-14 19:20:14 +0000}, Posted-At = {2012-06-05 21:49:45}, Priority = {2}, Title = {{Automatic Identification and Presentation of Twitter Content for Planned Events}}, Year = {2011}} @inproceedings{Choudhury+11, Author = {Choudhury, S. 
and Breslin, John G.}, Booktitle = {Proceedings of the ESWC2011 Workshop on Making Sense of Microposts}, Citeulike-Article-Id = {10745510}, Citeulike-Linkout-0 = {http://oro.open.ac.uk/32460/}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-14 19:21:43 +0000}, Day = {30}, Month = may, Posted-At = {2012-06-05 21:32:23}, Priority = {2}, Title = {{Extracting Semantic Entities and Events from Sports Tweets}}, Url = {http://oro.open.ac.uk/32460/}, Year = {2011}, Bdsk-Url-1 = {http://oro.open.ac.uk/32460/}} @inproceedings{Sriram+10, Abstract = {{In microblogging services such as Twitter, the users may become overwhelmed by the raw data. One solution to this problem is the classification of short text messages. As short texts do not provide sufficient word occurrences, traditional classification methods such as "Bag-Of-Words" have limitations. To address this problem, we propose to use a small set of domain-specific features extracted from the author's profile and text. The proposed approach effectively classifies the text to a predefined set of generic classes such as News, Events, Opinions, Deals, and Private Messages.}}, Address = {New York, NY, USA}, Author = {Sriram, Bharath and Fuhry, Dave and Demir, Engin and Ferhatosmanoglu, Hakan and Demirbas, Murat}, Booktitle = {Proceedings of the 33rd international ACM SIGIR conference on Research and development in information retrieval}, Citeulike-Article-Id = {7573100}, Citeulike-Linkout-0 = {http://portal.acm.org/citation.cfm?id=1835643}, Citeulike-Linkout-1 = {http://dx.doi.org/10.1145/1835449.1835643}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Doi = {10.1145/1835449.1835643}, Isbn = {978-1-4503-0153-4}, Location = {Geneva, Switzerland}, Pages = {841--842}, Posted-At = {2012-06-05 21:27:20}, Priority = {0}, Publisher = {ACM}, Series = {SIGIR '10}, Title = {{Short text classification in twitter to improve information filtering}}, Url = 
{http://dx.doi.org/10.1145/1835449.1835643}, Year = {2010}, Bdsk-Url-1 = {http://dx.doi.org/10.1145/1835449.1835643}} @inproceedings{Sankaranarayanan+09, Abstract = {{Twitter is an electronic medium that allows a large user populace to communicate with each other simultaneously. Inherent to Twitter is an asymmetrical relationship between friends and followers that provides an interesting social network like structure among the users of Twitter. Twitter messages, called tweets, are restricted to 140 characters and thus are usually very focused. We investigate the use of Twitter to build a news processing system, called TwitterStand, from Twitter tweets. The idea is to capture tweets that correspond to late breaking news. The result is analogous to a distributed news wire service. The difference is that the identities of the contributors/reporters are not known in advance and there may be many of them. Furthermore, tweets are not sent according to a schedule: they occur as news is happening, and tend to be noisy while usually arriving at a high throughput rate. Some of the issues addressed include removing the noise, determining tweet clusters of interest bearing in mind that the methods must be online, and determining the relevant locations associated with the tweets.}}, Address = {New York, NY, USA}, Author = {Sankaranarayanan, Jagan and Samet, Hanan and Teitler, Benjamin E. and Lieberman, Michael D. 
and Sperling, Jon}, Booktitle = {Proceedings of the 17th ACM SIGSPATIAL International Conference on Advances in Geographic Information Systems}, Citeulike-Article-Id = {6369732}, Citeulike-Linkout-0 = {http://portal.acm.org/citation.cfm?id=1653771.1653781}, Citeulike-Linkout-1 = {http://dx.doi.org/10.1145/1653771.1653781}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Doi = {10.1145/1653771.1653781}, Isbn = {978-1-60558-649-6}, Keywords = {twitter}, Location = {Seattle, Washington}, Pages = {42--51}, Posted-At = {2012-06-05 21:23:11}, Priority = {2}, Publisher = {ACM}, Series = {GIS '09}, Title = {{TwitterStand: news in tweets}}, Url = {http://dx.doi.org/10.1145/1653771.1653781}, Year = {2009}, Bdsk-Url-1 = {http://dx.doi.org/10.1145/1653771.1653781}} @inproceedings{Jackoway+11, Abstract = {{Twitter presents a source of information that cannot easily be obtained anywhere else. However, though many posts on Twitter reveal up-to-the-minute information about events in the world or interesting sentiments, far more posts are of no interest to the general audience. A method to determine which Twitter users are posting reliable information and which posts are interesting is presented. Using this information a search through a large, online news corpus is conducted to discover future events before they occur along with information about the location of the event. These events can be identified with a high degree of accuracy by verifying that an event found in one news article is found in other similar news articles, since any event interesting to a general audience will likely have more than one news story written about it. Twitter posts near the time of the event can then be identified as interesting if they match the event in terms of keywords or location. 
This method enables the discovery of interesting posts about current and future events and helps in the identification of reliable users.}}, Address = {New York, NY, USA}, Author = {Jackoway, Alan and Samet, Hanan and Sankaranarayanan, Jagan}, Booktitle = {Proceedings of the 3rd ACM SIGSPATIAL International Workshop on Location-Based Social Networks}, Citeulike-Article-Id = {10001986}, Citeulike-Linkout-0 = {http://portal.acm.org/citation.cfm?id=2063224}, Citeulike-Linkout-1 = {http://dx.doi.org/10.1145/2063212.2063224}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-14 19:18:36 +0000}, Doi = {10.1145/2063212.2063224}, Isbn = {978-1-4503-1033-8}, Keywords = {event\_prediction, twitter}, Location = {Chicago, Illinois}, Pages = {25--32}, Posted-At = {2012-06-05 21:22:18}, Priority = {0}, Publisher = {ACM}, Series = {LBSN '11}, Title = {{Identification of live news events using Twitter}}, Url = {http://dx.doi.org/10.1145/2063212.2063224}, Year = {2011}, Bdsk-Url-1 = {http://dx.doi.org/10.1145/2063212.2063224}} @article{Phuvipadawat+10, Abstract = {{Twitter has been used as one of the communication channels for spreading breaking news. We propose a method to collect, group, rank and track breaking news in Twitter. Since short length messages make similarity comparison difficult, we boost scores on proper nouns to improve the grouping results. Each group is ranked based on popularity and reliability factors. Current detection method is limited to facts part of messages. We developed an application called '' Hotstream'' based on the proposed method. Users can discover breaking news from the Twitter timeline. Each story is provided with the information of message originator, story development and activity chart. 
This provides a convenient way for people to follow breaking news and stay informed with real-time updates.}}, Address = {Los Alamitos, CA, USA}, Author = {Phuvipadawat, Swit and Murata, Tsuyoshi}, Booktitle = {Web Intelligence and Intelligent Agent Technology (WI-IAT), 2010 IEEE/WIC/ACM International Conference on}, Citeulike-Article-Id = {9760493}, Citeulike-Linkout-0 = {http://doi.ieeecomputersociety.org/10.1109/WI-IAT.2010.205}, Citeulike-Linkout-1 = {http://dx.doi.org/10.1109/WI-IAT.2010.205}, Citeulike-Linkout-2 = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=5616930}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Doi = {10.1109/WI-IAT.2010.205}, Isbn = {978-0-7695-4191-4}, Journal = {Web Intelligence and Intelligent Agent Technology, IEEE/WIC/ACM International Conference on}, Month = aug, Pages = {120--123}, Posted-At = {2012-06-05 21:19:16}, Priority = {2}, Publisher = {IEEE Computer Society}, Title = {{Breaking News Detection and Tracking in Twitter}}, Url = {http://dx.doi.org/10.1109/WI-IAT.2010.205}, Volume = {3}, Year = {2010}, Bdsk-Url-1 = {http://dx.doi.org/10.1109/WI-IAT.2010.205}} @inproceedings{Finin+2010, Abstract = {{We describe our experience using both Amazon Mechanical Turk (MTurk) and Crowd-Flower to collect simple named entity annotations for Twitter status updates. Unlike most genres that have traditionally been the focus of named entity experiments, Twitter is far more informal and abbreviated. The collected annotations and annotation techniques will provide a first step towards the full study of named entity recognition in domains like Facebook and Twitter. 
We also briefly describe how to use MTurk to collect judgements on the quality of "word clouds."}}, Address = {Stroudsburg, PA, USA}, Author = {Finin, Tim and Murnane, Will and Karandikar, Anand and Keller, Nicholas and Martineau, Justin and Dredze, Mark}, Booktitle = {Proceedings of the NAACL HLT 2010 Workshop on Creating Speech and Language Data with Amazon's Mechanical Turk}, Citeulike-Article-Id = {8825082}, Citeulike-Linkout-0 = {http://portal.acm.org/citation.cfm?id=1866709}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Location = {Los Angeles, California}, Pages = {80--88}, Posted-At = {2012-02-21 11:08:18}, Priority = {3}, Publisher = {Association for Computational Linguistics}, Series = {CSLDAMT '10}, Title = {{Annotating named entities in Twitter data with crowdsourcing}}, Url = {http://portal.acm.org/citation.cfm?id=1866709}, Year = {2010}, Bdsk-Url-1 = {http://portal.acm.org/citation.cfm?id=1866709}} @inproceedings{Treeratpituk+06, Abstract = {{Government agencies must often quickly organize and analyze large amounts of textual information, for example comments received as part of notice and comment rulemaking. Hierarchical organization is popular because it represents information at different levels of detail and is convenient for interactive browsing. Good hierarchical clustering algorithms are available, but there are few good solutions for automatically labeling the nodes in a cluster hierarchy.This paper presents a simple algorithm that automatically assigns labels to hierarchical clusters. The algorithm evaluates candidate labels using information from the cluster, the parent cluster, and corpus statistics. A trainable threshold enables the algorithm to assign just a few high-quality labels to each cluster. 
Experiments with Open Directory Project (ODP) hierarchies indicate that the algorithm creates cluster labels that are similar to labels created by ODP editors.}}, Address = {New York, NY, USA}, Author = {Treeratpituk, Pucktada and Callan, Jamie}, Booktitle = {Proceedings of the 2006 international conference on Digital government research}, Citeulike-Article-Id = {8569756}, Citeulike-Linkout-0 = {http://portal.acm.org/citation.cfm?id=1146650}, Citeulike-Linkout-1 = {http://dx.doi.org/10.1145/1146598.1146650}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Doi = {10.1145/1146598.1146650}, Location = {San Diego, California}, Pages = {167--176}, Posted-At = {2012-01-18 11:21:03}, Priority = {2}, Publisher = {ACM}, Series = {dg.o '06}, Title = {{Automatically labeling hierarchical clusters}}, Url = {http://dx.doi.org/10.1145/1146598.1146650}, Year = {2006}, Bdsk-Url-1 = {http://dx.doi.org/10.1145/1146598.1146650}} @inproceedings{Kumaran+05, Abstract = {{New Event Detection (NED) involves monitoring chronologically-ordered news streams to automatically detect the stories that report on new events. We compare two stories by finding three cosine similarities based on names, topics and the full text. These additional comparisons suggest treating the NED problem as a binary classification problem with the comparison scores serving as features. The classifier models we learned show statistically significant improvement over the baseline vector space model system on all the collections we tested, including the latest TDT5 collection.The presence of automatic speech recognizer (ASR) output of broadcast news in news streams can reduce performance and render our named entity recognition based approaches ineffective. 
We provide a solution to this problem achieving statistically significant improvements.}}, Address = {Stroudsburg, PA, USA}, Author = {Kumaran, Giridhar and Allan, James}, Booktitle = {Proceedings of the conference on Human Language Technology and Empirical Methods in Natural Language Processing}, Citeulike-Article-Id = {3486907}, Citeulike-Linkout-0 = {http://portal.acm.org/citation.cfm?id=1220575.1220591}, Citeulike-Linkout-1 = {http://dx.doi.org/10.3115/1220575.1220591}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:47:51 +0000}, Doi = {10.3115/1220575.1220591}, Location = {Vancouver, British Columbia, Canada}, Pages = {121--128}, Posted-At = {2012-01-10 14:15:09}, Priority = {0}, Publisher = {Association for Computational Linguistics}, Series = {HLT '05}, Title = {{Using names and topics for new event detection}}, Url = {http://dx.doi.org/10.3115/1220575.1220591}, Year = {2005}, Bdsk-Url-1 = {http://dx.doi.org/10.3115/1220575.1220591}} @inproceedings{Petrovic+10, Abstract = {{With the recent rise in popularity and size of social media, there is a growing need for systems that can extract useful information from this amount of data. We address the problem of detecting new events from a stream of Twitter posts. To make event detection feasible on web-scale corpora, we present an algorithm based on locality-sensitive hashing which is able to overcome the limitations of traditional approaches, while maintaining competitive results. In particular, a comparison with a state-of-the-art system on the first story detection task shows that we achieve over an order of magnitude speedup in processing time, while retaining comparable performance. 
Event detection experiments on a collection of 160 million Twitter posts show that celebrity deaths are the fastest spreading news on Twitter.}}, Address = {Stroudsburg, PA, USA}, Author = {Petrovi\'{c}, Sa\v{s}a and Osborne, Miles and Lavrenko, Victor}, Booktitle = {Human Language Technologies: The 2010 Annual Conference of the North American Chapter of the Association for Computational Linguistics}, Citeulike-Article-Id = {8882414}, Citeulike-Linkout-0 = {http://portal.acm.org/citation.cfm?id=1858020}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:49:17 +0000}, Isbn = {1-932432-65-5}, Keywords = {first\_story\_detection, locality\_sensitive\_hashing, tdt, twitter}, Location = {Los Angeles, California}, Pages = {181--189}, Posted-At = {2012-01-06 13:47:39}, Priority = {0}, Publisher = {Association for Computational Linguistics}, Series = {HLT '10}, Title = {{Streaming first story detection with application to Twitter}}, Url = {http://portal.acm.org/citation.cfm?id=1858020}, Year = {2010}, Bdsk-Url-1 = {http://portal.acm.org/citation.cfm?id=1858020}} @mastersthesis{Kunneman11, Author = {F. Kunneman}, Citeulike-Article-Id = {10200587}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-10-28 10:16:30 +0000}, Institution = {Radboud University Nijmegen}, Keywords = {machine\_learning, text\_cohesion, topic\_clustering, topic\_segmentation}, Posted-At = {2012-01-05 13:30:23}, Priority = {0}, Title = {The automatic topic segmentation and topic tracking of television programs}} @article{Moon96, Abstract = {{A common task in signal processing is the estimation of the parameters of a probability distribution function. Perhaps the most frequently encountered estimation problem is the estimation of the mean of a signal in noise. In many parameter estimation problems the situation is more complicated because direct access to the data necessary to estimate the parameters is impossible, or some of the data are missing. 
Such difficulties arise when an outcome is a result of an accumulation of simpler outcomes, or when outcomes are clumped together, for example, in a binning or histogram operation. There may also be data dropouts or clustering in such a way that the number of underlying data points is unknown (censoring and/or truncation). The EM (expectation-maximization) algorithm is ideally suited to problems of this sort, in that it produces maximum-likelihood (ML) estimates of parameters when there is a many-to-one mapping from an underlying distribution to the distribution governing the observation. The EM algorithm is presented at a level suitable for signal processing practitioners who have had some exposure to estimation theory}}, Author = {Moon, T. K.}, Citeulike-Article-Id = {989971}, Citeulike-Linkout-0 = {http://dx.doi.org/10.1109/79.543975}, Citeulike-Linkout-1 = {http://ieeexplore.ieee.org/xpls/abs\_all.jsp?arnumber=543975}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:47:16 +0000}, Doi = {10.1109/79.543975}, Institution = {Electr. \& Comput. Eng. Dept., Utah State Univ., Logan, UT}, Issn = {10535888}, Journal = {IEEE Signal Processing Magazine}, Keywords = {expectation-maximization}, Month = nov, Number = {6}, Pages = {47--60}, Posted-At = {2012-01-02 13:22:36}, Priority = {3}, Publisher = {IEEE}, Title = {{The expectation-maximization algorithm}}, Url = {http://dx.doi.org/10.1109/79.543975}, Volume = {13}, Year = {1996}, Bdsk-Url-1 = {http://dx.doi.org/10.1109/79.543975}} @inproceedings{Sakaki+10, Abstract = {{Twitter, a popular microblogging service, has received much attention recently. An important characteristic of Twitter is its real-time nature. For example, when an earthquake occurs, people make many Twitter posts (tweets) related to the earthquake, which enables detection of earthquake occurrence promptly, simply by observing the tweets. 
As described in this paper, we investigate the real-time interaction of events such as earthquakes in Twitter and propose an algorithm to monitor tweets and to detect a target event. To detect a target event, we devise a classifier of tweets based on features such as the keywords in a tweet, the number of words, and their context. Subsequently, we produce a probabilistic spatiotemporal model for the target event that can find the center and the trajectory of the event location. We consider each Twitter user as a sensor and apply Kalman filtering and particle filtering, which are widely used for location estimation in ubiquitous/pervasive computing. The particle filter works better than other comparable methods for estimating the centers of earthquakes and the trajectories of typhoons. As an application, we construct an earthquake reporting system in Japan. Because of the numerous earthquakes and the large number of Twitter users throughout the country, we can detect an earthquake with high probability (96\% of earthquakes of Japan Meteorological Agency (JMA) seismic intensity scale 3 or more are detected) merely by monitoring tweets. Our system detects earthquakes promptly and sends e-mails to registered users. 
Notification is delivered much faster than the announcements that are broadcast by the JMA.}}, Address = {New York, NY, USA}, Author = {Sakaki, Takeshi and Okazaki, Makoto and Matsuo, Yutaka}, Booktitle = {Proceedings of the 19th international conference on World wide web}, Citeulike-Article-Id = {7097817}, Citeulike-Linkout-0 = {http://portal.acm.org/citation.cfm?id=1772690.1772777}, Citeulike-Linkout-1 = {http://dx.doi.org/10.1145/1772690.1772777}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:46:57 +0000}, Doi = {10.1145/1772690.1772777}, Isbn = {978-1-60558-799-8}, Location = {Raleigh, North Carolina, USA}, Pages = {851--860}, Posted-At = {2012-01-02 13:20:54}, Priority = {0}, Publisher = {ACM}, Series = {WWW '10}, Title = {{Earthquake shakes Twitter users: real-time event detection by social sensors}}, Url = {http://dx.doi.org/10.1145/1772690.1772777}, Year = {2010}, Bdsk-Url-1 = {http://dx.doi.org/10.1145/1772690.1772777}} @inproceedings{Cataldi+10, Abstract = {{Twitter is a user-generated content system that allows its users to share short text messages, called tweets, for a variety of purposes, including daily conversations, URLs sharing and information news. Considering its world-wide distributed network of users of any age and social condition, it represents a low level news flashes portal that, in its impressive short response time, has the principal advantage. In this paper we recognize this primary role of Twitter and we propose a novel topic detection technique that permits to retrieve in real-time the most emergent topics expressed by the community. First, we extract the contents (set of terms) of the tweets and model the term life cycle according to a novel aging theory intended to mine the emerging ones. A term can be defined as emerging if it frequently occurs in the specified time interval and it was relatively rare in the past. 
Moreover, considering that the importance of a content also depends on its source, we analyze the social relationships in the network with the well-known Page Rank algorithm in order to determine the authority of the users. Finally, we leverage a navigable topic graph which connects the emerging terms with other semantically related keywords, allowing the detection of the emerging topics, under user-specified time constraints. We provide different case studies which show the validity of the proposed approach.}}, Address = {New York, NY, USA}, Author = {Cataldi, Mario and Di Caro, Luigi and Schifanella, Claudio}, Booktitle = {Proceedings of the Tenth International Workshop on Multimedia Data Mining}, Citeulike-Article-Id = {8500602}, Citeulike-Linkout-0 = {http://portal.acm.org/citation.cfm?id=1814249}, Citeulike-Linkout-1 = {http://dx.doi.org/10.1145/1814245.1814249}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:48:00 +0000}, Doi = {10.1145/1814245.1814249}, Isbn = {978-1-4503-0220-3}, Keywords = {aging\_theory, first\_story\_detection, new\_event\_detection, social\_network, text\_analysis, topic\_detection, twitter}, Location = {Washington, D.C.}, Posted-At = {2012-01-02 13:19:55}, Priority = {0}, Publisher = {ACM}, Series = {MDMKDD '10}, Title = {{Emerging topic detection on Twitter based on temporal and social terms evaluation}}, Url = {http://dx.doi.org/10.1145/1814245.1814249}, Year = {2010}, Bdsk-Url-1 = {http://dx.doi.org/10.1145/1814245.1814249}} @inproceedings{Benhardus10, Author = {Benhardus, J.}, Citeulike-Article-Id = {10191863}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:45:33 +0000}, Journal = {UCCS REU for Artificial Intelligence, Natural Language Processing and Information Retrieval, Final Report}, Keywords = {natural\_language\_processing, trend\_detection, twitter}, Posted-At = {2012-01-02 13:17:59}, Priority = {0}, Title = {{Streaming Trend Detection in Twitter}}, Year = {2010}} 
@inproceedings{Bethard+06, Abstract = {{Complex tasks like question answering need to be able to identify events in text and the relations among those events. We show that this event identification task and a related task, identifying the semantic class of these events, can both be formulated as classification problems in a word-chunking paradigm. We introduce a variety of linguistically motivated features for this task and then train a system that is able to identify events with a precision of 82\% and a recall of 71\%. We then show a variety of analyses of this model, and their implications for the event identification task.}}, Address = {Stroudsburg, PA, USA}, Author = {S. Bethard and J. H. Martin}, Booktitle = {Proceedings of the 2006 Conference on Empirical Methods in Natural Language Processing}, Citeulike-Article-Id = {10191852}, Citeulike-Linkout-0 = {http://portal.acm.org/citation.cfm?id=1610098}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2013-08-07 09:27:42 +0000}, Isbn = {1-932432-73-6}, Keywords = {event\_detection, semantic\_class\_event}, Location = {Sydney, Australia}, Pages = {146--154}, Posted-At = {2012-01-02 13:04:00}, Priority = {0}, Publisher = {Association for Computational Linguistics}, Series = {EMNLP '06}, Title = {{Identification of event mentions and their semantic class}}, Url = {http://portal.acm.org/citation.cfm?id=1610098}, Year = {2006}, Bdsk-Url-1 = {http://portal.acm.org/citation.cfm?id=1610098}} @inproceedings{Mei+05, Abstract = {{Temporal Text Mining (TTM) is concerned with discovering temporal patterns in text information collected over time. Since most text information bears some time stamps, TTM has many applications in multiple domains, such as summarizing events in news articles and revealing research trends in scientific literature. In this paper, we study a particular TTM task -- discovering and summarizing the evolutionary patterns of themes in a text stream. 
We define this new text mining problem and present general probabilistic methods for solving this problem through (1) discovering latent themes from text; (2) constructing an evolution graph of themes; and (3) analyzing life cycles of themes. Evaluation of the proposed methods on two different domains (i.e., news articles and literature) shows that the proposed methods can discover interesting evolutionary theme patterns effectively.}}, Address = {New York, NY, USA}, Author = {Mei, Qiaozhu and Zhai, ChengXiang}, Booktitle = {Proceedings of the eleventh ACM SIGKDD international conference on Knowledge discovery in data mining}, Citeulike-Article-Id = {1720074}, Citeulike-Linkout-0 = {http://portal.acm.org/citation.cfm?id=1081870.1081895}, Citeulike-Linkout-1 = {http://dx.doi.org/10.1145/1081870.1081895}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:47:23 +0000}, Doi = {10.1145/1081870.1081895}, Isbn = {1-59593-135-X}, Keywords = {clustering, evolutionary\_theme\_patterns, temporal\_text\_mining}, Location = {Chicago, Illinois, USA}, Pages = {198--207}, Posted-At = {2012-01-02 13:01:52}, Priority = {0}, Publisher = {ACM}, Series = {KDD '05}, Title = {{Discovering evolutionary theme patterns from text: an exploration of temporal text mining}}, Url = {http://dx.doi.org/10.1145/1081870.1081895}, Year = {2005}, Bdsk-Url-1 = {http://dx.doi.org/10.1145/1081870.1081895}} @inproceedings{Singh+10, Abstract = {{Events are the fundamental abstractions to study the dynamic world. We believe that the next generation of web (i.e. event-web), will focus on interconnections between events as they occur across space and time [3]. In fact we argue that the real value of large volumes of microblog data being created daily lies in its inherent spatio-temporality, and its correlation with the real-world events. 
In this context, we studied the structural properties of a corpus of 5,835,237 Twitter microblogs, and found it to exhibit Power laws across space and time, much like those exhibited by events in multiple domains. The properties studied over microblogs on different topics can be applied to study relationships between related events, as well as data organization for event-based, real-time, and location-aware applications.}}, Address = {New York, NY, USA}, Author = {Singh, Vivek K. and Jain, Ramesh}, Booktitle = {Proceedings of the 19th international conference on World wide web}, Citeulike-Article-Id = {10191848}, Citeulike-Linkout-0 = {http://portal.acm.org/citation.cfm?id=1772865}, Citeulike-Linkout-1 = {http://dx.doi.org/10.1145/1772690.1772865}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:47:02 +0000}, Doi = {10.1145/1772690.1772865}, Isbn = {978-1-60558-799-8}, Keywords = {event-web, pagerank, twitter, zipfs\_law}, Location = {Raleigh, North Carolina, USA}, Pages = {1183--1184}, Posted-At = {2012-01-02 13:00:12}, Priority = {0}, Publisher = {ACM}, Series = {WWW '10}, Title = {{Structural analysis of the emerging event-web}}, Url = {http://dx.doi.org/10.1145/1772690.1772865}, Year = {2010}, Bdsk-Url-1 = {http://dx.doi.org/10.1145/1772690.1772865}} @inproceedings{Bethard+08, Author = {Bethard, S. and Corvey, W. and Klingenstein, S. and Martin, James H.}, Citeulike-Article-Id = {10191845}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:46:11 +0000}, Journal = {LREC-2008}, Keywords = {causal\_structure, corpus, time\_expressions}, Posted-At = {2012-01-02 12:58:49}, Priority = {0}, Title = {{Building a Corpus of Temporal-Causal Structure}}, Year = {2008}} @article{Bethard+08b, Author = {Bethard, S. and Martin, James H. 
and Klingenstein, Sara K.}, Citeulike-Article-Id = {10191824}, Citeulike-Linkout-0 = {http://clear.colorado.edu/\~{}bethard/documents/bethard\_2007\_ijsc\_temporal.pdf}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:46:20 +0000}, Day = {12}, Journal = {International Journal of Semantic Computing}, Keywords = {machine\_learning, temporal\_relations, timelines}, Month = oct, Posted-At = {2012-01-02 12:52:42}, Priority = {0}, Title = {{Finding Temporal Structure in Text: Machine Learning of Syntactic Temporal Relations}}, Url = {http://clear.colorado.edu/\~{}bethard/documents/bethard\_2007\_ijsc\_temporal.pdf}, Year = {2008}, Bdsk-Url-1 = {http://clear.colorado.edu/%5C~%7B%7Dbethard/documents/bethard%5C_2007%5C_ijsc%5C_temporal.pdf}} @inproceedings{Li+05, Abstract = {{Retrospective news event detection (RED) is defined as the discovery of previously unidentified events in historical news corpus. Although both the contents and time information of news articles are helpful to RED, most researches focus on the utilization of the contents of news articles. Few research works have been carried out on finding better usages of time information. In this paper, we do some explorations on both directions based on the following two characteristics of news articles. On the one hand, news articles are always aroused by events; on the other hand, similar articles reporting the same event often redundantly appear on many news sources. The former hints a generative model of news articles, and the latter provides data enriched environments to perform RED. With consideration of these characteristics, we propose a probabilistic model to incorporate both content and time information in a unified framework. This model gives new representations of both news articles and news events. 
Furthermore, based on this approach, we build an interactive RED system, HISCOVERY, which provides additional functions to present events, Photo Story and Chronicle.}}, Address = {New York, NY, USA}, Author = {Li, Zhiwei and Wang, Bin and Li, Mingjing and Ma, Wei Y.}, Booktitle = {Proceedings of the 28th annual international ACM SIGIR conference on Research and development in information retrieval}, Citeulike-Article-Id = {319837}, Citeulike-Linkout-0 = {http://portal.acm.org/citation.cfm?id=1076034.1076055}, Citeulike-Linkout-1 = {http://dx.doi.org/10.1145/1076034.1076055}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Doi = {10.1145/1076034.1076055}, Isbn = {1-59593-034-5}, Keywords = {news\_event\_detection, probabilistics}, Location = {Salvador, Brazil}, Pages = {106--113}, Posted-At = {2012-01-02 11:52:15}, Priority = {0}, Publisher = {ACM}, Series = {SIGIR '05}, Title = {{A probabilistic model for retrospective news event detection}}, Url = {http://dx.doi.org/10.1145/1076034.1076055}, Year = {2005}, Bdsk-Url-1 = {http://dx.doi.org/10.1145/1076034.1076055}} @inproceedings{Kumaran+04, Abstract = {{New Event Detection is a challenging task that still offers scope for great improvement after years of effort. In this paper we show how performance on New Event Detection (NED) can be improved by the use of text classification techniques as well as by using named entities in a new way. We explore modifications to the document representation in a vector space-based NED system. We also show that addressing named entities preferentially is useful only in certain situations. 
A combination of all the above results in a multi-stage NED system that performs much better than baseline single-stage NED systems.}}, Address = {New York, NY, USA}, Author = {Kumaran, Giridhar and Allan, James}, Booktitle = {Proceedings of the 27th annual international ACM SIGIR conference on Research and development in information retrieval}, Citeulike-Article-Id = {1219863}, Citeulike-Linkout-0 = {http://portal.acm.org/citation.cfm?id=1009044}, Citeulike-Linkout-1 = {http://dx.doi.org/10.1145/1008992.1009044}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:47:44 +0000}, Doi = {10.1145/1008992.1009044}, Isbn = {1-58113-881-4}, Keywords = {cosine\_similarity, named\_entity\_detection, new\_event\_detection, text\_classification}, Location = {Sheffield, United Kingdom}, Pages = {297--304}, Posted-At = {2012-01-02 11:50:07}, Priority = {0}, Publisher = {ACM}, Series = {SIGIR '04}, Title = {{Text classification and named entities for new event detection}}, Url = {http://dx.doi.org/10.1145/1008992.1009044}, Year = {2004}, Bdsk-Url-1 = {http://dx.doi.org/10.1145/1008992.1009044}} @inproceedings{Mazur+10, Abstract = {{The reliable extraction of knowledge from text requires an appropriate treatment of the time at which reported events take place. Unfortunately, there are very few annotated data sets that support the development of techniques for event time-stamping and tracking the progression of time through a narrative. In this paper, we present a new corpus of temporally-rich documents sourced from English Wikipedia, which we have annotated with TIMEX2 tags. The corpus contains around 120000 tokens, and 2600 TIMEX2 expressions, thus comparing favourably in size to other existing corpora used in these areas. We describe the preparation of the corpus, and compare the profile of the data with other existing temporally annotated corpora. 
We also report the results obtained when we use DANTE, our temporal expression tagger, to process this corpus, and point to where further work is required. The corpus is publicly available for research purposes.}}, Address = {Stroudsburg, PA, USA}, Author = {Mazur, Pawe{\l} and Dale, Robert}, Booktitle = {Proceedings of the 2010 Conference on Empirical Methods in Natural Language Processing}, Citeulike-Article-Id = {10191724}, Citeulike-Linkout-0 = {http://portal.acm.org/citation.cfm?id=1870747}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:47:33 +0000}, Keywords = {corpus, time\_expressions, wikipedia}, Location = {Cambridge, Massachusetts}, Pages = {913--922}, Posted-At = {2012-01-02 11:46:10}, Priority = {0}, Publisher = {Association for Computational Linguistics}, Series = {EMNLP '10}, Title = {{WikiWars: a new corpus for research on temporal expressions}}, Url = {http://portal.acm.org/citation.cfm?id=1870747}, Year = {2010}, Bdsk-Url-1 = {http://portal.acm.org/citation.cfm?id=1870747}} @article{Anderson07, Author = {Anderson, P.}, Citeulike-Article-Id = {10191227}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:45:24 +0000}, Journal = {JISC Technology and Standards Watch}, Keywords = {20, web}, Month = feb, Posted-At = {2012-01-02 08:25:09}, Priority = {0}, Title = {{What is Web 2.0? Ideas, technologies and implications for education}}, Year = {2007}} @book{Tan+05, Author = {Tan, P. N. and Steinbach, M. and Kumar, V.}, Citeulike-Article-Id = {10191210}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Keywords = {*file-import-12-01-02}, Posted-At = {2012-01-02 08:15:28}, Priority = {0}, Publisher = {Addison-Wesley}, Title = {{Introduction to Data Mining}}, Year = {2005}} @article{Brock+08, Author = {Brock, G. and Pihur, V. and Datta, S. 
and Datta, S.}, Citeulike-Article-Id = {10191209}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Journal = {J {S}tat {S}oftw}, Keywords = {*file-import-12-01-02}, Pages = {1--22}, Posted-At = {2012-01-02 08:15:28}, Priority = {0}, Title = {{clValid: an R package for cluster validation}}, Volume = {25}, Year = {2008}} @article{Shriberg+01, Author = {Shriberg, E. and T\"{u}r, G. and Hakkani-T\"{u}r, D. and Stolcke, A.}, Citeulike-Article-Id = {10191208}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Journal = {Computational {L}inguistics}, Keywords = {*file-import-12-01-02}, Pages = {31--57}, Posted-At = {2012-01-02 08:15:28}, Priority = {0}, Title = {{Integrating prosodic and lexical cues for automatic topic segmentation}}, Volume = {27}, Year = {2001}} @incollection{Karlgren+01, Author = {Karlgren, J. and Sahlgren, M.}, Booktitle = {Foundations of real-world understanding}, Citeulike-Article-Id = {10191207}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Editor = {Uesaka, Y. and Kanerva, P. and Asoh, H.}, Keywords = {*file-import-12-01-02}, Pages = {294--308}, Posted-At = {2012-01-02 08:15:28}, Priority = {0}, Title = {{From words to understanding}}, Year = {2001}} @article{Meila+01, Author = {Meil\`{a}, M. and Heckerman, D.}, Citeulike-Article-Id = {10191206}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Journal = {Machine {L}earning}, Keywords = {*file-import-12-01-02}, Pages = {9--29}, Posted-At = {2012-01-02 08:15:28}, Priority = {0}, Title = {An {E}xperimental {C}omparison of {M}odel-{B}ased {C}lustering {M}ethods}, Volume = {42}, Year = {2001}} @book{Manning+08, Author = {Manning, C. and Raghavan, P. 
and Sch\"{u}tze, H.}, Citeulike-Article-Id = {10191205}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Keywords = {*file-import-12-01-02}, Posted-At = {2012-01-02 08:15:28}, Priority = {0}, Publisher = {Cambridge {U}niversity {P}ress}, Title = {Introduction to {I}nformation {R}etrieval}, Year = {2008}} @article{Jain+99, Author = {Jain, A. K. and Murty, M. N. and Flynn, P. J.}, Citeulike-Article-Id = {10191204}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Journal = {A{C}{M} {C}omputing {S}urveys}, Keywords = {*file-import-12-01-02}, Number = {3}, Pages = {264--323}, Posted-At = {2012-01-02 08:15:28}, Priority = {0}, Title = {Data {C}lustering: {A} {R}eview}, Volume = {31}, Year = {1999}} @techreport{Steinbach+00b, Author = {Steinbach, M. and Karypis, G. and Kumar, V.}, Citeulike-Article-Id = {10191203}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:50:25 +0000}, Keywords = {*file-import-12-01-02}, Number = {34}, Posted-At = {2012-01-02 08:15:28}, Priority = {0}, Title = {{A comparison of document clustering techniques}}, Year = {2000}} @article{Amaral+07, Author = {Amaral, R. and Meinedo, H. and Caseiro, D. and Trancoso, I. and Neto, J.}, Citeulike-Article-Id = {10191202}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Journal = {E{U}{R}{A}{S}{I}{P} {J}ournal on {A}dvances in {S}ignal {P}rocessing}, Keywords = {*file-import-12-01-02}, Posted-At = {2012-01-02 08:15:28}, Priority = {0}, Title = {A {P}rototype {S}ystem for {S}elective {D}issemination of {B}roadcast {N}ews in {E}uropean {P}ortuguese}, Year = {2007}} @inproceedings{Rosenberg+06, Author = {Rosenberg, A. 
and Hirschberg, J.}, Booktitle = {HLT-NAACL}, Citeulike-Article-Id = {10191201}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Keywords = {*file-import-12-01-02}, Pages = {125--128}, Posted-At = {2012-01-02 08:15:28}, Priority = {0}, Title = {{Story segmentation of broadcast news in English, Mandarin and Arabic}}, Year = {2006}} @book{Huijbregts08, Author = {Huijbregts, M.}, Citeulike-Article-Id = {10191199}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Keywords = {*file-import-12-01-02}, Posted-At = {2012-01-02 08:15:28}, Priority = {0}, Title = {Segmentation, {D}iarization and {S}peech {T}ranscription: {S}urprise {D}ata {U}nraveled}, Year = {2008}} @article{Hearst91, Author = {Hearst, M. A.}, Citeulike-Article-Id = {10191198}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Journal = {Computational Linguistics}, Keywords = {*file-import-12-01-02}, Number = {1}, Pages = {33--64}, Posted-At = {2012-01-02 08:15:27}, Priority = {0}, Title = {{TextTiling: Segmenting text into multi-paragraph subtopic passages}}, Volume = {23}, Year = {1991}} @inproceedings{Reynar99, Author = {Reynar, J. C.}, Booktitle = {Proceedings of the 37th Annual Meeting of the ACL}, Citeulike-Article-Id = {10191197}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Keywords = {*file-import-12-01-02}, Pages = {357--364}, Posted-At = {2012-01-02 08:15:27}, Priority = {0}, Title = {{Statistical Models for Topic Segmentation}}, Year = {1999}} @article{Shahnaz+06, Author = {Shahnaz, F. and Berry, M. W. and Pauca, V. P. and Plemmons, R. 
J.}, Citeulike-Article-Id = {10191196}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Journal = {Information Processing \& Management}, Keywords = {*file-import-12-01-02}, Number = {2}, Pages = {373--386}, Posted-At = {2012-01-02 08:15:27}, Priority = {0}, Title = {{Document clustering using nonnegative matrix factorization}}, Volume = {42}, Year = {2006}} @inproceedings{Xu+03, Author = {Xu, W. and Liu, X. and Gong, Yihong}, Booktitle = {Proceedings of SIGIR'03}, Citeulike-Article-Id = {10191195}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Keywords = {*file-import-12-01-02}, Pages = {267--273}, Posted-At = {2012-01-02 08:15:27}, Priority = {0}, Title = {{Document Clustering Based On Non-Negative Matrix Factorization}}, Year = {2003}} @article{Hammouda+04, Author = {Hammouda, K. M. and Kamel, M. S.}, Citeulike-Article-Id = {10191194}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Journal = {IEEE Transactions on Knowledge and Data Engineering}, Keywords = {*file-import-12-01-02}, Number = {10}, Pages = {1279--1296}, Posted-At = {2012-01-02 08:15:27}, Priority = {0}, Title = {{Efficient Phrase-Based Document Indexing for Web Document Clustering}}, Volume = {16}, Year = {2004}} @article{He+10, Author = {He, Q. and Chang, K. and Lim, E. and Banerjee, A.}, Citeulike-Article-Id = {10191193}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Journal = {IEEE Transaction on Pattern Analysis and Machine Intelligence}, Keywords = {*file-import-12-01-02}, Number = {10}, Pages = {1795--1808}, Posted-At = {2012-01-02 08:15:27}, Priority = {0}, Title = {{Keep It Simple with Time: A Re-examination of Probabilistic Topic Detection Models}}, Volume = {32}, Year = {2010}} @article{Makkonen+04, Author = {Makkonen, J. and Ahonen-Myka, H. 
and Salmenkivi, M.}, Citeulike-Article-Id = {10191192}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Journal = {Information Retrieval}, Keywords = {*file-import-12-01-02}, Pages = {347--368}, Posted-At = {2012-01-02 08:15:27}, Priority = {0}, Title = {{Simple Semantics in Topic Detection and Tracking}}, Volume = {7}, Year = {2004}} @inproceedings{Choi+01, Author = {Choi, F. Y. Y. and Wiemer-Hastings, P. and Moore, J.}, Booktitle = {Proceedings of NAACL'01}, Citeulike-Article-Id = {10191191}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Keywords = {*file-import-12-01-02}, Pages = {109--117}, Posted-At = {2012-01-02 08:15:27}, Priority = {0}, Title = {{Latent Semantic Analysis for Text Segmentation}}, Year = {2001}} @article{Bestgen06, Author = {Bestgen, Y.}, Citeulike-Article-Id = {10191190}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Journal = {Computational Linguistics}, Keywords = {*file-import-12-01-02}, Number = {1}, Pages = {5--12}, Posted-At = {2012-01-02 08:15:27}, Priority = {0}, Title = {{Improving Text Segmentation Using Latent Semantic Analysis: A Reanalysis of Choi, Wiemer-Hastings, and Moore (2001)}}, Volume = {32}, Year = {2006}} @inproceedings{Choi00, Author = {Choi, F. Y. Y.}, Booktitle = {Proceedings of the Sixth Applied Natural Language Conference (ANLP-00) and the First Meeting of the North American Chapter of the Association for Computational Linguistics (NAACL-00)}, Citeulike-Article-Id = {10191189}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Keywords = {*file-import-12-01-02}, Pages = {26--33}, Posted-At = {2012-01-02 08:15:27}, Priority = {0}, Title = {{Advances in domain independent linear text segmentation}}, Year = {2000}} @article{Fujii+03, Author = {Fujii, A. and Itou, K. 
and Ishikawa, T.}, Citeulike-Article-Id = {10191188}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Journal = {AAAI Technical Report}, Keywords = {*file-import-12-01-02}, Posted-At = {2012-01-02 08:15:27}, Priority = {0}, Title = {{A System for On-demand Video Lectures}}, Volume = {8}, Year = {2003}} @inproceedings{Ginter+08, Author = {Ginter, F. and Suominen, H. and Pyysalo, S. and Salakoski, T.}, Booktitle = {Proceedings of SMBM '08}, Citeulike-Article-Id = {10191187}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Keywords = {*file-import-12-01-02}, Posted-At = {2012-01-02 08:15:27}, Priority = {0}, Title = {{Combining Hidden Markov Models and Latent Semantic Analysis for Topic Segmentation and Labeling: Method and Clinical Application}}, Year = {2008}} @inproceedings{Allan+05, Author = {Allan, J. and Harding, S. and Fisher, D. and Bolivar, A. and Guzman-Lara, S. and Amstutz, P.}, Booktitle = {Proceedings of the 38th Annual Hawaii International Conference on System Sciences}, Citeulike-Article-Id = {10191186}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Keywords = {*file-import-12-01-02}, Pages = {1--10}, Posted-At = {2012-01-02 08:15:27}, Priority = {0}, Title = {{Taking Topic Detection From Evaluation to Practice}}, Year = {2005}} @article{Yang+99, Author = {Yang, Y. and Carbonell, J. G. and Brown, R. D. and Pierce, T. and Archibald, B. 
T.}, Citeulike-Article-Id = {10191185}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Journal = {IEEE Intelligent Systems}, Keywords = {*file-import-12-01-02}, Number = {4}, Pages = {32--43}, Posted-At = {2012-01-02 08:15:27}, Priority = {0}, Title = {{Learning Approaches for Detecting and Tracking News Events}}, Volume = {14}, Year = {1999}} @article{Allan02, Author = {Allan, J.}, Citeulike-Article-Id = {10191184}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Journal = {Information Retrieval}, Keywords = {*file-import-12-01-02}, Pages = {139--157}, Posted-At = {2012-01-02 08:15:27}, Priority = {0}, Title = {{Detection As Multi-Topic Tracking}}, Volume = {5}, Year = {2002}} @inproceedings{Allan+98, Author = {Allan, J. and Carbonell, J. and Doddington, G. and Yamron, J. and Yang, Y.}, Booktitle = {Proceedings of the DARPA Broadcast News Transcription and Understanding Workshop}, Citeulike-Article-Id = {10191183}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Keywords = {*file-import-12-01-02}, Pages = {194--218}, Posted-At = {2012-01-02 08:15:27}, Priority = {0}, Title = {{Topic Detection and Tracking Pilot Study: Final Report}}, Year = {1998}} @inproceedings{Yamron+98, Address = {Seattle, USA}, Author = {Yamron, J. and Carp, I. and Gillick, L. and Lowe, S. and van Mulbregt, P.}, Booktitle = {Proceedings of the IEEE Conference on Acoustics, Speech and Signal Processing '98}, Citeulike-Article-Id = {10191182}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Keywords = {*file-import-12-01-02}, Pages = {333--336}, Posted-At = {2012-01-02 08:15:27}, Priority = {0}, Title = {{A hidden Markov model approach to text segmentation and event tracking}}, Year = {1998}} @inproceedings{Carbonell+99, Author = {Carbonell, J. and Yang, Y. 
and Lafferty, J.}, Booktitle = {Proceedings of the DARPA Broadcast News Transcription and Understanding Workshop}, Citeulike-Article-Id = {10191181}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Keywords = {*file-import-12-01-02}, Pages = {117--120}, Posted-At = {2012-01-02 08:15:27}, Priority = {0}, Title = {{CMU Report on TDT-2: Segmentation, Detection and Tracking}}, Year = {1999}} @inproceedings{Palmer+04, Author = {Palmer, D. D. and Reichman, M. and Yaich, E.}, Booktitle = {Human Language Technology / North American chapter of the Association for Computational Linguistics 2004: Short Papers}, Citeulike-Article-Id = {10191180}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Keywords = {*file-import-12-01-02}, Pages = {89--92}, Posted-At = {2012-01-02 08:15:26}, Priority = {0}, Title = {{Feature Selection for Trainable Multilingual Broadcast News Segmentation}}, Year = {2004}} @inproceedings{Utiyama+01, Author = {Utiyama, M. and Isahara, H.}, Booktitle = {Proceedings of the ACL}, Citeulike-Article-Id = {10191179}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Keywords = {*file-import-12-01-02}, Pages = {499--506}, Posted-At = {2012-01-02 08:15:26}, Priority = {0}, Title = {{A Statistical Model for Domain-Independent Text Segmentation}}, Year = {2001}} @inproceedings{Misra+09, Address = {Hong Kong, China}, Author = {Misra, H. and Yvon, F. and Jose, J. M. and Capp\'{e}, O.}, Booktitle = {Proceedings of CIKM}, Citeulike-Article-Id = {10191178}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Keywords = {*file-import-12-01-02}, Pages = {1553--1556}, Posted-At = {2012-01-02 08:15:26}, Priority = {0}, Title = {{Text Segmentation via Topic Modeling: An Analytical Study}}, Year = {2009}} @article{Fragkou+04, Author = {Fragkou, P. and Petridis, V. and Kehagias, A. T. 
H.}, Citeulike-Article-Id = {10191177}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Journal = {Journal of Intelligent Information Systems}, Keywords = {*file-import-12-01-02}, Number = {2}, Pages = {179--197}, Posted-At = {2012-01-02 08:15:26}, Priority = {0}, Title = {{A Dynamic Programming Algorithm for Linear Text Segmentation}}, Volume = {23}, Year = {2004}} @inproceedings{Eisenstein+08, Author = {Eisenstein, J. and Barzilay, R.}, Booktitle = {Empirical Methods in Natural Language Processing (EMNLP)}, Citeulike-Article-Id = {10191176}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Keywords = {*file-import-12-01-02}, Pages = {334--343}, Posted-At = {2012-01-02 08:15:26}, Priority = {0}, Title = {{Bayesian Unsupervised Topic Segmentation}}, Year = {2008}} @book{Morville05, Author = {Morville, P.}, Citeulike-Article-Id = {10191175}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-06-06 06:43:32 +0000}, Keywords = {*file-import-12-01-02}, Posted-At = {2012-01-02 08:15:26}, Priority = {0}, Publisher = {O'{R}eilly}, Title = {Ambient {F}indability}, Year = {2005}} @mastersthesis{Kunneman05, Author = {F. Kunneman}, Citeulike-Article-Id = {10191174}, Date-Added = {2012-06-06 06:43:32 +0000}, Date-Modified = {2012-10-28 10:15:53 +0000}, Keywords = {*file-import-12-01-02}, Posted-At = {2012-01-02 08:15:26}, Priority = {0}, Title = {De toekomst van het {W}eb, een toekomst visie op de integratie van {W}eb 2.0 en het {S}emantic {W}eb}, Year = {2010}} @inproceedings{Smordal+12, Address = {Amsterdam, The Netherlands}, Author = {G. Sm{\o}rdal Losnegaard and G. Inger Lyse}, Booktitle = {Exploring Newspaper Language: Using the web to create and investigate a large corpus of modern Norwegian}, Date-Added = {2012-05-28 19:22:35 +0000}, Date-Modified = {2012-05-28 19:32:34 +0000}, Editor = {G. 
Andersen}, Keywords = {TiMBL, anglicisms}, Pages = {131--154}, Publisher = {John Benjamins}, Title = {A data-driven approach to anglicism identification in {N}orwegian}, Year = {2012}} @inproceedings{Dale+12, Address = {Montreal, Canada}, Author = {R. Dale and I. Anisimoff and G. Narroway}, Booktitle = {Proceedings of the Seventh Workshop on Innovative Use of NLP for Building Educational Applications}, Date-Added = {2012-05-04 21:02:43 +0000}, Date-Modified = {2012-05-04 21:04:30 +0000}, Keywords = {spelling correction, grammatical error correction, shared task, hoo}, Title = {{HOO 2012}: A Report on the Preposition and Determiner Error Correction Shared Task}, Year = {2012}} @misc{Brants+06, Author = {T. Brants and A. Franz}, Date-Added = {2012-05-04 20:10:58 +0000}, Date-Modified = {2012-05-04 21:16:54 +0000}, Publisher = {Linguistic Data Consortium, Philadelphia}, Title = {{LDC2006T13: Web 1T 5-gram Version 1}}, Year = {2006}} @inproceedings{Oostdijk00, Author = {N. Oostdijk}, Booktitle = {Proceedings of LREC-2000, Athens}, Date-Added = {2012-03-27 18:43:06 +0000}, Date-Modified = {2012-03-27 18:43:33 +0000}, Pages = {887--894}, Title = {{The spoken Dutch corpus: overview and first evaluation}}, Volume = {2}, Year = {2000}} @article{MacKenzie+02, Date-Added = {2012-01-15 16:56:35 +0000}, Date-Modified = {2012-01-15 16:56:54 +0000}} @article{VandenBosch11, Author = {A. {Van den Bosch}}, Date-Added = {2012-01-15 15:58:00 +0000}, Date-Modified = {2012-10-28 10:34:14 +0000}, Journal = {Computational Linguistics in the Netherlands Journal}, Keywords = {vici, ilk, word completion, predictive text entry}, Pages = {79--94}, Title = {Effects of context and recency in scaled word completion}, Volume = {1}, Year = {2011}} @book{Sporleder+11, Address = {Berlin}, Date-Added = {2011-11-08 12:12:04 +0000}, Date-Modified = {2011-11-08 12:13:23 +0000}, Editor = {C. Sporleder and A. {Van den Bosch} and K. 
Zervanou}, Keywords = {hitime, vici, latech, cultural heritage, ilk}, Publisher = {Springer Verlag}, Series = {Theory and Applications of Natural Language Processing: Edited Volumes}, Title = {Language technology for cultural heritage}, Year = {2011}} @book{VandenBosch+11, Address = {Berlin}, Date-Added = {2011-11-08 12:10:42 +0000}, Date-Modified = {2011-11-08 12:11:54 +0000}, Editor = {A. {Van den Bosch} and G. Bouma}, Keywords = {imix, vici, question answering}, Publisher = {Springer Verlag}, Series = {Theory and Applications of Natural Language Processing: Edited Volumes}, Title = {Interactive multi-modal question answering}, Year = {2011}} @incollection{Canisius+11, Author = {S. Canisius and A. {Van den Bosch} and W. Daelemans}, Booktitle = {Interactive multi-modal question answering}, Date-Added = {2011-11-08 12:08:45 +0000}, Date-Modified = {2011-11-08 12:10:29 +0000}, Editor = {A. {Van den Bosch} and G. {Bouma}}, Keywords = {imix, vici, ilk, constraint satisfaction inference, named-entity recognition, information extraction}, Publisher = {Springer Verlag}, Series = {Theory and Applications of Natural Language Processing: Edited Volumes}, Title = {Constraint satisfaction inference for entity recognition}, Year = {2011}} @article{Haque+11, Author = {R. Haque and S. Kumar Naskar and A. {Van den Bosch} and A. Way}, Date-Added = {2011-11-08 11:42:40 +0000}, Date-Modified = {2011-11-08 11:45:03 +0000}, Journal = {Machine Translation}, Keywords = {ilk, vici, mt, machine translation, IGTree}, Month = {September}, Number = {3}, Pages = {239--285}, Title = {Integrating source-language context into phrase-based statistical machine translation}, Volume = {25}, Year = {2011}} @inproceedings{Ordelman+01, Author = {R. Ordelman and A. {Van Hessen} and F. 
{De Jong}}, Booktitle = {Proceedings of Eurospeech 2001}, Date-Added = {2011-10-17 21:26:52 +0000}, Date-Modified = {2011-10-17 21:27:57 +0000}, Title = {Lexicon optimization for Dutch speech recognition in spoken document retrieval}, Year = {2001}} @incollection{Daelemans+10b, Address = {Oxford, UK}, Author = {W. Daelemans and A. {Van den Bosch}}, Booktitle = {Handbook of Computational Linguistics and Natural Language Processing}, Date-Added = {2011-09-11 14:57:52 +0000}, Date-Modified = {2011-09-11 14:59:49 +0000}, Editor = {A. Clark and C. Fox and S. Lappin}, Keywords = {ilk, vici, memory-based learning, memory-based language processing, computational linguistics}, Pages = {154--179}, Publisher = {Wiley-Blackwell Publishers}, Title = {Memory-based learning}, Year = {2010}, Bdsk-Url-1 = {http://eu.wiley.com/WileyCDA/WileyTitle/productCd-1405155817.html}} @inproceedings{Wubben+10, Address = {Dublin, Ireland}, Author = {S. Wubben and A. {Van den Bosch} and E. Krahmer}, Booktitle = {Proceedings of the 10th International Workshop on Natural Language Generation (INLG 2010)}, Date-Added = {2011-09-11 14:55:20 +0000}, Date-Modified = {2011-09-11 14:57:16 +0000}, Editor = {J. Kelleher and B. Mac Namee and I. {Van der Sluis}}, Keywords = {memphix, vici, ilk, paraphrasing, statistical machine translation}, Pages = {203--207}, Title = {Paraphrase generation as monolingual translation: Data and evaluation}, Year = {2010}, Bdsk-Url-1 = {http://ilk.uvt.nl/~swubben/publications/INLG2010.pdf}} @inproceedings{VandenBosch+10, Address = {Menlo Park, CA}, Author = {A. {Van den Bosch} and P. Nauts and N. Eckhardt}, Booktitle = {Commonsense Knowledge: Papers from the AAAI Fall Symposium}, Date-Added = {2011-09-11 14:49:50 +0000}, Date-Modified = {2011-09-11 14:52:09 +0000}, Editor = {C. Havasi and D. Lenat and B. 
{Van Durme}}, Keywords = {ilk, vici, open mind, common sense, child development}, Pages = {114--119}, Publisher = {AAAI Press}, Title = {A kid's {O}pen {M}ind {C}ommon {S}ense}, Year = {2010}, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/FS10-02-026.pdf}} @inproceedings{Zhekova+11, Address = {Portland, Oregon, USA}, Author = {D. Zhekova and S. K\"{u}bler}, Booktitle = {Proceedings of the Fifteenth Conference on Computational Natural Language Learning: Shared Task}, Date-Added = {2011-09-10 10:16:31 +0000}, Date-Modified = {2011-09-10 10:31:53 +0000}, Keywords = {co-reference resolution}, Month = {June}, Pages = {112--116}, Publisher = {Association for Computational Linguistics}, Title = {UBIU: A Robust System for Resolving Unrestricted Coreference}, Url = {http://www.aclweb.org/anthology/W11-1918}, Year = {2011}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/W11-1918}} @inproceedings{Wunsch+09, Address = {Borovets, Bulgaria}, Author = {H. Wunsch and S. K\"{u}bler and R. Cantrell}, Booktitle = {Proceedings of the International Conference RANLP-2009}, Date-Added = {2011-09-10 10:12:29 +0000}, Date-Modified = {2011-09-10 10:13:09 +0000}, Keywords = {pronoun resolution, co-reference resolution}, Month = {September}, Pages = {478--483}, Publisher = {Association for Computational Linguistics}, Title = {Instance Sampling Methods for Pronoun Resolution}, Url = {http://www.aclweb.org/anthology/R09-1085}, Year = {2009}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/R09-1085}} @inproceedings{Klenner+08, Author = {Klenner, M. and Ailloud, E.}, Booktitle = {Proceedings of the Second Workshop on Anaphora Resolution (WAR II)}, Date-Added = {2011-09-10 10:10:13 +0000}, Date-Modified = {2011-09-10 10:11:05 +0000}, Keywords = {anaphora resolution, co-reference resolution}, Pages = {31--40}, Title = {Enhancing coreference clustering}, Volume = {2}, Year = {2008}} @inproceedings{Bailey+08, Address = {Columbus, Ohio}, Author = {S. Bailey and D. 
Meurers}, Booktitle = {Proceedings of the Third Workshop on Innovative Use of NLP for Building Educational Applications}, Date-Added = {2011-09-10 10:02:02 +0000}, Date-Modified = {2011-09-10 10:02:44 +0000}, Keywords = {reading comprehension}, Month = {June}, Pages = {107--115}, Publisher = {Association for Computational Linguistics}, Title = {Diagnosing Meaning Errors in Short Answers to Reading Comprehension Questions}, Url = {http://www.aclweb.org/anthology/W/W08/W08-0913}, Year = {2008}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/W/W08/W08-0913}} @inproceedings{Haque+10, Abstract = {Rejwanul Haque, Sudip Kumar Naskar, Antal van den Bosch and Andy Way. 2010. Supertags as Source Language Context in Hierarchical Phrase-Based SMT. In AMTA 2010: The Ninth Conference of the Association for Machine Translation in the Americas, Proceedings, Denver, CO., pp. 210--219.}, Address = {Denver, CO.}, Author = {R. Haque and S. Kumar Naskar and A. {Van den Bosch} and A. Way}, Booktitle = {AMTA 2010: The Ninth Conference of the Association for Machine Translation in the Americas, Proceedings}, Date-Added = {2011-09-10 09:55:25 +0000}, Date-Modified = {2012-10-28 10:34:47 +0000}, Keywords = {ilk, vici, supertagging, statistical machine translation, igtree}, Pages = {210--219}, Title = {Supertags as source language context in hierarchical phrase-based {SMT}}, Year = {2010}} @inproceedings{Wubben10, Address = {Uppsala, Sweden}, Author = {S. Wubben}, Booktitle = {Proceedings of the 5th International Workshop on Semantic Evaluation}, Date-Added = {2011-09-10 09:47:11 +0000}, Date-Modified = {2011-09-10 09:48:48 +0000}, Keywords = {ilk, paraphrasing, memphix}, Pages = {260--263}, Title = {UvT: Memory-based pairwise ranking of paraphrasing verbs}, Year = {2010}} @inproceedings{VandeCamp+11, Address = {Portland, Oregon}, Author = {M. {Van de Camp} and A. 
{Van den Bosch}}, Booktitle = {Proceedings of the 2nd Workshop on Computational Approaches to Subjectivity and Sentiment Analysis (WASSA 2.011)}, Date-Added = {2011-06-28 10:50:51 +0200}, Date-Modified = {2011-06-28 10:51:19 +0200}, Month = {June}, Pages = {61--69}, Publisher = {Association for Computational Linguistics}, Title = {A Link to the Past: Constructing Historical Social Networks}, Url = {http://www.aclweb.org/anthology/W11-1708}, Year = {2011}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/W11-1708}} @inproceedings{Wubben+11b, Address = {Portland, Oregon}, Author = {S. Wubben and E. Marsi and A. {Van den Bosch} and E. Krahmer}, Booktitle = {Proceedings of the Workshop on Monolingual Text-To-Text Generation}, Date-Added = {2011-06-28 10:50:02 +0200}, Date-Modified = {2011-09-11 14:54:06 +0000}, Keywords = {ilk, vici, memphix, paraphrasing, DAESO}, Month = {June}, Pages = {27--33}, Publisher = {Association for Computational Linguistics}, Title = {Comparing Phrase-based and Syntax-based Paraphrase Generation}, Url = {http://www.aclweb.org/anthology/W11-1604}, Year = {2011}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/W11-1604}} @inproceedings{Zervanou+11, Address = {Portland, OR, USA}, Author = {K. Zervanou and I. Korkontzelos and A. {Van den Bosch} and S. Ananiadou}, Booktitle = {Proceedings of the 5th ACL-HLT Workshop on Language Technology for Cultural Heritage, Social Sciences, and Humanities}, Date-Added = {2011-06-28 10:49:27 +0200}, Date-Modified = {2011-06-28 10:52:19 +0200}, Month = {June}, Pages = {44--53}, Publisher = {Association for Computational Linguistics}, Title = {Enrichment and Structuring of Archival Description Metadata}, Url = {http://www.aclweb.org/anthology/W11-1507}, Year = {2011}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/W11-1507}} @article{Morris+92, Author = {C. Morris and A. Newell and L. Booth and I. Ricketts and J. 
Arnott}, Date-Added = {2011-06-23 17:22:52 +0200}, Date-Modified = {2011-06-23 17:41:33 +0200}, Journal = {Assistive Technology}, Pages = {51--59}, Title = {Syntax {PAL}: A system to improve the written syntax of language-impaired users}, Volume = {4}, Year = {1992}} @article{McNemar47, Abstract = {{Abstract Two formulas are presented for judging the significance of the difference between correlated proportions. The chi square equivalent of one of the developed formulas is pointed out.}}, Author = {McNemar, Quinn}, Citeulike-Article-Id = {6015799}, Citeulike-Linkout-0 = {http://dx.doi.org/10.1007/BF02295996}, Citeulike-Linkout-1 = {http://www.springerlink.com/content/843g84t135765212}, Day = {18}, Doi = {10.1007/BF02295996}, Issn = {0033-3123}, Journal = {Psychometrika}, Keywords = {phdbib, seminal, statistics}, Month = jun, Number = {2}, Pages = {153--157}, Posted-At = {2009-10-27 13:33:05}, Priority = {2}, Publisher = {Springer New York}, Title = {{Note on the Sampling Error of the Difference Between Correlated Proportions or Percentages}}, Url = {http://dx.doi.org/10.1007/BF02295996}, Volume = {12}, Year = {1947}, Bdsk-Url-1 = {http://dx.doi.org/10.1007/BF02295996}} @article{Horstmann-Koester+96, Author = {H. {Horstmann Koester} and S. Levine}, Date-Added = {2011-06-23 17:20:31 +0200}, Date-Modified = {2011-06-23 18:20:06 +0200}, Journal = {Augmentative and Alternative Communication}, Keywords = {word completion, word prediction}, Pages = {155--168}, Title = {Effect of a word prediction feature on user performance}, Volume = {12}, Year = {1996}} @inproceedings{Lesher+99, Author = {G. W. Lesher and B. J. Moulton and D. J. Higginbotham}, Booktitle = {Proceedings of the Annual Conference of the RESNA}, Date-Added = {2011-06-23 16:48:43 +0200}, Date-Modified = {2011-06-23 17:18:27 +0200}, Keywords = {word completion, word prediction}, Title = {Effects of ngram order and training text size on word prediction}, Year = {1999}} @inproceedings{Bilmes+03, Author = {J.A. 
Bilmes and K. Kirchhoff}, Booktitle = {Proceedings of HLT/NAACL, 2003}, Date-Added = {2011-05-16 11:04:52 +0200}, Date-Modified = {2011-06-19 16:09:30 +0200}, Title = {Factored language models and generalized parallel backoff}, Year = {2003}} @book{Hale+02, Address = {Cambridge, MA}, Author = {K. Hale and J. Keyser}, Date-Added = {2011-05-09 22:03:56 +0200}, Date-Modified = {2011-05-09 22:04:07 +0200}, Publisher = {MIT Press}, Title = {Prolegomenon to a theory of argument structure}, Year = {2002}} @book{Randall09, Address = {Amsterdam}, Author = {J.H. Randall}, Date-Added = {2011-05-09 22:03:36 +0200}, Date-Modified = {2011-05-09 22:03:36 +0200}, Publisher = {Springer}, Title = {Linking: The geometry of argument structure}, Year = {2009}} @incollection{Hale+93, Address = {Cambridge, MA}, Author = {K. Hale and J. Keyser}, Booktitle = {The View from Building 20}, Date-Added = {2011-05-09 22:03:02 +0200}, Date-Modified = {2011-05-09 22:03:15 +0200}, Editor = {K. Hale and J. Keyser}, Pages = {53--110}, Publisher = {MIT Press}, Title = {On argument structure and the lexical representation of syntactic relations}, Year = {1993}} @book{Levin+95, Address = {Cambridge, MA}, Author = {B. Levin and M. Rappaport}, Date-Added = {2011-05-09 22:02:28 +0200}, Date-Modified = {2011-05-09 22:02:37 +0200}, Publisher = {MIT Press}, Title = {Unaccusativity}, Year = {1995}} @inproceedings{Roth+09, Address = {Clearwater Beach, Florida}, Author = {D. Roth and K. Small and I. Titov}, Booktitle = {Proceedings of AISTATS}, Date-Added = {2011-05-09 22:01:45 +0200}, Date-Modified = {2011-05-09 22:02:16 +0200}, Title = {Sequential learning of classifiers for structured prediction patterns}, Year = {2009}} @inproceedings{Merlo+08, Address = {Manchester}, Author = {P. Merlo and G. 
Musillo}, Booktitle = {Proceedings of the Twelfth Conference on Computational Natural Language Learning, CoNLL-2008}, Date-Added = {2011-05-09 22:00:04 +0200}, Date-Modified = {2011-05-09 22:01:31 +0200}, Keywords = {semantic role labeling}, Pages = {1--8}, Title = {Semantic parsing for high precision semantic role labelling}, Year = {2008}} @inproceedings{Musillo+06, Address = {New York}, Author = {G. Musillo and P. Merlo}, Booktitle = {Proceedings of the Human Language Technology Conference of the North American Chapter of the {ACL}}, Date-Added = {2011-05-09 21:59:30 +0200}, Date-Modified = {2011-05-09 21:59:40 +0200}, Pages = {101--104}, Title = {Accurate parsing of the {Proposition Bank}}, Year = {2006}} @book{Levin+05, Address = {Cambridge, UK}, Author = {B. Levin and M. Rappaport}, Date-Added = {2011-05-09 21:58:14 +0200}, Date-Modified = {2011-05-09 21:58:47 +0200}, Publisher = {Cambridge University Press}, Title = {Argument realization}, Year = {2005}} @inproceedings{Nivre+06, Author = {J. Nivre and J. Hall and J. Nilsson}, Booktitle = {Proceedings of LREC-2006}, Date-Added = {2011-05-08 15:54:25 +0200}, Date-Modified = {2011-06-19 23:01:47 +0200}, Pages = {2216--2219}, Title = {MaltParser: A data-driven parser-generator for dependency parsing}, Year = {2006}} @phdthesis{Seginer07b, Author = {Y. Seginer}, Date-Added = {2011-05-08 15:20:15 +0200}, Date-Modified = {2011-05-08 15:20:57 +0200}, School = {University of Amsterdam}, Title = {Learning Syntactic Structure}, Year = {2007}} @phdthesis{VanErp10, Author = {M. {Van Erp}}, Date-Added = {2011-04-25 23:09:17 +0200}, Date-Modified = {2011-04-25 23:12:02 +0200}, Keywords = {ilk, mitch, catch, natural history, ontology learning}, School = {Tilburg University}, Title = {Accessing Natural History: Discoveries in Data Cleaning, Structuring, and Retrieval}, Year = {2010}, Bdsk-Url-1 = {http://www.cs.vu.nl/~marieke/thesis/MvanErpPhDThesis.pdf}} @book{grunwald07, Address = {US}, Author = {P. D. 
Gr\"unwald}, Date-Added = {2011-04-18 11:27:38 +0200}, Date-Modified = {2011-06-19 16:28:56 +0200}, Publisher = {{MIT} Press}, Title = {The Minimum Description Length Principle}, Year = {2007}} @article{Yamamoto+01, Author = {M. Yamamoto and K.W. Church}, Date-Added = {2011-04-12 22:03:48 +0200}, Date-Modified = {2011-04-12 22:05:05 +0200}, Journal = {Computational Linguistics}, Pages = {28--37}, Title = {Using suffix arrays to compute term frequency and document frequency for all substrings in a corpus}, Volume = {27}, Year = {2001}} @inproceedings{Biemann06, Acmid = {1557859}, Address = {Stroudsburg, PA, USA}, Author = {Biemann, Chris}, Booktitle = {Proceedings of the 21st International Conference on computational Linguistics and 44th Annual Meeting of the Association for Computational Linguistics: Student Research Workshop}, Date-Added = {2011-04-10 23:46:15 +0200}, Date-Modified = {2011-04-10 23:46:33 +0200}, Location = {Sydney, Australia}, Numpages = {6}, Pages = {7--12}, Publisher = {Association for Computational Linguistics}, Series = {COLING ACL '06}, Title = {Unsupervised part-of-speech tagging employing efficient graph clustering}, Url = {http://portal.acm.org/citation.cfm?id=1557856.1557859}, Year = {2006}, Bdsk-Url-1 = {http://portal.acm.org/citation.cfm?id=1557856.1557859}} @book{Huang92, Author = {Xuedong Huang and Fileno Alleva and Hsiao-Wuen Hon and Mei-Yuh Hwang and Ronald Rosenfeld}, Date-Added = {2011-04-10 11:53:06 +0200}, Date-Modified = {2011-04-10 13:59:32 +0200}, Publisher = {Pittsburgh, Pa. : School of Computer Science, Carnegie Mellon University}, Title = {The SPHINX-II speech recognition system : an overview}, Year = {1992}} @article{Jelinek77, Author = {F. Jelinek and R. L. Mercer and L. R. Bahl and J. K. 
Baker}, Date-Added = {2011-04-07 14:02:04 +0200}, Date-Modified = {2011-04-07 14:07:50 +0200}, Journal = {The Journal of the Acoustical Society of America}, Number = {S1}, Pages = {S63-S63}, Title = {Perplexity---a measure of the difficulty of speech recognition tasks}, Volume = {62}, Year = {1977}} @inproceedings{Church00, Author = {K. W. Church}, Booktitle = {Proceedings of the 18th Conference on Computational Linguistics}, Date-Added = {2011-03-28 20:55:49 +0200}, Date-Modified = {2011-03-28 20:57:16 +0200}, Keywords = {word frequency, Poisson}, Pages = {180--186}, Title = {Empirical estimates of adaptation: The chance of two {N}oriegas is closer to $p/2$ than $p^2$}, Volume = {1}, Year = {2000}} @article{Azmi+10, Author = {A. M. Azmi and N. bin Badia}, Date-Added = {2011-03-27 18:09:16 +0200}, Date-Modified = {2011-03-27 18:11:25 +0200}, Journal = {The Arabian Journal for Science and Engineering}, Keywords = {shallow parsing, Arabic, memory-based learning}, Number = {2C}, Title = {{e-Narrator} -- An application for creating an ontology of {\it Hadiths} narration tree semantically and graphically}, Volume = {35}, Year = {2010}} @inproceedings{Fazly+03, Author = {A. Fazly and G. Hirst}, Booktitle = {Proceedings of the 2003 EACL Workshop on Language Modeling for Text Entry Methods}, Date-Added = {2011-03-26 23:59:27 +0100}, Date-Modified = {2011-03-27 00:01:49 +0100}, Keywords = {word completion, text completion}, Pages = {9--16}, Title = {Testing the efficacy of part-of-speech information in word completion}, Year = {2003}} @article{Trost+05, Author = {H. Trost and J. Matiasek and M. 
Baroni}, Date-Added = {2011-03-26 23:57:45 +0100}, Date-Modified = {2011-03-26 23:59:06 +0100}, Journal = {Applied Artificial Intelligence}, Keywords = {text completion, word completion}, Number = {8}, Pages = {743--781}, Title = {The Language Component of the {FASTY} Text Prediction System}, Volume = {19}, Year = {2005}} @inproceedings{Matiasek+03, Address = {Budapest, Hungary}, Author = {J. Matiasek and M. Baroni}, Booktitle = {Proceedings of the EACL Workshop on Language Modeling for Text Entry Methods}, Date-Added = {2011-03-26 23:52:49 +0100}, Date-Modified = {2011-03-27 00:02:40 +0100}, Keywords = {text completion, word completion}, Pages = {1--8}, Title = {Exploiting Long Distance Collocational Relations in Predictive Typing}, Year = {2003}} @incollection{Matiasek+02, Address = {Berlin, Germany}, Author = {J. Matiasek and M. Baroni and H. Trost}, Booktitle = {Computers Helping People With Special Needs}, Date-Added = {2011-03-26 23:49:58 +0100}, Date-Modified = {2011-03-26 23:52:36 +0100}, Keywords = {word prediction, word completion, augmentative technology, text completion}, Pages = {165--176}, Publisher = {Springer Verlag}, Title = {{FASTY}: A multi-lingual approach to text prediction}, Year = {2002}} @phdthesis{Palazuelos01, Address = {Madrid, Spain}, Author = {S. E. Palazuelos Cagigas}, Date-Added = {2011-03-26 23:47:54 +0100}, Date-Modified = {2011-03-27 22:32:27 +0200}, Keywords = {word completion, word prediction, augmentative technology}, School = {Universidad Politechnica de Madrid}, Title = {Contribution to Word Prediction in Spanish and its Integration in Technical Aids for People With Physical Disabilities}, Year = {2001}} @inproceedings{Shein+01, Author = {F. Shein and T. Nantais and R. Nishiyama and C. Tam and P. 
Marshall}, Booktitle = {Proceedings of CSUN 16th Annual Conference on Technology for Persons with Disabilities}, Date-Added = {2011-03-26 23:46:31 +0100}, Date-Modified = {2011-03-26 23:47:44 +0100}, Keywords = {word completion, text completion, augmentative technology}, Title = {Word cueing for persons with writing difficulties: WordQ}, Year = {2001}} @inproceedings{Nantais+01, Author = {T. Nantais and F. Shein and M. Johansson}, Booktitle = {Proceedings of the 24th Annual Conference on Technology and Disability}, Date-Added = {2011-03-26 23:44:12 +0100}, Date-Modified = {2011-03-26 23:46:22 +0100}, Keywords = {word completion, text completion, augmentative technology}, Publisher = {RESNA}, Title = {Efficacy of the word prediction algorithm in WordQ}, Year = {2001}} @inproceedings{Copestake97, Address = {Madrid, Spain}, Author = {A. Copestake}, Booktitle = {Proceedings of the ACL workshop on Natural Language Processing for Communication Aids}, Date-Added = {2011-03-26 23:42:06 +0100}, Date-Modified = {2011-03-26 23:44:05 +0100}, Keywords = {word completion, text completion, augmentative technology}, Pages = {37--42}, Title = {Augmented and alternative NLP techniques for augmentative and alternative communication}, Year = {1997}} @inproceedings{Carlberger+97b, Author = {A. Carlberger and T. Magnuson and J. Carlberger and H. Wachtmeister and S. Hunnicutt}, Booktitle = {Proceedings of the Fonetik '97 Conference}, Date-Added = {2011-03-26 23:40:12 +0100}, Date-Modified = {2011-03-26 23:41:44 +0100}, Keywords = {word completion, word prediction, text completion, augmentative technology}, Pages = {17--20}, Title = {Probability-based word prediction for writing support in dyslexia}, Volume = {4}, Year = {1997}} @inproceedings{Carlberger+97, Address = {Madrid, Spain}, Author = {A. Carlberger and J. Carlberger and T. Magnuson and S. Hunnicutt and S. E. Palazuelos Cagigas and S. 
Aguilera Navarro}, Booktitle = {ACL Workshop on Natural Language Processing for Communication Aids}, Date-Added = {2011-03-26 23:37:55 +0100}, Date-Modified = {2011-03-26 23:48:47 +0100}, Keywords = {augmentative technology, word completion, text completion}, Pages = {23--28}, Title = {Profet, a new generation of word prediction: An evaluation study}, Year = {1997}} @inproceedings{Hermens+93, Address = {Orlando, FA}, Author = {L. A. Hermens and J. C. Schlimmer}, Booktitle = {Proceedings of the Ninth Conference on Artificial Intelligence for Applications}, Date-Added = {2011-03-26 23:35:23 +0100}, Date-Modified = {2011-03-26 23:36:58 +0100}, Keywords = {form completion, word completion}, Pages = {164--170}, Title = {A machine-learning apprentice for the completion of repetitive forms}, Year = {1993}} @techreport{VanDyke91, Author = {J. A. VanDyke}, Date-Added = {2011-03-26 23:33:48 +0100}, Date-Modified = {2011-03-26 23:35:21 +0100}, Institution = {Dept. of Computer and Information Sciences, University of Delaware}, Keywords = {text completion, word completion, augmentative technology}, Number = {92-03}, Title = {A syntactic predictor to enhance communication for disabled users}, Year = {1991}} @inproceedings{Bentrup87, Author = {J. A. Bentrup}, Booktitle = {Proceedings of the 10th Annual Conference on Rehabilitation Technology}, Date-Added = {2011-03-26 23:31:57 +0100}, Date-Modified = {2011-03-27 22:48:47 +0200}, Keywords = {word completion, rehabilitation technology, augmentative technology}, Pages = {121--123}, Publisher = {RESNA}, Title = {Exploiting word frequencies and their sequential dependencies.}, Year = {1987}} @inproceedings{Swiffin+85, Author = {A. L. Swiffin and J. A. Pickering and J. L. Arnott and A. F. 
Newell}, Booktitle = {Proceedings of the 8th Annual Conference on Rehabilitation Technology}, Date-Added = {2011-03-26 23:30:03 +0100}, Date-Modified = {2011-06-23 17:41:45 +0200}, Keywords = {word completion, rehabilitation technology, augmentative technology}, Pages = {197--199}, Publisher = {RESNA}, Title = {{PAL}: An effort efficient portable communication aid and keyboard emulator}, Year = {1985}} @article{Good66, Author = {I. J. Good}, Date-Added = {2011-03-24 13:28:32 +0100}, Date-Modified = {2011-03-24 13:30:45 +0100}, Journal = {Biometrika}, Month = {August}, Number = {2}, Pages = {393--396}, Title = {Studies in the History of Probability and Statistics. XXXVII A. M. Turing's Statistical Work in World War II}, Volume = 66, Year = 1979} @inproceedings{Clarkson97, Author = {Clarkson, Philip and Rosenfeld, Ronald}, Booktitle = {Proc. of EUROSPEECH'97}, Date-Added = {2011-03-17 11:11:06 +0100}, Date-Modified = {2011-03-17 11:13:00 +0100}, Pages = {2707-2710}, Title = {Statistical language modeling using the CMU-Cambridge toolkit}, Year = {1997}} @techreport{VandenHoven10, Author = {M. {Van den Hoven}}, Date-Added = {2011-03-06 17:58:00 +0100}, Date-Modified = {2011-03-06 17:59:09 +0100}, Institution = {Tilburg University}, Keywords = {ilk, master's thesis, hitime, social history, strikes, text mining}, Number = {10-05}, Title = {Strikes that never happened: Text mining in historical data}, Type = {ILK Research Group Technical Report Series}, Year = {2010}} @techreport{Wubben08, Author = {S. Wubben}, Date-Added = {2011-03-06 17:56:43 +0100}, Date-Modified = {2012-10-28 10:35:45 +0000}, Institution = {Tilburg University}, Keywords = {ilk, vici, open mind common sense, semantic relatedness}, Number = {08-01}, Title = {Using free link structure to calculate semantic relatedness}, Type = {ILK Research Group Technical Report Series}, Year = {2008}} @techreport{Bogers+07b, Author = {T. Bogers and K. 
Balog}, Date-Added = {2011-03-06 17:55:18 +0100}, Date-Modified = {2011-03-06 17:56:30 +0100}, Institution = {Tilburg University}, Keywords = {ilk, apropos, expert search, uvt, tilburg}, Number = {07-06}, Title = {UvT Expert Collection documentation}, Type = {ILK Research Group Technical Report Series}, Year = {2007}} @techreport{Tesink07, Author = {S. Tesink}, Date-Added = {2011-03-06 13:25:09 +0100}, Date-Modified = {2012-10-28 10:35:22 +0000}, Institution = {Tilburg University}, Keywords = {ilk, vici, master's thesis, intrusion detection, data mining}, Number = {07-02}, Title = {Improving intrusion detection systems through machine learning}, Type = {ILK Research Group Technical Report Series}, Year = {2007}} @techreport{Tillemans07, Author = {M. Tillemans}, Date-Added = {2011-03-06 13:23:54 +0100}, Date-Modified = {2012-10-28 10:35:29 +0000}, Institution = {Tilburg University}, Keywords = {ilk, vici, spelling correction, Dutch, master's thesis}, Number = {07-02}, Title = {Diffusing the d/dt disambiguation problem}, Type = {ILK Research Group Technical Report Series}, Year = {2007}} @techreport{Sporleder+06d, Author = {C. Sporleder and M. {Van Erp} and T. Porcelijn and A. {Van den Bosch} and P. Arntzen and E. {Van Nieukerken}}, Date-Added = {2011-03-06 13:21:52 +0100}, Date-Modified = {2011-03-06 13:23:37 +0100}, Institution = {Tilburg University}, Keywords = {ilk, mitch, natural history, reptiles, amphibians, taxonomy}, Number = {06-01}, Title = {Cleaning and enriching research data on reptiles and amphibians: The MITCH pilot project and "nulmeting"}, Type = {ILK Research Group Technical Report Series}, Year = {2006}} @techreport{Divina+06, Author = {F. Divina and P. 
Vogt}, Date-Added = {2011-03-06 13:20:43 +0100}, Date-Modified = {2011-03-06 13:21:30 +0100}, Institution = {Tilburg University}, Keywords = {ilk, language evolution}, Number = {06-02}, Title = {Modelling language evolution in a complex ecological environment}, Type = {ILK Research Group Technical Report Series}, Year = {2006}} @techreport{Morante06, Author = {R. Morante}, Date-Added = {2011-03-06 13:19:23 +0100}, Date-Modified = {2011-03-06 13:20:22 +0100}, Institution = {Tilburg University}, Keywords = {ilk, semantic role labeling}, Number = {06-03}, Title = {Semantic role annotation in the Cast3LB-CoNLL-SemRol corpus}, Type = {ILK Research Group Technical Report Series}, Year = {2006}} @techreport{Geertzen03, Author = {J. Geertzen}, Date-Added = {2011-03-06 13:15:32 +0100}, Date-Modified = {2011-03-06 13:18:55 +0100}, Institution = {Tilburg University}, Keywords = {ilk, suffix trees}, Number = {03-11}, Title = {String alignment in grammatical induction: What suffix trees can do}, Type = {ILK Research Group Technical Report Series}, Year = {2003}, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/ilk0311.pdf}} @inproceedings{VanErp+10, Address = {Nijmegen, The Netherlands}, Author = {M. {Van Erp} and S. Hunt}, Booktitle = {Proceedings of the 10th Dutch-Belgian Information Retrieval Workshop}, Date-Added = {2011-02-27 12:08:56 +0100}, Date-Modified = {2011-02-27 12:13:17 +0100}, Keywords = {ilk, hitime, mitch, natural history}, Pages = {31--38}, Publisher = {Radboud University Nijmegen}, Title = {Knowledge-driven information retrieval for natural history}, Year = {2010}, Bdsk-Url-1 = {http://www.ru.nl/publish/pages/544689/proceedings_dir2010.pdf}} @inproceedings{Zervanou10, Address = {Uppsala, Sweden}, Author = {K. 
Zervanou}, Booktitle = {Proceedings of the 5th International Workshop on Semantic Evaluation}, Date-Added = {2011-02-27 11:22:17 +0100}, Date-Modified = {2011-02-27 11:23:15 +0100}, Keywords = {ilk, hitime, term extraction, keyphrase extraction}, Month = {July}, Pages = {194--197}, Publisher = {Association for Computational Linguistics}, Title = {UvT: The UvT Term Extraction System in the Keyphrase Extraction Task}, Url = {http://www.aclweb.org/anthology/S10-1042}, Year = {2010}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/S10-1042}} @inproceedings{headdeniii+09, Address = {Boulder, Colorado}, Author = {Headden III, William P. and Johnson, Mark and McClosky, David}, Booktitle = {Proceedings of Human Language Technologies: The 2009 Annual Conference of the North American Chapter of the Association for Computational Linguistics}, Date-Added = {2011-02-20 15:44:06 +0100}, Date-Modified = {2011-02-20 15:44:22 +0100}, Month = {June}, Pages = {101--109}, Publisher = {Association for Computational Linguistics}, Title = {Improving Unsupervised Dependency Parsing with Richer Contexts and Smoothing}, Url = {http://www.aclweb.org/anthology/N/N09/N09-1012}, Year = {2009}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/N/N09/N09-1012}} @inproceedings{Gao+03, Acmid = {1075162}, Address = {Stroudsburg, PA, USA}, Author = {Gao, J. 
and Suzuki, H.}, Booktitle = {Proceedings of the 41st Annual Meeting on Association for Computational Linguistics - Volume 1}, Date-Added = {2011-02-20 15:34:39 +0100}, Date-Modified = {2011-02-20 15:34:53 +0100}, Doi = {http://dx.doi.org/10.3115/1075096.1075162}, Location = {Sapporo, Japan}, Numpages = {8}, Pages = {521--528}, Publisher = {Association for Computational Linguistics}, Series = {ACL '03}, Title = {Unsupervised learning of dependency structure for language modeling}, Url = {http://dx.doi.org/10.3115/1075096.1075162}, Year = {2003}, Bdsk-Url-1 = {http://dx.doi.org/10.3115/1075096.1075162}} @inproceedings{Spitkovsky+09, Author = {Spitkovsky, V. I. and Alshawi, H. and Jurafsky, D.}, Booktitle = {NIPS: Grammar Induction, Representation of Language and Language Learning}, Date-Added = {2011-02-20 13:36:28 +0100}, Date-Modified = {2011-06-19 23:06:46 +0200}, Pages = {1--10}, Title = {{B}aby {S}teps: How ``{L}ess is {M}ore'' in Unsupervised Dependency Parsing}, Year = {2009}} @inproceedings{Gorla+07, Acmid = {1619931}, Address = {Vancouver, British Columbia, Canada}, Author = {J. Gorla and A. Goyal and R. Sangal}, Booktitle = {Proceedings of the 22nd national conference on Artificial intelligence - Volume 2}, Date-Added = {2011-02-19 21:45:29 +0100}, Date-Modified = {2011-07-02 22:20:50 +0200}, Isbn = {978-1-57735-323-2}, Location = {Vancouver, British Columbia, Canada}, Numpages = {2}, Pages = {1860--1861}, Publisher = {AAAI Press}, Title = {Two approaches for building an unsupervised dependency parser and their other applications}, Url = {http://portal.acm.org/citation.cfm?id=1619797.1619931}, Year = {2007}, Bdsk-Url-1 = {http://portal.acm.org/citation.cfm?id=1619797.1619931}} @inproceedings{Wubben+11, Address = {Utrecht, the Netherlands}, Author = {S. Wubben and A. {Van den Bosch} and E. 
Krahmer}, Booktitle = {Computational Linguistics in the Netherlands 2010: Selected Papers from the Twentieth CLIN Meeting}, Date-Added = {2011-02-19 13:16:29 +0100}, Date-Modified = {2011-02-19 13:18:29 +0100}, Editor = {T. Markus and P. Monachesi and E. Westerhout}, Keywords = {ilk, memphix, vici, paraphrasing, headlines, clin}, Pages = {169--183}, Publisher = {LOT}, Title = {Paraphrasing headlines by machine translation}, Year = {2011}} @inproceedings{VanGompel+11, Address = {Utrecht, the Netherlands}, Author = {M. {Van Gompel} and A. {Van den Bosch} and P. Berck}, Booktitle = {Computational Linguistics in the Netherlands 2010: Selected Papers from the Twentieth CLIN Meeting}, Date-Added = {2011-02-19 13:13:10 +0100}, Date-Modified = {2011-09-11 14:54:36 +0000}, Editor = {T. Markus and P. Monachesi and E. Westerhout}, Keywords = {vici, pbmbmt, mbmt, clin, ilk}, Pages = {45--58}, Publisher = {LOT}, Title = {Extending memory-based machine translation to phrases}, Year = {2011}} @article{Bogers+11, Author = {T. Bogers and A. {Van den Bosch}}, Date-Added = {2011-02-09 09:35:19 +0100}, Date-Modified = {2013-08-07 07:50:07 +0000}, Journal = {International Journal of Electronic Commerce}, Keywords = {apropos, ilk, recommender systems, vici, social bookmarking, web 2.0}, Month = {Spring}, Number = {3}, Pages = {33-75}, Title = {Fusing Recommendations for Social Bookmarking Websites}, Volume = {15}, Year = {2011}} @inproceedings{Luyckx+08, Address = {Manchester, UK}, Author = {K. Luyckx and W. 
Daelemans}, Booktitle = {Proceedings of the Twenty-Second International Conference on Computational Linguistics (COLING 2008)}, Date-Added = {2011-02-07 21:09:28 +0100}, Date-Modified = {2012-01-15 20:23:08 +0000}, Keywords = {authorship attribution, authorship verification, TiMBL, clips}, Pages = {513--520}, Publisher = {Coling 2008 Organizing Committee}, Title = {Authorship Attribution and Verification with Many Authors and Limited Data}, Year = {2008}} @inproceedings{Morante+09d, Author = {Morante, R. and Van Asch, V. and Daelemans, W.}, Booktitle = {Proceedings of the Workshop on BioNLP: Shared Task}, Date-Added = {2011-02-07 21:03:30 +0100}, Date-Modified = {2011-02-07 21:04:12 +0100}, Keywords = {TiMBL, event extraction}, Organization = {Association for Computational Linguistics}, Pages = {59--67}, Title = {{A memory-based learning approach to event extraction in biomedical texts}}, Year = {2009}} @inproceedings{Dickinson09, Author = {Dickinson, M.}, Booktitle = {Proceedings of the 12th Conference of the European Chapter of the Association for Computational Linguistics}, Date-Added = {2011-02-07 21:00:08 +0100}, Date-Modified = {2011-02-07 21:00:39 +0100}, Keywords = {TiMBL, error correction, dependency parsing}, Organization = {Association for Computational Linguistics}, Pages = {193--201}, Title = {{Correcting dependency annotation errors}}, Year = {2009}} @conference{Kestemont+09, Author = {M. Kestemont and K. {Van Dalen-Oskam}}, Booktitle = {Proceedings of the Twenty-first Benelux Conference on Artificial Intelligence (BNAIC 2009)}, Date-Added = {2011-02-07 20:52:54 +0100}, Date-Modified = {2015-05-31 13:04:40 +0000}, Issn = {1568-7805}, Keywords = {TiMBL, authorship attribution, author discrimination}, Pages = {121--128}, Title = {{Predicting the past: memory-based copyist and author discrimination in medieval epics}}, Volume = {21}, Year = {2009}} @inproceedings{Hendrickx+08, Author = {Hendrickx, I. and Daelemans, W. and Luyckx, K. and Morante, R. 
and Van Asch, V.}, Booktitle = {Proceedings of the Fifth International Natural Language Generation Conference}, Date-Added = {2011-02-07 20:44:56 +0100}, Date-Modified = {2011-02-07 20:45:35 +0100}, Keywords = {TiMBL, referential expressions, natural language generation}, Organization = {Association for Computational Linguistics}, Pages = {194--195}, Title = {{CNTS: Memory-based learning of generating repeated references}}, Year = {2008}} @inproceedings{Max09, Author = {Max, A.}, Booktitle = {Proceedings of the 2009 Workshop on Applied Textual Inference}, Date-Added = {2011-02-07 20:41:13 +0100}, Date-Modified = {2011-02-07 20:42:03 +0100}, Keywords = {TiMBL, paraphrasing, pivot translation}, Organization = {Association for Computational Linguistics}, Pages = {18--26}, Title = {{Sub-sentential paraphrasing by contextual pivot translation}}, Year = {2009}} @conference{Morante+09c, Author = {Morante, R. and Daelemans, W.}, Booktitle = {Proceedings of the Workshop on BioNLP}, Date-Added = {2011-02-07 19:43:16 +0100}, Date-Modified = {2011-02-07 19:43:51 +0100}, Keywords = {hedging, TiMBL}, Organization = {Association for Computational Linguistics}, Pages = {28--36}, Title = {{Learning the scope of hedge cues in biomedical texts}}, Year = {2009}} @article{Plag+07, Author = {Plag, I. and Kunter, G. 
and Lappe, S.}, Date-Added = {2011-02-07 19:23:04 +0100}, Date-Modified = {2011-02-07 19:23:44 +0100}, Issn = {1613-7027}, Journal = {Corpus Linguistics and Lingustic Theory}, Keywords = {timbl, analogical modeling, compounding, stress assignment, English}, Number = {2}, Pages = {199--232}, Publisher = {Citeseer}, Title = {{Testing hypotheses about compound stress assignment in English: a corpus-based investigation}}, Volume = {3}, Year = {2007}} @techreport{DeClerq+10, Author = {Orph{\'e}e De Clerq and Martin Reynaert}, Date-Added = {2011-02-03 20:38:45 +0100}, Date-Modified = {2011-02-03 20:42:08 +0100}, Institution = {LT3 Research Group -- Hogeschool Gent}, Keywords = {ilk, lt3, sonar, corpus, IPR}, Number = {LT3 10-02}, Title = {{S}o{N}a{R} {A}cquisition {M}anual Version 1.0}, Url = {http://lt3.hogent.be/en/publications/}, Year = {2010}, Bdsk-Url-1 = {http://lt3.hogent.be/en/publications/}} @inproceedings{Reynaert+10, Address = {Valletta, Malta}, Author = {M. Reynaert and N. Oostdijk and O. {De Clercq} and H. {Van den Heuvel} and F. 
{De Jong}}, Booktitle = {Proceedings of the Seventh conference on International Language Resources and Evaluation (LREC'10)}, Date = {19-21}, Date-Added = {2011-02-03 20:36:39 +0100}, Date-Modified = {2012-11-28 22:30:07 +0000}, Isbn = {2-9517408-6-7}, Keywords = {ilk, corpus, corpus cleanup, IPR, sonar}, Language = {english}, Month = {May}, Publisher = {European Language Resources Association (ELRA)}, Title = {Balancing {SoNaR}: {IPR} versus Processing Issues in a 500-Million-Word Written {D}utch Reference Corpus}, Year = {2010}} @inproceedings{vanZaanen00, Author = {{Van Zaanen}, Menno}, Booktitle = {Proceedings of the 18th International Conference on Computational Linguistics ({COLING}); Saarbr{\"u}cken, Germany}, Date-Added = {2011-01-27 01:11:05 +0100}, Date-Modified = {2011-01-27 01:20:36 +0100}, Month = {jul # {~31--} # aug # {~4}}, Organization = {Association for Computational Linguistics}, Pages = {961--967}, Title = {{ABL}: {A}lignment-{B}ased {L}earning}, Year = {2000}} @inproceedings{Farkas+10, Annote = {URL: http://www.aclweb.org/anthology/W10-3001}, Author = {Farkas, Rich\'{a}rd and Vincze, Veronika and M\'{o}ra, Gy\"{o}rgy and Csirik, J\'{a}nos and Szarvas, Gy\"{o}rgy}, Booktitle = {Proceedings of the Fourteenth Conference on Computational Natural Language Learning}, Date-Added = {2011-01-26 21:38:19 +0100}, Date-Modified = {2011-01-29 16:44:08 +0100}, Month = {July}, Pages = {1--12}, Publisher = {Association for Computational Linguistics}, Title = {The {CoNLL}-2010 Shared Task: Learning to Detect Hedges and their Scope in Natural Language Text}, Year = {2010}} @inproceedings{McDonald+07, Author = {McDonald, R. and Nivre, J.}, Booktitle = {Proceedings of the 2007 Joint Conference on Empirical Methods in Natural Language Processing and Computational Natural Language Learning}, Date-Added = {2011-01-25 00:33:25 +0100}, Date-Modified = {2011-01-25 00:35:18 +0100}, Month = {June}, Organization = {Association for Computational Linguistics}, Pages = {pp. 
122--131}, Title = {Characterizing the Errors of Data-Driven Dependency Parsing Models}, Year = {2007}} @inproceedings{Nivre+08, Address = {Columbus, Ohio}, Author = {J. Nivre and R. McDonald}, Booktitle = {Proceedings of ACL-08: HLT}, Date-Added = {2011-01-25 00:27:22 +0100}, Date-Modified = {2011-07-02 22:20:14 +0200}, Month = {June}, Pages = {950--958}, Publisher = {Association for Computational Linguistics}, Title = {Integrating Graph-Based and Transition-Based Dependency Parsers}, Url = {http://www.aclweb.org/anthology/P/P08/P08-1108}, Year = {2008}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/P/P08/P08-1108}} @inproceedings{Nivre+07b, Address = {Prague, Czech Republic}, Author = {Nivre, J and Hall, J and K\"ubler, S. and McDonald, R. and Nilsson, J. and Riedel, S. and Yuret, D.}, Booktitle = {Proceedings of the CoNLL Shared Task Session of EMNLP-CoNLL 2007}, Date-Added = {2011-01-24 21:11:16 +0100}, Date-Modified = {2011-07-02 22:19:59 +0200}, Month = {June}, Pages = {915--932}, Publisher = {Association for Computational Linguistics}, Title = {The {CoNLL} 2007 Shared Task on Dependency Parsing}, Url = {http://www.aclweb.org/anthology/D/D07/D07-1096}, Year = {2007}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/D/D07/D07-1096}} @inproceedings{Warnock+88, Author = {T. Warnock and B. Wendroff}, Booktitle = {ICCA Journal}, Date-Added = {2010-12-17 14:12:41 +0100}, Date-Modified = {2011-01-13 19:18:59 +0100}, Pages = {10--13}, Title = {Search Tables in Computer Chess}, Volume = {11--1}, Year = {1988}} @inproceedings{Zobrist70, Author = {A. L. Zobrist}, Booktitle = {ICCA Journal (republished)}, Date-Added = {2010-12-17 14:06:38 +0100}, Date-Modified = {2011-01-13 19:19:19 +0100}, Pages = {69--73}, Title = {A New Hashing Method With Application For Game Playing}, Volume = {13--2}, Year = {1970}} @article{Daelemans+94, Author = {W. Daelemans and S. Gillis and G. 
Durieux}, Date-Added = {2010-12-08 23:33:52 +0100}, Date-Modified = {2010-12-08 23:50:06 +0100}, Journal = {Computational Linguistics}, Keywords = {ilk, stress assignment, Dutch}, Number = {3}, Pages = {421--451}, Title = {The acquisition of stress: a data-oriented approach.}, Volume = {20}, Year = {1994}} @conference{Post+09, Author = {M. Post and D. Gildea}, Booktitle = {Proceedings of the ACL-IJCNLP 2009 Conference Short Papers}, Date-Added = {2010-12-08 13:52:42 +0100}, Date-Modified = {2010-12-08 13:53:05 +0100}, Organization = {Association for Computational Linguistics}, Pages = {45--48}, Title = {Bayesian learning of a tree substitution grammar}, Year = {2009}} @article{Arnon+10, Author = {I. Arnon and N. Snider}, Date-Added = {2010-12-08 13:39:14 +0100}, Date-Modified = {2010-12-08 13:40:36 +0100}, Journal = {Journal of Memory and Language}, Number = {1}, Pages = {67--82}, Title = {More than words: Frequency effects for multi-word phrases}, Volume = {62}, Year = {2010}} @book{Goldberg06, Author = {Goldberg, A.E.}, Date-Added = {2010-12-08 13:13:05 +0100}, Date-Modified = {2010-12-08 13:13:31 +0100}, Isbn = {0199268525}, Publisher = {Oxford University Press, USA}, Title = {Constructions at work: The nature of generalization in language}, Year = {2006}} @book{Bod+03, Date-Added = {2010-12-03 23:15:16 +0100}, Date-Modified = {2010-12-03 23:16:34 +0100}, Editor = {R. Bod and R. Scha and K. Sima'an}, Publisher = {CSLI Publications}, Series = {Studies in Computational Linguistics}, Title = {Data-oriented Parsing}, Year = {2003}} @incollection{Krott+02, Address = {Amsterdam, The Netherlands}, Author = {A. Krott and R. Schreuder and H. Baayen}, Booktitle = {Analogical modeling: An exemplar-based approach to language}, Date-Added = {2010-12-03 23:12:18 +0100}, Date-Modified = {2010-12-03 23:13:45 +0100}, Editor = {R. Skousen and D. Lonsdale and D.B. 
Parkinson}, Publisher = {John Benjamins}, Title = {Analogical hierarchy: Exemplar-based modeling of linkers in {D}utch noun--noun compounds}, Year = {2002}} @incollection{Eddington02, Address = {Amsterdam, The Netherlands}, Author = {D. Eddington}, Booktitle = {Analogical modeling: An exemplar-based approach to language}, Date-Added = {2010-12-03 23:10:21 +0100}, Date-Modified = {2010-12-03 23:11:51 +0100}, Editor = {R. Skousen and D. Lonsdale and D.B. Parkinson}, Publisher = {John Benjamins}, Title = {A comparison of two analogical models: Tilburg memory-based learner versus analogical modeling}, Year = {2002}} @inproceedings{Langlais+09, Address = {Athens, Greece}, Author = {P. Langlais and F. Yvon and P. Zweigenbaum}, Booktitle = {Proceedings of the 12th Conference of the European Chapter of the Association for Computational Linguistics (EACL-09)}, Date-Added = {2010-12-03 23:08:02 +0100}, Date-Modified = {2010-12-03 23:09:39 +0100}, Pages = {487--495}, Title = {Improvements in analogical learning: Application to translating multi-terms of the medical domain}, Year = {2009}} @article{Younger67, Author = {D.H. Younger}, Citeulike-Article-Id = {5914327}, Journal = {Information and Control}, Keywords = {parsing}, Number = {2}, Pages = {189--208}, Posted-At = {2009-10-09 06:23:37}, Priority = {2}, Title = {{Recognition and parsing of context-free languages in time $n^{3}$}}, Volume = {10}, Year = {1967}} @article{Yvon+07, Author = {F. Yvon and N. 
Stroppa}, Date-Added = {2010-12-03 23:06:12 +0100}, Date-Modified = {2010-12-03 23:07:47 +0100}, Journal = {Lingue e linguaggio}, Pages = {201--226}, Title = {Proportions in the lexicon: (Re) {D}iscovering paradigms}, Volume = {2}, Year = {2007}} @book{Bolinger89, Author = {Bolinger, D.}, Date-Added = {2010-12-03 23:04:22 +0100}, Date-Modified = {2010-12-03 23:04:28 +0100}, Key = {phrasing, chunking}, Publisher = {Edward Arnold, London}, Title = {Intonation and its Uses: Melody in Grammar and Discourse}, Year = {1989}} @inproceedings{VanHerwijnen+01, Author = {Herwijnen, O.M. van and Terken, J.M.B.}, Booktitle = {Proceedings of Eurospeech 2001}, Date-Added = {2010-12-03 23:03:26 +0100}, Date-Modified = {2010-12-03 23:04:04 +0100}, Pages = {529-532}, Title = {Evaluation of {PROS}-3 for the assignment of prosodic structure, compared to assignment by human experts}, Volume = {1}, Year = {2001}} @inproceedings{Bear90, Author = {Bear, J. and Price, P.}, Booktitle = {Proceedings of the Annual Meeting of the Association for Computational Linguistics}, Date-Added = {2010-12-03 23:01:57 +0100}, Date-Modified = {2010-12-03 23:02:37 +0100}, Key = {parsing, prosody}, Pages = {17-22}, Title = {Prosody, Syntax and Parsing}, Year = {1990}} @article{Sanderman97, Author = {Sanderman, A.A. and Collier, R.}, Date-Added = {2010-12-03 23:00:48 +0100}, Date-Modified = {2010-12-03 23:00:57 +0100}, Journal = {Language and Speech}, Key = {prosodic phrasing}, Number = {4}, Pages = {391-409}, Title = {Prosodic Phrasing and Comprehension}, Volume = {40}, Year = {1997}} @incollection{VandenBosch09, Address = {Berlin}, Author = {A. {Van den Bosch}}, Booktitle = {Corpus Linguistics: An International Handbook}, Date-Added = {2010-11-30 16:32:03 +0100}, Date-Modified = {2010-11-30 16:34:54 +0100}, Editor = {A. L\"{u}deling and M. 
Kyt\"{o}}, Keywords = {ilk, vici, corpus linguistics, machine learning, corpus annotation}, Pages = {855--872}, Publisher = {Walter de Gruyter}, Title = {Machine Learning}, Volume = {2}, Year = {2009}} @book{Baase+00, Address = {Boston}, Author = {S. Baase and A. Van Gelder}, Date-Added = {2010-11-11 20:41:07 +0100}, Date-Modified = {2010-11-11 20:41:48 +0100}, Publisher = {Addison Wesley Longman}, Title = {Computer algorithms}, Year = {2000}} @misc{vanZaanen10, Author = {M.M. {Van Zaanen}}, Date-Added = {2010-11-04 13:47:02 +0100}, Date-Modified = {2010-12-20 21:59:32 +0100}, Howpublished = {\href{http://ilk.uvt.nl/~menno/research/software/suffixtree}{http://ilk.uvt.nl/~menno/research/software/suffixtree}}, Keywords = {suffix trees}, Title = {Suffix Tree Package}, Year = {2010}} @inproceedings{Stehouwer+10c, Author = {H. Stehouwer and M. {Van Zaanen}}, Booktitle = {Proceedings of Computational Linguistics---Applications, 2010}, Date-Added = {2010-11-04 11:28:01 +0100}, Date-Modified = {2010-11-04 12:09:34 +0100}, Keywords = {ilk, vici, suffix arrays, suffix trees, ngrams}, Organization = {International Multiconference on Computer Science and Information Technology}, Pages = {151--158}, Title = {Finding patterns in strings using suffixarrays}, Year = {2010}} @article{Reynaert10, Affiliation = {Tilburg Centre for Cognition and Communication, Tilburg University, Kamer D 342, P.O. 
Box 90153, 5000 LE Tilburg, Netherlands}, Author = {Reynaert, Martin}, Date-Added = {2010-11-04 10:53:38 +0100}, Date-Modified = {2010-11-04 10:54:25 +0100}, Issn = {1433-2833}, Journal = {International Journal on Document Analysis and Recognition}, Keywords = {ilk, vici, corpus cleanup, spelling correction, spelling variation, ticcl}, Note = {10.1007/s10032-010-0133-5}, Pages = {1-15}, Publisher = {Springer Berlin / Heidelberg}, Title = {Character confusion versus focus word-based correction of spelling and OCR variants in corpora}, Url = {http://dx.doi.org/10.1007/s10032-010-0133-5}, Year = {2010}, Bdsk-Url-1 = {http://dx.doi.org/10.1007/s10032-010-0133-5}} @inproceedings{Stehouwer+10b, Author = {H. Stehouwer and M. {Van Zaanen}}, Booktitle = {Proceedings of the 22nd Benelux Conference on Artificial Intelligence (BNAIC)}, Date-Added = {2010-11-02 21:09:48 +0100}, Date-Modified = {2011-01-26 17:19:59 +0100}, Keywords = {ilk, vici, suffix arrays, language modeling}, Title = {Using suffix arrays as language models: Scaling the $n$-gram}, Year = {2010}} @incollection{Stehouwer+10, Author = {H. Stehouwer and M. {Van Zaanen}}, Booktitle = {Grammatical Inference: Theoretical Results and Applications}, Date-Added = {2010-11-02 21:07:59 +0100}, Date-Modified = {2010-11-02 21:11:17 +0100}, Editor = {Sempere, Jos{\'e} and Garc{\'\i}a, Pedro}, Keywords = {ilk, vici, suffix arrays, language modeling}, Pages = {305--308}, Publisher = {Springer Berlin / Heidelberg}, Series = {Lecture Notes in Computer Science}, Title = {Enhanced suffix arrays as language models: Virtual $k$-testable languages}, Volume = {6339}, Year = {2010}} @incollection{Katrenko+10, Author = {S. Katrenko and M. 
{Van Zaanen}}, Booktitle = {Grammatical Inference: Theoretical Results and Applications}, Date-Added = {2010-11-02 21:06:20 +0100}, Date-Modified = {2010-11-02 21:07:00 +0100}, Editor = {Sempere, Jos{\'e} and Garc{\'\i}a, Pedro}, Keywords = {ilk, vici, grammar induction}, Pages = {293-296}, Publisher = {Springer Berlin / Heidelberg}, Series = {Lecture Notes in Computer Science}, Title = {Rademacher Complexity and Grammar Induction Algorithms: What It May (Not) Tell Us}, Volume = {6339}, Year = {2010}} @incollection{VanZaanen+10b, Author = {M. {Van Zaanen} and T. Gaustad}, Booktitle = {Grammatical Inference: Theoretical Results and Applications}, Date-Added = {2010-11-02 21:05:38 +0100}, Date-Modified = {2010-11-02 21:07:29 +0100}, Editor = {Sempere, Jos{\'e} and Garc{\'\i}a, Pedro}, Keywords = {ilk, vici, grammar induction}, Pages = {245--257}, Publisher = {Springer Berlin / Heidelberg}, Series = {Lecture Notes in Computer Science}, Title = {Grammatical Inference as Class Discrimination}, Volume = {6339}, Year = {2010}} @incollection{VanZaanen+10, Author = {M. {Van Zaanen} and Pieter Kanters}, Booktitle = {11th International Society for Music Information Retrieval Conference (ISMIR 2010)}, Date-Added = {2010-11-02 21:04:19 +0100}, Date-Modified = {2010-11-02 21:07:48 +0100}, Keywords = {ilk, vici, grammar induction, mood}, Pages = {75--80}, Title = {Automatic Mood Classification using tf*idf based on Lyrics}, Year = {2010}} @article{Hirst+05, Author = {G. Hirst and A. Budanitsky}, Date-Added = {2010-11-01 22:23:13 +0100}, Date-Modified = {2010-11-01 22:24:56 +0100}, Journal = {Natural Language Engineering}, Keywords = {spelling correction, trigrams}, Pages = {87--111}, Title = {Correcting real-word spelling errors by restoring lexical cohesion}, Volume = {11(1)}, Year = {2005}} @book{Mitton96, Address = {Harlow, Essex, UK}, Annote = {Online here: http://eprints.bbk.ac.uk/469/ at the time of writing (1-11-2010)}, Author = {R. 
Mitton}, Date-Added = {2010-11-01 21:51:15 +0100}, Date-Modified = {2011-06-19 23:01:09 +0200}, Publisher = {Longman}, Title = {English Spelling and the Computer}, Year = {1996}} @inproceedings{VandenHoven+10, Address = {Vienna, Austria}, Author = {M. {Van den Hoven} and A. {Van den Bosch} and K. Zervanou}, Booktitle = {Proceedings of the First International AMICUS Workshop on Automated Motif Discovery in Cultural Heritage and Scientific Communication Texts}, Date-Added = {2010-10-31 11:26:30 +0100}, Date-Modified = {2011-01-16 21:52:58 +0100}, Editor = {S. Dar\'{a}nyi and P. Lendvai}, Keywords = {hitime, ilk, social history, iish, iisg, strikes}, Pages = {20--28}, Title = {Beyond reported history: Strikes that never happened}, Year = {2010}, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/Amicus10-vdh-vdb-z.pdf}} @techreport{Dake03, Author = {J. Dake}, Date-Added = {2010-10-06 14:43:23 +0200}, Date-Modified = {2010-10-06 14:45:28 +0200}, Institution = {Tilburg University}, Keywords = {ilk, master's thesis, timbl, igtree, tribl, tribl2, vi}, Number = {03-02}, Title = {Explorations of the speed-accuracy trade-off in memory-based learning algorithms}, Type = {ILK Research Group Technical Report Series}, Year = {2003}, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/ilk0302.pdf}} @inproceedings{DeRijke+10, Author = {M. {De Rijke} and K. Balog and T. Bogers and A. {Van den Bosch}}, Booktitle = {CLEF 2010: Conference on Multilingual and Multimodal Information Access Evaluation}, Date-Added = {2010-09-28 10:39:58 +0200}, Date-Modified = {2010-09-28 10:54:46 +0200}, Keywords = {entity profiling, recommender systems, apropos, ilk}, Pages = {94--99}, Title = {On the Evaluation of Entity Profiles}, Year = {2010}, Bdsk-Url-1 = {http://bit.ly/9awTeq}} @inproceedings{Zens+04, Author = {R. Zens and H. 
Ney}, Booktitle = {Proceedings of the 41st Annual Meeting on Association for Computational Linguistics}, Date-Added = {2010-08-24 23:10:18 +0200}, Date-Modified = {2010-08-24 23:11:44 +0200}, Organization = {Association for Computational Linguistics}, Pages = {144--151}, Title = {A comparative study on reordering constraints in statistical machine translation}, Year = {2003}} @inproceedings{Al-Onaizan+06, Author = {Al-Onaizan, Y. and Papineni, K.}, Booktitle = {Proceedings of the 21st International Conference on Computational Linguistics and the 44th annual meeting of the Association for Computational Linguistics}, Date-Added = {2010-08-24 22:35:57 +0200}, Date-Modified = {2010-08-24 22:37:05 +0200}, Organization = {Association for Computational Linguistics}, Pages = {529--536}, Title = {Distortion models for statistical machine translation}, Year = {2006}} @inproceedings{Che+09, Address = {Boulder, Colorado}, Author = {W. Che and Z. Li and Y. Li and Y. Guo and B. Qin and T. Liu}, Booktitle = {Proceedings of the Thirteenth Conference on Computational Natural Language Learning (CoNLL 2009): Shared Task}, Date-Added = {2010-07-17 15:04:32 +0200}, Date-Modified = {2010-07-17 15:05:21 +0200}, Month = {June}, Pages = {49--54}, Publisher = {Association for Computational Linguistics}, Title = {Multilingual Dependency-based Syntactic and Semantic Parsing}, Url = {http://www.aclweb.org/anthology/W09-1207}, Year = {2009}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/W09-1207}} @inproceedings{Wang+08, Address = {Berlin}, Author = {X. Wang and J. Nie and D. Luo and X. Wu}, Booktitle = {Proceedings of the European Conference on Machine Learning and Knowledge Discovery}, Date-Added = {2010-07-12 11:50:15 +0200}, Date-Modified = {2010-07-12 11:51:42 +0200}, Pages = {538--549}, Publisher = {Springer Verlag}, Title = {A joint segmentation and labeling approach for {C}hinese lexical analysis}, Year = {2008}} @techreport{Kieras+94, Author = {D. Kieras and B. 
John}, Date-Added = {2010-06-25 21:09:28 +0200}, Date-Modified = {2010-06-25 21:10:11 +0200}, Institution = {Carnegie Mellon University}, Number = {CMU-HCII-94-106}, Title = {The {GOMS} Family of Analysis Techniques: Tools for Design and Evaluation}, Year = {1994}} @inproceedings{Lee+08b, Author = {J. Lee and O. Knutsson}, Booktitle = {Proceedings of CICLing 2008, 9th International Conference on Intelligent Text Processing and Computational Linguistics}, Date-Added = {2010-06-08 23:04:39 +0200}, Date-Modified = {2010-08-02 11:41:38 +0200}, Editor = {A. Gelbukh}, Number = {4919}, Pages = {643--654}, Publisher = {Springer Verlag}, Series = {Lecture Notes in Computer Science}, Title = {The Role of PP Attachment in Preposition Generation}, Year = {2008}} @book{Inhelder+58, Address = {New York, NY}, Author = {B. Inhelder and J. Piaget}, Date-Added = {2010-05-20 16:16:36 +0200}, Date-Modified = {2010-05-20 16:17:48 +0200}, Publisher = {Basic Books}, Title = {The Growth of Logical Thinking from Childhood to Adolescence}, Year = {1958}} @techreport{Singh02, Author = {P. Singh}, Date-Added = {2010-05-16 13:49:00 +0200}, Date-Modified = {2010-05-19 22:32:56 +0200}, Institution = {MIT}, Keywords = {common sense}, Title = {The Open Mind Common Sense Project}, Url = {http://web.media.mit.edu/~push/OMCSProject.pdf}, Year = {2002}} @inproceedings{VonAhn+04, Address = {New York, NY}, Author = {L. {Von Ahn} and L. Dabbish}, Booktitle = {Proceedings of the SIGCHI Conference on Human Factors in Computing Systems}, Date-Added = {2010-05-16 13:47:08 +0200}, Date-Modified = {2010-05-16 13:48:20 +0200}, Pages = {319--326}, Publisher = {ACM}, Title = {Labeling images with a computer game}, Year = {2004}} @inproceedings{Speer+09, Address = {New Brunswick, NJ, USA}, Author = {R. Speer and J. Krishnamurti and C. Havasi and D. Smith and H. Lieberman and K. 
Arnold}, Booktitle = {Proceedings of the 13th International Conference on Intelligent User Interfaces}, Date-Added = {2010-05-16 13:45:00 +0200}, Date-Modified = {2010-05-16 13:46:56 +0200}, Pages = {137--146}, Publisher = {ACM}, Title = {An interface for targeted collection of common sense knowledge using a mixture model}, Year = {2009}} @inproceedings{Vossen+07, Author = {P. Vossen and K. Hofmann and M. {De Rijke} and E. {Tjong Kim Sang} and K. Deschacht}, Booktitle = {Proceedings of the Seventh Dutch-Belgian Information Retrieval Workshop (DIR 2007)}, Date-Added = {2010-05-10 19:48:55 +0200}, Date-Modified = {2010-09-28 10:50:36 +0200}, Editor = {Moens, M.-F. and Tuytelaars, T. and de Vries, A.}, Pages = {89--96}, Title = {The {C}ornetto database: Architecture and user-scenarios.}, Year = {2007}} @inproceedings{VanMulken+98, Author = {S. {Van Mulken} and E. Andr\'{e} and J. Muller}, Booktitle = {People and Computers XIII: Proceedings of HCI-98}, Date-Added = {2010-05-10 17:26:08 +0200}, Date-Modified = {2010-05-10 19:12:50 +0200}, Editor = {H. Johnson and L. Nigay and C. Roast}, Pages = {53--66}, Title = {The persona effect: How substantial is it?}, Year = {1998}} @incollection{Ball+97, Address = {Menlo Park, CA}, Author = {Ball, G. and Ling, D. and Kurlander, D. and Miller, J. and Pugh, D. and Skelly, T. and Stankosky, A. and Thiel, D. and Van Dantzich, M. and Wax, T.}, Booktitle = {Software Agents}, Date-Added = {2010-05-10 17:24:10 +0200}, Date-Modified = {2010-05-10 19:09:49 +0200}, Editor = {J.M. Bradshaw}, Journal = {Software agents}, Pages = {191-222}, Publisher = {AAAI/MIT Press}, Title = {Lifelike computer characters: The persona project at {M}icrosoft research}, Year = {1997}} @article{Liu+04, Author = {H. Liu and P. 
Singh}, Date-Added = {2010-05-10 17:21:02 +0200}, Date-Modified = {2010-05-10 17:22:50 +0200}, Journal = {BT Technology Journal}, Keywords = {common sense}, Number = {4}, Pages = {211-226}, Title = {ConceptNet: A Practical Common-Sense Reasoning Toolkit}, Volume = {22}, Year = {2004}} @incollection{McCarthy+69, Author = {J. McCarthy and P. J. Hayes}, Booktitle = {Machine Intelligence}, Date-Added = {2010-05-10 16:56:40 +0200}, Date-Modified = {2010-05-10 16:58:12 +0200}, Editor = {D. Michie}, Keywords = {common sense, artificial intelligence}, Publisher = {American Elsevier}, Title = {Some Philosophical Problems from the Standpoint of Artificial Intelligence}, Volume = {4}, Year = {1969}} @inproceedings{McCarthy59, Address = {London, UK}, Author = {J. McCarthy}, Booktitle = {Proceedings of the Teddington Conference on the Mechanization of Thought Processes}, Date-Added = {2010-05-10 16:54:57 +0200}, Date-Modified = {2010-05-10 16:56:21 +0200}, Organization = {Her Majesty's Stationery Office}, Title = {Programs with Common Sense}, Year = {1959}} @article{Kuipers04, Author = {B. Kuipers}, Date-Added = {2010-05-10 12:19:44 +0200}, Date-Modified = {2010-05-10 12:20:58 +0200}, Journal = {Ubiquity}, Keywords = {common sense}, Number = {4}, Pages = {13--19}, Title = {Making Sense of Common Sense Knowledge}, Volume = {45}, Year = {2004}} @article{VonAhn+08, Author = {L. {Von Ahn} and L. Dabbish}, Date-Added = {2010-05-10 12:00:14 +0200}, Date-Modified = {2010-05-10 12:02:08 +0200}, Journal = {Communications of the {ACM}}, Pages = {58--67}, Title = {Designing games with a purpose}, Volume = {51}, Year = {2008}} @inproceedings{VonAhn+06, Address = {New York, NY}, Author = {L. {Von Ahn} and M. Kedia and M. 
Blum}, Booktitle = {Proceedings of the SIGCHI Conference on Human Factors in Computing Systems}, Date-Added = {2010-05-10 11:58:00 +0200}, Date-Modified = {2010-05-10 12:01:09 +0200}, Pages = {75--78}, Publisher = {ACM Press}, Title = {Verbosity: A game for collecting common-sense facts}, Year = {2006}} @article{Stork99, Author = {D. Stork}, Date-Added = {2010-05-10 11:52:14 +0200}, Date-Modified = {2010-05-10 12:16:55 +0200}, Journal = {IEEE Expert Systems and Their Applications}, Keywords = {common sense, collaborative systems, human computing}, Pages = {16--20}, Title = {The {O}pen {M}ind Initiative}, Volume = {14}, Year = {1999}} @article{Singh03, Author = {P. Singh}, Date-Added = {2010-05-10 11:18:00 +0200}, Date-Modified = {2010-05-10 11:22:10 +0200}, Journal = {Computing and Informatics}, Keywords = {common sense}, Number = {5}, Pages = {521--543}, Title = {Examining the Society of Mind}, Volume = {22}, Year = {2003}} @book{Minsky06, Address = {New York, NY}, Author = {M. L. Minsky}, Date-Added = {2010-05-10 11:12:17 +0200}, Date-Modified = {2010-05-16 13:50:22 +0200}, Keywords = {common sense, artificial intelligence, emotions}, Publisher = {Simon \& Schuster}, Title = {The Emotion Machine: Commonsense Thinking, Artificial Intelligence, and the Future of the Human Mind}, Year = {2006}} @conference{Ohta+02, Author = {Ohta, T. and Tateisi, Y. and Kim, J.D.}, Booktitle = {Proceedings of the second international conference on Human Language Technology Research}, Date-Added = {2010-05-01 22:54:26 +0200}, Date-Modified = {2010-05-01 22:54:41 +0200}, Organization = {Morgan Kaufmann Publishers Inc.}, Pages = {86}, Title = {{The GENIA corpus: An annotated research abstract corpus in molecular biology domain}}, Year = {2002}} @inproceedings{Roth+05, Annote = {D. Roth and W. 
Yih, Integer Linear Programming Inference for Conditional Random Fields, in Proceedings of the 22nd International Conference on Machine Learning (ICML-2005), Association for Computing Machinery, Inc., 9 August 2005}, Author = {D. Roth and W. Yih}, Booktitle = {Proceedings of the 22nd International Conference on Machine Learning}, Date-Added = {2010-05-01 22:51:23 +0200}, Date-Modified = {2010-05-01 22:53:25 +0200}, Pages = {743}, Publisher = {ACM}, Title = {Integer Linear Programming Inference for Conditional Random Fields}, Year = {2005}} @techreport{Daelemans+09, Author = {W. Daelemans and J. Zavrel and K. {Van der Sloot} and A. {Van den Bosch}}, Date-Added = {2010-03-29 10:27:02 +0200}, Date-Modified = {2010-03-29 10:27:46 +0200}, Institution = {ILK Research Group, Tilburg University}, Keywords = {ilk, TiMBL, memory-based learning}, Number = {ILK 09-01}, Title = {{TiMBL}: Tilburg Memory Based Learner, version 6.2, Reference Guide}, Year = 2009} @techreport{Daelemans+07, Author = {W. Daelemans and J. Zavrel and K. {Van der Sloot} and A. {Van den Bosch}}, Date-Added = {2010-03-29 10:26:27 +0200}, Date-Modified = {2010-03-29 10:26:27 +0200}, Institution = {ILK Research Group, Tilburg University}, Keywords = {ilk, TiMBL, memory-based learning}, Number = {ILK 07-07}, Title = {{TiMBL}: Tilburg Memory Based Learner, version 6.1, Reference Guide}, Year = 2007} @phdthesis{Keuleers08, Author = {E. Keuleers}, Date-Added = {2010-03-29 10:21:05 +0200}, Date-Modified = {2010-03-29 10:22:19 +0200}, School = {University of Antwerp}, Title = {Memory-based learning of inflectional morphology}, Year = {2008}} @phdthesis{Canisius09, Author = {S. 
Canisius}, Date-Added = {2010-03-29 10:18:48 +0200}, Date-Modified = {2010-03-29 10:20:41 +0200}, Keywords = {ilk, constraint satisfaction inference, memory-based learning, memory-based language processing}, School = {Tilburg University}, Title = {Structured prediction for natural language processing: A constraint satisfaction approach}, Year = {2009}} @inproceedings{DePauw+07, Address = {Berlin, Germany}, Author = {G. {De Pauw} and P. Waiganjo and G.-M. {De Schryver}}, Booktitle = {Proceedings of Text, Speech and Dialogue, Tenth International Conference}, Date-Added = {2010-03-29 10:14:36 +0200}, Date-Modified = {2010-03-29 10:16:45 +0200}, Keywords = {diacritic restoration, resource-scarce languages, memory-based language modeling}, Pages = {170--179}, Publisher = {Springer Verlag}, Series = {Lecture Notes in Computer Science}, Title = {Automatic diacritic restoration for resource-scarce languages}, Volume = {4629}, Year = {2007}} @article{Aamodt+94, Author = {A. Aamodt and E. Plaza}, Date-Modified = {2008-07-23 17:02:54 +0200}, Journal = {AI Communications}, Keywords = {case-based reasoning}, Pages = {39--59}, Title = {Case-Based Reasoning: Foundational issues, methodological variations, and system approaches}, Volume = 7, Year = 1994} @inproceedings{Abney+00, Author = {S. Abney and M. Collins and A. Singhal}, Booktitle = {Proceedings of the 6th ANLP}, Organization = {ACL}, Title = {Answer extraction}, Year = 2000} @incollection{Abney+96, Address = {Cambridge, MA}, Author = {S. Abney and V. Hatzivassiloglou}, Booktitle = {The Balancing Act: Combining Symbolic and Statistical Approaches to Language}, Date-Modified = {2010-09-20 00:24:06 +0200}, Editor = {J. L. Klavans and P. Resnik}, Pages = {vii--xii}, Publisher = MIT, Title = {Preface}, Year = 1996} @inproceedings{Abney+99, Author = {Abney, S. and Schapire, R.E. 
and Singer, Y.}, Booktitle = {Proceedings of the 1999 Joint SIGDAT Conference on Empirical Methods in Natural Language Processing and Very Large Corpora}, Date-Modified = {2010-11-26 19:41:16 +0100}, Keywords = {boosting, tagging, PP attachment}, Pages = {38--45}, Title = {Boosting applied to tagging and {PP} attachment}, Year = 1999} @inproceedings{Abney02, Author = {S. Abney}, Booktitle = ACL, Date-Modified = {2008-07-23 17:03:10 +0200}, Pages = {360--367}, Title = {Bootstrapping}, Year = 2002} @incollection{Abney91, Address = {Dordrecht}, Author = {S. Abney}, Booktitle = {Principle-Based Parsing}, Date-Modified = {2010-11-26 19:41:25 +0100}, Editor = {R. Berwick and S. Abney and C. Tenny}, Pages = {257--278}, Publisher = {Kluwer Academic Publishers}, Title = {Parsing by chunks}, Year = 1991} @inproceedings{Abney95, Author = {S. Abney}, Booktitle = {Computational Linguistics and the Foundations of Linguistic Theory}, Date-Modified = {2010-11-26 19:41:44 +0100}, Organization = {CSLI}, Title = {Chunks and dependencies: Bringing processing evidence to bear on syntax}, Year = 1995} @incollection{Abney96, Address = {Cambridge, MA}, Author = {S. Abney}, Booktitle = {The Balancing Act: Combining Symbolic and Statistical Approaches to Language}, Editor = {J. L. Klavans and P. Resnik}, Pages = {1--26}, Publisher = MIT, Title = {Statistical Methods and Linguistics}, Year = 1996} @inproceedings{Abney96b, Address = {Prague}, Author = {S. Abney}, Booktitle = {Proceedings of the ESSLLI '96 Robust Parsing Workshop}, Date-Modified = {2011-02-01 22:28:20 +0100}, Editor = {J. Carroll and T. Briscoe}, Keywords = {shallow parsing, partial parsing, parsing}, Pages = {8--15}, Publisher = {ESSLLI}, Title = {Partial Parsing via Finite-state Cascades}, Year = 1996} @article{Abney97, Author = {S. P. 
Abney}, Date-Added = {2010-02-01 23:19:01 +0100}, Date-Modified = {2013-08-07 09:26:57 +0000}, Journal = {Computational Linguistics}, Number = {4}, Pages = {597--618}, Title = {Stochastic Attribute-Value Grammars}, Volume = {23}, Year = {1997}} @article{Abouelhoda+04, Address = {Amsterdam, The Netherlands}, Author = {M.I. Abouelhoda and S. Kurtz and E. Ohlebusch}, Date-Modified = {2011-06-21 18:24:50 +0200}, Doi = {http://dx.doi.org/10.1016/S1570-8667(03)00065-0}, Issn = {1570-8667}, Journal = {Journal of Discrete Algorithms}, Number = {1}, Pages = {53--86}, Publisher = {Elsevier Science Publishers B. V.}, Title = {Replacing suffix trees with enhanced suffix arrays}, Volume = {2}, Year = {2004}, Bdsk-Url-1 = {http://dx.doi.org/10.1016/S1570-8667(03)00065-0}} @inproceedings{Adams+91, Author = {L. Adams and T. Macfarland}, Booktitle = {Proceedings of the Second Annual Meeting of the Formal Linguistics Society of Midamerica}, Organization = {Formal Linguistics Society of Midamerica}, Title = {Testing for adjuncts}, Year = 1991} @inproceedings{Adamson+96, Author = {M. Adamson and R. Damper}, Booktitle = {Proceedings of the International Conference on Spoken Language Processing, ICSLP-96}, Pages = {1704--1707}, Title = {A recurrent network that learns to pronounce English text}, Volume = 4, Year = 1996} @article{Ades+82, Author = {A. E. Ades and M. J. Steedman}, Date-Added = {2010-02-01 23:19:21 +0100}, Date-Modified = {2010-02-01 23:19:27 +0100}, Journal = {Linguistics and Philosophy}, Pages = {517--558}, Title = {On the Order of Words}, Volume = {4}, Year = {1982}} @article{Adriaans+04, Author = {P. Adriaans and H. Fernau and C. de la Higuera and M. {Van Zaanen}}, Date-Modified = {2010-02-17 20:30:09 +0100}, Journal = {Grammars}, Keywords = {ilk, grammar induction}, Pages = {41--43}, Title = {Introduction to the special issue on grammar induction}, Volume = 7, Year = 2004} @article{Adriaans+04b, Author = {P. Adriaans and M. 
{Van Zaanen}}, Date-Modified = {2010-09-14 22:19:31 +0200}, Journal = {Grammars}, Keywords = {ilk, grammar induction}, Note = {Special issue with the theme ``Grammar Induction''}, Pages = {57--68}, Title = {Computational Grammar Induction for Linguists}, Volume = {7}, Year = {2004}} @incollection{Agirre+06, Address = {New York, NY, USA}, Author = {E. Agirre and M. Stevenson}, Booktitle = {Word Sense Disambiguation: Algorithms and Applications}, Date-Added = {2009-11-15 00:05:48 +0100}, Date-Modified = {2009-11-15 00:05:59 +0100}, Pages = {217--252}, Publisher = {Springer-Verlag}, Series = {Text, Speech and Language Technology}, Title = {Knowledge Sources for WSD}, Volume = 33, Year = 2006} @inproceedings{Agirre+98, Author = {Agirre, E. and Gojenola, K. and Sarasola, K. and Voutilainen, A.}, Booktitle = {{COLING-ACL '98 }}, Pages = {22-28}, Title = {{Towards a Single Proposal in Spelling Correction}}, Year = 1998} @book{Agresti90, Author = {A. Agresti}, Date-Modified = {2010-06-25 21:32:30 +0200}, Publisher = {John Wiley \& Sons}, Title = {Categorical Data Analysis}, Year = 1990} @article{Aha+91, Author = {D. W. Aha and D. Kibler and M. Albert}, Journal = {Machine Learning}, Pages = {37--66}, Title = {Instance-based Learning Algorithms}, Volume = 6, Year = 1991} @inproceedings{Aha+92, Address = {Bloomington, IN}, Author = {D. W. Aha and R. L. Goldstone}, Booktitle = {Proceedings of the Fourteenth Annual Conference of the Cognitive Science Society}, Pages = {534--539}, Publisher = {Lawrence Erlbaum}, Title = {Concept learning and flexible weighting}, Year = 1992} @inproceedings{Aha+94, Author = {Aha, D.W. and Bankert, R.L.}, Booktitle = {Proceedings of the 1994 {AAAI} Workshop on Case-Based Reasoning}, Pages = {106--112}, Publisher = {AAAI Press}, Title = {Feature selection for case-based classification of cloud types: An empirical comparison}, Year = 1994} @incollection{Aha+94b, Address = {New York, NY}, Author = {D. Aha and S. 
Salzberg}, Booktitle = {Selecting models from data: Artificial intelligence and statistics}, Editor = {P. Cheeseman and R. W. Oldford}, Publisher = {Springer Verlag}, Title = {Learning to catch: Applying nearest neighbor algorithms to dynamic control tasks}, Volume = {IV}, Year = 1994} @inproceedings{Aha91, Address = {Evanston, ILL}, Author = {D. W. Aha}, Booktitle = {Proceedings of the Eighth International Workshop on Machine Learning}, Pages = {117--121}, Publisher = {Morgan Kaufmann}, Title = {Incremental constructive induction: an instance-based approach}, Year = 1991} @inproceedings{Aha92, Address = {San Mateo, CA}, Author = {D. W. Aha}, Booktitle = {Proceedings of the Ninth International Conference on Machine Learning}, Date-Modified = {2009-11-14 17:29:19 +0100}, Pages = {1--10}, Publisher = {Morgan Kaufmann}, Title = {Generalizing from case studies: a case study}, Year = 1992} @book{Aha97, Address = {Dordrecht}, Date-Modified = {2009-11-14 17:28:40 +0100}, Editor = {D. W. Aha}, Keywords = {lazy learning, memory-based learning, instance-based learning}, Publisher = {Kluwer Academic Publishers}, Title = {Lazy learning}, Year = 1997} @article{Aha97a, Author = {D. W. Aha}, Journal = {Artificial Intelligence Review}, Pages = {7--10}, Title = {Lazy learning: Special issue editorial}, Volume = 11, Year = 1997} @inproceedings{Ahn+05, Author = {D. Ahn and V. Jijkoun and K. M{\"u}ller and M. {De Rijke} and E. {Tjong Kim Sang}}, Booktitle = {Working Notes for the CLEF 2005 Workshop}, Date-Modified = {2010-09-28 10:51:03 +0200}, Publisher = {Vienna, Austria}, Title = {The University of Amsterdam at QA@CLEF 2005}, Year = 2005} @inproceedings{Ahn+06, Author = {D. Ahn and S. Fissaha and V. Jijkoun and K. M{\"u}ller and M. {De Rijke} and E. 
{Tjong Kim Sang}}, Booktitle = {Proceedings of the Fourteenth Text Retrieval Conference (TREC 2005)}, Date-Modified = {2010-09-28 10:49:14 +0200}, Publisher = {NIST}, Title = {Towards a Multi-Stream Question Answering-As-XML-Retrieval Strategy}, Year = 2006} @inproceedings{Ahn06, Address = {Sydney, Australia}, Author = {D. Ahn}, Booktitle = {Proceedings of the COLING-ACL 2006 Workshop on Annotating and Reasoning about Time and Events}, Pages = {1--8}, Title = {The stages of event extraction}, Year = 2006} @book{Aho+86, Address = {Reading, MA}, Author = {Albert V. Aho and Ravi Sethi and Jeffery D. Ullman}, Date-Added = {2010-02-01 23:19:48 +0100}, Date-Modified = {2010-02-01 23:19:59 +0100}, Publisher = {Addison-Wesley}, Title = {Compilers: {P}rinciples, Techniques, and Tools}, Year = {1986}} @inproceedings{Ait-Mokhtar+97, Author = {Salah A\"{\i}t-Mokhtar and Jean-Pierre Chanod}, Booktitle = {Proceedings of {ANLP'97}, Washington}, Date-Modified = {2010-09-14 22:12:42 +0200}, Keywords = {finite-state machines}, Pages = {72--79}, Title = {Incremental Finite-State Parsing}, Year = 1997} @inproceedings{Ait-Mokhtar+97b, Author = {Salah A\"{\i}t-Mokhtar and Jean-Pierre Chanod}, Booktitle = {Proceedings of {ACL'97} Workshop on Information Extraction and the Building of Lexical Semantic Resources for NLP Applications, Madrid}, Title = {Subject and Object Dependency Extraction Using Finite-State Transducers}, Year = 1997} @book{Alcalay90, Author = {R. Alcalay}, Publisher = {Massada}, Title = {The Complete Hebrew-English Dictionary}, Year = 1990} @inproceedings{Alegre+99, Author = {Alegre, M. and Sopena, J. 
and Lloberas, A.}, Booktitle = {{Proceedings of the 1999 Joint SIGDAT Conference on Empirical Methods in Natural Language Processing and Very Large Corpora}}, Date-Modified = {2009-11-14 17:29:42 +0100}, Keywords = {PP attachment, ensembles}, Pages = {231--238}, Title = {{PP-Attachment: A Committee Machine Approach}}, Year = 1999} @article{Alexander+89, Author = {Alexander, M. and Benson, D.F. and Stuss, D.T.}, Journal = {Brain and Language}, Pages = {656--691}, Title = {Frontal Lobes and Language}, Volume = 37, Year = 1989} @article{Ali+96, Author = {Ali, K.M. and Pazzani, M.J.}, Journal = {{Machine Learning}}, Number = 3, Pages = {{173--202}}, Publisher = {{Kluwer Academic}}, Title = {{Error Reduction through Learning Multiple Descriptions}}, Volume = 24, Year = 1996} @phdthesis{Ali96, Author = {K. Ali}, School = {Department of Information and Computer Science, University of California at Irvine}, Title = {Learning probabilistic relational concept descriptions}, Year = 1996} @inproceedings{Alishahia+05, Address = {Ann Arbor, Michigan}, Author = {A. Alishahia and S. Stevenson}, Booktitle = {Proceedings of the Workshop on Psychocomputational Models of Human Language Acquisition}, Date-Added = {2009-12-26 20:57:18 +0100}, Date-Modified = {2010-02-14 23:13:00 +0100}, Pages = {82--90}, Publisher = {ACL}, Title = {The Acquisition and Use of Argument Structure Constructions: A {B}ayesian Model}, Year = {2005}} @book{Allen+87, Address = {Cambridge, England}, Author = {J. Allen and M. S. Hunnicutt and D. Klatt}, Publisher = CUP, Title = {From Text to Speech: The {MIT}alk System}, Year = 1987} @book{Allen87, Author = {James Allen}, Publisher = Benjamin, Title = {Natural Language Understanding}, Year = 1987} @book{Allen95, Address = {Redwood City, CA}, Author = {James Allen}, Edition = {Second}, Publisher = {The Benjamin/Cummings Publishing Company}, Title = {Natural Language Understanding}, Year = 1995} @inproceedings{Alpaydin98, Author = {E. 
Alpaydin}, Booktitle = {Proceedings of Engineering of Intelligent Systems}, Editor = {E. Alpaydin}, Pages = {{6--12}}, Title = {{Techniques for Combining Multiple Learners}}, Year = 1998} @article{Altmann87, Author = {G. Altmann}, Journal = {Theoretical Linguistics}, Pages = {227-239}, Title = {The levels of linguistic investigation}, Volume = {14th}, Year = 1987} @inproceedings{Altun+03, Author = {Altun, Y. and Tsochantaridis, I. and Hofmann, T.}, Booktitle = {Proceedings of the Twentieth International Conference on Machine Learning (ICML 2003)}, Date-Added = {2009-11-15 00:06:22 +0100}, Date-Modified = {2009-11-15 00:06:35 +0100}, Editor = {T. Fawcett and N. Mishra}, Pages = {3--10}, Title = {{Hidden markov support vector machines}}, Year = {2003}} @conference{Provost+98, Author = {Provost, F. and Fawcett, T. and Kohavi, R.}, Booktitle = {Proceedings of the Fifteenth International Conference on Machine Learning}, Date-Added = {2010-02-01 21:27:35 +0100}, Date-Modified = {2010-02-01 21:54:52 +0100}, Pages = {445--453}, Title = {{The case against accuracy estimation for comparing induction algorithms}}, Year = {1998}} @phdthesis{Altun05, Author = {Altun, Y.}, Date-Added = {2009-11-15 00:06:22 +0100}, Date-Modified = {2009-11-15 00:06:49 +0100}, School = {Brown University}, Title = {Discriminative methods for label sequence learning}, Year = {2005}} @inproceedings{Anand+93, Author = {T. Anand and G. Kahn}, Booktitle = {Proceedings of the 1993 workshop on Knowledge Discovery in Databases}, Title = {Opportunity Explorer: Navigating Large Databases Using Knowledge Discovery Templates}, Year = 1993} @inproceedings{Andersen+94, Author = {O. Andersen and P. Dalsgaard}, Booktitle = {Proceedings of the International Conference on Spoken Language Processes}, Pages = {1627--1630}, Title = {A self-learning approach to transcription of {D}anish proper names}, Year = 1994} @proceedings{Anderson+88, Address = {Cambridge, MA}, Editor = {J. A. Anderson and E. 
Rosenfeld}, Publisher = MIT, Title = {Neurocomputing: Foundations of research}, Year = 1988} @book{Anderson85, Address = {Cambridge}, Author = {S. R. Anderson}, Date-Added = {2010-02-01 23:20:34 +0100}, Date-Modified = {2010-09-20 00:22:49 +0200}, Publisher = {Cambridge University Press}, Title = {Phonology in the Twentieth Century}, Year = {1985}} @inproceedings{Ando+05, Author = {Ando, R.K. and Zhang, T.}, Booktitle = {Proceedings of the 43rd Annual Meeting of the Association for Computational Linguistics}, Date-Added = {2009-11-15 00:06:22 +0100}, Date-Modified = {2009-11-15 00:07:03 +0100}, Pages = {1--9}, Title = {A high-performance semi-supervised learning method for text chunking}, Year = {2005}} @article{Andrews89, Author = {S. Andrews}, Journal = {Journal of Experimental Psychology: Learning, Memory, and Cognition}, Pages = {802--814}, Title = {Frequency and neighborhood effects on lexical access: Activation or Search?}, Volume = 15, Year = 1989} @article{Andrews92, Author = {S. Andrews}, Journal = {Journal of Experimental Psychology: Learning, Memory and Cognition}, Pages = {234--254}, Title = {Frequency and neighborhood effects on lexical access: lexical similarity or orthographic redundancy?}, Volume = 18, Year = 1992} @inproceedings{Androutsopoulos+00, Author = {I. Androutsopoulos and G. Paliouras and V. Karkaletsis and G. Sakkis and C. D. Spyropoulos and P. Stamatopoulos}, Booktitle = {Proceedings of the "Machine Learning and Textual Information Access" Workshop of the 4th European Conference on Principles and Practice of Knowledge Discovery in Databases}, Title = {Learning to Filter Spam E-Mail: A Comparison of a {N}aive {B}ayesian and a Memory-Based Approach}, Year = 2000} @book{Antworth90, Address = {Dallas, TX}, Author = {Evan L. 
Antworth}, Number = 16, Publisher = {Summer Institute of Linguistics}, Series = {Occasional Publications in Academic Computing}, Title = {PC-KIMMO: a two-level processor for morphological analysis}, Year = 1990} @inproceedings{Aparicio+06, Address = {Porto, Portugal}, Author = {G. Apar{\'{\i}}cio and I. Blanquer and V. Hern{\'a}ndez}, Booktitle = {Proceedings of the 7th Meeting of the International Meeting on High Performance Computing for Computational Science}, Title = {A parallel implementation of the $k$ nearest neighbours classifier in three levels: Threads, {MPI} processes and the {Grid}}, Year = 2006} @inproceedings{Appelt+93, Author = {D. Appelt and J. Hobbs and J. Bear and D. Israel and M. Tyson}, Booktitle = {Proceedings International Joint Conference on Artificial Intelligence}, Title = {{FASTUS}: A Finite-State Processor for Information Extraction From Real-World Text}, Year = 1993} @book{Appelt85, Address = {Cambridge}, Author = {Douglas E. Appelt}, Date-Added = {2010-01-29 15:05:43 +0100}, Date-Modified = {2010-01-29 15:05:52 +0100}, Publisher = {Cambridge University Press}, Title = {Natural Language Generation}, Year = {1985}} @inproceedings{Appelt90, Author = {Appelt, D. E.}, Booktitle = {AAAI Spring Symposium on {A}utomated {A}bduction}, Pages = {67--71}, Title = {A theory of Abduction based on model preference}, Year = 1990} @inproceedings{Apte+94, Author = {C. Apte and F. Damerau and S. Weiss}, Booktitle = {Proceedings of ACM-SIGIR Conference on Information Retrieval}, Title = {Towards language independent automated learning of text categorization models}, Year = 1994} @book{Archangeli+97, Address = {Oxford}, Date-Added = {2010-02-01 23:21:01 +0100}, Date-Modified = {2010-11-04 10:48:42 +0100}, Editor = {D. Archangeli and D. T. Langendoen}, Publisher = {Basil Blackwell}, Title = {Optimality Theory: An Overview}, Year = {1997}} @article{Ardila+86, Author = {A. Ardila and P. Montanes and J. 
Gempeler}, Journal = {Brain and Language}, Pages = {134--140}, Title = {Echoic Memory and Language Perception}, Volume = 29, Year = 1986} @inproceedings{Argamon+98, Address = {Montreal}, Author = {S. Argamon and I. Dagan and Y. Krymolowski}, Booktitle = {Proceedings of 36th annual meeting of the ACL}, Pages = {67--73}, Title = {A Memory-Based Approach to Learning Shallow Natural Language Patterns}, Year = 1998} @article{Argamon+99, Author = {S. Argamon and I. Dagan and Y. Krymolowski}, Journal = {Journal of Experimental and Theoretical Artificial Intelligence, special issue on memory-based learning}, Pages = {1--22}, Title = {A Memory-Based Approach to Learning Shallow Natural Language Patterns}, Volume = 10, Year = 1999} @inproceedings{Armstrong06, Address = {Trento, Italy}, Author = {S. Armstrong and M. Flanagan and Y. Graham and D. Groves and B. Mellebeek and S. Morrissey and N. Stroppa and A. Way}, Booktitle = {TC-STAR OpenLab on Speech Translation}, Title = {MaTrEx: Machine Translation Using Examples}, Year = 2006} @article{Atkeson+97, Author = {C. Atkeson and A. Moore and S. Schaal}, Date-Modified = {2010-01-03 11:00:04 +0100}, Journal = {Artificial Intelligence Review}, Keywords = {locally weighted learning, local learning}, Number = {1--5}, Pages = {11--73}, Title = {Locally weighted learning}, Volume = 11, Year = 1997} @article{Atkeson+97b, Author = {C. Atkeson and A. Moore and S. Schaal}, Date-Modified = {2010-01-03 11:00:16 +0100}, Journal = {Artificial Intelligence Review}, Keywords = {locally weighted learning, local learning}, Number = {1--5}, Pages = {75--113}, Title = {Locally weighted learning for control}, Volume = 11, Year = 1997} @inproceedings{Atkins93, Address = {Budapest}, Author = {Sue Atkins}, Booktitle = {Papers in Computational Lexicography, COMPLEX'93}, Title = {Tools for computer-aided lexicography: the {HECTOR} project}, Year = 1993} @inproceedings{Averbuch+87, Author = {A. Averbuch and L. Bahl and R. Bakis and P. Brown and G. 
Dagget and S. Das and K. Davies and S. De Gennaro and P. deSouza and E. Epstein and D. Fraleigh and F. Jelinek and B. Lewis and R. Mercer and J. Moorhead and A. Nadas and D. Nahamoo and M. Picheny and G. Shichman and P. Spinelli and D. Van Compernolle and H. Wilkens}, Booktitle = ICASSP, Date-Modified = {2011-01-29 16:37:22 +0100}, Title = {Experiments with the {Tangora} 20,000 word speech recognizer}, Year = 1987} @book{Baayen+93, Address = {Philadelphia, PA}, Author = {R. H. Baayen and R. Piepenbrock and H. {van Rijn}}, Date-Modified = {2010-01-03 11:00:36 +0100}, Keywords = {CELEX, lexicon}, Publisher = {Linguistic Data Consortium}, Title = {The {CELEX} lexical data base on {CD-ROM}}, Year = 1993} @article{Baayen+96, Author = {Harald Baayen and Richard Sproat}, Date-Added = {2010-01-29 15:06:07 +0100}, Date-Modified = {2010-01-29 15:06:15 +0100}, Journal = {Computational Linguistics}, Number = {2}, Pages = {155--166}, Title = {Estimating Lexical Priors for Low-Frequency Morphologically Ambiguous Forms}, Volume = {22}, Year = {1996}} @book{Baayen01, Address = {Dordrecht}, Author = {R. Harald Baayen}, Date-Added = {2010-01-29 15:06:48 +0100}, Date-Modified = {2010-01-29 15:06:54 +0100}, Note = {Text, Speech and Language Technology, vol. 18, series editors: Nancy Ide and Jean V\'{e}ronis}, Publisher = {Kluwer Academic Publishers}, Title = {Word Frequency Distributions}, Year = {2001}} @article{Baayen96, Address = {Cambridge, MA, USA}, Author = {R. Harald Baayen}, Date-Added = {2010-01-29 15:06:22 +0100}, Date-Modified = {2010-01-29 15:06:26 +0100}, Issn = {0891-2017}, Journal = {Computational Linguistics}, Number = {4}, Pages = {455--480}, Publisher = {MIT Press}, Title = {The effects of lexical specialization on the growth curve of the vocabulary}, Volume = {22}, Year = {1996}} @incollection{Baddeley95, Address = {London}, Author = {A. Baddeley}, Booktitle = {The Cognitive Neuroscience}, Chapter = 47, Editor = {M.S. 
Gazzaniga}, Pages = {755--764}, Publisher = MIT, Title = {Working Memory}, Year = 1995} @article{Baeza-Yates+92, Author = {Ricardo Baeza-Yates and Gaston H. Gonnet}, Journal = {Communications of the {ACM}}, Month = {October}, Number = 10, Title = {A New Approach to Text Searching}, Volume = 35, Year = 1992} @article{Bahl+83, Author = {L. R. Bahl and F. Jelinek and R. L. Mercer}, Journal = {IEEE Transactions on Pattern Analysis and Machine Intelligence}, Number = 2, Pages = {179--190}, Title = {A Maximum Likelihood Approach to Continuous Speech Recognition}, Volume = {PAMI-5}, Year = 1983} @article{Bailey+78, Author = {T. Bailey and A. K. Jain}, Journal = {IEEE Transactions on Systems, Man, and Cybernetics}, Number = 4, Pages = {311--313}, Title = {A Note on Distance-Weighted $k$-Nearest Neighbor Rules}, Volume = {SMC-8}, Year = 1978} @inproceedings{Baker+98, Address = {Montreal, Canada}, Author = {C. Baker and C. Fillmore and J. Lowe}, Booktitle = {Proceedings of the COLING-ACL}, Organization = {University of Montreal}, Pages = {86--90}, Title = {The {B}erkeley {F}rame{N}et Project}, Year = 1998} @article{Baker75, Author = {Baker, J.}, Date-Added = {2009-11-15 00:07:24 +0100}, Date-Modified = {2011-01-29 16:37:38 +0100}, Journal = {IEEE Transactions on Acoustics, Speech, and Signal Processing}, Number = {1}, Pages = {24--29}, Title = {The {DRAGON} system -- An overview}, Volume = {23}, Year = {1975}} @inproceedings{Bakir+04, Author = {Bak{\i}r, G. and Weston, J. and Sch\"olkopf, B.}, Booktitle = {Advances in Neural Information Processing Systems}, Date-Added = {2009-11-15 00:07:58 +0100}, Date-Modified = {2009-11-15 00:08:09 +0100}, Editor = {S. Thrun and L.K. Saul and B. Sch\"olkopf}, Pages = {449--456}, Title = {{Learning to Find Pre-Images}}, Volume = {16}, Year = {2004}} @inproceedings{Baldewein+04, Address = {Barcelona, Spain}, Author = {U. Baldewein and K. Erk and S. Pad\'{o} and D. 
Prescher}, Booktitle = {Senseval-3: Third International Workshop on the Evaluation of Systems for the Semantic Analysis of Text}, Date-Added = {2009-12-26 20:57:57 +0100}, Date-Modified = {2010-02-13 00:00:25 +0100}, Editor = {R. Mihalcea and P. Edmonds}, Pages = {64--68}, Publisher = {ACL}, Title = {Semantic role labelling with similarity-based generalization using EM-based clustering}, Year = {2004}} @inproceedings{Baldwin+03, Address = {New Brunswick, NJ}, Author = {T. Baldwin and F. Bond}, Booktitle = {Proceedings of the 2003 Conference on Empirical Methods in Natural Language Processing}, Editor = {M. Collins and M. Steedman}, Pages = {73--80}, Publisher = {ACL}, Title = {A plethora of methods for learning {E}nglish countability}, Year = 2003} @inproceedings{Balog+07, Address = {Amsterdam, The Netherlands}, Author = {K. Balog and T. Bogers and L. Azzopardi and M. {De Rijke} and A. {Van den Bosch}}, Booktitle = {SIGIR'07: Proceedings of the 30th Annual International ACM SIGIR Conference on Research and Development in Information Retrieval}, Date-Added = {2010-01-03 01:11:05 +0100}, Date-Modified = {2010-09-14 12:48:16 +0200}, Keywords = {ilk, information retrieval, expert retrieval, expertise retrieval, apropos}, Pages = {551--558}, Title = {Broad expertise retrieval for sparse data environments}, Year = {2007}, Bdsk-Url-1 = {http://ilk.uvt.nl/~toine/publications/balog.2007.sigir2007-paper.pdf}} @article{Balota+84, Author = {D. A. Balota and J. I. Chumbley}, Journal = {Journal of Experimental Psychology: Human Perception and Performance}, Pages = {340--357}, Title = {Are Lexical Decisions a Good Measure of Lexical Access? The Role of Word Frequency in the Neglected Decision Stage}, Volume = 10, Year = 1984} @article{Balota+85, Author = {D. A. Balota and J. I. 
Chumbley}, Journal = {Journal of Memory and Language}, Pages = {89--106}, Title = {The Locus of Word-frequency Effects in the Production Task: Lexical Access and/or Production?}, Volume = 24, Year = 1985} @article{Balota+90, Author = {D. A. Balota and J. I. Chumbley}, Journal = {Journal of Experimental Psychology: General}, Pages = {231--237}, Title = {Where are the effects of frequency in visual word recognition tasks? Right where we said they were! Comment on Monsell, Doyle, and Haggard (1989).}, Volume = 119, Year = 1990} @inproceedings{Bangalore+07, Address = {Prague, Czech Republic}, Author = {S. Bangalore and P. Haffner and S. Kanthak}, Booktitle = {Proceedings of the 45th Annual Meeting of the Association of Computational Linguistics}, Month = {June}, Pages = {152--159}, Publisher = {Association for Computational Linguistics}, Title = {Statistical Machine Translation through Global Lexical Selection and Sentence Reconstruction}, Url = {http://www.aclweb.org/anthology/P/P07/P07-1020}, Year = {2007}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/P/P07/P07-1020}} @article{Bangalore+99, Author = {Srinivas Bangalore and Aravind K. Joshi}, Journal = {Computational Linguistics}, Number = 2, Pages = {237-265}, Title = {Supertagging: An Approach to Almost Parsing}, Volume = 25, Year = 1999} @inproceedings{Banerjee+03, Address = {Mexico City}, Author = {Banerjee, S. and Pedersen, T.}, Booktitle = {Proceedings of the Fourth International Conference on Intelligent Text Processing and Computational Linguistics}, Month = {February}, Pages = {370-381}, Title = {The Design, Implementation, and Use of the {N}gram {S}tatistic {P}ackage}, Year = {2003}} @inproceedings{Banko+01, Author = {Banko, M. and Brill, E.}, Booktitle = ACL01, Pages = {26--33}, Publisher = {Association for Computational Linguistics}, Title = {Scaling to Very Very Large Corpora for Natural Language Disambiguation}, Year = 2001} @incollection{Bar-Hillel60, Address = {New York, NY}, Author = {Y. 
Bar-Hillel}, Booktitle = {Advances in Computers}, Editor = {F. L. Alt}, Pages = {91--163}, Publisher = {Academic Press}, Title = {The present status of automatic translation of languages}, Volume = {I}, Year = 1960} @book{Bar-Hillel70, Address = {Amsterdam, The Netherlands}, Author = {Y. Bar-Hillel}, Date-Modified = {2011-06-21 18:25:05 +0200}, Publisher = {North Holland}, Title = {Aspects of language}, Year = 1970} @book{Barg96, Address = {T\"{u}bingen}, Author = {Petra Barg}, Publisher = {Max Niemeyer Verlag}, Series = {Linguistische Arbeiten}, Title = {Automatischer Erwerb von linguistischem Wissen: Ein Ansatz zur Inferenz von DATR-Theorien}, Volume = 352, Year = 1996} @book{Barlow+00, Address = {Stanford}, Author = {M. Barlow and S. Kemmer}, Date-Modified = {2010-02-14 23:13:11 +0100}, Publisher = {CSLI Publications}, Title = {Usage-Based Models of Language}, Year = 2000} @incollection{Barlow85, Author = {H.B. Barlow}, Booktitle = {Models of the visual cortex}, Editor = {D. Rose and V.G. Dobson}, Title = {Cerebral cortex as model builder}, Year = 1985} @article{Baron+76, Author = {J. Baron and C. Strawson}, Journal = {Journal of Experimental Psychology: Human Perception and Performance}, Pages = {386--393}, Title = {Use of orthographic and word-specific knowledge in reading words aloud}, Volume = 2, Year = 1976} @book{Barton+87, Address = {Cambridge, MA}, Author = {G. E. Barton and R. Berwick and E. Ristad}, Publisher = MIT, Title = {Computational complexity and natural language}, Year = 1987} @article{Basili+96, Author = {R. Basili and M. T. Pazienza and P. Velardi}, Journal = {Artificial Intelligence}, Pages = {59--99}, Title = {An empirical symbolic approach to natural language processing}, Volume = 85, Year = 1996} @techreport{Bates+92, Author = {E. A. Bates and J. L. 
Elman}, Institution = {Centre for Research in Language, University of California, San Diego}, Number = 9202, Title = {Connectionism and the study of change}, Year = 1992} @article{Bates97, Author = {Robin Bates}, Date-Added = {2010-01-29 15:27:36 +0100}, Date-Modified = {2010-01-29 15:27:41 +0100}, Journal = {Lingua Franca}, Note = {October}, Title = {The Corrections Officer: {C}an {J}ohn {K}idd save {U}lysses}, Year = {1997}} @book{Bauer83, Address = {Cambridge, UK}, Author = {L. Bauer}, Publisher = CUP, Title = {English word-formation}, Year = 1983} @book{Bauer88, Address = {Edinburgh, UK}, Author = {L. Bauer}, Publisher = {Edinburgh University Press}, Title = {Introducing linguistic morphology}, Year = 1988} @article{Baum72, Author = {L. E. Baum}, Journal = {Inequalities}, Title = {An inequality and an associated maximization technique in statistical estimation of probabilistic functions of a Markov process}, Volume = {3:1-8}, Year = 1972} @inproceedings{Beale88, Author = {A. D. Beale}, Booktitle = ANLP, Date-Modified = {2010-09-18 14:36:44 +0200}, Keywords = {part-of-speech tagging}, Title = {Lexicon and grammar in probabilistic tagging of written English}, Year = 1988} @misc{Beckwith+??, Author = {Richard Beckwith and George A. Miller and Randee Tengi}, Title = {Design and Implementation of the WordNet Lexical Database and Searching Software}} @inproceedings{Beesley90, Author = {K. Beesley}, Booktitle = {Proceedings of the {Second Cambridge Conference}: Bilingual Computing in {Arabic} and {English}}, Pages = {no pagination}, Title = {Finite-State Description of {Arabic} Morphology}, Year = 1990} @inproceedings{Beesley98, Author = {K. Beesley}, Booktitle = COLING/ACL98, Location = {Montreal, Quebec, Canada}, Pages = {117--123}, Title = {Consonant Spreading in {Arabic} Stems}, Year = 1998} @inproceedings{Bejan+04, Address = {Barcelona, Spain}, Author = {C. Bejan and A. Moschitti and P. Mor\u{a}rescu and G. Nicolae and S. 
Harabagiu}, Booktitle = {Senseval-3: Third International Workshop on the Evaluation of Systems for the Semantic Analysis of Text}, Date-Added = {2009-12-26 20:57:57 +0100}, Date-Modified = {2013-08-07 09:26:42 +0000}, Editor = {Rada Mihalcea and Phil Edmonds}, Month = {July}, Pages = {73--76}, Publisher = {ACL}, Title = {Semantic parsing based on FrameNet}, Year = {2004}} @book{Bellman57, Address = {Princeton, NJ}, Author = {Richard Bellman}, Date-Added = {2010-02-01 23:21:34 +0100}, Date-Modified = {2010-02-01 23:21:34 +0100}, Publisher = {Princeton University Press}, Title = {Dynamic Programming}, Year = {1957}} @unpublished{Benedict90, Author = {P. Benedict}, Date-Modified = {2011-01-29 16:38:00 +0100}, Note = {University of Illinois, Urbana-Champaign, Inductive Learning Group, Beckman Institute for Advanced Technology and Science}, Title = {The second data generation program -- {DGP/2}}, Year = 1990} @article{Bentley+79, Author = {J. L. Bentley and J. H. Friedman}, Journal = {ACM Computing Surveys}, Number = 4, Pages = {397--409}, Title = {Data Structures for Range Searching}, Volume = 11, Year = 1979} @unpublished{Berck93, Author = {P. Berck}, Date-Modified = {2010-09-14 13:00:58 +0200}, Keywords = {ilk}, Note = {Unpublished report}, Title = {Category disambiguation with a neural network}, Year = 1993} @article{Berger+96, Author = {A. Berger and S. {Della Pietra} and V. {Della Pietra}}, Journal = CL, Number = 1, Publisher = {{ACL}}, Title = {{Maximum Entropy Approach to Natural Language Processing}}, Volume = 22, Year = 1996} @inproceedings{Berger+98, Address = {Sydney, Australia}, Author = {A. Berger and H. Printz}, Booktitle = {International Conference on Spoken Language Processing (ICSLP'98)}, Title = {Recognition performance of a large-scale dependency-grammar language model}, Year = 1998} @mastersthesis{Berghmans94, Author = {J. Berghmans}, Date-Modified = {2010-09-18 14:40:34 +0200}, Keywords = {part-of-speech tagging, POS tagging}, School = {Dept. 
of Language and Speech, University of Nijmegen}, Title = {Wotan, een automatische grammatikale tagger voor het {N}ederlands}, Year = 1994} @mastersthesis{Berghmans95, Address = {Nijmegen, The Netherlands}, Author = {J. Berghmans}, Date-Modified = {2011-01-29 16:38:17 +0100}, Keywords = {POS tagging, part-of-speech tagging, wotan}, School = {TOSCA Research Group, University of Nijmegen}, Title = {WOTAN - Een probabilistische grammatikale tagger voor het {N}ederlands}, Year = 1995} @inproceedings{Berkel+88, Address = {Morristown, NJ, USA}, Author = {Brigitte van Berkel and Koenraad de Smedt}, Booktitle = {Proceedings of the second conference on Applied natural language processing}, Date-Added = {2010-01-29 15:27:46 +0100}, Date-Modified = {2010-01-29 15:27:53 +0100}, Location = {Austin, Texas}, Pages = {77--83}, Publisher = {Association for Computational Linguistics}, Title = {Triphone analysis: a combined method for the correction of orthographical and typographical errors}, Year = {1988}} @article{Berleant95, Author = {D. Berleant}, Journal = JNLE, Pages = {339--362}, Title = {Engineering word-experts for word disambiguation}, Year = 1995} @inproceedings{Besling94, Author = {S. Besling}, Booktitle = {Proceedings of {{\sc konvens}}-94}, Title = {Heuristical and statistical methods for grapheme-to-phoneme conversion}, Year = 1994} @article{Besner+90, Author = {D. Besner and L. Twilley and R. S. McCann and K. Seergobin}, Journal = {Psychological Review}, Pages = {432--446}, Title = {On the Connection Between Connectionism and Data: Are a Few Words Necessary?}, Volume = 97, Year = 1990} @incollection{Bever70, Address = {New York}, Author = {T. 
Bever}, Booktitle = {Cognition and the development of language}, Editor = {Hayes, J.}, Publisher = {Wiley}, Title = {The cognitive basis for linguistic structures}, Year = 1970} @inproceedings{Bharati+05, Address = {Ann Arbor, Michigan}, Author = {Akshar Bharati and Sriram Venkatapathy and Prashanth Reddy}, Booktitle = {Proceedings of the Ninth Conference on Computational Natural Language Learning (CoNLL-2005)}, Date-Added = {2009-12-26 20:57:57 +0100}, Date-Modified = {2009-12-26 20:58:48 +0100}, Month = {June}, Pages = {165--168}, Publisher = {ACL}, Title = {Inferring Semantic Roles Using Sub-Categorization Frames and Maximum Entropy Model}, Year = {2005}} @article{Biber93, Author = {Douglas Biber}, Journal = CL, Number = 3, Pages = {531--538}, Title = {Co-occurrence Patterns among Collocations: A Tool for Corpus-Based Lexical Knowledge Acquisition}, Volume = 19, Year = 1993} @inproceedings{Bick06, Author = {E. Bick}, Booktitle = {A Man of Measure: Festschrift in Honour of Fred Karlsson on his 60th Birthday; a special supplement to SKY Journal of Linguistics}, Pages = {387--396}, Title = {A Constraint Grammar Based Spellchecker for Danish with a Special Focus on Dyslexics}, Year = {2006}} @phdthesis{Biemann07, Author = {C. Biemann}, Date-Modified = {2010-09-20 00:28:14 +0200}, School = {Leipzig University}, Title = {Unsupervised and Knowledge-free Natural Language Processing in the Structure Discovery Paradigm}, Year = 2007} @techreport{Bies+95, Address = {Philadelphia}, Author = {A. Bies and M. Ferguson and K. Katz and R. MacIntyre}, Institution = {University of Pennsylvania}, Title = {Bracketing Guidelines for Treebank II Style, {Penn Treebank Project}}, Year = 1995} @article{Biewer+85, Author = {A. Biewer and C. Feneyrol and J. Ritzke and E. 
Stegentritt}, Date-Modified = {2011-01-29 16:38:29 +0100}, Journal = CL, Number = {2--3}, Title = {{ASCOF} -- A modular multilevel system for French-German translation}, Volume = 11, Year = 1985} @article{Bird94, Author = {S. Bird}, Journal = {Computational Linguistics}, Number = 3, Pages = {1--8}, Title = {Introduction to {C}omputational {P}honology}, Volume = 20, Year = 1994} @book{Bird96, Address = {Cambridge, UK}, Author = {S. Bird}, Publisher = CUP, Title = {Computational phonology: a constraint-based approach}, Year = 1996} @article{Bistarelli97, Author = {Bistarelli, S. and Montanari, U. and Rossi, F.}, Date-Added = {2009-11-15 00:08:41 +0100}, Date-Modified = {2009-11-15 00:08:52 +0100}, Journal = {Journal of the ACM}, Number = {2}, Pages = {201--236}, Title = {{Semiring-Based Constraint Satisfaction and Optimization}}, Volume = {44}, Year = {1997}} @inproceedings{Black+92, Address = {Arden Conference Center, Harriman, New York}, Annote = {decision trees, stochastic language models}, Author = {E. Black and F. Jelinek and J. Lafferty and D. Magerman and R. Mercer and S. Roukos}, Booktitle = {Fifth DARPA Workshop on Speech and Natural Language}, Date-Modified = {2009-11-14 19:03:32 +0100}, Editor = {M. Marcus}, Keywords = {history-based grammars}, Month = feb, Title = {Towards history-based grammars: using richer models for probabilistic parsing}, Year = 1992} @inproceedings{Black+93, Author = {E. Black and F. Jelinek and J. Lafferty and D. Magerman and R. Mercer and S. Roukos}, Booktitle = ACL, Pages = {31--37}, Title = {Towards history-based grammars: using richer models for probabilistic parsing}, Year = 1993} @inproceedings{Blaheta+00, Author = {D. Blaheta and E. Charniak}, Booktitle = {Proceedings of NAACL 2000}, Date-Modified = {2010-06-25 21:30:38 +0200}, Pages = {234--240}, Title = {Assigning Function Tags to Parsed Text}, Year = 2000} @article{Blair60, Author = {Charles R. 
Blair}, Date-Added = {2010-01-29 15:08:16 +0100}, Date-Modified = {2010-02-17 19:01:12 +0100}, Journal = {Information and Control}, Keywords = {spelling correction}, Pages = {60--67}, Title = {A Program for Correcting Spelling Errors}, Volume = {3}, Year = {1960}} @misc{Blake+98, Author = {C. Blake and C.J. Merz}, Date-Modified = {2011-01-29 16:38:46 +0100}, Institution = {University of California, Irvine, Dept. of Information and Computer Sciences}, Keywords = {machine learning, benchmarks}, Note = {Available on: {\tt\small http://www.ics.uci.edu/\~{}mlearn/MLRepository.html}}, Title = {{UCI} Repository of machine learning databases}, Year = 1998} @article{Blei+03, Author = {D. Blei and A. Ng and M. Jordan}, Journal = {Journal of Machine Learning Research}, Pages = {993--1022}, Title = {Latent Dirichlet Allocation}, Volume = 3, Year = 2003} @incollection{Blevins95, Address = {Cambridge, MA}, Author = {J. Blevins}, Booktitle = {The handbook of phonological theory}, Editor = {J. A. Goldsmith}, Pages = {206--244}, Publisher = {Blackwell Publishers}, Title = {The syllable in phonological theory}, Year = 1995} @book{Bloomfield33, Address = {New York, NY}, Author = {L. Bloomfield}, Notes = {Twelfth Impression, 1973}, Publisher = {Holt, Rinehart and Winston}, Title = {Language}, Year = 1933} @inproceedings{Blum+98, Author = {A. Blum and T. Mitchell}, Booktitle = {Proceedings of the 11th Annual Conference on Computational Learning Theory}, Pages = {92--100}, Title = {Combining labeled and unlabeled data with co-training}, Year = 1998} @article{Bobrow+68, Author = {D. G. Bobrow and J. B. Fraser}, Journal = {Communications of the ACM}, Pages = {766--772}, Title = {A phonological rule tester}, Volume = 11, Year = 1968} @incollection{Bock+89, Author = {Bock, J. and Kroch, A. S.}, Booktitle = {Linguistic structure in language processing}, Editor = {Carlson, G. 
and Tanenhaus, M.}, Pages = {157--196}, Title = {The isolability of syntactic processing}, Year = 1989} @inproceedings{Bod92, Author = {R. Bod}, Booktitle = COLING92, Pages = {855--859}, Title = {A Computational Model of Language Performance: Data Oriented Parsing}, Year = 1992} @inproceedings{Bod93, Address = {Utrecht}, Author = {Bod, R.}, Booktitle = {Proceedings of EACL}, Title = {Using an annotated corpus as a stochastic grammar}, Year = 1993} @phdthesis{Bod95, Address = {Amsterdam, The Netherlands}, Author = {R. Bod}, School = {ILLC, Universiteit van Amsterdam}, Title = {Enriching linguistics with statistics: Performance models of natural language}, Year = 1995} @book{Bod98, Author = {Rens Bod}, Publisher = CUP, Series = {CSLI Publications}, Title = {Beyond Grammar: An experience-based theory of language}, Year = 1998} @unpublished{Boers+93, Author = {E. J. W. Boers and H. Kuiper and B. L. M. Happel and I. G. Sprinkhuizen-Kuyper}, Note = {Unpublished manuscript}, Title = {Designing modular artificial neural networks}, Year = 1993} @inproceedings{Bogers+06, Address = {Enschede}, Author = {T. Bogers and A. {Van den Bosch}}, Booktitle = {Proceedings of the Sixth Belgian-Dutch Information Retrieval Workshop, DIR-2006}, Date-Modified = {2010-09-14 12:49:57 +0200}, Editor = {F. de Jong and W. Kraaij}, Keywords = {ilk, expert retrieval, information retrieval, apropos}, Pages = {49--55}, Publisher = {Neslia Paniculata}, Title = {Authoritative re-ranking in fusing authorship-based subcollection search results}, Year = 2006, Bdsk-Url-1 = {http://ilk.uvt.nl/~toine/publications/bogers.2006.ecir2006-poster.pdf}} @inproceedings{Bogers+06b, Author = {T. Bogers and A. 
{Van den Bosch}}, Booktitle = {Proceedings of the 28th European Conference on Information Retrieval, ECIR-2006}, Date-Modified = {2010-09-14 12:48:30 +0200}, Keywords = {ilk, expert search, information retrieval, apropos}, Pages = {519--522}, Publisher = {Springer Verlag}, Series = {Lecture Notes in Computer Science}, Title = {Authoritative re-ranking of search results}, Volume = 3936, Year = 2006, Bdsk-Url-1 = {http://ilk.uvt.nl/~toine/publications/bogers.2006.ecir2006-poster.pdf}} @inproceedings{Bogers+06c, Address = {Austin, TX}, Author = {T. Bogers and W. Thoonen and A. {Van den Bosch}}, Booktitle = {Proceedings of the 17th Annual ASIS\&T SIG/CR workshop on Social Classification}, Date-Added = {2010-01-03 00:50:03 +0100}, Date-Modified = {2010-09-14 12:49:40 +0200}, Keywords = {ilk, expert retrieval, text classification, apropos}, Title = {Expertise classification: Collaborative classification vs. automatic extraction}, Year = {2006}, Bdsk-Url-1 = {http://ilk.uvt.nl/~toine/publications/bogers.2006.sigcr2006-paper.pdf}} @inproceedings{Bogers+07, Address = {Minneapolis, MN}, Author = {T. Bogers and A. {Van den Bosch}}, Booktitle = {Proceedings of the 2007 ACM Conference on Recommender Systems}, Date-Added = {2010-01-03 01:19:24 +0100}, Date-Modified = {2010-09-14 12:48:38 +0200}, Keywords = {ilk, recommender systems, news recommendation, apropos}, Pages = {141--144}, Publisher = {ACM Press}, Title = {Comparing and evaluating information retrieval algorithms for news recommendation}, Year = {2007}, Bdsk-Url-1 = {http://ilk.uvt.nl/~toine/publications/bogers.2007.recsys2007-poster.pdf}} @inproceedings{Bogers+08, Address = {Maastricht, The Netherlands}, Author = {T. Bogers and K. Kox and A. {Van den Bosch}}, Booktitle = {Proceedings of the 8th Belgian-Dutch Information Retrieval Workshop (DIR 2008)}, Date-Added = {2010-01-03 10:33:43 +0100}, Date-Modified = {2010-09-14 12:49:01 +0200}, Editor = {E. Hoenkamp and M. de Cock and V. 
Hoste}, Keywords = {ilk, expert retrieval, apropos}, Pages = {21--28}, Title = {Using citation analysis for expert retrieval in workgroups}, Year = {2008}, Bdsk-Url-1 = {http://ilk.uvt.nl/~toine/publications/bogers.2008.dir2008-paper.pdf}} @inproceedings{Bogers+08b, Address = {Antwerp, Belgium}, Author = {T. Bogers and A. {Van den Bosch}}, Booktitle = {Proceedings of 2008 ECML/PKDD Discovery Challenge Workshop}, Date-Added = {2010-01-03 10:45:18 +0100}, Date-Modified = {2010-09-14 12:48:45 +0200}, Keywords = {ilk, spam detection, social bookmarking, language modeling, apropos}, Pages = {1--12}, Title = {Using Language Models for Spam Detection in Social Bookmarking}, Year = {2008}, Bdsk-Url-1 = {http://ilk.uvt.nl/~toine/publications/bogers.2008.discoverychallenge2008-paper.pdf}} @inproceedings{Bogers+08c, Author = {T. Bogers and A. {Van den Bosch}}, Booktitle = {RecSys '08: Proceedings of the 2008 ACM Conference on Recommender Systems}, Date-Added = {2010-01-03 10:46:17 +0100}, Date-Modified = {2011-02-09 09:37:06 +0100}, Keywords = {ilk, recommender systems, social bookmarking, apropos, web 2.0}, Month = {October}, Pages = {287--290}, Publisher = {ACM Press}, Title = {Recommending Scientific Articles using CiteULike}, Year = {2008}, Bdsk-Url-1 = {http://ilk.uvt.nl/~toine/publications/bogers.2008.recsys2008-paper.pdf}} @inproceedings{Bogers+09, Author = {T. Bogers and A. {Van den Bosch}}, Booktitle = {Proceedings of the 9th Belgian-Dutch Information Retrieval Workshop (DIR 2009)}, Date-Added = {2010-01-02 19:15:41 +0100}, Date-Modified = {2011-02-09 09:36:54 +0100}, Editor = {R. Aly and C. Hauff and I. den Hamer and D. Hiemstra and T. Huibers and F. 
de Jong}, Keywords = {ilk, spam, social bookmarking, apropos, spam detection, spam filtering, vici, web 2.0}, Pages = {87--94}, Title = {Using language modeling for spam detection in social reference manager websites}, Year = {2009}, Bdsk-Url-1 = {http://ilk.uvt.nl/~toine/publications/bogers.2009.dir2009-paper.pdf}} @inproceedings{Bogers+09b, Author = {T. Bogers and A. {Van den Bosch}}, Booktitle = {Proceedings of the ACM RecSys '09 workshop on Recommender Systems and the Social Web}, Date-Added = {2010-01-03 10:56:27 +0100}, Date-Modified = {2011-02-09 09:36:38 +0100}, Editor = {D. Jannach and W. Geyer and J. Freyne and S. S. Anand and C. Dugan and B. Mobasher and A. Kobsa}, Keywords = {ilk, collaborative filtering, recommender systems, social bookmarking, apropos, web 2.0}, Month = {October}, Pages = {9--16}, Title = {Collaborative and Content-based Filtering for Item Recommendation on Social Bookmarking Websites}, Year = {2009}, Bdsk-Url-1 = {http://ilk.uvt.nl/~toine/publications/bogers.2009.recsys2009-workshop.pdf}} @phdthesis{Bogers09, Address = {Tilburg, The Netherlands}, Author = {T. Bogers}, Date-Added = {2010-01-03 10:54:56 +0100}, Date-Modified = {2011-02-09 09:36:27 +0100}, Keywords = {ilk, recommender systems, social bookmarking, apropos, web 2.0}, Month = {December}, School = {Tilburg University}, Title = {Recommender Systems for Social Bookmarking}, Year = {2009}, Bdsk-Url-1 = {http://ilk.uvt.nl/~toine/phd-thesis/index.html}} @inproceedings{Boguraev+87, Author = {B. Boguraev and E. Briscoe and J. Carroll and D. Carter and C. Grover}, Booktitle = ACL87, Title = {{The derivation of a grammatically-indexed lexicon from the Longman Dictionary of Contemporary English}}, Year = 1987} @article{Bohanec+90, Author = {M. Bohanec and V. Rajkovic}, Journal = {Sistemica}, Number = 1, Pages = {145--157}, Title = {{DEX}: An expert system shell for decision support}, Volume = 1, Year = 1990} @article{Boland+98, Author = {Boland, J. E. 
and Boehm-Jernigan, H.}, Journal = {Journal of Memory and Language}, Number = 4, Pages = {684--719}, Title = {Lexical constraints and prepositional phrase attachment}, Volume = 39, Year = 1998} @book{Booij01, Address = {Oxford, UK}, Author = {G. Booij}, Publisher = {Oxford University Press}, Title = {The morphology of {D}utch}, Year = 2001} @book{Booij95, Author = {G. Booij}, Publisher = {Oxford: Clarendon Press}, Title = {The phonology of {D}utch}, Year = 1995} @inproceedings{Boser92, Author = {Boser, B.E. and Guyon, I.M. and Vapnik, V.N.}, Booktitle = {Proceedings of the Fifth Annual ACM Conference on Computational Learning Theory (COLT 1992)}, Date-Added = {2009-11-15 00:09:01 +0100}, Date-Modified = {2009-11-15 00:09:27 +0100}, Keywords = {support vector machines}, Pages = {144--152}, Title = {A training algorithm for optimal margin classifiers}, Year = {1992}} @article{Bottou+92, Author = {L. Bottou and V. Vapnik}, Journal = {Neural Computation}, Pages = {888--900}, Title = {Local learning algorithms}, Volume = 4, Year = 1992} @phdthesis{Bottou91, Author = {Bottou, L.}, Date-Added = {2009-11-15 00:09:37 +0100}, Date-Modified = {2009-11-15 00:09:47 +0100}, School = {Universit\'{e} de Paris XI}, Title = {Une Approche th\'eorique de l'Apprentissage Connexionniste: Applications \`a la Reconnaissance de la Parole}, Year = {1991}} @inproceedings{Bourigault92, Author = {Didier Bourigault}, Booktitle = COLING, Pages = {977-981}, Title = {Surface grammatical analysis for the extraction of terminological noun phrases}, Year = 1992} @article{Boutell04, Author = {Boutell, M.R. and Luo, J. and Shen, X. and Brown, C.M.}, Date-Added = {2009-11-15 00:09:56 +0100}, Date-Modified = {2009-11-15 00:10:03 +0100}, Journal = {Pattern Recognition}, Number = {9}, Pages = {1757--1771}, Title = {{Learning multi-label scene classification}}, Volume = {37}, Year = {2004}} @article{Brachman+93, Author = {R. Brachman and P. Selfridge and L. Terveen and B. Altman and A. Borgida and F. 
Halper and T. Kirk and A. Lazar and D. Mc{Guinness} and L. Resnick}, Journal = {International Journal of Intelligent and Cooperative Information Systems}, Title = {Integrated support for data archeology}, Year = 1993} @article{Braine92, Author = {Braine, M.}, Journal = {{Cognition}}, Pages = {77--100}, Title = {{What sort of innate structure is needed to ``bootstrap'' into syntax?}}, Volume = 45, Year = 1992} @inproceedings{Brants+97, Author = {Thorsten Brants and Wojciech Skut and Brigitte Krenn}, Booktitle = {Proceedings of EMNLP-2, Providence, Rhode Island}, Title = {Tagging Grammatical Functions}, Year = 1997} @inproceedings{Brants+98, Author = {Thorsten Brants and Wojciech Skut}, Booktitle = {Proceedings of the Conference on New Methods in Language Processing (NeMLaP-3), Australia}, Date-Modified = {2008-07-23 17:03:34 +0200}, Keywords = {treebanks}, Title = {Automation of Treebank Annotation}, Year = 1998} @inproceedings{Brants00, Author = {T. Brants}, Booktitle = {Proceedings of the 6th Applied NLP Conference, ANLP-2000, April 29 -- May 3, 2000, Seattle, WA}, Date-Modified = {2011-06-19 16:11:58 +0200}, Keywords = {part-of-speech tagging}, Pages = {224-231}, Title = {{TnT} -- A statistical Part-of-speech tagger}, Year = 2000} @techreport{Brants96, Address = {Saarbr{\"u}cken}, Author = {Brants, T.}, Date-Modified = {2011-01-29 16:39:26 +0100}, Institution = {Universit{\"a}t des Saarlandes, Computational Linguistics}, Keywords = {POS tagging, part-of-speech tagging, trigrams, HMM}, Title = {{TnT} -- A Statistical Part-of-Speech Tagger}, Year = 1996} @book{Brants99, Address = {Saarbr{\"u}cken, Germany}, Author = {Brants, T.}, Publisher = {German Research Center for Artificial Intelligence and Saarland University}, Series = {Saarbr{\"u}cken Dissertations in Computational Linguistics and Language Technology}, Title = {Tagging and Parsing with Cascaded Markov Models -- Automation of Corpus Annotation}, Year = 1999} @inproceedings{Breck+00, Author = {E. 
Breck and J. Burger and L. Ferro and D. House and M. Light and I. Mani}, Booktitle = TREC8, Date-Modified = {2012-01-15 20:25:12 +0000}, Pages = {499--506}, Title = {A Sys Called Qanda}, Year = 2000} @book{Breiman+84, Address = {Belmont, CA}, Author = {L. Breiman and J. Friedman and R. Ohlsen and C. Stone}, Publisher = {Wadsworth International Group}, Title = {Classification and regression trees}, Year = 1984} @techreport{Breiman96, Address = {Berkeley, CA}, Author = {L. Breiman}, Institution = {University of California, Statistics Department}, Number = 460, Title = {Bias, variance and arcing classifiers}, Year = 1996} @article{Breiman96a, Author = {L. Breiman}, Date-Modified = {2009-11-14 19:05:34 +0100}, Journal = {Machine Learning}, Keywords = {bagging}, Number = 2, Pages = {123--140}, Publisher = {Kluwer Academic}, Title = {Bagging Predictors}, Volume = 24, Year = 1996} @article{Breiman96b, Author = {L. Breiman}, Journal = {{Machine Learning}}, Number = 3, Pages = {{49--64}}, Publisher = {{Kluwer Academic}}, Title = {{Stacked Regressions}}, Volume = 24, Year = 1996} @inproceedings{Brennan+87, Author = {Suzan Brennan and Marylin Friedman and Carl Pollard}, Booktitle = ACL, Title = {A centering approach to pronouns}, Year = 1987} @inproceedings{Brent+96, Address = {Berlin}, Author = {M. Brent and T. Cartwright}, Booktitle = {Grammatical Inference: Learning Syntax from Sentences; Third International Colloquium, ICGI-96, Montpellier, France}, Date-Modified = {2010-09-20 00:26:49 +0200}, Editor = {L. Miclet and C. {de la Higuera}}, Pages = {84--94}, Publisher = {Springer}, Series = {Lecture Notes in Artificial Intelligence}, Title = {Lexical Categorization: Fitting Template Grammars by Incremental MDL Optimization}, Volume = 1147, Year = 1996} @article{Brent+96b, Author = {M. Brent and T. 
Cartwright}, Journal = {Cognition}, Pages = {93--125}, Title = {Distributional regularity and phonotactic constraints are useful for segmentation}, Volume = 61, Year = 1996} @book{Brent91, Address = {Cambridge MA}, Author = {Michael R. Brent}, Publisher = MIT, Title = {Automatic acquisition of subcategorization frames from unrestricted English}, Year = 1991} @inproceedings{Brent91b, Author = {Michael R. Brent}, Booktitle = ACL91, Pages = {209--214}, Title = {Automatic acquisition of subcategorization frames from untagged text}, Year = 1991} @misc{Brent93, Author = {Michael R. Brent}, Title = {Surface Cues and Robust Inference as a Basis for the Early Acquisition of Subcategorization Frames}, Year = 1993} @article{Brent93b, Author = {Michael R. Brent}, Journal = CL, Number = 2, Pages = {243-262}, Title = {From Grammar to Lexicon: Unsupervised Learning of Lexical Syntax}, Volume = 19, Year = 1993} @incollection{Brent94, Address = {{Cambridge, MA.}}, Author = {M. Brent}, Booktitle = {Reprinted in: Acquisition of the lexicon}, Editor = {Gleitman, L. and Landau, B.}, Publisher = MIT, Title = {{Acquisition of subcategorization frames using aggregated evidence from local syntactic cues}}, Year = 1994} @techreport{Breuker+93, Author = {D. M. Breuker and L. V. Allis and H. J. {Van den Herik}}, Institution = {University of Limburg, Maastricht: Department of Computer Science}, Number = {CS 93-01}, Title = {Syllabification using expert rules}, Year = 1993} @article{Brighton+02, Author = {H. Brighton and C. Mellish}, Journal = {Data Mining and Knowledge Engineering}, Pages = {153--172}, Title = {Advances in Instance Selection for Instance-Based Learning Algorithms}, Volume = 6, Year = 2002} @inproceedings{Brill+00, Author = {E. Brill and R. C. 
Moore}, Booktitle = {Proceedings of the 38th Annual Meeting of the ACL}, Date-Added = {2010-01-29 15:08:27 +0100}, Date-Modified = {2010-02-17 19:01:21 +0100}, Keywords = {spelling correction}, Pages = {286--293}, Title = {An improved error model for noisy channel spelling correction}, Year = {2000}} @inproceedings{Brill+90, Author = {Eric Brill and David Magerman and Mitchell P. Marcus and Beatrice Santorini}, Booktitle = {DARPA Speech and Natural Language Workshop}, Month = {June}, Title = {Deducing linguistic structure from the statistics of large corpora}, Year = 1990} @inproceedings{Brill+94, Author = {Brill, E. and P. Resnik}, Booktitle = {Proceedings of 15th annual conference on Computational Linguistics}, Title = {A Rule-Based Approach to Prepositional Phrase Attachment Disambiguation}, Year = 1994} @article{Brill+98, Author = {Eric Brill and Raymond J. Mooney}, Journal = {The {AI} Magazine}, Number = 4, Pages = {13--24}, Title = {An Overview of Empirical Natural Language Processing}, Volume = 18, Year = 1998} @inproceedings{Brill+98b, Address = {Montreal, Canada}, Author = {Brill, E. and Jun Wu}, Booktitle = {COLING-ACL'98}, Month = {August 10-14}, Pages = {191--195}, Title = {Classifier Combination for Improved Lexical Disambiguation}, Year = 1998} @book{Brill+98c, Editor = {E. Brill and R. 
Mooney}, Publisher = {AAAI}, Title = {The {AI} Magazine: Special Issue on Empirical Natural Language Processing}, Volume = 18, Year = 1998} @conference{Vandeghinste+04b, Author = {Vincent Vandeghinste and Yi Pan}, Booktitle = {Proceedings of the ACL Workshop on Text Summarization}, Date-Added = {2009-11-15 14:36:23 +0100}, Date-Modified = {2009-11-15 14:36:39 +0100}, Pages = {89--95}, Title = {Sentence compression for automated subtitling: A hybrid approach}, Year = {2004}} @conference{Vandeghinste+04, Author = {Vincent Vandeghinste and Erik {Tjong Kim Sang}}, Booktitle = {Proceedings of LREC 2004}, Date-Added = {2009-11-15 14:35:54 +0100}, Date-Modified = {2010-11-26 19:40:10 +0100}, Title = {Using a parallel transcript/subtitle corpus for sentence compression}, Year = {2004}} @inproceedings{Brill91, Author = {Eric Brill}, Booktitle = ACL91, Pages = {339--340}, Title = {Discovering the lexical features of a language}, Year = 1991} @inproceedings{Brill92, Address = {Trento, Italy}, Author = {E. Brill}, Booktitle = {Proceedings of the Third ACL Applied NLP}, Date-Modified = {2009-11-14 18:57:49 +0100}, Keywords = {part-of-speech tagging}, Pages = {152-155}, Title = {A simple rule-based part-of-speech tagger}, Year = 1992} @inproceedings{Brill92a, Author = {Eric Brill}, Booktitle = DARPA, Date-Modified = {2009-11-14 17:36:06 +0100}, Keywords = {part-of-speech tagging}, Title = {A simple rule-based part of speech tagger}, Year = 1992} @phdthesis{Brill93, Author = {E. 
Brill}, School = {University of Pennsylvania}, Title = {A Corpus-Based Approach to Language Learning}, Year = 1993} @inproceedings{Brill93b, Author = {Eric Brill}, Booktitle = ACL, Title = {Automatic grammar induction and parsing free text: A transformation-based approach}, Year = 1993} @conference{Lin03, Author = {Chin-Yew Lin}, Booktitle = {Proceedings of the Sixth International Workshop on Information Retrieval with Asian Languages}, Date-Added = {2009-11-15 14:34:16 +0100}, Date-Modified = {2009-11-15 14:34:24 +0100}, Pages = {1--9}, Title = {Improving summarization performance by sentence compression - A pilot study}, Volume = {2003}, Year = {2003}} @inproceedings{Brill94, Author = {E. Brill}, Booktitle = {Proceedings AAAI '94}, Title = {Some advances in Transformation-based part-of-speech tagging}, Year = 1994} @article{Brill95, Author = {Brill, E.}, Date-Modified = {2009-11-14 17:36:00 +0100}, Journal = {Computational Linguistics}, Keywords = {part-of-speech tagging}, Number = 4, Pages = {543--565}, Title = {Transformation-Based Error-Driven Learning and Natural Language Processing: A Case Study in Part of Speech Tagging}, Volume = 21, Year = 1995} @incollection{Brill97, Author = {E. Brill}, Booktitle = {Natural Language Processing Using Very Large Corpora}, Date-Modified = {2009-11-14 17:35:33 +0100}, Keywords = {part-of-speech tagging}, Publisher = {Kluwer Academic Press}, Title = {Unsupervised Learning of Disambiguation Rules for Part of Speech Tagging}, Year = 1997} @article{Briscoe+93, Author = {T. Briscoe and J. Carroll}, Journal = CL, Number = 1, Pages = {25--60}, Title = {Generalized probabilistic {LR} parsing of natural language corpora with unification-based grammars}, Volume = 19, Year = 1993} @conference{Cohn+08, Author = {Cohn, T. 
and Lapata, M.}, Booktitle = {Proceedings of the 22nd International Conference on Computational Linguistics-Volume 1}, Date-Added = {2009-11-15 14:32:22 +0100}, Date-Modified = {2009-11-15 14:32:33 +0100}, Organization = {Association for Computational Linguistics}, Pages = {137--144}, Title = {Sentence compression beyond word deletion}, Year = {2008}} @inproceedings{Briscoe+97, Author = {T. Briscoe and J. Carroll}, Booktitle = {Proceedings of ANLP}, Title = {Automatic Extraction of Subcategorization from Corpora}, Year = 1997} @article{Britt94, Author = {M. A. Britt}, Journal = {Journal of Memory and Language}, Pages = {251--283}, Title = {The Interaction of Referential Ambiguity and Argument Structure in the Parsing of Prepositional Phrases}, Volume = 33, Year = 1994} @article{Broca61, Author = {P. Broca}, Journal = {{Bulletin de la soci\'{e}t\'{e} anatomique de Paris}}, Pages = {330--356}, Title = {{Remarque sur le siege de la facult\'{e} du language articul\'{e}, suivie d'une observation d'aph\'{e}mie (perte de la parole)}}, Volume = 36, Year = 1861} @inproceedings{Brown+03, Author = {R.D. Brown and P.N. Bennett and J.G. Carbonell and R. Hutchinson and P. Jansen}, Booktitle = {Proceedings of MT Summit IX}, Date-Added = {2009-11-15 00:10:22 +0100}, Date-Modified = {2009-11-15 00:10:48 +0100}, Keywords = {machine translation}, Pages = {24--31}, Title = {Reducing boundary friction using translation-fragment overlap}, Year = {2003}} @incollection{Brown+85, Address = {{Orlando}}, Author = {J. Brown and E. Perecman}, Booktitle = {Speech and Language Evaluation in Neurology: Adult Disorders}, Date-Modified = {2008-07-23 16:24:43 +0200}, Editor = {Darby, J.}, Keywords = {microgenesis, human language processing, brain, aphasia, evolution}, Publisher = {Grune \& Stratton Inc.}, Title = {Neurological Basis of Language Processing}, Year = 1985} @inproceedings{Brown+88, Author = {P. Brown and J. Cocke and S. {Della Pietra} and V. {Della Pietra} and F. Jelinek and R.L. 
Mercer and P.S. Roossin}, Booktitle = COLING, Title = {A statistical approach to language translation}, Year = 1988} @article{Brown+90, Author = {P. Brown and J. Cocke and S. {Della Pietra} and V. {Della Pietra} and F. Jelinek and R.L. Mercer and P.S. Roossin}, Journal = CL, Number = 2, Pages = {79--85}, Title = {A statistical approach to language translation}, Volume = 16, Year = 1990} @inproceedings{Brown+91, Author = {P. Brown and S. {Della Pietra} and V. {Della Pietra} and R. Mercer}, Booktitle = ACL, Date-Modified = {2009-09-06 20:34:07 +0200}, Keywords = {wsd, word sense disambiguation}, Pages = {264--270}, Title = {Word sense disambiguation using statistical methods}, Year = 1991} @inproceedings{Brown+91b, Author = {P. Brown and J. Lai and R. Mercer}, Booktitle = ACL, Title = {Aligning sentences in parallel corpora}, Year = 1991} @article{Brown+92, Author = {P. Brown and V. {Della Pietra} and P. deSouza and J. Lai and R.L. Mercer}, Journal = CL, Number = 4, Pages = {467--479}, Title = {Class-based N-gram Models of Natural Language}, Volume = 18, Year = 1992} @inproceedings{Brown+92b, Author = {P. Brown and S. {Della Pietra} and V. {Della Pietra} and J. Lafferty and R. Mercer}, Booktitle = TMI, Pages = {83--100}, Title = {Analysis, statistical transfer, and synthesis in machine translation}, Year = 1992} @article{Brown+93, Author = {P. Brown and S. {Della Pietra} and V. {Della Pietra} and Robert Mercer}, Journal = CL, Number = 2, Pages = {263--311}, Title = {The mathematics of statistical machine translation: parameter estimation}, Volume = 19, Year = 1993} @inproceedings{Brown+93b, Author = {P. Brown and S. {Della Pietra} and V. {Della Pietra} and R. Mercer}, Booktitle = ARPA, Pages = {202--205}, Title = {But dictionaries are data too}, Year = 1993} @article{Brown87, Author = {G. D. 
Brown}, Journal = {Journal of Memory and Language}, Pages = {1--23}, Title = {Resolving inconsistency: A Computational Model of Word Naming}, Volume = 26, Year = 1987} @article{Brown93, Author = {P. Brown and S. {Della Pietra} and V. {Della Pietra} and R.L. Mercer}, Journal = {Computational Linguistics}, Number = 2, Pages = {263--311}, Title = {The Mathematics of Statistical Machine Translation: Parameter Estimation}, Volume = 19, Year = 1993} @inproceedings{Brown99, Address = {Chester, England}, Author = {R. Brown}, Booktitle = {Proceedings of TMI 1999}, Pages = {22--32}, Title = {Adding Linguistic Knowledge to a Lexical Example-based Translation System}, Year = 1999} @inproceedings{Bruce+94, Author = {R. Bruce and J. Wiebe}, Booktitle = ACL94, Date-Modified = {2009-09-06 20:34:23 +0200}, Keywords = {wsd, word sense disambiguation}, Note = {retr. from: http://xxx.lanl.gov/list/cmp-lg/9406}, Title = {Word-Sense Disambiguation Using Decomposable Models}, Year = 1994} @book{Brunak+90, Address = {Singapore}, Author = {S. Brunak and B. Lautrup}, Publisher = {World Scientific}, Title = {Neural networks: Computers with intuition}, Year = 1990} @article{Brysbaert+96, Author = {M. Brysbaert and D.C. Mitchell}, Journal = {Quarterly Journal of Experimental Psychology}, Title = {Modifier attachment in sentence parsing: Evidence from Dutch}, Volume = {49A}, Year = 1996} @inproceedings{Buchholz+00, Address = {Athens, Greece}, Author = {S. Buchholz and A. {Van den Bosch}}, Booktitle = {Proceedings of the Second International Conference on Language Resources and Evaluation}, Date-Modified = {2010-09-14 22:23:12 +0200}, Keywords = {ilk, named-entity recognition, knaw}, Pages = {1215--1221}, Title = {Integrating seed names and n-grams for a named entity list and classifier}, Year = 2000} @article{Buchholz+01, Author = {S. Buchholz and W. 
Daelemans}, Date-Modified = {2010-01-02 19:54:47 +0100}, Journal = JNLE, Keywords = {question answering, ilk}, Title = {Complex answers: A case study using a {WWW} question answering system}, Year = 2001} @inproceedings{Buchholz+06, Address = {New York, NY}, Author = {S. Buchholz and E. Marsi}, Booktitle = {Proceedings of CoNLL-X, the Tenth Conference on Computational Natural Language Learning}, Date-Modified = {2010-09-14 11:56:26 +0200}, Keywords = {ilk, dependency parsing, shared task, CoNLL shared task, CoNLL}, Title = {{CoNLL-X} Shared Task on Multilingual Dependency Parsing}, Year = 2006} @inproceedings{Buchholz+99, Author = {S. Buchholz and J. Veenstra and W. Daelemans}, Booktitle = {EMNLP-VLC'99, the Joint SIGDAT Conference on Empirical Methods in Natural Language Processing and Very Large Corpora}, Date-Modified = {2010-01-02 19:54:03 +0100}, Keywords = {ilk, grammatical relation finding}, Month = Jun, Title = {Cascaded Grammatical Relation Assignment}, Year = 1999} @book{Buchholz02, Author = {S. Buchholz}, Date-Modified = {2010-01-02 19:53:52 +0100}, Keywords = {ilk, grammatical relation finding}, Publisher = {PhD thesis, University of Tilburg}, Title = {Memory-Based Grammatical Relation Finding}, Year = 2002} @inproceedings{Buchholz98, Author = {S. Buchholz}, Booktitle = {Proceedings of the ESSLLI-98 Workshop on Automated Acquisition of Syntax and Parsing, {Saarbr\"{u}cken}, Germany}, Date-Modified = {2010-01-02 19:53:44 +0100}, Keywords = {ilk}, Title = {Distinguishing Complements from Adjuncts using Memory-Based Learning}, Year = 1998} @techreport{Buckwalter02, Author = {T. Buckwalter}, Date-Modified = {2010-09-14 22:12:18 +0200}, Institution = {Linguistic Data Consortium}, Keywords = {Arabic}, Note = {available from: {\tt\small http://www.ldc.upenn.edu/}}, Number = {LDC2002L49}, Title = {Buckwalter {Arabic} Morphological Analyzer Version 1.0}, Year = 2002} @phdthesis{Buitelaar98, Author = {P. 
Buitelaar}, Month = {Feb}, School = {Computer Science Department, Brandeis University}, Title = {CoreLex: Systematic Polysemy and Underspecification}, Year = 1998} @inproceedings{Bullinaria93, Author = {J. Bullinaria}, Booktitle = {Proceedings of The Cognitive Science of Natural Language Processing Workshop '93, Dublin}, Title = {Connectionist modelling of reading aloud}, Year = 1993} @inproceedings{Burchardt+06, Address = {Genoa, Italy}, Author = {A. Burchardt and K. Erk and A. Frank and A. Kowalski and S. Pad\'o and M. Pinkal}, Booktitle = {Proceedings of the 5th {International Conference on Language Resources and Evaluation (LREC-2006)}}, Date-Added = {2009-12-26 20:57:57 +0100}, Date-Modified = {2010-02-13 00:00:48 +0100}, Title = {The {SALSA} corpus: a {G}erman corpus resource for lexical semantics}, Year = {2006}} @inproceedings{Burges96, Author = {C.J.C. Burges}, Booktitle = {Proceedings of the Thirteenth International Conference on Machine Learning (ICML '96)}, Date-Added = {2009-11-15 00:11:19 +0100}, Date-Modified = {2009-11-15 00:11:34 +0100}, Editor = {L. Saitta}, Keywords = {support vector machines}, Pages = {71-77}, Title = {Simplified Support Vector Decision Rules}, Year = {1996}} @article{Burgess+92, Author = {N. Burgess and G. Hitch}, Journal = {Journal of Memory and Language}, Pages = {429--460}, Title = {Toward a Network Model of the Articulatory Loop}, Volume = 31, Year = 1992} @manual{Burnage90, Author = {G. 
Burnage}, Date-Modified = {2009-11-14 17:41:33 +0100}, Organization = {Centre for Lexical Information, Nijmegen}, Title = {{CELEX}: A guide for users}, Year = 1990} @manual{R10, Address = {Vienna, Austria}, Author = {{R Development Core Team}}, Note = {{ISBN} 3-900051-07-0}, Organization = {R Foundation for Statistical Computing}, Title = {R: A Language and Environment for Statistical Computing}, Url = {http://www.R-project.org}, Year = {2010}, Bdsk-Url-1 = {http://www.R-project.org}} @inproceedings{Busser+05, Address = {Granada, Spain}, Author = {G.J. Busser and R. Morante}, Booktitle = {Proceedings of the XXI Congresso de la Sociedad Espanola para el Procesamiento del Lenguaje Natural, SEPLN-2005}, Date-Modified = {2010-01-02 19:55:26 +0100}, Keywords = {ilk, active learning, annotation}, Pages = {375--381}, Title = {Designing an active learning based system for corpus annotation}, Year = 2005, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/active_learning_system_annotation.pdf}} @inproceedings{Busser98, Address = {Czech Republic}, Author = {G.J. Busser}, Booktitle = {Proceedings TSD Conference}, Date-Added = {2009-12-26 20:57:57 +0100}, Date-Modified = {2011-01-29 16:40:02 +0100}, Editor = {P. Sojka and V. Matousek and K. Pala and I. Kopecek}, Keywords = {ilk, grapheme-phoneme conversion, speech synthesis}, Pages = {3--8}, Publisher = {Masaryk University}, Title = {{TreeTalk-D}: A Machine Learning approach to Dutch word pronunciation}, Year = {1998}, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/TSD98.ps.gz}} @article{Butts+02, Address = {Los Alamitos, CA, USA}, Author = {L. Butts and A. 
Cockburn}, Date-Modified = {2010-10-06 16:44:04 +0200}, Journal = {Australian Computer Science Communications}, Keywords = {text completion, mobile devices}, Number = 4, Pages = {55--59}, Publisher = {IEEE Computer Society Press}, Title = {An evaluation of mobile phone text input methods}, Volume = 24, Year = 2002} @book{Bybee85, Address = {Amsterdam, The Netherlands}, Author = {Bybee, Joan L.}, Date-Added = {2010-02-01 23:22:08 +0100}, Date-Modified = {2011-06-21 18:25:17 +0200}, Publisher = {John Benjamins}, Title = {Morphology: {A} study of the relation between meaning and form}, Year = {1985}} @incollection{Bybee88, Address = {San Diego}, Author = {J. Bybee}, Booktitle = {Theoretical morphology. Approaches in modern linguistics}, Editor = {M. Hammond and M. Noonan}, Pages = {119--141}, Publisher = {Academic Press}, Title = {Morphology as Lexical organization}, Year = 1988} @inproceedings{Califf+99, Author = {M. Califf and R. Mooney}, Booktitle = AAAI, Date-Modified = {2010-09-14 22:20:49 +0200}, Keywords = {information extraction}, Title = {Relational Learning of Pattern-Match Rules for Information Extraction}, Year = 1999} @incollection{Canisius+04, Author = {S. Canisius and A. {Van den Bosch}}, Booktitle = {Selected papers from the Thirteenth Computational Linguistics in the {N}etherlands Meeting}, Date-Modified = {2010-09-14 12:51:11 +0200}, Editor = {B. Decadt and G. {De Pauw} and V. Hoste}, Keywords = {ilk, shallow parsing, Dutch, vi}, Pages = {31--45}, Publisher = {University of Antwerp}, Title = {A memory-based shallow parser for spoken {D}utch}, Year = 2004, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/clin2003.pdf}} @inproceedings{Canisius+05, Address = {Bonn, Germany}, Author = {S. Canisius and A. {Van den Bosch} and W. Daelemans}, Booktitle = {Proceedings of the Fourth Learning Language in Logic Workshop}, Date-Modified = {2010-09-14 13:00:22 +0200}, Editor = {J. Cussens and C. 
Nedellec}, Keywords = {ilk, imix, structured output learning, vi}, Pages = {3--10}, Title = {Rule meta-learning for trigram-based sequence processing}, Year = 2005} @inproceedings{Canisius+06, Address = {Trento, Italy}, Author = {S. Canisius and A. {Van den Bosch} and W. Daelemans}, Booktitle = {Proceedings of the EACL 2006 Workshop on Learning Structured Information in Natural Language Applications}, Date-Modified = {2010-09-18 14:21:20 +0200}, Keywords = {ilk, constraint satisfaction inference, information extraction, imix, vici}, Pages = {9--16}, Title = {Constraint satisfaction inference: Non-probabilistic global inference for sequence labelling}, Year = 2006, Bdsk-Url-1 = {http://ilk.uvt.nl/~sander/publications/eacl06-ws.pdf}} @inproceedings{Canisius+06b, Address = {New York, NY}, Author = {S. Canisius and T. Bogers and A. {Van den Bosch} and J. Geertzen and E. {Tjong Kim Sang}}, Booktitle = {Proceedings of the Tenth Conference on Computational Natural Language Learning, CoNLL-X}, Date-Modified = {2011-02-08 15:52:03 +0100}, Keywords = {ilk, shared task, dependency parsing, constraint satisfaction inference, imix, apropos, vici}, Month = {June}, Pages = {176--180}, Title = {Dependency parsing by inference over high-recall dependency predictions}, Year = 2006, Bdsk-Url-1 = {http://www.aclweb.org/anthology-new/W/W06/W06-2924.pdf}} @inproceedings{Canisius+06c, Address = {Namur, Belgium}, Author = {S. Canisius and A. {Van den Bosch} and W. Daelemans}, Booktitle = {Proceedings of the Eighteenth Belgian-Dutch Conference on Artificial Intelligence, BNAIC-2006}, Date-Modified = {2010-09-18 14:21:39 +0200}, Editor = {P.-Y. Schobbens and W. Vanhoof and G. 
Schwanen}, Keywords = {ilk, information extraction, imix, structured output learning, vici}, Pages = {75--82}, Title = {Discrete versus probabilistic sequence classifiers for domain-specific entity chunking}, Year = 2006, Bdsk-Url-1 = {http://ilk.uvt.nl/~sander/publications/bnaic06.pdf}} @inproceedings{Canisius+07, Address = {Prague, Czech Republic}, Author = {S. Canisius and E. {Tjong Kim Sang}}, Booktitle = {Proc. of the CoNLL Shared Task Session of EMNLP-CoNLL 2007}, Date-Added = {2009-12-26 20:57:57 +0100}, Date-Modified = {2010-11-26 19:38:55 +0100}, Keywords = {ilk, dependency parsing, constraint satisfaction inference, imix}, Pages = {1124--1128}, Title = {A constraint satisfaction approach to dependency parsing}, Year = {2007}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/D/D07/D07-1121.pdf}} @inproceedings{Canisius+07b, Address = {Amsterdam, The Netherlands}, Author = {S. Canisius and C. Sporleder}, Booktitle = {Proceedings of the 18th BENELEARN Conference}, Date-Added = {2010-01-03 01:01:17 +0100}, Date-Modified = {2010-09-14 13:00:33 +0200}, Editor = {P. Adriaans and M. van Someren and and S. Katrenko}, Keywords = {ilk, information extraction, structured output learning, imix, mitch}, Title = {Learning to segment and label semi-structured documents with little or no supervision}, Year = {2007}, Bdsk-Url-1 = {http://ilk.uvt.nl/~sander/publications/benelearn07.pdf}} @inproceedings{Canisius+07c, Address = {Prague, Czech Republic}, Author = {S. Canisius and C. 
Sporleder}, Booktitle = {Proceedings of the 2007 Joint Conference on Empirical Methods in Natural Language Processing and Computational Natural Language Learning (EMNLP-CoNLL)}, Date-Added = {2010-01-03 01:05:50 +0100}, Date-Modified = {2012-11-19 21:00:49 +0000}, Keywords = {ilk, information extraction, unsupervised learning, text mining, imix, mitch}, Pages = {827--836}, Title = {Bootstrapping information extraction from field books}, Year = {2007}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/D/D07/D07-1087.pdf}} @inproceedings{Canisius+07d, Address = {Borovets, Bulgaria}, Author = {S. Canisius and A. {Van den Bosch}}, Booktitle = {Proceedings of the International Conference on Recent Advances in Natural Language Processing (RANLP-2007)}, Date-Added = {2010-01-03 01:14:34 +0100}, Date-Modified = {2010-09-18 14:22:01 +0200}, Keywords = {ilk, dependency parsing, constraint satisfaction inference, imix, vici}, Pages = {104--108}, Title = {Recompiling a knowledge-based dependency parser into memory}, Year = {2007}, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/mb-alpino.pdf}} @inproceedings{Canisius+09, Author = {S. Canisius and A. {Van den Bosch}}, Booktitle = {Proceedings of the 13th Annual Conference of the European Association for Machine Translation (EAMT-2009)}, Date-Added = {2010-01-02 19:34:35 +0100}, Date-Modified = {2010-09-14 12:50:37 +0200}, Keywords = {ilk, vici, imix, memory-based machine translation, constraint satisfaction inference}, Pages = {182--189}, Title = {A constraint satisfaction approach to machine translation}, Year = {2009}, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/mbmt-csi-final.pdf}} @incollection{Caplan95, Address = {Bradford}, Author = {D. Caplan}, Booktitle = {The Cognitive Neurosciences}, Chapter = 56, Editor = {M.S. Gazzaniga}, Pages = {871--879}, Publisher = MIT, Title = {The Cognitive Neuroscience of Syntactic Processing}, Year = 1995} @article{Caramazza88, Author = {A. Caramazza}, Journal = {{Am. Rev. 
Neurosci.}}, Pages = {395--421}, Title = {{Some Aspects of Language Processing Revealed through the Analysis of Acquired Dyslexia: the Lexical System}}, Volume = 11, Year = 1988} @book{Carberry90, Address = {Cambridge, MA}, Author = {Sandra Carberry}, Date-Added = {2010-02-01 23:22:22 +0100}, Date-Modified = {2010-02-01 23:22:27 +0100}, Publisher = {MIT Press}, Title = {Plan Recognition in Natural Language Dialog}, Year = {1990}} @inproceedings{Carbonell+88, Author = {J. Carbonell and R.D. Brown}, Booktitle = COLING, Title = {Anaphora resolution: A multi strategy approach}, Year = 1988} @article{Carbonell89, Author = {J. Carbonell}, Journal = {Artificial Intelligence}, Pages = {1--9}, Title = {Introduction: Paradigms for machine learning}, Volume = 40, Year = 1989} @inproceedings{Cardie+98, Address = {Montreal}, Author = {C. Cardie and D. Pierce}, Booktitle = {Proceedings of 36th annual meeting of the ACL}, Pages = {218--224}, Title = {Error-Driven Pruning of Treebank Grammars for Base Noun Phrase Identification}, Year = 1998} @inproceedings{Cardie+99, Author = {C. Cardie and D. Pierce}, Booktitle = {Proceedings of the Sixteenth International Conference on Machine Learning}, Title = {Combining Error-Driven Pruning and Classification for Partial Parsing}, Year = 1999} @book{Cardie+99b, Editor = {C. Cardie and R. Mooney}, Publisher = {Kluwer Academic Publishers}, Title = {Machine Learning: Special issue on Machine Learning and Natural Language}, Volume = 34, Year = 1999} @incollection{Cardie+99c, Author = {C. Cardie and R. Mooney}, Journal = {Machine Learning}, Pages = {1--5}, Title = {Guest Editors' Introduction: Machine Learning and Natural Language}, Volume = 11, Year = 1999} @inproceedings{Cardie93, Author = {C. Cardie}, Booktitle = {{AAAI-93}}, Pages = {798--803}, Title = {A case-based approach to knowledge acquisition for domain-specific sentence analysis}, Year = 1993} @phdthesis{Cardie94, Address = {Amherst, MA}, Author = {C. 
Cardie}, School = {University of Massachusetts}, Title = {Domain Specific Knowledge Acquisition for Conceptual Sentence Analysis}, Year = 1994} @inproceedings{Cardie96, Author = {C. Cardie}, Booktitle = {Proceedings of the Conference on Empirical Methods in NLP}, Organization = {University of Pennsylvania}, Title = {Automatic Feature Set Selection for Case-Based Learning of Linguistic Knowledge}, Year = 1996} @incollection{Cardie96b, Author = {Cardie, C.}, Booktitle = {Connectionist, Statistics and Symbolic Approaches to Learning for natural language processing, {L}ecture {N}otes in {A}rtificial {I}ntelligence}, Editor = {S. Wermter and E. Riloff and G. Scheler}, Pages = {315--328}, Publisher = {Springer}, Title = {Embedded Machine Learning Systems for natural language processing: A general framework}, Year = 1996} @incollection{Carello+92, Author = {C. Carello and M. T. Turvey and G. Lukatela}, Booktitle = {Haskins Laboratories Status Report on Speech Research}, Pages = {193--204}, Publisher = {Haskins Laboratories}, Title = {Can theories of word recognition remain stubbornly nonphonological?}, Year = 1992} @book{Carl+03, Address = {Dordrecht, The Netherlands}, Author = {M. Carl and A. Way}, Publisher = {Kluwer Academic Publishers}, Series = {Text, Speech and Language Technology}, Title = {Recent Advances in Example-Based Machine Translation}, Volume = 21, Year = 2003} @inproceedings{Carlson+01, Author = {A. J. Carlson and J. Rosen and D. Roth}, Booktitle = {IAAI}, Date-Added = {2010-01-29 15:28:00 +0100}, Date-Modified = {2010-09-18 14:38:28 +0200}, Keywords = {spelling correction}, Organization = {American Association for Artificial Intelligence}, Pages = {45-50}, Title = {Scaling Up Context-Sensitive Text Correction.}, Year = {2001}} @inproceedings{Carlson+07, Author = {A. Carlson and I. 
Fette}, Booktitle = {Proceedings of the IEEE International Conference on Machine Learning and Applications (ICMLA)}, Date-Added = {2010-02-12 22:06:09 +0100}, Date-Modified = {2010-09-18 14:37:00 +0200}, Keywords = {spelling correction}, Pages = {166--171}, Title = {Memory-Based Context-Sensitive Spelling Correction at Web Scale}, Year = {2007}} @techreport{Carlson+99, Address = {Urbana, Illinois}, Author = {A. J. Carlson and C. M. Cumby and J. L. Rosen and D. Roth}, Date-Modified = {2011-01-29 16:40:31 +0100}, Institution = {Cognitive Computation Group, Computer Science Department, University of Illinois}, Keywords = {winnow, snow, machine learning}, Number = {UIUCDCS-R-99-2101}, Title = {{SNoW} User Guide}, Year = 1999} @article{Carmesin94, Author = {H. O. Carmesin}, Journal = {Acta Physica Slovaca}, Pages = {311--330}, Title = {Statistical Neurodynamics: A model for universal properties of EEG -- data and perception}, Volume = 44, Year = 1994} @book{Carpenter+92, Address = {Cambridge, MA}, Author = {G. Carpenter and S. Grossberg}, Publisher = {{Bradford Books}}, Title = {{Pattern Recognition by Self-Organizing Neural Networks}}, Year = 1992} @incollection{Carpenter66, Address = {{London}}, Author = {Carpenter, E.}, Booktitle = {The isolability of syntactic processing}, Editor = {G. Kepes}, Publisher = {Studio Vista}, Title = {Image making in arctic art}, Year = 1966} @inproceedings{Carpuat+07, Author = {M. Carpuat and D. 
Wu}, Booktitle = {Proceedings of the 2007 Joint Conference on Empirical Methods in Natural Language Processing and Computational Natural Language Learning (EMNLP-CoNLL)}, Date-Modified = {2009-09-06 20:37:29 +0200}, Keywords = {wsd, word sense disambiguation, mt, machine translation}, Pages = {61--72}, Title = {Improving Statistical Machine Translation Using Word Sense Disambiguation}, Url = {http://www.aclweb.org/anthology/D/D07/D07-1007}, Year = {2007}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/D/D07/D07-1007}} @inproceedings{Carreras+02, Author = {X. Carreras and L. M\`arquez and V. Punyakanok and D. Roth}, Booktitle = ECML, Date-Modified = {2008-07-23 17:04:00 +0200}, Keywords = {clause identification}, Pages = {35--47}, Title = {Learning and Inference for Clause Identification}, Year = 2002} @inproceedings{Carreras+02b, Author = {X. Carreras and L. M{\`a}rques and L. Padr{\'o}}, Booktitle = {Proceedings of CoNLL-2002}, Date-Modified = {2010-09-14 12:53:08 +0200}, Editors = {D. Roth and A. {Van den Bosch}}, Keywords = {named-entity recognition, adaboost}, Pages = {167--170}, Publisher = {Taipei, Taiwan}, Title = {Named Entity Extraction using AdaBoost}, Year = 2002} @inproceedings{Carreras+03, Author = {X. Carreras and L. M{\`a}rquez and L. Padr{\'o}}, Booktitle = {Proceedings of the seventh Conference on Natural Language Learning at HLT-NAACL 2003}, Date-Added = {2009-11-15 00:12:28 +0100}, Date-Modified = {2010-09-14 22:13:53 +0200}, Editor = {W. Daelemans and M. Osborne}, Keywords = {named-entity recognition}, Pages = {152--155}, Title = {A Simple Named Entity Extractor using AdaBoost}, Year = {2003}} @inproceedings{Carreras+04, Author = {X. Carreras and L. 
M{\`a}rquez}, Booktitle = {Proceedings of CoNLL-2004}, Date-Modified = {2011-06-21 18:11:11 +0200}, Editors = {Hwee Tou Ng and Ellen Riloff}, Publisher = {Boston, MA}, Title = {Introduction to the CoNLL-2004 Shared Task: Semantic Role Labeling}, Year = 2004} @inproceedings{Carreras+05, Address = {Ann Arbor, Michigan}, Author = {X. Carreras and L. M{\`a}rquez}, Booktitle = {Proceedings of the Ninth Conference on Computational Natural Language Learning (CoNLL-2005)}, Date-Modified = {2011-07-02 22:18:58 +0200}, Pages = {152--164}, Publisher = {Association for Computational Linguistics}, Title = {Introduction to the {CoNLL}-2005 Shared Task: Semantic Role Labeling}, Year = 2005} @inproceedings{Carreras07, Author = {Carreras, X.}, Booktitle = {Proceedings of the CoNLL Shared Task Session of EMNLP-CoNLL 2007}, Date-Added = {2009-11-15 00:12:28 +0100}, Date-Modified = {2009-11-15 00:12:53 +0100}, Pages = {957--961}, Title = {Experiments with a Higher-Order Projective Dependency Parser}, Year = {2007}} @unpublished{Carroll+96, Author = {Glenn Carroll and Mats Rooth}, Month = {August}, Note = {available from: {\tt\small http://xxx.lanl.gov/cmp-lg/}}, Title = {Valence Induction with a Head-Lexicalized {PCFG}}, Year = 1996} @inproceedings{Carroll+98, Author = {J. Carroll and G. Minnen and T. Briscoe}, Booktitle = WVLC98, Date-Modified = {2012-01-15 20:23:45 +0000}, Title = {Can Subcategorisation Probabilities Help a Statistical Parser?}, Year = 1998} @book{Carroll65, Author = {L. Carroll}, Publisher = {Project Gutenberg}, Title = {Alice's Adventures in Wonderland}, Year = 1865} @inproceedings{Caruana+04, Author = {R. Caruana and A. Niculescu-Mizil}, Booktitle = {Proceedings of the Tenth ACM SIGKDD Conference}, Publisher = {ACM}, Title = {Data mining in metric space: An empirical analysis of supervised learning performance criteria}, Year = 2004} @inproceedings{Caruana+94, Address = {New Brunswick, NJ, USA}, Author = {Caruana, R. 
and Freitag, D.}, Booktitle = {Proceedings of the Eleventh International Conference on Machine Learning}, Date-Modified = {2008-07-23 17:04:24 +0200}, Keywords = {feature selection}, Pages = {28--36}, Publisher = {Morgan Kaufman}, Title = {Greedy Attribute Selection}, Year = 1994} @article{Castellanos+98, Author = {Antonio Castellanos}, Journal = {Computer Speech and Language}, Title = {Language Understanding and Subsequential Transducer Learning}, Year = 1998} @article{Cayley89, Author = {Cayley, A.}, Date-Added = {2009-11-15 00:13:03 +0100}, Date-Modified = {2009-11-15 00:13:17 +0100}, Journal = {Quarterly Journal of Pure and Applied Mathematics}, Pages = {376--378}, Title = {A theorem on trees}, Volume = {23}, Year = {1889}} @inproceedings{Chan+07, Author = {Chan, Yee Seng and Ng, Hwee Tou and Chiang, David}, Booktitle = {Proceedings of the 45th Annual Meeting of the Association of Computational Linguistics}, Date-Added = {2009-11-15 00:13:27 +0100}, Date-Modified = {2009-11-15 00:13:40 +0100}, Pages = {33--40}, Title = {Word Sense Disambiguation Improves Statistical Machine Translation}, Year = {2007}} @inproceedings{Chan+95, Author = {P. K. Chan and S. J. Stolfo}, Booktitle = {Proceedings of the Twelfth International Conference on Machine Learning}, Date-Modified = {2008-07-23 17:04:42 +0200}, Keywords = {meta-learning, voting, ensembles}, Pages = {90--98}, Title = {A comparative evaluation of voting and meta-learning of partitioned data}, Year = 1995} @unpublished{Chan+97, Author = {P. K. Chan and S. J. Stolfo}, Note = {submitted}, Title = {Metrics for analysing the integration of multiple learned classifiers}, Year = 1997} @article{Chan+99, Author = {Chan, P.K. and Stolfo, S.J. and Wolpert, D.}, Journal = {{Machine Learning}}, Number = {1--2}, Pages = {{5--7}}, Publisher = {{Kluwer Academic}}, Title = {{Guest Editors' Introduction. 
Special Issue on Integrating Multiple Learned Models for Improving and Scaling Machine Learning Algorithms}}, Volume = 36, Year = 1999} @article{Chandler92, Author = {Chandler, S.}, Journal = {Journal of Psycholinguistic research}, Number = 6, Pages = {593--606}, Title = {Are rules and modules really necessary for explaining language?}, Volume = 22, Year = 1992} @misc{Chang+05, Author = {C.C. Chang and C.J. Lin}, Date-Added = {2009-12-26 20:57:57 +0100}, Date-Modified = {2011-01-29 16:40:46 +0100}, Howpublished = {URL:http:// www.csie.ntu.edu.tw/ $\sim$cjlin/ papers/ libsvm.pdf}, Keywords = {svm, support vector machines}, Title = {{LIBSVM}: A Library for Support Vector Machines}, Year = {2005}} @techreport{Chapman05, Address = {Copenhagen, Denmark}, Author = {A. Chapman}, Institution = {Global Biodiversity Information Facility}, Number = {version 1.0}, Title = {Principles and methods of data cleaning: Primary species and species-occurrence data}, Year = {2005}} @book{Charivarius22, Address = {Haarlem}, Author = {{Charivarius (pseudonym of G. Nolst Trenit\'{e})}}, Publisher = {Tjeenk Willink}, Title = {Ruize-rijmen}, Year = 1922} @inproceedings{Charniak+03, Author = {Charniak, E. and Knight, K. and Yamada, K.}, Booktitle = {Proceedings of MT Summit IX}, Date-Added = {2009-11-15 00:13:51 +0100}, Date-Modified = {2009-11-15 00:14:01 +0100}, Pages = {40--46}, Title = {{Syntax-based language models for statistical machine translation}}, Year = {2003}} @inproceedings{Charniak+88, Author = {Charniak, E. and R. P. Goldman}, Booktitle = {Proceedings of ACL-88}, Date-Modified = {2009-12-26 21:10:28 +0100}, Pages = {87--94}, Title = {A Logic for semantic interpretation}, Year = 1988} @article{Charniak+96, Author = {E. Charniak and G. Carroll and J. Adcock and A. Cassandra and Y. Gotoh and J. Catz and M. Littman and J. 
McCann}, Date-Modified = {2010-06-25 21:31:14 +0200}, Journal = {Artificial Intelligence}, Pages = {45--57}, Title = {Taggers for Parsers}, Volume = 85, Year = 1996} @book{Charniak93, Address = {Cambridge, MA}, Author = {E. Charniak}, Publisher = MIT, Title = {{Statistical Language Learning}}, Year = 1993} @inproceedings{Charniak97, Author = {Charniak, E.}, Booktitle = AAAI, Title = {Statistical parsing with a context-free grammar and word statistics}, Year = 1997} @article{Charniak97a, Author = {Charniak, E.}, Journal = {The AI Magazine}, Title = {Statistical techniques for natural language parsing}, Year = 1997} @inproceedings{Cheeseman+88, Address = {Ann Arbor, MI}, Author = {P. Cheeseman and J. Kelly and M. Self and J. Stutz and W. Taylor and D. Freedman}, Booktitle = {Proceedings of the Fifth International Machine Learning Conference}, Date-Modified = {2009-11-14 17:40:16 +0100}, Pages = {54--64}, Publisher = {Morgan Kaufmann}, Title = {{AUTOCLASS}: a {B}ayesian classification system}, Year = 1988} @inproceedings{Chelba97, Address = {Morristown, NJ, USA}, Author = {Chelba, Ciprian}, Booktitle = {Proceedings of the eighth conference on European chapter of the Association for Computational Linguistics}, Citeulike-Article-Id = {3836927}, Doi = {10.3115/979617.979681}, Keywords = {lm, structured}, Location = {Madrid, Spain}, Pages = {498--500}, Posted-At = {2008-12-30 21:54:38}, Priority = {0}, Publisher = {Association for Computational Linguistics}, Title = {A structured language model}, Url = {http://dx.doi.org/10.3115/979617.979681}, Year = {1997}, Bdsk-Url-1 = {http://dx.doi.org/10.3115/979617.979681}} @inproceedings{Chelba+98, Author = {C. Chelba and F. Jelinek}, Booktitle = COLING/ACL98, Title = {Exploiting Syntactic Structure for Language Modeling}, Year = 1998} @inproceedings{Chen+03, Address = {Sapporo, Japan}, Author = {J. Chen and O. 
Rambow}, Booktitle = {Proceedings of the 2003 Conference on Empirical Methods in Natural Language Processing}, Date-Added = {2009-12-26 20:57:57 +0100}, Date-Modified = {2012-01-15 20:24:49 +0000}, Title = {Use of Deep Linguistic Features for the Recognition and Labeling of Semantic Arguments}, Year = {2003}} @inproceedings{Chen+96, Author = {S.F. Chen and J. Goodman}, Booktitle = {Proceedings of the 34th Annual Meeting of the ACL}, Location = {Santa Cruz, CA}, Month = {June}, Organization = {ACL}, Pages = {310--318}, Title = {An Empirical Study of Smoothing Techniques for Language Modelling}, Year = 1996} @techreport{Chen+98, Author = {S. Chen and J. Goodman}, Institution = {Harvard University}, Number = {TR-10-98}, Title = {An empirical study of smoothing techniques for language modeling}, Year = 1998} @inproceedings{Chen+98b, Author = {S. F. Chen and D. Beeferman and R. Rosenfeld}, Booktitle = {DARPA Broadcast News Transcription and Understanding Workshop}, Title = {Evaluation Metrics for Language Models}, Year = 1998} @inproceedings{Chen+99, Author = {Chen, J. and Bangalore, S. and Vijay-Shanker, K.}, Booktitle = {{Proceedings of the 1999 Joint SIGDAT Conference on Empirical Methods in Natural Language Processing and Very Large Corpora}}, Pages = {188--195}, Title = {{New Models for Improving Supertag Disambiguation}}, Year = 1999} @inproceedings{Chen+99b, Address = {Phoenix, Arizona}, Author = {S. F. Chen and R. Rosenfeld}, Booktitle = {ICASSP-99}, Title = {Efficient Sampling and Feature Selection in Whole Sentence Maximum Entropy Language Models}, Year = 1999} @inproceedings{Chen93, Author = {Stanley Chen}, Booktitle = ACL, Title = {Aligning sentences in bilingual corpora using lexical information}, Year = 1993} @inproceedings{Cherkauer96, Author = {K.J. Cherkauer}, Booktitle = {Working Notes of the AAAI Workshop on Integrating Multiple Learned Models}, Editor = {P. 
Chan}, Pages = {{15--21}}, Title = {{Human Expert--level Performance on a Scientific Image Analysis Task by a System Using Combined Artificial Neural Networks}}, Year = 1996} @incollection{Cherry90, Author = {L. L. Cherry}, Booktitle = {Unix Research System Papers}, Edition = 10, Pages = {609--610}, Publisher = {AT\&T}, Title = {Index}, Volume = 2, Year = 1990} @inproceedings{Chieu+03, Author = {Chieu, H.L. and Ng, H.T.}, Booktitle = {Proceedings of the seventh Conference on Natural Language Learning at HLT-NAACL 2003}, Date-Added = {2009-11-15 00:14:45 +0100}, Date-Modified = {2010-09-14 22:14:01 +0200}, Editor = {W. Daelemans and M. Osborne}, Keywords = {named-entity recognition}, Pages = {160--163}, Title = {Named Entity Recognition with a Maximum Entropy Approach}, Year = {2003}} @inproceedings{Chinchor95, Author = {N.A. Chinchor}, Booktitle = {Proceedings of the Sixth Message Understanding Conference (MUC-6)}, Date-Added = {2009-11-15 00:15:01 +0100}, Date-Modified = {2009-11-15 00:15:09 +0100}, Pages = {317--332}, Title = {Named Entity Task Definition}, Year = {1995}} @inproceedings{Chodorow+85, Author = {M. S. Chodorow and R. J. Byrd and G. E. Heidron}, Booktitle = ACL, Pages = {299--304}, Title = {Extracting Semantic Hierarchies from a Large On-Line Dictionary}, Year = 1985} @inproceedings{Chodorow+00, Author = {M. Chodorow and C. Leacock}, Booktitle = {Proceedings of NAACL`00}, Date-Modified = {2011-06-19 16:12:24 +0200}, Pages = {140--147}, Title = {An Unsupervised Method for Detecting Grammatical Errors}, Year = {2000}} @inproceedings{Chodorow+07, Address = {Prague, Czech Republic}, Author = {M. Chodorow and J. Tetreault and N.-R. 
Han}, Booktitle = {Proceedings of the Fourth ACL-SIGSEM Workshop on Prepositions}, Date-Modified = {2010-06-25 22:08:37 +0200}, Month = {June}, Pages = {25--30}, Publisher = {Association for Computational Linguistics}, Title = {Detection of Grammatical Errors Involving Prepositions}, Url = {http://www.aclweb.org/anthology/W/W07/W07-1604}, Year = {2007}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/W/W07/W07-1604}} @book{Chomsky+68, Address = {New York, NY}, Author = {Noam Chomsky and Morris Halle}, Publisher = {Harper \& Row}, Title = {The Sound Pattern of {E}nglish}, Year = 1968} @article{Chomsky56, Author = {Chomsky, N.}, Date-Added = {2009-11-15 00:15:17 +0100}, Date-Modified = {2009-11-15 00:15:30 +0100}, Journal = {IEEE Transactions on Information Theory}, Number = {3}, Pages = {113--124}, Title = {Three models for the description of language}, Volume = {2}, Year = {1956}} @book{Chomsky57, Address = {Den Haag}, Author = {N. Chomsky}, Publisher = {Mouton}, Title = {Syntactic structures}, Year = 1957} @book{Chomsky57b, Address = {{The Hague Holland}}, Author = {N. Chomsky}, Publisher = {{Mouton}}, Title = {{The structure of language}}, Year = 1957} @book{Chomsky65, Address = {Cambridge, MA}, Author = {N. Chomsky}, Publisher = MIT, Title = {Aspects of the theory of syntax}, Year = 1965} @book{Chomsky75, Address = {New York, NY}, Author = {N. Chomsky}, Publisher = {Pantheon Books}, Title = {Reflections on language}, Year = 1975} @book{Chomsky80, Address = {Oxford}, Author = {N. Chomsky}, Publisher = {Blackwell}, Title = {Rules and representations}, Year = 1980} @book{Chomsky81, Address = {Dordrecht}, Author = {Noam Chomsky}, Date-Added = {2010-02-01 23:22:50 +0100}, Date-Modified = {2010-02-01 23:22:50 +0100}, Publisher = {Foris}, Title = {Lectures on Government and Binding}, Year = {1981}} @book{Chomsky95, Address = {Cambridge, MA}, Author = {N. Chomsky}, Publisher = MIT, Title = {The minimalist program}, Year = 1995} @article{Choueka+85, Author = {Y. 
Choueka and S. Lusignan}, Journal = {Computers and the Humanities}, Title = {Disambiguation by Short Contexts}, Volume = 19, Year = 1985} @inproceedings{Christ94, Author = {Oliver Christ}, Booktitle = {Proceedings of COMPLEX, Conference on Computational Lexicography and Text Research}, Pages = {23--32}, Title = {A modular and flexible architecture for an integrated corpus query system}, Year = 1994} @book{Christianini+00, Author = {N. Christianini and J. Shawe-Taylor}, Publisher = CUP, Title = {An introduction to support vector machines (and other kernel-based learning methods)}, Year = 2000} @article{Chu+65, Author = {Chu, Y. and Liu, T.H.}, Date-Added = {2009-11-15 00:15:39 +0100}, Date-Modified = {2009-11-15 00:15:52 +0100}, Journal = {Science Sinica}, Pages = {1396--1400}, Title = {On the shortest arborescence of a directed graph}, Volume = {14}, Year = {1965}} @inproceedings{Church+89, Author = {K.W. Church and W.A. Gale and P. Hanks and D. Hindle}, Booktitle = IPW, Title = {Parsing, word association and typical predicate argument structure}, Year = 1989} @article{Church+90, Author = {K.W. Church and P. Hanks}, Journal = CL, Number = 1, Pages = {22--29}, Title = {Word association norms, mutual information, and Lexicography}, Volume = 16, Year = 1990} @article{Church+91, Author = {K.W. Church and W.A. Gale}, Journal = {Computer Speech and Language}, Number = 5, Pages = {19--54}, Title = {A comparison of the enhanced {Good-Turing} and deleted estimation methods for estimating probabilities of {English} bigrams}, Volume = 19, Year = 1991} @incollection{Church+91b, Address = {Hillsdale, NJ}, Author = {K.W. Church and W.A. Gale and P.Hanks and D.M. Hindle}, Booktitle = {Lexical Acquisition: Exploiting On-Line Resources to Build a Lexicon}, Editor = {U. Zernik}, Publisher = {Lawrence Erlbaum Associates}, Title = {Using statistics in lexical analysis}, Year = 1991} @article{Church+91c, Author = {Kenneth W. Church and William A. 
Gale}, Journal = {Statistics and Computing}, Month = {December}, Number = {2}, Pages = {93--103}, Publisher = {Springer Netherlands}, Title = {Probability scoring for spelling correction}, Volume = {1}, Year = {1991}} @article{Church+93, Author = {K.W. Church and R.L. Mercer}, Journal = CL, Pages = {1--24}, Title = {{Introduction to the Special Issue on Computational Linguistics Using Large Corpora}}, Volume = 19, Year = 1993} @inproceedings{Church+93b, Author = {K.W. Church and I. Dagan and W.A. Gale and P. Fung and J. Helfman and B. Satish}, Booktitle = {Proceedings of the Pacific Asia Conference on Formal and Computational Linguistics}, Title = {Aligning Parallel Texts: Do Methods Developed for {E}nglish-{F}rench Generalize to Asian Languages?}, Year = 1993} @inproceedings{Church+95, Author = {K.W. Church and W.A. Gale}, Booktitle = {Proceedings of Third the Workshop on Very Large Corpora}, Pages = {121--130}, Title = {Inverse document frequency (IDF): a measure of deviations from {Poisson}}, Year = 1995} @inproceedings{Church88, Author = {K.W. Church}, Booktitle = {Proceedings of Second Applied NLP (ACL)}, Title = {A Stochastic Parts Program and Noun Phrase Parser for Unrestricted Text}, Year = 1988} @incollection{Church92, Address = {University of Michigan, Ann Arbor}, Author = {K.W. Church}, Booktitle = {For Henry Kucera}, Editor = {A.W. Mackie and T.K. McAuley and C. Simmons}, Publisher = {Michigan Slavic Publications}, Title = {Current practice in part of speech tagging and suggestions for the future}, Year = 1992} @inproceedings{Church93, Author = {K.W. Church}, Booktitle = ACL, Title = {Char\_align: A program for aligning parallel texts at the character level}, Year = 1993} @inbook{Civit+06, Address = {Berlin}, Author = {M. Civit and {M.A.} Mart\'i and N. 
Buf\'i}, Chapter = {{Cat3LB} and {Cast3LB}: from constituents to dependencies}, Date-Added = {2009-12-26 20:57:57 +0100}, Date-Modified = {2009-12-26 21:03:10 +0100}, Pages = {141--153}, Publisher = {Springer Verlag}, Title = {Advances in Natural Language Processing ({LNAI}, 4139)}, Year = {2006}} @techreport{Civit02, Author = {M. Civit}, Date-Added = {2009-12-26 20:57:57 +0100}, Date-Modified = {2009-12-26 20:57:57 +0100}, Institution = {CliC--UB}, Title = {Gu\'ia para la anotaci\'on morfol\'ogica del corpus {CLiC-TALP} (versi\'on 3)}, Type = {{X-TRACT-II WP-00-06}}, Year = {2002}} @techreport{Civit03, Author = {M. Civit}, Date-Added = {2009-12-26 20:57:57 +0100}, Date-Modified = {2009-12-26 20:57:57 +0100}, Institution = {CliC--UB}, Title = {Gu\'ia para la anotaci\'on sint\'actica de {Cast3LB}: un corpus del espa\~nol con anotaci\'on sint\'actica, sem\'antica y pragm\'atica}, Type = {{X-TRACT-II WP-03-06 y 3LB-WP-02-01}}, Year = {2003}} @article{Clahsen99, Author = {H. Clahsen}, Journal = {Behavioral and Brain Sciences}, Pages = {991--1060}, Title = {Lexical entries and rules of language: A multidisciplinary study of {G}erman inflection}, Volume = 22, Year = 1999} @book{Clark+77, Address = {San Diego}, Author = {Herbert H. Clark and Eve V. Clark}, Date-Added = {2010-02-01 23:23:09 +0100}, Date-Modified = {2010-02-01 23:23:18 +0100}, Publisher = {Harcourt Brace Jovanovich}, Title = {Psychology and Language}, Year = {1977}} @article{Clark+89, Author = {P. Clark and T. Niblett}, Date-Modified = {2011-01-29 16:41:21 +0100}, Journal = {Machine Learning}, Pages = {261--284}, Title = {The {CN2} rule induction algorithm}, Volume = 3, Year = 1989} @inproceedings{Clark+91, Author = {P. Clark and R. 
Boswell}, Booktitle = {Proceedings of the Sixth European Working Session on Learning}, Pages = {151--163}, Publisher = {Berlin: Springer Verlag}, Title = {Rule induction with {CN2}: Some recent improvements}, Year = 1991} @inproceedings{Clark02, Address = {New Brunswick, NJ}, Author = {A. Clark}, Booktitle = {Proceedings of the 40th Meeting of the Association for Computational Linguistics}, Pages = {513--520}, Publisher = {ACL}, Title = {Memory-Based Learning of Morphology with Stochastic Transducers}, Year = 2002} @book{Clark93, Author = {E. Clark}, Publisher = CUP, Title = {The Lexicon in Acquisition}, Year = 1993} @inproceedings{Clarke+01, Author = {C.L.A. Clarke and G.V. Cormack and D.I.E. Kisman and T.R. Lynam}, Booktitle = {Proceedings of TREC-9}, Title = {Question Answering by Passage Selection (MultiText Experiments for TREC-9)}, Year = 2001} @inproceedings{Clarke+06, Address = {Morristown, NJ, USA}, Author = {James Clarke and Mirella Lapata}, Booktitle = {Proceedings of the 21st International Conference on Computational Linguistics and the 44th annual meeting of the Association for Computational Linguistics}, Date-Added = {2009-11-15 14:31:56 +0100}, Date-Modified = {2009-11-15 14:32:08 +0100}, Pages = {377--384}, Title = {Models for sentence compression: a comparison across domains, training requirements and evaluation measures}, Year = {2006}} @article{Clarke+08, Author = {James Clarke and Mirella Lapata}, Date-Added = {2009-11-15 14:31:56 +0100}, Date-Modified = {2009-11-15 14:32:17 +0100}, Journal = {Journal of Artificial Intelligence Research}, Pages = {399--429}, Title = {Global Inference for Sentence Compression: An Integer Linear Programming Approach}, Volume = {31}, Year = {2008}} @article{Clco95, Author = {C. L. Clarke and G. V. Cormack and F. J. 
Burkowski}, Journal = {The Computer Journal}, Pages = {43--56}, Title = {An Algebra for Structured Text Search and A Framework for its Implementation}, Volume = {38(1)}, Year = 1995} @incollection{Clements+95, Address = {Cambridge, MA}, Author = {G. N. Clements and E. V. Hume}, Booktitle = {The handbook of phonological theory}, Editor = {J. A. Goldsmith}, Pages = {245--306}, Publisher = {Blackwell}, Title = {The internal organization of speech sounds}, Year = 1995} @inproceedings{Coccaro+98, Address = {Sydney}, Author = {N. Coccaro and D. Jurafsky}, Booktitle = {ICSLP-98}, Title = {Towards better integration of semantic predictors in statistical language modeling}, Year = 1998} @inproceedings{Cohen+05, Author = {W.W. Cohen and V.R. Carvalho}, Booktitle = {Proceedings of the 19th International Joint Conference on Artificial Intelligence (IJCAI)}, Date-Added = {2009-11-15 00:16:03 +0100}, Date-Modified = {2009-11-15 00:16:20 +0100}, Pages = {671--676}, Title = {Stacked Sequential Learning}, Year = {2005}} @inproceedings{Cohen+96, Author = {W. Cohen and Y. Singer}, Booktitle = {Proceedings of the 19th Annual Int. ACM Conference on Research and Development in Information Retrieval}, Pages = {307--315}, Title = {Context-sensitive learning methods for text categorization}, Year = 1996} @inproceedings{Cohen95, Author = {W. Cohen}, Booktitle = {Proceedings of the 12th International Conference on Machine Learning}, Location = {San Mateo, CA}, Pages = {115--123}, Publisher = {Morgan Kaufmann}, Title = {Fast effective rule induction}, Year = 1995} @article{Cohn+09, Author = {T. Cohn and M. Lapata}, Date-Added = {2009-11-15 14:32:22 +0100}, Date-Modified = {2012-11-21 08:14:04 +0000}, Journal = {Journal of Artificial Intelligence Research}, Number = {1}, Pages = {637--674}, Title = {Sentence compression as tree transduction}, Volume = {34}, Year = {2009}} @inproceedings{Coker+90, Address = {Autrans, France}, Author = {C. Coker and K. W. Church and M. 
Liberman}, Booktitle = {Proceedings of the First {{\sc esca}} Workshop on Speech Synthesis}, Editor = {G. Bailly and C. Beno{{\^{\i}}}t}, Organization = {European Speech Communication Association}, Pages = {83--86}, Title = {Morphology and rhyming: two powerful alternatives to letter-to-sound rules in speech synthesis}, Year = 1990} @inproceedings{Collins+02, Author = {Collins, M. and Duffy, N.}, Booktitle = {Advances in Neural Information Processing Systems}, Date-Added = {2009-11-15 00:16:34 +0100}, Date-Modified = {2009-11-15 00:16:45 +0100}, Editor = {T.G. Dietterich and S. Becker and Z. Ghahramani}, Pages = {625--632}, Title = {{Convolution kernels for natural language}}, Volume = {14}, Year = {2001}} @inproceedings{Collins+95, Address = {Cambridge}, Author = {Collins, M.J and J. Brooks}, Booktitle = {Proceedings of the Third Workshop on Very Large Corpora}, Title = {Prepositional Phrase Attachment through a Backed-Off Model}, Year = 1995} @inproceedings{Collins+99, Author = {M. Collins and Y. Singer}, Booktitle = {EMNLP-VLC'99, the Joint SIGDAT Conference on Empirical Methods in Natural Language Processing and Very Large Corpora}, Month = Jun, Title = {Unsupervised models for named entity classification}, Year = 1999} @inproceedings{Collins02, Author = {Collins, M.}, Booktitle = {Proceedings of the ACL-02 conference on Empirical Methods in Natural Language Processing}, Date-Added = {2009-11-15 00:16:34 +0100}, Date-Modified = {2009-11-15 00:16:58 +0100}, Editor = {J. Hajic and Y. Matsumoto}, Pages = {1--8}, Title = {Discriminative training methods for hidden Markov models: theory and experiments with perceptron algorithms}, Year = {2002}} @inproceedings{Collins96, Author = {M.J. Collins}, Booktitle = ACL96, Date-Modified = {2012-01-15 20:24:05 +0000}, Month = {June}, Organization = {University of California, Santa Cruz, California, USA}, Title = {A New Statistical Parser Based on Bigram Lexical Dependencies}, Year = 1996} @inproceedings{Collins97, Author = {M. 
Collins}, Booktitle = EACL/ACL97, Title = {Three Generative, Lexicalised Models for Statistical Parsing}, Year = 1997} @phdthesis{Collins99, Author = {Michael Collins}, School = {University of Pennsylvania}, Title = {Head-Driven Statistical Models for natural Language Parsing}, Year = 1999} @incollection{Coltheart+77, Address = {Hillsdale, NJ}, Author = {M. Coltheart and E. Davelaar and T. Jonasson and D. Besner}, Booktitle = {Attention and Performance VI}, Editor = {S. Dornic}, Publisher = {Lawrence Erlbaum Associates}, Title = {Access to the internal lexicon}, Year = 1977} @article{Coltheart+93, Author = {M. Coltheart and B. Curtis and P. Atkins and M. Halter}, Journal = {Psychological Review}, Pages = {589--608}, Title = {Models of reading aloud: Dual-route and parallel-distributed-processing approaches}, Volume = 100, Year = 1993} @incollection{Coltheart78, Address = {London}, Author = {M. Coltheart}, Booktitle = {Strategies of Information Processing}, Editor = {G. Underwood}, Pages = {151--216}, Publisher = {Academic Press}, Title = {Lexical access in simple reading tasks}, Year = 1978} @article{Comeau+04, Author = {D. C. Comeau and W. J. Wilbur}, Date-Added = {2010-01-29 15:08:42 +0100}, Date-Modified = {2012-01-15 20:24:36 +0000}, Journal = {Journal of the {A}merican {S}ociety for {I}nformation {S}cience and {T}echnology}, Number = {2}, Pages = {169--177}, Title = {Non-word identification or spell checking without a dictionary}, Volume = {55}, Year = {2004}} @article{Comtat+95, Author = {Comtat, C. and C. Morel}, Date-Modified = {2008-07-23 16:01:30 +0200}, Journal = {{IEEE transactions on neural networks}}, Keywords = {PET, Kohonen maps, neurosciences, self-organisation}, Pages = {783--789}, Title = {{Approximate Reconstruction of PET Data with a Self-Organizing Neural Network}}, Volume = {6(3)}, Year = 1995} @article{Content+90, Author = {A. Content and P. Mousty and M. 
Radeau}, Journal = {L'Ann\'{e}e Psychologique}, Pages = {551--566}, Title = {Brulex: Une Base de Donn\'{e}es Lexicales Informatis\'{e}e pour le Fran\c{c}ais Ecrit et Parl\'{e}}, Volume = 90, Year = 1990} @techreport{Cooper+96, Author = {Robin Cooper and Dick Crouch and Jan {van Eijck} and Chris Fox and Josef {van Genabith} and Jan Jaspars and Hans Kamp and David Milward and Manfred Pinkal and Massimo Poesio and Steve Pulman}, Institution = {???}, Month = {January}, Note = {deliverable D16 -- final draft; with additional contributions from: Ted Briscoe, Holger Maier and Karsten Konrad}, Number = {LRE 62-051}, Title = {{FraCaS}: Using the Framework}, Year = 1996} @inproceedings{Cordeiro+09, Address = {Suntec, Singapore}, Author = {Cordeiro, Joao and Dias, Gael and Brazdil, Pavel}, Booktitle = {Proceedings of the 2009 Workshop on Language Generation and Summarisation (UCNLG+Sum 2009)}, Date-Added = {2009-11-15 14:32:57 +0100}, Date-Modified = {2009-11-15 14:33:09 +0100}, Month = {August}, Pages = {15--22}, Publisher = {Association for Computational Linguistics}, Title = {Unsupervised Induction of Sentence Compression Rules}, Year = {2009}} @inproceedings{Cortes+05, Author = {Cortes, C. and Mohri, M. and Weston, J.}, Booktitle = {Proceedings of the Twenty-Second International Conference on Machine Learning (ICML 2005)}, Date-Added = {2009-11-15 00:17:10 +0100}, Date-Modified = {2009-11-15 00:17:23 +0100}, Editor = {L. De Raedt and S. Wrobel}, Pages = {153--160}, Title = {A general regression technique for learning transductions}, Year = {2005}} @article{Cortes+95, Author = {C. Cortes and V. Vapnik}, Journal = {Machine Learning}, Pages = {273--297}, Title = {Support vector networks}, Volume = 20, Year = 1995} @article{Cossu+88, Author = {G. Cossu and D. Shankweiler and I. Y. Liberman and L. Katz and G. 
Tola}, Journal = {Applied Psycholinguistics}, Pages = {1--16}, Title = {Awareness of phonological segments and reading ability in Italian children}, Volume = 9, Year = 1988} @article{Cost+93, Author = {S. Cost and S. Salzberg}, Journal = {{Machine Learning}}, Pages = {57--78}, Title = {{A weighted nearest neighbour algorithm for learning with symbolic features}}, Volume = 10, Year = 1993} @book{Coulmas89, Author = {F. Coulmas}, Publisher = {Oxford, UK: Blackwell Publishers}, Title = {The writing systems of the world}, Year = 1989} @article{Cover+67, Author = {T. M. Cover and P. E. Hart}, Journal = {Institute of Electrical and Electronics Engineers Transactions on Information Theory}, Pages = {21--27}, Title = {Nearest neighbor pattern classification}, Volume = 13, Year = 1967} @book{Cover+91, Address = {New York}, Author = {Thomas M. Cover and Joy A. Thomas}, Date-Added = {2010-02-01 23:23:30 +0100}, Date-Modified = {2010-02-01 23:23:36 +0100}, Publisher = {Wiley}, Title = {Elements of information theory}, Year = {1991}} @inproceedings{Covington01, Author = {Covington, M.A.}, Booktitle = {Proceedings of the 39th Annual ACM Southeast Conference}, Date-Added = {2009-11-15 00:17:32 +0100}, Date-Modified = {2009-11-15 00:17:45 +0100}, Pages = {95--102}, Title = {A fundamental algorithm for dependency parsing}, Year = {2001}} @article{Crammer+01, Author = {Crammer, K. and Singer, Y.}, Date-Added = {2009-11-15 00:18:08 +0100}, Date-Modified = {2009-11-15 00:18:21 +0100}, Journal = {Journal of Machine Learning Research}, Pages = {265--292}, Title = {On the Algorithmic Implementation of Multiclass Kernel-based Vector Machines}, Volume = {2}, Year = {2001}} @article{Crammer+02, Author = {Crammer, K. 
and Singer, Y.}, Journal = {Machine Learning}, Number = 2, Pages = {201--233}, Publisher = {Springer}, Title = {On the Learnability and Design of Output Codes for Multiclass Problems}, Volume = 47, Year = 2002} @book{Cristiani+00, Address = {Cambridge, UK}, Author = {Cristiani, N. and Shawe-Taylor, J.}, Publisher = {Cambridge University Press}, Title = {An introduction to support vector machines}, Year = 2000} @book{Croft+03, Address = {Cambridge}, Author = {W. Croft and A. Cruse}, Date-Modified = {2010-02-14 19:59:55 +0100}, Publisher = CUP, Series = {Cambridge Textbooks in Linguistics}, Title = {Cognitive Linguistics}, Year = 2003} @book{Croft90, Address = {Cambridge}, Author = {W. Croft}, Date-Added = {2010-02-01 23:23:43 +0100}, Date-Modified = {2010-02-14 23:07:51 +0100}, Publisher = {Cambridge University Press}, Title = {Typology and Universals}, Year = {1990}} @book{Crystal69, Address = {Cambridge}, Author = {David Crystal}, Date-Added = {2010-02-01 23:23:55 +0100}, Date-Modified = {2010-02-01 23:23:55 +0100}, Publisher = {Cambridge University Press}, Title = {Prosodic systems and intonation in {E}nglish}, Year = {1969}} @book{Crystal85, Address = {Oxford}, Author = {David Crystal}, Date-Added = {2010-02-01 23:23:55 +0100}, Date-Modified = {2010-02-01 23:23:55 +0100}, Publisher = {Basil Blackwell}, Title = {A dictionary of {L}inguistics and {P}honetics}, Year = {1985}} @inproceedings{Cucerzan+04, Address = {Barcelona, Spain}, Author = {S. Cucerzan and E. Brill}, Booktitle = {Proceedings of EMNLP 2004}, Date-Added = {2010-01-29 15:08:58 +0100}, Date-Modified = {2010-02-14 19:58:26 +0100}, Editor = {D. Lin and D. Wu}, Month = {July}, Pages = {293--300}, Publisher = {Association for Computational Linguistics}, Title = {Spelling Correction as an Iterative Process that Exploits the Collective Knowledge of Web Users}, Year = 2004} @article{Cuenod+95, Author = {C. Cuenod and S. Bookheimer and L. Hertz-Pannier and T. Zeffiro and W. Theodore and D. 
Le Bihan}, Date-Modified = {2008-07-23 16:57:06 +0200}, Journal = {{Neurology}}, Keywords = {fMRI, neurosciences, human language processing}, Pages = {1821--}, Title = {{Functional MRI during word generation, using conventional equipment: A potential tool for \dots}}, Volume = 45, Year = 1995} @article{Cutler+86, Author = {A. Cutler and J. Mehler and D. Norris and J. Segui}, Journal = {Journal of Memory and Language}, Pages = {385--400}, Title = {The syllable's differing role in segmentation of {F}rench and {E}nglish}, Volume = 25, Year = 1986} @article{Cutler+88, Author = {A. Cutler and D. Norris}, Journal = {Journal of Experimental Psychology: Human Perception and Performance}, Pages = {113--121}, Title = {The role of strong syllables in segmentation for lexical access}, Volume = 14, Year = 1988} @inproceedings{Cutting+92, Address = {Trento, Italy}, Author = {D. Cutting and J. Kupiec and J. Pedersen and P. Sibun}, Booktitle = {Proceedings Third ACL Applied NLP}, Date-Modified = {2009-11-14 17:36:32 +0100}, Keywords = {part-of-speech tagging}, Pages = {133--140}, Title = {A practical {Part-of-Speech} tagger}, Year = 1992} @inproceedings{Cutting+93, Author = {C. Cutting and D. Karger and J. Pedersen}, Booktitle = {Proceedings of ACM-SIGIR Conference on Information Retrieval}, Title = {Constant interaction-time Scatter/Gather browsing of very large document collections}, Year = 1993} @inproceedings{Daciuk+02, Address = {London, UK}, Author = {J. Daciuk and G. {Van Noord}}, Booktitle = {CIAA '01: Revised Papers from the 6th International Conference on Implementation and Application of Automata}, Date-Modified = {2010-09-20 00:14:17 +0200}, Isbn = {3-540-00400-9}, Pages = {65--73}, Publisher = {Springer-Verlag}, Title = {Finite Automata for Compact Representation of Language Models in {NLP}}, Year = {2002}} @techreport{Daelemans+00, Author = {W. Daelemans and J. Zavrel and K. {Van der Sloot} and A. 
{Van den Bosch}}, Date-Modified = {2010-01-02 21:21:45 +0100}, Institution = {ILK, Tilburg University}, Keywords = {ilk, TiMBL, memory-based learning}, Number = {ILK-0001}, Title = {{TiMBL}: Tilburg Memory Based Learner, version 3.0, Reference Manual}, Year = 2000} @incollection{Daelemans+01, Address = {Dordrecht}, Author = {W. Daelemans and A. {Van den Bosch}}, Booktitle = {Data-driven techniques in speech synthesis}, Date-Modified = {2011-01-29 16:41:59 +0100}, Editor = {R. Damper}, Keywords = {ilk, grapheme-phoneme conversion, speech synthesis}, Pages = {149--172}, Publisher = {Kluwer Academic Publishers}, Title = {{TreeTalk}: Memory-Based Word Phonemisation}, Year = 2001} @techreport{Daelemans+01a, Author = {W. Daelemans and J. Zavrel and K. {Van der Sloot} and A. {Van den Bosch}}, Date-Modified = {2010-01-02 21:21:04 +0100}, Institution = {ILK Research Group, Tilburg University}, Keywords = {ilk, TiMBL, memory-based learning}, Number = {ILK Technical Report 01-04}, Title = {{TiMBL}: Tilburg Memory Based Learner, version 4.0, Reference Guide}, Year = 2001} @inproceedings{Daelemans+01b, Author = {W. Daelemans and V. Hoste}, Booktitle = {Proceedings of the Third International Conference on Language Resources and Evaluation}, Date-Modified = {2010-01-02 21:20:52 +0100}, Keywords = {ilk}, Pages = {to appear}, Title = {Evaluation of Machine Learning Methods for Natural Language Processing Tasks}, Year = 2002} @techreport{Daelemans+02, Author = {W. Daelemans and J. Zavrel and K. {Van der Sloot} and A. {Van den Bosch}}, Date-Modified = {2010-01-02 21:20:45 +0100}, Institution = {ILK Research Group, Tilburg University}, Keywords = {ilk, TiMBL, memory-based learning}, Number = {ILK 02-10}, Title = {{TiMBL}: Tilburg Memory Based Learner, version 4.3, Reference Guide}, Year = 2002} @inproceedings{Daelemans+02b, Address = {Las Palmas, Gran Canaria}, Author = {W. Daelemans and V. 
Hoste}, Booktitle = {Proceedings of the Third International Conference on Language Resources and Evaluation}, Date-Modified = {2010-01-02 21:20:33 +0100}, Keywords = {ilk}, Pages = {755--760}, Title = {Evaluation of Machine Learning Methods for Natural Language Processing Tasks}, Year = 2002} @techreport{Daelemans+03, Author = {W. Daelemans and J. Zavrel and K. {Van der Sloot} and A. {Van den Bosch}}, Date-Modified = {2010-01-02 21:20:21 +0100}, Institution = {ILK Research Group, Tilburg University}, Keywords = {ilk, TiMBL, memory-based learning}, Number = {ILK 03-10}, Title = {{TiMBL}: Tilburg Memory Based Learner, version 5.0, Reference Guide}, Year = 2003} @inproceedings{Daelemans+03b, Archive = {Iris, web}, Author = {W. Daelemans and V. Hoste and F. {De Meulder} and B. Naudts}, Booktitle = {Proceedings of the 14th European Conference on Machine Learning (ECML-2003)}, Date-Modified = {2010-01-02 21:20:15 +0100}, Keywords = {ilk, feature selection}, Pages = {84--95}, Source = {Iris}, Title = {Combined optimization of feature selection and algorithm parameter interaction in machine learning of language}, Year = 2003, Bdsk-Url-1 = {http://www.cnts.ua.ac.be/~walter/papers/2003/dhdn03.pdf}} @techreport{Daelemans+03c, Author = {W. Daelemans and J. Zavrel and A. {Van den Bosch} and K. {Van der Sloot}}, Date-Modified = {2010-01-02 21:20:02 +0100}, Institution = {ILK Research Group, Tilburg University}, Keywords = {ilk, MBT, part-of-speech tagging, memory-based learning}, Number = {ILK 03-13}, Title = {{MBT}: Memory Based Tagger, version 2.0, Reference Guide}, Year = 2003} @techreport{Daelemans+03d, Author = {W. Daelemans and J. Zavrel and A. {Van den Bosch} and K. 
{Van der Sloot}}, Date-Modified = {2011-01-29 16:42:13 +0100}, Institution = {ILK Research Group, Tilburg University}, Keywords = {ilk, MBT, memory-based learning, part-of-speech tagging}, Number = {ILK 03-13}, Title = {{MBT}: Memory Based Tagger, version 2.0, Reference Guide}, Year = 2003} @techreport{Daelemans+04, Author = {W. Daelemans and J. Zavrel and K. {Van der Sloot} and A. {Van den Bosch}}, Date-Modified = {2010-01-02 21:19:51 +0100}, Institution = {ILK Research Group, Tilburg University}, Keywords = {ilk, TiMBL, memory-based learning}, Number = {ILK 04-02}, Title = {{TiMBL}: Tilburg Memory Based Learner, version 5.1.0, Reference Guide}, Year = 2004} @inproceedings{Daelemans+04b, Author = {W. Daelemans and A. H\"othker and E. {Tjong Kim Sang}}, Booktitle = {Proceedings of the 4th International Conference on Language Resources and Evaluation}, Date-Added = {2009-11-15 14:33:14 +0100}, Date-Modified = {2010-09-14 22:11:13 +0200}, Keywords = {clips, subtitling}, Pages = {1045--1048}, Title = {Automatic Sentence Simplification for Subtitling in {Dutch} and {English}}, Year = {2004}} @book{Daelemans+05, Address = {Cambridge, UK}, Author = {W. Daelemans and A. {Van den Bosch}}, Date-Modified = {2010-01-02 21:19:23 +0100}, Keywords = {ilk, memory-based learning, memory-based language processing}, Publisher = {Cambridge University Press}, Title = {Memory-based language processing}, Year = 2005} @techreport{Daelemans+10, Author = {W. Daelemans and J. Zavrel and K. {Van der Sloot} and A. {Van den Bosch}}, Date-Modified = {2010-03-29 10:26:50 +0200}, Institution = {ILK Research Group, Tilburg University}, Keywords = {ilk, TiMBL, memory-based learning}, Number = {ILK 10-01}, Title = {{TiMBL}: Tilburg Memory Based Learner, version 6.3, Reference Guide}, Year = 2010} @techreport{Daelemans+07b, Author = {W. Daelemans and J. Zavrel and A. {Van den Bosch} and K. 
{Van der Sloot}}, Date-Modified = {2010-09-18 14:40:48 +0200}, Institution = {ILK Research Group, Tilburg University}, Keywords = {ilk, memory-based learning, POS tagging, memory-based tagging, part-of-speech tagging}, Number = {ILK 07-04}, Title = {{MBT}: Memory Based Tagger, version 3.0, Reference Guide}, Year = 2007} @incollection{Daelemans+07c, Address = {Ghent, Belgium}, Author = {W. Daelemans and A. {Van den Bosch}}, Booktitle = {Tussen taal, spelling en onderwijs: Essays bij het emeritaat van Frans Daems.}, Date-Added = {2010-01-03 01:25:23 +0100}, Date-Modified = {2010-01-03 01:26:59 +0100}, Editor = {D. Sandra and R. Rymenans and P. Cuvelier and P. {Van Petegem}}, Keywords = {ilk, homophones, confusible disambiguation, phonology, spelling, Dutch}, Pages = {199--210}, Publisher = {Academia Press}, Title = {Dat gebeurd mei niet: Computationele modellen voor verwarbare homofonen}, Year = {2007}, Bdsk-Url-1 = {http://www.cnts.ua.ac.be/~walter/papers/2007/db07.pdf}} @inproceedings{Daelemans+92, Address = {Amsterdam, The Netherlands}, Author = {W. Daelemans and A. {Van den Bosch}}, Booktitle = {Artificial Neural Networks 2}, Date-Modified = {2011-06-21 18:25:29 +0200}, Editor = {I. Aleksander and J. Taylor}, Keywords = {ilk, hyphenation}, Pages = {1647--1650}, Publisher = {North-Holland}, Title = {A neural network for hyphenation}, Volume = 2, Year = 1992} @inproceedings{Daelemans+92b, Address = {Enschede}, Author = {W. Daelemans and A. {Van den Bosch}}, Booktitle = {Proceedings of TWLT3: Connectionism and Natural Language Processing}, Date-Modified = {2010-01-02 21:18:38 +0100}, Editor = {M. F. J. Drossaers and A. Nijholt}, Keywords = {ilk, syllabification}, Organization = {Twente University}, Pages = {27--37}, Title = {Generalisation Performance of Backpropagation Learning on a Syllabification Task}, Year = 1992} @article{Daelemans+92c, Author = {W. Daelemans and K. {De Smedt} and G. 
Gazdar}, Date-Modified = {2010-01-02 21:18:21 +0100}, Journal = CL, Keywords = {inheritance}, Number = 2, Pages = {205--218}, Title = {Inheritance in natural language processing}, Volume = 18, Year = 1992} @incollection{Daelemans+93, Author = {W. Daelemans and S. Gillis and G. Durieux and A. {Van den Bosch}}, Booktitle = {Computational phonology}, Date-Modified = {2010-01-02 21:18:06 +0100}, Editor = {T. M. Ellison and J. M. Scobbie}, Keywords = {ilk, stress assignment}, Pages = {157--178}, Publisher = {Edinburgh, UK: Centre for Cognitive Sciences}, Series = {Edinburgh Working Papers in Cognitive Science}, Title = {Learnability and markedness in data-driven acquisition of stress}, Volume = 8, Year = 1993} @inproceedings{Daelemans+93b, Author = {W. Daelemans and A. {Van den Bosch} and S. Gillis and G. Durieux}, Booktitle = {Proceedings of ECML workshop on ML techniques for Text Analysis, Vienna, 1993}, Date-Modified = {2010-01-02 21:17:46 +0100}, Keywords = {ilk, stress assignment}, Title = {A data-driven approach to stress acquisition}, Year = 1993} @inproceedings{Daelemans+93c, Address = {Berlin}, Author = {W. Daelemans and A. {Van den Bosch}}, Booktitle = {Proceedings of Eurospeech '93}, Date-Modified = {2011-01-29 16:42:38 +0100}, Keywords = {ilk, grapheme-phoneme conversion}, Pages = {1459--1466}, Publisher = {T.U. Berlin}, Title = {{TabTalk}: reusability in data-oriented grapheme-to-phoneme conversion}, Year = 1993} @inproceedings{Daelemans+94a, Author = {W. Daelemans and S. Gillis and G. Durieux}, Booktitle = {Proceedings of the First NeMLaP Conference, Manchester, UK}, Date-Modified = {2010-01-02 21:17:24 +0100}, Keywords = {ilk, memory-based language processing, analogical modeling}, Title = {Skousen's analogical modeling algorithm: A comparison with lazy learning}, Year = 1994} @inproceedings{Daelemans+94c, Author = {W. Daelemans and A. 
{Van den Bosch}}, Booktitle = {Proceedings of the Second {{\sc esca/ieee}} Workshop on Speech Synthesis, New York}, Date-Modified = {2010-01-02 21:17:11 +0100}, Keywords = {ilk, grapheme-phoneme conversion}, Organization = {{{\sc esca/ieee}}}, Pages = {199--203}, Title = {A language-independent, data-oriented architecture for grapheme-to-phoneme conversion}, Year = 1994} @inproceedings{Daelemans+94d, Address = {Rotterdam}, Author = {W. Daelemans and S. Gillis and G. Durieux}, Booktitle = {Proceedings of the 4th {B}elgian-{D}utch Conference on Machine Learning}, Date-Modified = {2010-01-02 21:17:02 +0100}, Editor = {J. C. Bioch and S. H. Nienhuys-Cheng}, Keywords = {ilk, analogical modeling}, Pages = {302--310}, Publisher = {Erasmus University}, Title = {Skousen's analogical modeling algorithm}, Year = 1994} @incollection{Daelemans+96, Address = {Berlin}, Author = {W. Daelemans and A. {Van den Bosch}}, Booktitle = {Progress in Speech Processing}, Date-Modified = {2010-01-02 21:16:54 +0100}, Editor = {J. P. H. {Van Santen} and R. W. Sproat and J. P. Olive and J. Hirschberg}, Keywords = {ilk, grapheme-phoneme conversion}, Pages = {77--89}, Publisher = {Springer-Verlag}, Title = {Language-independent data-oriented grapheme-to-phoneme conversion}, Year = 1996} @inproceedings{Daelemans+96b, Author = {W. Daelemans and J. Zavrel and P. Berck and S. Gillis}, Booktitle = {Proceedings of the Fourth Workshop on Very Large Corpora}, Date-Modified = {2010-01-02 21:16:43 +0100}, Editor = {E. Ejerhed and I. Dagan}, Keywords = {ilk, MBT, part-of-speech tagging, memory-based learning}, Organization = {ACL SIGDAT}, Pages = {14--27}, Title = {{MBT}: A Memory-Based Part of Speech Tagger Generator}, Year = 1996} @inproceedings{Daelemans+96c, Author = {W. Daelemans and P. Berck and S. 
Gillis}, Booktitle = COLING96, Date-Modified = {2010-01-02 21:16:37 +0100}, Keywords = {ilk, phonology}, Organization = {Center for Sprogteknologi}, Pages = {95--100}, Title = {Unsupervised discovery of phonological categories through supervised learning of morphological rules}, Year = 1996} @inproceedings{Daelemans+96d, Address = {The Netherlands}, Author = {W. Daelemans and J. Zavrel and P. Berck}, Booktitle = {Informatiewetenschap 1996, Wetenschappelijke bij\-drage aan de Vierde Interdisciplinaire Onderzoeksconferentie Informatiewetenchap}, Date-Modified = {2010-09-20 00:15:32 +0200}, Editor = {K. {Van der Meer}}, Keywords = {ilk, part-of-speech tagging, MBT, memory-based learning, memory-based language processing}, Organization = {TU Delft}, Pages = {33--40}, Title = {Part-of-Speech Tagging for {D}utch with {MBT}, a Memory-based Tagger Generator}, Year = 1996} @article{Daelemans+97, Author = {W. Daelemans and A. {Van den Bosch} and A. Weijters}, Date-Modified = {2010-01-02 21:16:19 +0100}, Journal = {Artificial Intelligence Review}, Keywords = {ilk, IGTree}, Pages = {407--423}, Title = {{IGT}ree: using trees for compression and classification in lazy learning algorithms}, Volume = 11, Year = 1997} @article{Daelemans+97b, Author = {W. Daelemans and P. Berck and S. Gillis}, Date-Modified = {2010-01-02 21:16:00 +0100}, Journal = {Folia Linguistica}, Keywords = {ilk, diminutive inflection, Dutch}, Number = {1--2}, Pages = {57--75}, Title = {Data mining as a method for linguistic analysis: Dutch diminutives}, Volume = {XXXI}, Year = 1997} @incollection{Daelemans+97c, Address = {London}, Author = {W. Daelemans and S. Gillis and G. Durieux}, Booktitle = {New Methods in Language Processing}, Date-Modified = {2010-01-02 21:15:39 +0100}, Editor = {D. Jones and H. 
Somers}, Keywords = {ilk, analogical modeling, memory-based language processing}, Pages = {3--15}, Publisher = {UCL Press}, Title = {Skousen's Analogical Modeling Algorithm: A comparison with Lazy Learning}, Year = 1997} @proceedings{Daelemans+97d, Address = {Prague, Czech Republic}, Date-Modified = {2010-01-02 21:15:20 +0100}, Editor = {W. Daelemans and A. Weijters and A. {Van den Bosch}}, Keywords = {ilk}, Publisher = {University of Economics}, Title = {Workshop Notes of the ECML/MLnet familiarisation workshop on Empirical learning of natural language processing tasks}, Year = 1997} @inproceedings{Daelemans+97f, Address = {Berlin}, Author = {W. Daelemans and A. Weijters and A. {Van den Bosch}}, Booktitle = {Machine Learning: Proceedings of ECML-97}, Date-Modified = {2010-01-02 21:15:15 +0100}, Editor = {M. {Van Someren} and G. Widmer}, Keywords = {ilk}, Number = 1224, Pages = {337--344}, Publisher = {Springer-Verlag}, Series = {Lecture Notes in Artificial Intelligence}, Title = {Empirical learning of natural language processing tasks}, Year = 1997} @inproceedings{Daelemans+97g, Address = {Prague, Czech Republic}, Author = {W. Daelemans and A. {Van den Bosch} and J. Zavrel}, Booktitle = {Poster Papers of the Ninth European Conference on Machine Learning}, Date-Modified = {2010-01-02 21:15:07 +0100}, Editor = {M. {Van Someren} and G. Widmer}, Keywords = {ilk, IGTree}, Pages = {29--38}, Publisher = {University of Economics}, Title = {A feature-relevance heuristic for indexing and compressing large case bases}, Year = 1997} @techreport{Daelemans+98, Author = {W. Daelemans and J. Zavrel and K. {Van der Sloot} and A. 
{Van den Bosch}}, Date-Modified = {2010-01-02 21:14:56 +0100}, Institution = {ILK Research Group, Tilburg University}, Keywords = {ilk, TiMBL}, Number = {ILK 98-03}, Title = {{TiMBL}: Tilburg Memory Based Learner, version 1.0, Reference Manual}, Year = 1998} @techreport{Daelemans+98a, Author = {W. Daelemans and J. Zavrel and K. {Van der Sloot} and A. {Van den Bosch}}, Date-Modified = {2010-01-02 21:14:27 +0100}, Institution = {ILK Research Group, Tilburg University}, Keywords = {ilk, TiMBL}, Number = {ILK 98-03}, Title = {{TiMBL}: Tilburg Memory Based Learner, version 1.0, Reference Manual}, Year = 1998} @inproceedings{Daelemans+98b, Author = {W. Daelemans and A. {Van den Bosch} and J. Zavrel and J. Veenstra and S. Buchholz and G. J. Busser}, Booktitle = {Proceedings of {ELSNET} in {W}onderland, March, 1998}, Date-Modified = {2010-01-02 21:14:06 +0100}, Keywords = {ilk, memory-based language processing}, Pages = {105--113}, Publisher = {ELSNET}, Title = {Rapid development of {NLP} modules with {M}emory-{B}ased {L}earning}, Year = 1998} @article{Daelemans+99, Author = {W. Daelemans and A. {Van den Bosch} and J. Zavrel}, Date-Modified = {2010-09-14 12:52:41 +0200}, Journal = {Machine Learning, Special issue on Natural Language Learning}, Keywords = {ilk, memory-based language processing, lazy learning, decision trees}, Pages = {11--41}, Title = {Forgetting exceptions is harmful in language learning}, Volume = 34, Year = 1999} @inproceedings{Daelemans+99a, Address = {Bergen, Norway}, Author = {W. Daelemans and S. Buchholz and J. Veenstra}, Booktitle = {Proceedings of CoNLL}, Date-Modified = {2010-01-02 21:13:50 +0100}, Keywords = {ilk, shallow parsing}, Title = {Memory-based shallow parsing}, Year = 1999} @techreport{Daelemans+99b, Author = {W. Daelemans and J. Zavrel and K. {Van der Sloot} and A. 
{Van den Bosch}}, Date-Modified = {2010-01-02 21:13:12 +0100}, Institution = {ILK Research Group, Tilburg University}, Keywords = {ilk, TiMBL, memory-based learning}, Number = {ILK 99-01}, Title = {{TiMBL}: Tilburg Memory Based Learner, version 2.0, Reference Manual}, Year = 1999} @inproceedings{Daelemans+99c, Address = {Bergen, Norway}, Author = {W. Daelemans and S. Buchholz and J. Veenstra}, Booktitle = {Proceedings of CoNLL}, Date-Modified = {2010-01-02 21:12:32 +0100}, Keywords = {ilk, shallow parsing}, Title = {Memory-based shallow parsing}, Year = 1999} @incollection{Daelemans02, Address = {Amsterdam, The Netherlands}, Author = {W. Daelemans}, Booktitle = {Analogical Modeling}, Date-Modified = {2010-01-02 21:12:06 +0100}, Editor = {R. Skousen and D. Lonsdale and D.B. Parkinson}, Keywords = {ilk, analogical modeling, memory-based language processing}, Publisher = {John Benjamins}, Title = {A comparison of analogical modeling to memory-based language processing}, Year = 2002} @phdthesis{Daelemans87, Author = {W. Daelemans}, Date-Modified = {2010-01-02 19:59:41 +0100}, Keywords = {morphology}, School = {Katholieke Universiteit Leuven}, Title = {Studies in language technology: An object-oriented model of morphophonological aspects of {D}utch}, Year = 1987} @inproceedings{Daelemans88, Author = {W. Daelemans}, Booktitle = COLING88, Date-Modified = {2011-01-29 16:43:02 +0100}, Keywords = {grapheme-phoneme conversion, Dutch}, Pages = {133--138}, Title = {{GRAFON}: A grapheme-to-phoneme system for {D}utch}, Year = 1988} @incollection{Daelemans88b, Address = {Leuven}, Author = {W. Daelemans}, Booktitle = {Worlds behind Words}, Date-Modified = {2010-01-02 19:59:18 +0100}, Editor = {F.J. Heyvaert and F. Steurs}, Keywords = {hyphenation}, Pages = {347-364}, Publisher = {Leuven University Press}, Title = {Automatic hyphenation: linguistics versus engineering}, Year = 1988} @incollection{Daelemans89, Address = {Leuven}, Author = {W. 
Daelemans}, Booktitle = {Worlds behind Words}, Date-Modified = {2010-01-02 19:59:09 +0100}, Editor = {F. J. Heyvaert and F. Steurs}, Keywords = {hyphenation}, Pages = {347--364}, Publisher = {Leuven University Press}, Title = {Automatic hyphenation: Linguistics versus engineering}, Year = 1989} @incollection{Daelemans95, Address = {Berlin}, Author = {W. Daelemans}, Booktitle = {Machine Translation and the Lexicon}, Date-Modified = {2010-01-02 19:59:01 +0100}, Editor = {P. Steffens}, Keywords = {ilk, lexicon}, Pages = {85--98}, Publisher = {Springer-Verlag}, Series = {Lecture Notes in Artificial Intelligence}, Title = {Memory-based lexical acquisition and processing}, Volume = 898, Year = 1995} @incollection{Daelemans96, Address = {Tilburg}, Author = {W. Daelemans}, Booktitle = {Proceedings of the {CLS} Opening Academic Year 1996-1997}, Date-Modified = {2010-01-02 19:58:37 +0100}, Editor = {M. {Van der Avoird} and C. Corsius}, Keywords = {ilk}, Pages = {83--95}, Publisher = {CLS}, Title = {Experience-driven language acquisition and processing}, Year = 1996} @inproceedings{Daelemans96b, Address = {Maastricht, The Netherlands}, Author = {W. Daelemans}, Booktitle = {Proceedings of the Sixth Belgian--Dutch Conference on Machine Learning}, Date-Modified = {2010-01-02 19:58:31 +0100}, Editor = {H. J. {Van den Herik} and A. Weijters}, Keywords = {ilk, memory-based learning}, Organization = {{{\sc matriks}}}, Pages = {3--12}, Title = {Abstraction considered harmful: lazy learning of language processing}, Year = 1996} @book{Daelemans99, Date-Modified = {2010-01-02 19:58:23 +0100}, Editor = {W. Daelemans}, Keywords = {ilk, memory-based language processing}, Number = 3, Publisher = {Taylor \& Francis}, Series = {Special Issue of Journal of Experimental and Theoretical AI}, Title = {Memory-based language processing}, Volume = 11, Year = 1999} @article{Daelemans99b, Author = {W. 
Daelemans}, Date-Modified = {2010-01-02 19:58:01 +0100}, Journal = {Journal of Experimental and Theoretical Artificial Intelligence}, Keywords = {ilk, memory-based language processing}, Number = 3, Pages = {287-296}, Title = {Memory-based language processing}, Volume = 11, Year = 1999} @inproceedings{Dagan+90, Author = {Ido Dagan and Alon Itai}, Booktitle = CL, Pages = {330--332}, Title = {Automatic Acquisition of Constraints for the Resolution of Anaphora References and Syntactic Ambiguities}, Volume = 3, Year = 1990} @inproceedings{Dagan+91, Author = {Ido Dagan and Alon Itai and Ulrike Schwall}, Booktitle = ACL, Pages = {130--137}, Title = {Two languages are more informative than one}, Year = 1991} @incollection{Dagan+91b, Author = {Ido Dagan and Alon Itai}, Booktitle = {Artificial Intelligence and Computer Vision}, Editor = {Y. A. Feldman and A. Bruckstein}, Note = {(The Proceedings of the 7th Israeli Sym. on Artificial Intelligence and Computer Vision, 1990)}, Pages = {125--135}, Publisher = {Elsevier Science Publishers B.V.}, Title = {A Statistical Filter for Resolving Pronoun References}, Year = 1991} @inproceedings{Dagan+93, Author = {I. Dagan and S. Markus and S. Markovitch}, Booktitle = {Proceedings of the 30th Annual Meeting of the ACL}, Organization = {ACL}, Pages = {164--171}, Place = {Columbus, OH}, Title = {Contextual Word Similarity and Estimation from Sparse Data}, Year = 1993} @inproceedings{Dagan+93a, Author = {I. Dagan and K. Church and W. Gale}, Booktitle = {Proceedings of the Workshop on Very Large Corpora: Academic and Industrial Perspectives}, Date-Modified = {2010-02-14 20:00:08 +0100}, Pages = {1--8}, Place = {Columbus, Ohio}, Title = {Robust bilingual word alignment for machine aided translation}, Year = 1993} @inproceedings{Dagan+94, Author = {I. Dagan and F. Pereira and L. 
Lee}, Booktitle = {Proceedings of the 32nd Annual Meeting of the ACL, Las Cruces, New Mexico}, Month = {June}, Organization = {ACL}, Pages = {272--278}, Title = {Similarity-Based Estimation of Word Cooccurrence Probabilities}, Year = 1994} @inproceedings{Dagan+94c, Author = {I. Dagan and K. Church}, Booktitle = ANLP, Pages = {34--40}, Title = {{\it Termight}: Identifying and translating technical terminology}, Year = 1994} @article{Dagan+95, Author = {Ido Dagan and Shaul Marcus and Shaul Markovitch}, Journal = CSL, Pages = {123-152}, Title = {Contextual word similarity and estimation from sparse data}, Volume = 9, Year = 1995} @inproceedings{Dagan+95c, Author = {Ido Dagan and Sean Engelson}, Booktitle = ICML, Month = {July}, Title = {Committee-Based Sampling for Training Probabilistic Classifiers}, Year = 1995} @inproceedings{Dagan+95d, Author = {Ido Dagan and Sean Engelson}, Booktitle = {{IJCAI-95} Workshop On New Approaches to Learning for Natural Language Processing}, Month = {August}, Title = {Selective sampling in natural language learning.}, Year = 1995} @inproceedings{Dagan+97, Author = {I. Dagan and L. Lee and F. Pereira}, Booktitle = EACL/ACL97, Date-Modified = {2009-09-06 20:34:55 +0200}, Keywords = {wsd, word sense disambiguation}, Pages = {56--63}, Title = {Similarity-Based Methods for Word Sense Disambiguation}, Year = 1997} @article{Dagan+99, Author = {I. Dagan and L. Lee and F. Pereira}, Journal = {Machine Learning}, Number = {1-3}, Pages = {43--69}, Title = {Similarity-Based Models of Word Cooccurrence Probabilities}, Volume = 34, Year = 1999} @phdthesis{Dagan92, Address = {Haifa}, Author = {Ido Dagan}, Month = {May}, Note = {(in Hebrew)}, School = {Computer Science Department, Technion - Israel Institute of Technology}, Title = {Multilingual Statistical Approaches for Natural Language Disambiguation}, Year = 1992} @article{Dagan+94b, Author = {Dagan, I. and A. 
Itai}, Date-Modified = {2010-09-14 22:22:16 +0200}, Journal = CL, Keywords = {wsd, word sense disambiguation}, Number = 4, Pages = {563--596}, Title = {Word Sense Disambiguation Using a Second Language Monolingual Corpus}, Volume = 20, Year = 1994} @inproceedings{Dagan+97b, Author = {I. Dagan and Y. Karov and D. Roth}, Booktitle = {EMNLP-97, The Second Conference on Empirical Methods in Natural Language Processing}, Date-Modified = {2010-09-14 22:22:01 +0200}, Keywords = {text classification}, Pages = {55-63}, Title = {Mistake-Driven Learning in Text Categorization}, Year = {August 1997}} @incollection{Dahlgren88, Author = {K. Dahlgren}, Booktitle = {Natural Language Understanding and Logic Programming, II}, Date-Modified = {2009-09-06 20:35:19 +0200}, Editor = {Veronice Dahl and Patrick Saint-Dizier}, Keywords = {wsd, word sense disambiguation}, Pages = {255--275}, Publisher = {North-Holland}, Title = {Using common sense knowledge to disambiguate word senses}, Year = 1988} @inproceedings{Dai+09, Address = {Boulder, CO}, Author = {Q. Dai and E. Chen and L. Shi}, Booktitle = {Proc. 
of the {CoNLL} 2009: Shared Task}, Date-Added = {2009-12-26 20:57:57 +0100}, Date-Modified = {2009-12-26 21:05:40 +0100}, Pages = {19--24}, Title = {An Iterative Approach for Joint Dependency Parsing and Semantic Role Labeling}, Year = {2009}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/W09-1202}} @inproceedings{Daille94a, Author = {Beatrice Daille}, Booktitle = {The Balancing Act, Combining Symbolic and Statistical Approaches to Language -- Proceedings of the Workshop}, Pages = {29--36}, Title = {Study and implementation of combined techniques for automatic extraction of terminology}, Year = 1994} @inproceedings{Daille94b, Author = {Beatrice Daille}, Booktitle = COLING94, Pages = {515--521}, Title = {Towards automatic extraction of monolingual and bilingual terminology}, Year = 1994} @book{Dale+00, Address = {New York}, Editor = {Robert Dale and Hermann Moisl and Harold Somers}, Publisher = {Marcel Dekker Inc.}, Title = {Handbook of Natural Language Processing}, Year = 2000} @book{Dale92, Address = {Cambridge, MA}, Author = {Robert Dale}, Date-Added = {2010-02-01 23:24:03 +0100}, Date-Modified = {2010-02-01 23:24:08 +0100}, Publisher = {MIT Press}, Title = {Generating Referring Expressions: Constructing Descriptions in a Domain of Objects and Processes}, Year = {1992}} @article{Damasio+92, Author = {A. Damasio and H. Damasio}, Date-Modified = {2008-07-23 16:20:44 +0200}, Journal = {{Scientific American, Special Issue on Mind and Brain}}, Keywords = {verbs, nouns, human language processing, brain, neurosciences, PET}, Pages = {89--95}, Title = {{Brain and Language}}, Volume = {267(3)}, Year = 1992} @article{Damerau64, Address = {New York, NY, USA}, Author = {Fred J. 
Damerau}, Date-Added = {2010-01-29 15:09:16 +0100}, Date-Modified = {2010-02-17 19:01:31 +0100}, Journal = {Communications of the {ACM}}, Keywords = {spelling correction}, Pages = {171--176}, Publisher = {ACM Press}, Title = {A technique for computer detection and correction of spelling errors}, Volume = {Volume 7, Issue 3 (March 1964)}, Year = {1964}} @article{Damerau93, Author = {F. Damerau}, Journal = {Information Processing \& Management}, Number = 4, Pages = {433--447}, Title = {Generating and evaluating domain-oriented multi-word terms from texts}, Volume = 29, Year = 1993} @article{Damper+97, Author = {R. Damper and J. Eastmond}, Journal = {Language and Speech}, Pages = {1--23}, Title = {Pronunciation by analogy: impact of implementational choices on performance}, Volume = 40, Year = 1997} @incollection{Damper95, Address = {London, UK}, Author = {R. Damper}, Booktitle = {Connectionist Models of Memory and Language}, Editor = {J. Levy and D. Bairaktaris and J. Bullinaria and P. Cairns}, Pages = {117--144}, Publisher = {UCL Press}, Title = {Self-learning and connectionist approaches to text-phoneme conversion}, Year = 1995} @inproceedings{Danyluk+93, Address = {San Mateo, CA}, Author = {A. P. Danyluk and F. J. Provost}, Booktitle = {Proceedings of the Tenth International Conference on Machine Learning}, Pages = {81--88}, Publisher = {Morgan Kaufmann}, Title = {Small disjuncts in action: learning to diagnose errors in the local loop of the telephone network}, Year = 1993} @article{Darragh+90, Address = {Los Alamitos, CA, USA}, Author = {J. Darragh and I. Witten and M. James}, Date-Modified = {2010-06-25 22:16:56 +0200}, Journal = {Computer}, Number = 11, Pages = {41--49}, Publisher = {IEEE Computer Society}, Title = {The reactive keyboard: A predictive typing aid}, Volume = 23, Year = 1990} @article{Darroch+72, Author = {J. N. Darroch and D. 
Ratcliff}, Journal = {Annals of Mathematical Statistics}, Number = 5, Pages = {1470--1480}, Title = {Generalized iterative scaling for log-linear models}, Volume = 43, Year = 1972} @article{Dasarathy80, Author = {B. V. Dasarathy}, Date-Modified = {2010-01-03 10:51:15 +0100}, Journal = {Pattern Analysis and Machine Intelligence}, Keywords = {k-NN}, Pages = {67--71}, Title = {Nosing around the neighborhood: A new system structure and classification rule for recognition in partially exposed environments}, Volume = 2, Year = 1980} @book{Dasarathy91, Address = {Los Alamitos, CA}, Author = {B. V. Dasarathy}, Date-Modified = {2010-01-03 10:51:21 +0100}, Keywords = {k-NN}, Publisher = {IEEE Computer Society Press}, Title = {Nearest Neighbor ({NN}) Norms: {NN} Pattern Classification Techniques}, Year = 1991} @article{Daume+06, Author = {H. {Daum{\'e} {III}} and D. Marcu}, Journal = {Journal of Artificial Intelligence Research}, Pages = {101--126}, Title = {Domain adaptation for statistical classifiers}, Volume = 26, Year = 2006} @phdthesis{Daume06, Author = {H. {Daum\'e III}}, Date-Added = {2009-11-15 00:18:55 +0100}, Date-Modified = {2009-11-15 00:19:02 +0100}, School = {University of Southern California}, Title = {Practical Structured Learning Techniques for Natural Language Processing}, Year = {2006}} @inproceedings{Daya+04, Author = {E. Daya and D. Roth and S. Wintner}, Booktitle = {Proceedings of the Conference on Empirical Methods for Natural Language Processing (EMNLP)}, Date-Added = {2009-11-15 00:19:26 +0100}, Date-Modified = {2009-11-15 00:19:35 +0100}, Pages = {168--178}, Title = {Learning Hebrew Roots: Machine Learning with Linguistic Constraints}, Year = {2004}} @article{Daya+08, Author = {E. Daya and D. Roth and S. 
Wintner}, Date-Added = {2009-11-15 00:19:26 +0100}, Date-Modified = {2009-11-15 00:19:41 +0100}, Journal = {Computational Linguistics}, Number = {3}, Pages = {429--448}, Title = {Learning Hebrew Roots: Machine Learning with Linguistic Constraints}, Volume = {34}, Year = {2008}} @book{DeFrancis89, Address = {Honolulu}, Author = {J. DeFrancis}, Publisher = {University of Hawaii Press}, Title = {Visible Speech: The Diverse Oneness of Writing Systems}, Year = 1989} @unpublished{DeGelder93, Author = {B. {De Gelder}}, Note = {Paper to appear in the Journal of Chinese Linguistics}, Title = {Reading acquisition: The rough road and the silken route}, Year = 1993} @book{DeHaas+93, Address = {'s Gravenhage, The Netherlands}, Author = {W. {De Haas} and M. Trommelen}, Publisher = {SDU}, Title = {Morfologisch handboek van het Nederlands: Een overzicht van de woordvorming}, Year = 1993} @inproceedings{DeMeulder+03, Author = {F. {De Meulder} and W. Daelemans}, Booktitle = {Proceedings of CoNLL-2003}, Date-Modified = {2010-09-14 22:14:11 +0200}, Editor = {W. Daelemans and M. Osborne}, Keywords = {named-entity recognition, clips}, Pages = {208--211}, Publisher = {Edmonton, Canada}, Title = {Memory-Based Named Entity Recognition using Unannotated Data}, Year = 2003} @inproceedings{DeRaedt+91, Author = {L. {De Raedt} and M. Bruynooghe}, Booktitle = {Proceedings of the First International Workshop on Multistrategy Learning}, Date-Modified = {2009-11-14 17:41:47 +0100}, Pages = {175--190}, Publisher = {Harpers Ferry, WV}, Title = {{CLINT}: A multi-strategy interactive concept learner and theory revision system}, Year = 1991} @book{DeRaedt92, Address = {London}, Author = {L. {De Raedt}}, Publisher = {Academic Press}, Title = {Interactive theory revision: An inductive logic programming approach}, Year = 1992} @article{DeRose88, Author = {S. 
DeRose}, Journal = CL, Pages = {31--39}, Title = {{Grammatical category disambiguation by statistical optimization}}, Volume = 14, Year = 1988} @inproceedings{DeSitter+03, Address = {Cavtat-Dubrovnik, Croatia}, Author = {A. {De Sitter} and W. Daelemans}, Booktitle = {Proceedings of the International Workshop on Adaptive Text Extraction and Mining}, Date-Modified = {2010-09-14 22:14:41 +0200}, Keywords = {clips, information extraction}, Pages = {66--73}, Title = {Information extraction via double classification}, Year = 2003} @article{DeVries95, Author = {P. de Vries}, Journal = {{Cognitive Systems}}, Number = 2, Pages = {233--246}, Title = {{Downwards Emergence in Conceptual Networks}}, Volume = 4, Year = 1995} @inproceedings{Dean+92, Author = {T. Dean and D. Angluin and K. Basye and S. Engelson and L. Kaelbling and E. Kokkevis and O. Maron}, Booktitle = AAAI, Date-Modified = {2010-02-14 23:09:15 +0100}, Pages = {208--214}, Title = {Inferring Finite Automata with Stochastic Output Functions and an Application to Map Learning}, Year = 1992} @article{Dean+95, Author = {Dean, T. and Angluin, D. and Basye, K. and Engelson, S. and Kaelbling, L. and Kokkevis, E. and Maron, O.}, Date-Added = {2010-02-14 23:10:55 +0100}, Date-Modified = {2010-02-14 23:11:15 +0100}, Journal = {Machine Learning}, Number = {1}, Pages = {81--108}, Publisher = {Springer}, Title = {{Inferring finite automata with stochastic output functions and an application to map learning}}, Volume = {18}, Year = {1995}} @inproceedings{Debili+94, Author = {F. Debili and E. Sammouda and A. Zribi}, Booktitle = {Proceedings 4th Conference on Applied Natural Language Processing}, Date-Modified = {2010-02-14 23:11:44 +0100}, Note = {(poster presentation)}, Title = {Using Syntactic Dependencies for Word Alignment}, Year = 1994} @inproceedings{Decadt+04, Address = {New Brunswick, NJ}, Author = {B. Decadt and V. Hoste and W. Daelemans and A. 
{Van den Bosch}}, Booktitle = {Proceedings of the Third International Workshop on the Evaluation of Systems for the Semantic Analysis of Text (Senseval-3)}, Date-Modified = {2010-09-18 14:22:54 +0200}, Editor = {R. Mihalcea and P. Edmonds}, Keywords = {wsd, word sense disambiguation, genetic algorithms, memory-based learning, memory-based language processing, vi}, Pages = {108--112}, Publisher = {ACL}, Title = {{GAMBL}, genetic algorithm optimization of memory-based {WSD}}, Year = 2004, Bdsk-Url-1 = {http://www.cnts.ua.ac.be/papers/2004/val04.pdf}} @book{Dechter03, Address = {San Francisco, CA, USA}, Author = {R. Dechter}, Date-Added = {2009-11-15 00:19:49 +0100}, Date-Modified = {2010-02-14 23:11:55 +0100}, Publisher = {Morgan Kaufmann}, Title = {Constraint Processing}, Year = {2003}} @article{Dedina+91, Author = {M. J. Dedina and H. C. Nusbaum}, Date-Modified = {2009-11-14 17:42:37 +0100}, Journal = {Computer Speech and Language}, Pages = {55--64}, Title = {{PRONOUNCE}: a program for pronunciation by analogy}, Volume = 5, Year = 1991} @inproceedings{Dehaspe97, Author = {L. Dehaspe}, Booktitle = {{Inductive Logic Programming: Proceedings of the 7th International Workshop (ILP-97), Lecture Notes in Artificial Intelligence, 1297}}, Pages = {109--124}, Publisher = {Springer Verlag}, Title = {{Maximum entropy modeling with clausal constraints}}, Year = 1997} @article{Dejean02, Author = {Dejean, H.}, Issue = {Special Issue on Machine Learning Approaches to Shallow Parsing}, Journal = {Journal of Machine Learning Research}, Title = {When Linguistic Knowledge Improves Learning}, Year = 2002} @inproceedings{Delden+04, Address = {Las Vegas, NV}, Author = {S. {Van Delden} and D. B. Bracewell and F. 
Gomez}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {Proceedings of the 2004 IEEE International Conference on Information Reuse and Integration}, Date-Added = {2010-02-10 20:35:44 +0100}, Date-Modified = {2010-02-13 00:01:05 +0100}, Editor = {Du Zhang and {\'E}ric Gr{\'e}goire and Doug DeGroot}, Pages = {530-535}, Title = {Supervised and Unsupervised Automatic Spelling Correction Algorithms}, Year = {2004}} @article{DellaPietra+97, Author = {S. Della Pietra and V. Della Pietra and J. Lafferty}, Date-Modified = {2010-01-05 22:32:07 +0100}, Journal = {IEEE Transactions on Pattern Analysis and Machine Intelligence}, Month = Apr, Number = 4, Pages = {1--13}, Title = {Inducing Features of Random Fields}, Volume = {PAMI-19}, Year = 1997} @phdthesis{Demartines94, Author = {P. Demartines}, School = {L'Institut National Polytechnique de Grenoble}, Title = {{Analyse de Donn\'ees par R\'eseaux de Neurones Auto-Organis\'es}}, Year = 1994} @article{Dempster+77, Author = {A.P. Dempster and N.M. Laird and D.B. Rubin}, Journal = {Journal of the Royal Statistical Society, Series B (Methodological)}, Number = 1, Pages = {1--38}, Title = {Maximum Likelihood from Incomplete Data via the {EM} Algorithm}, Volume = 39, Year = 1977} @inproceedings{Deng+95, Address = {San Mateo, CA}, Author = {K. Deng and A. W. Moore}, Booktitle = {Proceedings of the 14th International Joint Conference on Artificial Intelligence}, Editor = {C. S. Mellish}, Pages = {1233--1239}, Publisher = {Morgan Kaufmann}, Title = {Multiresolution instance-based learning}, Year = 1995} @inproceedings{DePauw+04, Author = {G. {De Pauw} and T. Laureys and W. Daelemans and H. 
{Van hamme}}, Booktitle = {Proceedings of the ACL 2004 Workshop on Current Themes in Computational Phonology and Morphology}, Date-Modified = {2010-09-14 22:14:32 +0200}, Keywords = {clips, morphological analysis}, Pages = {62--69}, Title = {A Comparison of Two Different Approaches to Morphological Analysis of {D}utch}, Year = 2004} @inproceedings{DePauw+99, Author = {G. {De Pauw} and W. Daelemans}, Booktitle = {Proceedings of the Fourth Conference on Computational Language Learning}, Date-Modified = {2010-09-14 22:14:18 +0200}, Keywords = {clips}, Pages = {19--24}, Title = {The Role of Algorithm Bias vs Information Source in Learning Algorithms for Morphosyntactic Disambiguation}, Year = 1999} @article{Derwing+89, Author = {B. L. Derwing and R. Skousen}, Journal = {Berkeley Linguistic Society}, Pages = {48--62}, Title = {Real time morphology: Symbolic rules or analogical networks?}, Volume = 15, Year = 1989} @book{Deschutter78, Author = {De Schutter, G.}, Publisher = {Antwerp Papers In Linguistics}, Title = {Aspekten van de Nederlandse klankstruktuur}, Volume = 15, Year = 1978} @inproceedings{Devijver+80, Author = {P. A. {De\-vij\-ver} and J. Kittler}, Booktitle = {Proceedings of the Fifth International Conference on Pattern Recognition}, Publisher = {The Institute of Electrical and Electronics Engineers}, Title = {On the edited nearest neighbor rule}, Year = 1980} @book{Devijver+82, Address = {London, UK}, Author = {P. A. {De\-vij\-ver} and J. Kittler}, Publisher = {Prentice-Hall}, Title = {Pattern recognition. A statistical approach}, Year = 1982} @inproceedings{Diab+04, Address = {Boston, MA}, Author = {M. Diab and K. Hacioglu and D. Jurafsky}, Booktitle = {Proceedings of HLT-NAACL 2004}, Pages = {149--152}, Title = {Automatic Tagging of Arabic Text: From raw text to Base Phrase Chunks}, Year = 2004} @inproceedings{Dietterich+91, Author = {T. G. Dietterich and G. 
Bakiri}, Booktitle = {Proceedings of AAAI-91}, Pages = {572--577}, Publisher = {Menlo Park, CA}, Title = {Error-correcting output codes: A general method for improving multiclass inductive learning programs}, Year = 1991} @article{Dietterich+95, Author = {T. G. Dietterich and G. Bakiri}, Journal = {{Journal of Artificial Intelligence Research}}, Pages = {{263--286}}, Title = {{Solving Multiclass Learning Problems via Error--Correcting Output Codes}}, Volume = 2, Year = 1995} @article{Dietterich+95a, Author = {T. G. Dietterich and H. Hild and G. Bakiri}, Date-Modified = {2009-11-15 00:20:26 +0100}, Journal = {Machine Learning}, Number = 1, Pages = {5--28}, Title = {A comparison of {ID3} and Backpropagation for {E}nglish text-to-speech mapping}, Volume = 19, Year = 1995} @article{Dietterich+95b, Author = {T. G. Dietterich and G. Bakiri}, Journal = {Journal of Artificial Intelligence Research}, Pages = {263--286}, Title = {Solving multiclass learning problems via error-correcting output codes}, Volume = 2, Year = 1995} @inproceedings{Dietterich00, Author = {Dietterich, T.G.}, Booktitle = {Proceedings of the Eleventh International Conference on Algorithmic Learning Theory}, Date-Added = {2009-11-15 00:20:33 +0100}, Date-Modified = {2009-11-15 00:20:46 +0100}, Editor = {Arimura, H. and Jain, S. and Sharma, A.}, Pages = {13--26}, Title = {The divide-and-conquer manifesto}, Year = {2000}} @incollection{Dietterich02, Address = {Cambridge, MA, USA}, Author = {Dietterich, T.G.}, Booktitle = {The Handbook of Brain Theory and Neural Networks, Second edition}, Date-Added = {2009-11-15 00:20:51 +0100}, Date-Modified = {2009-11-15 00:21:01 +0100}, Editor = {M.A. 
Arbib}, Keywords = {ensembles}, Pages = {405--408}, Publisher = {MIT Press}, Title = {Ensemble learning}, Year = {2002}} @inproceedings{Dietterich02b, Author = {Dietterich, T.G.}, Booktitle = {Proceedings of the Joint IAPR International Workshop on Structural, Syntactic, and Statistical Pattern Recognition}, Date-Added = {2009-11-15 00:21:10 +0100}, Date-Modified = {2009-11-15 00:21:32 +0100}, Editor = {Caelli, T. and Amin, A. and Duin, R.P.W. and Kamel, M. and de Ridder, D.}, Pages = {15--30}, Title = {{Machine Learning for Sequential Data: A Review}}, Year = {2002}} @article{Dietterich97, Author = {Dietterich, T.G.}, Journal = {AI Magazine}, Number = 4, Pages = {97--136}, Title = {Machine Learning Research: Four Current Directions}, Volume = 18, Year = 1997} @article{Dietterich98, Author = {T.G. Dietterich}, Journal = {{Neural Computation}}, Number = 7, Pages = {{1895--1924}}, Title = {{Approximate Statistical Tests for Comparing Supervised Classification Learning Algorithms}}, Volume = 10, Year = 1998} @book{Dijkstra+96, Address = {London}, Author = {T. Dijkstra and K. de Smedt}, Key = {Dijkstra1996}, Publisher = {Taylor \& Francis}, Title = {Computational Psycholinguistics}, Year = 1996} @inproceedings{Doddington+04, Author = {Doddington, G. and Mitchell, A. and Przybocki, M. and Ramshaw, L. and Strassel, S. and Weischedel, R.}, Booktitle = {Proceedings of the Fourth International Conference on Language Resources and Evaluation (LREC'04)}, Date-Added = {2009-11-15 00:25:41 +0100}, Date-Modified = {2009-11-15 00:25:55 +0100}, Keywords = {ACE}, Pages = {837--840}, Title = {The Automatic Content Extraction (ACE) Program--Tasks, Data, and Evaluation}, Year = {2004}} @inproceedings{Dogaru+95, Author = {Dogaru, R. and A. 
Murgan}, Booktitle = {Proceedings of ICCN'95}, Organization = {ICCN}, Pages = {3048--3052}, Title = {Chaotic Resonance Theory, a New Approach for Pattern Storage and Retrieval in Neural Networks}, Volume = 6, Year = 1995} @techreport{Domingos95, Address = {Irvine, CA}, Author = {P. Domingos}, Institution = {University of California at Irvine, Department of Information and Computer Science}, Number = {95-2}, Title = {The {RISE} 2.0 system: A case study in multistrategy learning}, Year = 1995} @article{Domingos96, Author = {P. Domingos}, Journal = {Machine Learning}, Pages = {141--168}, Title = {Unifying instance-based and rule-based induction}, Volume = 24, Year = 1996} @inproceedings{Dorr+96, Author = {Bonnie J. Dorr and Doug Jones}, Booktitle = COLING96, Date-Modified = {2009-09-06 20:35:34 +0200}, Keywords = {wsd, word sense disambiguation}, Pages = {322--327}, Title = {Role of Word Sense Disambiguation in Lexical Acquisition: Predicting Semantics from Syntactic Clues}, Year = 1996} @inproceedings{Dougherty+95, Author = {Dougherty, J. and Kohavi, R. and Sahami, M}, Booktitle = {Proceedings of the International Conference on Machine Learning}, Title = {Supervised and unsupervised discretization of continuous features}, Year = 1995} @article{Dresher+90, Author = {E. Dresher and J. Kaye}, Journal = {Cognition}, Number = 2, Pages = {137--195}, Title = {A computational learning model for metrical phonology}, Volume = 32, Year = 1990} @inproceedings{Dridan+07, Address = {Melbourne, Australia}, Author = {R. Dridan and T. Baldwin}, Booktitle = {Proceedings of the 10th Conference of the Pacific Association for Computational Linguistics}, Date-Modified = {2009-11-14 18:51:47 +0100}, Keywords = {question answering}, Pages = {333--341}, Title = {What to classify and how: Experiments in question classification for Japanese}, Year = 2007} @article{Dronkers96, Author = {N.F. 
Dronkers}, Journal = {Nature}, Title = {A new brain region for speech: The insula and articulatory planning}, Year = 1996} @inproceedings{Duboue+01, Address = {Toulouse, France}, Author = {Pablo A. Duboue and Kathleen R. McKeown}, Booktitle = {Proceedings of 39th Annual Meeting of the Association for Computational Linguistics}, Doi = {10.3115/1073012.1073035}, Month = {July}, Pages = {172--179}, Publisher = {Association for Computational Linguistics}, Title = {Empirically Estimating Order Constraints for Content Planning in Generation}, Url = {http://www.aclweb.org/anthology/P01-1023}, Year = {2001}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/P01-1023}, Bdsk-Url-2 = {http://dx.doi.org/10.3115/1073012.1073035}} @inproceedings{Dudani76, Author = {Dudani, S.A.}, Booktitle = {IEEE Transactions on Systems, Man, and Cybernetics}, Pages = {325--327}, Title = {The Distance-Weighted $k$-Nearest Neighbor Rule}, Volume = {SMC-6}, Year = 1976} @inproceedings{Duffy86, Author = {Gavan Duffy}, Booktitle = AAAI, Title = {Categorical disambiguation}, Year = 1986} @article{Dunlop+00, Author = {M. D. Dunlop and A. Crossan}, Date-Modified = {2010-09-18 14:37:41 +0200}, Journal = {Personal Technologies}, Keywords = {predictive text entry}, Number = 2, Title = {Predictive Text Entry Methods for Mobile Phones}, Volume = 4, Year = 2000} @incollection{Durieux+00, Address = {Amsterdam, The Netherlands}, Author = {G. Durieux and S. Gillis}, Booktitle = {Approaches to bootstrapping: {P}honological, syntactic and neurophysiological aspects of early language acquisition}, Date-Modified = {2011-06-21 18:25:40 +0200}, Editor = {B. H\"ohle and J. Weissenborn}, Pages = {189-232}, Publisher = {Benjamins}, Title = {Predicting grammatical classes from phonological cues: An empirical test}, Year = 2000} @misc{Durieux+98, Author = {G. Durieux and W. Daelemans and S. 
Gillis}, Title = {Paper read at the Corsendonk Round Table}, Year = 1998} @inproceedings{Dutriaux+91, Address = {San Mateo, CA}, Author = {A. Dutriaux and D. Zipser}, Booktitle = {Connectionist Models: Proceedings of the 1990 Summer School}, Editor = {D.S. Touretzky and J.L. Elman and T.J. Sejnowski and G.E. Hinton}, Publisher = {Morgan Kaufmann}, Title = {Unsupervised Discovery of Speech Segments Using Recurrent Networks}, Year = 1991} @article{Earley70, Author = {Earley, J.}, Date-Added = {2009-11-15 00:26:06 +0100}, Date-Modified = {2009-11-15 00:26:19 +0100}, Journal = {Communications of the ACM}, Number = {2}, Pages = {94--102}, Title = {An efficient context-free parsing algorithm}, Volume = {13}, Year = {1970}} @inproceedings{Eckle+96, Author = {Judith Eckle and Ulrich Heid}, Booktitle = {Proceedings of the 4th International Conference on Computational Lexicography, COMPLEX '96, Budapest, Hungary}, Title = {Extracting raw material for a German subcategorization lexicon from newspaper text}, Year = 1996} @unpublished{Eckle+98, Author = {Judith Eckle-Kohler}, Month = {August}, Note = {to appear in: Proceedings of the Eighth Euralex International Congress, University of Li\`{e}ge, Belgium}, Title = {Methods for quality assurance in semi-automatic lexicon acquisition from corpora}, Year = 1998} @misc{Eckle+??, Author = {Judith Eckle and Ulrich Heid}, Title = {Extracting raw material for a German subcategorization lexicon from newspaper text}} @article{Eddington00, Author = {David Eddington}, Journal = {Lingua}, Pages = {281--298}, Title = {Analogy and the dual-route model of morphology}, Volume = 110, Year = 2000} @article{Eddington03, Author = {David Eddington}, Journal = {Lingua}, Pages = {849--871}, Title = {Issues in modeling language processing analogically}, Volume = 114, Year = 2003} @article{Edmonds67, Author = {Edmonds, J.}, Date-Added = {2009-11-15 00:26:25 +0100}, Date-Modified = {2009-11-15 00:26:36 +0100}, Journal = {Journal of Research of the 
National Bureau of Standards}, Pages = {233--240}, Title = {Optimum branchings}, Volume = {71}, Year = {1967}} @article{Efron+87, Author = {Bradley Efron and Ronald Thisted}, Journal = {Biometrika}, Pages = {445-455}, Title = {Did Shakespeare write a newly discovered poem?}, Volume = 74, Year = 1987} @book{Egan75, Address = {New York, NY}, Author = {J. P. Egan}, Publisher = {Academic Press}, Series = {Series in Cognition and Perception}, Title = {Signal detection theory and {ROC} analysis}, Year = 1975} @inproceedings{Egedi+94, Author = {Dania Egedi and Patrick Martin}, Booktitle = {Proceedings of the International Workshop on Sharable Natural Language Resources, Nara, Japan}, Month = {August}, Pages = {123--130}, Title = {A Freely Available Syntactic Lexicon for English}, Year = 1994} @inproceedings{Eijk93, Author = {P. {Van der Eijk}}, Booktitle = {EACL}, Pages = {113--119}, Title = {Automating the acquisition of bilingual terminology}, Year = 1993} @incollection{Eisner00, Address = {Norwell, MA, USA}, Author = {Eisner, J.}, Booktitle = {Advances in Probabilistic and Other Parsing Technologies}, Date-Added = {2009-11-15 00:26:45 +0100}, Date-Modified = {2009-11-15 00:26:59 +0100}, Editor = {H. Bunt and A. Nijholt}, Pages = {29--62}, Publisher = {Kluwer Academic Publishers}, Title = {Bilexical grammars and their cubic-time parsing algorithms}, Year = {2000}} @book{Eisner96, Author = {J. Eisner}, Publisher = {Technical Report IRCS-96-11, Institute for Research in Cognitive Science, University of Pennsylvania}, Title = {An Empirical Comparison of Probability Models for Dependency Grammar}, Year = 1996} @techreport{Ejerhed+92, Author = {E. Ejerhed and G. Kallgren and O. Wennstedt and M. Astrom}, Institution = {Department of General Linguistics, University of Umea}, Title = {The Linguistic Annotation System of the Stockholm-Umea Project}, Year = 1992} @inproceedings{Elisseeff+02, Author = {Elisseeff, A. 
and Weston, J.}, Booktitle = {Advances in Neural Information Processing Systems}, Date-Added = {2009-11-15 00:27:06 +0100}, Date-Modified = {2009-11-15 00:27:20 +0100}, Pages = {681--688}, Title = {A Kernel Method for Multi-Labelled Classification}, Volume = {14}, Year = {2002}} @phdthesis{Ellison93, Author = {T. M. Ellison}, School = {University of Western Australia}, Title = {Machine learning of phonological structure}, Year = 1993} @article{Elman+88, Author = {J. L. Elman and D. Zipser}, Journal = {Journal of the Acoustical Society of America}, Pages = {1615--1625}, Title = {Learning the Hidden Structure of Speech}, Volume = 83, Year = 1988} @article{Elman90, Author = {J. Elman}, Journal = {Cognitive Science}, Pages = {179--211}, Title = {Finding Structure in Time}, Volume = 14, Year = 1990} @article{Elman90b, Author = {Elman, J.}, Journal = {{Cognition}}, Number = 2, Title = {{Learning and development in neural networks: the importance of starting small}}, Volume = 14, Year = 1990} @article{Elman93, Author = {Elman, J.}, Journal = {{Cognitive Science}}, Pages = {71--99}, Title = {{Finding structure in time}}, Volume = 48, Year = 1993} @inproceedings{Elworthy00, Author = {D. Elworthy}, Booktitle = {Proceedings of TREC-9}, Organization = {NIST}, Title = {Question Answering Using a Large NLP System}, Year = 2001} @inproceedings{Elworthy94, Author = {David Elworthy}, Booktitle = ANLP, Pages = {53--58}, Title = {Does {Baum-Welch} re-estimation improve taggers?}, Year = 1994} @inproceedings{Ernst-Gerlach+06, Abstract = {In this paper, we describe a new approach for retrieval in texts with non-standard spelling, which is important for historic texts in English or German. For this purpose, we present a new algorithm for generating search term variants in ancient orthography. By applying a spell checker on a corpus of historic texts, we generate a list of candidate terms for which the contemporary spellings have to be assigned manually. 
Then our algorithm produces a set of probabilistic rules. These probabilities can be considered for ranking in the retrieval stage. An experimental comparison shows that our approach outperforms competing methods.}, Author = {Andrea Ernst-Gerlach and Norbert Fuhr}, Booktitle = {28th European Conference on Information Retrieval Research ({ECIR} 2006)}, Date-Added = {2010-01-29 15:09:33 +0100}, Date-Modified = {2010-01-29 15:09:39 +0100}, Publisher = {Springer}, Title = {Generating Search Term Variants for Text Collections with Historic Spellings}, Year = 2006} @inproceedings{Ernst-Gerlach+07, Address = {New York, NY, USA}, Author = {Andrea Ernst-Gerlach and Norbert Fuhr}, Booktitle = {JCDL '07: Proceedings of the 2007 conference on Digital libraries}, Date-Added = {2010-01-29 15:09:47 +0100}, Date-Modified = {2010-01-29 15:09:53 +0100}, Doi = {http://doi.acm.org/10.1145/1255175.1255242}, Isbn = {978-1-59593-644-8}, Location = {Vancouver, BC, Canada}, Pages = {333--341}, Publisher = {ACM Press}, Title = {Retrieval in text collections with historic spelling using linguistic and spelling variants}, Year = {2007}, Bdsk-Url-1 = {http://doi.acm.org/10.1145/1255175.1255242}} @inproceedings{Escudero+00, Author = {Escudero, G. and Marquez, L. 
and Rigau, G.}, Booktitle = {European Conference on Machine Learning}, Date-Modified = {2009-09-06 20:35:43 +0200}, Keywords = {wsd, word sense disambiguation}, Pages = {129-141}, Title = {Boosting Applied to Word Sense Disambiguation}, Year = 2000} @inproceedings{Essen+93, Author = {Ute Essen and Volker Steinbiss}, Booktitle = {Proceedings of ICASSP}, Organization = {IEEE}, Pages = {161--164}, Title = {Cooccurrence Smoothing for Stochastic Language Modeling}, Volume = 1, Year = 1993} @inproceedings{Essen92, Author = {Ute Essen and Volker Steinbiss}, Booktitle = {Proceedings of {ICASSP}}, Pages = {161--164}, Publisher = {{IEEE}}, Title = {Cooccurrence Smoothing for Stochastic Language Modeling}, Volume = {I}, Year = 1992} @book{Estes94, Address = {New York}, Author = {W. K. Estes}, Date-Modified = {2010-02-14 23:07:58 +0100}, Publisher = {Oxford University Press}, Series = {Oxford Psychology Series}, Title = {Classification and cognition}, Volume = 22, Year = 1994} @article{Evans+96, Author = {Roger Evans and Gerald Gazdar}, Journal = {Computational Linguistics}, Number = 2, Pages = {167--216}, Title = {{DATR}: A Language for Lexical Knowledge Representation}, Volume = 22, Year = 1996} @article{Evans01, Author = {R. Evans}, Journal = {Journal of Literary and Linguistic Computing}, Number = 1, Pages = {45--57}, Title = {Applying Machine Learning Toward an Automatic Classification of {\it It}}, Volume = 16, Year = 2001} @inproceedings{Even-Zohar+00, Address = {New Brunswick, NJ}, Author = {Y. Even-Zohar and D. Roth}, Booktitle = {Proceedings of the First North-American Conference on Computational Linguistics}, Pages = {124--131}, Publisher = {ACL}, Title = {A classification approach to word prediction}, Year = 2000} @inproceedings{Even-Zohar+99, Address = {Bar-Ilan, Israel}, Author = {Y. Even-Zohar and D. Roth and D. Zelenko}, Booktitle = {BISFAI99}, Title = {Word Clustering via Classification}, Year = 1999} @techreport{Fahlman+90, Author = {S. E. Fahlman and C. 
Lebi{{\`{e}}}re}, Institution = {School of Computer Science, Carnegie-Mellon University, Pittsburgh, PA}, Number = {CMU-CS-90-100}, Title = {The Cascade-correlation Learning Architecture}, Year = 1990} @techreport{Fahlman88, Author = {S. E. Fahlman}, Institution = {Carnegie--Mellon University}, Number = {CMU-CS-88-162}, Title = {An empirical study of learning speed in back-propagation networks}, Year = 1988} @inproceedings{Fahlman88b, Address = {San Mateo, CA}, Author = {S. E. Fahlman}, Booktitle = {Proceedings of the 1988 Connectionist Summer School}, Pages = {xx-xx}, Publisher = {Morgan Kaufmann}, Title = {Faster-learning variations on Back-propagation: An empirical study}, Year = 1988} @book{Fano61, Address = {Cambridge, MA}, Author = {R. Fano}, Publisher = MIT, Title = {Transmission of Information}, Year = 1961} @article{Farago+93, Author = {A. Farag{\'{o}} and G. Lugosi}, Journal = {{IEEE} Transactions on Information Theory}, Pages = {1146--1151}, Title = {Strong universal consistency of neural network classifiers}, Volume = 39, Year = 1993} @article{Farah93, Author = {Farah, M.}, Date-Modified = {2009-02-21 19:45:30 +0100}, Journal = {{Brain and Behavioral Sciences}}, Keywords = {cognitive architecture, face recognition lesions. localization, modularity, neural networks, neuropsychology semantics, vision}, Pages = {?}, Title = {{Neuropsychological Inference with an Interactive Brain: A Critique of the locality assumption}}, Volume = {?}, Year = 1993} @techreport{Fawcett04, Author = {T. Fawcett}, Date-Modified = {2009-02-21 19:45:59 +0100}, Institution = {Hewlett Packard Labs}, Keywords = {ROC, AUC, evaluation, methodology}, Number = {HPL-2003-4}, Title = {{ROC} Graphs: Notes and Practical Considerations for Researchers}, Year = 2004} @incollection{Federici+96, Address = {London}, Author = {S. Federici and V. 
Pirelli}, Booktitle = {New Methods in Language Processing}, Publisher = {UCL Press}, Title = {Analogy, Computation and Linguistic Theory}, Year = 1996} @techreport{Feldman94, Address = {Ramat-Gan, Israel}, Author = {R. Feldman}, Institution = {Bar-Ilan University}, Title = {Knowledge Discovery in Textual Databases}, Year = 1994} @inproceedings{Feldman95a, Author = {R. Feldman and I. Dagan}, Booktitle = {Proceedings of the First International Conference on Knowledge Discovery (KDD-95)}, Month = {August}, Place = {Montreal}, Title = {{KDT} -- Knowledge Discovery in Texts}, Year = 1995} @inproceedings{Feldman95b, Author = {R. Feldman and I. Dagan}, Booktitle = {Proceedings of the ECML-95 Workshop in Knowledge Discovery}, Month = {May}, Place = {Crete}, Title = {Knowledge Discovery in Textual Databases}, Year = 1995} @misc{Fellbaum+93, Author = {C. Fellbaum and D. Gross and K. Miller}, Title = {Adjectives in WordNet}, Year = 1993} @misc{Fellbaum93, Author = {C. Fellbaum}, Howpublished = {available from: {\tt\small http://www.cogsci.princeton.edu/\ { }wn/}}, Title = {English Verbs as a Semantic Net}, Year = 1993} @book{Fellbaum98, Address = {Cambridge, MA}, Author = {C. Fellbaum}, Date-Modified = {2011-01-29 16:44:26 +0100}, Keywords = {Wordnet}, Publisher = MIT, Title = {{WordNet}: An Electronic Lexical Database}, Year = 1998} @inproceedings{Fernandez+04, Address = {Geneva, Switzerland}, Author = {R. Fern\'andez and J. Ginzburg and S. Lappin}, Booktitle = {Proceedings of the 20th International Conference on Computational Linguistics, COLING 2004}, Month = {August}, Pages = {240--246}, Title = {Classifying Ellipsis in Dialogue: A Machine Learning Approach}, Year = 2004} @article{Ferrer+01, Author = {Ferrer i Cancho, Ramon and Ricard V. Sol\'{e}}, Date-Added = {2010-01-29 14:37:12 +0100}, Date-Modified = {2010-09-18 14:39:57 +0200}, Journal = {Proceedings of The Royal Society of London. 
Series B, Biological Sciences}, Keywords = {small-world semantic networks, scaling, lexical networks, human language}, Month = {November}, Number = {1482}, Pages = {2261--2265}, Title = {The small world of human language}, Url = {http://www.isrl.uiuc.edu/~amag/langev/paper/ferrer01theSmall.html}, Volume = {268}, Year = {2001}, Bdsk-Url-1 = {http://www.isrl.uiuc.edu/~amag/langev/paper/ferrer01theSmall.html}} @article{Ferrer+01b, Author = {Ferrer i Cancho, Ramon and Ricard V. Sol\'{e}}, Date-Added = {2010-01-29 14:37:12 +0100}, Date-Modified = {2010-01-29 14:38:03 +0100}, Journal = {Journal of Quantitative Linguistics}, Keywords = {{Z}ipf's law, origin of language, scaling}, Number = {3}, Pages = {165--173}, Title = {Two regimes in the frequency of words and the origins of complex lexicons: {Z}ipf's law revisited}, Url = {http://www.isrl.uiuc.edu/~amag/langev/paper/ferrer01twoRegimes.html}, Volume = {8}, Year = {2001}, Bdsk-Url-1 = {http://www.isrl.uiuc.edu/~amag/langev/paper/ferrer01twoRegimes.html}} @article{Ferrer+02, Author = {Ferrer i Cancho, Ramon and Ricard V. Sol\'{e}}, Date-Added = {2010-01-29 14:37:12 +0100}, Date-Modified = {2010-01-29 14:38:15 +0100}, Journal = {Advances in Complex Systems}, Keywords = {Human language; scaling; {Z}ipf's law; monkey languages; random texts}, Number = {1}, Pages = {1--6}, Title = {{Z}ipf's law and random texts}, Url = {http://www.isrl.uiuc.edu/~amag/langev/paper/ferrer02zipf.html}, Volume = {5}, Year = {2002}, Bdsk-Url-1 = {http://www.isrl.uiuc.edu/~amag/langev/paper/ferrer02zipf.html}} @techreport{Ferrer+03, Author = {Ferrer i Cancho, Ramon and R. V. Sol\'{e} and R. 
Kohler}, Date-Added = {2010-01-29 14:37:12 +0100}, Date-Modified = {2010-09-18 14:40:09 +0200}, Institution = {Santa Fe Institute}, Keywords = {complex networks, linguistic universals, small-world semantic networks, syntax, scaling}, Note = {Santa Fe Working paper 03-06-042}, Title = {Universality in syntactic dependencies}, Url = {http://www.isrl.uiuc.edu/~amag/langev/paper/ferrericancho2003SFI.html}, Year = {2003}, Bdsk-Url-1 = {http://www.isrl.uiuc.edu/~amag/langev/paper/ferrericancho2003SFI.html}} @article{Ferrer+03b, Author = {Ferrer i Cancho, Ramon and Ricard V. Sol\'{e}}, Date-Added = {2010-01-29 14:37:12 +0100}, Date-Modified = {2010-01-29 14:38:28 +0100}, Journal = {PNAS}, Pages = {788--791}, Title = {Least effort and the origins of scaling in human language}, Url = {http://www.isrl.uiuc.edu/~amag/langev/paper/ferrer03leasteffort.html}, Volume = {100}, Year = {2003}, Bdsk-Url-1 = {http://www.isrl.uiuc.edu/~amag/langev/paper/ferrer03leasteffort.html}} @article{Ferrer+04, Author = {Ferrer i Cancho, Ramon and Ricard V. Sol\'{e}}, Date-Added = {2010-01-29 14:37:12 +0100}, Date-Modified = {2010-01-29 14:39:18 +0100}, Journal = {Physical Review E}, Number = {051915}, Title = {Patterns in syntactic dependency networks}, Url = {http://www.isrl.uiuc.edu/~amag/langev/paper/ferrer04syntaxPRE.html}, Volume = {69}, Year = {2004}, Bdsk-Url-1 = {http://www.isrl.uiuc.edu/~amag/langev/paper/ferrer04syntaxPRE.html}} @article{Ferrer+05, Author = {Ferrer i Cancho, Ramon and Oliver Riordan and Bela Bollobas}, Date-Added = {2010-01-29 14:37:12 +0100}, Date-Modified = {2010-01-29 14:39:01 +0100}, Journal = {Proceedings of The Royal Society of London. 
Series B, Biological Sciences}, Keywords = {{Z}ipf's Law, Syntax, Symbolic Reference, Human Language}, Title = {The consequences of {Z}ipf's law for syntax and symbolic reference}, Url = {http://www.isrl.uiuc.edu/~amag/langev/paper/ferrer05consequencesOfZipfLaw.html}, Year = {2005}, Bdsk-Url-1 = {http://www.isrl.uiuc.edu/~amag/langev/paper/ferrer05consequencesOfZipfLaw.html}} @phdthesis{Ferrer03, Author = {Ferrer i Cancho, Ramon}, Date-Added = {2010-01-29 14:37:12 +0100}, Date-Modified = {2010-01-29 14:39:35 +0100}, Title = {Language: universals, principles and origins}, Url = {http://www.isrl.uiuc.edu/~amag/langev/paper/ferrericancho2003phdthesis.html}, Year = {2003}, Bdsk-Url-1 = {http://www.isrl.uiuc.edu/~amag/langev/paper/ferrericancho2003phdthesis.html}} @article{Ferrer04, Author = {Ferrer i Cancho, Ramon}, Date-Added = {2010-01-29 14:37:12 +0100}, Date-Modified = {2010-01-29 14:38:40 +0100}, Journal = {Physica A: Statistical Mechanics and its Applications}, Keywords = {{Z}ipf's law; Scaling; Human language; Animal communication}, Month = {January}, Number = {1-2}, Pages = {275--284}, Title = {Decoding least effort and scaling in signal frequency distributions}, Url = {http://www.isrl.uiuc.edu/~amag/langev/paper/ferrer04physicaA.html}, Volume = {345}, Year = {2005}, Bdsk-Url-1 = {http://www.isrl.uiuc.edu/~amag/langev/paper/ferrer04physicaA.html}} @article{Ferrer05, Author = {Ferrer i Cancho, Ramon}, Date-Added = {2010-01-29 14:37:12 +0100}, Date-Modified = {2010-01-29 14:39:11 +0100}, Journal = {European Physical Journal B}, Pages = {249--257}, Publisher = {EDP Sciences, Societ\`{a} Italiana di Fisica and Springer-Verlag}, Title = {The variation of {Z}ipf's law in human language}, Volume = {44}, Year = {2005}} @article{Fiez+92, Author = {Fiez, J.A. and E.A. Raife and D.A. Balota and J.P. Schwartz and M.E. Raichle and S.E. 
Stevenson}, Date-Modified = {2008-07-23 16:57:38 +0200}, Journal = {{The Journal of Neuroscience}}, Keywords = {PET, human language processing, working memory, neuroimaging, prefrontal cortex}, Pages = {802--822}, Title = {{A Positron Emission Tomography Study of the Short-Term Maintenance of Verbal Information}}, Volume = {16(2)}, Year = 1996} @article{Fillmore+03, Author = {C. Fillmore and C. Johnson and M. Petruck}, Date-Modified = {2009-11-14 19:05:49 +0100}, Journal = {International Journal of Lexicography}, Keywords = {FrameNet, lexicography}, Number = 3, Pages = {235--250}, Title = {Background to FrameNet}, Volume = 16, Year = 2003} @inproceedings{Finch+92, Address = {Bloomington, IN}, Author = {S. Finch and N. Chater}, Booktitle = {Proceedings of the Fourteenth Annual Conference of the Cognitive Science Society}, Pages = {820--825}, Publisher = {Lawrence Erlbaum}, Title = {Bootstrapping Syntactic Categories}, Year = 1992} @phdthesis{Finch93, Author = {S. Finch}, School = {University of Edinburgh}, Title = {Finding structure in language}, Year = 1993} @inproceedings{Finch94, Author = {S. Finch}, Booktitle = {Proceedings of the 4th Conference on Applied Natural Language Processing}, Title = {Exploiting sophisticated representations for document retrieval}, Year = 1994} @inproceedings{Finkel+05, Author = {Finkel, J.R. and Grenager, T. and Manning, C.}, Booktitle = {Proceedings of the 43rd Annual Meeting of the Association for Computational Linguistics (ACL'05)}, Date-Added = {2009-11-15 00:27:29 +0100}, Date-Modified = {2009-11-15 00:27:44 +0100}, Pages = {363--370}, Title = {Incorporating non-local information into information extraction systems by Gibbs sampling}, Year = {2005}} @inproceedings{Finkel+09, Address = {Boulder, Colorado}, Author = {J. R. Finkel and C. D. Manning}, Booktitle = {Proc. 
of Human Language Technologies: The 2009 Annual Conference of the NAACL}, Date-Added = {2009-12-26 21:25:40 +0100}, Date-Modified = {2010-09-14 12:53:20 +0200}, Keywords = {joint learning, named-entity recognition, parsing}, Organization = {ACL}, Pages = {326--334}, Title = {Joint parsing and named entity recognition}, Year = {2009}} @inproceedings{Finkel+10, Address = {Uppsala, Sweden}, Author = {Finkel, {J. R.} and Manning, {C. D.}}, Booktitle = {Proceedings of the 48th Annual Meeting of the Association for Computational Linguistics}, Month = {July}, Pages = {720--728}, Publisher = {Association for Computational Linguistics}, Title = {Hierarchical Joint Learning: Improving Joint Parsing and Named Entity Recognition with Non-Jointly Labeled Data}, Year = {2010}} @article{Fisher87, Author = {D. Fisher}, Journal = {Machine Learning}, Pages = {139--172}, Title = {Knowledge acquisition via incremental conceptual clustering}, Volume = 2, Year = 1987} @techreport{Fix+51, Author = {E. Fix and J. L. Hodges}, Date-Modified = {2009-12-27 17:14:10 +0100}, Institution = {USAF School of Aviation Medicine}, Keywords = {k-NN}, Number = {Project 21-49-004, Report No. 4}, Title = {Discriminatory analysis---nonparametric discrimination; consistency properties}, Year = 1951} @techreport{Fix+52, Author = {E. Fix and J. L. Hodges}, Date-Modified = {2009-12-27 17:14:17 +0100}, Institution = {USAF School of Aviation Medicine}, Keywords = {k-NN}, Number = {Project 21-49-004, Report No. unknown}, Title = {Discriminatory Analysis: Small Sample Performance}, Year = 1952} @phdthesis{Flach95, Author = {P. Flach}, School = {Katholieke Universiteit Brabant, Tilburg, The Netherlands}, Title = {Conjectures: an inquiry concerning the logic of induction}, Year = 1995} @inproceedings{Florian+03, Author = {R. Florian and A. Ittycheriah and H. Jing and T. 
Zhang}, Booktitle = {Proceedings of the seventh Conference on Natural Language Learning at HLT-NAACL 2003}, Date-Added = {2009-11-15 00:27:53 +0100}, Date-Modified = {2009-12-27 17:14:32 +0100}, Editor = {W. Daelemans and M. Osborne}, Keywords = {named-entity recognition}, Pages = {168--171}, Title = {Named Entity Recognition through Classifier Combination}, Year = {2003}} @inproceedings{Florian+99, Address = {College Park, MD, USA}, Author = {R. Florian and D. Yarowsky}, Booktitle = {In Proceedings of ACL'99}, Title = {Dynamic Non-local Language Modeling via Hierarchical Topic-Based Adaptation}, Year = 1999} @book{Fodor+74, Address = {{New York}}, Author = {Fodor,J. A. and Bever,T. G. and Garrett,M. F.}, Publisher = {{McGraw-Hill}}, Title = {{The psychology of language : an introduction to psycholinguistics and generative grammar}}, Year = 1974} @article{Fodor+88, Author = {J. A. Fodor and Z. W. Pylyshyn}, Journal = {Cognition}, Pages = {2--71}, Title = {Connectionism and Cognitive Architecture: A Critical Analysis}, Volume = 28, Year = 1988} @book{Fodor83, Address = {New York}, Author = {J. A. Fodor}, Publisher = MIT, Title = {The Modularity of Mind}, Year = {1983}} @incollection{Fong+96, Address = {Berlin}, Author = {Eva Wai-man Fong and Dekai Wu}, Booktitle = {Connectionist, Statistical and Symbolic Approaches to Learning for Natural Language Processing}, Editor = {Stefan Wermter and Ellen Riloff and Gabriele Scheler}, Pages = {173--187}, Publisher = {Springer-Verlag}, Series = {Lecture Notes in Artificial Intelligence}, Title = {Learning Restricted Probabilistic Link Grammars}, Volume = 1040, Year = 1996} @article{Forster+73, Author = {K. I. Forster and S. M. Chambers}, Journal = {Journal of Verbal Learning and Verbal Behaviour}, Pages = {627--635}, Title = {Lexical access and naming time}, Volume = 12, Year = 1973} @inbook{Fourcin75, Address = {Paris}, Author = {A.J. 
Fourcin}, Booktitle = {Foundations of Language Development, a Multidisciplanary approach}, Editor = {Lenneberg, E. H. and Lenneberg E.}, Publisher = {The UNESCO Press}, Title = {Language Development in the Absence of Expressive Speech}, Volume = 2, Year = 1975} @book{Francis+82, Address = {Boston, MA}, Author = {W. Francis and H. Ku\v{c}era}, Date-Modified = {2011-06-21 18:11:30 +0200}, Publisher = {Houghton Mifflin Company}, Title = {Frequency Analysis of English Usage}, Year = 1982} @incollection{Franz96, Address = {New York}, Author = {Franz, A.}, Booktitle = {Connectionist, Statistical, and Symbolic Approaches to Learning for Natural Language Processing}, Date-Modified = {2011-01-29 16:44:50 +0100}, Editor = {Wermter, S. and Riloff, E. and Scheler, G.}, Keywords = {PP attachment}, Pages = {188--202}, Publisher = {Springer-Verlag}, Series = {Lecture Notes in Artificial Intelligence}, Title = {Learning {PP} Attachment from Corpus Statistics}, Volume = 1040, Year = 1996} @article{Frauenfelder+93, Author = {U. H. Frauenfelder and R. H. Baayen and F. M. Hellwig and R. Schreuder}, Journal = {Journal of Memory and Language}, Pages = {781--804}, Title = {Neighborhood Density and Frequency across Languages and Modalities}, Volume = 32, Year = 1993} @inproceedings{Frawley+91, Address = {Cambridge, MA}, Author = {W.J. Frawley and G. Piatetsky-Shapiro and C.J. Matheus}, Booktitle = {Knowledge Discovery in Databases}, Editor = {G. Piatetsky-Shapiro and W. Frawley}, Pages = {1--27}, Publisher = MIT, Title = {Knowledge Discovery in Databases: An Overview}, Year = 1991} @incollection{Frawley91, Author = {W. J. Frawley and G. Piatetsky-Shapiro and C. J. Matheus}, Booktitle = {Knowledge Discovery in Databases}, Editor = {G. Piatetsky-Shapiro and W. J. Frawley}, Pages = {1--27}, Publisher = MIT, Title = {Knowledge discovery in databases: an overview}, Year = 1991} @article{Frazier+82, Author = {Frazier, L. and K. 
Rayner}, Journal = {Cognitive Psychology}, Pages = {178--210}, Title = {Making and correcting errors during sentence comprehension: Eye movements in the analysis of structurally ambiguous sentences}, Volume = 14, Year = 1982} @book{Frazier+98, Address = {Cambridge, MA}, Author = {Frazier, L. and Clifton, C.}, Publisher = {MIT Press}, Title = {Construal}, Year = 1998} @phdthesis{Frazier79, Author = {L. Frazier}, School = {University of Connecticut}, Title = {On comprehending sentences: Syntactic parsing strategies}, Year = 1979} @article{Fredkin60, Author = {E. Fredkin}, Journal = {Communications of the ACM}, Pages = {490--499}, Title = {Trie Memory}, Year = 1960} @book{Freeman+91, Address = {Reading, MA}, Author = {J. Freeman and D. Skapura}, Publisher = {Addison-Wesley}, Title = {Neural Networks, Algorithms, Applications, and Programming Techniques}, Year = 1991} @inproceedings{Freeman01, Address = {Toulouse, France}, Author = {A. Freeman}, Booktitle = {ACL/EACL-2001 Workshop on Arabic Language Processing: Status and Prospects}, Note = {Available on: {\tt\small http://www.elsnet.org/acl2001-arabic.html}}, Title = {Brill's {POS} tagger and a Morphology parser for {Arabic}}, Year = 2001} @article{Freitag00, Author = {D. Freitag}, Date-Modified = {2011-06-21 18:11:50 +0200}, Journal = ML, Number = {2/3}, Pages = {169--202}, Title = {Machine Learning for Information Extraction in Informal Domains}, Volume = 39, Year = 2000} @inproceedings{Freitag04, Address = {Morristown, NJ, USA}, Author = {Freitag, Dayne}, Booktitle = {COLING '04: Proceedings of the 20th international conference on Computational Linguistics}, Doi = {http://dx.doi.org/10.3115/1220355.1220407}, Location = {Geneva, Switzerland}, Pages = {357}, Publisher = {Association for Computational Linguistics}, Title = {Toward unsupervised whole-corpus tagging}, Year = {2004}, Bdsk-Url-1 = {http://dx.doi.org/10.3115/1220355.1220407}} @inproceedings{French94, Author = {R. M. 
French}, Booktitle = {Proceedings of the 16th Annual Cognitive Science Society Conference}, Title = {Dynamically constraining connectionist networks to produce distributed, orthogonal representations to reduce catastrophic inference}, Year = 1994} @inproceedings{Freund+96, Address = {San Francisco, CA}, Author = {Y. Freund and R. E. Schapire}, Booktitle = {Proceedings of the Thirteenth International Conference on Machine Learning}, Editor = {L. Saitta}, Pages = {148--156}, Publisher = {Morgan Kaufmann}, Title = {Experiments with a new boosting algorithm}, Year = 1996} @article{Freund+99, Author = {Freund, Y. and Schapire, R.E.}, Date-Added = {2009-11-15 00:28:20 +0100}, Date-Modified = {2009-11-15 00:28:34 +0100}, Journal = {Machine Learning}, Number = {3}, Pages = {277--296}, Publisher = {Springer}, Title = {Large Margin Classification Using the Perceptron Algorithm}, Volume = {37}, Year = {1999}} @inproceedings{Freund94, Author = {Yoav Freund}, Booktitle = {Working Notes of the Workshop on Relevance, AAAI Fall Symposium Series}, Pages = {85--89}, Title = {Sifting informative examples from a random source}, Year = 1994} @article{Friedman+77, Author = {Friedman, J. and Bentley, J. and Ari Finkel, R.}, Journal = {ACM Transactions on Mathematical Software}, Number = 3, Title = {An algorithm for finding best matches in logarithmic expected time}, Volume = 3, Year = 1977} @inproceedings{Friedman+96, Address = {Cambridge, MA}, Author = {J. H. Friedman and R. Kohavi and Y. Yun}, Booktitle = {Proceedings of the Thirteenth National Conference on Artificial Intelligence}, Date-Modified = {2009-11-14 19:02:08 +0100}, Keywords = {decision trees, lazy decision trees}, Pages = {717--724}, Publisher = MIT, Title = {Lazy decision trees}, Year = 1996} @phdthesis{Frisch96, Address = {Ill.}, Author = {Stefan Frisch}, School = {Evanston}, Title = {Similarity and Frequency in Phonology}, Year = 1996} @inproceedings{Fritzke91, Author = {B. 
Fritzke}, Booktitle = {Proceedings of IJCNN-91, Seattle}, Title = {Unsupervised clustering with growing cell structures}, Year = 1991} @inproceedings{Fritzke91b, Author = {B. Fritzke}, Booktitle = {Proceedings of {{\sc icann}}-91, Helsinki}, Title = {Let it grow: Self-organizing feature maps with problem-dependent cell structure}, Year = 1991} @article{Fritzke95, Author = {Bernd Fritzke}, Journal = {Neural Processing Letters}, Number = 5, Pages = {xx-yy}, Title = {Growing Grid: a self-organizing network with constant neighborhood range and adaptation strength}, Volume = 2, Year = 1995} @article{Frost+13, Author = {R. Frost and L. Katz and S. Bentin}, Journal = {Journal of Experimental Psychology: Human Perception and Performance}, Pages = {104--115}, Title = {Strategies for visual word recognition and orthographical depth: a multilingual comparison}, Volume = 13, Year = 1987} @inproceedings{Fujii+96, Author = {Fujii, A. and Inui, K. and Tokunaga, T. and Tanaka, H.}, Booktitle = {Proceedings of Fourth Workshop on Very Large Corpora}, Date-Modified = {2009-09-06 20:35:57 +0200}, Keywords = {wsd, word sense disambiguation}, Pages = {55--69}, Title = {Selective Sampling of Effective Example Sentence Sets for Word Sense Disambiguation}, Year = 1996} @inproceedings{Fujisaki89, Author = {T. Fujisaki and F. Jelinek and J. Cocke and E. Black and T. Nishino}, Booktitle = IPW, Title = {A probabilistic parsing method for sentence disambiguation}, Year = 1989} @inproceedings{Fung+95, Address = {Leuven, Belgium}, Author = {Pascale Fung and Dekai Wu}, Booktitle = {TMI-95, Proceedings of the Sixth International Conference on Theoretical and Methodological Issues in Machine Translation}, Month = Jul, Pages = {240--255}, Title = {Coerced Markov Models for Cross-Lingual Lexical Tag Relations}, Volume = 1, Year = 1995} @inproceedings{Fung94, Author = {P. Fung and K. 
McKeown}, Booktitle = {Proceedings of AMTA-94: Association of Machine Translation in the Americas}, Date-Modified = {2009-11-14 19:04:32 +0100}, Pages = {81--88}, Title = {Aligning noisy parallel corpora across language groups: word pair feature matching by dynamic time warping}, Year = 1994} @inproceedings{Fung94b, Author = {Pascale Fung and Ken Church}, Booktitle = COLING, Title = {K-vec: a new approach for aligning parallel texts}, Year = 1994} @inproceedings{Fung95, Author = {Pascale Fung}, Booktitle = ACL, Pages = {236--233}, Title = {A pattern matching method for finding noun and proper noun translations from noisy parallel corpora}, Year = 1995} @inproceedings{Fung95b, Author = {Pascale Fung}, Booktitle = {Proceedings of the Third Annual Workshop on Very Large Corpora}, Pages = {173--183}, Title = {Compiling Bilingual Lexicon Entries from a Non-Parallel English-Chinese Corpus}, Year = 1995} @inproceedings{Fung96, Author = {Pascale Fung}, Booktitle = {Proceedings of ICASSP: International Conference on Acoustics, Signal and Speech Processing}, Pages = {184--187}, Title = {Domain Word Translation by Space-Frequency Analysis of Context Length Histograms}, Volume = {I}, Year = 1996} @article{Fung96b, Author = {Pascale Fung and Katheleen McKeown}, Journal = {The Machine Translation Journal}, Title = {A Technical Word and Term Translation Aid using Noisy Parallel Corpora Across Language Groups}, Year = 1996} @inproceedings{Furnkranz01, Address = {Williamstown, MA}, Author = {J. F{\"u}rnkranz}, Booktitle = {Proceedings of the 18th International Conference on Machine Learning (ICML-01)}, Editor = {C. E. Brodley and A. P. Danyluk}, Pages = {146--153}, Publisher = {Morgan Kaufmann Publishers}, Title = {Round Robin Rule Learning}, Year = 2001} @book{Gak76, Address = {Paris}, Author = {V. G. Gak}, Publisher = {S.E.L.A.F.}, Title = {L'orthographe du fran{\c{c}}ais}, Year = 1976} @article{Gale+93, Author = {W. A. Gale and K. W. 
Church}, Journal = {Computational Linguistics}, Pages = {75--102}, Title = {A Program for Aligning Sentences in Bilingual Corpora}, Volume = 19, Year = 1993} @article{Gale+93b, Author = {W. A. Gale and K. W. Church and D. Yarowsky}, Date-Modified = {2009-09-06 20:36:03 +0200}, Journal = {Computers and the Humanities}, Keywords = {wsd, word sense disambiguation}, Pages = {415--439}, Title = {A Method for Disambiguating Word Senses in a Large Corpus}, Volume = 26, Year = 1993} @incollection{Gale+94, Author = {Gale, W. A. and W. Church and D. Yarowsky}, Booktitle = {Current issue in Computational Linguistics: In Honor of {D}on {W}alker}, Pages = {429--450}, Publisher = {Kluwer Academic Publishers}, Title = {Discrimination decisions for 100,000-dimensional spaces}, Year = 1994} @inproceedings{Gale91, Author = {W. Gale and K. Church}, Booktitle = DARPA, Date-Modified = {2010-02-14 23:08:08 +0100}, Title = {Identifying word correspondence in parallel text}, Year = 1991} @inproceedings{Gale91b, Author = {W. Gale and K. Church}, Booktitle = ACL, Date-Modified = {2010-02-14 23:08:16 +0100}, Title = {A program for aligning sentences in bilingual corpora}, Year = 1991} @inproceedings{Gale91c, Author = {W. Gale and K. Church}, Booktitle = {Proceedings of the Seventh Annual Conference of the UW Center for the New OED and Text Research, Using Corpora}, Date-Modified = {2010-02-14 23:08:24 +0100}, Pages = {40--62}, Place = {Oxford, U.K.}, Title = {Concordances for parallel texts}, Year = 1991} @techreport{Gale92, Author = {W. Gale and K. Church and D. Yarowsky}, Date-Modified = {2009-09-06 20:36:10 +0200}, Institution = {AT\&T Bell Laboratories}, Keywords = {wsd, word sense disambiguation}, Number = {Statistical Research Report, No. 104}, Title = {A method for disambiguating word senses in a large corpus}, Year = 1992} @inproceedings{Gale92b, Author = {W. Gale and K. Church and D. 
Yarowsky}, Booktitle = {Working Notes, AAAI Fall Symposium Series, Probabilistic Approaches to Natural Language}, Date-Modified = {2009-09-06 20:36:18 +0200}, Keywords = {wsd, word sense disambiguation}, Pages = {54--60}, Title = {Work on statistical methods for word sense disambiguation}, Year = 1992} @inproceedings{Gale92c, Author = {W. Gale and K. Church and D. Yarowsky}, Booktitle = TMI, Date-Modified = {2009-09-06 20:36:38 +0200}, Keywords = {wsd, word sense disambiguation, bilingual}, Pages = {101--112}, Title = {Using bilingual materials to develop word sense disambiguation methods}, Year = 1992} @article{Gale93, Author = {W. Gale and K. Church}, Date-Modified = {2010-02-14 23:08:32 +0100}, Journal = CL, Number = 1, Pages = {75--102}, Title = {A program for aligning sentences in bilingual corpora}, Volume = 19, Year = 1993} @inproceedings{Gale+94b, Author = {William A. Gale and Kenneth W. Church}, Booktitle = {Corpus-Based Research into Language}, Date-Modified = {2011-06-19 16:20:52 +0200}, Pages = {189--198}, Title = {What's wrong with adding one?}, Year = {1994}} @book{Gamma95, Author = {Erich Gamma and Richard Helm and Ralph Johnson and John Vlissides}, Publisher = {Addison-Wesley}, Title = {Design Patterns: Elements of Reusable Object-Oriented Software}, Year = 1995} @article{Gamon+08, Author = {Michael Gamon and Jianfeng Gao and Chris Brockett and Alexandre Klementiev and William B. Dolan and Dmitriy Belenko and Lucy Vanderwende}, Journal = {IJCNLP}, Title = {Using Contextual Speller Techniques and Language Modeling for ESL Error Correction}, Year = {2008}} @article{Garay-Vitoria+06, Author = {Nestor Garay-Vitoria and Julio Abascal}, Journal = {{U}niversal {A}ccess in the {I}nformation {S}ociety}, Number = 3, Pages = {188--203}, Publisher = {Springer}, Title = {{T}ext {P}rediction {S}ystems: {A} {S}urvey}, Volume = 4, Year = 2006} @inproceedings{Garay-Vitoria+97, Author = {N. Garay-Vitoria and J. 
Gonz{\'a}lez-Abascal}, Booktitle = {Proceedings of the 2nd International Conference on Intelligent User Interfaces}, Pages = {241--244}, Title = {Intelligent word-prediction to enhance text input rate}, Year = 1997} @inproceedings{Garcia+06, Address = {Trento, Italy}, Author = {M.A. Garc{\'{\i}}a Cumbreras and L.A. Ure{\~n}a L{\'o}pez and F. Mart{\'{\i}}nez Santiago}, Booktitle = {Proceedings of the EACL 2006 Workshop on Multilingual Question Answering}, Date-Modified = {2011-01-29 16:41:41 +0100}, Keywords = {question classification}, Pages = {39--44}, Title = {{BRUJA}: Question classification for Spanish}, Year = 2006} @book{Gardner87, Address = {New York, NY}, Author = {H. Gardner}, Edition = {paperback}, Publisher = {Basic Books}, Title = {The mind's new science: A history of the cognitive revolution}, Year = 1987} @inproceedings{Garg+01, Author = {A. Garg and D. Roth}, Booktitle = ECML, Pages = {179--191}, Title = {Understanding Probabilistic Classifiers}, Year = 2001} @inproceedings{Garner95, Author = {S.R. Garner}, Booktitle = {Proceedings of the New Zealand Computer Science Research Students Conference}, Date-Modified = {2009-11-14 17:41:15 +0100}, Pages = {57--64}, Title = {{WEKA}: The {W}aikato {E}nvironment for {K}nowledge {A}nalysis}, Year = 1995} @article{Garnsey+96, Author = {S.M. Garnsey and N.J Pearlmutter and E. Myers and M.A. Lotocky}, Journal = {submitted}, Title = {The relative contributions of verb bias and plausibility to the comprehension of temporarily ambiguous sentences}, Year = 1996} @article{Garnsey93, Author = {S. M. 
Garnsey}, Journal = {{Language and Cognitive Processes}}, Number = 4, Pages = {337--356}, Title = {{Event-related Brain Potentials in the Study of Language: an Introduction}}, Volume = 8, Year = 1993} @incollection{Garrett95, Address = {Bradford}, Author = {Garrett, M.}, Booktitle = {The Cognitive Neurosciences}, Chapter = 57, Editor = {Gazzaniga, M.S.}, Pages = {881--899}, Publisher = MIT, Title = {The Structure of Language Processing: Neuropsychological Evidence}, Year = 1995} @book{Garside+87, Address = {London, UK}, Author = {Garside, R. and Leech, G. and Sampson, G.}, Date-Added = {2009-11-15 00:28:48 +0100}, Date-Modified = {2010-02-01 23:24:54 +0100}, Publisher = {Longman}, Title = {The computational analysis of {E}nglish: a corpus-based approach}, Year = {1987}} @book{Garside+97, Address = {London and New York}, Author = {Roger Garside and Geoffrey Leech and Anthony McEnery}, Date-Added = {2010-02-01 23:24:24 +0100}, Date-Modified = {2010-02-01 23:24:30 +0100}, Publisher = {Longman}, Title = {Corpus Annotation}, Year = {1997}} @article{Gasser+90, Author = {M. Gasser and C.-D. Lee}, Journal = {Connection Science}, Pages = {265--278}, Title = {Networks that learn about phonological feature persistence}, Volume = 2, Year = 1990} @inproceedings{Gasser92, Address = {Hillsdale, NJ}, Author = {M. Gasser}, Booktitle = {Proceedings of the Fourteenth Annual Conference of the Cognitive Science Society}, Pages = {396--401}, Publisher = {Lawrence Erlbaum Associates}, Title = {Learning distributed representations for syllables}, Year = 1992} @book{Gates37, Address = {New York}, Author = {Arthur Irving Gates}, Date-Added = {2010-01-29 15:09:57 +0100}, Date-Modified = {2010-01-29 15:10:06 +0100}, Publisher = {Teachers College, Columbia University}, Title = {Spelling difficulties in 3867 words}, Year = {1937}} @article{Gates72, Author = {G. W. 
Gates}, Journal = {IEEE Transactions on Information Theory}, Pages = {431--433}, Title = {The reduced nearest neighbor rule}, Volume = 18, Year = 1972} @inproceedings{Gaussier92, Author = {E. Gaussier and J. M. Lange and F. Meunier}, Booktitle = {Proceedings of ALLC/ACH Conference}, Title = {Towards bilingual terminology}, Year = 1992} @book{Gazdar+89, Address = {Reading, MA}, Author = {Gerald Gazdar and Chris Mellish}, Publisher = {Addison-Wesley}, Title = {Natural Language Processing in {LISP}}, Year = 1989} @article{Gazdar85, Author = {G. Gazdar}, Journal = {Linguistics}, Number = 4, Pages = {597--607}, Title = {Finite-state morphology: a review of Koskenniemi (1983)}, Volume = 23, Year = 1985} @article{Gee83, Author = {Gee, J. P. and F. Grosjean}, Journal = {Cognitive Psychology}, Pages = {411--458}, Title = {Performance structures:A psycholinguistic and linguistic appraisal}, Volume = 15, Year = 1983} @book{Geerts+84, Author = {G. Geerts and W. Haeseryn and J. {De Rooij} and M. {Van der Toorn}}, Date-Modified = {2010-09-20 00:14:54 +0200}, Publisher = {Wolters-Noordhoff, Groningen and Wolters, Leuven}, Title = {Algemene Nederlandse Spraakkunst}, Year = 1984} @proceedings{Gelbukh08, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {CICLing}, Date-Added = {2010-02-10 20:38:32 +0100}, Date-Modified = {2010-02-10 20:38:55 +0100}, Editor = {Alexander F. Gelbukh}, Isbn = {978-3-540-78134-9}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Computational Linguistics and Intelligent Text Processing, 9th International Conference, CICLing 2008, Haifa, Israel, February 17-23, 2008, Proceedings}, Volume = {4919}, Year = {2008}} @article{Geman+84, Author = {S. Geman and D. 
Geman}, Date-Added = {2009-11-15 00:29:33 +0100}, Date-Modified = {2009-11-15 00:29:43 +0100}, Journal = {IEEE Transactions on Pattern Analysis and Machine Intelligence}, Pages = {721--741}, Title = {Stochastic Relaxation, {Gibbs} Distributions, and the {Bayesian} Restoration of Images}, Volume = {6}, Year = {1984}} @article{Geman+92, Author = {Stuart Geman and Elie Bienenstock and Ren\'{e} Doursat}, Journal = {Neural Computation}, Pages = {1--58}, Title = {Neural Networks and the Bias/Variance Dilemma}, Volume = 4, Year = 1992} @inproceedings{Germann+01, Author = {U. Germann and M. Jahr and K. Knight and D. Marcu and K. Yamada}, Booktitle = {Proceedings of 39th Annual Meeting of the Association for Computational Linguistics}, Date-Added = {2009-11-15 00:29:52 +0100}, Date-Modified = {2009-11-15 00:30:07 +0100}, Pages = {228--235}, Title = {Fast Decoding and Optimal Decoding for Machine Translation}, Year = {2001}} @misc{Germann01, Author = {Germann, U.}, Date-Added = {2009-11-15 00:29:52 +0100}, Date-Modified = {2009-11-15 00:30:21 +0100}, Publisher = {Release}, Title = {Aligned Hansards of the 36th Parliament of Canada}, Year = {2001}} @inproceedings{Germann03, Author = {Germann, U.}, Booktitle = {Proceedings of the 2003 Conference of the North American Chapter of the Association for Computational Linguistics on Human Language Technology}, Date-Added = {2009-11-15 00:29:52 +0100}, Date-Modified = {2009-11-15 00:30:30 +0100}, Pages = {1--8}, Title = {{Greedy decoding for statistical machine translation in almost linear time}}, Year = {2003}} @article{Gernsbacher+91, Author = {Gernsbacher, A. 
and Faust, M.}, Journal = {{Journal of Experimental Psychology: Learning, Memory and Cognition}}, Number = 2, Pages = {245--262}, Title = {{The Mechanism of Suppression: A Component of General Comprehension Skill}}, Volume = 17, Year = 1991} @book{Gernsbacher94, Address = {San Diego, CA}, Author = {Gernsbacher, M.}, Date-Modified = {2011-06-22 15:07:51 +0200}, Publisher = {{Academic Press}}, Title = {{Handbook of Psycholinguistics}}, Year = 1994} @article{Geschwind72, Author = {Geschwind, N.}, Date-Modified = {2008-07-23 16:58:09 +0200}, Journal = {{Scientific American}}, Keywords = {brain, human language processing}, Pages = {76--83}, Title = {{Language and the Brain}}, Volume = 226, Year = 1972} @inproceedings{Gesmundo+09, Address = {Boulder, CO}, Author = {A. Gesmundo and J. Henderson and P. Merlo and I. Titov}, Booktitle = {Proc. of the {CoNLL} 2009: Shared Task}, Date-Added = {2009-12-26 20:57:57 +0100}, Date-Modified = {2009-12-26 21:07:07 +0100}, Pages = {37--42}, Title = {A latent variable model of synchronous syntactic-semantic parsing for multiple languages}, Year = {2009}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/W09-1205}} @inproceedings{Geurts+06, Author = {Geurts, P. and Wehenkel, L. and Alch\'e-Buc, F.}, Booktitle = {Proceedings of the Twenty-Third International Conference on Machine Learning (ICML 2006)}, Date-Added = {2009-11-15 00:30:39 +0100}, Date-Modified = {2009-11-15 00:30:54 +0100}, Editor = {W. Cohen and A. Moore}, Pages = {345--352}, Title = {Kernelizing the output of tree-based methods}, Year = {2006}} @inproceedings{Ghamrawi+05, Author = {Ghamrawi, N. and McCallum, A.}, Booktitle = {Proceedings of the 14th ACM International Conference on Information and Knowledge Management}, Date-Added = {2009-11-15 00:31:03 +0100}, Date-Modified = {2009-11-15 00:31:19 +0100}, Pages = {195--200}, Title = {Collective multi-label classification}, Year = {2005}} @article{Gibson+94, Author = {E. Gibson and G. Hickok and C. T. 
Schuetze}, Journal = {The Journal of Psycholinguistic Research}, Title = {Processing Empty Categories: A parallel Approach}, Year = 1994} @incollection{Gibson+94a, Address = {Hillsdale, NJ}, Author = {E. Gibson and N.J Pearlmutter}, Booktitle = {Perspectives on Sentence Processing}, Chapter = 8, Editor = {C. Clifton Jr. and L. Frazier and K. Rayner}, Publisher = {Lawrence Erlbaum Associates}, Title = {A Corpus-Based Analysis of Psycholinguistic Constraints on Prepositional-Phrase Attachment}, Year = 1994} @article{Gibson+96, Author = {Gibson, E. and Pearlmutter, N. and Canseco-Gonzalez, E. and Hickok, G.}, Journal = {Cognition}, Number = 1, Pages = {23--59}, Title = {Recency preference in the human sentence processing mechanism}, Volume = 59, Year = 1996} @article{Gibson89, Author = {Gibson, E.}, Journal = {{??}}, Title = {{Recency Preference and Garden-Path Effects}}, Year = 1989} @inproceedings{Gibson90, Author = {E. Gibson}, Organization = {ACL}, Title = {Memory Capacity and Sentence Processing}, Year = 1990} @article{Gildea+96, Author = {D. Gildea and D. Jurafsky}, Journal = {Computational Linguistics}, Number = 4, Pages = {497-530}, Title = {Learning Bias and Phonological-Rule Induction}, Volume = 22, Year = 1996} @inproceedings{Gildea03, Author = {D. Gildea}, Booktitle = {Proceedings of the 41st Annual Meeting of the Association for Computational Linguistics}, Date-Added = {2009-11-15 00:33:23 +0100}, Date-Modified = {2009-11-15 00:33:33 +0100}, Pages = {80--87}, Title = {Loosely Tree-Based Alignment for Machine Translation}, Year = {2003}} @inproceedings{Gillis+00, Author = {S. Gillis and G. Durieux and W. Daelemans}, Booktitle = {Cognitive Models of Language Acquisition}, Editor = {P. Broeder and J. Murre}, Pages = {76-99}, Publisher = CUP, Title = {Lazy Learning: A comparison of Natural and Machine Learning of Stress}, Year = 2000} @book{Gillis+92, Address = {Antwerp}, Author = {S. Gillis and G. Durieux and W. Daelemans and A. 
{Van den Bosch}}, Date-Modified = {2010-09-14 22:15:09 +0200}, Keywords = {Dutch, diminutive inflection}, Publisher = {UIA}, Series = {Antwerp Papers in Linguistics}, Title = {Exploring artificial learning algorithms: Learning to stress {D}utch simplex words}, Volume = 71, Year = 1992} @inproceedings{Gillis+93, Author = {S. Gillis and G. Durieux and W. Daelemans and A. {Van den Bosch}}, Booktitle = {Proceedings of the 15th Conference of the Cognitive Science Society 1993, Boulder, CO}, Date-Modified = {2010-09-14 22:16:56 +0200}, Keywords = {ilk, Dutch, stress assignment}, Pages = {452--457}, Title = {Learnability and markedness: {D}utch stress assignment}, Year = 1993} @incollection{Gillis+95, Author = {S. Gillis and G. Durieux and W. Daelemans}, Booktitle = {Approaches to parameter setting}, Editor = {M. Verrips and F. Wijnen}, Pages = {135--173}, Series = {Amsterdam Studies in Child Language Development}, Title = {A computational model of {P\&P}: Dresher and Kaye (1990) revisited}, Volume = 4, Year = 1995} @inproceedings{Gillis92, Address = {Tilburg University, The Netherlands}, Author = {S. Gillis}, Booktitle = {Background and Experiments in Machine Learning of Natural Language: Proceedings of the First SHOE Workshop}, Editor = {W. Daelemans and D. Powers}, Pages = {25--52}, Publisher = {Institute for Language Technology and AI}, Title = {Topics in 'Natural' Natural Language Learning}, Year = 1992} @unpublished{Gillis99, Author = {Gillis, S.}, Note = {Paper presented at the International Workshop about Design and Annotation of Speech Corpora, Tilburg}, Title = {Phonemic transcriptions: qualitative and quantitative aspects}, Year = 1999} @inproceedings{Gimenez+07, Address = {Prague, Czech Republic}, Author = {J. Gim\'{e}nez and L. 
M\`{a}rquez}, Booktitle = {Proceedings of the Second Workshop on Statistical Machine Translation}, Month = {June}, Pages = {159--166}, Publisher = {Association for Computational Linguistics}, Title = {Context-aware Discriminative Phrase Selection for Statistical Machine Translation}, Url = {http://www.aclweb.org/anthology/W/W07/W07-0719}, Year = {2007}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/W/W07/W07-0719}} @article{Gleitman90, Author = {L. Gleitman}, Journal = {Language Acquisition}, Number = 1, Pages = {3--56}, Title = {The structural sources of verb meaning}, Volume = 1, Year = 1990} @article{Glushko79, Author = {R. J. Glushko}, Journal = {Journal of Experimental Psychology: Human Perception and Performance}, Pages = {647--691}, Title = {The organisation and activation of orthographic knowledge in reading aloud}, Volume = 5, Year = 1979} @inproceedings{Golan88, Author = {Igal Golan and Shalom Lappin and Mori Rimon}, Booktitle = COLING, Pages = {205--211}, Title = {An active bilingual lexicon for machine translation}, Year = 1988} @article{Gold67, Author = {E. M. Gold}, Journal = {Information and Control}, Pages = {447-474}, Title = {Language identification in the limit}, Volume = 10, Year = 1967} @inbook{Goldberg76, Address = {Amsterdam, The Netherlands}, Author = {E. A. Goldberg}, Booktitle = {Soviet Studies in Language and Language Behavior}, Date-Modified = {2011-06-21 18:25:53 +0200}, Editor = {J. Pr\.{u}cha}, Publisher = {North-Holland Publishing Company}, Title = {A Neuropsychological Analysis of the Semantic Aspects of Text Perception}, Year = 1976} @book{Goldberg94, Address = {Chicago, Il}, Author = {A. Goldberg}, Publisher = {{Chicago University Press}}, Title = {{Constructions, A Construction Grammar Approach to Argument Structure}}, Year = 1994} @inproceedings{Golding+96, Author = {A. R. Golding and D. 
Roth}, Booktitle = ML, Date-Modified = {2014-01-06 20:33:57 +0000}, Pages = {182--190}, Title = {Applying {W}innow to Context-Sensitive Spelling Correction}, Year = 1996} @inproceedings{Golding+96b, Address = {Santa Cruz, CA}, Author = {A. R. Golding and Y. Schabes}, Booktitle = {Proceedings 34th Annual Meeting of the Association for Computational Linguistics}, Date-Modified = {2009-11-14 23:08:30 +0100}, Keywords = {spelling correction}, Title = {Combining trigram-based and feature-based methods for context-sensitive spelling correction}, Year = 1996} @article{Golding+98, Author = {A. R. Golding and D. Roth}, Journal = {Machine Learning}, Title = {A {W}innow-based approach to spelling correction}, Year = {1998, forthcoming}} @article{Golding+99, Author = {Golding, A.R. and Roth, D.}, Journal = {{Machine Learning}}, Number = {1--3}, Pages = {{107--130}}, Publisher = {{Kluwer Academic}}, Title = {{A Winnow-Based Approach to Context-Sensitive Spelling Correction}}, Volume = 34, Year = 1999} @phdthesis{Golding91, Address = {Stanford, CA}, Author = {A. R. Golding}, School = {Stanford University}, Title = {Pronouncing names by a combination of rule-based and case-based reasoning}, Year = 1991} @inproceedings{Golding95, Author = {A. R. Golding}, Booktitle = {Proceedings of the 3rd workshop on very large corpora, ACL-95}, Comment = {good description of other techniques for context-sensitive spelling}, Date-Modified = {2011-06-19 16:22:01 +0200}, Pages = {39--53}, Title = {A {B}ayesian hybrid method for context-sensitive spelling correction}, Year = 1995} @article{Goldsmith01, Author = {J. Goldsmith}, Date-Modified = {2012-01-15 20:24:12 +0000}, Journal = CL, Number = 2, Pages = {153--198}, Title = {Unsupervised Learning of the Morphology of a Natural Language}, Volume = 27, Year = 2001} @article{Goldsmith76, Author = {J. 
Goldsmith}, Journal = {Linguistic Analysis}, Pages = {23--68}, Title = {An overview of autosegmental phonology}, Volume = 2, Year = 1976} @incollection{Goldsmith95, Address = {Cambridge, MA}, Author = {J. A. Goldsmith}, Booktitle = {The handbook of phonological theory}, Editor = {J. A. Goldsmith}, Pages = {1--23}, Publisher = {Blackwell Publishers}, Title = {Phonological theory}, Year = 1995} @book{Goldsmith96, Address = {Cambridge, MA}, Editor = {J. A. Goldsmith}, Publisher = {Blackwell Publishers}, Title = {The handbook of phonological theory}, Year = 1996} @inproceedings{Gonzalo+98, Author = {Gonzalo, J. and Verdejo, F. and Chugur, I. and Cigarran, J.}, Booktitle = COLING/ACL98, Pages = {38--44}, Title = {{Indexing with WordNet synsets can improve text retrieval}}, Year = 1998} @article{Good53, Author = {I. J. Good}, Journal = {Biometrika}, Pages = {237--264}, Title = {The population frequencies of species and the estimation of population parameters}, Volume = 40, Year = 1953} @article{Goodhill+99, Author = {Goodhill, J. and Finch, S. and Sejnowski, T.}, Journal = {{Advances in Neural Information Processing}}, Title = {{Optimizing Cortical Mappings}}, Year = 1996} @inproceedings{Goodman+02, Address = {New York, NY, USA}, Author = {J. Goodman and G. Venolia and K. Steury and C. Parker}, Booktitle = {{IUI} '02: Proceedings of the 7th International Conference on Intelligent User Interfaces}, Date-Modified = {2011-10-17 06:35:11 +0000}, Location = {San Francisco, California, USA}, Pages = {194--195}, Publisher = {ACM}, Title = {Language modeling for soft keyboards}, Year = 2002} @inproceedings{Gough+04, Address = {Baltimore, Maryland}, Author = {N. Gough and A. Way}, Booktitle = {Proceedings of TMI 2004}, Pages = {95-104}, Title = {Robust Large-Scale {EBMT} with Marker-Based Segmentation}, Year = 2004} @book{Gould02, Address = {Cambridge, MA}, Author = {S. J. 
Gould}, Date-Added = {2010-02-01 23:25:36 +0100}, Date-Modified = {2010-09-20 00:22:58 +0200}, Publisher = {Harvard University Press}, Title = {The Structure of Evolutionary Theory}, Year = {2002}} @article{Grainger90, Author = {J. Grainger}, Journal = {Journal of Memory and Language}, Pages = {228--244}, Title = {Word Frequency and Neighborhood Frequency Effects in Lexical Decision and Naming}, Volume = 29, Year = 1990} @article{Grasby+93, Author = {Grasby, P. and C. Frith and K. Friston and C. Bench and R. Frackowiak and R. Dolan}, Journal = {Brain}, Pages = {1--20}, Title = {Functional Mapping of Brain Areas Implicated in Auditory--Verbal Memory Function}, Year = 1993} @article{Green79, Author = {T. Green}, Journal = {Journal of Verbal Learning and Behavior}, Pages = {481-496}, Title = {The Necessity of Syntax Markers. Two experiments with artificial languages}, Volume = 18, Year = 1979} @inproceedings{Grefenstette93, Author = {Gregory Grefenstette}, Booktitle = {{SIGLEX/ACL} Workshop on Acquisition of Lexical Knowledge from Text, Columbus, OH}, Month = {June}, Title = {Evaluation Techniques for Automatic Semantic Extraction: Comparing Syntactic and Window Based Approaches}, Year = 1993} @inproceedings{Grefenstette94, Author = {Gregory Grefenstette}, Booktitle = {Proceedings of {EURALEX'94}}, Title = {Corpus-Derived First, Second and Third-Order Word Affinities}, Year = 1994} @inproceedings{Grefenstette96, Author = {G. Grefenstette}, Booktitle = {Workshop on Extended Finite State Models of Language, {ECAI'96}, Budapest, Hungary}, Date-Modified = {2012-01-15 20:24:21 +0000}, Editor = {Wolfgang Wahlster}, Publisher = {John Wiley \& Sons, Ltd.}, Title = {Light Parsing as Finite-State Filtering}, Year = 1996} @inproceedings{Greffenstette93, Author = {G. 
Greffenstette}, Booktitle = {ACL'93 workshop on the Acquisition of Lexical Knowledge from Text}, Title = {Evaluation techniques for automatic semantic extraction: comparing semantic and window based approaches}, Year = 1993} @inproceedings{Griffiths05, Author = {Thomas L. Griffiths and Mark Steyvers and David M. Blei and Joshua B. Tenenbaum}, Booktitle = {In Advances in Neural Information Processing Systems 17}, Pages = {537--544}, Publisher = {MIT Press}, Title = {Integrating topics and syntax}, Year = {2005}} @incollection{Grimshaw81, Address = {Cambridge, MA}, Author = {J. Grimshaw}, Booktitle = {The Logical Problem of Language Acquisition}, Chapter = 6, Editor = {C.L. Baker and John J. McCarthy}, Pages = {165--182}, Publisher = MIT, Title = {Form, Function and the Language Acquisition Device}, Year = 1981} @inbook{Grind90, Address = {New Jersey}, Author = {W. {Van de Grind}}, Booktitle = {Perception \& Control of Self-Motion}, Editor = {Warren, R. and Wertheim, A.}, Publisher = {Lawrence Erlbaum Associates}, Title = {Smart mechanisms for the visual evaluation and control of self-motion}, Year = 1990} @inproceedings{Grishman+92, Author = {R. Grishman and J. Sterling}, Booktitle = COLING92, Pages = {658-664}, Title = {Acquisition of selectional patterns}, Year = 1992} @inproceedings{Grishman+94, Address = {New York}, Author = {Ralph Grishman and Catherine Macleod and Adam Meyers}, Booktitle = COLING94, Date-Modified = {2009-11-14 23:08:13 +0100}, Keywords = {lexicon}, Pages = {268-272}, Publisher = {New York University}, Title = {Comlex Syntax: Building a Computational Lexicon}, Year = 1994} @article{Grishman86, Author = {R. Grishman and L. Hirschman and Ngo Thanh Nhan}, Journal = CL, Pages = {205--214}, Title = {Discovery procedures for sublanguage selectional patterns -- initial experiments}, Volume = 12, Year = 1986} @book{Grishman86b, Author = {R. 
Grishman}, Publisher = CUP, Title = {Computational Linguistics -- An Introduction}, Year = 1986} @book{Grishman86c, Editor = {R. Grishman and R. Kittredge}, Publisher = {Lawrence Erlbaum Associates}, Title = {Analyzing Language in Restricted Domains: Sublanguage Description and Processing}, Year = 1986} @incollection{Grishman95, Author = {R. Grishman}, Booktitle = {Proceedings of the Sixth Message Understanding Conference}, Editor = {Sundheim, B.}, Publisher = {Morgan Kaufmann Publishers}, Title = {The {NYU} system for {MUC-6} or where's syntax?}, Year = 1995} @article{Groenendijk+92, Author = {P. Groenendijk and J. Stokhof}, Journal = {{Linguistics}}, Title = {{Information Change Potential}}, Year = 1992} @article{Gropen+91, Author = {J. Gropen and S. Pinker and M. Hollander and R. Goldberg}, Journal = {Cognition}, Pages = {153-195}, Title = {Affectedness and direct object: The role of lexical semantics in the acquisition of verb argument structure}, Volume = 41, Year = 1991} @misc{Grover+98, Author = {D. Grover and M. King and C. Kushler}, Date-Modified = {2010-06-25 22:09:22 +0200}, Howpublished = {Patent No. US5818437, Tegic Communications, Inc., Seattle, WA}, Month = {October}, Title = {Reduced keyboard disambiguating computer}, Year = 1998} @inproceedings{Groves05, Address = {Ann Arbor, Michigan}, Author = {D. Groves and A. Way}, Booktitle = {Proceedings of the Workshop on Building and Using Parallel Texts: Data-Driven Machine Translation and Beyond, ACL 2005}, Pages = {183--190}, Title = {Hybrid Example-Based {SMT}: the Best of Both Worlds?}, Year = 2005} @article{Groves06, Author = {D. Groves and A. Way}, Journal = {Machine Translation, Special Issue on {EBMT}}, Note = {(to appear)}, Title = {Hybrid Data-Driven Models of {MT}}, Year = 2006} @incollection{Gruenwald96, Address = {Berlin}, Author = {P. 
Gr\"{u}nwald}, Booktitle = {Connectionist, Statistical and Symbolic Approaches to Learning for Natural Language Processing}, Editor = {Stefan Wermter and Ellen Riloff and Gabriele Scheler}, Pages = {203--216}, Publisher = {Springer-Verlag}, Series = {Lecture Notes in Artificial Intelligence}, Title = {A Minimum Description Length Approach to Grammar Inference}, Volume = 1040, Year = 1996} @article{Guiasu+85, Author = {S. Guiasu and A. Shenitzer}, Journal = {The Mathematical Intelligencer}, Number = 1, Title = {The principle of maximum entropy}, Volume = 7, Year = 1985} @incollection{Gupta+92, Address = {San Mateo, CA}, Author = {P. Gupta and D. Touretzky}, Booktitle = {Advances in Neural Information Processing Systems}, Editor = {J. Moody and S. J. Hanson and R. P. Lippmann}, Pages = {225--234}, Publisher = {Morgan Kaufmann}, Title = {A connectionist learning approach to analysing linguistic stress}, Volume = 4, Year = 1992} @article{Gupta92, Author = {V. Gupta and M. Lennig and P. Mermelstein}, Journal = CSL, Pages = {331--344}, Title = {A language model for very large-vocabulary speech recognition}, Volume = 6, Year = 1992} @inproceedings{Gustafson+99, Author = {J. Gustafson and N. Lindberg and M. Lundeberg}, Booktitle = {Proceedings of Eurospeech'99}, Title = {The August spoken dialogue system}, Year = 1999} @book{Gusfield97, Address = {Cambridge}, Author = {D. Gusfield}, Publisher = {University of Cambridge}, Title = {Algorithms on Strings, Trees and Sequences}, Year = {1997}} @inproceedings{Gutowitz03, Author = {H. Gutowitz}, Booktitle = {{P}roceedings of the {EACL} 2003 {W}orkshop on {L}anguage {M}odeling for {T}ext {E}ntry {S}ystems}, Pages = {33--41}, Title = {{B}arriers to {A}doption of {D}ictionary-{B}ased {T}ext-{E}ntry {M}ethods: {A} {F}ield {S}tudy}, Year = 2003} @inproceedings{Hacioglu04, Address = {Morristown, NJ, USA}, Author = {K. 
Hacioglu}, Booktitle = {COLING '04: Proceedings of the 20th international conference on Computational Linguistics}, Date-Added = {2009-12-26 20:57:57 +0100}, Date-Modified = {2010-07-12 11:23:04 +0200}, Publisher = {ACL}, Title = {Semantic Role Labeling Using Dependency Trees}, Year = {2004}} @inproceedings{Haigh88, Author = {R. Haigh and G. Sampson and E. Atwell}, Booktitle = ACL, Title = {Project {APRIL} -- a progress report}, Year = 1988} @misc{Hajic+06, Author = {Jan Haji\v{c} and Jarmila Panevov\'{a} and Eva Haji\v{c}ov\'{a} and Petr Sgall and Petr Pajas and Jan \v{S}t\v{e}p\'{a}nek and Ji\v{r}\'{\i} Havelka and Marie Mikulov\'{a} and Zden\v{e}k \v{Z}abokrtsk\'{y}}, Booktitle = {CD-ROM}, Date-Added = {2009-12-26 20:57:57 +0100}, Date-Modified = {2009-12-26 21:04:12 +0100}, Number = {{Cat. No. LDC2006T01, ISBN 1-58563-370-4}}, Publisher = {Linguistic Data Consortium, Philadelphia, Pennsylvania, USA. URL: http://ldc.upenn.edu.}, Title = {{Prague Dependency Treebank 2.0}}, Year = {2006}} @inproceedings{Hajic+09, Address = {Boulder, Colorado, USA}, Author = {J. Haji\v{c} and M. Ciaramita and R. Johansson and D. Kawahara and M. A. Mart\'{\i} and L. M\`{a}rquez and A. Meyers and J. Nivre and S. Pad\'{o} and J. \v{S}t\v{e}p\'{a}nek and P. Stra\v{n}\'{a}k and M. Surdeanu and N. Xue and Y. Zhang}, Booktitle = {Proc. of {CoNLL-2009}: Shared Task}, Date-Added = {2009-12-26 20:57:57 +0100}, Date-Modified = {2011-06-19 16:31:33 +0200}, Keywords = {shared task}, Pages = {1--18}, Title = {The {CoNLL}-2009 Shared Task: Syntactic and Semantic Dependencies in Multiple Languages}, Year = {2009}} @article{Hale06, Author = {John Hale}, Journal = {Cognitive Science}, Pages = {643--672}, Title = {Uncertainty About the Rest of the Sentence}, Volume = {30}, Year = {2006}} @inproceedings{Hall+07, Author = {Hall, J. and Nilsson, J. and Nivre, J. and Eryigit, G. and Megyesi, B. and Nilsson, M. 
and Saers, M.}, Booktitle = {Proceedings of the CoNLL Shared Task Session of EMNLP-CoNLL 2007}, Date-Added = {2009-11-15 00:34:09 +0100}, Date-Modified = {2009-11-15 00:34:23 +0100}, Pages = {933--939}, Title = {Single Malt or Blended? A Study in Multilingual Parser Optimization}, Year = {2007}} @book{Halle+71, Address = {New York, NY}, Author = {M. Halle and S. J. Keyser}, Publisher = {Harper and Row}, Title = {English stress: its form, its growth, and its role in verse}, Year = 1971} @incollection{Halle78, Address = {Cambridge, MA}, Author = {M. Halle}, Booktitle = {Linguistic theories and psychological reality}, Editor = {M. Halle and J. W. Bresnan and G. A. Miller}, Pages = {294--303}, Publisher = MIT, Title = {Knowledge unlearned and untaught: what speakers know about the sounds of their language}, Year = 1978} @book{Halliday+76, Address = {London}, Author = {Halliday, M. A. K. and Ruqaiya Hasan}, Date-Added = {2010-02-01 23:26:18 +0100}, Date-Modified = {2010-02-01 23:26:24 +0100}, Note = {English Language Series, Title No. 9}, Publisher = {Longman}, Title = {Cohesion in {E}nglish}, Year = {1976}} @article{Halliday61, Author = {M. A. K. Halliday}, Journal = {Word}, Number = 3, Pages = {241--292}, Title = {Categories of the theory of grammar}, Volume = 17, Year = 1961} @book{Halliday85, Address = {London}, Author = {M. A. K. Halliday}, Date-Added = {2010-02-01 23:26:03 +0100}, Date-Modified = {2010-02-01 23:26:09 +0100}, Publisher = {Edward Arnold}, Title = {An Introduction to Functional Grammar}, Year = {1985}} @article{Halteren+01, Author = {H. {Van Halteren} and J. Zavrel and W. Daelemans}, Date-Modified = {2010-01-05 22:33:44 +0100}, Journal = CL, Keywords = {ilk, part-of-speech tagging}, Number = 2, Pages = {199--230}, Title = {Improving accuracy in word class tagging through combination of machine learning systems}, Volume = 27, Year = 2001} @inproceedings{Halteren+98, Address = {Montreal, Canada}, Author = {H. {Van Halteren} and J. Zavrel and W. 
Daelemans}, Booktitle = COLING/ACL98, Date-Modified = {2010-01-05 22:33:53 +0100}, Keywords = {ilk, part-of-speech tagging}, Month = {August 10-14}, Pages = {491--497}, Title = {Improving Data-Driven Wordclass Tagging by System Combination}, Year = 1998} @inproceedings{Halteren00, Author = {H. {Van Halteren}}, Booktitle = {Proceedings of CoNLL-2000}, Title = {Chunking with {WPDV} models}, Year = 2000} @inproceedings{Halteren00b, Author = {H. {Van Halteren}}, Booktitle = {Proceedings of CoNLL-2000}, Title = {A default first order weight determination procedure for {WPDV} models}, Year = 2000} @inproceedings{Halteren96, Address = {Oxford, England}, Author = {H. {Van Halteren}}, Booktitle = {Research in Humanities Computing 4. Selected papers from the ALLC/ACH Conference, Christ Church, Oxford, April 1992}, Editor = {S. Hockey and N. Ide}, Pages = {207--215}, Publisher = {Clarendon Press}, Title = {Comparison of Tagging Strategies, a Prelude to Democratic Tagging}, Year = 1996} @book{Halteren99, Address = {Dordrecht, The Netherlands}, Author = {H. {Van Halteren}}, Publisher = {Kluwer Academic Publishers}, Title = {Syntactic Wordclass Tagging}, Year = 1999} @unpublished{Hammersley71, Author = {Hammersley, J.M. and Clifford, P.}, Date-Added = {2009-11-15 00:34:28 +0100}, Date-Modified = {2009-11-15 00:34:37 +0100}, Note = {Unpublished manuscript}, Title = {{Markov fields on finite graphs and lattices}}, Year = {1971}} @book{Hammerton+02, Author = {J. Hammerton and M. Osborne and S. Armstrong and W. Daelemans}, Publisher = MIT, Title = {Special Issue of Journal of Machine Learning Research on Shallow Parsing}, Year = 2002} @book{Hann92, Address = {Amsterdam, The Netherlands}, Author = {M. Hann}, Publisher = {John Benjamins}, Title = {The Key to Technical Translation}, Volume = 1, Year = 1992} @article{Han+06, Author = {N. Han and M. Chodorow and C. 
Leacock}, Doi = {10.1017/S1351324906004190}, Eprint = {http://journals.cambridge.org/article_S1351324906004190}, Journal = {Natural Language Engineering}, Number = {02}, Pages = {115-129}, Title = {Detecting errors in English article usage by non-native speakers}, Url = {http://journals.cambridge.org/action/displayAbstract?fromPage=online&aid=439987&fulltextType=RA&fileId=S1351324906004190}, Volume = {12}, Year = {2006}, Bdsk-Url-1 = {http://journals.cambridge.org/action/displayAbstract?fromPage=online&aid=439987&fulltextType=RA&fileId=S1351324906004190}, Bdsk-Url-2 = {http://dx.doi.org/10.1017/S1351324906004190}} @article{Hansen+90, Author = {L. K. Hansen and P. Salamon}, Date-Added = {2009-11-15 00:34:43 +0100}, Date-Modified = {2009-11-15 00:35:01 +0100}, Journal = {IEEE Transactions on Pattern Analysis and Machine Intelligence}, Keywords = {ensembles, neural networks}, Pages = {993-1001}, Title = {Neural Network Ensembles}, Volume = {12}, Year = {1990}} @unpublished{Happel+93, Author = {B. L. M. Happel and J. M. J. Murre}, Note = {Unpublished manuscript}, Title = {The Design and Evolution of Modular Neural Network Architectures}, Year = 1993} @inproceedings{Haque+09, Address = {Hong Kong, China}, Author = {R. Haque and S. Kumar Naskar and A. {Van den Bosch} and A. Way}, Booktitle = {Proceedings of PACLIC 23: the 23rd Pacific Asia Conference on Language, Information and Computation}, Date-Added = {2010-01-02 19:48:29 +0100}, Date-Modified = {2010-01-02 19:50:33 +0100}, Keywords = {ilk, statistical machine translation}, Pages = {170--179}, Title = {Dependency relations as source context in phrase-based {SMT}}, Year = {2009}, Bdsk-Url-1 = {http://www.computing.dcu.ie/~rhaque/PACLIC23-170-179.pdf}} @inproceedings{Harabagiu+00, Author = {S. Harabagiu and D. Moldovan and M. Pasca and R. Mihalcea and M. Surdeanu and R. Bunescu and R. Girju and V. Rus and P. 
Morerescu}, Booktitle = {Proceedings of the Ninth Text Retrieval Conference (TREC-9)}, Date-Modified = {2011-01-29 16:45:32 +0100}, Keywords = {question answering}, Title = {{FALCON} - Boosting Knowledge for Answer Engines}, Year = 2000} @inproceedings{Harabagiu+01, Author = {S. Harabagiu and D. Moldovan}, Booktitle = {Tutorial of the Second Meeting of the North American Chapter of the Association for Computational Linguistics}, Title = {Open-Domain Textual Question Answering}, Year = 2001} @techreport{Harding82, Address = {London}, Author = {P. Harding}, Institution = {British Library R \& D Department}, Month = feb, Number = {BLRDD Report No. 5723}, Title = {Automatic Indexing and Classification for Mechanized Information Retrieval}, Year = 1982} @incollection{Harnad82, Address = {Hillsdale, NJ}, Author = {S. Harnad}, Booktitle = {Language, mind, and brain}, Editor = {T. Simon and R. Scholes}, Pages = {189--211}, Publisher = {Lawrence Erlbaum}, Title = {Metaphor and mental duality}, Year = 1982} @article{Harris40, Author = {Z. S. Harris}, Journal = {Language}, Note = {Page numbers cited from repr. in Harris 1970:695--705 under the title "Gray's Foundations of Language".}, Number = 3, Pages = {216--231}, Title = {Review of {L}ouis {H}. {G}ray, {F}oundations of {L}anguage ({N}ew {Y}ork: MacMillan, 1939)}, Volume = 16, Year = 1940} @book{Harris51, Author = {Z. S. Harris}, Publisher = {University of Chicago Press}, Title = {Methods in Structural Linguistics}, Year = 1951} @article{Harris57, Author = {Z. S. Harris}, Journal = {Language}, Number = 3, Pages = {283--340}, Title = {Co-Occurrence and Transformation in Linguistic Structure.}, Volume = 33, Year = 1957} @book{Harris68, Author = {Z. S. Harris}, Publisher = {Wiley}, Title = {Mathematical structures of language}, Year = 1968} @book{Harris70, Author = {Z. S. Harris}, Number = 1, Publisher = {D. 
Reidel}, Series = {Formal Linguistic Series}, Title = {Papers in structural and transformational linguistics}, Year = 1970} @book{Harris87, Address = {La Salle, IL}, Author = {R. Harris}, Publisher = {Open Court}, Title = {Reading {S}aussure}, Year = 1987} @article{Hart68, Author = {P. E. Hart}, Journal = {IEEE Transactions on Information Theory}, Pages = {515--516}, Title = {The condensed nearest neighbor rule}, Volume = 14, Year = 1968} @article{Hartley+96, Author = {Hartley, T. and Houghton, G.}, Journal = {Journal of Memory and Language}, Pages = {1--31}, Title = {A linguistically Constrained Model of Short-Term Memory for Nonwords}, Volume = 35, Year = 1996} @inproceedings{Hassan+08, Author = {Ahmed Hassan and Sara Noeman and Hany Hassan}, Booktitle = {Proceedings of the Third International Joint Conference on Natural Language Processing}, Date-Added = {2010-01-29 15:28:24 +0100}, Date-Modified = {2010-01-29 15:28:29 +0100}, Location = {(IJCNLP, 2008)}, Title = {Language Independent Text Correction using Finite State Automata}, Year = {2008}} @techreport{Hassibi+92, Author = {B. Hassibi and D. G. Stork and G. J. Wolff}, Institution = {RICOH California Research Centre}, Number = {CRC-TR-9235}, Title = {Optimal Brain Surgeon and General Network Pruning}, Year = 1992} @unpublished{Hastie+94, Author = {T. Hastie and R. Tibshirani}, Note = {submitted}, Title = {Discriminant adaptive nearest neighbour classification}, Year = 1994} @article{Hastie+98, Author = {Hastie, T. and Tibshirani, R.}, Date-Added = {2009-11-15 00:35:05 +0100}, Date-Modified = {2009-11-15 00:35:20 +0100}, Journal = {The Annals of Statistics}, Number = {2}, Pages = {451--471}, Title = {Classification by pairwise coupling}, Volume = {26}, Year = {1998}} @incollection{Hatzivassiloglou96, Address = {Cambridge, MA}, Author = {Vasileios Hatzivassiloglou}, Booktitle = {The Balancing Act: Combining Symbolic and Statistical Approaches to Language}, Editor = {Judith L. 
Klavans and Philip Resnik}, Pages = {67--94}, Publisher = MIT, Title = {Do We Need Linguistics When We Have Statistics? A Comparative Analysis of the Contributions of Linguistics Cues to a Statistical Word Grouping System}, Year = 1996} @techreport{Haussler1999, Author = {Haussler, D.}, Date-Added = {2009-11-15 00:35:29 +0100}, Date-Modified = {2009-11-15 00:35:40 +0100}, Institution = {UC Santa Cruz}, Number = {UCS-CRL-99-10}, Title = {Convolution kernels on discrete structures}, Year = {1999}} @misc{Hayes+90, Author = {P. Hayes and S. Weinstein}, Title = {Construe-TIS: A system for content-based indexing of a database of news stories}, Year = 1990} @book{Hays88, Address = {Orlando, FA}, Author = {W. L. Hays}, Edition = {Fourth}, Publisher = {Holt, Rinehart and Winston}, Title = {Statistics}, Year = 1988} @inproceedings{Hearne+03, Address = {New Orleans, Louisiana}, Author = {M. Hearne and A. Way}, Booktitle = {Machine Translation Summit IX}, Pages = {59--68}, Title = {Seeing the Wood for the Trees: Data-Oriented Translation}, Year = 2003} @inproceedings{Hearne+06, Address = {Oslo, Norway}, Author = {M. Hearne and A. Way}, Booktitle = {Proceedings of the 11th Conference of the European Association for Machine Translation}, Title = {Disambiguation Strategies for Data-Oriented Translation}, Year = 2006} @incollection{Hearst+96, Address = {Cambridge MA}, Author = {M. A. Hearst and H. 
Sch\"{u}tze}, Booktitle = {Corpus Processing for Lexical Acquisition}, Date-Modified = {2012-06-06 07:03:01 +0000}, Editor = {Branimir Boguraev and James Pustejovsky}, Publisher = MIT, Title = {Customizing a Lexicon to Better Suit a Computational Task}, Year = 1996} @inproceedings{Hearst91b, Author = {Marti Hearst}, Booktitle = UWOED, Date-Modified = {2012-06-06 06:49:01 +0000}, Pages = {1--22}, Title = {Noun homograph disambiguation using local context in large text corpora}, Year = 1991} @book{Hebb49, Address = {New York}, Author = {Hebb, D.O.}, Publisher = {{Wiley}}, Title = {{The organization of behavior}}, Year = 1949} @incollection{Heemskerk+93, Author = {J. Heemskerk and V. J. {Van Heuven}}, Booktitle = {Analysis and synthesis of speech; strategic research towards high-quality text-to-speech generation}, Editor = {V. J. {Van Heuven} and L. C. W. Pols}, Publisher = {Berlin, Mouton de Gruyter}, Title = {MORPA, a lexicon-based MORphological PArser}, Year = 1993} @inproceedings{Heemskerk93, Author = {J. Heemskerk}, Booktitle = {Proceedings of the 6th Conference of the EACL}, Pages = {183--192}, Title = {A Probabilistic Context-free Grammar for Disambiguation in Morphological Parsing}, Year = 1993} @techreport{Heemskerk93b, Author = {J. Heemskerk}, Institution = {{{\sc itk}}, Tilburg University}, Number = 44, Title = {A Probabilistic Context-free Grammar for Disambiguation in Morphological Parsing}, Year = 1993} @inproceedings{Henderson+99, Author = {Henderson, J. and Brill, E.}, Booktitle = {{Proceedings of the 1999 Joint SIGDAT Conference on Empirical Methods in Natural Language Processing and Very Large Corpora}}, Pages = {187--194}, Title = {{Exploiting Diversity in Natural Language Processing: Combining Parsers}}, Year = 1999} @inproceedings{Hendrickx+02, Address = {Philadelphia, PA}, Author = {I. Hendrickx and A. {Van den Bosch} and V. Hoste and W. 
Daelemans}, Booktitle = {Proceedings of the Workshop on word sense disambiguation: Recent successes and future directions}, Date-Modified = {2010-09-18 14:23:11 +0200}, Keywords = {ilk, wsd, word sense disambiguation, memory-based learning, memory-based language processing, Dutch, vi}, Pages = {61--65}, Title = {Dutch word sense disambiguation: Optimizing the localness of context}, Year = 2002} @inproceedings{Hendrickx+03, Author = {I. Hendrickx and A. {Van den Bosch}}, Booktitle = {Proceedings of CoNLL-2003}, Date-Modified = {2010-09-18 14:23:38 +0200}, Editors = {W. Daelemans and M. Osborne}, Keywords = {ilk, named-entity recognition, vi}, Pages = {176--179}, Title = {Memory-based one-step named-entity recognition: Effects of seed list features, classifier stacking, and unannotated data}, Year = 2003, Bdsk-Url-1 = {http://www.cnts.ua.ac.be/conll2003/pdf/17679hen.pdf}} @inproceedings{Hendrickx+04, Address = {Groningen, The Netherlands}, Author = {I. Hendrickx and A. {Van den Bosch}}, Booktitle = {Proceedings of the Sixteenth Belgian-Dutch Conference on Artificial Intelligence, Groningen}, Date-Modified = {2010-09-18 14:23:04 +0200}, Editor = {R. Verbrugge and N. Taatgen and L. Schomaker}, Keywords = {ilk, k-NN, vi}, Pages = {19--26}, Title = {Maximum-entropy parameter estimation for the {{\it k}-NN} modified value-difference kernel}, Year = 2004, Bdsk-Url-1 = {http://ilk.uvt.nl/~piroska/specom-LVKC.pdf}} @inproceedings{Hendrickx+05, Address = {Berlin}, Author = {I. Hendrickx and A. {Van den Bosch}}, Booktitle = {Machine Learning: ECML 2005: 16th European Conference on Machine Learning}, Date-Modified = {2010-09-18 14:23:18 +0200}, Editor = {J. Gama and R. Camacho and P. Brazdil and A. Jorge and L. 
Torgo}, Keywords = {ilk, k-NN, vi}, Pages = {158--169}, Publisher = {Springer Verlag}, Series = {Lecture Notes in Computer Science}, Title = {Hybrid algorithms for instance-based classification}, Volume = 3720, Year = 2005, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/ecml_hybrids_final.pdf}} @inproceedings{Hendrickx+07, Address = {Prague, Czech Republic}, Author = {I. Hendrickx and R. Morante and C. Sporleder and A. {Van den Bosch}}, Booktitle = {Proceedings of the Fourth International Workshop on Semantic Evaluations (SemEval-2007)}, Date-Added = {2010-01-03 01:07:48 +0100}, Date-Modified = {2011-01-29 16:45:44 +0100}, Keywords = {ilk, semantic relatedness, mitch, vici}, Pages = {187--190}, Title = {{ILK}: Machine learning of semantic relations with shallow features and almost no data}, Year = {2007}, Bdsk-Url-1 = {http://acl.ldc.upenn.edu/W/W07/W07-2039.pdf}} @phdthesis{Hendrickx05, Author = {I. Hendrickx}, Date-Modified = {2010-01-02 21:22:43 +0100}, Keywords = {ilk, k-NN}, School = {Tilburg University}, Title = {Local classification and global estimation: Explorations of the k-nearest neighbor algorithm}, Year = 2005} @inproceedings{Hermjakob01, Author = {U. Hermjakob}, Booktitle = {ACL-2001 Workshop on Open-Domain Question Answering}, Title = {Parsing and question classification for question answering}, Year = 2001} @book{Hertz+91, Address = {Redwood City, CA}, Author = {Hertz, J.A. and A.S. Krogh and R.G. Palmer}, Publisher = {Addison-Wesley}, Title = {{Introduction to the theory of neural computation}}, Year = 1991} @book{Heuven+93, Address = {Berlin}, Author = {V. J. {Van Heuven} and L. Pols}, Date-Modified = {2008-07-23 15:59:59 +0200}, Keywords = {speech synthesis}, Publisher = {Mouton de Gruyter}, Title = {Analysis and synthesis of speech, strategic research towards high-quality text-to-speech generation}, Year = 1993} @incollection{Heuven80, Address = {Baltimore}, Author = {V. J. 
{Van Heuven}}, Booktitle = {Orthography, Reading and Dyslexia}, Editor = {J. F. Kavanagh and R. L. Venezky}, Pages = {57--73}, Publisher = {University Park Press}, Title = {Aspects of {D}utch orthography and reading}, Year = 1980} @phdthesis{Hiemstra01, Author = {D. Hiemstra}, School = {University of Twente}, Title = {Using language models for information retrieval}, Year = 2001} @article{Higuera05, Author = {C. {de la Higuera}}, Date-Modified = {2010-09-20 00:26:57 +0200}, Journal = {Pattern Recognition}, Pages = {1332--1348}, Title = {A bibliographical study of grammatical inference}, Volume = {38}, Year = {2005}} @book{Higuera10, Address = {Cambridge}, Author = {C. {de la Higuera}}, Date-Modified = {2011-06-19 16:14:25 +0200}, Isbn = {9780521763165}, Pages = {485}, Publisher = {Cambridge University Press}, Title = {Grammatical Inference, Learning Automata and Grammars}, Year = {2010}} @article{Hillis+95, Author = {Hillis, A. and Caramazza, A.}, Date-Modified = {2009-02-21 19:48:24 +0100}, Journal = {{Journal of Cognitive Neuroscience}}, Keywords = {brain, human language processing, syntactic categories}, Pages = {396--407}, Title = {{Representation of Grammatical Categories of Words in the Brain}}, Volume = {7: 3}, Year = 1995} @article{Hindle+93, Author = {Hindle, D. and M. Rooth}, Journal = {Computational Linguistics}, Pages = {103--120}, Title = {Structural Ambiguity and Lexical Relations}, Volume = 19, Year = 1993} @inproceedings{Hindle83, Author = {Hindle, D.}, Booktitle = ACL, Title = {Deterministic parsing of syntactic non-fluencies}, Year = 1983} @inproceedings{Hindle89, Author = {Hindle, D.}, Booktitle = ACL, Title = {Acquiring disambiguation rules from text}, Year = 1989} @inproceedings{Hindle90, Author = {Donald Hindle}, Booktitle = ACL90, Pages = {268--275}, Title = {Noun Classification from Predicate-Argument Structures}, Year = 1990} @inproceedings{Hindle91, Author = {D. Hindle and M. 
Rooth}, Booktitle = ACL, Pages = {229--236}, Title = {Structural Ambiguity and Lexical Relations}, Year = 1991} @article{Hindle93, Author = {Donald Hindle and Mats Rooth}, Journal = CL, Number = 1, Pages = {103--120}, Title = {Structural Ambiguity and Lexical Relations}, Volume = 19, Year = 1993} @inproceedings{Hinton86, Address = {Hillsdale, NJ}, Author = {G. E. Hinton}, Booktitle = {Proceedings of the Eighth Annual Conference of the Cognitive Science Society}, Pages = {1--12}, Publisher = {Lawrence Erlbaum Associates}, Title = {Learning distributed representation of concepts}, Year = 1986} @misc{Hinton90, Author = {G. E. Hinton}, Note = {Special Issues of {\em Artificial Intelligence}}, Title = {Connectionist Symbol Processing}, Year = 1990} @article{Hirose+91, Author = {Y. Hirose and K. Yamashita and S. Hijiya}, Journal = {Neural Networks}, Pages = {61--66}, Title = {Back-propagation algorithm which varies the number of hidden units}, Volume = 4, Year = 1991} @inproceedings{Hirschman+00, Author = {L. Hirschman and M. Light and E. Breck and J. Burger}, Booktitle = ACL99, Title = {Deep Read: A Reading Comprehension System}, Year = 1999} @article{Hirschman+05, Author = {Hirschman, L. and Yeh, A. and Blaschke, C. and Valencia, A.}, Date-Added = {2009-11-15 00:35:44 +0100}, Date-Modified = {2009-11-15 00:35:58 +0100}, Journal = {BMC Bioinformatics}, Number = {S1}, Title = {Overview of BioCreAtIvE: critical assessment of information extraction for biology}, Volume = {6}, Year = {2005}} @incollection{Hirschman86, Author = {L. Hirschman}, Booktitle = {Analyzing Language in Restricted Domains: Sublanguage Description and Processing}, Editor = {R. Grishman and R. Kittredge}, Pages = {211--234}, Publisher = {Lawrence Erlbaum Associates}, Title = {Discovering sublanguage structures}, Year = 1986} @inproceedings{Hirsh+94, Author = {H. Hirsh and M. 
Noordewier}, Booktitle = {Proceedings of the Tenth IEEE Conference on Artificial Intelligence for Applications}, Title = {Using Background Knowledge to Improve Learning of DNA Sequences}, Year = 1994} @inproceedings{Hirsh89, Address = {Ithaca, NY}, Author = {H. Hirsh}, Booktitle = {Proceedings of the Sixth International Workshop in Machine Learning}, Pages = {330--338}, Publisher = {Morgan Kaufmann}, Title = {Combining empirical and analytical learning with version spaces}, Year = 1989} @inproceedings{Hirshman+00, Author = {L. Hirschman and M. Light and E. Breck and J. Burger}, Booktitle = ACL99, Title = {Deep Read: A Reading Comprehension System}, Year = 1999} @book{Hirst87, Author = {Hirst, G.}, Publisher = CUP, Title = {Semantic interpretation and the Resolution of Ambiguity}, Year = 1987} @article{Hirst88, Author = {Graeme Hirst}, Journal = AI, Pages = {131--177}, Title = {Semantic interpretation and ambiguity}, Volume = 34, Year = 1988} @techreport{Hobbs+90, Author = {Hobbs, J. R. and M. Stickel and P. Martin and D. Edwards}, Institution = {SRI International}, Month = Dec, Number = {Technical Note 499}, Title = {Interpretation as Abduction}, Year = 1990} @inproceedings{Hobbs+92, Author = {Hobbs, J. R. et al.}, Booktitle = {Proceedings of Fourth Message Understanding Conference}, Pages = {268-275}, Title = {Description of the {FASTUS} system used for {MUC}-4}, Year = 1992} @article{Hobbs+93, Author = {Hobbs, J. R. and M. Stickel and P. Martin and D. Edwards}, Journal = {Artificial Intelligence}, Pages = {69-142}, Title = {Interpretation as Abduction}, Volume = 63, Year = 1993} @article{Hobbs78, Author = {Hobbs, J. R.}, Journal = {Lingua}, Pages = {311--338}, Title = {Resolving pronoun references}, Volume = 44, Year = 1978} @inproceedings{Hobbs90, Author = {Jerry Hobbs and John Bear}, Booktitle = Coling, Title = {Two principles of parse preference}, Year = 1990} @book{Hobbs90b, Author = {Jerry R. 
Hobbs}, Date-Added = {2010-02-01 23:26:31 +0100}, Date-Modified = {2010-02-01 23:26:46 +0100}, Publisher = {CSLI Lecture Notes 21}, Title = {Literature and Cognition}, Year = {1990}} @techreport{Hochreiter+94, Author = {S. Hochreiter and J. Schmidhuber}, Institution = {Fakult{\"{a}}t f{\"{u}}r Informatik, Universit{\"{a}}t Karlsruhe, Germany}, Number = {FKI-200-94}, Title = {Flat minimum search finds simple nets}, Year = 1994} @article{Hoffman95, Author = {R. R. Hoffman}, Journal = {AI Magazine}, Pages = {11--35}, Title = {Monster analogies}, Volume = {?}, Year = 1995} @inproceedings{Hofmann+08, Address = {Singapore}, Author = {K. Hofmann and K. Balog and T. Bogers and M. {De Rijke}}, Booktitle = {Proceedings of the SIGIR 2008 Workshop on Future Challenges in Expert Retrieval}, Date-Added = {2010-01-03 10:42:39 +0100}, Date-Modified = {2010-01-03 10:43:36 +0100}, Keywords = {ilk, expert retrieval}, Month = {July}, Pages = {29--36}, Title = {Integrating Contextual Factors into Topic-Centric Retrieval Methods for Finding Similar Experts}, Year = {2008}, Bdsk-Url-1 = {http://ilk.uvt.nl/~toine/publications/hofmann.2008.sigir-fcher-2008-paper.pdf}} @inproceedings{Hogenhout+97, Author = {Wide R. Hogenhout and Yuji Matsumoto}, Booktitle = {{CoNLL}: Computational Natural Language Learning}, Editor = {T. M. Ellison}, Pages = {16--24}, Publisher = {ACL}, Title = {A Preliminary Study of Word Clustering Based on Syntactic Behavior}, Year = 1997} @inproceedings{Holte+89, Address = {San Mateo, CA}, Author = {R. C. Holte and L. E. Acker and B. W. Porter}, Booktitle = {Proceedings of the Eleventh International Joint Conference on Artificial Intelligence}, Pages = {813--818}, Publisher = {Morgan Kaufmann}, Title = {Concept learning and the problem of small disjuncts}, Year = 1989} @book{Hopcroft+79, Address = {Reading, MA}, Author = {John E. Hopcroft and Jeffrey D. 
Ullman}, Date-Added = {2010-02-01 23:26:58 +0100}, Date-Modified = {2010-02-01 23:27:05 +0100}, Publisher = {Addison-Wesley}, Title = {Introduction to Automata Theory, Languages, and Computation}, Year = {1979}} @book{Hornby86, Author = {Hornby, A. S. and C. Ruse and J. A. Reif and Y. Levy}, Date-Modified = {2011-01-29 16:45:59 +0100}, Publisher = {Kernerman Publishing Ltd., Lonnie Kahn \& Co. Ltd.}, Title = {Oxford Student's Dictionary for {H}ebrew Speakers}, Year = 1986} @book{Hornby89, Address = {Oxford}, Author = {A. S. Hornby}, Edition = 4, Publisher = {Oxford University Press}, Title = {Oxford Advanced Learner's Dictionary of Current English}, Year = 1989} @techreport{Hornik+89, Author = {K. Hornik and M. Stinchcombe and H. White}, Institution = {Department of Economics, UCSD, San Diego, CA}, Number = {88-45R}, Title = {Multilayer feedforward networks are universal approximators}, Year = 1989} @inproceedings{Hoste+01, Author = {V. Hoste and A. Kool and W. Daelemans}, Booktitle = {Proceedings of Senseval-2}, Date-Modified = {2009-09-06 20:38:18 +0200}, Keywords = {wsd, word sense disambiguation, feature selection}, Title = {Classifier Optimization and Combination in the English All Words Task}, Year = 2001} @article{Hoste+02, Author = {V. Hoste and I. Hendrickx and W. Daelemans and A. {Van den Bosch}}, Date-Modified = {2010-09-18 14:23:55 +0200}, Journal = {Natural Language Engineering}, Keywords = {wsd, word sense disambiguation, parameter optimization, memory-based learning, ilk, vi}, Number = 4, Pages = {311--325}, Title = {Parameter optimization for machine learning of word sense disambiguation}, Volume = 8, Year = 2002, Bdsk-Url-1 = {http://www.cnts.ua.ac.be/~walter/papers/2002/hhdb02.pdf}} @inproceedings{Hoste+08, Address = {Bergen, Norway}, Author = {V. Hoste and A. 
{Van den Bosch}}, Booktitle = {Proceedings from the First Bergen Workshop on Anaphora Resolution (WAR I)}, Date-Added = {2010-01-03 10:32:18 +0100}, Date-Modified = {2010-09-18 14:23:47 +0200}, Editor = {C. Johansson}, Keywords = {ilk, co-reference resolution, Dutch, vici}, Pages = {51--75}, Title = {A modular approach to learning Dutch co-reference}, Year = {2008}, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/war.pdf}} @phdthesis{Hoste05, Author = {V. Hoste}, School = {University of Antwerp}, Title = {Optimization in Machine Learning of Coreference Resolution}, Year = 2005} @inproceedings{Hovy+00, Author = {E. Hovy and L. Gerber and U. Hermjakob and M. Junk and C-Y Lin}, Booktitle = {Proceedings of TREC-9}, Organization = {NIST}, Title = {Question Answering in Webclopedia}, Year = 2001} @book{Hovy88, Address = {Hillsdale, NJ}, Author = {Eduard H. Hovy}, Date-Added = {2010-02-01 23:27:11 +0100}, Date-Modified = {2010-02-01 23:27:18 +0100}, Publisher = {Lawrence Erlbaum}, Title = {Generating Natural Language Under Pragmatic Constraints}, Year = {1988}} @inproceedings{HovyGerber01, Author = {E. Hovy and L. Gerber and U. Hermjakob and C. Lin and D. Ravichandran}, Booktitle = {Proceedings of the DARPA Human Language Technology conference (HLT). San Diego, CA}, Title = {Toward Semantics-Based Answer Pinpointing}, Year = 2001} @inproceedings{How+05, Address = {Las Vegas, NV}, Author = {Yijue How and Min-Yen Kan}, Booktitle = {{HCII} '05: {P}roceedings of the {T}he 11th {I}nternational {C}onference on {H}uman-{C}omputer {I}nteraction}, Editor = {M. J. Smith and G. Salvendy}, Month = {July}, Publisher = {Lawrence Erlbaum Associates}, Title = {{O}ptimizing {P}redictive {T}ext {E}ntry for {S}hort {M}essage {S}ervice on {M}obile {P}hones}, Year = 2005} @inproceedings{Howe+97, Address = {Berlin}, Author = {N. Howe and C. Cardie}, Booktitle = {Case-Based Reasoning Research and Developments: Second International Conference on Case-Based Reasoning}, Editor = {D. 
Leake and E. Plaza}, Pages = {455--466}, Publisher = {Springer Verlag}, Series = {Lecture Notes in Computer Science}, Title = {Examining locally varying weights for nearest neighbor algorithms}, Volume = 1266, Year = 1997} @article{Hsu+02, Author = {C. W. Hsu and C. J. Lin}, Journal = {IEEE Transactions on Neural Networks}, Pages = {415--425}, Title = {A comparison of methods for multi-class support vector machines}, Year = 2002} @manual{Hsu+2003, Archive = {Iris, web}, Author = {C. W. Hsu and C. C. Chang and C. J. Lin}, Key = {SVM}, Note = {zeer bruikbaar}, Source = {Iris}, Title = {A practical guide to support vector classification}, Url = {http://www.csie.ntu.edu.tw/ cjlin/papers/guide/guide.pdf}, Year = 2003, Bdsk-Url-1 = {http://www.csie.ntu.edu.tw/%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20cjlin/papers/guide/guide.pdf}} @inproceedings{Huang+01, Address = {Gold Coast, Queensland, Australia}, Author = {J. H. Huang and D. W. Powers}, Booktitle = {Australasian Computer Science Conference Proceedings}, Date-Modified = {2011-06-19 16:33:16 +0200}, Organization = {Bond University}, Pages = {77--82}, Title = {Large scale experiments on correction of confused words}, Year = 2001} @inproceedings{Huckle95, Author = {C. C. Huckle}, Booktitle = {Proceedings of EACL-95}, Comment = {Gives a good intro on the importance and psychological plausibility of word grouping}, Title = {Grouping Words Using Statistical Context}, Year = 1995} @inproceedings{Hudson+91, Address = {Amsterdam, The Netherlands}, Author = {P. T. W. Hudson and R. H. Phaf}, Booktitle = {Proceedings of {{\sc icann}}-91, Espoo, Finland}, Date-Modified = {2011-06-21 18:26:07 +0200}, Editor = {Teuvo Kohonen and Kai M{\"{a}}kisara and Olli Simula and J. Kangas}, Pages = {1025--1028}, Publisher = {North Holland}, Title = {Orders of approximation of neural networks to brain structure levels, modules and computing power}, Year = 1991} @inproceedings{Hughes+94, Author = {J. Hughes and E. 
Attwell}, Booktitle = {Proceedings of the 11th European Conference on Artificial Intelligence (ECAI'94)}, Editor = {A. Cohn}, Pages = {535--539}, Publisher = {John Wiley \& Sons Ltd.}, Title = {The Automated Evaluation of Inferred Word Classifications}, Year = 1994} @phdthesis{Hughes94, Author = {J. Hughes}, School = {University of Leeds}, Title = {Automatically acquiring a classification of words}, Year = 1994} @article{Humphreys+85, Author = {G. W. Humphreys and L. J. Evett}, Journal = {The Behavioural and Brain Sciences}, Pages = {689--710}, Title = {Are There Independent Lexical and Nonlexical Routes in Word Processing? An Evaluation of the Dual-Route Theory of Reading}, Volume = 8, Year = 1985} @article{Hunnicutt76, Author = {S. Hunnicutt}, Journal = {American Journal of Computational Linguistics}, Pages = {1-72}, Title = {Phonological rules for a text-to-speech system}, Volume = {Microfiche 57}, Year = 1976} @techreport{Hunnicutt80, Address = {KTH, Sweden}, Author = {S. Hunnicutt}, Institution = {Speech Transmission Laboratory}, Number = {STL QPSR 2-3}, Title = {Grapheme-phoneme rules: a review}, Year = 1980} @book{Hunt+66, Address = {New York, NY}, Author = {E. B. Hunt and J. Marin and P. J. Stone}, Publisher = {Academic Press}, Title = {Experiments in induction}, Year = 1966} @book{Hunt62, Address = {New York}, Author = {E. B. Hunt}, Publisher = {Wiley}, Title = {Concept learning: an information processing problem}, Year = 1962} @book{Hutchins+92, Author = {W. John Hutchins and Harold L. Somers}, Date-Added = {2010-02-01 23:25:45 +0100}, Date-Modified = {2010-02-01 23:25:57 +0100}, Publisher = {Academic Press}, Title = {An Introduction to Machine Translation}, Year = {1992}} @book{Hutchins92, Address = {London}, Author = {W. J. Hutchins and H. L. Somers}, Publisher = {Academic Press}, Title = {An introduction to machine translation}, Year = 1992} @inproceedings{Huyssteen+04, Address = {Patras: University of Patras, Greece}, Author = {van Huyssteen, G.B. 
and Eiselen, E.R. and Puttkammer, M.J.}, Booktitle = {Proceedings of First Workshop on International Proofing Tools and Language Technologies}, Date-Added = {2010-01-29 15:10:34 +0100}, Date-Modified = {2010-01-29 15:10:43 +0100}, Pages = {91-99}, Title = {Re-evaluating evaluation metrics for spelling checker evaluations}, Year = {2004}} @inproceedings{Huyssteen+04b, Address = {Patras: University of Patras, Greece}, Author = {G. Van Huyssteen and M. {Van Zaanen}}, Booktitle = {Proceedings of First Workshop on International Proofing Tools and Language Technologies}, Date-Added = {2010-01-29 15:10:47 +0100}, Date-Modified = {2010-02-17 20:29:42 +0100}, Keywords = {spelling correction, Afrikaans, ilk}, Pages = {25-30}, Title = {Learning Compound Boundaries for Afrikaans Spelling Checking}, Year = {2004}} @article{Ide+98, Author = {Ide, N. and V\'{e}ronis, J.}, Date-Modified = {2009-09-06 20:38:45 +0200}, Journal = {Computational Linguistics}, Keywords = {wsd, word sense disambiguation}, Number = 1, Pages = {1--40}, Title = {Introduction to the Special Issue on Word Sense Disambiguation: The State of the Art}, Volume = 24, Year = 1998} @techreport{Ipa93, Author = {{IPA}}, Institution = {{International Phonetic Association}}, Title = {Extensions to {IPA} alphabet}, Year = 1993} @inproceedings{Isabelle92, Author = {P. Isabelle}, Booktitle = UWOED, Title = {Bi-textual aids for translators}, Year = 1992} @inproceedings{Isabelle93, Author = {Isabelle, P. and M. Dymetman and G. Foster and J.-M. Jutras and E. Macklovitch and F. Perrault and X. Ren and M. Simard}, Booktitle = {Proceedings of TMI-93}, Pages = {201--217}, Place = {Kyoto}, Title = {Translation Analysis and Translation Automation}, Year = 1993} @inproceedings{Islam+09, Address = {New York, NY, USA}, Author = {A. Islam and D. 
Inkpen}, Booktitle = {CIKM '09: Proceeding of the 18th ACM conference on Information and knowledge management}, Date-Added = {2010-02-12 22:08:23 +0100}, Date-Modified = {2010-02-14 19:57:45 +0100}, Doi = {http://doi.acm.org/10.1145/1645953.1646205}, Isbn = {978-1-60558-512-3}, Location = {Hong Kong, China}, Pages = {1689--1692}, Publisher = {ACM}, Title = {Real-word spelling correction using {G}oogle web {1T}n-gram data set}, Year = {2009}, Bdsk-Url-1 = {http://doi.acm.org/10.1145/1645953.1646205}} @inproceedings{Ittycheriah+00, Author = {A. Ittycheriah and M. Franz and W. Zhu and A. Ratnaparkhi}, Booktitle = {Proceedings of Text REtrieval Conference (TREC-9)}, Title = {IBM's Statistical Question Answering System}, Year = 2000} @inproceedings{Ittycheriah+01, Author = {A. Ittycheriah and M. Franz and W-J Zhu and A. Ratnaparkhi and R.J. Mammone}, Booktitle = {Proceedings of the 9th Text Retrieval Conference, NIST}, Title = {{IBM}'s Statistical Question Answering System}, Year = 2001} @article{Ivanova+95, Author = {I. Ivanova and M Kubat}, Date-Modified = {2009-11-14 19:01:57 +0100}, Journal = {Knowledge Based Systems, Special Issue on Knowledge Based Neural Networks}, Keywords = {neural networks, decision trees}, Number = 6, Title = {Initialization of neural networks by means of decision trees}, Volume = 8, Year = 1995} @inproceedings{Iwayama+94, Author = {M. Iwayama and T. 
Tokunaga}, Booktitle = {Proceedings of the 4th Conference on Applied Natural Language Processing}, Title = {A probabilistic model for text categorization based on a single random variable with multiple values}, Year = 1994} @book{Jackendoff77, Address = {Cambridge, MA}, Author = {Ray Jackendoff}, Publisher = MIT, Title = {X-bar syntax}, Year = 1977} @book{Jackendoff83, Address = {Cambridge, MA}, Author = {Ray Jackendoff}, Publisher = MIT, Title = {Semantic and cognition}, Year = 1983} @book{Jackendoff90, Address = {Cambridge, MA}, Author = {Ray Jackendoff}, Publisher = MIT, Title = {Semantic structures}, Year = 1990} @article{Jacobs+91, Author = {R. A. Jacobs and M. I. Jordan and S. J. Nowlan and G. E. Hinton}, Journal = {Neural Computation}, Pages = {79--87}, Title = {Adaptive mixtures of local experts}, Volume = 3, Year = 1991} @inproceedings{Jacobs92, Author = {P. Jacobs}, Booktitle = {Proceedings of the 3rd Conference on Applied Natural Language Processing}, Title = {Joining statistics with {NLP} for text categorization}, Year = 1992} @inproceedings{James+01, Address = {New York, NY, USA}, Author = {Christina L. James and Kelly M. Reischel}, Booktitle = {{CHI} '01: {P}roceedings of the {SIGCHI} {C}onference on {H}uman {F}actors in {C}omputing {S}ystems}, Location = {Seattle, Washington, USA}, Pages = {365--371}, Publisher = {ACM}, Title = {{T}ext {I}nput for {M}obile {D}evices: {C}omparing {M}odel {P}rediction to {A}ctual {P}erformance}, Year = 2001} @article{Jared90, Author = {D. Jared and K. McRae and M.S. Seidenberg}, Journal = {Journal of Memory and Language}, Pages = {687--715}, Title = {The basis of consistency effects in word naming}, Volume = 29, Year = 1990} @article{Jelinek75, Author = {Jelinek, F. and Bahl, L. 
and Mercer, R.}, Date-Added = {2009-11-15 00:36:05 +0100}, Date-Modified = {2009-11-15 00:36:19 +0100}, Journal = {IEEE Transactions on Information Theory}, Number = {3}, Pages = {250--256}, Title = {Design of a linguistic statistical decoder for the recognition of continuous speech}, Volume = {21}, Year = {1975}} @article{Jelinek85, Author = {F. Jelinek and R. Mercer}, Journal = {IBM Technical Disclosure Bulletin}, Pages = {2591--2594}, Title = {Probability distribution estimation from sparse data}, Volume = 28, Year = 1985} @incollection{Jelinek90, Address = {San Mateo, California}, Author = {Frederick Jelinek}, Booktitle = {Readings in Speech Recognition}, Editor = {Alex Waibel and Kai-Fu Lee}, Pages = {450-506}, Publisher = {Morgan Kaufmann Publishers, Inc.}, Title = {Self-Organized Language Modeling for Speech Recognition}, Year = 1990} @inproceedings{Jelinek91, Author = {Fred Jelinek}, Booktitle = {Eurospeech}, Title = {Up From Trigrams!}, Year = 1991} @incollection{Jelinek92, Author = {Frederick Jelinek and Robert L. Mercer and Salim Roukos}, Booktitle = {Advances in Speech Signal Processing}, Editor = {Sadaoki Furui and M. Mohan Sondhi}, Pages = {651-699}, Publisher = {Marcel Dekker, Inc.}, Title = {Principles of Lexical Language Modeling for Speech Recognition}, Year = 1992} @book{Jelinek98, Address = {Cambridge, MA}, Author = {F. Jelinek}, Publisher = MIT, Title = {Statistical Methods for Speech Recognition}, Year = 1998} @techreport{Jensen86, Author = {K. Jensen}, Institution = {IBM T. J. Watson Research Center}, Title = {PEG 1986: A broad-coverage computational syntax of English}, Year = 1986} @article{Jensen87, Author = {K. Jensen and J. Binot}, Journal = CL, Number = {3--4}, Title = {Disambiguating prepositional phrase attachment by using on-line dictionary definitions}, Volume = 13, Year = 1987} @article{Jiang+01, Author = {Mon F. Jiang and Shian S. Tseng and Chih M. 
Su}, Journal = {Pattern Recognition Letters}, Pages = {691--700}, Title = {Two-Phase Clustering Process for Outliers Detection}, Number = {6--7}, Volume = 22, Year = 2001} @inproceedings{Jijkoun+04, Address = {Barcelona, Spain}, Author = {V. Jij\-koun and M. {De Rijke}}, Booktitle = {Proceedings of the 42nd Meeting of the Association for Computational Linguistics (ACL'04), Main Volume}, Date-Modified = {2010-09-28 10:49:31 +0200}, Month = {July}, Pages = {311--318}, Title = {Enriching the Output of a Parser Using Memory-based Learning}, Year = 2004} @inproceedings{Jin+01, Address = {Chicago, IL}, Author = {R. Jin and G. Agrawal}, Booktitle = {Proceedings of the First SIAM Conference on Data Mining}, Title = {A Middleware for Developing Parallel Data Mining Applications}, Year = 2001} @inproceedings{Jing+00, Address = {San Francisco, CA, USA}, Author = {Hongyan Jing and Kathleen McKeown}, Booktitle = {Proceedings of the 1st Conference of the North American Chapter of the Association for Computational Linguistics}, Date-Added = {2009-11-15 14:33:50 +0100}, Date-Modified = {2009-11-15 14:33:59 +0100}, Pages = {178--185}, Title = {Cut and paste based text summarization}, Year = {2000}} @book{Joachims02, Address = {Dordrecht}, Author = {T. Joachims}, Publisher = {Kluwer Academic Publishers}, Title = {Learning to Classify Text using Support Vector Machines: Methods, Theory, and Algorithms}, Year = 2002} @inproceedings{Joachims98, Author = {Joachims, T.}, Booktitle = {Proceedings of ECML-98, 10th European Conference on Machine Learning}, Date-Added = {2009-11-15 00:36:48 +0100}, Date-Modified = {2009-11-15 00:37:12 +0100}, Editor = {C. Nedellec and C. Rouveirol}, Keywords = {text classification, support vector machines}, Pages = {137--142}, Title = {Text categorization with support vector machines: Learning with many relevant features}, Volume = {1398}, Year = {1998}} @incollection{Joachims99, Archive = {Iris, web}, Author = {T. 
Joachims}, Booktitle = {Advances in Kernel Methods - Support Vector Learning}, Editor = {B. Sch\"{o}lkopf and C. Burges}, Key = {SVM}, Publisher = MIT, Source = {Iris}, Title = {Making large-scale SVM learning practical}, Year = 1999} @article{Johansen+02, Author = {M.K. Johansen and T.J. Palmeri}, Journal = {Cognitive Psychology}, Pages = {482--553}, Title = {Are there representational shifts during category learning?}, Volume = 45, Year = 2002} @book{Johansson86, Address = {Bergen, Norway}, Author = {S. Johansson}, Pages = 149, Publisher = {Norwegian Computing Centre for the Humanities}, Title = {The tagged {LOB} Corpus: User's Manual}, Year = 1986} @inproceedings{John+94, Address = {San Mateo, CA}, Author = {G. John and R. Kohavi and K. Pfleger}, Booktitle = {Proceedings of the Eleventh International Conference on Machine Learning}, Pages = {121--129}, Publisher = {Morgan Kaufmann}, Title = {Irrelevant features and the subset selection problem}, Year = 1994} @book{Johnson86, Address = {Chicago, Il}, Author = {M. Johnson}, Publisher = {{Chicago University Press}}, Title = {{The body in the mind}}, Year = 1986} @book{Johnson93, Address = {Oxford, UK}, Author = {M. Johnson}, Publisher = {{Blackwell}}, Title = {{Brain Development and Cognition}}, Year = 1993} @article{Johnson98, Author = {M. Johnson}, Journal = {Computational Linguistics}, Number = 4, Pages = {613--632}, Title = {{PCFG} models of linguistic tree representations}, Volume = 24, Year = 1998} @article{Johnstone+01, Author = {T. Johnstone and D. R. Shanks}, Journal = {Cognitive Psychology}, Pages = {61--112}, Title = {Abstractionist and processing accounts of implicit learning}, Volume = 42, Year = 2001} @book{Jones+91, Address = {Cambridge, UK}, Author = {D. Jones and A. C. Gimson and S. Ramsaran}, Edition = 14, Publisher = CUP, Title = {{E}nglish pronouncing dictionary}, Year = 1991} @inproceedings{Jones+97, Address = {Washington, DC}, Author = {M. P. Jones and J. H. 
Martin}, Booktitle = {Proceedings of the Fifth Conference on Applied Natural Language Processing}, Title = {Contextual spelling correction using latent semantic analysis}, Year = 1997} @book{Jones96, Address = {London, UK}, Author = {D. Jones}, Publisher = {UCL Press}, Title = {Analogical natural language processing}, Year = 1996} @misc{Jones??, Author = {Doug Jones}, Title = {Predicting Semantics from Syntactic Cues - An Evaluation of {Levin's English} Verb Classes and Alternations}} @techreport{Jordan+93, Author = {M. I. Jordan and R. A. Jacobs}, Institution = {MIT Computational and Cognitive Science}, Number = 9301, Title = {Hierarchical Mixtures of Experts and the EM Algorithm}, Year = 1993} @techreport{Jordan+96, Author = {M. I. Jordan and C. M. Bishop}, Institution = {Artificial Intelligence Laboratory, Massachusetts Institute of Technology}, Number = {A.I. Memo No. 1562}, Title = {Neural networks}, Year = 1996} @inproceedings{Jordan86, Address = {Hillsdale, NJ}, Author = {M. I. Jordan}, Booktitle = {Proceedings of the Eighth Annual Meeting of the Cognitive Science Society}, Pages = {531--546}, Publisher = {Lawrence Erlbaum Associates}, Title = {Attractor dynamics and parallellism in a connectionist sequential machine}, Year = 1986} @inproceedings{Joshi+94, Author = {Aravind K. Joshi and B. Srivinas}, Booktitle = COLING96, Pages = {154--160}, Title = {Disambiguation of Super Parts of Speech (or Supertags): Almost Parsing}, Year = 1996} @inproceedings{Junker97, Author = {M. 
Junker}, Booktitle = SIGIR, Title = {SIGIR Poster: The effectiveness of using Thesauri in IR}, Year = 1997} @inproceedings{Jurafski91, Author = {Jurafski, D}, Booktitle = {Proceedings of Cognitive Science Society}, Title = {An On-Line Model of Human Sentence Interpretation}, Year = 1991} @article{Jurafski94, Author = {Jurafski, D.}, Journal = {{Cognitive Science}}, Title = {{A Cognitive Model of Sentence Comprehension: the Construction Grammar Approach}}, Year = 1994} @book{Jurafsky+00, Address = {Englewood Cliffs, New Jersey}, Author = {Daniel Jurafsky and James H. Martin}, Publisher = {Prentice Hall}, Title = {Speech and Language Processing: An Introduction to Natural Language Processing, Computational Linguistics, and Speech Recognition}, Year = 2000} @techreport{Justeson93, Author = {J. Justeson and S. Katz}, Date-Modified = {2010-09-18 14:35:47 +0200}, Institution = {IBM Research Division}, Keywords = {term extraction}, Number = {RC 18906}, Title = {Technical terminology: some linguistic properties and an algorithm for identification in text}, Year = 1993} @article{Justeson95, Author = {J. Justeson and S. Katz}, Date-Modified = {2010-09-18 14:36:04 +0200}, Journal = JNLE, Keywords = {term extraction}, Number = 1, Pages = {9--28}, Title = {Technical terminology: some linguistic properties and an algorithm for identification in text}, Volume = 1, Year = 1995} @incollection{Kager95, Address = {Cambridge, MA}, Author = {R. Kager}, Booktitle = {The handbook of phonological theory}, Editor = {J. A. Goldsmith}, Pages = {367--402}, Publisher = {Blackwell}, Title = {The metrical theory of word stress}, Year = 1995} @inproceedings{Kaji92, Author = {Hiroyuki Kaji and Yuuko Kida and Yasutsugu Morimoto}, Booktitle = {Coling}, Title = {Learning translation templates from bilingual texts}, Year = 1992} @inproceedings{KalaiChBlRo99a, Address = {Phoenix, Arizona}, Author = {A. Kalai and S. Chen and A. Blum and R. 
Rosenfelds}, Booktitle = {ICASSP-99}, Title = {On-Line Algorithms for Combining Language Models}, Year = 1999} @incollection{Kann+01, Address = {Trier, Germany}, Author = {Vigo Kann and R. Domeij and J. Hollman and M. Tillenius}, Booktitle = {Text as a Linguistic Paradigm: Levels, Constituents, Constructs, volume 60 of Quantitative Linguistics}, Date-Added = {2010-01-29 15:11:08 +0100}, Date-Modified = {2010-01-29 15:11:20 +0100}, Editor = {Uhlirova, L. and G. Wimmer and G. Altmann and R. Koehler}, Pages = {108--123}, Publisher = {Wissenschaftlicher Verlag Trier}, Title = {Implementation aspects and applications of a spelling correction algorithm}, Year = {2001}} @article{Kaplan+94, Author = {Ron Kaplan and Martin Kay}, Journal = CL, Number = 3, Pages = {331--378}, Title = {Regular Models of Phonological Rule Systems}, Volume = 20, Year = 1994} @inproceedings{Karlsson90, Author = {Fred Karlsson}, Booktitle = Coling, Title = {Constraint grammar as a framework for parsing running text}, Year = 1990} @book{Karmiloff92, Address = {Cambridge, MA}, Author = {A. Karmiloff-Smith}, Publisher = MIT, Title = {{Beyond Modularity: a developmental perspective on cognitive science}}, Year = 1992} @inproceedings{Karov+96, Author = {Y. Karov and S. Edelman}, Booktitle = {Fourth workshop on very large corpora}, Date-Modified = {2009-09-06 20:38:58 +0200}, Keywords = {word sense disambiguation, wsd}, Pages = {42-55}, Title = {Learning similarity-based word sense disambiguation from sparse data}, Year = 1996} @article{Karov+98, Author = {Y. Karov and S. Edelman}, Booktitle = CL, Date-Modified = {2009-09-06 20:39:06 +0200}, Keywords = {wsd, word sense disambiguation}, Pages = {41--59}, Title = {Similarity-based word sense disambiguation}, Volume = 4, Year = 1998} @article{Karttunen+96, Author = {L. Karttunen and J. Chanod and G. Grefenstette and A. 
Schiller}, Journal = JNLE, Number = 4, Pages = {305--338}, Title = {Regular expressions for language engineering}, Volume = 2, Year = 1996} @article{Kashyap+83, Author = {R. L. Kashyap and B. J. Oommen}, Date-Added = {2010-01-29 15:11:29 +0100}, Date-Modified = {2010-01-29 15:11:39 +0100}, Journal = {Pattern Recognition Letters}, Pages = {147--154}, Title = {Spelling correction using probabilistic methods}, Volume = {2}, Year = {1983}} @article{Kasif+98, Author = {Simon Kasif and Stven Salzberg and David Waltz and John Rachlin and David K. Aha}, Journal = {Artificial Intelligence}, Number = {1--2}, Pages = {287--311}, Title = {A Probabilistic Framework for Memory-Based Reasoning}, Topic = {probabilistic-reasoning;memory-based-reasoning;}, Volume = 104, Year = 1998} @inproceedings{Kate+10, Address = {Uppsala, Sweden}, Author = {Kate, {R. J.} and Mooney, R.}, Booktitle = {Proceedings of the Fourteenth Conference on Computational Natural Language Learning}, Month = {July}, Pages = {203--212}, Publisher = {Association for Computational Linguistics}, Title = {Joint Entity and Relation Extraction Using Card-Pyramid Parsing}, Year = {2010}} @incollection{Katz+92, Author = {L. Katz and R. Frost}, Booktitle = {Haskins Laboratories Status Report on Speech Research 1992}, Pages = {147--160}, Publisher = {Haskins Laboratories}, Title = {The reading process is different for different orthographies: the orthographic depth hypothesis}, Year = 1992} @article{Katz87, Author = {Slava M. 
Katz}, Journal = {IEEE Transactions on Acoustics, Speech and Signal Processing}, Month = {March}, Pages = {400--401}, Title = {Estimation of Probabilities from Sparse Data for the Language Model Component of a Speech Recognizer}, Volume = {ASSP-35}, Year = 1987} @inproceedings{Katz97, Author = {Boris Katz}, Booktitle = {AAAI Spring Symposium on Natural Language Processing for the World Wide Web}, Title = {From Sentence Processing to Information Access on the World Wide Web}, Year = 1997} @inproceedings{Kawahare+02, Address = {Las Palmas, Canary Islands}, Author = {Daisuke Kawahara and Sadao Kurohashi and {K\^oiti} Hasida}, Booktitle = {Proceedings of the {3rd International Conference on Language Resources and Evaluation (LREC-2002)}}, Date-Added = {2009-12-26 20:57:57 +0100}, Date-Modified = {2009-12-26 21:07:28 +0100}, Pages = {2008--2013}, Title = {Construction of a {J}apanese Relevance-tagged Corpus}, Year = {2002}} @techreport{Kay80, Author = {M. Kay}, Institution = {Xerox Palo Alto Research Center}, Number = {CSL-80-11}, Place = {Palo Alto, California}, Title = {The proper place of men and machines in language translation}, Year = 1980} @inproceedings{Kay87, Address = {Copenhagen, Denmark}, Author = {M. Kay}, Booktitle = {Proceedings of the third Conference of the European Chapter of the Association for Computational Linguistics}, Pages = {2--10}, Title = {Non-concatenative Finite-state Morphology}, Year = 1987} @article{Kay93, Author = {M. Kay and M. 
Roscheisen}, Journal = CL, Number = 1, Pages = {121--142}, Title = {Text-translation alignment}, Volume = 19, Year = 1993} @article{Kazakov+01, Author = {Dimitar Kazakov and Suresh Manandhar}, Date-Modified = {2008-07-23 16:59:48 +0200}, Journal = {Machine Learning}, Keywords = {word segmentation, unsupervised learning}, Pages = {121--162}, Title = {Unsupervised Learning of Word Segmentation Rules with Genetic Algorithms and Inductive Logic Programming}, Volume = 43, Year = 2001} @inproceedings{Kempen87, Address = {Los Altos, CA}, Author = {G. Kempen}, Booktitle = {Proceedings of the Tenth International Joint Conference on Artificial Intelligence}, Pages = {655--660}, Publisher = {Morgan Kaufmann}, Title = {A framework for incremental syntactic processing}, Year = 1987} @unpublished{Kempen94, Author = {G. Kempen}, Note = {Final version will appear in {\em Spektator, tijdschrift voor Neerlandistiek}}, Title = {De mythe van het woordbeeld: Spellingherziening taalpsychologisch doorgelicht}, Year = 1994} @incollection{Kempen95, Address = {London}, Author = {G. Kempen}, Booktitle = {Computational psycholinguistics: Symbolic and subsymbolic models of language processing}, Chapter = 8, Editor = {A. Dijkstra and K. {De Smet}}, Publisher = {Taylor \& Francis}, Title = {Computational models of syntactic processing in human language comprehension}, Year = 1995} @book{Kenstowicz93, Address = {Oxford, UK}, Author = {M. Kenstowicz}, Publisher = {Blackwell}, Title = {Phonology in generative grammar}, Year = 1993} @article{Kentridge94, Author = {R. Kentridge}, Date-Modified = {2008-07-23 16:59:27 +0200}, Journal = {{Psycholoquy}}, Keywords = {connectionism, human language processing}, Title = {{Modularity of Mind, cerebral localisation and connecionist neuropsychology}}, Volume = 87, Year = {1994}} @article{Keuleers+07, Author = {E. Keuleers and D. Sandra and W. Daelemans and S. Gillis and G. Durieux and E. 
Martens}, Journal = {Cognitive Psychology}, Number = 4, Pages = {283--318}, Title = {Dutch plural inflection: The exception that proves the analogy}, Volume = 54, Year = 2007} @inproceedings{Khardon+99, Author = {R. Khardon and D. Roth and L. G. Valiant}, Booktitle = IJCAI, Pages = {911--917}, Title = {Relational Learning for {NLP} using Linear Threshold Elements}, Year = 1999} @inproceedings{Khoja01, Author = {S. Khoja}, Booktitle = {Proceedings of the Student Workshop at NAACL-2001}, Date-Modified = {2009-11-14 17:35:07 +0100}, Keywords = {part-of-speech tagging, Arabic}, Pages = {20--25}, Title = {{APT}: {Arabic} Part-of-speech tagger}, Year = 2001} @inproceedings{Kibler+87, Address = {San Mateo, CA}, Author = {D. Kibler and D. W. Aha}, Booktitle = {Proceedings of the Fourth International Workshop on Machine Learning}, Editor = {P. Langley}, Pages = {24--30}, Publisher = {Morgan Kaufmann}, Title = {Learning representative exemplars of concepts: an initial case study}, Year = 1987} @inproceedings{Kievit94, Address = {Tilburg}, Author = {L. Kievit}, Booktitle = {Proceedings of International workshop on Computational Semantics}, Editor = {Bunt, H. and Muskens, R. and Rentier, G.}, Institution = {ITK}, Pages = {131--139}, Title = {Representing Structural Syntactic Ambiguity}, Year = 1994} @inproceedings{Kilgarriff+00, Author = {Kilgarriff, A. 
and Rosenzweig, J.}, Booktitle = {Proceedings of the 2nd International Conference on Language Resources and Evaluation}, Date-Modified = {2009-09-06 20:39:17 +0200}, Keywords = {senseval, wsd, word sense disambiguation}, Pages = {1239--1243}, Title = {English SENSEVAL: Report and Results}, Year = 2000} @book{Kilgarriff00, Address = {Dordrecht, The Netherlands}, Date-Modified = {2011-06-21 18:13:06 +0200}, Editor = {Adam Kilgarriff}, Keywords = {wsd, word sense disambiguation}, Publisher = {Kluwer}, Title = {Computers and the Humanities, special issue on Senseval}, Year = {forthcoming}} @inproceedings{Kilgarriff01, Archive = {Iris}, Author = {A. Kilgarriff}, Booktitle = {Proceedings of SENSEVAL-2}, Date-Modified = {2008-07-23 16:57:52 +0200}, Keywords = {word sense disambiguation, lexical semantics}, Pages = {17-20}, Source = {Iris,web}, Title = {English lexical sample task description}, Year = 2001} @inproceedings{Kim+06, Address = {Trento, Italy}, Author = {S.N. Kim and T. Baldwin}, Booktitle = {Proceedings of the Third ACL SIGSEM Workshop on Prepositions}, Pages = {65--72}, Title = {Automatic identification of English verb particle constructions using linguistic features}, Year = 2006} @inproceedings{Kim+06b, Address = {Sydney, Australia}, Author = {S.N. Kim and T. Baldwin}, Booktitle = {Proceedings of the COLING/ACL 2006 Main Conference Poster Sessions}, Pages = {491--498}, Title = {Interpreting semantic relations in noun compounds via verb semantics}, Year = 2006} @inproceedings{Kingsbury+02, Address = {San Diego, CA}, Author = {P. Kingsbury and M. Palmer and M. Marcus}, Booktitle = {Proceedings of the Human Language Technology Conference}, Title = {Adding semantic annotation to the {P}enn {T}reebank}, Year = 2002} @incollection{Kiparski95, Author = {P. Kiparski}, Booktitle = {The handbook of phonological theory}, Editor = {J. A. 
Goldsmith}, Pages = {640--670}, Publisher = {Cambridge, MA: Blackwell}, Title = {The phonological basis of sound change}, Year = 1995} @inproceedings{Kiraz94, Author = {G. Kiraz}, Booktitle = {Proceedings of COLING'94}, Pages = {180--186}, Title = {Multi-tape Two-level Morphology: A Case study in Semitic Non-Linear Morphology}, Volume = 1, Year = 1994} @misc{Kirsch94, Author = {I. Kirsch}, Institution = {Bar-Ilan University}, Note = {Ph.D. Research Proposal}, Title = {Automatic Formulation of Queries in Textual Information Retrieval Systems}, Year = 1994} @inproceedings{Kitano93, Author = {Kitano, H.}, Booktitle = {IJCAI}, Date-Modified = {2008-07-23 17:00:02 +0200}, Keywords = {parallelism}, Pages = {813--834}, Title = {Challenges of massive parallelism}, Year = 1993} @book{Klavans+96, Address = {Cambridge, MA}, Editor = {Judith L. Klavans and Philip Resnik}, Publisher = MIT, Title = {The Balancing Act: Combining Symbolic and Statistical Approaches to Language}, Year = 1996} @inproceedings{Klavans90, Author = {J. Klavans and E. Tzoukermann}, Booktitle = Coling, Title = {The {BICORD} System}, Year = 1990} @inproceedings{Klein+03, Address = {Sapporo, Japan}, Author = {D. Klein and C. Manning}, Booktitle = {Proceedings of ACL-2003}, Pages = {423--430}, Title = {Accurate unlexicalized parsing}, Year = 2003} @inproceedings{Klein+03b, Author = {D. Klein and J. Smarr and H. Nguyen and C.D. Manning}, Booktitle = {Proceedings of the seventh Conference on Natural Language Learning at HLT-NAACL 2003}, Date-Added = {2009-11-15 00:37:18 +0100}, Date-Modified = {2009-11-15 00:37:35 +0100}, Editor = {W. Daelemans and M. Osborne}, Pages = {180--183}, Title = {Named Entity Recognition with Character-Level Models}, Year = {2003}} @incollection{Klima72, Address = {Cambridge, MA}, Author = {E. S. Klima}, Booktitle = {Language by Ear and by Eye: The Relationship Between Speech and Reading}, Date-Modified = {2008-07-23 17:00:24 +0200}, Editor = {J.F. Kavanagh and I.G. 
Mattingly}, Keywords = {writing systems}, Pages = {57--80}, Publisher = MIT, Title = {How Alphabets Might Reflect Language}, Year = 1972} @article{Klosgen92, Author = {W. Klosgen}, Journal = {International Journal for Intelligent Systems}, Number = 7, Pages = {649--673}, Title = {Problems for knowledge discovery in databases and their treatment in the statistics interpreter {EXPLORA}}, Volume = 7, Year = 1992} @inproceedings{Klosgen93, Author = {W. Klosgen}, Booktitle = {Proceedings of the 1993 workshop on Knowledge Discovery in Databases}, Title = {Some Implementation Aspects of a Discovery System}, Year = 1993} @inproceedings{Kneser+93, Author = {R. Kneser and H. Ney}, Booktitle = {Proc. of EUROSPEECH'93}, Title = {Improved Clustering Techniques for Class-Based Language Modeling}, Year = 1993} @article{Knight+02, Author = {Kevin Knight and Daniel Marcu}, Date-Added = {2009-11-15 14:34:04 +0100}, Date-Modified = {2009-11-15 14:34:12 +0100}, Journal = {Artificial Intelligence}, Number = 1, Pages = {91--107}, Title = {Summarization beyond sentence extraction: A probabilistic approach to sentence compression}, Volume = 139, Year = 2002} @inproceedings{Knorr+98, Author = {E. Knorr and R. Ng}, Booktitle = {Proceedings of the 24th International Conference on Very Large Data Bases (VLDB'98)}, Title = {Algorithms for Mining Distance-Based Outliers in Large Datasets}, Year = 1998} @book{Knuth73, Address = {Reading, MA}, Author = {D. E. Knuth}, Date-Modified = {2010-08-24 22:35:30 +0200}, Publisher = {Addison-Wesley}, Title = {The Art of Computer Programming}, Volume = {3: Sorting and Searching}, Year = 1973} @book{Knuth98, Address = {Reading, MA}, Author = {D. E. Knuth}, Date-Modified = {2010-08-24 22:35:16 +0200}, Edition = {Second}, Publisher = {Addison-Wesley}, Title = {The Art of Computer Programming}, Volume = {3: Sorting and Searching}, Year = 1998} @article{Kocsor+00, Author = {A. Kocsor and L. T\'{o}th and A. Kuba jr. and K. Kov\'{a}cs and M. Jelasity and T. 
Gyim\'{o}thy and J. Csirik}, Journal = {International Journal of Speech Technology}, Number = {3/4}, Pages = {263--276}, Title = {A comparative study of several feature transformation and learning methods for phoneme classification}, Volume = 3, Year = 2000} @book{Kodratoff+90, Address = {San Mateo, CA}, Editor = {Y. Kodratoff and R. Michalski}, Publisher = {Morgan Kaufmann}, Title = {Machine learning: An artificial intelligence approach}, Volume = {III}, Year = 1990} @article{Koehler87, Author = {R. Koehler}, Journal = {Theoretical Linguistics}, Pages = {242--257}, Title = {System theoretical linguistics}, Volume = 14, Year = 1987} @inproceedings{Koehler92, Address = {Stuttgart}, Author = {R. Koehler}, Booktitle = {SoftStat '91}, Editor = {F. Faulbaum}, Pages = {489--495}, Publisher = {Gustav Fischer}, Series = {Advances in Statistical Software}, Title = {Methoden und Modellen in der quantitativen Linguistik}, Volume = 3, Year = 1992} @inproceedings{Koehn+07, Address = {Prague, Czech Republic}, Author = {P. Koehn and H. Hoang and A. Birch and C. Callison-Burch and M. Federico and N. Bertoldi and B. Cowan and W. Shen and C. Moran and R. Zens and C. Dyer and O. Bojar and A. Constantin and E. Herbst}, Booktitle = {Proceedings of the 45th Annual Meeting of the Association for Computational Linguistics Companion Volume Proceedings of the Demo and Poster Sessions}, Month = {June}, Pages = {177--180}, Publisher = {Association for Computational Linguistics}, Title = {Moses: Open Source Toolkit for Statistical Machine Translation}, Url = {http://www.aclweb.org/anthology/P/P07/P07-2045}, Year = {2007}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/P/P07/P07-2045}} @inproceedings{Koehn03, Address = {Edmonton, Canada}, Author = {P. Koehn and F.-J. Och and D. Marcu}, Booktitle = {Proceedings of HLT-NAACL 2003}, Pages = {48--54}, Title = {Statistical Phrase-Based Translation}, Year = 2003} @inproceedings{Koehn04, Address = {Washington, District of Columbia}, Author = {P. 
Koehn}, Booktitle = {Proceedings of AMTA 2004}, Date-Modified = {2008-07-23 17:01:01 +0200}, Keywords = {machine translation, decoding, beam search}, Pages = {115--124}, Title = {Pharaoh: A Beam Search Decoder for Phrase-Based Statistical Machine Translation Models}, Year = 2004} @inproceedings{Koehn05, Address = {Phuket, Thailand}, Author = {P. Koehn}, Booktitle = {Machine Translation Summit X}, Date-Modified = {2008-07-23 17:00:42 +0200}, Keywords = {machine translation}, Pages = {79--86}, Title = {Europarl: A Parallel Corpus for Statistical Machine Translation}, Year = 2005} @article{Koenig+96, Author = {T. Koenig and D. Lehmann}, Journal = {Brain and Language}, Pages = {169--182}, Title = {Microstates in Language Related Brain Potential Maps Show Noun-Verb Differences}, Volume = 53, Year = 1996} @inproceedings{Kohavi+94, Author = {R. Kohavi and G. John and R. Long and D. Manley and K. Pfleger}, Booktitle = {Tools with Artificial Intelligence}, Pages = {740--743}, Publisher = {IEEE Computer Society Press}, Title = {MLC: A Machine Learning Library in {C++}}, Year = 1994} @inproceedings{Kohavi+95, Address = {Montreal}, Author = {Kohavi, R. and Li, C-H.}, Booktitle = {Proceedings of the Fourteenth International Joint Conference on Artificial Intelligence}, Date-Modified = {2009-11-14 19:03:59 +0100}, Keywords = {oblivious decision trees, decision trees}, Pages = {1071--1077}, Publisher = {Morgan Kaufmann}, Title = {Oblivious decision trees, graphs, and top-down pruning}, Year = 1995} @article{Kohavi+97, Author = {R. Kohavi and G. John}, Journal = {Artificial Intelligence Journal}, Number = {1--2}, Pages = {273--324}, Title = {Wrappers for feature subset selection}, Volume = 97, Year = 1997} @article{Kohonen82, Author = {T. Kohonen}, Journal = {Biological Cybernetics}, Pages = {135--140}, Title = {Analysis of a simple self-organising process}, Volume = 44, Year = 1982} @book{Kohonen84, Address = {Berlin}, Author = {T. 
Kohonen}, Publisher = {Springer-Verlag}, Series = {Series in Information Sciences}, Title = {Self-organisation and associative memory}, Volume = 8, Year = 1984} @inproceedings{Kohonen86, Address = {Paris, France}, Author = {T. Kohonen}, Booktitle = {Proceedings of the Eighth International Conference on Pattern Recognition}, Pages = {27--31}, Title = {Dynamically expanding context, with application to the correction of symbol strings in the recognition of continuous speech}, Year = 1986} @inbook{Kohonen88, Address = {Cambridge, MA}, Author = {Kohonen, T.}, Booktitle = {Pattern Recognition by Self-Organizing Neural Networks}, Editor = {G. Carpenter and S. Grossberg}, Publisher = {Bradford Books}, Title = {Neuro Phonetic Typewriter}, Year = 1988} @article{Kokkinakis00, Author = {D. Kokkinakis}, Journal = {Nordic Journal of Linguistics}, Number = 2, Pages = {191--213}, Title = {{PP}-Attachment Disambiguation for Swedish: Combining Unsupervised and Supervised Training Data}, Volume = 23, Year = 2000} @inproceedings{Kokkinakis00b, Address = {Berlin}, Author = {D. Kokkinakis}, Booktitle = {Proceedings of NLP-2000, Bridging the Gap Between Theory and Practice}, Date-Modified = {2009-09-06 20:39:41 +0200}, Editor = {D. Christodoulakis}, Keywords = {wsd, word sense disambiguation, unsupervised learning}, Number = 1835, Publisher = {Springer Verlag}, Series = {Lecture Notes in Artificial Intelligence}, Title = {Concordancing Revised or How to Aid the Recognition of New Senses in Large Corpora}, Year = 2000} @inproceedings{Kolak+03, Address = {Morristown, NJ, USA}, Author = {O. Kolak and W. Byrne and P. 
Resnik}, Booktitle = {NAACL '03: Proceedings of the 2003 Conference of the North American Chapter of the Association for Computational Linguistics on Human Language Technology}, Date-Added = {2010-02-12 22:11:08 +0100}, Date-Modified = {2010-02-12 23:59:21 +0100}, Doi = {http://dx.doi.org/10.3115/1073445.1073463}, Location = {Edmonton, Canada}, Pages = {55--62}, Publisher = {Association for Computational Linguistics}, Title = {A generative probabilistic {OCR} model for {NLP} applications}, Year = {2003}, Bdsk-Url-1 = {http://dx.doi.org/10.3115/1073445.1073463}} @inproceedings{Kolak+05, Address = {Morristown, NJ, USA}, Author = {O. Kolak and P. Resnik}, Booktitle = {HLT '05: Proceedings of the conference on Human Language Technology and Empirical Methods in Natural Language Processing}, Date-Added = {2010-01-29 15:26:03 +0100}, Date-Modified = {2010-02-12 23:59:32 +0100}, Doi = {http://dx.doi.org/10.3115/1220575.1220684}, Keywords = {spelling correction, OCR}, Location = {Vancouver, British Columbia, Canada}, Pages = {867--874}, Publisher = {Association for Computational Linguistics}, Title = {OCR post-processing for low density languages}, Year = {2005}, Bdsk-Url-1 = {http://dx.doi.org/10.3115/1220575.1220684}} @book{Kolb+90, Address = {New York}, Author = {Kolb, B. and Whishaw, I.}, Publisher = {W.H. Freeman}, Title = {{Fundamentals of Neuropsychology}}, Year = 1990} @article{Kolen+93, Author = {J. F. Kolen and J. B. Pollack}, Date-Modified = {2010-02-14 23:12:36 +0100}, Journal = {The Journal of Experimental and Theoretical AI}, Title = {The Observers' Paradox: Apparent Computational Complexity in Physical Systems}, Year = 1993} @book{Kolodner93, Author = {J. Kolodner}, Publisher = {San Mateo, CA: Morgan Kaufmann}, Title = {Case-based reasoning}, Year = 1993} @inproceedings{Kononenko94, Author = {I. 
Kononenko}, Booktitle = {Proceedings of ECML'94}, Pages = {171--182}, Title = {Estimating attributes: Analysis and extensions of RELIEF}, Year = 1994} @article{Korf85, Author = {R. Korf}, Journal = {Artificial Intelligence}, Number = 1, Pages = {97--109}, Title = {Depth-first iterative-deepening: An optimal admissible tree search}, Volume = 27, Year = 1985} @inproceedings{Koskenniemi+88, Author = {K. Koskenniemi and K. W. Church}, Booktitle = {Proceedings of the Twelfth International Conference on Computational Linguistics}, Organization = {John von Neumann Society for Computing Sciences}, Pages = {335--340}, Title = {Complexity, two-level morphology and {F}innish}, Year = 1988} @phdthesis{Koskenniemi83, Author = {K. Koskenniemi}, School = {University of Helsinki}, Title = {Two-level morphology: a general computational model for word-form recognition and production}, Year = 1983} @inproceedings{Koskenniemi83b, Address = {Los Alamos, CA}, Author = {Kimmo Koskenniemi}, Booktitle = {Proceedings of the 8th International Joint Conference on Artificial Intelligence}, Publisher = {Morgan Kaufmann}, Title = {Two-Level Model for Morphological Analysis}, Year = 1983} @inproceedings{Koskenniemi84, Author = {K. Koskenniemi}, Booktitle = {Proceedings of the Tenth International Conference on Computational Linguistics / 22nd Annual Conference of the Association for Computational Linguistics}, Pages = {178--181}, Title = {A general computational model for wordform recognition and production}, Year = 1984} @inproceedings{Kosmynin+96, Address = {Palo-Alto}, Author = {Kosmynin, A. and Davidson, I}, Booktitle = {PODP Workshop}, Title = {Using Background Contextual Knowledge For Documents Representation}, Year = 1996} @article{Krahmer+01, Author = {E. Krahmer and M. Swerts and M. Theune and M. 
Weegels}, Date-Modified = {2010-09-14 22:13:11 +0200}, Journal = {International Journal of Speech Technology}, Keywords = {spoken dialogue systems}, Number = 1, Pages = {19--30}, Title = {Error Detection in Spoken Human-Machine Interaction}, Volume = 4, Year = 2001} @article{Krott+01, Author = {A. Krott and R. H. Baayen and R. Schreuder}, Journal = {Linguistics}, Number = 1, Pages = {51--93}, Title = {Analogy in morphology: modeling the choice of linking morphemes in {D}utch}, Volume = 39, Year = 2001} @article{Krovetz+92, Author = {R. Krovetz and W. B. Croft}, Journal = {ACM Transactions on Information Systems}, Number = 2, Pages = {115--141}, Title = {Lexical Ambiguity and Information Retrieval}, Volume = 10, Year = 1992} @inproceedings{Krymolowski+98, Author = {Y. Krymolowski and D. Roth}, Booktitle = {COLING-ACL'98 workshop on the Usage of WordNet in Natural Language Processing Systems}, Title = {Incorporating Knowledge in Natural Language Learning: A Case Study}, Year = 1998} @inproceedings{Kubica+03, Author = {J. Kubica and A. Moore}, Booktitle = {The Third IEEE International Conference on Data Mining}, Date-Modified = {2010-09-18 14:38:05 +0200}, Editor = {X. Wu and A. Tuzhilin and J. Shavlik}, Month = {November}, Pages = {131--138}, Publisher = {IEEE Computer Society}, Title = {Probabilistic Noise Identification and Data Cleaning}, Year = 2003} @book{Kucera+67, Address = {Providence, RI}, Author = {H. Ku\v{c}era and W. N. Francis}, Publisher = {Brown University Press}, Title = {Computational Analysis of Present-Day {A}merican {E}nglish}, Year = 1967} @inproceedings{Kudo+00, Address = {Lisbon, Portugal}, Author = {Kudo, T. and Y. Matsumoto}, Booktitle = {CoNLL}, Date-Modified = {2013-12-21 16:33:01 +0000}, Pages = {142--147}, Title = {Use of support vector machines for chunk identification}, Year = 2000} @inproceedings{Kudo+01, Address = {Pittsburgh, PA, USA}, Archive = {Iris, web}, Author = {T. Kudo and Y. 
Matsumoto}, Booktitle = {Proceedings of NAACL}, Key = {SVM chunking}, Source = {Iris}, Title = {Chunking with Support Vector Machines}, Year = 2001} @book{Kuebler04, Address = {Amsterdam, The Netherlands}, Author = {S. K\"{u}bler}, Publisher = {John Benjamins}, Title = {Memory-based parsing}, Year = 2004} @techreport{Kukich91, Address = {Morristown, NJ 07960}, Author = {K. Kukich}, Date-Modified = {2010-02-08 13:04:32 +0100}, Institution = {Bellcore}, Month = {November}, Note = {Draft, spelling correction}, Title = {Automatic Spelling Correction: Detection, Correction and Context-Dependent Techniques}, Year = 1991} @article{Kukich92, Author = {K. Kukich}, Date-Modified = {2010-02-17 19:00:53 +0100}, Journal = {ACM Computing Surveys}, Keywords = {spelling correction}, Number = 4, Pages = {377--439}, Title = {Techniques for Automatically Correcting Words in Text}, Volume = 24, Year = 1992} @inproceedings{Kulesza+07, Author = {Kulesza, A. and Pereira, F.}, Booktitle = {Advances in Neural Information Processing Systems}, Date-Added = {2009-11-15 00:38:12 +0100}, Date-Modified = {2009-11-15 00:38:25 +0100}, Editor = {J.C. Platt and D. Koller and Y. Singer and S. Roweis}, Pages = {785--792}, Title = {Structured Learning with Approximate Inference}, Volume = {20}, Year = {2008}} @article{Kupiec92, Author = {Julian M. Kupiec}, Journal = {Computer Speech and Language}, Pages = {225--242}, Title = {Robust Part-of-Speech Tagging Using a Hidden Markov Model}, Volume = 6, Year = 1992} @inproceedings{Kupiec93, Author = {Julian Kupiec}, Booktitle = {Proceedings of ACM SIGIR'93}, Pages = {181--190}, Title = {MURAX: A Robust Linguistic Approach For Question Answering Using An On-Line Encyclopedia}, Year = 1993} @inproceedings{Kupiec93a, Author = {J. 
Kupiec}, Booktitle = ACL, Title = {An algorithm for finding noun phrase correspondences in bilingual corpora}, Year = 1993} @techreport{Kuramochi+02, Author = {Michihiro Kuramochi and George Karypis}, Institution = {IEEE Transactions on Knowledge and Data Engineering}, Title = {An efficient algorithm for discovering frequent subgraphs}, Year = {2002}} @article{Kwok90, Author = {K. L. Kwok}, Journal = {{ACM} Transactions on Information Systems}, Number = 4, Pages = {363--386}, Title = {Experiments with a component theory of probabilistic information retrieval based on single terms as document components}, Volume = 8, Year = 1990} @inproceedings{Lafferty+01, Address = {Williamstown, MA}, Author = {J. Lafferty and A. McCallum and F. Pereira}, Booktitle = {Proceedings of the 18th International Conference on Machine Learning}, Date-Modified = {2008-07-23 16:22:39 +0200}, Keywords = {conditional random fields}, Title = {Conditional Random Fields: Probabilistic Models for Segmenting and Labeling Sequence Data}, Year = 2001} @article{Lafferty+06, Author = {Lafferty, J. and Wasserman, L.}, Date-Added = {2009-11-15 00:38:41 +0100}, Date-Modified = {2009-11-15 00:38:56 +0100}, Journal = {Statistica Sinica}, Number = {2}, Pages = {307--323}, Title = {Challenges in statistical machine learning}, Volume = {16}, Year = {2006}} @inproceedings{Lafferty+92, Author = {Lafferty, J. and D. Sleator and D. Temperley}, Booktitle = {Proceedings of AAAI Fall Symp.}, Date-Modified = {2009-02-21 19:48:00 +0100}, Keywords = {link grammar, probabilistic, natural language processing, entropy of English, trigrams}, Title = {Grammatical Trigrams: A Probabilistic Model of Link Grammar}, Year = 1992} @inproceedings{Lafferty92, Author = {J. Lafferty and D. Sleator and D. Temperley}, Booktitle = {Working Notes, AAAI Fall Symposium on Probabilistic Approaches to Natural Language}, Title = {Grammatical trigrams: A probabilistic approach to {L}ink {G}rammar}, Year = 1992} @book{Lakoff87, Author = {G. 
Lakoff}, Publisher = {University of Chicago Press}, Title = {Women, fire, and dangerous things}, Year = 1987} @book{Landau+85, Address = {Cambridge, MA}, Author = {B. Landau and L. Gleitman}, Publisher = {Harvard University Press}, Title = {Language and Experience}, Year = 1985} @article{Landau89, Author = {G. M. Landau and U. Vishkin}, Journal = {Journal of Algorithms}, Number = 2, Pages = {157-169}, Title = {Fast parallel and serial approximate string matching}, Volume = 10, Year = 1989} @article{Landauer+73, Author = {T. K. Landauer and L. A. Streeter}, Journal = {Journal of Learning and Verbal Behaviour}, Pages = {119--131}, Title = {Structural Differences between Common and Rare Words: Failure or Equivalence Assumptions for Theories of Word Recognition}, Volume = 12, Year = 1973} @inproceedings{Landauer90, Author = {Thomas K. Landauer and Michael L. Littman}, Booktitle = UWOED, Title = {Fully automatic cross-language document retrieval using latent semantic indexing}, Year = 1990} @article{LandauerDu97, Author = {T. K. Landauer and S. T. Dumais}, Journal = {Psychological Review}, Pages = {211-240}, Title = {A Solution to Plato's Problem: The Latent Semantic Analysis Theory of Acquisition, Induction, and Representation of Knowledge}, Volume = {104(2)}, Year = 1997} @book{Langacker91, Address = {Berlin}, Author = {R. Langacker}, Publisher = {Mouton De Gruyter}, Title = {Concept, Image, and Symbol. The Cognitive Basis of Grammar}, Year = 1991} @unpublished{Langacker96, Annote = {Cognitive Linguistics}, Author = {R. Langacker}, Title = {A Dynamical Usage-Based Model}, Year = 1996} @inproceedings{Langley+92, Author = {P. Langley and W. Iba and K. Thompson}, Booktitle = {Proceedings of the Tenth Annual Conference on Artificial Intelligence}, Pages = {223--228}, Publisher = {{{\sc aaai}} Press and {{\sc mit}} Press}, Title = {An analysis of {B}ayesian classifiers}, Year = 1992} @inproceedings{Langley+94, Address = {Menlo Park, CA}, Author = {Langley, P. 
and Sage, S.}, Booktitle = {Case-Based Reasoning: Papers from the 1994 Workshop (Technical Report WS-94-01)}, Date-Modified = {2009-11-14 19:02:22 +0100}, Editor = {D. W. Aha}, Keywords = {decision trees, oblivious decision trees}, Publisher = {AAAI Press}, Title = {Oblivious decision trees and abstract cases}, Year = 1994} @book{Langley96, Address = {San Mateo, CA}, Author = {P. Langley}, Publisher = {Morgan Kaufmann}, Title = {Elements of machine learning}, Year = 1996} @inproceedings{Lapata+04, Address = {Boston, MA}, Author = {M. Lapata and F. Keller}, Booktitle = {HLT-NAACL 2004: Main Proceedings}, Date-Added = {2010-02-10 20:33:58 +0100}, Date-Modified = {2011-06-21 18:11:57 +0200}, Editor = {Susan Dumais and Daniel Marcu and Salim Roukos}, Month = {May 2 - May 7}, Pages = {121--128}, Publisher = {Association for Computational Linguistics}, Title = {The {W}eb as a {B}aseline: Evaluating the {P}erformance of {U}nsupervised {W}eb-based {M}odels for a {R}ange of {NLP} {T}asks}, Year = 2004} @techreport{Lappin89, Author = {S. Lappin and I. Golan and M. Rimon}, Institution = {IBM Israel Center of Science and Technology}, Number = {88.268}, Title = {Computing grammatical functions from a configurational parse tree}, Year = 1989} @inproceedings{Lappin90a, Author = {Shalom Lappin and Michael McCord}, Booktitle = ACL, Title = {A syntactic filter on pronominal anaphora in slot grammar}, Year = 1990} @article{Lappin90b, Author = {Shalom Lappin and Michael McCord}, Journal = CL, Pages = {197--212}, Title = {Anaphora resolution in slot grammar}, Volume = 16, Year = 1990} @inproceedings{Lau93, Author = {Raymond Lau and Ronald Rosenfeld and Salim Roukos}, Booktitle = {ARPA}, Pages = {108--113}, Title = {Adaptive language modeling using the maximum entropy principle}, Year = 1993} @article{Lauritzen+88, Author = {Lauritzen, S.L. 
and Spiegelhalter, D.J.}, Date-Added = {2009-11-15 00:39:01 +0100}, Date-Modified = {2009-11-15 00:39:14 +0100}, Journal = {Journal of the Royal Statistical Society}, Number = {2}, Pages = {157--224}, Title = {Local computations with probabilities on graphical structures and their application to expert systems}, Volume = {50}, Year = {1988}} @inproceedings{Lavie+07, Address = {Prague, Czech Republic}, Author = {A. Lavie and A. Agarwal}, Booktitle = {Proceedings of the Second Workshop on Statistical Machine Translation}, Month = {June}, Pages = {228--231}, Publisher = {Association for Computational Linguistics}, Title = {{METEOR}: An Automatic Metric for {MT} Evaluation with High Levels of Correlation with Human Judgments}, Year = 2007} @book{Lavrac+94, Address = {Chichester, UK}, Author = {N. Lavrac and S. D\v{z}eroski}, Publisher = {Ellis Horwood}, Title = {Inductive logic programming}, Year = 1994} @article{Lavrac+96, Author = {N. Lavrac and I. Weber and D. Zupani\v{c} and D. Kazakov and O. \v{S}t\v{e}p\'{a}nkov\'{a} and S. D\v{z}eroski}, Journal = {AI Communications}, Number = 4, Pages = {157--206}, Title = {ILPNET repositories on WWW: Inductive logic programming systems, datasets and bibliography}, Volume = 9, Year = 1996} @inproceedings{Lawrence+95, Author = {S. Lawrence and S. Fong and C. L. Giles}, Booktitle = {IJCAI-95 Workshop Notes of the Workshop on New Approaches for Natural Language Processing}, Editor = {S. Wermter}, Pages = {1--8}, Title = {On the applicability of neural network and machine learning methodologies to natural language processing}, Year = 1995} @techreport{Lawrence+96, Address = {College Park, MD 20742}, Author = {S. Lawrence and C. L. Giles and A. C. Tsoi}, Date-Modified = {2009-11-14 18:51:35 +0100}, Institution = {Institute for Advanced Computer Studies, University of Maryland}, Keywords = {neural networks}, Number = {UMIACS-TR-96-22 and CS-TR-3617}, Title = {What size neural network gives optimal generalization? 
Convergence properties of backpropagation}, Year = 1996} @manual{Le04, Author = {Zhang Le}, Note = {{\tt http://www.nlplab.cn/zhangle/software/maxent/manual/}}, Organization = {Natural Language Processing Lab, Northeastern University, China}, Title = {Maximum Entropy Modeling Toolkit for Python and C++}, Year = 2004} @incollection{LeCun+90, Address = {San Mateo, CA}, Author = {Y. Le Cun and J. Denker and S. Solla}, Booktitle = {Advances in Neural Information Processing Systems}, Editor = {D. S. Touretzky}, Pages = {598--605}, Publisher = {Morgan Kaufmann}, Title = {Optimal brain damage}, Volume = 2, Year = 1990} @inproceedings{Lee+02, Author = {Lee, Y.K. and Ng, H.T.}, Booktitle = {Proceedings of the conference on Empirical methods in natural language processing}, Date-Added = {2009-11-15 00:39:21 +0100}, Date-Modified = {2009-11-15 00:39:40 +0100}, Keywords = {word sense disambiguation}, Pages = {41--48}, Title = {An empirical evaluation of knowledge sources and learning algorithms for word sense disambiguation}, Year = {2002}} @conference{Lee+06, Author = {Lee, J. and Seneff, S.}, Booktitle = {Ninth International Conference on Spoken Language Processing}, Date-Modified = {2011-06-19 22:58:36 +0200}, Organization = {ISCA}, Pages = {1978--1981}, Title = {{Automatic Grammar Correction for Second-Language Learners}}, Year = {2006}} @inproceedings{Lee+08, Address = {Columbus, Ohio}, Author = {J. Lee and S. Seneff}, Booktitle = {Proceedings of ACL-08: HLT}, Month = {June}, Pages = {174--182}, Publisher = {Association for Computational Linguistics}, Title = {Correcting Misuse of Verb Forms}, Url = {http://www.aclweb.org/anthology/P/P08/P08-1021}, Year = {2008}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/P/P08/P08-1021}} @article{Lee+90, Author = {G. Lee and M. Flowers and M. G. 
Dyer}, Journal = {Connection Science}, Pages = {313--345}, Title = {Learning Distributed Representations and their Application to Script-Based Story Processing}, Volume = 2, Year = 1990} @inproceedings{Lee+99, Author = {L. Lee and F. Pereira}, Booktitle = {ACL 99}, Date-Modified = {2009-12-26 21:11:28 +0100}, Pages = {33-40}, Title = {Distributional similarity models: Clustering vs. nearest neighbors}, Year = 1999} @inproceedings{Lee99, Author = {L. Lee}, Booktitle = {ACL 99}, Pages = {25-32}, Title = {Measure of Distributional Similarity}, Year = 1999} @inproceedings{Lehman94, Author = {Lehman, J. F.}, Booktitle = AAAI, Date-Modified = {2009-09-06 20:39:53 +0200}, Keywords = {wsd, word sense disambiguation}, Pages = {734--741}, Title = {Toward The Essential nature of Statistical Knowledge in Sense Resolution}, Year = 1994} @incollection{Lehnert86, Address = {Los Altos, CA}, Author = {W. G. Lehnert}, Booktitle = {Natural Language Processing}, Date-Modified = {2008-07-23 16:55:36 +0200}, Editor = {B. J. Grosz and K. {Sparck Jones} and B. L. Webber}, Keywords = {question answering}, Pages = {651-657}, Publisher = {Kaufmann}, Title = {A Conceptual Theory of Question Answering}, Year = 1986} @inproceedings{Lehnert87, Address = {Los Altos, CA}, Author = {W. Lehnert}, Booktitle = {Proceedings of the Sixth National Conference on Artificial Intelligence (AAAI-87)}, Pages = {301--306}, Publisher = {Morgan Kaufmann}, Title = {Case-based problem solving with a large knowledge base of learned cases}, Year = 1987} @book{Lenat+91, Address = {Reading, MA}, Author = {Douglas B. Lenat and R. V. Guha}, Date-Added = {2010-02-01 23:27:34 +0100}, Date-Modified = {2010-02-01 23:27:39 +0100}, Publisher = {Addison-Wesley}, Title = {Building Large Knowledge-Based Systems: Representation and Inference in CYC}, Year = {1991}} @article{Lenat90, Author = {Lenat, D. B. and R. V. Guha and K. Pittman and D. Pratt and M. 
Shepherd}, Date-Modified = {2009-09-06 20:40:11 +0200}, Journal = CACM, Keywords = {common sense}, Number = 8, Pages = {30--49}, Title = {Cyc: Toward programs with common sense}, Volume = 33, Year = 1990} @inproceedings{Lendvai+02a, Author = {P. Lendvai and A. {Van den Bosch} and E. Krahmer and M. Swerts}, Booktitle = {Proceedings of the ESSLLI Workshop on Machine Learning Approaches in Computational Linguistics}, Date-Modified = {2010-09-14 12:57:11 +0200}, Keywords = {spoken dialogue systems, ilk, vi}, Title = {Improving machine-learned detection of miscommunications in human-machine dialogues through informed data splitting}, Year = 2002} @inproceedings{Lendvai+02b, Address = {Amsterdam, The Netherlands}, Author = {P. Lendvai and A. {Van den Bosch} and E. Krahmer and M. Swerts}, Booktitle = {Selected Papers from the Twelfth Computational Linguistics in the Netherlands Meeting, CLIN-2001}, Date-Modified = {2011-06-21 18:26:20 +0200}, Keywords = {ilk, spoken dialogue systems, vi}, Publisher = {Rodopi}, Title = {Multi-feature error detection in spoken dialogue systems}, Year = 2002} @inproceedings{Lendvai+03, Author = {P. Lendvai and A. {Van den Bosch} and E. Krahmer}, Booktitle = {Proceedings of the {EACL} Workshop on Dialogue Systems: {I}nteraction, adaptation and styles of management}, Date-Modified = {2010-09-18 14:24:20 +0200}, Keywords = {ilk, spoken dialogue systems, vi}, Pages = {69-78}, Title = {Machine Learning for Shallow Interpretation of User Utterances in Spoken Dialogue Systems}, Year = 2003, Bdsk-Url-1 = {http://ilk.uvt.nl/~piroska/lendvaietal-eacl03ws.pdf}} @inproceedings{Lendvai+03b, Author = {P. Lendvai and L. Maruster}, Booktitle = {Proceedings of the {ISCA} Workshop on Error Handling in Spoken Dialogue Systems}, Date-Modified = {2010-09-14 12:58:05 +0200}, Keywords = {ilk, spoken dialogue systems, vi}, Pages = {119-122}, Title = {Process discovery for evaluating dialogue strategies}, Year = 2003} @inproceedings{Lendvai+03c, Author = {P. 
Lendvai and A. {Van den Bosch} and E. Krahmer}, Booktitle = {Proceedings of Disfluency in Spontaneous Speech Workshop ({DISS}'03)}, Date-Modified = {2010-09-14 12:57:57 +0200}, Keywords = {ilk, disfluencies, vi}, Pages = {63-66}, Title = {Memory-based Disfluency Chunking}, Year = 2003, Bdsk-Url-1 = {http://ilk.uvt.nl/~piroska/diss03.pdf}} @inproceedings{Lendvai+05, Address = {Pittsburgh, PA}, Author = {P. Lendvai and A. {Van den Bosch}}, Booktitle = {Proceedings of the AAAI Spoken Language Understanding Workshop, SLU-2005}, Date-Modified = {2010-09-14 12:57:29 +0200}, Keywords = {ilk, spoken dialogue systems, imix, vi}, Title = {Robust {ASR} lattice representation types in pragma-semantic processing of spoken input}, Year = 2005} @inproceedings{Lendvai+07, Address = {Antwerp, Belgium}, Author = {P. Lendvai and J. Geertzen}, Booktitle = {Proceedings of the 8th SIGdial Workshop on Discourse and Dialogue}, Date-Added = {2010-01-03 01:12:26 +0100}, Date-Modified = {2010-09-14 12:58:21 +0200}, Keywords = {ilk, dialogue act classification, spoken dialogue systems, imix}, Pages = {174--181}, Title = {Token-based chunking of turn-internal dialogue act sequences}, Year = {2007}, Bdsk-Url-1 = {http://ilk.uvt.nl/~piroska/lendvai_geertzen_sigdial07.pdf}} @inproceedings{Lendvai+08, Address = {Marrakech, Morocco}, Author = {P. Lendvai and S. Hunt}, Booktitle = {Proceedings of the Sixth International Language Resources and Evaluation (LREC'08)}, Date-Added = {2010-01-03 10:40:15 +0100}, Date-Modified = {2010-09-14 12:56:54 +0200}, Keywords = {ilk, information extraction, mitch}, Title = {From field notes towards a knowledge base}, Year = {2008}, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/163_paper.pdf}} @inproceedings{Lendvai03, Author = {P. 
Lendvai}, Booktitle = {Proceedings of the {EACL} Student Research Workshop.}, Date-Modified = {2010-01-02 21:26:44 +0100}, Keywords = {ilk, spoken dialogue systems}, Pages = {25-32}, Title = {Learning To Identify Fragmented Words in Spoken Discourse}, Year = 2003, Bdsk-Url-1 = {http://ilk.uvt.nl/~piroska/lendvai-studsess.pdf}} @phdthesis{Lendvai04, Author = {P. Lendvai}, Date-Modified = {2010-01-02 21:26:39 +0100}, Keywords = {ilk, spoken dialogue systems}, School = {Tilburg University}, Title = {Extracting information from spoken user input: A machine learning approach}, Year = 2004} @inproceedings{Lendvai05b, Address = {Porto, Portugal}, Author = {P. Lendvai}, Booktitle = {Proceedings of The Second International Workshop on Knowledge Discovery and Ontologies, KDO-2005}, Date-Modified = {2010-09-14 12:56:45 +0200}, Keywords = {ilk, imix, taxonomy induction}, Pages = {31--38}, Title = {Conceptual taxonomy identification in medical documents}, Year = 2005, Bdsk-Url-1 = {http://ilk.uvt.nl/~piroska/kdo05.pdf}} @inproceedings{Lendvai08, Address = {Berlin, Germany}, Author = {P. Lendvai}, Booktitle = {Proceedings of the Computational Linguistics and Intelligent Text Processing 9th International Conference, CICLing 2008}, Date-Added = {2010-01-03 10:29:05 +0100}, Date-Modified = {2010-09-14 12:57:43 +0200}, Editor = {A. Gelbukh}, Keywords = {ilk, textual databases, alignment-based learning, mitch}, Pages = {522--531}, Publisher = {Springer Verlag}, Series = {Lecture Notes in Computer Science}, Title = {Alignment-based expansion of textual database fields}, Volume = {4919}, Year = {2008}} @inproceedings{Lepage+96, Author = {Y. Lepage and A. Shin-ichi}, Booktitle = COLING96, Date-Modified = {2008-07-23 16:23:50 +0200}, Keywords = {analogy, De Saussure}, Pages = {717--722}, Title = {Saussurian analogy: a theoretical account and its application}, Year = 1996} @inproceedings{Leslie+02, Author = {Leslie, C. and Eskin, E. 
and Noble, W.S.}, Booktitle = {Proceedings of the Pacific Symposium on Biocomputing}, Date-Added = {2009-11-15 00:39:45 +0100}, Date-Modified = {2009-11-15 00:39:55 +0100}, Pages = {566--575}, Title = {{The spectrum kernel: A string kernel for SVM protein classification}}, Volume = {7}, Year = {2002}} @inproceedings{Leusch06, Address = {Trento, Italy}, Author = {G. Leusch and N. Ueffing and H. Ney}, Booktitle = {Proceedings of EACL 2006}, Pages = {241-248}, Title = {{CDER}: Efficient {MT} Evaluation Using Block Movements}, Year = 2006} @inproceedings{Leveling+06, Address = {Seattle, WA}, Author = {J. Leveling and S. Hartrumpf}, Booktitle = {Proceedings of GIR-2006, the 3rd Workshop on Geographical Information Retrieval}, Title = {On metonymy recognition for geographic {IR}}, Year = 2007} @article{Levelt+91, Author = {W.J.M. Levelt and H. Schriefers and D. Vorberg and A.S. Meyer and T. Pechman and J. Havinga}, Journal = {Psychological Review}, Number = 1, Pages = {122--142}, Title = {The Time Course of Lexical Access in Speech Production: A Study of Picture Naming}, Volume = 98, Year = 1991} @book{Levelt89, Address = {Cambridge, MA}, Author = {W. J. M. Levelt}, Publisher = MIT, Title = {Speaking: From intention to articulation}, Year = 1989} @article{Levenshtein66, Author = {V. Levenshtein}, Journal = {Soviet Physics Doklady}, Pages = {707--710}, Title = {Binary codes capable of correcting deletions, insertions, and reversals}, Volume = 10, Year = 1966} @book{Levin89, Address = {Evanston}, Author = {B. Levin}, Publisher = {Northwestern University}, Title = {Towards a Lexical Organization of English Verbs}, Year = 1989} @book{Levin93, Address = {Chicago}, Author = {B. Levin}, Publisher = {Chicago University Press}, Title = {Towards a Lexical Organization of English Verbs}, Year = 1993} @book{Levin93b, Address = {Chicago IL}, Author = {B. 
Levin}, Publisher = {University of Chicago Press}, Title = {English Verb Classes and Alternations: A Preliminary Investigation}, Year = 1993} @inproceedings{Levy+91, Address = {{Singapore}}, Author = {Levy, J and Shillcock, R. and Chater, N.}, Booktitle = {Proceedings of IJCNN'91}, Title = {Connectionist modelling of phonotactic constraints in word recognition}, Year = 1991} @article{Lewis+04, Author = {Lewis, D.D. and Yang, Y. and Rose, T.G. and Li, F.}, Date-Added = {2009-11-15 00:39:59 +0100}, Date-Modified = {2010-09-20 00:17:27 +0200}, Journal = {Journal of Machine Learning Research}, Pages = {361--397}, Title = {{RCV1}: A New Benchmark Collection for Text Categorization Research}, Volume = {5}, Year = {2004}, Bdsk-Url-1 = {http://www.jmlr.org/papers/volume5/lewis04a/lewis04a.pdf}} @inproceedings{Lewis+94, Author = {D. Lewis and J. Catlett}, Booktitle = {Machine Learning: Proceedings of the 11th International Conference}, Title = {Heterogeneous uncertainty sampling for supervised learning}, Year = 1994} @inproceedings{Lewis+94b, Author = {D. Lewis and W. Gale}, Booktitle = {Proceedings of ACM-SIGIR Conference on Information Retrieval}, Title = {Training text classifiers by uncertainty sampling}, Year = 1994} @inproceedings{Lewis+96, Author = {D. Lewis and R. E. Schapire and J. P. Callan and R. Papka}, Booktitle = {SIGIR '96: Proceedings of the 19th Int. Conference on Research and Development in Information Retrieval, 1996.}, Title = {Training algorithms for linear text classifiers}, Year = 1996} @inproceedings{Lewis92, Author = {D. Lewis}, Booktitle = {Proceedings of ACM-SIGIR Conference on Information Retrieval}, Title = {An evaluation of phrasal and clustered representations on a text categorization problem}, Year = 1992} @inproceedings{Li+01, Author = {X. Li and D. Roth}, Booktitle = CoNLL, Title = {Exploring Evidence for Shallow Parsing}, Year = 2001} @inproceedings{Li+02, Author = {X. Li and D. 
Roth}, Booktitle = {COLING 2002, The 19th International Conference on Computational Linguistics}, Title = {Learning Question Classifiers}, Year = 2002} @inproceedings{Li+96, Author = {Hang Li and Naoki Abe}, Booktitle = COLING96, Organization = {Center for Sprogteknologi}, Title = {Clustering Words with the MDL Principle}, Year = 1996} @inproceedings{Li+98, Author = {H. Li and N. Abe}, Booktitle = {Proceedings of the 17th International Conference on Computational Linguistics}, Pages = {749-755}, Title = {Word clustering and disambiguation based on co-occurrence data.}, Year = 1998} @inproceedings{Li+10, Address = {Uppsala, Sweden}, Author = {Li, J. and Zhou, G. and Ng, H.T.}, Booktitle = {Proceedings of the 48th Annual Meeting of the Association for Computational Linguistics}, Month = {July}, Pages = {1108--1117}, Publisher = {Association for Computational Linguistics}, Title = {Joint Syntactic and Semantic Parsing of Chinese}, Year = {2010}} @inproceedings{Liang+06, Author = {Liang, P. and Bouchard-C\^{o}t\'{e}, A. and Klein, D. and Taskar, B.}, Booktitle = {Proceedings of the 21st International Conference on Computational Linguistics and 44th Annual Meeting of the Association for Computational Linguistics}, Date-Added = {2009-11-15 00:40:22 +0100}, Date-Modified = {2009-11-15 00:40:34 +0100}, Pages = {761--768}, Title = {An End-to-End Discriminative Approach to Machine Translation}, Year = {2006}} @phdthesis{Liang83, Address = {Stanford, CA}, Author = {Franklin Mark Liang}, School = {Stanford University}, Title = {Word Hy-phen-a-tion by Com-put-er}, Year = 1983} @article{Liberman+74, Author = {I. Y. Liberman and D. Schankweiler and D. Fisher and D. Carter}, Journal = {Journal of Experimental Child Psychology}, Pages = {202--212}, Title = {Reading and the Awareness of Linguistic Segments}, Volume = 18, Year = 1974} @article{Liberman+77, Author = {M. Liberman and A. 
Prince}, Journal = {Linguistic Inquiry}, Number = 8, Pages = {249-336}, Title = {On stress and linguistic rhythm}, Year = 1977} @incollection{Liberman+80, Address = {Baltimore}, Author = {I. Y. Liberman and A. M. Liberman and I. G. Mattingly and D. L. Shankweiler}, Booktitle = {Orthography, Reading and Dyslexia}, Editor = {J. F. Kavanagh and R. L. Venezky}, Pages = {137--153}, Publisher = {University Park Press}, Title = {Orthography and the beginning reader}, Year = 1980} @inproceedings{Liebregts+09, Author = {R. Liebregts and T. Bogers}, Booktitle = {Proceedings of the 31st European Conference on Information Retrieval (ECIR 2009)}, Date-Added = {2010-01-02 19:31:21 +0100}, Date-Modified = {2010-01-02 19:32:57 +0100}, Keywords = {ilk, expert search, expert retrieval}, Pages = {587--594}, Publisher = {Springer Verlag}, Series = {Lecture Notes in Computer Science}, Title = {Design and evaluation of a university-wide expert search engine}, Volume = {5478}, Year = {2009}, Bdsk-Url-1 = {http://ilk.uvt.nl/~toine/publications/bogers.2009.ecir2009-paper.pdf}} @article{Light+00, Author = {Light, M. and Mann, G. and Riloff, E. and Breck, E.}, Journal = JNLE, Note = {{forthcoming}}, Title = {{Analyses for Elucidating Current Question Answering Technology}}, Year = 2001} @inproceedings{Lin+01, Author = {D. Lin and P. Pantel}, Booktitle = {Proceedings of ACM SIGKDD Conference on Knowledge Discovery and Data Mining}, Pages = {317--322}, Title = {Induction of Semantic Classes from Natural Language Text}, Year = 2001} @inproceedings{Lin98, Author = {D. Lin}, Booktitle = {Proceedings of the Fifteenth International Conference on Machine Learning}, Editor = {J. Shavlik}, Pages = {296--304}, Publisher = {Morgan Kaufmann}, Title = {An information-theoretic definition of similarity}, Year = 1998} @unpublished{Ling+96, Author = {C. X. Ling and H. 
Wang}, Date-Modified = {2009-11-14 19:00:50 +0100}, Keywords = {grapheme-phoneme conversion, decision trees}, Note = {Submitted}, Title = {A decision-tree model for reading aloud with automatic alignment and grapheme generation}, Year = 1996} @article{Ling94, Author = {C. X. Ling}, Journal = {Journal of Artificial Intelligence Research}, Pages = {209--229}, Title = {Learning the past tense of {E}nglish verbs: The symbolic pattern associator vs. connectionist models}, Volume = 1, Year = 1994} @inproceedings{Litkowski00, Author = {K.C. Litkowski}, Booktitle = {Proceedings of TREC-9}, Organization = {NIST}, Title = {Syntactic Clues and Lexical Resources in Question-Answering}, Year = 2001} @inproceedings{Litman+00, Author = {D. Litman and S. Singh and M. Walker and M. Kearns}, Booktitle = COLING, Date-Modified = {2009-12-26 21:11:42 +0100}, Title = {Automatic Optimization of Dialogue Management}, Year = 2000} @article{Litman96, Author = {Diane J. Litman}, Journal = {Journal of Artificial Intelligence Research}, Pages = {53--94}, Title = {Cue Phrase Classification Using Machine Learning}, Volume = 5, Year = 1996} @article{Littlestone88, Author = {Littlestone, N.}, Journal = {Machine Learning}, Pages = {285--318}, Title = {Learning Quickly when irrelevant attributes abound: A new linear-threshold algorithm}, Volume = 2, Year = 1988} @inproceedings{Lluis+09, Address = {Boulder, CO}, Author = {X. Llu\'is and S. Bott and Ll. M\'arquez}, Booktitle = {Proc. of the {CoNLL} 2009: Shared Task}, Date-Added = {2009-12-26 20:57:57 +0100}, Date-Modified = {2009-12-26 21:07:54 +0100}, Pages = {79--86}, Title = {A second-order joint eisner model for syntactic and semantic dependency parsing}, Year = {2009}} @book{Lock80, Address = {{London}}, Author = {A. Lock}, Publisher = {{Academic Press}}, Title = {{The Guided Reinvention of Language}}, Year = 1980} @article{Lombardi92, Author = {L. 
Lombardi}, Journal = {Journal of Memory and Language}, Pages = {713--733}, Title = {The Regeneration of Syntax in Short Term Memory}, Volume = 31, Year = 1992} @article{Lopresti+97, Author = {Daniel Lopresti and Jiangying Zhou}, Date-Added = {2010-01-29 15:26:14 +0100}, Date-Modified = {2010-01-29 15:26:19 +0100}, Journal = {Computer Vision and Image Understanding}, Month = {July}, Number = {1}, Pages = {39--47}, Title = {Using Consensus Sequence Voting to Correct {OCR} Errors}, Volume = {67}, Year = {1997}} @unpublished{Lu+95, Author = {B.-L. Lu and K. Ito and H. Kita and Y. Nishikawa}, Note = {unpublished manuscript}, Title = {A parallel and modular multi-sieving neural network architecture for constructive learning}, Year = 1995} @inproceedings{Lucassen+84, Author = {J. M. Lucassen and R. L. Mercer}, Booktitle = {Proceedings of {{\sc icassp}} '84, San Diego}, Pages = {42.5.1--42.5.4}, Title = {An information theoretic approach to the automatic determination of phonemic baseforms}, Year = 1984} @incollection{Luce+90, Address = {Cambridge, MA}, Author = {P. Luce and D. Pisoni and S. Goldinger}, Booktitle = {Cognitive Models of Speech Processing}, Editor = {G. T. M. Altmann}, Pages = {122--147}, Publisher = MIT, Title = {Similarity neighborhoods of spoken words}, Year = 1990} @article{Luk+96, Author = {R. Luk and R. Damper}, Journal = {Computer Speech and Language}, Pages = {133--153}, Title = {Stochastic phonographic transduction for {E}nglish}, Volume = 10, Year = 1996} @incollection{MUC3, Address = {San Mateo CA}, Booktitle = {Proceedings of the Third Message Understanding Conference}, Editor = {Sundheim, B.}, Publisher = {Morgan Kaufmann Publishers}, Year = 1991} @incollection{MUC4, Address = {San Mateo CA}, Booktitle = {Proceedings of the Fourth Message Understanding Conference}, Editor = {Sundheim, B.}, Publisher = {Morgan Kaufmann Publishers}, Year = 1992} @conference{Kernighan+90, Author = {M.D. Kernighan and K.W. Church and W.A. Gale}, Booktitle = {Proc. 
of COLING}, Date-Modified = {2009-11-14 18:57:59 +0100}, Keywords = {spelling correction}, Pages = {205--210}, Title = {{A spelling correction program based on a noisy channel}}} @incollection{MUC5, Address = {San Mateo CA}, Booktitle = {Proceedings of the Fifth Message Understanding Conference}, Editor = {Sundheim, B.}, Publisher = {Morgan Kaufmann Publishers}, Year = 1993} @inproceedings{Maamouri+04, Address = {Geneva, Switzerland}, Author = {Mohamed Maamouri and Ann Bies}, Booktitle = {Proceedings of the Workshop on Computational Approaches to Arabic Script-based Languages, COLING 2004}, Title = {Developing an Arabic Treebank: Methods, Guidelines, Procedures, and Tools}, Year = 2004} @inproceedings{Maarek89, Author = {Yoelle Maarek and Frank Smadja}, Booktitle = {Proceedings of SIGIR}, Pages = {198--206}, Title = {Full text indexing based on lexical relations -- {An} application: Software libraries}, Year = 1989} @article{MacDonald+94, Author = {MacDonald, M. C. and Pearlmutter, N. J. and Seidenberg, M. S.}, Journal = {Psychological Review}, Pages = {676--703}, Title = {The lexical nature of syntactic ambiguity resolution.}, Volume = 101, Year = 1994} @inproceedings{MacKenzie+01, Address = {New York, NY, USA}, Author = {I. S. MacKenzie and H. Kober and D. Smith and T. Jones and E. Skepner}, Booktitle = {{UIST} '01: {P}roceedings of the 14th annual {ACM} {S}ymposium on {U}ser {I}nterface {S}oftware and {T}echnology}, Date-Modified = {2010-06-25 21:31:38 +0200}, Location = {Orlando, Florida}, Pages = {111--120}, Publisher = {ACM}, Title = {{L}etter{W}ise: {P}refix-{B}ased {D}isambiguation {F}or {M}obile {T}ext {I}nput}, Year = 2001} @article{MacLeod+87, Author = {J. E. S. MacLeod and A. Luk and D. M. 
Titterington}, Journal = {IEEE Transactions on Systems, Man, and Cybernetics}, Number = 4, Pages = {689--696}, Title = {A Re-Examination of the Distance-Weighted $k$-Nearest Neighbor Classification Rule}, Volume = {SMC-17}, Year = 1987} @article{MacMahon+94, Author = {J. MacMahon and F.J. Smith}, Journal = {Artificial Intelligence and the Simulation of Behaviour Quarterly}, Title = {Structural Tags, Annealing and Automatic Word Classification}, Volume = 90, Year = 1994} @incollection{Mackworth91, Address = {New York, NY, USA}, Author = {A.K. Mackworth}, Booktitle = {Encyclopedia of Artificial Intelligence}, Date-Added = {2009-11-15 00:40:40 +0100}, Date-Modified = {2009-11-15 00:40:40 +0100}, Editor = {S. Shapiro}, Pages = {285--293}, Publisher = {J. Wiley and Sons}, Title = {Constraint Satisfaction}, Year = {1991}} @inproceedings{Macleod+96, Author = {Catherine Macleod and Adam Meyers and Ralph Grishman}, Booktitle = COLING96, Pages = {472--477}, Title = {The Influence of Tagging on the Classification of Lexical Complements}, Volume = 1, Year = 1996} @inproceedings{Madsen+05, Address = {New York, NY, USA}, Author = {R.E. Madsen and D. Kauchak and C. Elkan}, Booktitle = {Proceedings of the 22nd International Conference on Machine learning}, Pages = {545--552}, Publisher = {ACM Press}, Title = {Modeling word burstiness using the Dirichlet distribution}, Year = 2005} @inproceedings{Magerman+90, Author = {D. Magerman and M. Marcus}, Booktitle = {Proceedings of 8th. conference on AI (AAAI-90)}, Pages = {984--989}, Title = {Parsing a natural language using mutual information statistics}, Volume = 2, Year = 1990} @phdthesis{Magerman94, Author = {D. M. Magerman}, School = {Stanford University}, Title = {Natural language parsing as statistical pattern recognition}, Year = 1994} @techreport{Magerman95, Author = {Magerman, D.}, Booktitle = {IBM T.J. 
Watson Research Center}, Title = {Parsing as Statistical Pattern Recognition}, Year = 1995} @inproceedings{Magerman95b, Author = {Magerman, D. M.}, Booktitle = {Proceedings of ACL-95}, Date-Modified = {2009-11-14 19:02:51 +0100}, Keywords = {parsing, decision trees}, Pages = {276--283}, Title = {Statistical decision-tree models for parsing}, Year = 1995} @article{Mahootian+??, Author = {S. Mahootian and B. Santorini}, Journal = {Linguistic Inquiry}, Pages = {464-479}, Title = {Code Switching and the complement/adjunct distinction}, Volume = 27, Year = {???}} @inproceedings{Maletic+00, Author = {J. Maletic and A. Marcus}, Booktitle = {Proceedings of the Conference on Information Quality (IQ 2000)}, Pages = {200--209}, Title = {Data Cleansing: Beyond Integrity Analysis}, Year = 2000} @inproceedings{Malouf+04, Author = {R. Malouf and G. {Van Noord}}, Booktitle = {Proceedings of the IJCNLP-04 Workshop Beyond Shallow Analyses - Formalisms and statistical modeling for deep analyses}, Title = {Wide Coverage Parsing with Stochastic Attribute Value Grammars}, Year = 2004} @inproceedings{Malouf00, Address = {New Brunswick, NJ}, Author = {R. Malouf}, Booktitle = ACL00, Pages = {85--92}, Publisher = {ACL}, Title = {The order of prenominal adjectives in natural language generation}, Year = 2000} @inbook{Malsburg67, Address = {Cambrigde MA}, Author = {Malsburg, C. von der}, Booktitle = {Pattern Recognition by Self-Organizing Neural Networks}, Editor = {G. Carpenter and S. Grossberg}, Publisher = {Bradford Books}, Title = {Self-Organization of Orientation Sensitive Cells in the Striate Cortex}, Year = 1967} @incollection{Malt96, Author = {Malt, B.C.}, Booktitle = {Cognitive Linguistics in the Redwoods, the Expansion of a New Paradigm in Linguistics}, Editor = {E.H. 
Casad}, Pages = {147--174}, Publisher = {Mouton de Gruyter}, Series = {Cognitive Linguistics Research}, Title = {From cognitive psychology to cognitive linguistics and back again: The study of category structure}, Volume = 6, Year = 1996} @inproceedings{Mandber+90, Address = {Philadelphia, PA, USA}, Author = {U. Manber and G. Myers}, Booktitle = {SODA '90: Proceedings of the first annual ACM-SIAM symposium on Discrete algorithms}, Isbn = {0-89871-251-3}, Location = {San Francisco, California, United States}, Pages = {319--327}, Publisher = {Society for Industrial and Applied Mathematics}, Title = {Suffix arrays: a new method for on-line string searches}, Year = {1990}} @article{Manber+92, Author = {U. Manber and S. Wu}, Journal = {Byte}, Month = {November}, Title = {Approximate Pattern Matching}, Year = 1992} @article{Manber+93, Author = {U. Manber and G. Myers}, Doi = {10.1137/0222058}, Journal = {SIAM Journal on Computing}, Keywords = {string searching; string matching; pattern matching; suffix trees; algorithms; text indexing; inverted indices}, Number = {5}, Pages = {935-948}, Publisher = {SIAM}, Title = {Suffix Arrays: A New Method for On-Line String Searches}, Volume = {22}, Year = {1993}, Bdsk-Url-1 = {http://dx.doi.org/10.1137/0222058}} @inproceedings{Mangu+97, Author = {L. Mangu and E. Brill}, Booktitle = ICML, Pages = {187--194}, Title = {Automatic rule acquisition for spelling correction}, Year = 1997} @article{Mannes+91, Author = {S. M. Mannes and S. M. Doane}, Journal = {Connection Science}, Pages = {61--87}, Title = {A Hybrid Model of Script Generation: or Getting the Best from Both Worlds}, Volume = 3, Year = 1991} @article{Manning+97, Address = {http://xxx.lanl.gov/archive/cmp-lg}, Author = {C. Manning and B. Carpenter}, Journal = {cmp-lg}, Number = 97110003, Title = {Probabilistic Parsing Using Left Corner Language Models}, Year = 1997} @book{Manning+99, Address = {Cambridge, MA}, Author = {C. Manning and H. 
Sch{\"u}tze}, Publisher = MIT, Title = {Foundations of Statistical Natural Language Processing}, Year = 1999} @inproceedings{Manning93, Author = {C. Manning}, Booktitle = ACL93, Pages = {235--242}, Title = {Automatic acquisition of a large subcategorization dictionary from corpora}, Year = 1993} @article{Manzini+04, Author = {G. Manzini and P. Ferragina}, Journal = {Algorithmica}, Pages = {33--50}, Title = {Engineering a lightweight suffix array construction algorithm}, Volume = {40}, Year = {2004}} @inproceedings{Marcken95, Author = {C. de Marcken}, Booktitle = {Proceedings of third workshop on very large corpora}, Month = jun, Pages = {14--26}, Title = {Lexical Heads, Phrase Structure and the Induction of Grammar}, Year = 1995} @article{Marcus+93, Author = {M. Marcus and S. Santorini and M. Marcinkiewicz}, Journal = {Computational Linguistics}, Number = 2, Pages = {313--330}, Title = {Building a {L}arge {A}nnotated {C}orpus of {E}nglish: the {P}enn {T}reebank}, Volume = 19, Year = 1993} @inproceedings{Marcus+94, Author = {M. Marcus and G. Kim and M. A. M. and R. MacIntyre and A. Bies and M. Ferguson and K. Katz and B. Schasberger}, Booktitle = {Proceedings of ARPA Human Technology Workshop}, Pages = {110--115}, Title = {The Penn Treebank: Annotating Predicate Argument Structure}, Year = 1994} @article{Marcus+95, Author = {G.F. Marcus and U. Brinkmann and H. Clahsen and R. Wiese and S. Pinker}, Journal = {Cognitive Psychology}, Pages = {189--256}, Title = {German inflection: The exception that proves the rule.}, Volume = 29, Year = 1995} @article{Markov06, Author = {A. A. Markov}, Date-Modified = {2010-09-20 00:19:27 +0200}, Journal = {Science in Context}, Number = {4}, Pages = {591--600}, Title = {An Example of Statistical Investigation of the Text {E}ugene {O}negin Concerning the Connection of Samples in Chains}, Volume = {19}, Year = {2006}} @inproceedings{Markovitch+88, Address = {Ann Arbor, MI}, Author = {S. Markovitch and P. D. 
Scott}, Booktitle = {Proceedings of the Fifth International Conference on Machine Learning}, Pages = {459--465}, Publisher = {Morgan Kaufmann}, Title = {The role of forgetting in learning}, Year = 1988} @inproceedings{Marques+96, Author = {Nuno C. Marques and Gabriel Pereira Lopes}, Booktitle = {Proceedings of the Fifth International Conference on The Cognitive Science of {NLP}, {CSNLP}, Dublin City University}, Date-Modified = {2009-11-14 17:36:54 +0100}, Keywords = {part-of-speech tagging, Portuguese}, Title = {Using Neural Nets for Portuguese Part-of-Speech Tagging}, Year = 1996} @inproceedings{Marquez+97, Address = {Madrid, Spain}, Author = {L. {M\`{a}rquez} and L. Padr\'{o}}, Booktitle = {Proceedings of EACL/ACL 1997}, Date-Modified = {2011-06-19 23:00:10 +0200}, Pages = {238--245}, Title = {A Flexible POS Tagger Using an Automatically Acquired Language Model}, Year = 1997} @inproceedings{Marquez+98, Address = {Berlin}, Author = {L. M{\`a}rquez and H. Rodr{\'\i}guez}, Booktitle = {Proceedings of the 10th European Conference on Machine Learning ({ECML}-98)}, Date-Modified = {2009-11-14 19:02:42 +0100}, Editor = {Claire N{\'e}dellec and C{\'e}line Rouveirol}, Isbn = {3-540-64417-2}, Keywords = {part-of-speech tagging, decision trees}, Month = apr # { 21--23}, Pages = {25--36}, Publisher = {Springer}, Series = {LNAI}, Title = {Part-of-Speech Tagging Using Decision Trees}, Volume = 1398, Year = 1998} @inproceedings{Marquez+99, Author = {M\`arquez, L. and Rodr\'{\i}guez, H. and Carmona, J. and Montolio, J.}, Booktitle = {{Proceedings of the 1999 Joint SIGDAT Conference on Empirical Methods in Natural Language Processing and Very Large Corpora}}, Pages = {53--62}, Title = {{Improving POS Tagging Using Machine-Learning Techniques}}, Year = 1999} @article{Marshall+73, Author = {J. C. Marshall and F. 
Newcombe}, Journal = {Journal of Psycholinguistic Research}, Pages = {175--199}, Title = {Patterns of Paralexia: A Psycholinguistic Approach}, Volume = 2, Year = 1973} @inproceedings{Marsi+03, Address = {New Brunswick, NJ}, Author = {E. Marsi and M. Reynaert and A. {Van den Bosch} and W. Daelemans and V. Hoste}, Booktitle = {Proceedings of the 41st Annual Meeting of the Association for Computational Linguistics}, Date-Modified = {2010-09-14 11:57:13 +0200}, Keywords = {ilk, prosit, prosody, memory-based language processing, memory-based learning, pitch accents}, Pages = {489--496}, Publisher = {ACL}, Title = {Learning to predict pitch accents and prosodic boundaries in {D}utch}, Year = 2003, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/prosit-acl03.pdf}} @inproceedings{Marsi+05, Address = {Ann Arbor, MI}, Author = {E. Marsi and A. {Van den Bosch} and A. Soudi}, Booktitle = {Proceedings of the ACL Workshop on Computational Approaches to Semitic Languages}, Date-Modified = {2010-09-14 13:06:59 +0200}, Keywords = {ilk, part-of-speech tagging, morphological analysis, memory-based learning, Arabic, vi}, Title = {Memory-based morphological analysis generation and part-of-speech tagging of Arabic}, Year = 2006} @article{Marslen-Wilson75, Author = {Marslen-Wilson, W.D}, Journal = {{Science}}, Pages = {226--228}, Title = {{Sentence perception as an interactive parallel process}}, Volume = 189, Year = 1975} @article{Marslen87, Author = {W. Marslen-Wilson}, Journal = {Cognition}, Pages = {71--102}, Title = {Functional parallellism in spoken word recognition}, Volume = 25, Year = 1987} @incollection{Marslen90, Address = {Cambridge, MA}, Author = {W. Marslen-Wilson}, Booktitle = {Cognitive Models of Speech Processing}, Editor = {G. T. M. Altmann}, Pages = {148--172}, Publisher = MIT, Title = {Activation, Competition, and Frequency in Lexical Access}, Year = 1990} @incollection{Martin+83, Address = {London}, Author = {W. Martin and B. Al and P. 
van Sterkenburg}, Booktitle = {Lexicography: Principles and Practice}, Editor = {R. Hartman}, Publisher = {Academic Press}, Series = {Applied Language Studies Series}, Title = {On the processing of a text corpus: From textual data to lexicographical information}, Year = 1983} @inbook{Martin+87, Author = {W.A. Martin and K.W. Church and R.S. Patil}, Booktitle = {{Natural Language Parsing Systems}}, Editor = {L. Bolc}, Publisher = {Springer Verlag}, Title = {{Preliminary Analysis of a Breadth-First Parsing Algorithm: Theoretical and Experimental Results}}, Year = 1987} @article{Martin+94, Author = {Martin, R. and J. Shelton and L.Yaffee}, Journal = {Journal of Memory and Language}, Pages = {83--111}, Title = {Language Processing and Working Memory: Neuropsychological Evidence for Separate Phonological and Semantic Capacities}, Volume = 33, Year = 1994} @inproceedings{Martinez+02, Author = {D. Martinez and E. Agirre and L. M\'{a}rquez}, Booktitle = {Proceedings of the 19th International Conference on Computational Linguistics (COLING 2002)}, Date-Added = {2009-11-15 00:40:48 +0100}, Date-Modified = {2009-11-15 00:41:06 +0100}, Keywords = {word sense disambiguation}, Pages = {1--7}, Title = {Syntactic Features for High Precision Word Sense Disambiguation}, Year = {2002}} @article{Maruster+06, Author = {L. Maruster and A. Weijters and W. {Van der Aalst} and A. 
{Van den Bosch}}, Date-Modified = {2010-09-18 14:25:05 +0200}, Journal = {Data Mining and Knowledge Discovery}, Keywords = {ilk, process mining, rule learning, vici}, Pages = {67--87}, Title = {A rule-based approach for process discovery: Dealing with noise and imbalance in process logs}, Volume = 13, Year = 2006, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/DAMI24_MinorRevisions_withReferences.pdf}} @inproceedings{Masui98, Address = {New York, NY, USA}, Author = {Toshiyuki Masui}, Booktitle = {{CHI} '98: {P}roceedings of the {SIGCHI} {C}onference on {H}uman {F}actors in {C}omputing {S}ystems}, Location = {Los Angeles, California, United States}, Pages = {328--335}, Publisher = {ACM Press/Addison-Wesley Publishing Co.}, Title = {{A}n {E}fficient {T}ext {I}nput {M}ethod for {P}en-{B}ased {C}omputers}, Year = 1998} @techreport{Matan95, Author = {Ofer Matan}, Department = {Logic Group}, Institution = {Stanford University}, Number = {LOGIC-95-4}, Title = {On-Site Learning}, Year = 1995} @inproceedings{Matsumoto93, Author = {Yuji Matsumoto and Hiroyuki Ishimoto and Takehito Utsuro and Makoto Nagao}, Booktitle = ACL, Title = {Structural matching of parallel texts}, Year = 1993} @book{Matthews74, Address = {Cambridge, UK}, Author = {P. H. Matthews}, Publisher = CUP, Title = {Morphology}, Year = 1974} @article{Matthews88, Author = {Alison Matthews and Martin S. Chodorow}, Journal = {Journal of Memory and Language}, Pages = {245--260}, Title = {Pronoun resolution in two-clause sentences: Effects of ambiguity, antecedent location and depth of embedding}, Volume = 27, Year = 1988} @inbook{Maturana75, Address = {Paris}, Author = {H. R. Maturana}, Booktitle = {{Foundations of Language Development, a Multidisciplinary approach}}, Editor = {Lenneberg, E. H. 
and Lenneberg E.}, Publisher = {The UNESCO Press}, Title = {{Biology of Language, the Epistomology of Reality}}, Volume = 2, Year = 1975} @inproceedings{Maxwell02, Address = {Paris}, Author = {Mike Maxwell}, Booktitle = {LREC 2002: Third International Conference on Language Resources and Evaluation}, Editor = {Manuel Gonz\'alez Rodr\'iguez and Carmen Paz Su\'arez Araujo}, Pages = {967-974}, Publisher = {ELRA}, Title = {Resources for Morphology Learning and Evaluation}, Volume = {III}, Year = 2002} @article{Mays+91, Author = {E. Mays and F. J. Damerau and R. L. Mercer}, Date-Added = {2010-01-29 15:11:57 +0100}, Date-Modified = {2014-01-06 20:35:01 +0000}, Journal = {Information Processing and Management}, Number = {5}, Pages = {517--522}, Title = {Context based spelling correction}, Volume = {27}, Year = {1991}} @inproceedings{McCallum+00, Address = {Stanford, CA}, Author = {A. McCallum and D. Freitag and F. Pereira}, Booktitle = {Proceedings of the 17th International Conference on Machine Learning}, Title = {Maximum Entropy {M}arkov Models for Information Extraction and Segmentation}, Year = 2000} @article{McCarthy81, Author = {J. McCarthy}, Journal = {Linguistic Inquiry}, Pages = {373--418}, Title = {A Prosodic Theory of Non-concatenative Morphology}, Volume = 12, Year = 1981} @inproceedings{McCarthy97, Author = {D. McCarthy}, Booktitle = {Automatic Information Extraction and Building of Lexical Semantic Resources for {NLP} Applications, {ACL/EACL} Workshop, Madrid}, Date-Modified = {2009-09-06 20:40:18 +0200}, Keywords = {wsd, word sense disambiguation}, Month = {July}, Title = {Word Sense Disambiguation for Acquisition of Selectional Preferences}, Year = 1997} @book{McClelland+86, Address = {Cambridge, MA}, Editor = {J. L. McClelland and D. E. 
Rumelhart}, Publisher = MIT, Title = {Parallel Distributed Processing: Explorations in the Microstructure of Cognition}, Volume = {2: Psychological and Biological Models}, Year = 1986} @incollection{McClelland+86b, Address = {Cambridge, MA}, Author = {J. L. McClelland and J. L. Elman}, Booktitle = {Parallel Distributed Processing: Explorations in the Microstructure of Cognition}, Editor = {J. L. McClelland and D. E. Rumelhart}, Pages = {58--121}, Publisher = MIT, Title = {Interactive Processes in Speech Perception: The TRACE model}, Volume = {1: Psychological and Biological Models}, Year = 1986} @incollection{McClelland+86c, Address = {Cambridge, MA}, Author = {McClelland, J.L. and A.H. Kawamoto}, Booktitle = {Parallel Distributed Processing, explorations in the microstructure of cognition}, Chapter = 19, Editor = {McClelland, J.L. and D.E. Rumelhart and the PDP research group}, Publisher = MIT, Title = {Mechanisms of Sentence Processing: Assigning Roles to Constituents of Sentences}, Volume = 2, Year = 1986} @incollection{McCord90, Address = {Berlin}, Author = {McCord, M. C.}, Booktitle = {Natural Language and Logic: International Scientific Symposium}, Editor = {R. Studer}, Pages = {118--145}, Publisher = {Springer Verlag}, Series = {Lecture Notes in Computer Science}, Title = {Slot Grammar: A System for Simpler Construction of Practical Natural Language Grammars}, Year = 1990} @techreport{McCord91, Author = {McCord, M. C.}, Institution = {IBM Research Report}, Note = {in J. Wedekind and C. Rohrer (Eds.), {\em Unification in Grammar}, MIT Press}, Number = {RC 17313}, Title = {The Slot Grammar System}, Year = 1991} @inproceedings{McDonald+05, Author = {McDonald, R. and Pereira, F. and Ribarov, K. 
and Haji{\v{c}}, J.}, Booktitle = {Proceedings of the conference on Human Language Technology and Empirical Methods in Natural Language Processing}, Date-Added = {2009-11-15 00:41:24 +0100}, Date-Modified = {2009-11-15 00:41:48 +0100}, Pages = {523--530}, Title = {{Non-projective dependency parsing using spanning tree algorithms}}, Year = {2005}} @inproceedings{McDonald+06, Author = {McDonald, R. and Pereira, F.}, Booktitle = {Proceedings of the 11th Conference of the European Chapter of the Association for Computational Linguistics (EACL)}, Date-Added = {2009-11-15 00:41:24 +0100}, Date-Modified = {2009-11-15 00:41:37 +0100}, Pages = {81--88}, Title = {{Online learning of approximate dependency parsing algorithms}}, Year = {2006}} @article{McIlroy82, Author = {M.D. McIlroy}, Date-Added = {2010-01-29 15:12:09 +0100}, Date-Modified = {2010-01-29 15:12:16 +0100}, Journal = {IEEE Trans. on Communications}, Mrnumber = {TM 81-11271-2}, Pages = {91--99}, Title = {Development of a Spelling List}, Volume = {30}, Year = {1982}} @article{McKoon+94, Author = {G. McKoon and R. Ratcliff}, Journal = {Journal of Experimental Psychology: Learning, Memory, and Cognition}, Pages = {1239--1243}, Title = {Sentential context and on-line lexical decision}, Volume = 20, Year = 1994} @article{McQueen+95, Author = {J. M. McQueen and A. Cutler and T. Briscoe and D. Norris}, Date-Modified = {2015-02-19 10:24:48 +0000}, Journal = {{Language and Cognitive Processes}}, Number = {3/4}, Pages = {309--331}, Title = {{Models of Continuous Speech Recognition and the Contents of the Vocabulary}}, Volume = 10, Year = 1995} @article{McRoy92, Author = {Susan W. McRoy}, Date-Modified = {2009-09-06 20:40:26 +0200}, Journal = CL, Keywords = {wsd, word sense disambiguation}, Number = 1, Pages = {1--30}, Title = {Using multiple knowledge sources for word sense disambiguation}, Volume = 18, Year = 1992} @incollection{Medin+95, Address = {Turnhout, Belgium}, Author = {D. L. Medin and R. L. 
Goldstone}, Booktitle = {Similarity in language, thought and perception}, Editor = {C. Cacciari}, Number = {I}, Pages = {83--110}, Publisher = {Brepols}, Series = {Semiotic and Cognitive Studies}, Title = {The predicates of similarity}, Year = 1995} @article{Megyesi02, Author = {B. Megyesi}, Issue = {Special Issue on Machine Learning Approaches to Shallow Parsing}, Journal = {Journal of Machine Learning Research}, Title = {Shallow Parsing with PoS Taggers and Linguistic Knowledge}, Year = 2002} @article{Mehler+81, Author = {J. Mehler and J. Dommergues and U. Frauenfelder and J. Segui}, Journal = {Journal of Verbal Learning and Verbal Behaviour}, Pages = {298--305}, Title = {The Syllable's Role in Speech Segmentation}, Volume = 20, Year = 1981} @mastersthesis{Meijer94, Author = {R. Meijer}, School = {Cognitive Artificial Intelligence, Dept. of Philosophy, University of Utrecht}, Title = {A stagewise treatment of connectionism}, Year = 1994} @inproceedings{Merialdo91, Author = {Bernard Merialdo}, Booktitle = {Proceedings International Conference on Acoustics, Speech, and Signal Processing}, Title = {Tagging Text With A Probabilistic Model}, Year = 1991} @article{Merialdo94, Author = {B. Merialdo}, Journal = CL, Number = 2, Pages = {155--172}, Source = {Jakub}, Title = {Tagging {E}nglish text with a probabilistic model}, Volume = 20, Year = 1994} @inproceedings{Mertzbacher+93, Author = {M. Mertzbacher and W. Chu}, Booktitle = {Proceedings of the 1993 workshop on Knowledge Discovery in Databases}, Title = {Pattern-Based Clustering for Databases Attribute Values}, Year = 1993} @inproceedings{Meyer+92, Author = {I. Meyer and D. Skuce and L. Bowker and K. 
Ack}, Booktitle = Coling, Pages = {956--960}, Title = {Towards a new generation of terminological resources: an experiment in building a terminological knowledge base}, Year = 1992} @techreport{Meyers+94, Author = {Adam Meyers and Catherine Macleod and Ralph Grishman}, Institution = {Computer Science Department, New York University}, Number = {Proteus Project Memorandum 64}, Title = {Standardization of the Complement Adjunct Distinction}, Year = 1994} @inproceedings{Meyers+96, Author = {Adam Meyers and Catherine Macleod and Ralph Grishman}, Booktitle = {Proceedings of {EURALEX'96}, G\"{o}teborg University, Sweden}, Title = {Standardization of the Complement/Adjunct Distinction}, Year = 1996} @book{Michalski+83, Address = {Palo Alto, CA}, Editor = {R. S. Michalski and J. G. Carbonell and T. M. Mitchell}, Publisher = {Tioga Publishing Company}, Title = {Machine learning: An artificial intelligence approach}, Volume = {I}, Year = 1983} @book{Michalski+86, Address = {San Mateo, CA}, Editor = {R. S. Michalski and J. G. Carbonell and T. M. Mitchell}, Publisher = {Morgan Kaufmann}, Title = {Machine learning: An artificial intelligence approach}, Volume = {II}, Year = 1986} @book{Michalski+94, Address = {Cambridge, MA}, Editor = {R. Michalski and G. Tecuci}, Publisher = MIT, Title = {Machine learning: A multistrategy approach}, Volume = {IV}, Year = 1994} @article{Michalski83, Author = {R. S. Michalski}, Journal = {Artificial Intelligence}, Pages = {111-161}, Title = {A theory and methodology of inductive learning}, Volume = 11, Year = 1983} @incollection{Michalski93, Author = {R. Michalski}, Booktitle = {Readings in Knowledge Acquisition and Learning: Automating the Construction and Improvement of Expert Systems}, Editor = {B. G. Buchanan and D. C. Wilkins}, Pages = {7--38}, Publisher = {San Mateo, CA: Morgan Kaufmann}, Title = {Toward a unified theory of learning: multistrategy task-adaptive learning}, Year = 1993} @book{Michie+94, Address = {New York}, Author = {D. 
Michie and D. J. Spiegelhalter and C. C. Taylor}, Publisher = {Ellis Horwood}, Title = {Machine learning, neural and statistical classification}, Year = 1994} @article{Michie91, Author = {D. Michie}, Journal = {The Computer Journal}, Pages = {559--565}, Title = {Methodologies from machine learning in data analysis and software}, Volume = 34, Year = 1991} @inproceedings{Mihalcea02, Address = {Taipei, Taiwan}, Author = {R. Mihalcea}, Booktitle = {Proceedings of the 19th International Conference on Computational Linguistics (COLING 2002)}, Date-Modified = {2009-09-06 20:40:45 +0200}, Keywords = {wsd, word sense disambiguation, memory-based learning, memory-based language processing}, Title = {Instance-Based Learning with Automatic Feature Selection Applied to Word Sense Disambiguation}, Year = 2002} @inproceedings{Mihov+07, Address = {Borovets, Bulgaria}, Author = {S. Mihov and P. Mitankin and A. Gotscharek and U. Reffle and K. Schulz and C. Ringlstetter}, Booktitle = {Finite State Techniques and Approximate Search, Proceedings of the First Workshop on Finite-State Techniques and Approximate Search}, Date-Added = {2010-01-29 15:28:44 +0100}, Date-Modified = {2010-02-12 23:56:28 +0100}, Pages = {25--30}, Title = {Tuning the Selection of Correction Candidates for Garbled Tokens using Error Dictionaries}, Year = {2007}} @inproceedings{Miikkulainen+88, Author = {R. Miikkulainen and M. G. Dyer}, Booktitle = {Proceedings of the IEEE International Conference on Neural Networks (San Diego, CA)}, Pages = {285--292}, Publisher = {Piscataway, NJ: IEEE}, Title = {Forming Global Representations with Extended Backpropagation}, Volume = {I}, Year = 1988} @book{Miikkulainen93, Address = {Cambridge, MA}, Author = {R. Miikkulainen}, Publisher = MIT, Title = {Subsymbolic Natural Language Processing}, Year = 1993} @article{Miller+90, Author = {G. Miller and R. Beckwith and C. Fellbaum and D. Gross and K.J. 
Miller}, Journal = {International Journal of Lexicography}, Pages = {235-312}, Title = {Wordnet: An on-line lexical database}, Volume = {3(4)}, Year = 1990} @misc{Miller+93, Author = {George A. Miller and Richard Beckwith and Christiane Fellbaum and Derek Gross and Katherine Miller}, Title = {Introduction to WordNet: An On-line Lexical Database}, Year = 1993} @article{Miller57, Author = {G. Miller}, Journal = {American Journal of Psychology}, Pages = {311--}, Title = {Some effects of intermittent silence}, Volume = 70, Year = 1957} @inbook{Miller75, Address = {Paris}, Author = {G. A. Miller}, Booktitle = {{Foundations of Language Development, a Multidisciplanary approach}}, Editor = {Lenneberg, E. H. and Lenneberg E.}, Publisher = {The UNESCO Press}, Title = {{Pastness}}, Volume = 2, Year = 1975} @misc{Miller93, Author = {George A. Miller}, Title = {Nouns in WordNet: A Lexical Inheritance System}, Year = 1993} @article{Miller93b, Author = {L. Chris Miller}, Journal = {BYTE}, Pages = {177--183}, Title = {Bableware for the desktop}, Volume = {January}, Year = 1993} @inproceedings{Minnen+00, Address = {New Brunswick, NJ}, Author = {G. Minnen and F. Bond and A. Copestake}, Booktitle = {Proceedings of the 4th Conference on Computational Natural Language Learning and the Second Learning Language in Logic Workshop}, Pages = {43--48}, Publisher = {ACL}, Title = {Memory-based Learning for Article Generation}, Year = 2000} @article{Minnen+01, Author = {G. Minnen and J. Carroll and D. Pearce}, Journal = JNLE, Number = 3, Pages = {207--223}, Title = {Applied morphological processing of English}, Volume = 7, Year = 2001} @book{Minsky+88, Address = {Cambridge, MA}, Author = {M. L. Minsky and S. A. Papert}, Note = {First published in 1969}, Publisher = MIT, Title = {Perceptrons: Expanded edition}, Year = 1988} @inproceedings{Mitchell+90, Author = {P. 
Mitchell and Beatrice Santorini}, Booktitle = {Proceedings of the DARPA Speech and Natural Language Workshop}, Month = {June}, Pages = {275-282}, Title = {Deducing linguistic structure from the statistics of large corpora}, Year = 1990} @incollection{Mitchell+91, Address = {Austin}, Author = {Mitchell, D.C. and F. Cuetos}, Booktitle = {Current issues in natural language processing}, Editor = {C. Smith}, Pages = {1--12}, Publisher = {University of Texas, Center for Cognitive Science}, Title = {The origins of parsing strategies}, Year = 1991} @article{Mitchell+95, Author = {D.C. Mitchell and F. Cuetos and M.M.B. Corley and M. Brysbeart}, Journal = {Journal of Psycholinguistic Research}, Number = 6, Pages = {469--488}, Title = {Exposure-Based Models of Human Parsing: Evidence for the Use of Coarse-Grained (Nonlexical) Statistical Records}, Volume = 24, Year = 1995} @inproceedings{Mitchell77, Address = {Cambridge, MA}, Author = {T. Mitchell}, Booktitle = {Proceedings of the Fifth International Joint Conference on Artificial Intelligence}, Editor = MIT, Pages = {305--310}, Title = {Version spaces: A candidate elimination approach to rule learning}, Year = 1977} @incollection{Mitchell94, Address = {San Diego, Cal.}, Author = {Mitchell, D.C.}, Booktitle = {Handbook of Psycholinguistics}, Chapter = 11, Editor = {Gernsbacher, M.}, Publisher = {Academic Press}, Title = {Sentence Parsing}, Year = 1994} @book{Mitchell97, Address = {New York, NY}, Author = {T. Mitchell}, Publisher = {McGraw-Hill}, Title = {Machine Learning}, Year = 1997} @inproceedings{Mitkov+02, Author = {R. Mitkov and R. Evans and C. 
Orasan}, Booktitle = {Proceedings of the Third International Conference on Computational Linguistics and Intelligent Text Processing}, Pages = {168--186}, Publisher = {Springer-Verlag}, Title = {A New, Fully Automatic Version of {M}itkov's Knowledge-Poor Pronoun Resolution Method}, Year = 2002} @book{Mitkov03, Address = {Oxford}, Editor = {Ruslan Mitkov}, Publisher = {Oxford University Press}, Title = {The {O}xford {H}andbook of {C}omputational {L}inguistics}, Year = 2003} @inproceedings{Mitkov97, Author = {Mitkov, R.}, Booktitle = {Workshop on Operational Factors in Practical Robust Anaphora Resolution for Unrestricted Texts}, Date-Added = {2009-11-15 00:42:22 +0100}, Date-Modified = {2009-11-15 00:43:04 +0100}, Pages = {14--21}, Title = {Factors in anaphora resolution: they are not the only things that matter. A case study based on two different approaches}, Year = {1997}} @techreport{Mitkov99, Author = {R. Mitkov}, Date-Added = {2009-11-15 00:42:22 +0100}, Date-Modified = {2009-11-15 00:42:34 +0100}, Institution = {University of Wolverhampton}, Title = {Anaphora Resolution: The State of the Art}, Year = {1999}} @article{Mitton87, Address = {Tarrytown, NY, USA}, Author = {Roger Mitton}, Date-Added = {2010-01-29 15:12:19 +0100}, Date-Modified = {2010-01-29 15:12:24 +0100}, Issn = {0306-4573}, Journal = {Information Processing Management}, Number = {5}, Pages = {495--505}, Publisher = {Pergamon Press, Inc.}, Title = {Spelling checkers, spelling correctors and the misspellings of poor spellers}, Volume = {23}, Year = {1987}} @manual{Miyata91, Author = {Y. Miyata}, Organization = {Computer Science Department, University of Colorado, Boulder}, Title = {A User's Guide to PlaNet Version 5.6: A Tool for Constructing, Running and Looking into a PDP Network}, Year = 1991} @inproceedings{Moeller+93, Address = {{Amsterdam}}, Author = {R. Moeller and H. M. 
Gross}, Booktitle = {Proceedings of ICANN}, Title = {Detection of Coincidences and Generation of Hypotheses -- a Proposal for an Elementary Cortical Function}, Year = 1993} @book{Mohanan86, Author = {K. P. Mohanan}, Publisher = {Dordrecht: D. Reidel}, Title = {The theory of lexical phonology}, Year = 1986} @inproceedings{Moldovan+00, Author = {D. Moldovan and S. Harabagiu and M. {Pa\,{s}ca} and R. Mihalcea and R. Goodrum and R. {G\^{\i}rju} and V. Rus}, Booktitle = TREC8, Pages = {175--183}, Title = {LASSO: A Tool for Surfing the Answer Net}, Year = 2000} @inproceedings{Moldovan+02, Author = {D. Moldovan and M. Pasca and S. Harabagiu and M. Surdeanu}, Booktitle = {Proceedings of the 40th Annual Meeting of ACL}, Pages = {33-40}, Title = {Performance Issues and Error Analysis in an Open-Domain Question Answering System}, Year = 2002} @article{Molina+02, Author = {Molina, A. and Pla, F.}, Issue = {Special Issue on Machine Learning Approaches to Shallow Parsing}, Journal = {Journal of Machine Learning Research}, Title = {Shallow Parsing using Specialized HMM}, Year = 2002} @article{Monsell+89, Author = {S. Monsell and M. C. Doyle and P. N. Haggard}, Journal = {Journal of Experimental Psychology}, Pages = {43--71}, Title = {Effects of Frequency on Visual Word Recognition Tasks: Where Are They?}, Volume = 188, Year = 1989} @article{Montanari74, Author = {U. Montanari}, Date-Added = {2009-11-15 00:43:10 +0100}, Date-Modified = {2009-11-15 00:43:16 +0100}, Journal = {Information Science}, Number = {2}, Pages = {95--132}, Title = {Networks of constraints: Fundamental properties and application to picture processing}, Volume = 7, Year = 1974} @incollection{Moody92, Author = {J. Moody}, Booktitle = {Advances in Neural Information Processing Systems}, Editor = {J. Moody and S. J. Hanson and R. P. 
Lippmann}, Pages = {847--854}, Publisher = {San Mateo, CA: Morgan Kaufmann}, Title = {The effective number of parameters: An analysis of generalization and regularization in nonlinear learning systems}, Volume = 4, Year = 1992} @unpublished{Moody94, Author = {J. Moody}, Note = {to appear in {\em From Statistics to Neural Networks: Theory and Pattern Recognition Applications}, V. Cherkassky, J. H. Friedman and H. Wechsler (Eds.), NATO ASI Series F. Berlin: Springer-Verlag.}, Title = {Prediction Risk and Architecture Selection for Neural Networks}, Year = 1994} @article{Mooney+95, Author = {R. J. Mooney and M. E. Califf}, Journal = {Journal of Artificial Intelligence Research}, Pages = {1--24}, Title = {Induction of first-order decision lists: Results on learning the past tense of {E}nglish verbs}, Volume = 3, Year = 1995} @inproceedings{Mooney96, Author = {Mooney, R.}, Booktitle = {Proceedings of the 1996 Conference on Empirical Methods in Natural Language Processing}, Date-Modified = {2009-09-06 20:40:55 +0200}, Keywords = {wsd, word sense disambiguation}, Pages = {82--91}, Title = {Comparative Experiments on Disambiguating Word Senses: An Illustration of the Role of Bias in Machine Learning}, Year = 1996} @book{Moore+82, Address = {{Bath}}, Author = {Moore, T and Carling, C}, Publisher = {{Pitman Press}}, Title = {{Understanding Language: Towards a post-Chomskyan Linguistics}}, Year = 1982} @inproceedings{Morante+07, Address = {Borovets, Bulgaria}, Author = {R. Morante and A. 
{Van den Bosch}}, Booktitle = {Proceedings of the International Conference on Recent Advances in Natural Language Processing (RANLP-2007)}, Date-Added = {2009-12-26 20:57:57 +0100}, Date-Modified = {2010-09-14 13:06:31 +0200}, Keywords = {ilk, semantic role labeling, Catalan, Spanish, vici}, Pages = {388--394}, Title = {Memory-based semantic role labelling of Catalan and Spanish}, Year = {2007}, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/srl_ranlp_def.pdf}} @inproceedings{Morante+07b, Address = {Prague, Czech Republic}, Author = {R. Morante and B. Busser}, Booktitle = {Proceedings of the Fourth International Workshop on Semantic Evaluations (SemEval-2007),}, Date-Added = {2009-12-26 20:57:57 +0100}, Date-Modified = {2010-01-03 01:09:32 +0100}, Keywords = {ilk, semantic role labeling, Catalan, Spanish}, Pages = {183--186}, Title = {{ILK2}: Semantic Role Labelling for {Catalan} and {Spanish} using {TiMBL}}, Year = {2007}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/W/W07/W07-2038.pdf}} @inproceedings{Morante+08, Address = {Manchester, UK}, Author = {R. Morante and W. Daelemans and V. {Van Asch}}, Booktitle = {Proc. of the {CoNLL} 2008}, Date-Added = {2009-12-26 20:57:57 +0100}, Date-Modified = {2010-09-14 22:16:17 +0200}, Keywords = {clips, semantic role labeling}, Pages = {208-212}, Title = {A Combined Memory-Based Semantic Role Labeler of English}, Year = {2008}} @article{Morante+08b, Author = {R. Morante}, Date-Added = {2010-01-03 10:36:18 +0100}, Date-Modified = {2010-01-03 10:37:08 +0100}, Journal = {Procesamiento del Lenguaje Natural}, Keywords = {dependency parsing, Spanish}, Number = {40}, Pages = {59--66}, Title = {Experiments with an ensemble of Spanish dependency parsers}, Year = {2008}} @inproceedings{Morante+08c, Author = {R. Morante and A. Liekens and W. 
Daelemans}, Booktitle = {Proceedings of the 2008 Conference on Empirical Methods in Natural Language Processing}, Date-Added = {2010-01-03 10:47:35 +0100}, Date-Modified = {2010-01-03 10:48:41 +0100}, Keywords = {scope, negation, text mining}, Pages = {715--724}, Title = {Learning the Scope of Negation in Biomedical Texts}, Year = {2008}, Bdsk-Url-1 = {http://aclweb.org/anthology-new/D/D08/D08-1075.pdf}} @inproceedings{Morante+09, Address = {Boulder, CO, USA}, Author = {R. Morante and V. {Van Asch} and A. {Van den Bosch}}, Booktitle = {Proceedings of the Thirteenth Conference on Computational Natural Language Learning (CoNLL): Shared Task,}, Date-Added = {2009-12-26 20:57:57 +0100}, Date-Modified = {2010-09-14 13:06:18 +0200}, Keywords = {ilk, semantic role labeling, dependency parsing, joint learning, vici}, Pages = {25--30}, Title = {Joint memory-based learning of syntactic and semantic dependencies in multiple languages}, Year = {2009}, Bdsk-Url-1 = {http://aclweb.org/anthology-new/W/W09/W09-1203.pdf}} @inproceedings{Morante+09b, Address = {Borovets, Bulgaria}, Author = {R. Morante and V. {Van Asch} and A. {Van den Bosch}}, Booktitle = {Proceedings of the 7th International Conference on Recent Advances in Natural Language Processing (RANLP-2009),}, Date-Added = {2010-01-02 19:42:39 +0100}, Date-Modified = {2010-09-14 13:06:24 +0200}, Keywords = {ilk, semantic role labeling, dependency parsing, vici}, Pages = {275--280}, Title = {Dependency parsing and semantic role labeling as a single task}, Year = {2009}, Bdsk-Url-1 = {http://www.cnts.ua.ac.be/~vincent/pdf/ranlp09_01.pdf}} @inproceedings{Morante08, Address = {Marrakech, Morocco}, Author = {R. 
Morante}, Booktitle = {Proceedings of the Sixth International Language Resources and Evaluation (LREC'08)}, Date-Added = {2009-12-26 20:57:57 +0100}, Date-Modified = {2010-01-03 10:36:07 +0100}, Title = {Semantic role labeling tools trained on the {Cast3LB-CoNLL-SemRol} corpus}, Year = {2008}, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/393_paper.pdf}} @article{Morrison68, Address = {New York, NY, USA}, Author = {Morrison, Donald R.}, Doi = {http://doi.acm.org/10.1145/321479.321481}, Issn = {0004-5411}, Journal = {J. ACM}, Number = {4}, Pages = {514--534}, Publisher = {ACM}, Title = {PATRICIA---Practical Algorithm To Retrieve Information Coded in Alphanumeric}, Volume = {15}, Year = {1968}, Bdsk-Url-1 = {http://doi.acm.org/10.1145/321479.321481}} @unpublished{Morciniec+95, Author = {M. Morciniec and R. Rohwer}, Note = {unpublished manuscript}, Title = {The n-tuple classifier: Too good to ignore}, Year = 1995} @article{Morin+81, Author = {R. L. Morin and B. E. Raeside}, Date-Modified = {2009-11-14 18:56:34 +0100}, Journal = {IEEE Transactions on Systems, Man, and Cybernetics}, Keywords = {k-NN, instance-based learning, distance weighting}, Number = 3, Pages = {241--243}, Title = {A Reappraisal of Distance-Weighted $k$-Nearest Neighbor Classification for Pattern Recognition with Missing Data}, Volume = {SMC-11}, Year = 1981} @article{Morris+91, Author = {J. Morris and G. Hirst}, Journal = {Computational Linguistics}, Pages = {21--45}, Title = {Lexical cohesion computed by thesaural relations as an indicator of the structure of text}, Volume = 17, Year = 1991} @book{Mosteller+64, Author = {F. Mosteller and D. Wallace}, Place = {Reading, Massachusetts}, Publisher = {Addison-Wesley}, Title = {Inference and Disputed Authorship: The Federalist}, Year = 1964} @book{Mosteller+84, Author = {F. Mosteller and D. 
Wallace}, Note = {2nd edition of {\em Inference and Disputed Authorship: The Federalist}, Addison-Wesley, 1964}, Publisher = {Springer-Verlag}, Title = {Applied Bayesian and classical inference The case of The Federalist papers}, Year = 1984} @article{Mozer+89, Author = {M. C. Mozer and P. Smolensky}, Journal = {Connection Science}, Pages = {3--16}, Title = {Using relevance to reduce network size automatically}, Volume = 1, Year = 1989} @book{Muggleton92, Editor = {S. H. Muggleton}, Publisher = {London: Academic Press}, Title = {Inductive Logic Programming}, Year = 1992} @inproceedings{Munoz+99, Author = {M. Munoz and V. Punyakanok and D. Roth and D. Zimak}, Booktitle = {EMNLP-VLC'99, the Joint SIGDAT Conference on Empirical Methods in Natural Language Processing and Very Large Corpora}, Month = Jun, Pages = {168--178}, Title = {A Learning Approach to Shallow Parsing}, Year = 1999} @techreport{Munoz+99b, Author = {M. Munoz and V. Punyakanok and D. Roth and D. Zimak}, Institution = {UIUC Computer Science Department}, Month = Apr, Number = {UIUCDCS-R-99-2087}, Title = {A Learning Approach to Shallow Parsing}, Year = 1999} @misc{Murphy+95, Author = {P. Murphy and D. W. Aha}, Note = {Maintained at the Department of Information and Computer Science, University of California, Irvine. Anonymous ftp from {{\tt ics.uci.edu}} in the directory {{\tt pub/machine-learning/databases}}}, Title = {UCI repository of machine learning databases -- a machine-readable repository}, Year = 1995} @article{Murre+92, Author = {J. M. J. Murre and R. H. Phaf and G. Wolters}, Journal = {Neural Networks}, Pages = {55--82}, Title = {CALM: Categorizing and Learning Module}, Volume = 5, Year = 1992} @incollection{Murre+96, Address = {London}, Author = {J.M.J. Murre and R. Goebel}, Booktitle = {Computational Psycholinguistics}, Chapter = 3, Editor = {Dijkstra, T. and K. 
de Smedt}, Pages = {49--82}, Publisher = {Taylor \& Francis}, Title = {Connectionist Modelling}, Year = 1996} @incollection{Murre95, Author = {J. M. J. Murre}, Booktitle = {Connectionist Models of Memory and Language}, Editor = {J. Levy and D. Bairaktaris and J. Bullinaria and P. Cairns}, Note = {to appear}, Publisher = {London: UCL Press}, Title = {Transfer of learning in backpropagation and in related neural network models}, Year = 1995} @inproceedings{Myers+00, Author = {K. Myers and S. Singh and M. Walker and M. Kearns}, Booktitle = ICML, Title = {A Boosting Approach to Topic Spotting on Subdialogues}, Year = 2000} @incollection{Nagao84, Address = {Amsterdam, The Netherlands}, Author = {M. Nagao}, Booktitle = {Artificial and human intelligence}, Date-Modified = {2011-06-21 18:26:35 +0200}, Editor = {A. Elithorn and R. Banerji}, Pages = {173--180}, Publisher = {North-Holland}, Title = {A framework of a mechanical translation between {J}apanese and {E}nglish by analogy principle}, Year = 1984} @article{Naigles91, Author = {Letitia Naigles}, Journal = {Language and Speech}, Number = 1, Pages = {63--79}, Title = {Review of learnability and cognition: The acquisition of argument structure}, Volume = 34, Year = 1991} @inproceedings{Nakagawa07, Author = {Nakagawa, T.}, Booktitle = {Proceedings of the CoNLL Shared Task Session of EMNLP-CoNLL 2007}, Date-Added = {2009-11-15 10:11:19 +0100}, Date-Modified = {2009-11-15 10:11:40 +0100}, Keywords = {dependency parsing}, Pages = {952--956}, Title = {Multilingual Dependency Parsing Using Global Features}, Year = {2007}} @incollection{Nakisa+00, Author = {R.C. Nakisa}, Booktitle = {Cognitive Models of Language Acquisition}, Editor = {P. Broeder and J. Murre}, Pages = {xxx--yyy}, Publisher = CUP, Title = {A Cross-Linguistic Comparison of Single and Dual-Route Models of Inflectional Morphology}, Year = 2000} @inproceedings{Nakisa+96, Author = {R.C. Nakisa and U. 
Hahn}, Booktitle = {Proceedings of the 18th Annual Conference of the Cognitive Science Society}, Editor = {Garrison W. Cottrell}, Pages = {177--182}, Title = {Where Defaults Don't Help: the Case of the German Plural System}, Year = 1996} @inproceedings{Nastase+06, Author = {V. Nastase and J. Sayyad-Shiarabad and M. Sokolova and S. Szpakowicz}, Booktitle = {Proceedings of the Twenty-First National Conference on Artificial Intelligence and the Eighteenth Innovative Applications of Artificial Intelligence Conference}, Publisher = {AAAI Press}, Title = {Learning noun-modifier semantic relations with corpus-based and WordNet-based features}, Year = 2006} @inproceedings{Nagao+94, Author = {M. Nagao and S. Mori}, Booktitle = {COLING-94}, Date-Modified = {2011-06-19 23:01:27 +0200}, Pages = {611--615}, Title = {A New Method of N-gram Statistics for Large Number of n and Automatic Extraction of Words and Phrases from Large Text Data of Japanese}, Year = {1994}} @article{Navarro+97, Author = {G. Navarro and R. Baeza-Yates}, Journal = {ACM Transactions on Information Systems}, Pages = {400--435}, Title = {Proximal Nodes: A Model to Query Document Databases by Content and Structure}, Volume = {15(4)}, Year = 1997} @article{Nelson+93, Author = {K. Nelson and J. Hampson and L. Kessler Shaw}, Journal = {Journal of Child Language}, Pages = {61--84}, Title = {Nouns in early lexicons: evidence, explanations and implications}, Volume = 20, Year = 1993} @incollection{Nelson93, Author = {P. E. Nelson}, Booktitle = {The First Text Retrieval Conference (TREC-1)}, Editor = {D. K. Harman}, Pages = {287--296}, Publisher = {NIST SP 500-207}, Title = {Site report for the {Text REtrieval Conference} by {ConQuest Software Inc.}}, Year = 1993} @article{Nenov+94, Author = {Nenov, V. and Halgren, E. and Mandelkern, M. 
and Smith, M.}, Date-Modified = {2008-07-23 16:56:09 +0200}, Journal = {{Human Brain Mapping}}, Keywords = {word encoding, ERP, human language processing, recent memory, brain, PET}, Pages = {249-268}, Title = {{Human Brain Metabolic Responses to Familiarity During Lexical Decision}}, Volume = 1, Year = 1994} @book{Newmeyer80, Author = {Newmeyer}, Date-Modified = {2008-07-23 16:56:40 +0200}, Keywords = {linguistics, transformational generative grammar}, Publisher = {New York, NY: Academic Press}, Title = {Linguistic theory in {A}merica: The first quarter century of transformational grammar}, Year = 1980} @inproceedings{Ng+96, Address = {San Francisco}, Author = {Ng, H.T. and Lee, H.B.}, Booktitle = ACL96, Date-Modified = {2009-09-06 20:41:03 +0200}, Editor = {Arivind Joshi and Martha Palmer}, Keywords = {wsd, word sense disambiguation}, Pages = {40--47}, Publisher = {Morgan Kaufmann Publishers}, Title = {Integrating Multiple Knowledge Sources to Disambiguate Word Sense: An Exemplar-Based Approach}, Year = 1996} @inproceedings{Ng97, Author = {Ng, Hwee Tou}, Booktitle = {???}, Date-Modified = {2009-09-06 20:41:09 +0200}, Keywords = {wsd, word sense disambiguation}, Title = {Getting Serious about Word Sense Disambiguation}, Year = 1997} @inproceedings{Ng97b, Author = {H. T. Ng}, Booktitle = {Proceedings of the Second Conference on Empirical Methods in Natural Language Processing}, Date-Modified = {2009-09-06 20:41:16 +0200}, Keywords = {wsd, word sense disambiguation}, Pages = {208--213}, Title = {Exemplar-based word sense disambiguation: some recent improvements}, Year = 1997} @inproceedings{Ngai+01, Author = {Ngai, G. 
and Florian, R.}, Booktitle = {Proceedings of North American Chapter of ACL 2001}, Pages = {44--47}, Title = {Transformation-Based Learning in the Fast Lane}, Year = 2001} @inproceedings{Nguyen+03, Author = {Nguyen Minh Le and Susumu Horiguchi}, Booktitle = {Proceedings of the 17th Pacific Asia Conference on Language, Information and Computation}, Date-Added = {2009-11-15 14:34:28 +0100}, Date-Modified = {2009-11-15 14:34:39 +0100}, Pages = {290--297}, Title = {A New Sentence Reduction based on Decision Tree Model}, Year = {2003}} @article{Nguyen+04, Author = {M. L. Nguyen and S. Horiguchi and A. Shimazu and B. Tu Ho}, Date-Added = {2009-11-15 14:31:38 +0100}, Date-Modified = {2010-10-06 20:59:49 +0200}, Journal = {ACM Transactions on Asian Language Information Processing (TALIP)}, Number = {2}, Pages = {146--158}, Title = {Example-based sentence reduction using the hidden markov model}, Volume = {3}, Year = {2004}} @inproceedings{Nguyen+04b, Address = {Morristown, NJ, USA}, Author = {Minh Le Nguyen and Akira Shimazu and Susumu Horiguchi and Bao Tu Ho and Masaru Fukushi}, Booktitle = {COLING '04: Proceedings of the 20th international conference on Computational Linguistics}, Date-Added = {2009-11-15 14:34:44 +0100}, Date-Modified = {2009-11-15 14:34:53 +0100}, Pages = {743}, Publisher = {Association for Computational Linguistics}, Title = {Probabilistic sentence reduction using support vector machines}, Year = {2004}} @inproceedings{Niblett87, Address = {Bled, Yugoslavia}, Author = {T. Niblett}, Booktitle = {Proceedings of the Second European Working Session on Learning}, Date-Modified = {2009-11-14 19:01:24 +0100}, Keywords = {decision trees}, Pages = {67--78}, Publisher = {Sigma}, Title = {Constructing decision trees in noisy domains}, Year = 1987} @article{Nigam+00, Author = {K. Nigam and A. McCallum and S. Thrun and T. 
Mitchell}, Date-Modified = {2010-02-13 00:02:31 +0100}, Journal = {Machine Learning}, Number = {2/3}, Pages = {103--134}, Title = {Text Classification from Labeled and Unlabeled Documents using {EM}}, Volume = 39, Year = 2000} @book{Nirenburg87, Editor = {Nirenburg, S.}, Publisher = CUP, Title = {Machine Translation}, Year = 1987} @techreport{Nirenburg88, Author = {Nirenburg, S. and I. Monarch and T. Kaufmann and I. Nirenburg and J. Carbonell}, Institution = {Center for Machine Translation, Carnegie-Mellon}, Number = {CMU-CMT-88-108}, Title = {Acquisition of Very Large Knowledge Bases: Methodology, Tools and Applications}, Year = 1988} @phdthesis{Niv93, Author = {M. Niv}, School = {University of Pennsylvania}, Title = {A computational model of syntactic processing: Ambiguity resolution from interpretation}, Year = 1993} @inproceedings{Nivre+04, Address = {Boston, MA}, Author = {J. Nivre and J. Hall and J. Nilsson}, Booktitle = {Proceedings of the Eighth Conference on Computational Natural Language Learning (CoNLL 2004)}, Date-Modified = {2011-06-21 18:12:04 +0200}, Editor = {H. T. Ng and E. Riloff}, Pages = {49--57}, Title = {Memory-Based Dependency Parsing}, Year = 2004} @inproceedings{Nivre+04b, Address = {Geneva, Switzerland}, Author = {J. Nivre and M. Scholz}, Booktitle = {Proceedings of COLING 2004}, Pages = {23--27}, Title = {Deterministic Dependency Parsing of {E}nglish Text}, Year = 2004} @inproceedings{Nivre+05, Author = {Nivre, J. and Nilsson, J.}, Booktitle = {Proceedings of the 43rd Annual Meeting of the Association for Computational Linguistics (ACL'05)}, Date-Added = {2009-11-15 10:11:47 +0100}, Date-Modified = {2009-11-15 10:12:09 +0100}, Keywords = {dependency parsing}, Pages = {99--106}, Title = {Pseudo-Projective Dependency Parsing}, Year = {2005}} @article{Nivre+07, Author = {J. Nivre and J. Hall and J. Nilsson and A. Chanev and G. Eryigit and S. K{\"u}bler and S. Marinov and E. 
Marsi}, Date-Added = {2009-12-26 20:57:57 +0100}, Date-Modified = {2009-12-26 21:09:06 +0100}, Journal = {Natural Language Engineering}, Number = {2}, Pages = {95--135}, Title = {{MaltParser:} A language-independent system for data-driven dependency parsing}, Volume = {13}, Year = {2007}} @inproceedings{Nivre03, Author = {Nivre, J.}, Booktitle = {Proceedings of the 8th International Workshop on Parsing Technologies (IWPT)}, Date-Added = {2009-11-15 10:12:21 +0100}, Date-Modified = {2009-11-15 10:12:34 +0100}, Pages = {149--160}, Title = {An efficient algorithm for projective dependency parsing}, Year = {2003}} @book{Nivre06, Author = {J. Nivre}, Date-Added = {2009-12-26 20:57:57 +0100}, Date-Modified = {2009-12-26 20:57:57 +0100}, Publisher = {Springer}, Title = {Inductive Dependency Parsing}, Year = {2006}} @inproceedings{Nivre07, Author = {Nivre, J.}, Booktitle = {Proceedings of Human Language Technologies: The Annual Conference of the North American Chapter of the Association for Computational Linguistics (NAACL-HLT)}, Date-Added = {2009-11-15 10:15:12 +0100}, Date-Modified = {2009-11-15 10:15:28 +0100}, Pages = {396--403}, Title = {Incremental Non-Projective Dependency Parsing}, Year = {2007}} @article{Nocedal80, Author = {J. 
Nocedal}, Journal = {Mathematics of Computation}, Pages = {773--782}, Title = {Updating {quasi-Newton} matrices with limited storage}, Volume = 35, Year = 1980} @inproceedings{Nomoto09, Address = {Singapore}, Author = {Nomoto, T.}, Booktitle = {Proceedings of the 2009 Conference on Empirical Methods in Natural Language Processing}, Date-Added = {2009-11-15 14:34:58 +0100}, Date-Modified = {2009-11-15 14:35:09 +0100}, Pages = {391--399}, Title = {A Comparison of Model Free versus Model Intensive Approaches to Sentence Compression}, Year = {2009}} @article{Norris89, Author = {Dennis Norris}, Journal = {Cognition}, Pages = {277--291}, Title = {How to build a connectionist idiot (savant)}, Volume = 35, Year = 1989} @techreport{Norris93, Author = {D. Norris}, Date-Modified = {2009-11-14 18:57:16 +0100}, Institution = {MRC Applied Psychology Unit, Cambridge, UK}, Keywords = {reading aloud, word naming}, Title = {A quantitative model of reading aloud}, Year = 1993} @inproceedings{Norvig+90, Author = {P. Norvig and R. Wilensky}, Booktitle = {Proceedings of the 13th International Conference on Computational Linguistics}, Number = 3, Pages = {225--230}, Title = {A critical Evaluation of Commensurable {A}bduction Models for Semantic interpretation}, Year = 1990} @inproceedings{Norvig88, Author = {P. Norvig}, Booktitle = {Proceedings of the 10th Annual Conference of the Cognitive Science Society}, Title = {Multiple simultaneous interpretations of ambiguous sentences}, Year = 1988} @article{Norvig89, Author = {P. Norvig}, Journal = {Cognitive Science}, Number = 4, Pages = {569--620}, Title = {Marker passing as a weak method for text inferencing}, Volume = 13, Year = 1989} @article{Nosofsky86, Author = {R. Nosofsky}, Journal = {Journal of Experimental Psychology: General}, Pages = {39--57}, Title = {Attention, similarity, and the identification-categorization relationship}, Volume = 15, Year = 1986} @incollection{Nunn+93, Author = {A. Nunn and V. J. 
{Van Heuven}}, Booktitle = {Analysis and Synthesis of Speech: Strategic Research Towards High-Quality Text-to-Speech Generation}, Editor = {V. J. {Van Heuven} and L. C. W. Pols}, Publisher = {Berlin: Mouton de Gruyter}, Title = {MORPHON, Lexicon-Based Text-to-Phoneme Conversion and Phonological Rules}, Year = 1993} @article{Oaksford+91, Author = {Oaksford, M. and N. Chater}, Journal = {{Mind \& Language}}, Number = 1, Pages = {1--38}, Title = {{Against Logicist cognitive Science}}, Volume = 6, Year = 1991} @techreport{Och+00, Author = {Och, F.J. and Ney, H.}, Date-Added = {2009-11-15 10:15:45 +0100}, Date-Modified = {2009-11-15 10:16:00 +0100}, Institution = {RWTH Aachen, University of Technology}, Title = {Giza++: Training of statistical translation models}, Year = {2000}} @article{Och+03, Author = {F.-J. Och and H. Ney}, Journal = {Computational Linguistics}, Number = 1, Pages = {19--51}, Title = {A Systematic Comparison of Various Statistical Alignment Models}, Volume = 29, Year = 2003} @inproceedings{Och02, Address = {Philadelphia, PA}, Author = {F.-J. Och and H. Ney}, Booktitle = {Proceedings of ACL 2002}, Pages = {295-302}, Title = {Discriminative Training and Maximum Entropy Models for Statistical Machine Translation}, Year = 2002} @inproceedings{Och03, Address = {Sapporo, Japan}, Author = {F.-J. Och}, Booktitle = {Proceedings of ACL 2003}, Date-Modified = {2010-08-25 16:31:17 +0200}, Organization = {Association for Computational Linguistics}, Pages = {160--167}, Title = {Minimum Error Rate Training in Statistical Machine Translation}, Year = 2003} @article{Oflazer+01, Author = {Kemal Oflazer and Sergei Nirenburg and Marjorie McShan}, Journal = CL, Number = 1, Title = {Bootstrapping Morphological Analyzers by Combining Human Elicitation and Machine Learning}, Volume = 27, Year = 2001} @proceedings{Oflazer+96, Editor = {K. Oflazer and H. 
Somers}, Organization = {Bilkent University, Ankara, Turkey}, Title = {Proceedings of the Second International Conference on New Methods in Language Processing}, Year = 1996} @unpublished{Ogden93, Author = {W. Ogden and M. Gonzales}, Date-Modified = {2010-02-14 23:07:45 +0100}, Note = {Demonstration at ARPA Workshop on Human Language Technology}, Title = {Norm -- A System for Translators}, Year = 1993} @article{Ojemann78, Author = {G.A.Ojemann}, Journal = {Brain and Language}, Pages = {331--340}, Title = {Organization of Short-Term Verbal Memory in Language Areas of Human Cortex: Evidence from Electrical Stimulation}, Volume = 5, Year = 1978} @book{Okabe+00, Author = {A. Okabe and B. Boots and K. Sugihara and S. N. Chiu}, Edition = {Second}, Publisher = {John Wiley}, Title = {Spatial tesselations: Concepts and applications of {V}oronoi diagrams}, Year = 2000} @incollection{Omohundro91, Author = {S. M. Omohundro}, Booktitle = {Advances in Neural Information Processing Systems}, Editor = {R. P. Lippmann and J. E. Moody and D. S. Touretzky}, Pages = {693--699}, Publisher = {San Mateo, CA: Morgan Kaufmann}, Title = {Bumptrees for efficient function, constraint, and classification learning}, Volume = 3, Year = 1991} @inproceedings{Oostdijk+02, Author = {N. Oostdijk and W. Goedertier and F. {Van Eynde} and L. Boves and J.P. Martens and M. Moortgat and H. Baayen}, Booktitle = {Proceedings of the third International Conference on Language Resources and Evaluation}, Date-Modified = {2012-11-28 22:28:07 +0000}, Editor = {M. {Gonz\'{a}lez Rodriguez} and C. {Paz Su\'{a}rez Araujo}}, Pages = {340--347}, Title = {Experiences from the {Spoken Dutch Corpus} Project}, Year = 2002} @inproceedings{Oostdijk+08, Address = {Marrakech, Morocco}, Author = {N. Oostdijk and M. Reynaert and P. Monachesi and G. {Van Noord} and R. Ordelman and I. Schuurman and V. 
Vandeghinste}, Booktitle = {Proceedings of the Sixth International Language Resources and Evaluation (LREC'08)}, Date-Added = {2010-01-03 10:37:42 +0100}, Date-Modified = {2010-09-14 12:53:47 +0200}, Keywords = {ilk, corpus, Dutch, annotation, sonar, d-coi}, Title = {From {D-Coi} to {SoNaR}: A reference corpus for {Dutch}}, Year = {2008}, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/365_paper.pdf}} @inproceedings{Orasan+01, Address = {Toulouse, France}, Author = {C. Or\u{a}san and R. Evans}, Booktitle = {Proceedings of the Fifth Workshop on Computational Language Learning, CoNLL-2001}, Editor = {W. Daelemans and R. Zajac}, Pages = {129--136}, Title = {Learning to identify animate references}, Year = 2001} @inproceedings{Orasan00, Address = {Monastir, Tunisia}, Author = {C. Or\u{a}san}, Booktitle = {Proceedings of ACIDCA '2000}, Title = {A hybrid method for clause splitting in unrestricted English texts}, Year = 2000} @inproceedings{Osborne00, Author = {M. Osborne}, Booktitle = COLING00, Title = {Estimation of stochastic attribute-value grammars using an informative sample}, Year = 2000} @article{Osborne02, Author = {Osborne, M.}, Issue = {Special Issue on Machine Learning Approaches to Shallow Parsing}, Journal = {Journal of Machine Learning Research}, Title = {Shallow Parsing using Noisy and Non-Stationary Training Material}, Year = 2002} @article{Osterhout+94, Author = {Osterhout, L. and Holcomb, P. and Swinney, D.}, Date-Modified = {2009-02-21 19:46:25 +0100}, Journal = {{Journal of Experimental Psychology}}, Keywords = {ERP, verbs, garden-path sentences, human language processing}, Number = 4, Pages = {786--803}, Title = {{Brain Potentials Elicited by Garden-Path Sentences: Evidence of the Application of Verb Information During Parsing}}, Volume = 20, Year = 1994} @techreport{Overmars93, Address = {{Utrecht University}}, Author = {Overmars, M. H.}, Institution = {Dept. 
of Computer Science}, Title = {{Forms Library, A Graphical User Interface Toolkit for Silicon Graphics Workstation}}, Version = {2.2}, Year = 1993} @incollection{Paap+87, Author = {K. R. Paap and J. E. McDonald and R. W. Schvaneveldt and R. W. Noel}, Booktitle = {Attention and Performance XII: The Psychology of Reading}, Editor = {M. Coltheart}, Pages = {221--243}, Publisher = {Hillsdale, NJ: Lawrence Erlbaum Associates}, Title = {Frequency and Pronounceability in Visually Presented Naming and Lexical Decision Tasks}, Year = 1987} @inproceedings{Paass+02, Author = {Paass, G. and Leopold, E. and Larson, M. and Kindermann, J. and Eickeler, S.}, Booktitle = {Proceedings of the 6th European Conference on Principles of Data Mining and Knowledge Discovery}, Date-Added = {2009-11-15 14:08:25 +0100}, Date-Modified = {2009-11-15 14:08:50 +0100}, Keywords = {support vector machines}, Pages = {373--384}, Title = {SVM Classification Using Sequences of Phonemes and Syllables}, Year = {2002}} @article{Paggio+95, Author = {Paggio, P. and Underwood, N.L.}, Date-Added = {2010-01-29 15:12:33 +0100}, Date-Modified = {2010-01-29 15:12:39 +0100}, Journal = {Natural Language Engineering}, Number = {1}, Pages = {1--18}, Publisher = {Cambridge University Press}, Title = {Validating the {\sc temaa le} evaluation methodology: a case study on {D}anish spelling checkers}, Volume = {1}, Year = {1995}} @article{Pahikkala+09, Author = {Pahikkala, T. and Pyysalo, S. and Boberg, J. and J{\"a}rvinen, J. and Salakoski, T.}, Date-Added = {2010-02-01 21:31:55 +0100}, Date-Modified = {2010-02-01 21:33:07 +0100}, Journal = {Machine Learning}, Number = {2}, Pages = {133--158}, Publisher = {Springer}, Title = {{Matrix representations, linear transformations, and kernels for disambiguation in natural language}}, Volume = {74}, Year = {2009}} @inproceedings{Paijmans+07, Address = {Leuven, Belgium}, Author = {J.J. Paijmans and S. 
Wubben}, Booktitle = {Proceedings of the 7th Dutch Belgian Information Retrieval Workshop (DIR2007)}, Date-Added = {2010-01-03 00:59:36 +0100}, Date-Modified = {2015-05-14 08:24:57 +0000}, Editor = {M. F. Moens and T. Tuytelaars and A. P. {De Vries}}, Keywords = {ilk, information extraction, number detection}, Pages = {51--56}, Title = {Memory Based Learning and the interpretation of numbers in archaeological reports}, Year = {2007}, Bdsk-Url-1 = {http://ilk.uvt.nl/~swubben/publications/dir2007.pdf}} @inproceedings{Paijmans+07b, Address = {Nottingham, UK}, Author = {J.J. Paijmans and S. Wubben}, Booktitle = {Proceedings of the UK e-Science 2007 All Hands Meeting}, Date-Added = {2010-01-03 01:20:39 +0100}, Date-Modified = {2010-01-03 01:21:34 +0100}, Keywords = {ilk, number detection, digital heritage, text mining}, Title = {Open Boek: A system for the extraction of numeric data from archeological reports}, Year = {2007}, Bdsk-Url-1 = {http://ilk.uvt.nl/~swubben/publications/allhandsopenboek.pdf}} @inproceedings{Paijmans+08, Address = {Bonn, Germany}, Author = {J.J. Paijmans and S. Wubben}, Booktitle = {Layers of Perception. Proceedings of the 35th International Conference on Computer Applications and Quantitative Methods in Archaeology}, Date-Added = {2010-01-03 10:48:44 +0100}, Date-Modified = {2010-01-03 10:49:58 +0100}, Editor = {K. Posluschny and K. Lambers and I. Herzog}, Keywords = {ilk, information extraction, text mining}, Publisher = {Dr. Rudolf Habelt GmbH}, Title = {Preparing archeological reports for intelligent retrieval}, Year = {2008}, Bdsk-Url-1 = {http://ilk.uvt.nl/~swubben/publications/caa-2008.pdf}} @inproceedings{Paliouras+99, Author = {G. Paliouras and V. Karkaletsis and C. D. Spyropoulos}, Booktitle = IJCAI, Date-Modified = {2009-09-06 20:41:25 +0200}, Keywords = {wsd, word sense disambiguation}, Pages = {674--679}, Title = {Learning Rules for Large Vocabulary Word Sense Disambiguation}, Year = 1999} @article{Palmer+05, Author = {M. 
Palmer and D. Gildea and P. Kingsbury}, Date-Added = {2009-12-26 21:22:58 +0100}, Date-Modified = {2009-12-26 21:24:25 +0100}, Journal = {Computational Linguistics}, Keywords = {semantic role labeling, propbank}, Number = {1}, Pages = {71--105}, Title = {The Proposition Bank: An Annotated Corpus of Semantic Roles}, Volume = {31}, Year = {2005}} @article{Palmer+09, Author = {Martha Palmer and Nianwen Xue}, Date-Added = {2009-12-26 20:57:57 +0100}, Date-Modified = {2009-12-26 21:02:30 +0100}, Journal = {{Natural Language Engineering}}, Number = {1}, Pages = {143--172}, Title = {{Adding semantic roles to the Chinese Treebank}}, Volume = {15}, Year = {2009}} @book{Palmer68, Address = {London}, Editor = {F. R. Palmer}, Publisher = {Longmans}, Title = {Selected papers of {J. R. Firth} 1952--1959}, Year = 1969} @incollection{Pantel+01, Author = {P. Pantel and D. Lin}, Booktitle = {AI 2001}, Date-Modified = {2009-11-14 18:58:28 +0100}, Editor = {E. Stroulia and S. Matwin}, Keywords = {term extraction}, Pages = {36--46}, Publisher = {Springer Verlag}, Series = {Lecture Notes in Artificial Intelligence}, Title = {A Statistical Corpus-Based Term Extractor}, Year = 2001} @inproceedings{Papineni02, Address = {Philadelphia, PA}, Author = {K. Papineni and S. Roukos and T. Ward and Wei-Jing Zhu}, Booktitle = {Proceedings of ACL 2002}, Date-Modified = {2009-11-14 19:06:10 +0100}, Keywords = {machine translation, BLEU}, Pages = {311--318}, Title = {{BLEU}: a Method for Automatic Evaluation of Machine Translation}, Year = 2002} @techreport{Parker85, Author = {D. B. Parker}, Institution = {Center for Computational Research in Economics and Management Science, M.I.T.}, Title = {Learning logic}, Year = 1985} @unpublished{Parry+95, Author = {J. J. Parry and C. X. Ling and H. Wang}, Note = {submitted to Machine Learning}, Title = {Deciding weights for IBL algorithms using C4.5}, Year = 1995} @inproceedings{Pasca+01, Author = {M. Pa{\,{s}}ca and S. 
Harabagiu}, Booktitle = {Proceedings of NAACL-01 Workshop on WordNet and Other Lexical Resources}, Pages = {138--143}, Title = {{The Informative Role of WordNet in Open-Domain Question Answering}}, Year = 2001} @incollection{Patterson+85, Author = {K. E. Patterson and J. Morton}, Booktitle = {Surface Dyslexia: Neuropsychological and Cognitive Studies of Phonological Reading}, Editor = {K.E. Patterson and J.C. Marshall and M. Coltheart}, Pages = {15--34}, Publisher = {London: Erlbaum}, Title = {From Orthography to Phonology: An Attempt at an Old Interpretation}, Year = 1985} @incollection{Patterson+87, Author = {K. Patterson and M. Coltheart}, Booktitle = {Attention and Performance XII: The Psychology of Reading}, Editor = {M. Coltheart}, Pages = {421--447}, Publisher = {Hillsdale, NJ: Lawrence Erlbaum Associates}, Title = {Phonological Processes in Reading}, Year = 1987} @article{Paulesu+93, Author = {E.Paulesu and C.D.Frith and R.S.J.Frackowiak}, Journal = {Nature}, Month = {25 March}, Pages = {342--345}, Title = {The Neural correlates of the verbal components of working memory}, Volume = 362, Year = 1993} @inproceedings{Pavlovych+04, Address = {New York, NY, USA}, Author = {Andriy Pavlovych and Wolfgang Stuerzlinger}, Booktitle = {{CHI} '04: {P}roceedings of the {SIGCHI} {C}onference on {H}uman {F}actors in {C}omputing {S}ystems}, Location = {Vienna, Austria}, Pages = {351--358}, Publisher = {ACM}, Title = {{M}odel for {N}on-{E}xpert {T}ext {E}ntry {S}peed on 12-{B}utton {P}hone {K}eypads}, Year = 2004} @book{Pearl88, Address = {San Francisco, CA, USA}, Author = {Pearl, J.}, Date-Added = {2009-11-15 14:09:06 +0100}, Date-Modified = {2009-11-15 14:09:25 +0100}, Publisher = {Morgan Kaufmann}, Title = {Probabilistic Reasoning in Intelligent Systems: Networks of Plausible Inference}, Year = {1988}} @inproceedings{Pedersen+97, Address = {Washington, DC}, Author = {Pedersen, T. and Bruce, R. 
and Wiebe, J.}, Booktitle = {Proceedings of 5th conference on Applied NLP (ANLP-97)}, Date-Modified = {2009-09-06 20:41:33 +0200}, Keywords = {wsd, word sense disambiguation}, Month = apr, Title = {Sequential Model Selection for Word Sense Disambiguation}, Year = 1997} @book{Pereira+87, Author = {Pereira, F. and Shieber, S.}, Publisher = {Stanford : Center for the Study of Language and Information}, Title = {Prolog and natural-language analysis}, Year = 1987} @inproceedings{Pereira+93, Address = {Columbus, OH}, Author = {F. Pereira and N. Tishby and L. Lee}, Booktitle = {Proc. of the 30th Annual Meeting of the ACL}, Pages = {183--190}, Title = {Distributional Clustering of English Words}, Year = 1993} @inproceedings{Pereira92, Author = {Fernando Pereira and Naftali Tishby}, Booktitle = {Working Notes, AAAI Fall Symposium on Probabilistic Approaches to Natural Language}, Pages = {108--112}, Title = {Distributional Similarity, Phase Transitions and Hierarchical Clustering}, Year = 1992} @inproceedings{Pereira92b, Author = {Fernando Pereira and Yves Schabes}, Booktitle = ACL, Title = {Inside-outside reestimation from partially bracketed corpora}, Year = 1992} @inproceedings{Pereira93, Author = {Fernando Pereira and Naftali Tishby and Lillian Lee}, Booktitle = ACL, Pages = {183--190}, Title = {Distributional clustering of {English} words}, Year = 1993} @inproceedings{Peschl90, Author = {Peschl, M.}, Date-Modified = {2008-07-23 16:54:39 +0200}, Journal = {{Connection Science}}, Keywords = {frontal lobes, language, pseudo-words, brain, PET}, Number = {3\&4}, Pages = {327--338}, Title = {{Construction, Representation and the Embodiment of Knowledge, Meaning, and Symbols in Neural Structures: Towards an Alternative Understanding of Knowledge Representation and Philosophy of Science}}, Volume = 4, Year = 1990} @article{Petersen+90, Author = {Petersen, S. and Fox, P. and Snyder, A. 
and Raichle, M.}, Date-Modified = {2008-07-23 16:54:31 +0200}, Journal = {{Science}}, Keywords = {frontal lobes, pseudo-words, brain, PET}, Pages = {1041--1044}, Title = {{Activation of Extrastriate and Frontal Cortical Areas by Visual Words and Word-Like Stimuli}}, Volume = 249, Year = 1990} @book{Piaget68, Author = {J. Piaget}, Publisher = {New York, NY: Harper Torchbooks}, Title = {Structuralism}, Year = 1968} @book{Piatelli80, Editor = {M. {Piatelli--Palmarini}}, Publisher = {Cambridge, MA: Harvard University Press}, Title = {Language learning: The debate between {J}ean {P}iaget and {N}oam {C}homsky}, Year = 1980} @inproceedings{Picchi92, Author = {E. Picchi and C. Peters and E. Marina}, Booktitle = COLING, Pages = {972--976}, Title = {A translator's workstation}, Year = 1992} @book{Pinker84, Address = {Cambridge, MA}, Author = {S. Pinker}, Publisher = {Harvard University Press}, Title = {Language Learnability and Language Development}, Year = 1984} @book{Pinker89, Address = {Cambridge MA}, Author = {S. Pinker}, Publisher = MIT, Title = {Learnability and Cognition: The Acquisition of Argument Structure}, Year = 1989} @article{Pinker94, Author = {Steven Pinker}, Journal = {Lingua}, Pages = {377--410}, Title = {How could a child use verb syntax to learn verb semantics?}, Volume = 92, Year = 1994} @book{Pinker95, Author = {S. Pinker}, Publisher = {London, UK; The Penguin Press}, Title = {The language instinct}, Year = 1995} @inproceedings{Pirelli94, Address = {London}, Author = {V. Pirelli and S. Federici}, Booktitle = {Proceedings of the Second Onomastica Research Colloquium}, Title = {On the pronunciation of unknown words by analogy in text-to-speech systems}, Year = 1994} @article{Plaku+07, Author = {E. Plaku and L. 
Kavraki}, Journal = {Journal of Parallel and Distributed Computing}, Number = 3, Pages = {346--359}, Title = {Distributed Computation of the {$k$-NN} Graph for Large High-Dimensional Point Sets}, Volume = 67, Year = 2007} @article{Platt99, Author = {Platt, J.}, Date-Added = {2009-11-15 14:09:34 +0100}, Date-Modified = {2009-11-15 14:09:50 +0100}, Journal = {Advances in Large Margin Classifiers}, Keywords = {support vector machines}, Number = {3}, Pages = {61--74}, Title = {Probabilistic outputs for support vector machines and comparisons to regularized likelihood methods}, Volume = {10}, Year = {1999}} @inproceedings{Plaut+93, Author = {D. C. Plaut and J. L. McClelland}, Booktitle = {Proceedings of the 15th Annual Conference of the Cognitive Science Society}, Title = {Generalization with Componential Attractors: Word and Nonword Reading in an Attractor Network}, Year = 1993} @article{Plaut+96, Author = {D. C. Plaut and J. L. McClelland and M. S. Seidenberg and K. Patterson}, Journal = {Psychological Review}, Number = 1, Pages = {56--115}, Title = {Understanding normal and impaired word reading: computational principles in quasi-regular domains}, Volume = 103, Year = 1996} @incollection{Poesio95, Address = {CLSI}, Author = {M. Poesio}, Booktitle = {Semantic Ambiguity and Under-specification}, Editor = {K. {Van Deemter} and S. Peters}, Title = {Semantic Ambiguity and Perceived Ambiguity}, Year = 1995} @inproceedings{Pohlmann+97, Author = {R. Pohlmann and W. Kraaij}, Booktitle = {{CLIN VII -- Papers from the Seventh CLIN meeting}}, Editor = {J. Landsbergen and J. Odijk and K. {Van Deemter} and G. {Veldhuijzen van Zanten}}, Pages = {115--128}, Title = {Improving the Precision of a Text Retrieval System with Compound Analysis}, Year = 1997} @article{Pollack90, Author = {J. B. 
Pollack}, Journal = {Artificial Intelligence}, Pages = {77--105}, Title = {Recursive Distributed Representations}, Volume = 46, Year = 1990} @book{Pollard+87, Address = {Stanford}, Author = {Carl Pollard and Ivan A. Sag}, Publisher = {Center for the Study of Language and Information}, Series = {CSLI Lecture Notes}, Title = {Information-Based Syntax and Semantics, Volume 1: Fundamentals}, Volume = 13, Year = 1987} @book{Pollard+94, Address = {Chicago}, Author = {Carl Pollard and Ivan A. Sag}, Date-Added = {2010-02-08 13:07:58 +0100}, Date-Modified = {2010-02-08 13:08:07 +0100}, Publisher = {University of Chicago Press}, Title = {Head-Driven Phrase Structure Grammar}, Year = {1994}} @article{Pollock+82, Acknowledgement = ack-nhfb, Author = {J. J. Pollock and A. Zamora}, Bibdate = {Wed Jan 15 12:19:41 MST 1997}, Bibsource = {Compendex database}, Classification = {901; 723}, Coden = {PAISDQ}, Conference = {Information Interaction, Proceedings of the 45th ASIS Annual Meeting.}, Date-Added = {2010-01-29 15:04:22 +0100}, Date-Modified = {2010-02-17 19:02:19 +0100}, Isbn = {0-86729-038-2}, Issn = {0044-7870}, Journal = j-PROC-ASIS-AM, Journalabr = {Proc ASIS Annu Meet}, Keywords = {information science, automatic spelling, textual databases, spelling correction, misspellings, correction algorithms}, Lccn = {Z699.A1 .A5 1982}, Meetingaddress = {Columbus, OH, USA}, Pages = {236--238}, Sponsor = {ASIS, Washington, DC, USA}, Title = {Automatic Spelling Error Detection and Correction in Textual Databases}, Volume = {19}, Year = {1982}} @article{Pollock+83, Abstract = {The SPEEDCOP (SPElling Error Detection COrrection Project) project recently completed at Chemical Abstracts Service extracted over 50,000 misspellings from approximately 25,000,000 words of text from seven scientific and scholarly databases. The misspellings were automatically classified and the error types analyzed. 
The results, which were consistent over the different databases, showed that the expected incidence of misspelling is 0. 2\%, that 90-95\% of spelling errors have only a single mistake, that substitution is homogeneous while transposition is heterogeneous, that omission is the commonest type of misspelling, and that inadvertent doubling of a letter is the most important cause of insertion errors. The more frequently a letter occurs in the text, the more likely it is to be involved in a spelling error. Most misspellings collected by SPEEDCOP are of the type colloquially referred to as `typos' and approximately 90\% are unlikely to be repeated in normal spans of text. 20 rfs.}, Acknowledgement = ack-nhfb, Author = {J.J. Pollock and A. Zamora}, Bibdate = {Wed Jan 15 12:19:41 MST 1997}, Bibsource = {Compendex database}, Classification = {901; 723}, Coden = {AISJB6}, Date-Added = {2010-01-29 15:03:39 +0100}, Date-Modified = {2010-09-18 14:39:33 +0200}, Issn = {0002-8231}, Journal = {Journal of the American Society for Information Science}, Journalabr = {J Am Soc Inf Sci}, Keywords = {languages; engineering writing; spelling errors; spelling correction}, Month = jan, Number = {1}, Pages = {51--58}, Subject = {I.7.1 Computing Methodologies, TEXT PROCESSING, Text Editing, Spelling}, Title = {Collection and Characterization of Spelling Errors in Scientific and Scholarly Text}, Volume = {34}, Year = {1983}} @article{Pollock+84, Abstract = {The SPEEDCOP project recently completed at Chemical Abstracts Service (CAS) extracted over 50,000 misspellings from approximately 25,000,000 words of text from seven scientific and scholarly databases. The misspellings were automatically classified and analyzed and the results used to design and implement a program that proved capable of correcting most such errors. 
Analysis of the performance of the spelling error detection and correction programs highlighted the features that should be incorporated into a powerful and user-friendly interactive system suitable for nonprogrammers. These include document level thresholds for mispelling detection, automatic reuse of user decisions, and user verification and control of correction. An advantage of the proposed design is that the system automatically customizes itself to its environment. This article is primarily concerned with system design, not implementation details.}, Acknowledgement = ack-nhfb, Author = {J. J. Pollock and A. Zamora}, Bibdate = {Wed Jan 15 12:19:41 MST 1997}, Bibsource = {Compendex database}, Classification = {901; 723}, Coden = {AISJB6}, Date-Added = {2010-01-29 15:03:55 +0100}, Date-Modified = {2010-09-18 14:39:18 +0200}, Issn = {0002-8231}, Journal = j-J-AM-SOC-INF-SCI, Journalabr = {J Am Soc Inf Sci}, Keywords = {spelling correction}, Month = mar, Number = {2}, Pages = {104--109}, Title = {System Design for Detection and Correction of Spelling Errors in Scientific and Scholarly Text}, Volume = {35}, Year = {1984}} @article{Pollock+84b, Address = {New York, NY, USA}, Author = {J. J. Pollock and A. Zamora}, Date-Added = {2010-01-29 15:12:45 +0100}, Date-Modified = {2010-02-12 23:55:07 +0100}, Issn = {0001-0782}, Journal = {Commun. ACM}, Number = {4}, Pages = {358--368}, Publisher = {ACM Press}, Title = {Automatic spelling correction in scientific and scholarly text}, Volume = {27}, Year = {1984}} @incollection{Pols+97, Author = {L. C. W. Pols and U. Jekosch}, Booktitle = {Progress in Speech Processing}, Editor = {J. P. H. {Van Santen} and R. W. Sproat and J. P. Olive and J. Hirschberg}, Pages = {519--527}, Publisher = {Berlin: Springer-Verlag}, Title = {A structured way of looking at the performance of text-to-speech systems}, Year = 1997} @inproceedings{Port+91, Address = {{}}, Author = {R. Port and T. 
{Van Gelder}}, Booktitle = {Proceedings of 13th Conference of the Cognitive Science Society}, Title = {Representing Aspects of Language}, Year = 1991} @inbook{Port92, Address = {Hillsdale New Jersey}, Author = {Port, R.}, Booktitle = {Connectionist Natural Language Processing}, Editor = {Noel Sharkey}, Publisher = {Lawrence Erlbaum Associates}, Title = {Temporal}, Year = 1992} @article{Porter80, Author = {M. F. Porter}, Journal = {Program}, Number = 3, Pages = {130--137}, Title = {An algorithm for suffix stripping}, Volume = 14, Year = 1980} @techreport{Powers+91, Author = {D. Powers and W. Daelemans}, Date-Modified = {2009-11-14 17:42:56 +0100}, Institution = {ITK, Tilburg University}, Number = 10, Title = {{SHOE}: the extraction of hierarchical structure for machine learning of natural language}, Year = 1991} @incollection{Poznanski+??, Author = {Victor Pozna\'{n}ski and Antonio Sanfilippo}, Booktitle = {?}, Chapter = 9, Pages = {175--190}, Publisher = {?}, Title = {Detecting Dependencies between Semantic Verb Subclasses and Subcategorization Frames in Text Corpora}, Year = {?}} @incollection{Pratt93, Address = {San Mateo, CA}, Author = {L. Y. Pratt}, Booktitle = {Advances in Neural Information Processing Systems 5}, Editor = {C. L. Giles and S. J. Hanson and J. D. Cowan}, Pages = {204--211}, Publisher = {Morgan Kaufmann}, Title = {Discriminability-Based Transfer between Neural Networks}, Year = 1993} @incollection{Pratt94, Address = {Cambridge, MA}, Author = {L. Y. Pratt}, Booktitle = {Computational Learning Theory and Natural Learning Systems}, Chapter = 19, Date-Modified = {2010-09-20 00:23:13 +0200}, Editor = {S. J. Hanson and G. A. Drastal and R. L. Rivest}, Pages = {523--560}, Publisher = MIT, Title = {Experiments on the transfer of knowledge between neural networks}, Volume = {1: Constraints and Prospects}, Year = 1994} @techreport{Prechelt94, Author = {L. 
Prechelt}, Institution = {Fakult{\"{a}}t f{\"{u}}r Informatik, Universit{\"{a}}t Karlsruhe, Germany}, Number = {19/94}, Title = {Proben1: A set of neural network benchmark problems and benchmarking rules}, Year = 1994} @techreport{Prechelt94b, Author = {L. Prechelt}, Institution = {Fakult{\"{a}}t f{\"{u}}r Informatik, Universit{\"{a}}t Karlsruhe, Germany}, Number = {24/94}, Title = {A study of experimental evaluations of neural network learning algorithms: current research practice}, Year = 1994} @inproceedings{Preiss02, Author = {J. Preiss}, Booktitle = {Proceedings of the Fifth Annual {CLUK} Research Colloquium}, Pages = {1--9}, Title = {Anaphora resolution with memory-based learning}, Year = 2002} @inproceedings{Prescher+03, Author = {Detlef Prescher and Remko Scha and Andreas Zollmann}, Title = {On the statistical consistency of DOP estimators}, Year = {2003}} @inproceedings{Prosser+06, Author = {Prosser, P. and Unsworth, C.}, Booktitle = {ECAI Workshop on Modelling and Solving Problems with Constraints}, Date-Added = {2009-11-15 14:09:54 +0100}, Date-Modified = {2009-11-15 14:10:08 +0100}, Title = {Rooted Tree and Spanning Tree Constraints}, Year = {2006}} @article{Provost+96, Author = {F. J. Provost and J. M. Aronis}, Journal = {Machine Learning}, Number = 1, Pages = 33, Title = {Scaling up inductive learning with massive parallelism}, Volume = 23, Year = 1996} @inproceedings{Provost+99, Author = {F. Provost and D. Jensen and T. Oates}, Booktitle = {Proceedings of the Fifth International Conference on Knowledge Discovery and Data Mining}, Pages = {23--32}, Title = {Efficient progressive sampling}, Year = 1999} @inproceedings{Puerta+07, Address = {Funchal, Madeira}, Author = {M.-C. Puerta Melguizo and T. Bogers and A. Deshpande and L. Boves and A. 
{Van den Bosch}}, Booktitle = {Proceedings of the 9th International Conference on Enterprise Information Systems (ICEIS 2007)}, Date-Added = {2010-01-03 01:09:43 +0100}, Date-Modified = {2010-09-14 13:06:39 +0200}, Keywords = {ilk, recommender systems, writing support systems, apropos}, Title = {What a proactive recommendation system needs: Relevance, non-intrusiveness, and a new long-term memory}, Year = {2007}, Bdsk-Url-1 = {http://ilk.uvt.nl/~toine/publications/puertamelguizo.2006.iceis2007.paper.pdf}} @inproceedings{Puerta+08, Address = {Marrakech, Morocco}, Author = {M.-C. Puerta Melguizo and O. Munoz Ramos and L. Boves and T. Bogers and A. {Van den Bosch}}, Booktitle = {Proceedings of the LREC 2008 Workshop on Natural Language Processing Resources, Algorithms, and Tools for Authoring Aid}, Date-Added = {2010-01-03 10:41:13 +0100}, Date-Modified = {2010-09-14 13:06:45 +0200}, Keywords = {ilk, writing support systems, recommender systems, apropos}, Title = {A Personalized Recommender System for Writing in the Internet Age}, Year = {2008}} @book{Pullum91, Address = {Chicago, IL}, Author = {Geoffrey K. Pullum}, Date-Added = {2010-02-08 13:08:43 +0100}, Date-Modified = {2010-02-08 13:08:43 +0100}, Publisher = {University of Chicago}, Title = {The Great Eskimo Vocabulary Hoax}, Year = {1991}} @article{Pulvermueller92, Author = {Pulverm\"uller, F.}, Journal = {{Concepts in Neuroscience}}, Number = 2, Pages = {157--200}, Title = {{Constituents of a Neurological Theory of Language}}, Volume = 3, Year = 1992} @article{Pulvermueller94, Author = {Pulverm\"uller, F.}, Journal = {{Kognitionswissenschaft}}, Pages = {17--31}, Title = {{Syntax und Hirnmechanismen, Perspektiven einer multidisziplin\"aren Sprachwissenschaft}}, Volume = 4, Year = 1994} @inproceedings{Punyakanok+01, Author = {V. Punyakanok and D. 
Roth}, Booktitle = {NIPS-13; The 2000 Conference on Advances in Neural Information Processing Systems}, Pages = {995--1001}, Publisher = MIT, Title = {The Use of Classifiers in Sequential Inference}, Year = 2001} @inproceedings{Punyakanok+05, Author = {V. Punyakanok and D. Roth and W. Yih and D. Zimak}, Booktitle = {Proceedings of the International Joint Conference on Artificial Intelligence (IJCAI)}, Date-Added = {2009-11-15 14:10:18 +0100}, Date-Modified = {2009-11-15 14:10:32 +0100}, Pages = {1124--1129}, Title = {Learning and Inference over Constrained Output}, Year = {2005}} @article{Punyakanok+07, Author = {V. Punyakanok and D. Roth and W. Yih}, Date-Added = {2009-11-15 14:10:43 +0100}, Date-Modified = {2009-12-26 21:12:10 +0100}, Journal = {Computational Linguistics}, Number = {2}, Pages = {257--287}, Title = {The Importance of Syntactic Parsing and Inference in Semantic Role Labeling}, Volume = {34}, Year = {2008}} @inproceedings{Qiu93, Author = {Yonggang Qiu}, Booktitle = SIGIR, Pages = {160--169}, Title = {Concept based query expansion}, Year = 1993} @article{Quartz95, Author = {Quartz, S. and Sejnowski, T.}, Journal = {{Brain and Behavioral Sciences}}, Title = {{The Neural Basis of Cognitive Development: A Constructivist Manifesto}}, Year = 1995} @inbook{Quinlan83, Address = {Los Altos, CA}, Author = {J.R. Quinlan}, Booktitle = {Machine Learning: An Artificial Intelligence Approach}, Editor = {R.S. Michalski and J.G. Carbonell and T.M. Mitchell}, Pages = {463--482}, Publisher = {Morgan Kaufmann Publishers}, Title = {Learning Efficient Classification Procedures and Their Application to Chess End-Games}, Year = 1983} @article{Quinlan86, Author = {Quinlan, J.R.}, Date-Modified = {2009-11-14 19:01:47 +0100}, Journal = {{Machine Learning}}, Keywords = {decision trees}, Pages = {81--106}, Title = {Induction of Decision Trees}, Volume = 1, Year = 1986} @article{Quinlan90, Author = {J. R. 
Quinlan}, Journal = {Machine Learning}, Number = 3, Pages = {239--266}, Title = {Learning logical definitions from relations}, Volume = 5, Year = 1990} @article{Quinlan91, Author = {J. R. Quinlan}, Journal = {Machine Learning}, Pages = {93--98}, Title = {Improved estimation for the accuracy of small disjuncts}, Volume = 6, Year = 1991} @book{Quinlan93, Address = {{San Mateo, CA}}, Author = {Quinlan, J.R.}, Date-Modified = {2009-11-14 17:39:46 +0100}, Publisher = {{Morgan Kaufmann}}, Title = {{C4.5}: Programs for Machine Learning}, Year = 1993} @incollection{Quinlan94, Address = {Cambridge, MA}, Author = {J. R. Quinlan}, Booktitle = {Computational Learning Theory and Natural Learning Systems}, Chapter = 15, Editor = {S. J. Hanson and G. A. Drastal and R. L. Rivest}, Pages = {445--456}, Publisher = MIT, Title = {Comparing connectionist and symbolic learning methods}, Volume = {1: Constraints and Prospects}, Year = 1994} @inproceedings{Quirk+05, Author = {Quirk, C. and Menezes, A. and Cherry, C.}, Booktitle = {Proceedings of the 43rd Annual Meeting of the Association for Computational Linguistics (ACL'05)}, Date-Added = {2009-11-15 14:11:21 +0100}, Date-Modified = {2009-11-15 14:11:31 +0100}, Pages = {271--279}, Title = {{Dependency treelet translation: syntactically informed phrasal SMT}}, Year = {2005}} @book{Quirk+85, Address = {London}, Author = {R. Quirk and S. Greenbaum and G. Leech and J. Svartvik}, Publisher = {Longman}, Title = {A comprehensive grammar of the English language}, Year = 1985} @inproceedings{Raaijmakers00, Address = {New Brunswick, NJ}, Author = {S. Raaijmakers}, Booktitle = {Proceedings of the Fourth Conference on Computational Language Learning and the Second Learning Language in Logic Workshop}, Pages = {55--60}, Publisher = {ACL}, Title = {Learning distributed linguistic classes}, Year = 2000} @article{Rabiner89, Author = {L. R. 
Rabiner}, Journal = {Proceedings of the {IEEE}}, Number = 2, Pages = {257--285}, Title = {A tutorial on hidden {M}arkov models and selected applications in speech recognition}, Volume = 77, Year = 1989} @article{Rada+89, Author = {R. Rada and H. Mili and E. Bicknell and M. Blettner}, Journal = {IEEE Transactions on Systems, Man, and Cybernetics}, Number = 1, Pages = {17--30}, Title = {Development and application of a metric on semantic nets}, Volume = 19, Year = 1989} @inproceedings{Radev+00, Author = {Dragomir R. Radev and John Prager and Valerie Samn}, Booktitle = {Proceedings of the 6th ANLP}, Organization = {ACL}, Title = {Ranking suspected answers to natural language questions using predictive annotation}, Year = 2000} @article{Rahm+00, Author = {E. Rahm and H.H. Do}, Journal = {IEEE Data Engineering Bulletin}, Number = 4, Pages = {3--13}, Title = {{Data Cleaning: Problems and Current Approaches}}, Volume = 23, Year = 2000} @article{Ramshaw+94, Author = {Lance A. Ramshaw and Mitchell P. Marcus}, Journal = {cmp-lg/9406011}, Title = {Exploring the Statistical Derivation of Transformational Rule Sequences for Part-of-Speech Tagging}, Year = 1994} @inproceedings{Ramshaw+95, Author = {Ramshaw, L.A. and Marcus, M.P.}, Booktitle = WVLC95, Pages = {82--94}, Title = {Text Chunking using Transformation-Based Learning}, Year = 1995} @incollection{Ramshaw+96, Author = {L. A. Ramshaw and M. P. Marcus}, Booktitle = {The Balancing Act: Combining Symbolic and Statistical Approaches to Language}, Editor = {J. Klavans and P. Resnik}, Publisher = MIT, Title = {Exploring the Nature of Transformation-Based Learning}, Year = 1996} @techreport{Rao+96, Author = {Rao, R.P.N. and D.H. 
Ballard}, Date-Modified = {2010-01-03 10:50:37 +0100}, Institution = {National Institute for the Study of Brain and Behavior, Department of CS, University of Rochester}, Month = {September}, Title = {The Visual Cortex as a Hierarchical Predictor}, Year = 1996} @incollection{Rapp+95, Address = {Bradford}, Author = {Rapp, B.C. and Caramazza, A.}, Booktitle = {The Cognitive Neurosciences}, Chapter = 58, Editor = {Gazzaniga, M.S.}, Pages = {901--913}, Publisher = MIT, Title = {Disorders of Lexical Processing and the Lexicon}, Year = 1995} @inproceedings{Rapp95, Author = {Reinhard Rapp}, Booktitle = {Proceedings of the Annual Meeting of the Association for Computational Linguistics (ACL), In Student Session}, Title = {Identifying word translations in non-parallel texts}, Year = 1995} @article{Ratcliff+94, Author = {Ratcliff, R. and McKoon, G.}, Journal = {{Psychological Review}}, Pages = {177--184}, Title = {{Retrieving Information from Memory: Spreading activation theories versus compound-cue theories}}, Volume = 101, Year = 1994} @inproceedings{Ratnaparkhi+94, Address = {Plainsboro, NJ}, Author = {A. Ratnaparkhi and J. Reynar and S. Roukos}, Booktitle = {Workshop on Human Language Technology}, Month = {March}, Organization = {ARPA}, Title = {A maximum entropy model for Prepositional Phrase Attachment}, Year = 1994} @inproceedings{Ratnaparkhi96, Author = {A. Ratnaparkhi}, Booktitle = {Proceedings of the Conference on Empirical Methods in Natural Language Processing, May 17-18, 1996, University of Pennsylvania}, Date-Modified = {2009-11-14 17:33:04 +0100}, Keywords = {part-of-speech tagging, maximum entropy}, Title = {A Maximum Entropy Part-Of-Speech Tagger}, Year = 1996} @inproceedings{Ratnaparkhi97, Author = {A. 
Ratnaparkhi}, Booktitle = {Proceedings of the Second Conference on Empirical Methods in Natural Language Processing, EMNLP-2, Providence, Rhode Island}, Pages = {1--10}, Title = {A linear observed time statistical parser based on maximum entropy models}, Year = 1997} @phdthesis{Ratnaparkhi98, Author = {A. Ratnaparkhi}, School = {University of Pennsylvania}, Title = {Maximum Entropy Models for Natural Language Ambiguity Resolution}, Year = 1998} @article{Ratnaparkhi99, Author = {Ratnaparkhi, A.}, Journal = {Machine Learning}, Number = {1-3}, Pages = {151-175}, Title = {Learning to Parse Natural Language with Maximum Entropy Models}, Volume = 34, Year = 1999} @inproceedings{Refenes+91, Address = {Amsterdam, The Netherlands}, Author = {A. N. Refenes and S. Vithlani}, Booktitle = {Proceedings of {{\sc icann}}-91, Espoo, Finland}, Date-Modified = {2011-06-21 18:26:52 +0200}, Editor = {T. Kohonen and K. M{\"{a}}kisara and O. Simula and J. Kangas}, Pages = {923--929}, Publisher = {North Holland}, Title = {Constructive Learning by Specialisation}, Year = 1991} @inproceedings{Reffle+08, Address = {New York, NY, USA}, Author = {U. Reffle and A. Gotscharek and C. Ringlstetter and K. Schulz}, Booktitle = {AND '08: Proceedings of the second workshop on Analytics for noisy unstructured text data}, Date-Added = {2010-02-12 22:07:34 +0100}, Date-Modified = {2010-02-12 23:57:00 +0100}, Doi = {http://doi.acm.org/10.1145/1390749.1390754}, Isbn = {978-1-60558-196-5}, Location = {Singapore}, Pages = {17--22}, Publisher = {ACM}, Title = {Successfully detecting and correcting false friends using channel profiles}, Year = {2008}, Bdsk-Url-1 = {http://doi.acm.org/10.1145/1390749.1390754}} @book{Reilley+92, Address = {Hillsdale, NJ}, Editor = {R. G. Reilley and N. E. Sharkey}, Publisher = {Lawrence Erlbaum Associates}, Title = {Connectionist Approaches to Natural Language Processing}, Year = 1992} @inproceedings{Reilley93, Author = {R. G. 
Reilley}, Booktitle = {Proceedings of the 15th Annual Meeting of the Cognitive Science Society, Boulder, CO.}, Title = {Boundary effects in the linguistic representation of simple recurrent networks}, Year = 1993} @article{Reilly95, Author = {R. Reilly}, Date-Modified = {2009-02-21 19:45:09 +0100}, Journal = {{Artificial Intelligence Review}}, Keywords = {language evolution, metaphor, synesthesia, cortical computation, neural networks}, Title = {{Sandy ideas and coloured days: Some computational implications of embodiment}}, Volume = 8, Year = 1995} @inproceedings{Reinberger+04, Author = {M.-L. Reinberger and P. Spyns and A. J. Pretorius and W. Daelemans}, Booktitle = {On the Move to Meaningful Internet Systems 2004: CoopIS, DOA, and ODBASE, OTM Confederated International Conferences}, Pages = {600-617}, Title = {Automatic Initiation of an Ontology}, Year = 2004} @book{Reinhart83, Address = {London}, Author = {T. Reinhart}, Publisher = {Croom Helm}, Title = {Anaphora}, Year = 1983} @inproceedings{Resnik92, Author = {P. Resnik}, Booktitle = {AAAI Workshop on Statistically-based Natural Language Processing Techniques}, Month = {July}, Pages = {54--64}, Place = {San Jose, California}, Title = {WordNet and distributional analysis: A class-based approach to lexical discovery}, Year = 1992} @inproceedings{Resnik93, Author = {P. Resnik}, Booktitle = ARPA, Title = {Semantic classes and syntactic ambiguity}, Year = 1993} @inproceedings{Resnik95, Address = {Cambridge, MA.}, Author = {Resnik, P.}, Booktitle = {Proceedings of Third Workshop on Very Large Corpora}, Date-Modified = {2009-09-06 20:41:47 +0200}, Editor = {Yarowksy, D. and Church, K.}, Keywords = {wsd, word sense disambiguation, wordnet}, Pages = {54--68}, Title = {Disambiguating Noun Groupings with Respect to WordNet Senses}, Year = 1995} @inproceedings{Reynaert+08b, Address = {Marrakech, Morocco}, Author = {M. 
Reynaert}, Booktitle = {Proceedings of the Sixth International Language Resources and Evaluation (LREC'08)}, Date-Added = {2010-01-03 10:39:21 +0100}, Date-Modified = {2010-09-14 12:54:02 +0200}, Keywords = {ilk, spelling correction, evaluation, d-coi}, Title = {All, and only, the errors: More complete and consistent spelling and {OCR}-error correction evaluation}, Year = {2008}, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/477_paper.pdf}} @inproceedings{Reynaert04, Address = {Geneva, Switzerland}, Author = {M. Reynaert}, Booktitle = {Proceedings of the 20th International Conference on Computational Linguistics}, Date-Modified = {2010-09-14 12:54:18 +0200}, Keywords = {ilk, spelling correction, tisc}, Title = {Text-induced spelling correction}, Year = 2004, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/TISC.COLING.NR377.pdf}} @phdthesis{Reynaert05, Author = {M. Reynaert}, Date-Modified = {2010-09-14 12:54:55 +0200}, Keywords = {ilk, tisc, prosit}, School = {Tilburg University}, Title = {Text-induced spelling correction}, Year = 2005} @inproceedings{Reynaert06, Address = {Trento, Italy}, Author = {M. Reynaert}, Booktitle = {Proceedings of the Fifth International Conference on Language Resources and Evaluation, LREC-2006}, Date-Modified = {2010-09-14 12:55:22 +0200}, Keywords = {ilk, spelling correction, linguistic data quality management, d-coi}, Title = {Corpus-induced corpus cleanup}, Year = 2006, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/229_pdf.pdf}} @techreport{Reynaert07, Author = {M. Reynaert}, Date-Modified = {2010-09-14 12:55:11 +0200}, Institution = {ILK Research Group}, Keywords = {ilk, d-coi, tokenization, normalization}, Number = {ILK 07-03}, Title = {Sentence-splitting and tokenization in {D}-{C}oi}, Year = 2007} @inproceedings{Reynaert08, Address = {Berlin, Germany}, Author = {M. 
Reynaert}, Booktitle = {Proceedings of the Computational Linguistics and Intelligent Text Processing 9th International Conference, CICLing 2008}, Date-Added = {2010-01-03 10:30:51 +0100}, Date-Modified = {2010-09-14 12:54:40 +0200}, Editor = {A. Gelbukh}, Keywords = {ilk, OCR, spelling correction, d-coi}, Pages = {617--630}, Publisher = {Springer Verlag}, Title = {Non-interactive OCR post-correction for giga-scale digitization projects}, Volume = {4919}, Year = {2008}, Bdsk-Url-1 = {http://www.gelbukh.com/cicling/2008/FirstPages/Paper8718.pdf}} @inproceedings{Reynaert09, Address = {Barcelona, Spain}, Author = {M. Reynaert}, Booktitle = {Proceedings of The Third Workshop on Analytics for Noisy Unstructured Text Data 2009 (AND-2009)}, Date-Added = {2010-01-02 19:40:56 +0100}, Date-Modified = {2010-11-04 10:54:40 +0100}, Keywords = {ilk, spelling correction, sonar, ticcl, spelling variation}, Pages = {77--84}, Title = {Parallel identification of the spelling variants in corpora}, Year = {2009}, Bdsk-Url-1 = {http://portal.acm.org/ft_gateway.cfm?id=1568310&type=pdf&coll=GUIDE&dl=GUIDE&CFID=45831176&CFTOKEN=42984845}} @book{Rhodes29, Address = {New York}, Author = {Rhodes, Frederick Leland}, Date-Added = {2010-02-08 13:09:09 +0100}, Date-Modified = {2010-02-08 13:09:09 +0100}, Publisher = {Harper and Brothers}, Title = {Beginnings of telephony}, Year = {1929}} @inproceedings{Ricci+98, Address = {Chemnitz, Germany}, Author = {F. Ricci and D. W. Aha}, Booktitle = {Proceedings of the Tenth European Conference on Machine Learning}, Pages = {280--291}, Publisher = {Springer Verlag}, Title = {Error-correcting output codes for local learners}, Year = 1998} @inproceedings{Rich88, Author = {Rich, E. and LuperFoy, S.}, Booktitle = ANLP, Title = {An architecture for anaphora resolution}, Year = 1988} @book{Riesbeck+89, Address = {Northvale, NJ}, Author = {C. Riesbeck and R. 
Schank}, Publisher = {Erlbaum}, Title = {Inside Case-Based Reasoning}, Year = 1989} @inproceedings{Rigau+97, Author = {Rigau, G. and Atserias, J. and Agirre, E.}, Booktitle = {{Proceedings of ACL/EACL '97}}, Date-Modified = {2009-09-06 20:41:59 +0200}, Keywords = {wsd, word sense disambiguation, unsupervised learning}, Pages = {48--55}, Title = {{Combining Unsupervised Lexical Knowledge Methods for Word Sense Disambiguation}}, Year = 1997} @article{Ringlstetter+06, Author = {C. Ringlstetter and K. Schultz and S. Mihov}, Journal = CL, Number = 3, Pages = {295--340}, Title = {Orthographic Errors in Web Pages: Toward Cleaner Web Corpora}, Volume = 32, Year = 2006} @article{Rissanen83, Author = {J. Rissanen}, Journal = {Annals of Statistics}, Pages = {416--431}, Title = {A universal prior for integers and estimation by minimum description length}, Volume = 11, Year = 1983} @techreport{Ristad+96, Author = {E. S. Ristad and P. N. Yianilos}, Date-Modified = {2010-10-06 20:57:47 +0200}, Institution = {Department of Computer Science, Princeton University}, Number = {CS-TR-532-96}, Title = {Learning String Edit Distance}, Year = 1996} @book{Ritchie+92, Address = {Cambridge, MA}, Author = {G. D. Ritchie and G. J. Russell and A. W. Black and S. G. Pulman}, Date-Modified = {2010-09-20 00:23:34 +0200}, Publisher = MIT, Title = {Computational Morphology: practical mechanisms for the {E}nglish lexicon}, Year = 1992} @article{Ritter+89, Author = {Ritter,H and Kohonen, T.}, Journal = {{Biological Cybernetics}}, Title = {{Self-Organizing Semantic Maps}}, Volume = 61, Year = 1989} @article{Ritter90, Author = {Ritter, H.}, Journal = {Psychological Review}, Pages = {128--136}, Title = {Self-Organizing Maps for internal representations}, Volume = 52, Year = 1990} @book{Robins97, Address = {London}, Author = {R. H. 
Robins}, Date-Modified = {2009-11-14 18:57:42 +0100}, Edition = 4, Keywords = {linguistics, historical linguistics}, Publisher = {Longman}, Title = {A short history of linguistics}, Year = 1997} @article{Roche+95, Author = {Roche, E. and Schabes, Y.}, Date-Modified = {2009-11-14 17:36:12 +0100}, Journal = {Computational Linguistics}, Keywords = {part-of-speech tagging}, Number = 2, Pages = {227--253}, Title = {Deterministic Part-of-Speech Tagging with Finite-State Transducers}, Volume = 21, Year = 1995} @book{Roche+97, Address = {Cambridge, MA}, Editor = {Emmanuel Roche and Yves Schabes}, Publisher = MIT, Title = {Finite-State Language Processing}, Year = 1997} @techreport{Roche96, Author = {Emmanuel Roche}, Institution = {Mitsubishi Electric Research Laboratory}, Number = {TR-96-30 v1.0}, Place = {Cambridge, MA}, Title = {Parsing with Finite-State Transducers}, Year = 1996} @techreport{Rodriguez95, Address = {San Diego, Cal.}, Author = {Rodriguez, P.}, Institution = {Centre for Research in Language at UCSD}, Month = {October}, Number = {10-1}, Title = {Representing the Structure of a Simple Context-Free Language in a Recurrent Neural Network: A Dynamical Systems Approach}, Year = 1995} @article{Rogova94, Author = {G. Rogova}, Journal = {Neural Networks}, Pages = {777-781}, Title = {Combining the results of several neural network classifiers}, Volume = 7, Year = 1994} @book{Rohr94, Address = {Bochum}, Author = {H. M. R{\"{o}}hr}, Publisher = {Universit{\"{a}}tsverlag Dr Norbert Brockmeyer}, Series = {Bochum Publications in Evolutionary Cultural Semiotics}, Title = {Writing: Its evolution and relation to speech}, Year = 1994} @article{Romaniuk+93, Author = {S. G. Romaniuk and L. O. 
Hall}, Journal = {Neural Networks}, Pages = {1105--1116}, Title = {Divide and Conquer Neural Networks}, Volume = 6, Year = 1993} @inproceedings{Ron+93, Author = {Dana Ron and Yoram Singer and Naftali Tishby}, Booktitle = {Advances in Neural Information Processing Systems}, Publisher = {Morgan Kaufman}, Title = {The Power of Amnesia}, Volume = 6, Year = 1993} @article{Ron+94, Author = {Dana Ron and Yoram Singer and Naftali Tishby}, Note = {(In preparation)}, Title = {On the Learnability and Usage of Acyclic Probabilistic Automata}, Year = 1994} @article{Ron+95, Author = {Dana Ron and Yoram Singer and Naftali Tishby}, Journal = {Machine Learning}, Note = {()}, Title = {The Power of Amnesia: Learning Probabilistic Automata with Variable Memory Length}, Year = 1995} @article{Rooth95, Author = {Rooth}, Journal = {{Biological Cybernetics}}, Title = {{Self-Organizing Semantic Maps}}, Volume = 61, Year = 1995} @article{Rosch+75, Author = {E. Rosch and C. B. Mervis}, Journal = {Cognitive Psychology}, Pages = {??--??}, Title = {Family resemblances: studies in the internal structure of categories}, Volume = 7, Year = 1975} @mastersthesis{Rosen99, Author = {J. Rosen}, Month = May, School = {UIUC, Department of Computer Science}, Title = {Scaling up Context Sensitive Text Correction}, Year = 1999} @article{Rosenblatt58, Author = {F. Rosenblatt}, Journal = {Psychological Review}, Pages = {368--408}, Title = {The perceptron: A probabilistic model for information storage and organisation in the brain}, Volume = 65, Year = 1958} @article{Rosenfeld96, Author = {R. Rosenfeld}, Journal = {Computer, Speech and Language}, Title = {A Maximum Entropy Approach to Adaptive Statistical Language Modeling}, Volume = 10, Year = 1996} @inproceedings{Rosenfeld97, Author = {R. Rosenfeld}, Booktitle = {IEEE Workshop on Automatic Speech Recognition and Understanding}, Title = {A whole sentence Maximum Entropy Language Model}, Year = 1997} @incollection{Rosenke95, Address = {Dresden}, Author = {K. 
Rosenke}, Booktitle = {Elektronische {S}prachsignalverarbeitung, 6. {K}onferenz}, Editor = {R. Hoffman and R. Ose}, Pages = {159-166}, Publisher = {{I}nstitut f{\"{u}}r {T}echnische {A}kustik}, Title = {Verschiedene neuronale {S}trukturen f{\"{u}}r die {T}ranskription von deutschen {W\"{o}}rtern}, Year = 1995} @article{Rosenzweig99, Author = {Stefan Rosenzweig}, Journal = {Computers and the Humanities}, Pages = {000--999}, Title = {SENSEVAL SCORING???}, Volume = 9999, Year = 1999} @inproceedings{Rotaru+03, Address = {Edmonton, Canada}, Author = {M. Rotaru and D. Litman}, Booktitle = {Proceedings of the Seventh Conference on Computational Natural Language Learning}, Title = {Exceptionality and natural language learning}, Year = 2003} @inproceedings{Roth+01, Author = {D. Roth and G. Kao and X. Li and R. Nagarajan and V. Punyakanok and N. Rizzolo and W. Yih and C. O. Alm and L. G. Moran}, Booktitle = {Proceedings of the 9th Text Retrieval Conference, NIST}, Date-Modified = {2009-11-15 14:12:06 +0100}, Title = {Learning Components for a Question Answering System}, Year = 2002} @inproceedings{Roth+01b, Author = {D. Roth and W. Yih}, Booktitle = IJCAI, Date-Modified = {2009-11-15 14:12:13 +0100}, Pages = {1257--1263}, Title = {Relational Learning via Propositional Algorithms: An Information Extraction Case Study}, Year = 2001} @inproceedings{Roth+02, Author = {D. Roth and W. Yih}, Booktitle = {COLING 2002, The 19th International Conference on Computational Linguistics}, Date-Modified = {2010-05-01 22:50:32 +0200}, Title = {Probabilistic Reasoning for Entity and Relation Recognition}, Year = 2002} @inproceedings{Roth+04, Author = {Roth, D. and W. Yih}, Booktitle = {Proceedings of the Annual Conference on Computational Natural Language Learning (CoNLL)}, Date-Added = {2009-11-15 14:12:44 +0100}, Date-Modified = {2009-11-15 14:12:51 +0100}, Editor = {H.T. Ng and E. 
Riloff}, Pages = {1--8}, Title = {A Linear Programming Formulation for Global Inference in Natural Language Tasks}, Year = {2004}} @inproceedings{Roth+07, Author = {D. Roth and W. Yih}, Booktitle = {Introduction to Statistical Relational Learning}, Date-Added = {2009-11-15 14:12:44 +0100}, Date-Modified = {2009-11-15 14:12:57 +0100}, Editor = {L. Getoor and B. Taskar}, Title = {Global Inference for Entity and Relation Identification via a Linear Programming Formulation}, Year = {2007}} @inproceedings{Roth+98, Address = {Montreal, Canada}, Author = {D. Roth and D. Zelenko}, Booktitle = COLING/ACL98, Date-Modified = {2009-11-14 17:36:42 +0100}, Keywords = {part-of-speech tagging}, Month = {August 10-14}, Pages = {1136--1142}, Title = {Part of speech tagging using a network of linear separators}, Year = 1998} @inproceedings{Roth98, Address = {Menlo Park, CA}, Author = {D. Roth}, Booktitle = {Proceedings of the National Conference on Artificial Intelligence}, Pages = {806--813}, Publisher = {AAAI Press}, Title = {Learning to resolve natural language ambiguities: A unified approach.}, Year = 1998} @inproceedings{Roth99, Author = {D. Roth}, Booktitle = IJCAI, Pages = {898--904}, Title = {Learning in Natural Language}, Year = 1999} @techreport{Roth99b, Author = {D. Roth}, Date-Modified = {2009-11-15 14:11:58 +0100}, Institution = {UIUC Computer Science Department}, Month = May, Number = {UIUCDCS-R-99-2101}, Title = {The {SNoW} Learning Architecture}, Year = 1999} @incollection{Roukos96, Author = {S. Roukos}, Booktitle = {Survey of the state of the art in human language technology}, Editor = {R. A. Cole and J. Mariani and H. Uszkoreit and A. Zaenen and V. Zue}, Publisher = {Center for Spoken Language Understanding}, Title = {Language representation}, Year = 1996} @incollection{Rumelhart+86, Address = {Cambridge, MA}, Author = {D. E. Rumelhart and G. E. Hinton and R. J. 
Williams}, Booktitle = {Parallel Distributed Processing: Explorations in the Microstructure of Cognition}, Editor = {D. E. Rumelhart and J. L. McClelland}, Pages = {318--362}, Publisher = MIT, Title = {Learning internal representations by error propagation}, Volume = {1: Foundations}, Year = 1986} @incollection{Rumelhart+86b, Address = {Cambridge, MA}, Author = {D. E. Rumelhart and D. Zipser}, Booktitle = {Parallel Distributed Processing: Explorations in the Microstructure of Cognition}, Editor = {D. E. Rumelhart and J. L. McClelland}, Pages = {151--193}, Publisher = MIT, Title = {Feature discovery by competitive learning}, Volume = {1: Foundations}, Year = 1986} @incollection{Rumelhart+86c, Address = {Cambridge, MA}, Author = {D. E. Rumelhart and J. L. McClelland}, Booktitle = {Parallel Distributed Processing: Explorations in the Microstructure of Cognition}, Editor = {J. L. McClelland and D. E. Rumelhart}, Pages = {216--271}, Publisher = MIT, Title = {On learning the past tenses of {E}nglish verbs}, Volume = {2: Psychological and Biological Models}, Year = 1986} @book{Rumelhart+86d, Address = {Cambridge, MA}, Editor = {D. E. Rumelhart and J. L. McClelland}, Publisher = MIT, Title = {Parallel Distributed Processing: Explorations in the Microstructure of Cognition}, Volume = {1: Foundations}, Year = 1986} @book{Russell+95, Address = {Englewood Cliffs, NJ}, Author = {S. Russell and P. Norvig}, Date-Added = {2010-02-08 13:09:19 +0100}, Date-Modified = {2010-10-06 20:58:05 +0200}, Edition = {First}, Publisher = {Prentice Hall}, Title = {Artificial Intelligence: A Modern Approach}, Year = {1995}} @book{Sadler89, Author = {Sadler, V.}, Publisher = {Foris Publications}, Title = {Working with analogical semantics: Disambiguation techniques in DLT}, Year = 1989} @article{Safavian+91, Author = {S. R. Safavian and D. A. 
Landgrebe}, Date-Modified = {2009-11-14 19:01:01 +0100}, Journal = {{IEEE} Transactions on Systems, Man, and Cybernetics}, Keywords = {decision trees}, Number = 3, Pages = {660--674}, Title = {A survey of decision tree classifier methodology}, Volume = 21, Year = 1991} @inproceedings{Sagae+05, Address = {Vancouver, Canada}, Author = {K. Sagae and A. Lavie}, Booktitle = {Proceedings of the Ninth International Workshop on Parsing Technologies}, Pages = {125--132}, Title = {A classifier-based parser with linear run-time complexity}, Year = 2005} @inproceedings{Sagae+07, Author = {Sagae, K. and Tsujii, J.}, Booktitle = {Proceedings of the CoNLL Shared Task Session of EMNLP-CoNLL 2007}, Date-Added = {2009-11-15 14:13:07 +0100}, Date-Modified = {2009-11-15 14:13:31 +0100}, Pages = {1044--1050}, Title = {Dependency Parsing and Domain Adaptation with {LR} Models and Parser Ensembles}, Year = {2007}} @book{Sager81, Author = {N. Sager}, Publisher = {Addison-Wesley}, Title = {Natural language information processing: A computer grammar of English and its applications}, Year = 1981} @inproceedings{Salganicoff93, Address = {Amherst, MA}, Author = {M. Salganicoff}, Booktitle = {Proceedings of the Fifth International Conference on Machine Learning}, Pages = {276--283}, Publisher = {Morgan Kaufmann}, Title = {Density-adaptive learning and forgetting}, Year = 1993} @book{Salton83, Author = {Gerard Salton and Michael McGill}, Publisher = {McGrew-Hill Book Company}, Title = {Introduction to Modern Information Retrieval}, Year = 1983} @inproceedings{Salton88, Author = {Gerard Salton}, Booktitle = ACL, Pages = {204--210}, Title = {Syntactic approaches to automatic book indexing}, Year = 1988} @book{Salton89, Author = {Gerard Salton}, Publisher = {Addison-Wesley Publishing Company}, Title = {Automatic Text Processing}, Year = 1989} @book{Salzberg90, Address = {Norwell, MA}, Author = {S. 
Salzberg}, Publisher = {Kluwer Academic Publishers}, Title = {Learning with nested generalised exemplars}, Year = 1990} @article{Salzberg91, Author = {S. Salzberg}, Date-Modified = {2009-11-14 18:55:53 +0100}, Journal = {Machine Learning}, Keywords = {k-NN, instance-based learning, generalized examples}, Pages = {277--309}, Title = {A nearest hyperrectangle learning method}, Volume = 6, Year = 1991} @techreport{Salzberg95, Author = {S. Salzberg}, Institution = {Johns Hopkins University}, Number = {JHU-95/06}, Title = {On comparing classifiers: a critique of current research and methods}, Year = 1995} @article{Salzberg97, Author = {S. Salzberg}, Journal = {Data Mining and Knowledge Discovery}, Number = 3, Title = {On comparing classifiers: Pitfalls to avoid and a recommended approach}, Volume = 1, Year = 1997} @article{Sammon69, Author = {J. W. Sammon}, Journal = {IEEE Transactions on Computers C}, Pages = {401--409}, Title = {A Nonlinear Mapping for Data Structure Analysis}, Volume = 18, Year = 1969} @book{Sampson84, Address = {London}, Author = {G. Sampson}, Publisher = {Hutchinson}, Title = {Writing systems: a linguistic introduction}, Year = 1984} @inproceedings{Samuel+98, Author = {Samuel, Ken and Carberry, Sandra and Vijay-Shanker, K.}, Booktitle = {Proceedings of the 17th International Conference on Computational Linguistics and the 36th Annual Meeting of the Association for Computational Linguistics}, Pages = {1150-1156}, Title = {Dialogue Act Tagging with Transformation-Based Learning}, Year = 1998} @inproceedings{Samuelsson96, Author = {C. Samuelsson}, Booktitle = COLING, Title = {Handling sparse data by successive abstraction}, Year = 1996} @article{Sandra+01, Author = {D. Sandra and F. Daems and S. Frisson}, Date-Modified = {2010-09-20 00:20:41 +0200}, Journal = {Tijdschrift van de Vereniging voor het Onderwijs in het Nederlands}, Number = 3, Pages = {3--20}, Title = {Zo helder en toch zoveel fouten! 
Wat leren we uit psycholingu\"{\i}stisch onderzoek naar werk\-woord\-fouten bij ervaren spellers?}, Volume = 30, Year = 2001} @inproceedings{Sang+00, Author = {E. {Tjong Kim Sang} and S. Buchholz}, Booktitle = {Proceedings of CoNLL-2000 and LLL-2000}, Date-Modified = {2009-11-25 23:56:34 +0100}, Keywords = {CoNLL shared task, chunking}, Pages = {127--132}, Title = {Introduction to the {CoNLL}-2000 Shared Task: Chunking}, Year = 2000} @inproceedings{Sang+03, Archive = {Iris, web}, Author = {E. {Tjong Kim Sang} and F. {De Meulder}}, Booktitle = {Proceedings of CoNLL-2003}, Date-Modified = {2009-11-25 23:56:55 +0100}, Editor = {W. Daelemans and M. Osborne}, Keywords = {CoNLL shared task, named-entity recognition}, Pages = {142--147}, Publisher = {Edmonton, Canada}, Source = {Iris}, Title = {Introduction to the {CoNLL}-2003 Shared Task: Language-Independent Named Entity Recognition}, Year = 2003} @article{Sang+03b, Author = {E. {Tjong Kim Sang} and W. Daelemans and A. H\"othker}, Date-Added = {2009-11-15 14:35:13 +0100}, Date-Modified = {2010-09-14 22:15:59 +0200}, Journal = {Proceedings of CLIN}, Keywords = {clips, subtitling}, Pages = {109--123}, Title = {Reduction of Dutch Sentences for Automatic Subtitling}, Year = {2003}} @inproceedings{Sang+05, Author = {E. {Tjong Kim Sang} and S. Canisius and A. {Van den Bosch} and T. Bogers}, Booktitle = {Proceedings of the Ninth Conference on Natural Language Learning,CoNLL-2005}, Date-Modified = {2010-09-14 12:59:33 +0200}, Keywords = {ilk, semantic role labeling, imix, vi}, Pages = {229--232}, Publisher = {Ann Arbor, MI, USA}, Title = {Applying spelling error techniques for improving semantic role labelling}, Year = 2005} @inproceedings{Sang+05b, Author = {E. {Tjong Kim Sang} and G. Bouma and M. 
{De Rijke}}, Booktitle = {Proceedings of the AAAI-05 Workshop on Question Answering in Restricted Domains}, Date-Modified = {2010-09-28 10:50:01 +0200}, Pages = {41--45}, Publisher = {Pittsburgh, PA, USA}, Title = {Developing Offline Strategies for Answering Medical Questions}, Year = 2005} @inproceedings{Sang+99, Author = {E. {Tjong Kim Sang} and J. Veenstra}, Booktitle = {Proceedings of EACL'99}, Date-Modified = {2010-02-17 20:30:41 +0100}, Keywords = {ilk, shallow parsing, chunking}, Pages = {173--179}, Publisher = {Bergen, Norway}, Title = {Representing Text Chunks}, Year = 1999} @inproceedings{Sang01, Author = {E. {Tjong Kim Sang}}, Booktitle = {Proceedings of CoNLL-2001}, Date-Modified = {2010-02-17 20:32:14 +0100}, Editors = {W. Daelemans and R. Zajac}, Keywords = {shallow parsing, clause identification}, Pages = {67--69}, Publisher = {Toulouse, France}, Title = {Memory-Based Clause Identification}, Year = 2001} @article{Sang02, Author = {E. {Tjong Kim Sang}}, Date-Modified = {2010-11-26 19:39:39 +0100}, Issue = {Special Issue on Machine Learning Approaches to Shallow Parsing}, Journal = {Journal of Machine Learning Research}, Keywords = {shallow parsing}, Pages = {559--594}, Title = {Memory-based shallow parsing}, Volume = 2, Year = 2002} @inproceedings{Sang02b, Author = {E. {Tjong Kim Sang}}, Booktitle = {Proceedings of CoNLL-2002}, Date-Modified = {2010-02-17 20:31:48 +0100}, Editors = {D. Roth and A. {Van den Bosch}}, Keywords = {shared task, named-entity recognition}, Pages = {155--158}, Publisher = {Taipei, Taiwan}, Title = {Introduction to the CoNLL-2002 Shared Task: Language-Independent Named Entity Recognition}, Year = 2002} @inproceedings{Santamaria+94, Author = {J. C. Santamaria and A. Ram}, Booktitle = {Case-based reasoning: Papers from the 1994 Workshop (Technical Report WS-94-01)}, Editor = {D. W. 
Aha}, Publisher = {Menlo Park, CA: AAAI Press}, Title = {Systematic evaluation of design decisions in {CBR} systems}, Year = 1994} @inproceedings{Sarawag+05, Author = {Sarawagi, S. and Cohen, W.W.}, Booktitle = {Advances in Neural Information Processing Systems}, Date-Added = {2009-11-15 14:13:36 +0100}, Date-Modified = {2009-11-15 14:13:48 +0100}, Pages = {1185--1192}, Title = {Semi-markov conditional random fields for information extraction}, Volume = {17}, Year = {2005}} @misc{Sarle97, Author = {W. Sarle}, Howpublished = {URL: {{\tt ftp://ftp.sas.com/pub/neural/FAQ.html}}}, Title = {Neural Networks Frequently Asked Questions}, Year = 1997} @inproceedings{Sato+90, Author = {Sato, S. and Nagao, M.}, Booktitle = {Proceedings of the 13th conference on Computational linguistics}, Date-Added = {2009-11-15 14:13:54 +0100}, Date-Modified = {2009-11-15 14:14:07 +0100}, Pages = {247--252}, Title = {Toward memory-based translation}, Volume = {3}, Year = {1990}} @inproceedings{Saunders+98, Author = {Saunders, C. and Gammerman, A. and Vovk, V.}, Booktitle = {Proceedings of the Fifteenth International Conference on Machine Learning (ICML 1998)}, Date-Added = {2009-11-15 14:14:12 +0100}, Date-Modified = {2009-11-15 14:14:29 +0100}, Editor = {J.W. Shavlik}, Pages = {515--521}, Title = {Ridge regression learning algorithm in dual variables}, Year = {1998}} @book{Saussure16, Address = {Paris}, Author = {F. {De Saussure}}, Date-Modified = {2010-02-17 20:32:04 +0100}, Keywords = {linguistics}, Note = {Edited posthumously by C. Bally, A. Sechehaye, and A. Riedlinger. Citation page numbers and quotes are from the English translation by Wade Baskin, New York: McGraw-Hill Book Company, 1966}, Publisher = {Payot}, Title = {Cours de linguistique g{\'{e}}n{\'{e}}rale}, Year = 1916} @inproceedings{Sawaf+00, Author = {Sawaf, H. and Schutz, K. 
and Ney, H.}, Booktitle = {6th International Workshop on Parsing Technologies}, Date-Added = {2009-11-15 14:14:34 +0100}, Date-Modified = {2009-11-15 14:14:44 +0100}, Pages = {231--241}, Title = {On the use of grammar based language models for statistical machine translation}, Year = {2000}} @article{Scha+99, Author = {Remko Scha and Rens Bod and Khalil Sima'an}, Date-Modified = {2010-02-17 21:03:13 +0100}, Journal = {Journal of Experimental and Theoretical Artificial Intelligence}, Keywords = {data-oriented parsing}, Pages = {409--440}, Title = {A memory-based model of syntactic analysis: data-oriented parsing}, Volume = 11, Year = 1999} @article{Scha92, Author = {R. Scha}, Journal = {{Gramma/TTT Tijdschrift voor Taalkunde}}, Pages = {57--77}, Title = {{Virtual Grammars and Creative Algorithms}}, Volume = 1, Year = 1992} @article{Schaal+94, Author = {S. Schaal and C. Atkeson}, Journal = {Control Systems Magazine}, Number = 1, Pages = {57--71}, Title = {Robot juggling: An implementation of memory-based learning}, Volume = 14, Year = 1994} @inproceedings{Schaback+07, Address = {Hyderabad, India}, Author = {J. Schaback and F. Li}, Booktitle = {IJCAI-2007 Workshop on Analytics for Noisy Unstructured Text Data}, Date-Added = {2010-01-29 15:13:14 +0100}, Date-Modified = {2010-02-14 23:14:06 +0100}, Pages = {79--86}, Title = {Multi-Level Feature Extraction for Spelling Correction}, Year = {2007}} @inproceedings{Schabes92, Author = {Yves Schabes}, Booktitle = COLING, Title = {Stochastic lexicalized tree-adjoining grammars}, Year = 1992} @article{Schaffer93, Author = {C. Schaffer}, Journal = {Machine Learning}, Number = 2, Pages = {153--178}, Title = {Overfitting avoidance as bias}, Volume = 10, Year = 1993} @inproceedings{Schaffer94, Author = {C. Schaffer}, Booktitle = {Proceedings of the Eleventh International Machine Learning Conference}, Editor = {W. W. Cohen and H. 
Hirsch}, Organization = {Rutgers University, New Brunswick, NJ}, Pages = {259--265}, Title = {A conservation law for generalization performance}, Year = 1994} @book{Schank+77, Address = {Hillsdale, NJ}, Author = {R. C. Schank and R. Abelson}, Publisher = {Lawrence Erlbaum Associates}, Title = {Script, Plans, Goals and Understanding: An Inquiry into Human Knowledge Structures}, Year = 1977} @inproceedings{Schapire+98a, Author = {R. E. Schapire and Y. Singer and A. Singhal}, Booktitle = {{Proceedings of the 21st Annual International Conference on Research and Development in Information Retrieval, SIGIR '98}}, Title = {{Boosting and Rocchio applied to text filtering}}, Year = 1998} @techreport{Schapire+98b, Author = {R. E. Schapire and Y. Singer}, Institution = {{To appear in Machine Learning, AT\&T Labs}}, Title = {{BoosTexter: A system for multiclass multi-label text categorization}}, Year = 1998} @article{Schatz97, Author = {Schatz, R. B.}, Journal = {Science}, Month = {Jan}, Pages = {273--444}, Title = {Information Retrieval in Digital Libraries: Bringing Search to the Net}, Volume = 275, Year = 1997} @inproceedings{Shaw+99, Address = {College Park, Maryland, USA}, Author = {J. Shaw and V. Hatzivassiloglou}, Booktitle = {Proceedings of the 37th Annual Meeting of the Association for Computational Linguistics}, Doi = {10.3115/1034678.1034707}, Month = {June}, Pages = {135--143}, Publisher = {Association for Computational Linguistics}, Title = {Ordering Among Premodifiers}, Url = {http://www.aclweb.org/anthology/P99-1018}, Year = {1999}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/P99-1018}, Bdsk-Url-2 = {http://dx.doi.org/10.3115/1034678.1034707}} @incollection{Scheerer86, Address = {Berlin}, Author = {E. Scheerer}, Booktitle = {New trends in graphemics and orthography}, Editor = {G. Augst}, Pages = {262--286}, Publisher = {De Gruyter}, Title = {Orthography and lexical access}, Year = 1986} @incollection{Schmid94, Author = {H. 
Schmid}, Booktitle = {COLING-94}, Date-Modified = {2009-11-14 17:35:45 +0100}, Keywords = {part-of-speech tagging}, Pages = {172-176}, Title = {Part-of-Speech Tagging with Neural Networks}, Year = 1994} @inproceedings{Schmid94b, Author = {Schmid, H.}, Booktitle = {Proceedings of International Conference on New Methods in Language Processing}, Date-Modified = {2009-11-14 17:33:47 +0100}, Keywords = {part-of-speech tagging, decision trees}, Title = {Probabilistic Part-of-Speech Tagging Using Decision Trees}, Year = 1994} @article{Schneider01, Abstract = {This paper describes the ongoing development of a software spelling normalization system named ZENSPELL. It is intended to assign normalized, present-day English spellings to 18th spelling variants with minimal user intervention while keeping the source text intact and available for comparison. The article examines the possibility of adapting 18th century English newspaper texts in order to make them comply with 20th century spelling rules. The idea is to create a hybrid text: like glossed word-for-word 'translations' of Latin texts, the target text will contain 18th century sentences, but with 20th century orthographic words. Despite somewhat doubtful linguistic qualities, the resulting 'artificial' text will be useful for two purposes: first, lexical searches can be made using one normalized search term instead of having to guess possible spelling variations of the intended term. Second, the target text can be used as input for wordclass taggers such as ENGCG.}, Author = {P. Schneider}, Date-Added = {2010-01-29 15:13:27 +0100}, Date-Modified = {2013-08-07 09:25:44 +0000}, Journal = {Language and Computers}, Pages = {199-211(13)}, Title = {Computer assisted spelling normalization of 18th century English}, Volume = {36}, Year = {20 November 2001}} @book{Scholkopf+02, Address = {Cambridge, MA, USA}, Author = {Sch\"olkopf, B. 
and Smola, A.J.}, Date-Added = {2009-11-15 14:14:54 +0100}, Date-Modified = {2009-11-15 14:15:10 +0100}, Publisher = {MIT Press}, Title = {Learning with kernels}, Year = {2002}} @inproceedings{Scholkopf+95, Address = {Menlo Park}, Author = {B. Sch\"olkopf and C. Burges and V. Vapnik}, Booktitle = {Proceedings of the First International Conference on Knowledge Discovery and Data Mining}, Editor = {U. M. Fayyad and R. Uthurusamy}, Publisher = {AAAI Press}, Title = {Extracting support data for a given task}, Year = 1995} @phdthesis{Scholtes93, Author = {J. Scholtes}, School = {University of Amsterdam}, Title = {Neural networks in {NLP} and {IR}}, Year = 1993} @misc{schone+01, Author = {P. Schone and D. Jurafsky}, Text = {P. Schone and D. Jurafsky. 2001. Knowledge-free induction of inflectional morphologies. In Proc. NAACL-01.}, Title = {Knowledge-free induction of inflectional morphologies}, Year = {2001}} @book{Schrooten+94, Author = {Schrooten, W. and Vermeer, A.}, Publisher = {TUP(Studies in meertaligheid 6)}, Title = {Woorden in het basisonderwijs. 15.000 woorden aangeboden aan leerlingen}, Year = 1994} @inproceedings{Schuetze+93, Address = {Oxford, England}, Author = {H. Sch\"{u}tze and J. Pedersen}, Booktitle = {Proceedings of the 9th Annual Conference of the University of Waterloo Centre for the New OED and Text Research}, Comment = {hinjan.synpar.ps.Z}, Date-Modified = {2009-11-15 14:17:43 +0100}, Title = {A Vector Model for syntagmatic and paradigmatic relatedness}, Year = 1993} @inproceedings{Schuetze+94, Address = {Rockefeller University}, Author = {H. Sch\"{u}tze and J. O. Pedersen}, Booktitle = {Proceedings of RIAO 94}, Date-Modified = {2009-11-15 14:18:09 +0100}, Title = {A Co-occurrence-Based Thesaurus and Two Applications to Information Retrieval}, Year = 1994} @inproceedings{Schuetze+94c, Author = {H. Sch\"{u}tze and Y. 
Singer}, Booktitle = {Proceedings of the 32nd Annual Meeting of the Association for Computational Linguistics}, Comment = {vartagging.ps.Z}, Date-Modified = {2009-11-15 14:18:59 +0100}, Keywords = {part-of-speech tagging}, Title = {Part-of-speech tagging using a variable memory markov model}, Year = 1994} @inproceedings{Schuetze+95, Author = {H. Sch\"{u}tze and D. A. Hull and J. O. Pedersen}, Booktitle = {Proceedings of SIGIR 95}, Date-Modified = {2009-11-15 14:17:34 +0100}, Title = {A Comparison of Classifiers and Document Representations for the Routing Problem}, Year = 1995} @inproceedings{Schuetze+95b, Address = {Las Vegas NV}, Author = {H. Sch\"{u}tze and J. O. Pedersen}, Booktitle = {Fourth Annual Symposium on Document Analysis and Information Retrieval}, Date-Modified = {2009-11-15 14:18:41 +0100}, Keywords = {wsd, word sense disambiguation, information retrieval, ir}, Title = {Information Retrieval Based on Word Senses}, Year = 1995} @inproceedings{Schuetze+95c, Address = {Washington DC}, Author = {H. Sch\"{u}tze and J. O. Pedersen and M. A. Hearst}, Booktitle = {The Third Text REtrieval Conference (TREC-3)}, Date-Modified = {2009-11-15 14:18:35 +0100}, Editor = {D.K. Harman}, Publisher = {U.S. Department of Commerce}, Title = {Xerox {TREC} 3 Report: {C}ombining Exact and Fuzzy Predictors}, Year = 1995} @incollection{Schuetze92, Address = {Amsterdam, The Netherlands}, Author = {H. Sch\"{u}tze}, Booktitle = {The Reality of Linguistic Rules}, Date-Modified = {2008-07-23 17:16:01 +0200}, Editor = {Roberta Corrigan and Gregory K. Iverson and Susan D. Lima}, Publisher = {John Benjamins}, Title = {Towards connectionist lexical semantics}, Year = 1992} @inproceedings{Schuetze92b, Author = {H. Sch\"{u}tze}, Booktitle = {Proceedings of Supercomputing}, Date-Modified = {2011-02-02 08:04:36 +0100}, Pages = {787-796}, Title = {Dimensions of meaning}, Year = 1992} @inproceedings{Schuetze92c, Author = {H. 
Sch\"{u}tze}, Booktitle = {Working Notes, AAAI Fall Symposium on Probabilistic Approaches to Natural Language}, Date-Modified = {2011-02-02 08:04:23 +0100}, Title = {Context space}, Year = 1992} @inproceedings{Schuetze93, Author = {H. Sch\"{u}tze}, Date-Modified = {2011-02-02 08:04:10 +0100}, Organization = {IEEE}, Pages = {1504--1509}, Title = {Distributed Syntactic Representations with an Application to Part-of-Speech Tagging}, Year = 1993} @inproceedings{Schuetze93b, Address = {Stanford CA}, Author = {H. Sch\"{u}tze}, Booktitle = {Working Notes of the AAAI Spring Symposium on Building Lexicons for Machine Translation}, Date-Modified = {2009-11-15 14:16:51 +0100}, Editor = {Bonnie Dorr}, Organization = {AAAI Press}, Title = {Translation by Confusion}, Year = 1993} @incollection{Schuetze93c, Address = {San Mateo CA}, Author = {H. Sch\"{u}tze}, Booktitle = {Advances in Neural Information Processing Systems 5}, Date-Modified = {2009-11-15 14:16:14 +0100}, Editor = {S. Jos\'{e} H. and J. D. Cowan and C. L. Giles}, Pages = {895--902}, Publisher = {Morgan Kaufmann Publishers}, Title = {Word Space}, Year = 1993} @inproceedings{Schuetze93d, Author = {H. Sch\"{u}tze}, Booktitle = ACL, Date-Modified = {2011-02-02 08:03:55 +0100}, Pages = {251--258}, Title = {Part-of-speech induction from scratch}, Year = 1993} @inproceedings{Schuetze93e, Author = {H. Sch\"{u}tze}, Booktitle = {Proceedings of the IEEE International Conference on Neural Networks}, Comment = {schuetze.tagging.ps.Z}, Date-Modified = {2009-11-15 14:17:05 +0100}, Title = {Distributed Syntactic Representations with an Application to Part-of-Speech Tagging}, Year = 1993} @inproceedings{Schuetze94, Address = {Georgia Tech}, Author = {H. 
Sch\"{u}tze}, Booktitle = {Proceedings of the Sixteenth Annual Conference of the Cognitive Science Society}, Comment = {verbsubcat.ps.Z.}, Date-Modified = {2009-11-15 14:17:15 +0100}, Title = {A Connectionist Model of Verb Subcategorization}, Year = 1994} @inproceedings{Schuetze94b, Address = {{Dublin, Ireland}}, Author = {Sch\"utze, H.}, Booktitle = {Proceedings of 7th Conference of the European Chapter of the Association for Computational Linguistics}, Date-Modified = {2009-11-15 14:17:19 +0100}, Title = {Distributional Part-of-Speech Tagging}, Year = 1994} @phdthesis{Schuetze95, Author = {Hinrich Sch\"{u}tze}, Month = {July}, School = {Stanford University}, Title = {Ambiguity in Language Learning: Computational and Cognitive Models}, Year = 1995} @book{Schuetze97, Address = {Stanford}, Author = {Hinrich Sch\"utze}, Publisher = {CSLI Publications}, Title = {Ambiguity Resolution in Language Learning: Computational and Cognitive Models}, Year = 1997} @inproceedings{Scott+00, Author = {S. Scott and R. Gaizauskas}, Booktitle = {Proceedings of TREC-9}, Organization = {NIST}, Title = {University of Sheffield TREC-9 Q\&A System}, Year = 2001} @incollection{Sebag+94, Author = {M. Sebag and M. Schoenauer}, Booktitle = {Topics in case-based reasoning}, Date-Modified = {2009-11-14 18:57:27 +0100}, Editor = {S. Wess and K.-D. Althoff and M. M. Richter}, Keywords = {rule learning, instance-based learning}, Pages = {119--130}, Publisher = {Springer Verlag}, Title = {A rule-based similarity measure}, Year = 1994} @incollection{Segui+91, Address = {Cambridge, MA}, Author = {J. Segui and E. Dupoux and J. Mehler}, Booktitle = {Cognitive Models of Speech Processing}, Editor = {G. T. M. Altmann}, Pages = {263--280}, Publisher = {Bradford Books}, Title = {The Role of the Syllable in Speech Segmentation, Phoneme Identification, and Lexical Access}, Year = 1991} @incollection{Segui+92, Address = {Amsterdam, The Netherlands}, Author = {J. Segui and J. 
Grainger}, Booktitle = {Analytic Approaches to Human Cognition}, Date-Modified = {2011-06-21 18:27:08 +0200}, Editor = {D. Holender and J. Jun\c{c}a de Morais and M. Radeau}, Publisher = {Elsevier Science Publishers}, Title = {Neighborhood frequency and stimulus frequency effects: Two different but related phenomena?}, Year = 1992} @article{Seidenberg+84, Author = {M. S. Seidenberg and G. S. Waters and M. A. Barnes and M. K. Tanenhaus}, Date-Modified = {2009-11-14 18:52:11 +0100}, Journal = {Journal of Verbal Learning and Verbal Behaviour}, Keywords = {spelling, pronunciation, writing systems}, Pages = {283--404}, Title = {When Does Irregular Spelling or Pronunciation Influence Word Recognition?}, Volume = 23, Year = 1984} @article{Seidenberg+89, Author = {M. S. Seidenberg and J. L. McClelland}, Journal = {Psychological Review}, Pages = {523-568}, Title = {A Distributed Developmental Model of Word Recognition and Naming}, Volume = 96, Year = 1989} @article{Seidenberg85, Author = {M. S. Seidenberg}, Journal = {Cognition}, Pages = {1--30}, Title = {The Time Course of Phonological Code Activation in Two Writing Systems}, Volume = 19, Year = 1985} @techreport{Sejnowski+86, Author = {T. J. Sejnowski and C. Rosenberg}, Institution = {Johns Hopkins University}, Number = {JHU EECS 86-01}, Title = {{NET}talk:{A} Parallel Network that Learns to Read Aloud}, Year = 1986} @article{Sejnowski+87, Author = {Sejnowski, T.J. and Rosenberg, C.S.}, Journal = {Complex Systems}, Pages = {145--168}, Title = {Parallel networks that learn to pronounce {E}nglish text}, Volume = 1, Year = 1987} @misc{Sekimizu+98, Author = {T. Sekimizu and H. Park and J. Tsujii}, Booktitle = {Genome Informatics. 
Universal Academy Press, Inc.}, Title = {Identifying the interaction between genes and gene products based on frequently seen verbs in medline abstracts}, Year = 1998} @phdthesis{Sekine98, Author = {Satoshi Sekine}, School = {New York University}, Title = {Corpus-Based Parsing and Sublanguage Studies}, Year = 1998} @incollection{Selkirk84, Address = {Cambridge, MA}, Author = {E. O. Selkirk}, Booktitle = {Language Sound Structure}, Editor = {M. Aronoff and R. T. Oehrle}, Pages = {107--136}, Publisher = MIT, Title = {On the major class features and syllable theory}, Year = 1984} @book{Senseval1, Date-Modified = {2009-09-06 20:42:44 +0200}, Editor = {A. Kilgarriff and M. Palmer}, Keywords = {wsd, word sense disambiguation}, Number = {1-2}, Title = {{Computers and the Humanities} special issue based on Senseval-1}, Volume = 34, Year = 1999} @book{Senseval2, Date-Modified = {2009-09-06 20:42:50 +0200}, Editor = {P. Edmonds and A. Kilgarriff}, Keywords = {wsd, word sense disambiguation}, Number = 1, Publisher = CUP, Title = {{Journal of Natural Language Engineering} special issue based on Senseval-2}, Volume = 9, Year = 2003} @article{Servan-Schreiber+91, Author = {D. Servan-Schreiber and A. Cleeremans and J. L. McClelland}, Journal = {Machine Learning}, Pages = {161--193}, Title = {Graded State Machines: The Representation of Temporal Contingencies in Simple Recurrent Networks}, Volume = 7, Year = 1991} @inproceedings{Sha+03, Author = {Sha, F. and Pereira, F.}, Booktitle = {Proceedings of the 2003 Conference of the North American Chapter of the Association for Computational Linguistics on Human Language Technology}, Date-Added = {2009-11-15 14:19:06 +0100}, Date-Modified = {2009-11-15 14:19:18 +0100}, Pages = {134--141}, Title = {Shallow parsing with conditional random fields}, Year = {2003}} @article{Shannon48, Author = {C. E. 
Shannon}, Date-Added = {2009-11-15 14:19:26 +0100}, Date-Modified = {2009-11-15 14:19:26 +0100}, Journal = {Bell Systems Technical Journal}, Pages = {623-656}, Title = {A Mathematical Theory of Communication}, Volume = {27}, Year = {1948}} @inproceedings{Sharkey+92, Address = {Enschede}, Author = {N. E. Sharkey and A. J. C. Sharkey}, Booktitle = {TWLT3: Connectionism and Natural Language Processing}, Editor = {M. F. J. Drossaers and A. Nijholt}, Pages = {87--102}, Publisher = {Twente University}, Title = {A Modular Design for Connectionist Parsing}, Year = 1992} @article{Sharkey+93, Author = {N. E. Sharkey and A. J. C. Sharkey}, Journal = {Artificial Intelligence Review}, Pages = {313--328}, Title = {Adaptive Generalisation}, Volume = 7, Year = 1993} @book{Sharkey92, Address = {{New York}}, Author = {N. Sharkey}, Date-Modified = {2008-07-23 17:17:31 +0200}, Keywords = {connectionism}, Publisher = {{Weather Hill}}, Title = {{Connectionist Natural Language Processing}}, Year = 1992} @article{Sharkey97, Author = {A. J. C. Sharkey}, Journal = {Connection Science, Special Issue on Combining Artificial Neural Nets: Modular Approaches}, Number = 1, Pages = {3--10}, Title = {Modularity, combining and artificial neural nets}, Volume = 9, Year = 1997} @article{Shavlik+91, Author = {J. W. Shavlik and R. J. Mooney and G. G. Towell}, Journal = {Machine Learning}, Pages = {111--143}, Title = {An experimental comparison of symbolic and connectionist learning algorithms}, Volume = 6, Year = 1991} @book{Shavlik90, Address = {San Mateo, CA}, Editor = {J. W. Shavlik and T. G. 
Dietterich}, Publisher = {Morgan Kaufmann}, Title = {Readings in {M}achine {L}earning}, Year = 1990} @inproceedings{Shemtov93, Author = {Hadar Shemtov}, Booktitle = {EACL}, Title = {Text alignment in a tool for translating revised documents}, Year = 1993} @article{Shepard87, Author = {Shepard, R.N.}, Journal = {{Science}}, Pages = {1317--1323}, Title = {{Toward a universal law of generalization for psychological science}}, Volume = 237, Year = 1987} @incollection{Shillcock90, Address = {Cambridge, MA.}, Author = {R.C. Shillcock}, Booktitle = {Cognitive models of speech processing: psycholinguistic and computational perspectives}, Editor = {G.T. Altmann}, Publisher = MIT, Title = {Lexical hypotheses in continuous speech}, Year = 1990} @inproceedings{Shimohata+97, Author = {Sayori Shimohata and Toshiyuki Sugio and Junji Nagata}, Booktitle = EACL/ACL97, Pages = {476--481}, Title = {Retrieving Collocations by Co-occurrences and Word Order Constraints}, Year = 1997} @incollection{Shultz+94, Address = {Cambridge, MA}, Author = {T. R. Shultz and D. Buckingham and Y. Oshima-Takane}, Booktitle = {Computational learning theory and natural learning systems, Vol. 2: Intersection between theory and experiment}, Editor = {S. J. Hanson and T. Petsche and M. Kearns and R. L. Rivest}, Pages = {347--362}, Publisher = MIT, Title = {A connectionist model of the learning of personal pronouns in {E}nglish}, Year = 1994} @inproceedings{Shutze92min, Author = {H. Sch\"{u}tze}, Booktitle = {Proceedings of Supercomputing '92}, Comment = {schuetze.meaning.ps.Z}, Title = {Dimensions of Meaning}, Year = 1992} @inproceedings{Silfverberg+00, Address = {New York, NY, USA}, Author = {Miika Silfverberg and I. 
Scott MacKenzie and Panu Korhonen}, Booktitle = {{CHI} '00: {P}roceedings of the {SIGCHI} {C}onference on {H}uman {F}actors in {C}omputing {S}ystems}, Location = {The Hague, The Netherlands}, Pages = {9--16}, Publisher = {ACM Press}, Title = {{P}redicting {T}ext {E}ntry {S}peed on {M}obile {P}hones}, Year = 2000} @inproceedings{Simard92, Author = {M. Simard and G. Foster and P. Isabelle}, Booktitle = TMI, Title = {Using cognates to align sentences in bilingual corpora}, Year = 1992} @techreport{Simard95, Address = {Laval, Canada}, Author = {Simard, M. and G. Foster and F. Perrault}, Institution = {CITI}, Title = {{TransSearch}: a Bilingual Concordance Tool}, Year = 1995} @article{Simon83, Author = {H. A. Simon}, Journal = {Artificial Intelligence}, Pages = {7--29}, Title = {Search and reasoning in problem-solving}, Volume = 21, Year = 1983} @incollection{Simpson88, Author = {Simpson, Greg B. and Curt Burgess}, Booktitle = {Lexical Ambiguity Resolution}, Editor = {Small, S. L., G. W. Cottrell and M. K. Tanenhaus}, Pages = {271--288}, Publisher = {Morgan Kaufman Publishers}, Title = {Implications of lexical ambiguity resolution for word recognition}, Year = 1988} @inproceedings{Singhal00, Author = {A. Singhal and S. Abney and M. Bacchiani and M. Collins and D. Hindle and F. Pereira}, Booktitle = {Proceedings of the 8th Text Retrieval Conference, NIST}, Place = {in Voorhees}, Title = {{AT\&T} at {TREC-8}}, Year = 2000} @book{Skinner57, Address = {Englewood Cliffs, NJ}, Author = {B. F. Skinner}, Publisher = {Prentice-Hall}, Title = {Verbal behaviour}, Year = 1957} @book{Skousen+02, Address = {Amsterdam, The Netherlands}, Date-Modified = {2008-07-23 15:57:45 +0200}, Editor = {R. Skousen and D. Lonsdale and D. B. Parkinson}, Keywords = {analogical modeling}, Publisher = {John Benjamins}, Title = {Analogical modeling: An exemplar-based approach to language}, Year = 2002} @incollection{Skousen02, Address = {Amsterdam, The Netherlands}, Author = {R. 
Skousen}, Booktitle = {Analogical modeling: An exemplar-based approach to language}, Editor = {R. Skousen and D. Lonsdale and D. B. Parkinson}, Pages = {11--26}, Publisher = {John Benjamins}, Title = {An overview of analogical modeling}, Year = 2002} @book{Skousen89, Address = {Dordrecht}, Author = {R. Skousen}, Publisher = {Kluwer Academic Publishers}, Title = {Analogical modeling of language}, Year = 1989} @book{Skousen92, Address = {Dordrecht}, Author = {R. Skousen}, Date-Modified = {2008-07-23 17:17:21 +0200}, Keywords = {analogical modeling}, Publisher = {Kluwer Academic Publishers}, Title = {Analogy and Structure}, Year = 1992} @incollection{Skousen99, Author = {Royal Skousen}, Booktitle = {Handbook of Quantitative Linguistics}, Publisher = {xxx}, Title = {Analogical Modeling}, Year = 2000} @inproceedings{Skut+98, Author = {W. Skut and T. Brants}, Booktitle = {ESSLLI-1998 Workshop on Automated Acquisition of Syntax and Parsing}, Title = {Chunk tagger: statistical recognition of noun phrases}, Year = 1998} @techreport{Sleator+91, Address = {{Pittsburgh}}, Author = {D. Sleator and D. Temperley}, Institution = {CMU}, Title = {Parsing English with a Link Grammar}, Year = 1991} @inproceedings{Smadja+90, Author = {Frank A. Smadja and Kathleen R. McKeown}, Booktitle = ACL90, Pages = {252--259}, Title = {Automatically Extracting and Representing Collocations for Language Generation}, Year = 1990} @inproceedings{Smadja90, Author = {Frank Smadja and Kathleen McKeown}, Booktitle = ACL, Title = {Automatically extracting and representing collocations for language generation}, Year = 1990} @inproceedings{Smadja91, Author = {Frank A. 
Smadja}, Booktitle = ACL91, Pages = {279--284}, Title = {From N-Grams to Collocations: An Evaluation of {Xtract}}, Year = 1991} @inproceedings{Smadja92, Author = {Frank Smadja}, Booktitle = {AAAI Workshop on Statistically-based Natural Language Processing Techniques}, Month = {July}, Place = {San Jose, California}, Title = {How to compile a bilingual collocational lexicon automatically}, Year = 1992} @article{Smadja93, Author = {Frank Smadja}, Journal = CL, Number = 1, Pages = {143--177}, Title = {Retrieving collocations from text: {X}tract}, Volume = 19, Year = 1993} @article{Smadja96, Author = {Frank Smadja and Kathleen R. McKeown and Vasileios Hatzivassiloglou}, Journal = CL, Number = 1, Pages = {1--38}, Title = {Translating collocations for bilingual lexicons: a statistical approach}, Volume = 22, Year = 1996} @article{Smid+96, Author = {H. Smid and G. Mulder and K. B\"ocker and D. Touw and C. Brunia}, Journal = {{Journal of Experimental Psychology: Human Perception and Performance}}, Number = 1, Pages = {3--24}, Title = {{A Psychophysiological Investigation of the Selection and the use of Partial Stimulus Information in Response Choice}}, Volume = 22, Year = 1996} @book{Smith+81, Address = {Cambridge, MA}, Author = {E. Smith and D. Medin}, Publisher = {Harvard University Press}, Title = {Categories and Concepts}, Year = 1981} @incollection{Smith+97, Address = {Cambridge}, Author = {L. Smith and L. Samuelson}, Booktitle = {Knowledge, concepts, and categories}, Editor = {K. Lamberts and D. Shanks}, Pages = {161--195}, Publisher = CUP, Title = {Perceiving and remembering: Category stability, variability, and development}, Year = 1997} @article{Soderland99, Author = {S. 
Soderland}, Date-Modified = {2011-06-21 18:12:13 +0200}, Journal = ML, Number = {1-3}, Pages = {233--272}, Title = {Learning Information Extraction Rules for Semi-Structured and Free Text}, Volume = 34, Year = 1999} @article{Somers99, Author = {Somers, H.}, Date-Added = {2009-11-15 14:19:38 +0100}, Date-Modified = {2009-11-15 14:19:55 +0100}, Journal = {Machine Translation}, Number = {2}, Pages = {113--157}, Title = {Example-based Machine Translation}, Volume = {14}, Year = {1999}} @article{Soon+01, Author = {Wee Meng Soon and Hwee Tou Ng and Daniel Chung Yong Lim}, Journal = CL, Number = 4, Pages = {521--544}, Title = {A Machine Learning Approach to Coreference Resolution of Noun Phrases}, Volume = 27, Year = 2001} @book{Soudi+07, Date-Added = {2010-01-03 01:15:42 +0100}, Date-Modified = {2010-09-14 13:03:06 +0200}, Editor = {A. Soudi and A. {Van den Bosch} and G. Neumann}, Keywords = {ilk, Arabic, morphological analysis, vici}, Publisher = {Springer Verlag}, Title = {Arabic computational morphology: Knowledge-based and empirical methods}, Year = {2007}, Bdsk-Url-1 = {http://www.springer.com/east/home/linguistics/computational+linguistics?SGWID=5-40374-22-173729328-0}} @phdthesis{Soudi02, Author = {A. Soudi}, Date-Modified = {2009-11-14 18:55:25 +0100}, Keywords = {Arabic, morphological analysis}, School = {Mohamed V University (Morocco) and Carnegie Mellon University (USA)}, Title = {A Computational Lexeme-based Treatment of Arabic Morphology}, Year = 2002} @book{Sowa84, Author = {J.F. Sowa}, Publisher = {Addison-Wesley}, Title = {Conceptual structures in mind and machines}, Year = 1984} @inbook{Sowa92, Author = {J.F. Sowa}, Chapter = {1 {\em Conceptual Graphs Summary}}, Editor = {T.E. Nagle and J.A. Nagle and L.L. Gerholz and P.W. Eklund}, Pages = {3-51}, Publisher = {Ellis Horwood}, Title = {Conceptual Structures: Current Research and Practice}, Year = 1992} @inproceedings{Spitters00, Address = {Paris, France}, Author = {M. 
Spitters}, Booktitle = {Proceedings of the Sixth Conference on Content-Based Multimedia Access (RIAO 2002)}, Pages = {1124--1135}, Title = {Comparing feature sets for learning text categorization}, Year = 2000} @incollection{Spivey-Knowlton94, Address = {Hillsdale, NJ}, Author = {M. Spivey-Knowlton}, Booktitle = {Proceedings of 1993 Connectionist Models Summer School}, Editor = {M.C.Mozer and P. Smolensky and D.S.Touretzky and J.L. Elman and A.S. Weigend}, Pages = {130--137}, Publisher = {Lawrence Erlbaum Associates}, Title = {Quantitative Predictions From a Constraint-Based Theory of Syntactic Ambiguity Resolution}, Year = 1994} @inproceedings{Sporleder+05, Address = {Vancouver, Canada}, Author = {C. Sporleder and M. Lapata}, Booktitle = {Proceedings of the 2005 Human Language Technology Conference and the Conference on Empirical Methods in Natural Language Processing, HLT/EMNLP-05}, Title = {Discourse chunking and its application to sentence compression}, Year = 2005} @inproceedings{Sporleder+05b, Address = {Borovets, Bulgaria}, Author = {C. Sporleder and A. Lascarides}, Booktitle = {Proceedings of Recent Advances in Natural Language Processing, RANLP-2005}, Pages = {532--539}, Title = {Exploiting linguistic cues to classify rhetorical relations}, Year = 2005} @inproceedings{Sporleder+06, Address = {Trento, Italy}, Author = {C. Sporleder and M. {Van Erp} and T. Porcelijn and A. {Van den Bosch}}, Booktitle = {Proceedings of the EACL 2006 Workshop on Adaptive Text Extraction and Mining, ATEM-2006}, Date-Modified = {2010-09-14 11:59:06 +0200}, Keywords = {ilk, data cleaning, natural history, mitch}, Title = {Spotting the `odd-one-out': Data-driven error detection and correction in textual databases}, Year = 2006, Bdsk-Url-1 = {http://ilk.uvt.nl/~caroline/papers/atem06.pdf}} @inproceedings{Sporleder+06b, Address = {Trento, Italy}, Author = {C. Sporleder and M. {Van Erp} and T. Porcelijn and A. 
{Van den Bosch}}, Booktitle = {Proceedings of the Fifth International Conference on Language Resources and Evaluation, LREC-2006}, Date-Modified = {2010-09-14 11:58:51 +0200}, Keywords = {ilk, named-entity recognition, natural history, mitch}, Title = {Identifying named entities in text databases from the natural history domain}, Year = 2006, Bdsk-Url-1 = {http://ilk.uvt.nl/mitch/papers/lrec06.pdf}} @inproceedings{Sporleder+06c, Address = {Ghent, Belgium}, Author = {C. Sporleder and M. {Van Erp} and T. Porcelijn and A. {Van den Bosch}}, Booktitle = {Proceedings of the Annual Machine Learning Conference of Belgium and The Netherlands (Benelearn-06)}, Date-Modified = {2010-09-14 11:59:01 +0200}, Keywords = {ilk, mitch, data imputation, timpute, memory-based learning}, Pages = {49--56}, Title = {Correcting `wrong-column' errors in text databases}, Year = 2006, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/main.pdf}} @inproceedings{Sproat+92, Address = {Banff}, Author = {Sproat, R. and Hirschberg, J. and Yarowsky, D.}, Booktitle = {Proceedings of the International Conference on Spoken Language Processing}, Date-Modified = {2009-12-26 21:12:45 +0100}, Keywords = {speech synthesis}, Month = {October}, Organization = {ICSLP}, Pages = {563-566}, Title = {A Corpus-Based Synthesizer}, Year = 1992} @incollection{Sproat00, Address = {New York and Basel}, Author = {Richard Sproat}, Booktitle = {Handbook of Natural Language Processing}, Editor = {Robert Dale and Hermann Moisl and Harold Somers}, Pages = {37--57}, Publisher = {Marcel Dekker}, Title = {Lexical Analysis}, Year = 2000} @book{Sproat92, Address = {Cambridge, MA}, Author = {R. Sproat}, Date-Modified = {2009-11-25 23:52:29 +0100}, Keywords = {morphology, morphological analysis}, Publisher = MIT, Series = {ACL-MIT Press Series in Natural Language Processing}, Title = {Morphology and computation}, Year = 1992} @inproceedings{Sproat94, Author = {R. Sproat and J. 
Olive}, Booktitle = {Proceedings of the Second {{\sc esca/ieee}} Workshop on Speech Synthesis, New York}, Date-Modified = {2009-11-25 23:52:50 +0100}, Keywords = {text-to-speech, grapheme-phoneme conversion, speech synthesis}, Organization = {{{\sc esca/ieee}}}, Pages = {187--190}, Title = {A modular architecture for multi-lingual text-to-speech}, Year = 1994} @incollection{Squire+92, Author = {L.R. Squire and J. G. Ojemann and F. M. Miezin and S. E. Petersen and T. O. Videen and M. E. Raichle}, Booktitle = {Proceedings of National Academy of Sciences USA, Neurobiology}, Date-Modified = {2010-10-06 20:59:17 +0200}, Month = {March}, Pages = {1837--1841}, Title = {Activation of the hippocampus in normal humans: A functional anatomical study of memory}, Volume = 89, Year = 1992} @inproceedings{Srihari+99, Author = {R. Srihari and W. Li}, Booktitle = {Proceedings of Text REtrieval Conference (TREC-8)}, Date-Modified = {2009-12-26 21:13:15 +0100}, Title = {Information Extraction Supported Question Answering}, Year = 1999} @article{Stanfill+86, Author = {Stanfill, C. and Waltz, D.}, Journal = {Communications of the {ACM}}, Month = dec, Number = 12, Pages = {1213--1228}, Title = {Toward Memory-Based Reasoning}, Volume = 29, Year = 1986} @inproceedings{Stanfill87, Address = {Los Altos, CA}, Author = {C. Stanfill}, Booktitle = {Proceedings of the Sixth National Conference on Artificial Intelligence}, Pages = {577-581}, Publisher = {Morgan Kaufmann}, Title = {Memory-based reasoning applied to {E}nglish pronunciation}, Year = 1987} @inproceedings{Starlander+02, Address = {Las Palmas de Gran Canaria, Spain}, Author = {M. Starlander and A. 
Popescu-Belis}, Booktitle = {LREC 2002: Third International Conference on language resources and evaluation}, Date-Added = {2010-01-29 15:13:39 +0100}, Date-Modified = {2010-02-12 23:58:29 +0100}, Editors = {Manuel Gonz\'{a}lez Rodr\'{i}guez and Carmen Paz Suarez Araujo}, Pages = {268--274}, Publisher = {Paris : ELRA, European Language Resources}, Title = {Corpus-based evaluation of a {F}rench spelling and grammar checker}, Volume = {1}, Year = 2002} @article{Steedman94, Author = {M. Steedman}, Date-Modified = {2010-10-06 21:00:13 +0200}, Journal = {Lingua}, Pages = {471--480}, Title = {Acquisition of verb categories}, Volume = 92, Year = 1994} @mastersthesis{Steetskamp95, Address = {Nijmegen, The Netherlands}, Author = {R. Steetskamp}, Pages = 48, School = {TOSCA Research Group, University of Nijmegen}, Title = {An Implementation of a Probabilistic Tagger}, Year = 1995} @inproceedings{Stehouwer+09, Address = {Nijmegen, The Netherlands}, Author = {H. Stehouwer and A. {Van den Bosch}}, Booktitle = {Computational Linguistics in the Netherlands 2007: Selected Papers from the 18th CLIN Meeting}, Date-Added = {2010-01-02 19:11:51 +0100}, Date-Modified = {2010-09-14 13:02:26 +0200}, Editor = {S. Verberne and H. van Halteren and P.-A. Coppen}, Keywords = {ilk, confusible disambiguation, spelling correction, vici}, Pages = {21--36}, Title = {Putting the t where it belongs: Solving a confusion problem in {D}utch}, Year = {2009}, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/CLIN07-putting-the-t.pdf}} @inproceedings{Stehouwer+09b, Address = {Athens, Greece}, Author = {H. Stehouwer and M. 
{Van Zaanen}}, Booktitle = {Proceedings of the EACL 2009 Workshop on Computational Linguistic Aspects of Grammatical Inference}, Date-Added = {2010-01-02 19:29:59 +0100}, Date-Modified = {2010-09-14 13:02:32 +0200}, Keywords = {language modeling, ilk, vici}, Pages = {41--48}, Title = {Language models for contextual error detection and correction}, Year = {2009}, Bdsk-Url-1 = {http://aclweb.org/anthology-new/W/W09/W09-1007.pdf}} @inproceedings{Stehouwer+09c, Address = {Eindhoven, The Netherlands}, Author = {H. Stehouwer and M. {Van Zaanen}}, Booktitle = {Proceedings of the 21st Benelux Conference on Artificial Intelligence (BNAIC-2009)}, Date-Added = {2010-01-02 19:50:45 +0100}, Date-Modified = {2010-09-14 13:02:42 +0200}, Keywords = {ilk, language modeling, token merging, vici}, Pages = {241--248}, Title = {Token merging in language model-based confusible disambiguation}, Year = {2009}, Bdsk-Url-1 = {http://wwwis.win.tue.nl/bnaic2009/papers/bnaic2009_paper_76.pdf}} @inproceedings{Steinberger+06, Author = {Steinberger, R. and Pouliquen, B. and Widiger, A. and Ignat, C. and Erjavec, T. and Tufis, D. and Varga, D.}, Booktitle = {Proceedings of the 5th International Conference on Language Resources and Evaluation (LREC'2006)}, Date-Added = {2009-11-15 14:20:00 +0100}, Date-Modified = {2009-11-15 14:20:13 +0100}, Pages = {2142--2147}, Title = {The JRC-Acquis: A multilingual aligned parallel corpus with 20+ languages}, Year = {2006}} @inproceedings{Stetina+97, Address = {Beijing, China}, Author = {J. Stetina and M. Nagao}, Booktitle = {Proceedings of the Fifth Workshop on Very Large Corpora}, Pages = {66--80}, Title = {Corpus-based {PP} attachment ambiguity resolution with a semantic dictionary}, Year = 1997} @inproceedings{Stevens+07, Address = {Leuven, Belgium}, Author = {G. Stevens and P. Monachesi and A. 
{Van den Bosch}}, Booktitle = {Computational Linguistics in the Netherlands: Selected Papers from the Seventeenth CLIN Meeting}, Date-Added = {2010-01-03 01:24:02 +0100}, Date-Modified = {2010-09-14 13:06:00 +0200}, Editor = {P. Dirix and I. Schuurman and V. Vandeghinste and and F. {Van Eynde}}, Keywords = {ilk, semantic role labeling, Dutch, vici}, Pages = {99--114}, Title = {A pilot study for semantic role labeling in a Dutch corpus}, Year = {2007}, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/stevensetal-clin17.pdf}} @inproceedings{Stevenson+00, Author = {M. Stevenson and R. Gaizauskas}, Booktitle = {Proceedings of the Sixth Conference on Applied Natural Language Processing and the First Conference of the North American Chapter of the Association for Computational Linguistics}, Date-Modified = {2010-11-02 21:11:28 +0100}, Pages = {24--30}, Title = {Experiments on sentence boundary detection}, Year = 2000} @article{Stevenson+01, Author = {Stevenson, M. and Wilks, Y.}, Date-Modified = {2010-11-02 21:11:45 +0100}, Journal = {Computational Linguistics}, Keywords = {wsd, word sense disambiguation}, Number = 3, Pages = {321--349}, Title = {The interaction of knowledge sources in word sense disambiguation}, Volume = 27, Year = 2001} @inproceedings{Stevenson+99, Author = {M. Stevenson and Y. Wilks}, Booktitle = {Proceedings of the International Joint Conference on Artificial Intelligence}, Date-Modified = {2010-11-02 21:11:58 +0100}, Keywords = {wsd, word sense disambiguation}, Title = {Combining weak knowledge sources for sense disambiguation}, Year = 1999} @incollection{Stickel89, Author = {Stickel, M. E.}, Booktitle = {Natural Language and Logic}, Editor = {Studer,. R}, Publisher = {Lecture notes in AI, 459 Springer-Verlag}, Title = {Rationale and methods for abductive reasoning in natural-language interpretation}, Year = 1989} @inproceedings{Stocky+04, Address = {New York, NY, USA}, Author = {T. Stocky and A. Faaborg and H. 
Lieberman}, Booktitle = {{CHI} '04: {CHI} '04 {E}xtended {A}bstracts on {H}uman {F}actors in {C}omputing {S}ystems}, Date-Modified = {2010-06-25 22:12:14 +0200}, Keywords = {predictive text entry, word completion, text completion}, Location = {Vienna, Austria}, Pages = {1163--1166}, Publisher = {ACM}, Title = {A commonsense approach to predictive text entry}, Year = 2004} @inproceedings{Stolcke+93, Author = {A. Stolcke and S. Omohundro}, Bookeditor = {C. L. Giles and S. J. Hanson and J. D. Cowan}, Booktitle = {Advances in Neural Information Processing Systems 5}, Date-Modified = {2010-09-20 00:23:46 +0200}, Publisher = {Morgan Kaufman}, Title = {{H}idden {M}arkov {M}odel Induction by {B}ayesian Model Merging}, Year = 1993} @inproceedings{Stolcke02, Address = {Denver, Colorado}, Author = {A. Stolcke}, Booktitle = {Proceedings of the International Conference on Spoken Language Processing}, Pages = {901--904}, Title = {{SRILM} -- {A}n extensible language modeling toolkit}, Year = 2002} @article{Stolcke95, Author = {Andreas Stolcke}, Journal = {Computational Linguistics}, Title = {An Efficient Probabilistic Context-Free Parsing Algorithm that Computes Prefix Probabilities}, Volume = 21, Year = 1995} @inproceedings{Stone+95, Address = {Cambridge, MA}, Author = {P. Stone and M. Veloso}, Booktitle = {Advances in Neural Information Processing Systems 8}, Date-Modified = {2010-10-06 20:58:43 +0200}, Editor = {D. S. Touretzky and M. C. Mozer and M. E. Hasselmo}, Pages = {896--902}, Publisher = {{MIT} Press}, Title = {Beating a Defender in Robotic Soccer: Memory-Based Learning of a Continuous Function}, Year = 1996} @inproceedings{Streiter01, Author = {O. Streiter}, Booktitle = {Proceedings of the Fifteenth Pacific Asia conference on Language, Information and Computation (PACLIC 2001)}, Title = {Recursive top-down fuzzy match: New perspectives on memory-based parsing}, Year = 2001} @inproceedings{Streiter01b, Author = {O. 
Streiter}, Booktitle = {Proceedings of the Nineteenth International Conference on Computer Processing of Oriental Languages (ICCPOS 2001)}, Title = {Memory-Based Parsing: Enhancing Recursive Top-down Fuzzy Match with Bottom-Up Chunking}, Year = 2001} @article{Strohmaier+03, Address = {Los Alamitos, CA, USA}, Author = {C. M. Strohmaier and C. Ringlstetter and K. Schulz and S. Mihov}, Date-Added = {2010-01-29 15:26:31 +0100}, Date-Modified = {2010-02-12 23:57:23 +0100}, Isbn = {0-7695-1960-1}, Journal = {Document Analysis and Recognition, International Conference on}, Pages = {1133}, Publisher = {IEEE Computer Society}, Title = {Lexical Postcorrection of OCR-Results: The Web as a Dynamic Secondary Dictionary?}, Volume = {2}, Year = {2003}} @inbook{Strohmaier+03b, Author = {C. Strohmaier and C. Ringlstetter and K. Schulz and S. Mihov}, Booktitle = {Proceedings of the Workshop on Document Image Analysis and Retrieval DIAR'03.}, Date-Added = {2010-01-29 15:26:45 +0100}, Date-Modified = {2010-02-12 23:57:58 +0100}, Title = {A visual and interactive tool for optimizing lexical postcorrection of OCR-results.}, Url = {http://coli.lili.uni-bielefeld.de/Texttechnologie/Forschergruppe/publications/strohmaieretal03a.pdf}, Year = 2003, Bdsk-Url-1 = {http://coli.lili.uni-bielefeld.de/Texttechnologie/Forschergruppe/publications/strohmaieretal03a.pdf}} @inproceedings{Stroppa+07, Address = {Sk{\"o}vde, Sweden}, Author = {N. Stroppa and A. {Van den Bosch} and A. Way}, Booktitle = {Proceedings of the 11th International Conference on Theoretical Issues in Machine Translation (TMI 2007)}, Date-Modified = {2010-09-14 13:05:51 +0200}, Editor = {A. Way and B. Gawronska}, Keywords = {ilk, statistical machine translation, IGTree, vici}, Pages = {231--240}, Title = {Exploiting source similarity for {SMT} using context-informed features}, Year = {2007}, Bdsk-Url-1 = {http://www.compapp.dcu.ie/~away/PUBS/2007/49_Paper.pdf}} @inproceedings{Stroppa06, Address = {Cambridge, MA}, Author = {N. 
Stroppa and D. Groves and A. Way and K. Sarasola}, Booktitle = {Proceedings of AMTA 2006}, Pages = {232-241}, Title = {Example-Based Machine Translation of the {B}asque Language}, Year = 2006} @inproceedings{Su94, Author = {Keh-Yih Su and Ming-Wen Wu and Jing-Shin Chang}, Booktitle = ACL, Date-Modified = {2009-11-14 18:54:35 +0100}, Keywords = {compounding}, Pages = {242-247}, Title = {A corpus-based approach to automatic compound extraction}, Year = 1994} @incollection{Sullivan+92, Address = {Amsterdam, The Netherlands}, Author = {K. Sullivan and R. Damper}, Booktitle = {Talking machines: theories, models, and applications}, Date-Modified = {2011-06-21 18:27:23 +0200}, Editor = {G. Bailly and C. Beno{\^{\i}}t}, Pages = {183--195}, Publisher = {Elsevier}, Title = {Novel-word pronunciation with a text-to-speech system}, Year = 1992} @article{Sullivan+93, Author = {K. Sullivan and R. Damper}, Journal = {Speech Communication}, Pages = {441--452}, Title = {Novel-word pronunciation: a cross-language study}, Volume = 13, Year = 1993} @book{Sun+01, Address = {Heidelberg}, Author = {R. Sun and L. Giles}, Publisher = {Springer Verlag}, Title = {Sequence Learning: Paradigms, Algorithms, and Applications}, Year = 2001} @inproceedings{Sun+07, Address = {Prague, Czech Republic}, Author = {G. Sun and X. Liu and G. Cong and M. Zhou and Z. Xiong and J. Lee and C. Lin}, Booktitle = {Proceedings of the 45th Annual Meeting of the Association of Computational Linguistics}, Month = {June}, Pages = {81--88}, Publisher = {Association for Computational Linguistics}, Title = {Detecting Erroneous Sentences using Automatically Mined Sequential Patterns}, Url = {http://www.aclweb.org/anthology/P07-1011}, Year = {2007}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/P07-1011}} @inproceedings{Surdeanu+08, Author = {M. Surdeanu and R. Johansson and A. Meyers and L. M\`{a}rquez and J. Nivre}, Booktitle = {Proc. 
of {CoNLL-2008}}, Date-Added = {2009-12-26 20:57:57 +0100}, Date-Modified = {2010-01-03 10:26:04 +0100}, Keywords = {shared task}, Title = {The {CoNLL}-2008 Shared Task on Joint Parsing of Syntactic and Semantic Dependencies}, Year = {2008}} @inproceedings{Surdeanu+08b, Address = {Berlin, Germany}, Author = {M. Surdeanu and R. Morante and L. M\`{a}rquez}, Booktitle = {Proceedings of the Computational Linguistics and Intelligent Text Processing 9th International Conference, CICLing 2008}, Date-Added = {2010-01-03 10:27:18 +0100}, Date-Modified = {2010-01-03 10:37:26 +0100}, Editor = {A. Gelbukh}, Keywords = {semantic role labeling, Spanish, Catalan}, Pages = {206--218}, Publisher = {Springer Verlag}, Series = {Lecture Notes in Computer Science}, Title = {Analysis of joint inference strategies for the semantic role labeling of Spanish and Catalan}, Volume = {4919}, Year = {2008}, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/surdeanuetal-cicling08.pdf}} @inproceedings{Sutton+04, Author = {C. Sutton and K. Rohanimanesh and A. McCallum}, Booktitle = {Proceedings of the Twenty-first International Conference on Machine Learning (ICML 2004)}, Date-Added = {2009-11-15 14:20:38 +0100}, Date-Modified = {2009-11-15 14:20:48 +0100}, Editor = {C.E. Brodley}, Pages = {783--790}, Title = {Dynamic Conditional Random Fields: Factorized Probabilistic Models for Labeling and Segmenting Sequence Data}, Year = {2004}} @book{Suzuki70, Address = {New York}, Author = {S. Suzuki}, Publisher = {Weather Hill}, Title = {Zen mind, beginner's mind}, Year = 1970} @article{Swets+00, Author = {J. Swets and R. Dawes and J. Monahan}, Journal = {Scientific American}, Pages = {82--87}, Title = {Better decisions through science}, Volume = 283, Year = 2000} @incollection{Swonger72, Address = {Orlando, Fla}, Author = {C. W. Swonger}, Booktitle = {Frontiers of Pattern Recognition}, Editor = {S. 
Watanabe}, Pages = {511--519}, Publisher = {Academic Press}, Title = {Sample set condensation for a condensed nearest neighbor decision rule for pattern recognition}, Year = 1972} @inproceedings{Szdemak+05, Author = {Szdemak, S. and Saunders, C. and Shawe-Taylor, J. and Rousu, J.}, Booktitle = {NIPS 2005 Workshop on Kernel methods and structured domains}, Date-Added = {2009-11-15 14:20:52 +0100}, Date-Modified = {2009-11-15 14:21:05 +0100}, Title = {Learning hierarchies at two-class complexity}, Year = {2005}} @proceedings{TREC-9, Number = {500-XXX}, Publisher = {National Institute of Standards and Technology (NIST)}, Series = {NIST Special Publication}, Title = {Proceedings of TREC-9}, Year = 2000} @inproceedings{Tabor+96, Address = {{Oxford UK}}, Author = {W. Tabor and C. Juliano and M. Tanenhaus}, Booktitle = {Proceedings of 18th annual meeting of the Cognitive Science Society}, Date-Modified = {2008-07-23 16:59:08 +0200}, Keywords = {psycholinguistics, sentence processing, syntax, connectionism}, Title = {A Dynamical System for Language Processing}, Year = 1996} @inproceedings{Tabor95, Address = {{Oxford UK}}, Author = {W. Tabor}, Booktitle = {Proceedings of 17th annual meeting of the Cognitive Science Society}, Date-Modified = {2008-07-23 16:53:36 +0200}, Keywords = {lexical change, syntax, connectionism}, Title = {Lexical Change as Nonlinear Interpolation}, Year = 1995} @inproceedings{Tabor96, Author = {W. Tabor and C. Juliano and M. 
Tanenhaus}, Booktitle = {Proceedings of 18th annual meeting of the Cognitive Science SOciety}, Title = {A Dynamical System Approach for Language Processing}, Year = 1996} @article{Taghva+01, Author = {Kazem Taghva and Eric Stofsky}, Date-Added = {2010-01-29 15:13:50 +0100}, Date-Modified = {2010-01-29 15:13:57 +0100}, Journal = {{I}nternational {J}ournal on {D}ocument {A}nalysis and {R}ecognition}, Number = {3}, Pages = {125--137}, Title = {{OCRS}pell: an interactive spelling correction system for {OCR} errors in text}, Volume = {3}, Year = {2001}} @inproceedings{Taghva+04, Author = {Kazem Taghva and Thomas Nartker and Julie Borsack}, Booktitle = {HDP '04: Proceedings of the 1st ACM workshop on Hardcopy document processing}, Date-Added = {2010-01-29 15:27:04 +0100}, Date-Modified = {2010-01-29 15:27:10 +0100}, Doi = {http://doi.acm.org/10.1145/1031442.1031443}, Isbn = {1-58113-976-4}, Location = {Washington, DC, USA}, Pages = {1--8}, Publisher = {ACM Press}, Title = {Information access in the presence of {OCR} errors}, Year = {2004}, Bdsk-Url-1 = {http://doi.acm.org/10.1145/1031442.1031443}} @inproceedings{Takezawa02, Address = {Las Palmas, Spain}, Author = {T. Takezawa and E. Sumita and F. Sugaya and H. Yamamoto and S. Yamamoto}, Booktitle = {Proceedings of LREC 2002}, Pages = {147-152}, Title = {Toward a broad-coverage bilingual corpus for speech translation of travel conversations in the real world}, Year = 2002} @inproceedings{Tanaka-Ishii+02, Address = {Morristown, NJ, USA}, Author = {K. Tanaka-Ishii and Y. Inutsuka and M. Takeichi}, Booktitle = {Proceedings of the 19th International Conference on Computational Linguistics}, Date-Modified = {2010-06-25 22:14:47 +0200}, Location = {Taipei, Taiwan}, Pages = {1--7}, Publisher = {Association for Computational Linguistics}, Title = {Entering text with a four-button device}, Year = 2002} @inproceedings{Tanaka-Ishii+03, Address = {Morristown, NJ, USA}, Author = {K. Tanaka-Ishii and D. Hayakawa and M. 
Takeichi}, Booktitle = {{ACL} '03: {P}roceedings of the 41st {A}nnual {M}eeting on {A}ssociation for {C}omputational {L}inguistics}, Date-Modified = {2012-01-15 20:22:35 +0000}, Location = {Sapporo, Japan}, Pages = {407--414}, Publisher = {Association for Computational Linguistics}, Title = {{A}cquiring {V}ocabulary for {P}redictive {T}ext {E}ntry through {D}ynamic {R}euse of a {S}mall {U}ser {C}orpus}, Year = 2003} @article{Tanaka-Ishii07, Address = {New York, NY, USA}, Author = {K. Tanaka-Ishii}, Date-Modified = {2012-01-15 20:22:19 +0000}, Journal = {{N}atural {L}anguage {E}ngineering}, Number = 1, Pages = {51--74}, Publisher = {Cambridge University Press}, Title = {{W}ord-based {P}redictive {T}ext {E}ntry using {A}daptive {L}anguage {M}odels}, Volume = 13, Year = 2007} @inproceedings{Tanaka96, Author = {H. Tanaka}, Booktitle = COLING96, Date-Modified = {2010-10-06 21:00:22 +0200}, Keywords = {decision trees}, Pages = {943--948}, Title = {Decision Tree Learning Algorithm with Structured Attributes: Application to Verbal Case Frame Acquisition}, Year = 1996} @inproceedings{Tapanainen+94, Author = {Pasi Tapanainen and Timo J\"{a}rvinen}, Booktitle = COLING94, Pages = {629--634}, Title = {Syntactic Analysis of Natural Language Using Linguistic Rules and Corpus-Based Patterns}, Volume = 1, Year = 1994} @inproceedings{TapanainenJa97, Address = {Washington DC}, Author = {P. Tapanainen and T. J{\"{a}}rvinen}, Booktitle = {In Proceedings of the 5th Conference on Applied Natural Language Processing}, Title = {A non-projective dependency parser.}, Year = 1997} @article{Taraban+87, Author = {R. Taraban and J. L. McClelland}, Journal = {Journal of Memory and Language}, Pages = {608--631}, Title = {Conspiracy effects in word pronunciation}, Volume = 26, Year = 1987} @article{Taraban+88, Author = {Taraban, R. and J.L. 
McClelland}, Journal = {Journal of Memory and Language}, Pages = {597--632}, Title = {Constituent Attachment and thematic Role Assignment in Sentence Processing: Influences of Content-Based Expectations}, Volume = 27, Year = 1988} @incollection{Taraban+90, Address = {Hillsdal, NJ}, Author = {Taraban, R. and J.L. McClelland}, Booktitle = {Comprehehension Processes in Reading}, Editor = {D.A. Balota and G.B. Flores d'Arcais and K. Rayner}, Publisher = {Lawrence Erlbaum Associates}, Title = {Parsing and Comprehension: A Multiple-Constraint View}, Year = 1990} @article{Tarjan77, Author = {Tarjan, R.E.}, Date-Added = {2009-11-15 14:21:10 +0100}, Date-Modified = {2009-11-15 14:21:21 +0100}, Journal = {Networks}, Number = {1}, Pages = {25--36}, Title = {Finding optimum branchings}, Volume = {7}, Year = {1977}} @inproceedings{Taskar+04, Author = {Taskar, B. and Guestrin, C. and Koller, D.}, Booktitle = {Advances in Neural Information Processing Systems}, Date-Added = {2009-11-15 14:21:25 +0100}, Date-Modified = {2009-11-15 14:21:36 +0100}, Editor = {S. Thrun and L.K. Saul and B. Sch\"olkopf}, Pages = {25--32}, Title = {Max-margin Markov networks}, Volume = {16}, Year = {2004}} @inproceedings{Taule+08, Address = {Marrakesh, Morroco}, Author = {M. Taul\'{e} and {M. A.} Mart\'{\i} and M. Recasens}, Booktitle = {Proceedings of the {6th International Conference on Language Resources and Evaluation (LREC-2008)}}, Date-Added = {2009-12-26 20:57:57 +0100}, Date-Modified = {2011-07-02 22:18:13 +0200}, Title = {{AnCora: Multilevel Annotated Corpora for Catalan and Spanish}}, Year = {2008}} @book{Taylor+94, Author = {C. Taylor and D. Michie and D. 
Spiegelhalter}, Publisher = {Paramount
Thornton}, Journal = {Connection Science}, Pages = {81--92}, Title = {Measuring the difficulty of specific learning problems}, Volume = 7, Year = 1995} @inproceedings{Tiedemann+04, Address = {Lisbon, Portugal}, Author = {J. Tiedeman and L. Nygaard}, Booktitle = {Proceedings of the Fourth International Conference on Language Resources and Evaluation (LREC'04)}, Title = {The {OPUS} corpus - parallel and free.}, Year = {2004}} @inproceedings{Tiedemann07, Address = {Leuven, Belgium}, Author = {J. Tiedemann}, Booktitle = {Computational Linguistics in the Netherlands: Selected Papers from the Seventeenth CLIN Meeting}, Editor = {P. Dirix and I. Schuurman and V. Vandeghinste and and F. {Van Eynde}}, Title = {Building a Multilingual Parallel Subtitle Corpus}, Year = 2007} @inproceedings{Tiedemann07b, Author = {Tiedemann, J.}, Booktitle = {Proceedings of the International Conference on Recent Advances in Natural Language Processing (RANLP 2007)}, Date-Added = {2009-11-15 14:21:50 +0100}, Date-Modified = {2009-11-15 14:22:05 +0100}, Pages = {582--588}, Title = {Improved Sentence Alignment for Movie Subtitles}, Year = {2007}} @inproceedings{Tiedemann09, Address = {Amsterdam/Philadelphia}, Author = {Tiedemann, J.}, Booktitle = {Recent Advances in Natural Language Processing (vol V)}, Date-Modified = {2013-08-07 07:55:04 +0000}, Editor = {N. Nicolov and K. Bontcheva and G. Angelova and R. 
Mitkov}, Pages = {237--248}, Publisher = {John Benjamins}, Title = {News from OPUS - A Collection of Multilingual Parallel Corpora with Tools and Interfaces}, Year = {2009}} @mastersthesis{Tillenius96, Address = {Stockholm, Sweden}, Author = {Mikael Tillenius}, Date-Added = {2010-01-29 15:14:03 +0100}, Date-Modified = {2010-01-29 15:14:06 +0100}, Note = {NADA report TRITA-NA-E9621}, School = {Department of Numerical Analysis and Computing Science, Royal Institute of Technology}, Title = {Efficient generation and ranking of spelling error corrections}, Year = {1996}} @inproceedings{Ting+97, Address = {Berlin}, Author = {K. M. Ting and B. T. Low}, Booktitle = {Machine Learning: Proceedings of ECML-97}, Editor = {M. {Van Someren} and G. Widmer}, Number = 1224, Pages = {250--265}, Publisher = {Springer-Verlag}, Series = {Lecture Notes in Artificial Intelligence}, Title = {Model combination in the multiple-data-batches scenario}, Year = 1997} @inproceedings{Ting+97a, Author = {K.M. Ting and B.T. Low}, Booktitle = {{Proceedings European Conference on Machine Learning, Prague, Czech Republic}}, Pages = {250--265}, Publisher = {Springer-Verlag}, Series = {{LNAI-1224}}, Title = {{Model combination in the multiple-data-batched scenario}}, Year = 1997} @inproceedings{Ting+97b, Author = {K.M. Ting and I.H. Witten}, Booktitle = {{International Joint Conference on Artificial Intelligence, Japan}}, Pages = {866--871}, Title = {{Stacked generalization: when does it work?}}, Year = 1997} @inproceedings{Ting+97c, Author = {K.M. Ting and I.H. Witten}, Booktitle = {{International Conference on Machine Learning, Tennessee}}, Pages = {367--375}, Title = {{Stacking bagged and dagged models}}, Year = 1997} @inproceedings{Ting94, Author = {K. M. 
Ting}, Booktitle = {Proceedings of the Tenth Canadian Conference on Artificial Intelligence}, Date-Modified = {2009-11-14 19:03:03 +0100}, Keywords = {decision trees, small disjuncts}, Pages = {91--97}, Title = {The problem of small disjuncts: Its remedy in decision trees}, Year = 1994} @inproceedings{Ting94b, Author = {K. M. Ting}, Booktitle = {Proceedings of the The Third Pacific Rim International Conference on Artificial Intelligence}, Pages = {360--366}, Title = {The problem of atypicality in instance-based learning}, Year = 1994} @inproceedings{Titov+07, Author = {Titov, I. and Henderson, J.}, Booktitle = {Proceedings of the CoNLL Shared Task Session of EMNLP-CoNLL 2007}, Date-Added = {2009-11-15 14:22:14 +0100}, Date-Modified = {2009-11-15 14:22:25 +0100}, Pages = {947--951}, Title = {Fast and Robust Multilingual Dependency Parsing with a Generative Latent Variable Model}, Year = {2007}} @inproceedings{Tomabechi85, Author = {Hideto Tomabechi and Masaru Tomita}, Booktitle = AAAI, Title = {Integration of unification-based syntax/semantics and memory-based pragmatics for real time understanding of continuous speech input}, Year = 1985} @book{Tomasello03, Author = {Michael Tomasello}, Publisher = {Harvard University Press}, Title = {Constructing a Language: A Usage-Based Theory of Language Acquisition}, Year = 2003} @article{Tomek76, Author = {I. Tomek}, Journal = {IEEE Transactions on Systems, Man, and Cybernetics}, Number = 6, Pages = {448--452}, Title = {An experiment with the edited nearest-neighbor rule}, Volume = {SMC-6}, Year = 1976} @book{Tomita86, Address = {Dordrecht, The Netherlands}, Author = {M. Tomita}, Date-Modified = {2011-06-21 18:12:50 +0200}, Publisher = {Kluwer Academic Publishers}, Title = {Efficient Parsing for Natural Language}, Year = 1986} @article{Tong+97, Author = {Tong, X. and Zhai, C.X. and Milid-Frayling, N. 
and Evans, D.}, Date-Added = {2010-02-12 22:10:22 +0100}, Date-Modified = {2010-02-12 22:10:41 +0100}, Journal = {NIST special publication SP}, Pages = {341--346}, Publisher = {Citeseer}, Title = {{OCR Correction and Query Expansion for Retrieval on OCR Data-CLARIT TREC-5 Confusion Track Report}}, Year = {1997}} @inproceedings{Torkkola93, Address = {Minneapolis}, Author = {K. Torkkola}, Booktitle = {Proceedings of the International Conference on Acoustics, Speech, and Signal Processing ({ICASSP})}, Pages = {199--202}, Title = {An efficient way to learn {E}nglish grapheme-to-phoneme rules automatically}, Volume = 2, Year = 1993} @inproceedings{Toutanova+02, Author = {K. Toutanova and R. C. Moore}, Booktitle = {Proceedings of the 40th Annual Meeting of the ACL}, Date-Added = {2010-01-29 15:14:13 +0100}, Date-Modified = {2010-02-12 23:58:48 +0100}, Pages = {144--151}, Title = {Pronunciation modeling for improved spelling correction}, Year = {2002}} @incollection{Tranel95, Address = {Cambridge, MA}, Author = {B. Tranel}, Booktitle = {The handbook of phonological theory}, Editor = {J. A. Goldsmith}, Pages = {798--816}, Publisher = {Blackwell}, Title = {Current issues in French phonology}, Year = 1995} @article{Treiman+90, Author = {R. Treiman and A. Zukowski}, Journal = {Journal of Memory and Language}, Pages = {66--85}, Title = {Toward an understanding of {E}nglish syllabification}, Volume = 29, Year = 1990} @inproceedings{Trost92, Address = {San Francisco, CA}, Author = {Harald Trost}, Booktitle = {Proceedings of the 12th International Joint Conference on Artificial Intelligence}, Pages = {1024--1030}, Publisher = {Morgan Kaufmann}, Title = {X2MORPH: A morphological Component Based on Augmented Two-Level Morphology}, Year = 1991} @article{Trueswell+93, Author = {Trueswell, J.C. and M.K. Tanenhaus and C. 
Kello}, Journal = {Journal of Experimental Psychology: Learning, Memory and Cognition}, Number = 3, Pages = {528--553}, Title = {Verb-Specific Constraints in Sentence Processing: Separating Effects of Lexical Preference from Garden-Paths}, Volume = 19, Year = 1993} @incollection{Trueswell+94, Address = {Hillsdale, NJ}, Author = {Trueswell, J.C. and M.K. Tanenhaus}, Booktitle = {Perspectives on Sentence Processing}, Chapter = 7, Editor = {C. Clifton Jr. and L. Frazier and K. Rayner}, Publisher = {Lawrence Erlbaum Associates}, Title = {Toward a Lexicalist Framework for Constraint-Based Syntactic Ambiguity Resolution}, Year = 1994} @book{Tsang93, Address = {San Diego, CA, USA}, Author = {Tsang, E.}, Date-Added = {2009-11-15 14:22:30 +0100}, Date-Modified = {2009-11-15 14:22:42 +0100}, Publisher = {Academic Press}, Title = {Foundations of constraint satisfaction}, Year = {1993}} @article{Tsochantaridis05, Author = {Tsochantaridis, I. and Joachims, T. and Hofmann, T. and Altun, Y.}, Date-Added = {2009-11-15 14:22:46 +0100}, Date-Modified = {2009-11-15 14:22:57 +0100}, Journal = {Journal of Machine Learning Research}, Pages = {1453--1484}, Title = {Large Margin Methods for Structured and Interdependent Output Variables}, Volume = {6}, Year = {2005}} @inproceedings{Tsuruoka+03, Address = {Morristown, NJ, USA}, Author = {Tsuruoka, Yoshimasa and Tsujii, Jun'ichi}, Booktitle = {Proceedings of the seventh conference on Natural language learning at HLT-NAACL 2003}, Doi = {http://dx.doi.org/10.3115/1119176.1119193}, Location = {Edmonton, Canada}, Pages = {127--134}, Publisher = {Association for Computational Linguistics}, Title = {Training a naive bayes classifier via the EM algorithm with a class distribution constraint}, Year = {2003}, Bdsk-Url-1 = {http://dx.doi.org/10.3115/1119176.1119193}} @inproceedings{Tufis99, Author = {{{Tufi\c{s}}, D.}}, Booktitle = {{Proceedings Workshop on Text, Speech, and Dialogue}}, Title = {{Tiered tagging and Combined Language Models 
Classifiers}}, Year = 1999} @article{Tumer+96, Author = {{K. Tumer and J. Ghosh}}, Journal = {{Connection Science, Special issue on combining artificial neural networks: ensemble approaches}}, Number = {3--4}, Pages = {{385--404}}, Title = {{Error Correlation and Error Reduction in Ensemble Classifiers}}, Volume = 8, Year = 1996} @article{Turing36, Author = {Alan M. Turing}, Date-Added = {2010-01-29 15:29:08 +0100}, Date-Modified = {2010-01-29 15:29:08 +0100}, Journal = {Proceedings of the London Mathematical Society}, Note = {Read to the Society in 1936, but published in 1937. Correction in volume 43, 544--546}, Pages = {230--265}, Series = {2}, Title = {On Computable Numbers, with an application to the {Entscheidungsproblem}}, Volume = {42}, Year = {1936}} @inproceedings{Turner+05, Address = {Ann Arbor, Michigan}, Author = {J. Turner and E.Charniak}, Booktitle = {Proceedings of the 43rd Annual Meeting of the Association for Computational Linguistics}, Date-Added = {2009-11-15 14:35:43 +0100}, Date-Modified = {2010-06-25 21:31:54 +0200}, Month = {June}, Pages = {290--297}, Title = {Supervised and Unsupervised Learning for Sentence Compression}, Year = {2005}} @book{Twain1880, Address = {Hartford}, Author = {Mark Twain}, Publisher = {American Publishing Co.}, Title = {A Tramp Abroad}, Year = 1880} @inproceedings{Tzoukermann94, Author = {E. Tzoukermann}, Booktitle = {Proceedings of the Second {{\sc esca/ieee}} Workshop on Speech Synthesis, New York}, Organization = {{{\sc esca/ieee}}}, Pages = {179--182}, Title = {Text-to-Speech for French}, Year = 1994} @book{Uitdenboogaard75, Address = {Utrecht, the Netherlands}, Author = {P.C. {Uit den Boogaard}}, Publisher = {Scheltema en Holkema}, Title = {Woordfrequenties in geschreven en gesproken {N}ederlands}, Year = 1975} @inproceedings{Ukkonen83, Author = {E. 
Ukkonen}, Booktitle = {Proceedings of the International Foundations of Computation Theory Conference, Lecture Notes in Computer Science 158}, Pages = {487--495}, Title = {On approximate string matching}, Year = 1983} @article{Ukkonen95, Author = {E. Ukkonen}, Journal = {Algorithmica}, Month = {September}, Number = {3}, Pages = {249--260}, Publisher = {Springer New York}, Title = {On-line construction of suffix trees}, Volume = {14}, Year = {1995}} @inproceedings{Ule+03, Address = {Antwerp, Belgium}, Author = {T. Ule and J. Veenstra}, Booktitle = {Proceedings of the 14th Meeting of Computational Linguistics in the Netherlands}, Organization = {University of Antwerp}, Title = {Iterative treebank refinement}, Year = 2003} @incollection{Ushioda+93, Address = {Columbus, Ohio}, Author = {A. Ushioda and D. Evans and T. Gibson and A. Waibel}, Booktitle = {SIGLEX ACL Workshop on the Acquisition of Lexical Knowledge from Text}, Editor = {B. Boguraev and J. Pustejovsky}, Pages = {95--106}, Title = {The automatic acquisition of frequencies of verb subcategorization frames from tagged corpora}, Year = 1993} @inproceedings{Ushioda+93b, Author = {A. Ushioda and D. Evans and T. Gibson and A. Waibel}, Booktitle = {Proceedings of the Third International Workshop on Parsing Technologies, IWPT-3, Tilburg, The Netherlands, and Durbuy, Belgium}, Title = {Frequency Estimation of Verb Subcategorization Frames Based on Syntactic and Multidimensional Statistical Analysis}, Year = 1993} @article{Valiant84, Author = {Valiant, L. G.}, Comment = {Defines `learnability' wrt EXAMPLES and ORACLE using arbitrary probability measure on event space. Shows k-CNF learnable from examples only.}, Journal = CACM, Month = Nov, Number = 11, Pages = {1134--1142}, Title = {A Theory of the Learnable}, Volume = 27, Year = 1984} @inproceedings{VanCoile90, Address = {Kobe, Japan}, Author = {B.
{van Coile}}, Booktitle = {Proceedings of the International Conference on Spoken Language Processes 1990}, Pages = {765--768}, Title = {Inductive learning of grapheme-to-phoneme rules}, Volume = 2, Year = 1990} @book{VanDijk+83, Address = {New York}, Author = {T. A. van Dijk and W. Kintsch}, Date-Added = {2010-02-08 13:09:58 +0100}, Date-Modified = {2010-09-14 22:12:04 +0200}, Keywords = {discourse}, Publisher = {Academic Press}, Title = {Strategies of Discourse Comprehension}, Year = {1983}} @inproceedings{VanErp+09, Author = {M. {Van Erp} and P. Lendvai and A. {Van den Bosch}}, Booktitle = {Proceedings of the Eighth International Conference on Computational Semantics (IWCS-8)}, Date-Added = {2010-01-02 19:25:53 +0100}, Date-Modified = {2010-09-14 11:59:35 +0200}, Keywords = {ilk, ontology learning, natural history, mitch}, Pages = {282--285}, Title = {Comparing alternative data-driven ontological vistas of natural history}, Year = {2009}, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/mitch-iwcs.pdf}} @inproceedings{VanErp+09b, Address = {Athens, Greece}, Author = {M. {Van Erp} and A. {Van den Bosch} and S. Wubben and S. Hunt}, Booktitle = {Proceedings of the EACL 2009 Workshop on Language Technology and Resources for Cultural Heritage, Social Sciences, Humanities, and Education (LaTeCH-SHELT&R 2009)}, Date-Added = {2010-01-02 19:27:15 +0100}, Date-Modified = {2010-09-14 11:59:48 +0200}, Keywords = {ilk, ontology learning, Wikipedia, mitch}, Pages = {60--68}, Title = {Instance-driven discovery of ontological relation labels}, Year = {2009}, Bdsk-Url-1 = {http://aclweb.org/anthology-new/W/W09/W09-0307.pdf}} @inproceedings{VanErp06, Address = {Malaga, Spain}, Author = {M. {Van Erp}}, Booktitle = {Proceedings of the 11th ESSLLI Student Session}, Date-Modified = {2010-09-14 12:53:26 +0200}, Editor = {J. Huitink and S. 
Katrenko}, Keywords = {ilk, mitch, named-entity recognition, gazetteers}, Pages = {192--202}, Title = {Bootstrapping multilingual geographical gazetteers from corpora}, Year = 2006, Bdsk-Url-1 = {http://ilk.uvt.nl/mitch/papers/esslli06.pdf}} @inproceedings{VanErp07, Address = {Prague, Czech Republic}, Author = {M. {Van Erp}}, Booktitle = {Proceedings of the Workshop on Language Technology for Cultural Heritage Data (LaTeCH 2007)}, Date-Added = {2010-01-03 01:04:40 +0100}, Date-Modified = {2010-09-14 11:59:41 +0200}, Keywords = {ilk, clustering, text mining, natural history, mitch}, Pages = {17--24}, Title = {Retrieving lost information from textual databases: Rediscovering expeditions from an animal specimen database}, Year = {2007}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/W/W07/W07-0903.pdf}} @inproceedings{VanEynde+00, Author = {F. {Van Eynde} and J. Zavrel and W. Daelemans}, Booktitle = {In Proceedings of LREC'2000}, Date-Modified = {2010-10-31 11:30:16 +0100}, Keywords = {clips, POS tagging, lemmatization, CGN, part-of-speech tagging}, Pages = {1427--1433}, Title = {Part of speech tagging and lemmatisation for the {S}poken {D}utch {C}orpus}, Year = {2000}} @techreport{VanEynde04, Author = {F. {Van Eynde}}, Date-Modified = {2009-11-14 17:35:23 +0100}, Institution = {Centrum voor Computerlingu\"{\i}stiek, K.U. Leuven}, Keywords = {part-of-speech tagging, lemmatization, Spoken Dutch Corpus}, Month = {February}, Title = {Part of speech tagging en lemmatisering van het {C}orpus {G}esproken {N}ederlands}, Year = 2004} @mastersthesis{VanGompel09, Author = {M. {van Gompel}}, School = {Tilburg University}, Title = {Phrase-based Memory-based Machine Translation}, Year = 2009} @inproceedings{VanGompel+09, Address = {Dublin, Ireland}, Author = {M. {Van Gompel} and A. {Van den Bosch} and P. 
Berck}, Booktitle = {Proceedings of the Third Workshop on Example-Based Machine Translation}, Date-Added = {2010-01-02 19:47:11 +0100}, Date-Modified = {2011-02-01 22:27:45 +0100}, Editor = {M. Forcada and A. Way}, Keywords = {ilk, dutchsemcor, memory-based machine translation, vici, pbmbmt, mbmt}, Pages = {79--86}, Title = {Extending memory-based machine translation to phrases}, Year = {2009}, Bdsk-Url-1 = {http://proylt.anaproy.nl/media/software/pbmbmt_paper.pdf}} @inproceedings{VanGompel10, Address = {Morristown, NJ, USA}, Author = {M. {Van Gompel}}, Booktitle = {SemEval '10: Proceedings of the 5th International Workshop on Semantic Evaluation}, Date-Modified = {2011-02-01 22:27:37 +0100}, Keywords = {ilk, vici, dutchsemcor, wsd, semeval, cross-lingual, word sense disambiguation}, Location = {Los Angeles, California}, Pages = {238--241}, Publisher = {Association for Computational Linguistics}, Title = {UvT-WSD1: A cross-lingual word sense disambiguation system}, Year = {2010}} @inproceedings{VanHalteren+98, Author = {H. {Van Halteren} and J. Zavrel and W. Daelemans}, Booktitle = {Proceedings of the joint 17th International Conference on Computational Linguistics and 36th Annual Meeting of the Association for Computational Linguistics}, Date-Modified = {2010-09-18 14:41:02 +0200}, Keywords = {ilk, POS tagging, ensembles, part-of-speech tagging}, Pages = {491--497}, Title = {Improving Data Driven Wordclass Tagging by System Combination}, Year = 1998} @inproceedings{VanHerwijnen+03, Author = {O. {Van Her\-wij\-nen} and A. {Van den Bosch} and J. Terken and E. 
Marsi}, Booktitle = {Tenth Conference of the European Chapter of the Association for Computational Linguistics (EACL-03)}, Date-Modified = {2010-09-14 13:04:07 +0200}, Keywords = {ilk, PP attachment, prosody, memory-based language processing, memory-based learning, prosit, vi}, Pages = {139--146}, Title = {Learning {PP} attachment for filtering prosodic phrasing}, Year = 2004, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/paper_EACL03.pdf}} @article{VanHulse+07, Author = {Van Hulse, J.D. and Khoshgoftaar, T.M. and Huang, H.}, Journal = {Knowledge and Information Systems}, Number = 2, Pages = {171--190}, Publisher = {Springer}, Title = {{The pairwise attribute noise detection algorithm}}, Volume = 11, Year = 2007} @article{VanOrden+90, Author = {G. C. {Van Orden} and B. F. Pennington and G. O. Stone}, Journal = {Psychological Review}, Pages = {488--522}, Title = {Word identification in reading and the promise of subsymbolic psycholinguistics}, Volume = 97, Year = 1990} @book{VanRijsbergen79, Address = {London}, Author = {C.J. {Van Rijsbergen}}, Publisher = {Butterworths}, Title = {Information Retrieval}, Year = 1979} @inproceedings{VanZaanen+03, Address = {Amsterdam, The Netherlands}, Author = {M. {Van Zaanen} and G. {Van Huyssteen}}, Booktitle = {Computational Linguistics in the Netherlands 2002---Selected Papers from the Thirteenth {CLIN} Meeting}, Date-Added = {2010-02-12 22:01:20 +0100}, Date-Modified = {2010-02-17 20:29:55 +0100}, Editor = {T. Gaustad}, Keywords = {ilk, spelling correction, Afrikaans}, Pages = {143--156}, Publisher = {Rodopi}, Series = {Language and Computers: Studies in Practical Linguistics}, Title = {Improving a Spelling Checker for {A}frikaans}, Volume = {47}, Year = {2003}} @inproceedings{VandenBosch+00, Address = {San Francisco, CA}, Author = {A. {Van den Bosch} and J. Zavrel}, Booktitle = {Proceedings of the Seventeenth International Conference on Machine Learning}, Date-Modified = {2010-01-02 21:33:03 +0100}, Editor = {P.
Langley}, Keywords = {ilk}, Pages = {1055--1062}, Publisher = {Morgan Kaufmann}, Title = {Unpacking multi-valued symbolic features and classes in memory-based language learning}, Year = 2000} @incollection{VandenBosch+00b, Author = {A. {Van den Bosch} and W. Daelemans}, Booktitle = {Models of Language Acquisition: inductive and deductive approaches}, Date-Modified = {2010-09-14 13:05:39 +0200}, Editor = {P. Broeder and J.M.J. Murre}, Keywords = {ilk, grapheme-phoneme conversion, knaw}, Pages = {76--99}, Publisher = {Oxford University Press}, Title = {A Distributed, Yet Symbolic Model of Text-to-Speech Processing}, Year = 2000} @inproceedings{VandenBosch+01, Address = {New Brunswick, NJ}, Author = {A. {Van den Bosch} and E. Krahmer and M. Swerts}, Booktitle = {Proceedings of the 39th Meeting of the Association for Computational Linguistics}, Date-Modified = {2010-01-02 21:32:51 +0100}, Keywords = {ilk, spoken dialogue systems}, Pages = {499--506}, Publisher = {ACL}, Title = {Detecting problematic turns in human-machine interactions: Rule-induction versus memory-based learning approaches}, Year = 2001} @inproceedings{VandenBosch+02, Author = {A. {Van den Bosch} and S. Buchholz}, Booktitle = {Proceedings of the 40th Meeting of the Association for Computational Linguistics}, Date-Modified = {2010-01-02 21:32:44 +0100}, Keywords = {ilk, shallow parsing, learning curves, part-of-speech tagging}, Pages = {433--440}, Title = {Shallow parsing on the basis of words only: A case study}, Year = 2002} @inproceedings{VandenBosch+04, Address = {Boston, MA}, Author = {A. {Van den Bosch} and S. Canisius and W. Daelemans and I. Hendrickx and E. {Tjong Kim Sang}}, Booktitle = {Proceedings of the Eighth Conference on Computational Natural Language Learning}, Date-Modified = {2011-06-21 18:12:20 +0200}, Editor = {E. Riloff and H. T. 
Ng}, Keywords = {ilk, semantic role labeling, imix, vi}, Title = {Memory-based semantic role labeling: Optimizing features, algorithm, and output}, Year = 2004, Bdsk-Url-1 = {http://www.cnts.ua.ac.be/conll2004/pdf/10205van.pdf}} @inproceedings{VandenBosch+05, Address = {Ann Arbor, MI}, Author = {A. {Van den Bosch} and W. Daelemans}, Booktitle = {Proceedings of the Ninth Conference on Natural Language Learning, CoNLL-2005}, Date-Modified = {2010-09-14 13:05:06 +0200}, Keywords = {ilk, information extraction, vi, imix}, Pages = {80--87}, Title = {Improving sequence segmentation learning by predicting trigrams}, Year = 2006} @inproceedings{VandenBosch+06, Address = {Trento, Italy}, Author = {A. {Van den Bosch} and I. Schuurman and V. Vandeghinste}, Booktitle = {Proceedings of the Fifth International Conference on Language Resources and Evaluation, LREC-2006}, Date-Modified = {2010-09-18 14:34:55 +0200}, Keywords = {ilk, part-of-speech tagging, vici, d-coi}, Title = {Transferring {PoS}-tagging and lemmatization tools from spoken to written {D}utch corpus development}, Year = 2006, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/dcoi-taglemma.pdf}} @inproceedings{VandenBosch+06b, Address = {New York, NY}, Author = {A. {Van den Bosch} and S. Canisius}, Booktitle = {Proceedings of the Eighth Meeting of the ACL Special Interest Group in Computational Phonology, SIGPHON '06}, Date-Modified = {2010-09-14 13:00:06 +0200}, Keywords = {ilk, imix, vi, constraint satisfaction inference, letter-phoneme conversion, morphological segmentation}, Pages = {61--69}, Title = {Improved morpho-phonological sequence processing with constraint satisfaction inference}, Year = 2006} @inproceedings{VandenBosch+07, Address = {Leuven, Belgium}, Author = {A. {Van den Bosch} and N. Stroppa and A. Way}, Booktitle = {Proceedings of the METIS-II Workshop on New Approaches to Machine Translation}, Date-Modified = {2010-01-02 21:32:04 +0100}, Editor = {F. Van Eynde and V. Vandeghinste and I. 
Schuurman}, Keywords = {ilk, memory-based machine translation, example-based machine translation}, Pages = {63--72}, Title = {A memory-based classification approach to marker-based {EBMT}}, Year = 2007, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/metis07.pdf}} @inproceedings{VandenBosch+07b, Address = {Amsterdam, The Netherlands}, Author = {A. {Van den Bosch} and K. {Van der Sloot}}, Booktitle = {Proceedings of the 18th BENELEARN Conference}, Date-Modified = {2010-01-02 21:31:38 +0100}, Editor = {P. Adriaans and M. van Someren and S. Katrenko}, Keywords = {ilk, k-NN}, Title = {Superlinear parallelisation of the {{\em k}}-nearest neighbor classifier}, Year = 2007, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/benelearn07-vandenbosch-sloot.pdf}} @inproceedings{VandenBosch+07c, Address = {Leuven, Belgium}, Author = {A. {Van den Bosch} and G.J. Busser and S. Canisius and W. Daelemans}, Booktitle = {Computational Linguistics in the Netherlands: Selected Papers from the Seventeenth CLIN Meeting}, Date-Modified = {2010-09-14 12:59:57 +0200}, Editor = {P. Dirix and I. Schuurman and V. Vandeghinste and F. {Van Eynde}}, Keywords = {ilk, imix, vi, morphological analysis, dependency parsing, memory-based language processing, part-of-speech tagging, Dutch}, Pages = {99--114}, Title = {An efficient memory-based morpho-syntactic tagger and parser for {Dutch}}, Year = 2007, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/tadpole-final.pdf}} @inproceedings{VandenBosch+07d, Address = {Illinois, US}, Author = {A. {Van den Bosch} and C. Sporleder and M. {Van Erp} and S. 
Hunt}, Booktitle = {Proceedings of Digital Humanities 2007, the 19th Joint International Conference of the Association for Computers and the Humanities and the Association for Literary and Linguistic Computing}, Date-Added = {2010-01-03 01:03:01 +0100}, Date-Modified = {2010-09-14 11:59:15 +0200}, Keywords = {ilk, digital heritage, data cleaning, mitch, data imputation}, Organization = {University of Illinois at Urbana-Champaign}, Pages = {223--224}, Title = {Automatic techniques for generating and correcting cultural heritage collection metadata}, Year = {2007}, Bdsk-Url-1 = {http://www.digitalhumanities.org/dh2007/abstracts/xhtml.xq?id=160}} @inbook{VandenBosch+07e, Address = {Berlin, Germany}, Author = {A. {Van den Bosch} and E. Marsi and A. Soudi}, Chapter = {11}, Date-Added = {2010-01-03 01:17:27 +0100}, Date-Modified = {2010-01-03 01:19:13 +0100}, Editor = {A. Soudi and A. {Van den Bosch} and G. Neumann}, Keywords = {ilk, Arabic, part-of-speech tagging, morphological analysis}, Pages = {203--219}, Publisher = {Springer Verlag}, Title = {Memory-based morphological analysis and part-of-speech tagging of Arabic}, Year = {2007}, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/P3-C11-Jan07.pdf}} @inproceedings{VandenBosch+07f, Address = {Utrecht, the Netherlands}, Author = {A. {Van den Bosch} and K. {Van der Sloot}}, Booktitle = {Proceedings of the 19th Belgian-Dutch Artificial Intelligence Conference (BNAIC-2007)}, Date-Added = {2010-01-03 01:22:00 +0100}, Date-Modified = {2010-01-03 01:23:13 +0100}, Editor = {M. Dastani and E. de Jong}, Keywords = {ilk, k-NN, parallelism}, Pages = {65--72}, Title = {Superlinear parallelization of {\em k}-nearest neighbor retrieval}, Year = {2007}, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/parallelknn.pdf}} @inproceedings{VandenBosch+08, Address = {Amsterdam, The Netherlands}, Author = {A. {Van den Bosch} and T. 
Bogers}, Booktitle = {MobileHCI 2008: Proceedings of the 10th International Conference on Human-Computer Interaction with Mobile Devices and Services, IOP-MMI special track,}, Date-Added = {2010-01-03 10:43:51 +0100}, Date-Modified = {2010-09-18 14:34:36 +0200}, Keywords = {ilk, word completion, mobile devices, apropos, vici}, Month = {September}, Pages = {465--470}, Title = {Efficient Context-Sensitive Word Completion for Mobile Devices}, Year = {2008}, Bdsk-Url-1 = {http://ilk.uvt.nl/~toine/publications/vandenbosch.2008.mobilehci-2008-paper.pdf}} @article{VandenBosch+09, Author = {A. {Van den Bosch} and P. Berck}, Date-Added = {2010-01-02 19:13:36 +0100}, Date-Modified = {2010-09-14 13:01:30 +0200}, Journal = {The Prague Bulletin of Mathematical Linguistics}, Keywords = {ilk, vici, memory-based machine translation, memory-based language modeling}, Pages = {17--26}, Title = {Memory-based machine translation and language modeling}, Volume = {91}, Year = {2009}, Bdsk-Url-1 = {http://ufal.mff.cuni.cz/pbml/91/art-bosch.pdf}} @article{VandenBosch+09b, Author = {A. {Van den Bosch} and M. {Van Erp} and C. Sporleder}, Date-Added = {2010-01-02 19:33:15 +0100}, Date-Modified = {2010-09-14 11:59:29 +0200}, Journal = {IEEE Intelligent Systems}, Keywords = {ilk, cultural heritage, data cleaning, mitch}, Number = {2}, Pages = {54--63}, Title = {Making a clean sweep of cultural heritage}, Volume = {34}, Year = {2009}, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/MITCH-IS.pdf}} @article{VandenBosch+09c, Author = {A. {Van den Bosch} and H. J. {Van den Herik} and P. 
Doorenbosch}, Date-Added = {2010-01-02 19:37:57 +0100}, Date-Modified = {2010-01-02 19:39:15 +0100}, Journal = {Interdisciplinary Science Review}, Keywords = {ilk, digital heritage, cultural heritage}, Number = {2--3}, Pages = {129--138}, Title = {Digital discoveries in museums, libraries, and archives: Computer science meets cultural heritage}, Volume = {34}, Year = {2009}, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/ISR34.2-01.pdf}} @article{VandenBosch+09d, Author = {A. {Van den Bosch} and P. Lendvai and M. {Van Erp} and S. Hunt and M. {Van der Meij} and R. Dekker}, Date-Added = {2010-01-02 19:39:32 +0100}, Date-Modified = {2010-09-14 11:59:21 +0200}, Journal = {Interdisciplinary Science Review}, Keywords = {ilk, natural history, digital heritage, mitch}, Number = {2--3}, Pages = {206--23}, Title = {Weaving a new fabric of natural history}, Volume = {34}, Year = {2009}, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/ISR34.2-06.pdf}} @inproceedings{VandenBosch+92, Author = {A. {Van den Bosch} and W. Daelemans}, Booktitle = {Proceedings of the Computational Linguistics in the Netherlands meeting 1991}, Date-Modified = {2010-01-02 21:31:17 +0100}, Editor = {J. {Van Eijk} and W. Meyer}, Keywords = {ilk}, Pages = {40--53}, Publisher = {Utrecht: OTS}, Title = {Linguistic pattern matching capabilities of connectionist networks}, Year = 1992} @inproceedings{VandenBosch+93, Author = {A. {Van den Bosch} and W. Daelemans}, Booktitle = {Proceedings of the 6th Conference of the EACL}, Date-Modified = {2010-01-02 21:31:12 +0100}, Keywords = {ilk, grapheme-phoneme conversion}, Pages = {45--53}, Title = {Data-Oriented Methods for Grapheme-to-Phoneme Conversion}, Year = 1993} @inproceedings{VandenBosch+94, Author = {A. {Van den Bosch} and A. Content and W. Daelemans and B. {De Gelder}}, Booktitle = {Proceedings of the 2nd International Conference on Quantitative Linguistics, QUALICO-94}, Date-Modified = {2010-01-02 21:31:06 +0100}, Editor = {A. A. 
Polikarpov}, Keywords = {ilk, writing systems, orthographic depth}, Pages = {26--30}, Title = {Analysing orthographic depth of different languages using data-oriented algorithms}, Year = 1994} @inproceedings{VandenBosch+95, Author = {A. {Van den Bosch} and A. Weijters and H. J. {Van den Herik} and W. Daelemans}, Booktitle = {Proceedings of the 5th {B}elgian-{D}utch Conference on Machine Learning}, Date-Modified = {2010-09-14 13:05:18 +0200}, Keywords = {ilk, machine learning}, Pages = {118--126}, Title = {The profit of learning exceptions}, Year = 1995} @inproceedings{VandenBosch+95b, Author = {A. {Van den Bosch} and A. Weijters and H. J. {Van den Herik}}, Booktitle = {Proceedings of the Seventh {D}utch Conference on AI, {{\sc naic}}'95}, Date-Modified = {2010-09-14 13:05:29 +0200}, Editor = {J. C. Bioch and Y.-H. Tan}, Keywords = {ilk, lazy learning}, Pages = {211--218}, Title = {Scaling effects with greedy and lazy machine learning algorithms}, Year = 1995} @article{VandenBosch+95c, Author = {A. {Van den Bosch} and A. Content and W. Daelemans and B. {De Gelder}}, Date-Modified = {2010-01-02 21:30:39 +0100}, Journal = {Journal of Quantitative Linguistics}, Keywords = {ilk, writing systems, spelling, grapheme-phoneme conversion}, Number = 3, Title = {Measuring the complexity of writing systems}, Volume = 1, Year = 1995} @inproceedings{VandenBosch+96, Author = {A. {Van den Bosch} and W. Daelemans and A. Weijters}, Booktitle = {Proceedings of the Second International Conference on New Methods in Natural Language Processing, NeMLaP-2, Ankara, Turkey}, Date-Modified = {2010-01-02 21:30:15 +0100}, Editor = {K. Oflazer and H. Somers}, Keywords = {ilk, morphological analysis}, Pages = {79--89}, Title = {Morphological analysis as classification: an inductive-learning approach}, Year = 1996} @inproceedings{VandenBosch00, Address = {New Brunswick, NJ}, Author = {A. 
{Van den Bosch}}, Booktitle = {Proceedings of the Fourth Conference on Computational Natural Language Learning and of the Second Learning Language in Logic Workshop}, Date-Modified = {2010-01-02 21:30:01 +0100}, Keywords = {ilk, rule learning}, Pages = {73--78}, Publisher = {ACL}, Title = {Using induced rules as complex features in memory-based language learning}, Year = 2000} @inproceedings{VandenBosch04, Address = {Pisa, Italy}, Author = {A. {Van den Bosch}}, Booktitle = {Proceedings of the ECML/PKDD 2004 Workshop on Advances in Inductive Rule Learning}, Date-Modified = {2010-01-02 21:29:53 +0100}, Editor = {J. F\"{u}rnkrantz}, Keywords = {ilk, rule learning, feature construction}, Pages = {1--16}, Title = {Feature transformation through rule induction: a case study with the k-NN classifier}, Year = 2004, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/rbm.pdf}} @inproceedings{VandenBosch04b, Address = {Groningen, The Netherlands}, Author = {A. {Van den Bosch}}, Booktitle = {Proceedings of the Sixteenth Belgian-Dutch Conference on Artificial Intelligence}, Date-Modified = {2010-01-02 21:29:48 +0100}, Editor = {R. Verbrugge and N. Taatgen and L. Schomaker}, Keywords = {ilk, meta-learning, hyperparameter tuning, iterative deepening}, Pages = {219--226}, Title = {Wrapped progressive sampling search for optimizing learning algorithm parameters}, Year = 2004} @inproceedings{VandenBosch05, Address = {Chicago, IL}, Author = {A. {Van den Bosch}}, Booktitle = {Workshop Proceedings of the 6th International Conference on Case-Based Reasoning}, Date-Modified = {2010-01-02 21:29:42 +0100}, Keywords = {ilk, spoken dialogue systems}, Pages = {85--94}, Title = {Memory-based understanding of user utterances in a spoken dialogue system: Effects of feature selection and co-learning}, Year = 2005, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/ovis-featsel.pdf}} @article{VandenBosch05b, Author = {A. 
{Van den Bosch}}, Date-Modified = {2010-09-14 13:04:36 +0200}, Journal = {Traitement Automatique des Langues}, Keywords = {ilk, vi, spelling correction, confusible disambiguation, language modeling}, Number = 2, Pages = {39--63}, Title = {Scalable classification-based word prediction and confusible correction}, Volume = 46, Year = 2006, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/wordpredict-prefinal-TAL.pdf}} @article{VandenBosch06, Author = {A. {Van den Bosch}}, Date-Modified = {2010-01-02 21:29:17 +0100}, Journal = {Written Language and Literacy}, Keywords = {ilk, spelling, grapheme-phoneme conversion, morphological analysis}, Number = 1, Pages = {25--44}, Title = {Spelling space: A computational test bed for phonological and morphological changes in Dutch spelling}, Volume = 9, Year = 2006, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/Spelling-space.pdf}} @inproceedings{VandenBosch06b, Address = {New York, NY}, Author = {A. {Van den Bosch}}, Booktitle = {Proceedings of the HLT-NAACL Workshop on Computationally hard problems and joint inference in speech and language processing}, Date-Modified = {2010-01-02 21:28:53 +0100}, Keywords = {ilk, language modeling, word prediction, confusible disambiguation}, Title = {All-word prediction as the ultimate confusible disambiguation}, Year = 2006, Bdsk-Url-1 = {http://www.aclweb.org/anthology-new/W/W06/W06-3604.pdf}} @misc{VandenBosch09b, Author = {A. {Van den Bosch}}, Howpublished = {Personal communication}, Title = {Harvesting Accurate and Fast Confusible Detectors}, Year = 2009} @mastersthesis{VandenBosch92, Author = {A. {Van den Bosch}}, Date-Modified = {2010-01-02 21:28:45 +0100}, Keywords = {ilk, grapheme-phoneme conversion, speech synthesis}, School = {Faculty of Arts, Tilburg University}, Title = {A Hybrid Model for Text-to-Speech Conversion}, Year = 1992} @inproceedings{VandenBosch94b, Author = {A. 
{Van den Bosch}}, Booktitle = {Proceedings of the Workshop on Cognitive Models of Language Acquisition}, Date-Modified = {2010-01-02 21:28:34 +0100}, Keywords = {ilk, grapheme-phoneme conversion, speech synthesis}, Pages = {91--93}, Publisher = {{{\sc itk}}, Tilburg University}, Title = {A Distributed, Yet Symbolic Model of Text-to-Speech Processing}, Year = 1994} @phdthesis{VandenBosch97, Address = {Cadier en Keer, the Netherlands}, Author = {A. {Van den Bosch}}, Date-Modified = {2010-02-22 15:24:13 +0100}, Keywords = {ilk}, Month = {December}, Publisher = {Uitgeverij Phidippides}, School = {Universiteit Maastricht}, Title = {Learning to pronounce written words: A study in inductive language learning}, Year = 1997} @article{VandenBosch99, Author = {A. {Van den Bosch}}, Date-Modified = {2010-09-14 13:04:51 +0200}, Journal = {Journal for Experimental and Theoretical Artificial Intelligence}, Keywords = {ilk, fambl, knaw}, Number = 3, Pages = {339--368}, Title = {Careful Abstraction from Instance Families in Memory-Based Language Learning}, Volume = 11, Year = 1999} @inproceedings{VandenBosch99b, Address = {Bled, Slovenia}, Author = {A. {Van den Bosch}}, Booktitle = {Machine Learning: Proceedings of the Sixteenth International Conference}, Date-Modified = {2010-01-02 21:28:01 +0100}, Editor = {I. Bratko and S. Dzeroski}, Keywords = {ilk, Fambl}, Pages = {39--48}, Title = {Instance-family abstraction in memory-based language learning}, Year = 1999} @misc{VandenHerik88, Address = {Maastricht, The Netherlands}, Author = {H.J. {Van den Herik}}, Note = {Inaugural address, University of Limburg, Maastricht, The Netherlands}, School = {University of Limburg}, Title = {Informatica en het menselijk blikveld}, Year = 1988} @article{VandenHeuvel03, Author = {Theo van den Heuvel}, Date-Added = {2010-01-29 15:14:24 +0100}, Date-Modified = {2010-01-29 15:14:31 +0100}, Journal = {Onze Taal}, Number = {9}, Pages = {236--238}, Title = {De spelling onder controle?
- Waarom de spellingcorrector niet altijd doet wat we willen}, Volume = {72}, Year = {2003}} @inproceedings{Vandenbosch+99, Address = {San Francisco, CA}, Author = {A. {Van den Bosch} and W. Daelemans}, Booktitle = {Proceedings of the 37th Annual Meeting of the ACL}, Date-Modified = {2010-01-02 21:30:07 +0100}, Keywords = {ilk, morphological analysis}, Pages = {285--292}, Publisher = {Morgan Kaufmann}, Title = {Memory-based morphological analysis}, Year = 1999} @inproceedings{VanderBeek+02, Address = {Amsterdam, The Netherlands}, Author = {L. {Van der Beek} and G. Bouma and R. Malouf and G. {Van Noord}}, Booktitle = {Selected Papers from the Twelfth Computational Linguistics in the Netherlands Meeting, CLIN-2001}, Date-Modified = {2011-06-21 19:19:33 +0200}, Publisher = {Rodopi}, Title = {The {A}lpino {D}ependency {T}reebank}, Year = 2001} @phdthesis{vanZaanen02, Address = {Leeds, UK}, Author = {Menno {Van Zaanen}}, Keywords = {machine_learning}, School = {University of Leeds}, Title = {Bootstrapping Structure into Language: {A}lignment-{B}ased {L}earning}, Year = {2002}} @article{Vapnik+93, Author = {V. Vapnik and L. Bottou}, Journal = {Neural Computation}, Number = 6, Pages = {893--909}, Title = {Local algorithms for pattern recognition and dependencies estimation}, Volume = 5, Year = 1993} @book{Vapnik95, Address = {Berlin}, Author = {V. Vapnik}, Publisher = {Springer}, Series = {Springer Series in Statistics}, Title = {Estimation of dependencies based on empirical data}, Year = 1995} @book{Vapnik95b, Address = {New York, NY, USA}, Author = {Vapnik, V.N.}, Date-Added = {2009-11-15 14:23:12 +0100}, Date-Modified = {2009-11-15 14:23:28 +0100}, Publisher = {Springer-Verlag}, Title = {The nature of statistical learning theory}, Year = {1995}} @book{Vapnik98, Address = {New York}, Author = {V. Vapnik}, Publisher = {John Wiley and Sons Inc.}, Title = {Statistical Learning Theory}, Year = 1998} @inbook{Varela+88, Author = {F. Varela and A. Coutinho and B. Dupire and N. 
Nelson}, Booktitle = {Theoretical Immunology, Santa Fe Institute in the Sciences of Complexity}, Editor = {A. S. Perelson}, Publisher = {Addison Wesley Publishing Company}, Title = {Cognitive Networks: Immune, Neural and Otherwise}, Year = 1988} @inproceedings{Varges+01, Address = {New Brunswick, NJ}, Author = {S. Varges and C. Mellish}, Booktitle = {Proceedings of the 2nd Meeting of the North American Chapter of the Association for Computational Linguistics (NAACL-01)}, Pages = {1--8}, Publisher = {ACL}, Title = {Instance-based natural language generation}, Year = 2001} @inproceedings{Veale+97, Author = {Veale, T. and Way, A.}, Booktitle = {Proceedings of 2nd International Conference on Recent Advances in Natural Language Processing}, Date-Added = {2009-11-15 14:23:33 +0100}, Date-Modified = {2009-11-15 14:23:44 +0100}, Pages = {239--244}, Title = {Gaijin: A bootstrapping, template-driven approach to example-based machine translation}, Year = {1997}} @article{Veenstra+00, Author = {J. Veenstra and A. {Van den Bosch} and S. Buchholz and W. Daelemans and J. Zavrel}, Date-Modified = {2010-09-14 13:03:55 +0200}, Editor = {Kilgarriff, A. and Palmer, M.}, Journal = {Computers and the Humanities}, Keywords = {ilk, knaw, wsd, word sense disambiguation, memory-based learning, memory-based language processing}, Number = {1/2}, Pages = {171-177}, Title = {Memory-Based Word Sense Disambiguation}, Volume = 34, Year = 2000} @techreport{Veenstra+00b, Author = {J. Veenstra and W. Daelemans}, Date-Modified = {2010-01-02 21:33:53 +0100}, Institution = {ILK Research Group, University of Tilburg}, Keywords = {ilk, parsing}, Number = {ILK 00-12}, Title = {A Memory-Based Alternative for Connectionist Shift-Reduce Parsing}, Year = 2000} @inproceedings{Veenstra+94, Address = {{Dublin}}, Author = {J. Veenstra and J. 
Zavrel}, Booktitle = {Proceedings of third international conference on the Cognitive Science of NLP}, Title = {A Corpus-Based Approach to Syntax Acquisition}, Year = 1994} @mastersthesis{Veenstra95, Author = {J. Veenstra}, School = {Utrecht University}, Title = {{HeadCODE}: Syntactic parsing as head correlation detection}, Year = 1995} @inproceedings{Veenstra98, Address = {Wageningen, The Netherlands}, Author = {J. Veenstra}, Booktitle = {Proceedings of BENELEARN'98}, Date-Modified = {2010-01-02 21:33:34 +0100}, Keywords = {ilk, shallow parsing}, Pages = {71--78}, Title = {Fast {NP} chunking using memory-based learning techniques}, Year = 1998} @inproceedings{Veenstra99, Address = {Chania, Greece}, Author = {J. Veenstra}, Booktitle = {Proceedings of ACAI'99}, Date-Modified = {2010-01-02 21:33:26 +0100}, Keywords = {ilk}, Note = {submitted}, Title = {Memory-Based Text Chunking}, Year = 1999} @book{Venezky70, Author = {R. L. Venezky}, Publisher = {The Hague: Mouton}, Title = {The structure of {E}nglish orthography}, Year = 1970} @article{Verstaen+93, Author = {A. Verstaen and I. Gielen and M. Brysbaert and G. d'Ydewalle}, Journal = {Psychologica Belgica}, Pages = {77--98}, Title = {Naming and Lexical Decision Latencies for Three Hundred {D}utch Nonwords}, Volume = 33, Year = 1993} @inproceedings{Vilain+96, Author = {M.B. Vilain and D.S. Day}, Booktitle = COLING96, Title = {Finite-state phrase parsing by rule sequences}, Year = 1996} @article{Vincent+02, Author = {Vincent, P. and Bengio, Y.}, Date-Added = {2009-11-15 14:23:49 +0100}, Date-Modified = {2009-11-15 14:23:59 +0100}, Journal = {Machine Learning}, Number = {1}, Pages = {165--187}, Title = {Kernel Matching Pursuit}, Volume = {48}, Year = {2002}} @article{Viterbi67, Author = {A. J. 
Viterbi}, Date-Modified = {2009-11-25 23:46:37 +0100}, Journal = {{IEEE} Transactions on Information Theory}, Keywords = {viterbi search}, Pages = {260--269}, Title = {Error bounds for convolutional codes and an asymptotically optimum decoding algorithm}, Volume = 13, Year = 1967} @inproceedings{Vogel96, Address = {Copenhagen, Denmark}, Author = {S. Vogel and H. Ney and C. Tillmann}, Booktitle = {Proceedings of COLING 1996}, Pages = {836-841}, Title = {{HMM}-based Word Alignment in Statistical Translation}, Year = 1996} @article{Voisin+87, Author = {J. Voisin and P. A. Devijver}, Journal = {Pattern Recognition}, Pages = {465--474}, Title = {An application of the {M}ultiedit-{C}ondensing technique to the reference selection problem in a print recognition system}, Volume = 5, Year = 1987} @inproceedings{Voorhees00, Author = {E. Voorhees}, Booktitle = {The Ninth Text Retrieval Conference (TREC-9)}, Date-Modified = {2009-11-25 23:46:48 +0100}, Keywords = {question answering}, Pages = {71--80}, Publisher = {NIST SP 500-249}, Title = {Overview of the TREC-9 Question Answering Track}, Year = 2000} @incollection{Voorhees93, Author = {E. Voorhees and Y.-W. Hou}, Booktitle = {The First Text Retrieval Conference (TREC-1)}, Editor = {D. K. Harman}, Pages = {343--351}, Publisher = {NIST SP 500-207}, Title = {Vector expansion in a large collection}, Year = 1993} @inproceedings{Voorhees93a, Author = {E. Voorhees}, Booktitle = SIGIR, Date-Modified = {2009-09-06 20:43:50 +0200}, Keywords = {wsd, word sense disambiguation, information retrieval, ir}, Pages = {171--180}, Title = {Using {WordNet} to disambiguate word senses for text retrieval}, Year = 1993} @phdthesis{Vosse94, Address = {Leiden, The Netherlands}, Author = {Theo Vosse}, Date-Added = {2010-01-29 15:14:37 +0100}, Date-Modified = {2010-01-29 15:14:46 +0100}, Pages = {9, 240}, School = {Rijksuniversiteit Leiden}, Title = {The {W}ord {C}onnection. 
Grammar-based spelling error correction in {D}utch}, Year = {1994}} @inproceedings{Voutilainen93, Author = {A. Voutilainen}, Booktitle = {Proceedings of the 9th Nordiska Datalingvistikdagarna}, Place = {Stockholm}, Title = {A Noun Phrase Parser of English}, Year = 1993} @inproceedings{Voutilainen93b, Author = {A. Voutilainen and P. Tapanainen}, Booktitle = EACL93, Title = {Ambiguity resolution in a reductionistic parser}, Year = 1993} @unpublished{Vroomen+97, Author = {J. Vroomen and A. {Van den Bosch} and B. {De Gelder}}, Date-Modified = {2010-09-14 22:16:43 +0200}, Keywords = {ilk, syllabification}, Note = {Language and Cognitive Processes}, Title = {A Connectionist Model for Bootstrap Learning of Syllabic Structure}, Year = 1997} @inproceedings{Vysniauskas+95, Author = {V. Vysniauskas and F. Groen and B. Kr{\"{o}}se}, Booktitle = {Proceedings of {{\sc icann}}'95, Paris}, Pages = 311, Title = {Orthogonal incremental learning of a feedforward network}, Year = 1995} @article{Wagner74, Author = {Robert A. Wagner and Michael J. Fischer}, Date-Added = {2010-01-29 15:29:18 +0100}, Date-Modified = {2010-01-29 15:29:18 +0100}, Journal = {Journal of the Association for Computing Machinery}, Pages = {168--173}, Title = {The String-to-String Correction Problem}, Volume = {21}, Year = {1974}} @book{Wahlster00, Editor = {W. Wahlster}, Publisher = {Springer}, Title = {Verbmobil: {F}oundations of {S}peech-to{S}peech {T}ranslation}, Year = 2000} @inproceedings{Walker89, Author = {M. Walker}, Booktitle = ACL, Title = {Evaluating discourse processing algorithms}, Year = 1989} @article{Waltz+85, Author = {D.L. Waltz and J.B. Pollack}, Journal = {Cognitive Science}, Pages = {51--74}, Title = {Massively Parallel Parsing: A strongly Interactive Model of Natural Language Interpretation}, Volume = 9, Year = 1985} @article{Waltz95, Author = {D.L. 
Waltz}, Date-Modified = {2009-11-25 23:46:57 +0100}, Journal = {International Journal of High Speed Computing}, Keywords = {parallelism}, Number = 3, Pages = {491--501}, Title = {Massively parallel {AI}}, Volume = 5, Year = 1995} @inproceedings{Wang+97, Author = {Wang, Y.Y. and Waibel, A.}, Booktitle = {Proceedings of the eighth conference on European chapter of the Association for Computational Linguistics}, Date-Added = {2009-11-15 14:24:27 +0100}, Date-Modified = {2009-11-15 14:24:43 +0100}, Keywords = {statistical machine translation}, Pages = {366--372}, Title = {Decoding algorithm in statistical machine translation}, Year = {1997}} @inproceedings{Warwick90, Author = {S. Warwick and J. Hajic and G. Russell}, Booktitle = UWOED, Title = {Searching on tagged corpora: linguistically motivated concordance analysis}, Year = 1990} @incollection{Watanabe+03, Address = {Dordrecht, The Netherlands}, Author = {H. Watanabe and S. Kurohashi and E. Aramaki}, Booktitle = {Recent Advances in Example-Based Machine Translation}, Editor = {M. Carl and A. Way}, Pages = {397--420}, Publisher = {Kluwer Academic Publishers}, Title = {Finding Translation Patterns from Paired Source and Target Dependency Structures}, Year = 2003} @inproceedings{Wayland+94, Address = {{CUNY Conference on Sentence Processing, Manhattan NY}}, Author = {S. Wayland and R. Berndt and J. Sandson}, Booktitle = {Poster presentation}, Date-Modified = {2008-07-23 16:21:19 +0200}, Keywords = {nouns, verbs, human language processing}, Title = {Syntactic, Semantic and Lexical Information in Auditory Comprehension}, Year = {1994}} @incollection{Weaver55, Address = {Cambridge, MA, USA}, Author = {Weaver, W.}, Booktitle = {Machine Translation of Languages: Fourteen Essays}, Chapter = {1}, Date-Added = {2009-11-15 14:24:46 +0100}, Date-Modified = {2009-11-15 14:24:57 +0100}, Editor = {W.N. 
Locke}, Pages = {15--23}, Publisher = {MIT Press}, Title = {Translation}, Year = {1955}} @incollection{Weigend+91, Address = {San Mateo, CA}, Author = {A.S. Weigend and D.E. Rumelhart and B.A. Huberman}, Booktitle = {Advances in Neural Information Processing Systems}, Editor = {R. P. Lippman and J. E. Moody and D. S. Touretzky}, Pages = {875-882}, Publisher = {Morgan Kaufmann}, Title = {Generalization by weight-elimination with application to forecasting}, Volume = 3, Year = 1991} @incollection{Weigend+94, Address = {Cambridge, MA}, Author = {A.S. Weigend and D.E. Rumelhart}, Booktitle = {Computational Learning Theory and Natural Learning Systems}, Chapter = 16, Editor = {S. J. Hanson and G. A. Drastal and R. L. Rivest}, Pages = {457--476}, Publisher = MIT, Title = {Weight elimination and effective network size}, Volume = {1: Constraints and Prospects}, Year = 1994} @inproceedings{Weigend95, Author = {A.S. Weigend}, Booktitle = {Proceedings of the 1993 Connectionist Models Summer School}, Editor = {M. Mozer and P. Smolensky and D.S. Touretzky and J.L. Elman and A.S. Weigend}, Pages = {335-342}, Title = {On overfitting and the effective number of hidden units}} @article{Weijters+90, Author = {A. Weijters and G. Hoppenbrouwers}, Journal = {TABU}, Number = 1, Pages = {1--25}, Title = {NetSpraak: Een Neuraal Netwerk voor Grafeem-Foneem-Omzetting}, Volume = 20, Year = 1990} @inproceedings{Weijters+92, Author = {A. Weijters and J. Thole}, Booktitle = {TWLT3: Connectionism and Natural Language Processing}, Editor = {M.F.J. Drossaers and A. Nijholt}, Pages = {17--25}, Publisher = {Enschede: Twente University}, Title = {Speech Synthesis with Artificial Neural Networks}, Year = 1992} @inproceedings{Weijters90, Author = {A. Weijters}, Booktitle = {Proceedings van de Derde Nederlandstalige AI Conferentie, NAIC-90}, Editor = {H. J. {van den Herik} and N. 
Mars}, Pages = {137--146}, Title = {NetSpraak: een spraakmakend back-propagation netwerk}, Year = 1990} @inproceedings{Weijters91, Author = {A. Weijters}, Booktitle = {{{\sc naic}} '91 Proceedings}, Editor = {J. Treur}, Pages = {249--260}, Title = {Analyse van het Patroonherkennend Vermogen van {{\sc net}}talk}, Year = 1991} @inproceedings{Weijters91b, Author = {A. Weijters}, Booktitle = {Proceedings of the International Conference on Artificial Neural Networks - {ICANN}-91, Espoo, Finland}, Title = {A simple look-up procedure superior to {NETtalk}?}, Year = 1991} @article{Weijters96, Author = {A. Weijters}, Date-Modified = {2009-11-14 17:39:24 +0100}, Journal = {Neural Processing Letters}, Pages = {13-16}, Title = {The {BP-SOM} architecture and learning rule}, Year = 1996} @article{Weischedel+93, Author = {R. Weischedel and M. Meteer and R. Schwartz and L. Ramshaw and J. Palmucci}, Journal = CL, Number = 2, Pages = {359--382}, Title = {Coping with Ambiguity and Unknown Words through Probabilistic Models}, Volume = 19, Year = 1993} @book{Weiss+91, Author = {S. Weiss and C. Kulikowski}, Date-Modified = {2009-11-14 23:08:47 +0100}, Keywords = {machine learning, evaluation}, Publisher = {San Mateo, CA: Morgan Kaufmann}, Title = {Computer systems that learn}, Year = 1991} @article{Wells47, Author = {Wells, R.S.}, Journal = {Language}, Pages = {81--117}, Title = {Immediate Constituents}, Volume = 23, Year = 1947} @phdthesis{Werbos74, Author = {P. J. 
Werbos}, School = {Harvard University}, Title = {Beyond regression: new tools for the prediction and analysis in the behavioral sciences}, Year = 1974} @incollection{Wermter+96, Address = {Berlin}, Author = {Stefan Wermter and Ellen Riloff and Gabriele Scheler}, Booktitle = {Connectionist, Statistical and Symbolic Approaches to Learning for Natural Language Processing}, Editor = {Stefan Wermter and Ellen Riloff and Gabriele Scheler}, Pages = {1--16}, Publisher = {Springer}, Series = {Lecture Notes in Artificial Intelligence}, Title = {Learning Approaches for Natural Language Processing}, Volume = 1040, Year = 1996} @book{Wermter+96b, Address = {Berlin}, Author = {Stefan Wermter and Ellen Riloff and Gabriele Scheler}, Publisher = {Springer}, Series = {Lecture Notes in Artificial Intelligence}, Title = {Connectionist, Statistical and Symbolic Approaches to Learning for Natural Language Processing}, Volume = 1040, Year = 1996} @article{Wermter+97, Author = {Stefan Wermter and Volker Weber}, Journal = {Journal of Artificial Intelligence Research}, Number = 1, Title = {SCREEN: Learning a Flat Syntactic and Semantic Spoken Language Analysis Using Artificial Neural Networks}, Volume = 6, Year = 1997} @book{Wernicke74, Address = {Breslau}, Author = {Wernicke, C.}, Publisher = {Cohn und Weigart}, Title = {{Der aphasische Symptomencomplex}}, Year = 1874} @incollection{Wess+94, Author = {S. Wess and K. D. Althoff and G. Derwand}, Booktitle = {Topics in Case-Based Reasoning}, Editor = {K. D. Althoff and S. Wess and G. Derwand}, Pages = {167--181}, Publisher = {Berlin: Springer-Verlag}, Title = {Using k-d trees to improve the classification step in case-based reasoning}, Year = 1994} @phdthesis{Wess95, Author = {S. Wess}, School = {University of Kaiserslautern}, Title = {Fallbasiertes Probleml{\"{o}}sen in wissensbasierten Systemen zur Entscheidungsunterst{\"{u}}tzung und Diagnostik}, Year = 1995} @inproceedings{Weston+02, Author = {Weston, J. and Chapelle, O. and Elisseeff, A. 
and Scholkopf, B. and Vapnik, V.}, Booktitle = {Advances in Neural Information Processing Systems}, Date-Added = {2009-11-15 14:25:03 +0100}, Date-Modified = {2009-11-15 14:25:28 +0100}, Editor = {Becker, S. and Thrun, S. and Obermayer, K.}, Pages = {873--880}, Title = {Kernel dependency estimation}, Volume = {15}, Year = {2002}} @techreport{Weston+98, Author = {J. Weston and C. Watkins}, Date-Added = {2009-11-15 14:25:03 +0100}, Date-Modified = {2009-11-15 14:25:18 +0100}, Institution = {Department of Computer Science, Royal Holloway, University of London}, Keywords = {support vector machines}, Number = {CSD-TR-98-04}, Title = {Multi-class support vector machines}, Year = {1998}} @inproceedings{Wettschereck+94, Address = {Palo Alto, CA}, Author = {D. Wettschereck and T. G. Dietterich}, Booktitle = {Advances in Neural Information Processing Systems}, Editor = {J. D. Cowan {\em et al}.}, Pages = {184--191}, Publisher = {Morgan Kaufmann}, Title = {Locally adaptive nearest neighbor algorithms}, Volume = 6, Year = 1994} @article{Wettschereck+95, Author = {D. Wettschereck and T. G. Dietterich}, Journal = {Machine Learning}, Pages = {1--25}, Title = {An experimental comparison of the nearest-neighbor and nearest-hyperrectangle algorithms}, Volume = 19, Year = 1995} @techreport{Wettschereck+96, Address = {Washington, DC}, Author = {D. Wettschereck and D. W. Aha and T. Mohri}, Date-Modified = {2009-11-14 18:56:57 +0100}, Institution = {Naval Research Laboratory, Navy Center for Applied Research in Artificial Intelligence}, Keywords = {feature weighting, lazy learning, instance-based learning}, Number = {AIC-95-012}, Title = {A review and comparative evaluation of feature weighting methods for lazy learning algorithms}, Year = 1996} @article{Wettschereck+97, Author = {D. Wettschereck and D. W. Aha and T. Mohri}, Editor = {D. W. 
Aha}, Journal = {Artificial Intelligence Review, special issue on Lazy Learning}, Pages = {273--314}, Title = {A Review and Comparative Evaluation of Feature-Weighting Methods for a Class of Lazy Learning Algorithms}, Volume = 11, Year = 1997} @phdthesis{Wettschereck94, Author = {D. Wettschereck}, School = {Oregon State University}, Title = {A study of distance-based machine learning algorithms}, Year = 1994} @article{White+94, Archive = {MLOnline}, Author = {A.P. White and W.Z. Liu}, Date-Modified = {2009-11-14 19:01:17 +0100}, Journal = {Machine Learning}, Keywords = {decision trees}, Pages = {321--329}, Source = {Jakub}, Title = {Bias in Information-Based Measures in Decision Tree Induction}, Volume = {15(3)}, Year = 1994} @article{White90, Author = {H. White}, Journal = {Neural Networks}, Pages = {535--550}, Title = {Connectionist nonparametric regression: multilayer feedforward networks can learn arbitrary mappings}, Volume = 3, Year = 1990} @inproceedings{Widmer94, Address = {Seattle, WA}, Author = {G. Widmer}, Booktitle = {Proceedings of the 11th National Conference on Artificial Intelligence ({AAAI}-94)}, Month = {August}, Title = {The synergy of music theory and AI: Learning multi-level expressive interpretation}, Year = 1994} @inproceedings{Widrow+60, Address = {New York, NY}, Author = {B. Widrow and M. E. Hoff}, Booktitle = {1960 {{\sc ire wescon}} Convention Record}, Number = 4, Pages = {96-104}, Publisher = {{{\sc ire}}}, Title = {Adapting switching circuits}, Year = 1960} @inproceedings{Wilcox+08, Address = {Berlin, Germany}, Author = {L. A. Wilcox-O'Hearn and G. Hirst and A. Budanitsky}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {Proceedings of the Computational Linguistics and Intelligent Text Processing 9th International Conference, CICLing 2008}, Date-Added = {2010-02-10 20:37:37 +0100}, Date-Modified = {2010-02-14 19:59:16 +0100}, Editor = {A. 
Gelbukh}, Ee = {http://dx.doi.org/10.1007/978-3-540-78135-6_52}, Pages = {605-616}, Publisher = {Springer Verlag}, Title = {Real-{W}ord Spelling Correction with Trigrams: A Reconsideration of the {M}ays, {D}amerau, and {M}ercer Model}, Volume = {LNCS 4919}, Year = {2008}} @techreport{Wiles+94, Author = {J. Wiles and C. Latimer and C. Stevens}, Institution = {Department of Computer Science, University of Queensland}, Number = 289, Title = {Collected Papers from a Symposium on Connectionist Models and Psychology}, Year = 1994} @article{Wilks+97, Address = {http://xxx.lanl.gov/archive/cmp-lg}, Annote = {WSD, Word-Sense Disambiguation}, Author = {Y. Wilks and M. Stevenson}, Date-Modified = {2010-10-06 21:00:39 +0200}, Journal = {cmp-lg}, Keywords = {wsd, word sense disambiguation}, Number = 970516, Title = {Sense Tagging: Semantic Tagging with a Lexicon}, Year = 1997} @inproceedings{Wilks+98, Address = {Montr\'eal, Quebec, Canada}, Author = {Y. Wilks and M. Stevenson}, Booktitle = COLING/ACL98, Date-Modified = {2010-10-06 21:00:54 +0200}, Keywords = {wsd, word sense disambiguation}, Pages = {1398--1402}, Publisher = {Universit\'e de Montr\'eal}, Title = {Word Sense Disambiguation using Optimised Combinations of Knowledge Sources}, Year = 1998} @article{Wilks75a, Author = {Yorick Wilks}, Journal = CACM, Note = {reprinted in RNLP}, Number = 5, Pages = {264--274}, Title = {An intelligent analyzer and understander of English}, Volume = 18, Year = 1975} @article{Wilks75b, Author = {Yorick Wilks}, Journal = AI, Pages = {53--74}, Title = {A preferential pattern-seeking semantics for natural language inference}, Volume = 6, Year = 1975} @inproceedings{Wilks85, Author = {Yorick Wilks and Xiuming Huang and Dan Fass}, Booktitle = IJCAI, Title = {Syntax, preference and right attachment}, Year = 1985} @unpublished{WilksSt96, Author = {Wilks, Y. 
and Stevenson, M.}, Date-Modified = {2009-09-06 20:44:14 +0200}, Keywords = {wsd, word sense disambiguation}, Note = {cmp-lg/9607028}, Title = {The Grammar of sense: Is word-sense tagging much more than Part-Of-Speech tagging?}, Year = 1996} @techreport{Williams+90, Author = {R. J. Williams and D. Zipser}, Institution = {Northeastern University}, Number = {NU--CCS--90--9}, Title = {Gradient-based Learning Algorithms for Recurrent Connectionist Networks}, Year = 1990} @article{Willingham+95, Author = {Willingham, D.B. and L. Preus}, Journal = {{Psyche}}, Title = {{The Death of Implicit Memory}}, Volume = {12(4)}, Year = 1995} @inproceedings{Wilson+97, Address = {San Francisco, CA}, Author = {D.R. Wilson and A.R. Martinez}, Booktitle = {Machine Learning: Proceedings of the Fourteenth International Conference}, Editor = {D. Fisher}, Title = {Instance pruning techniques}, Year = 1997} @article{Wilson72, Author = {D.R. Wilson}, Journal = {Institute of Electrical and Electronic Engineers Transactions on Systems, Man and Cybernetics}, Pages = {408--421}, Title = {Asymptotic properties of nearest neighbor rules using edited data}, Volume = 2, Year = 1972} @incollection{Winston83, Address = {San Mateo, CA}, Author = {P.H. Winston}, Booktitle = {Machine Learning: An Artificial Intelligence Approach}, Chapter = {3}, Editor = {Michalski, R.S. and Carbonell, J.G. and Mitchell, T.M.}, Pages = {45--62}, Publisher = {Morgan Kaufmann Publishers}, Title = {Learning by augmenting rules and accumulating censors}, Volume = {II}, Year = {1983}} @book{Witten+99, Author = {I. Witten and E. Frank}, Publisher = {San Mateo, CA: Morgan Kaufmann}, Title = {Data Mining: Practical Machine Learning Tools and Techniques with Java Implementations}, Year = 1999} @article{Wolpert+97, Author = {Wolpert, D.H. 
and Macready, W.G.}, Date-Added = {2009-11-15 14:25:42 +0100}, Date-Modified = {2009-11-15 14:26:08 +0100}, Journal = {IEEE Transactions on Evolutionary Computation}, Number = {1}, Pages = {67--82}, Title = {No Free Lunch Theorems for Optimization}, Volume = {1}, Year = {1997}} @inproceedings{Wolpert01, Author = {Wolpert, D.H.}, Booktitle = {Proceedings of the 6th Online World Conference on Soft Computing in Industrial Applications}, Date-Added = {2009-11-15 14:25:42 +0100}, Date-Modified = {2009-11-15 14:26:19 +0100}, Pages = {25--42}, Title = {The supervised learning no-free-lunch theorems}, Year = {2001}} @article{Wolpert90, Author = {D. H. Wolpert}, Journal = {Neural Networks}, Pages = {445--452}, Title = {Constructing a generalizer superior to {NET}talk via a mathematical theory of generalization}, Volume = 3, Year = 1990} @techreport{Wolpert92, Author = {D. H. Wolpert}, Date-Modified = {2009-11-25 23:46:25 +0100}, Institution = {The Santa Fe Institute}, Keywords = {overfitting}, Number = {SFI TR 92-03-5001}, Title = {On overfitting avoidance as bias}, Year = 1992} @article{Wolpert92b, Author = {D. H. Wolpert}, Date-Modified = {2008-07-23 17:17:12 +0200}, Journal = {{Neural Networks}}, Keywords = {stacking}, Pages = {241--259}, Title = {{Stacked Generalization}}, Volume = 5, Year = 1992} @article{Wolpert96, Author = {Wolpert, D.H.}, Date-Added = {2009-11-15 14:25:42 +0100}, Date-Modified = {2009-11-15 14:25:56 +0100}, Journal = {Neural Computation}, Number = {7}, Pages = {1341--1390}, Title = {The Lack of A Priori Distinctions Between Learning Algorithms}, Volume = {8}, Year = {1996}} @inproceedings{Wolters+97, Address = {Prague, Czech Republic}, Author = {M. Wolters and A. {Van den Bosch}}, Booktitle = {Workshop notes of the {ECML/MLnet} Familiarization Workshop on Empirical Learning of Natural Language Processing Tasks}, Date-Modified = {2010-09-14 13:03:43 +0200}, Editor = {W. Daelemans and A. {Van den Bosch} and A. 
Weij\-ters}, Keywords = {ilk, grapheme-phoneme conversion, Scottish Gaelic}, Pages = {61--70}, Publisher = {University of Economics}, Title = {Automatic phonetic transcription of words based on sparse data}, Year = 1997} @inproceedings{Wolters96, Author = {M. Wolters}, Booktitle = {Proceedings of the International Conference on Artificial Neural Networks -- {{\sc icann 96}}}, Editor = {C. {Van der Malsburg} {\em et al}.}, Pages = {233--238}, Publisher = {Berlin: Springer Verlag}, Title = {A dual route neural net approach to grapheme-to-phoneme conversion}, Year = 1996} @mastersthesis{Wolters97, Author = {M. Wolters}, School = {Department of Computer Science, Universit{\"{a}}t Bonn}, Title = {A diphone-based text-to-speech system for {S}cottish {G}aelic}, Year = 1997} @phdthesis{Wood96, Author = {M. Wood}, Date-Modified = {2011-03-26 23:37:40 +0100}, Keywords = {augmentative technology, text completion, word completion}, School = {University of Bristol}, Title = {Syntactic pre-processing in single-word prediction for disabled people}, Year = 1996} @incollection{Woods78, Author = {Woods, W. A.}, Booktitle = {Advances in Computers}, Editor = {M. Yovits}, Note = {Reprinted in {\em Readings in Natural Language Processing}, B. Grosz, K. Spark Jones and B. Lynn Webber (eds), 1986.}, Pages = {2--64}, Publisher = {Academic Press}, Title = {Semantics and quantification in natural language question answering}, Volume = 17, Year = 1978} @inproceedings{Wouden90, Author = {T. {Van der Wouden}}, Booktitle = {Proceedings of Buda{L}ex'88}, Editor = {T. Magay}, Publisher = {Budapest: Akad{\'{e}}miai Klad{\'{o}}}, Title = {Celex: Building a multifunctional polytheoretical lexical data base}, Year = 1990} @article{Wu+92, Author = {Su Wu and Udi Manber}, Journal = {Communications of the {ACM}}, Month = {October}, Number = 10, Title = {Fast Text Searching Allowing Errors}, Volume = 35, Year = 1992} @inproceedings{Wu+94, Address = {Columbia, Maryland}, Author = {D. Wu and X. 
Xia}, Booktitle = {AMTA-94, Association for Machine Translation in the Americas}, Month = Oct, Pages = {206--213}, Title = {Learning an {E}nglish-{C}hinese lexicon from a parallel corpus}, Year = 1994} @inproceedings{Wu+94b, Address = {Las Cruces, New Mexico}, Author = {Z. Wu and M. Palmer}, Booktitle = {Proceedings of the 32nd Annual Meeting of the Association for Computational Linguistics}, Pages = {133--138}, Title = {Verb semantics and lexical selection}, Year = 1994} @inproceedings{Wu+95, Address = {Hong Kong}, Author = {D. Wu and C. Ng}, Booktitle = {PACLIC-10, Pacific Asia Conference on Language, Information and Computation}, Month = Dec, Pages = {195--204}, Title = {Using Brackets to Improve Search for Statistical Machine Translation}, Year = 1995} @article{Wu+95b, Author = {D. Wu and X. Xia}, Journal = {Machine Translation}, Number = {3--4}, Pages = {285--313}, Title = {Large-scale automatic extraction of an {E}nglish-{C}hinese lexicon}, Volume = 9, Year = 1995} @inproceedings{Wu+99, Address = {Budapest}, Author = {D. Wu and Z. Sui and J. Zhao}, Booktitle = {Proceedings of the Sixth European Conference on Speech Communication and Technology, EUROSPEECH'99}, Title = {An information-based method for selecting feature types for word prediction}, Year = 1999} @inproceedings{Wu93, Author = {Ming-Wen Wu and Keh-Yih Su}, Booktitle = {Proceedings of {ROCLING} {VI}}, Pages = {207-216}, Title = {Corpus-based compound extraction with mutual information and relative frequency count}, Year = 1993} @inproceedings{Wu94, Address = {Las Cruces, New Mexico}, Author = {D. Wu}, Booktitle = {Proceedings of the 32nd Annual Conference of the Association for Computational Linguistics}, Month = Jun, Pages = {80--87}, Title = {Aligning a Parallel {E}nglish-{C}hinese Corpus Statistically with Lexical Criteria}, Year = 1994} @inproceedings{Wu95, Address = {Cambridge, MA}, Author = {D. 
Wu}, Booktitle = {Proceedings of the 33rd Annual Conference of the Association for Computational Linguistics}, Month = Jun, Pages = {244--251}, Title = {An Algorithm for Simultaneously Bracketing Parallel Texts by Aligning Words}, Year = 1995} @inproceedings{Wu95b, Address = {Montreal}, Author = {D. Wu}, Booktitle = {Proceedings of IJCAI-95, Fourteenth International Joint Conference on Artificial Intelligence}, Month = Aug, Pages = {1328--1334}, Title = {Stochastic Inversion Transduction Grammars, with Application to Segmentation, Bracketing, and Alignment of Parallel Corpora}, Year = 1995} @inproceedings{Wu95c, Address = {Leuven, Belgium}, Author = {D. Wu}, Booktitle = {TMI-95, Proceedings of the Sixth International Conference on Theoretical and Methodological Issues in Machine Translation}, Month = Jul, Pages = {354--372}, Title = {Grammarless Extraction of Phrasal Translation Examples from Parallel Texts}, Volume = 2, Year = 1995} @inproceedings{Wu95d, Address = {Cambridge, MA}, Author = {D. Wu}, Booktitle = {Proceedings of the Third Annual Workshop on Very Large Corpora}, Month = Jun, Pages = {69--81}, Title = {Trainable Coarse Bilingual Grammars for Parallel Text Bracketing}, Year = 1995} @inproceedings{Wu96, Address = {Santa Cruz, California}, Author = {D. Wu}, Booktitle = {Proceedings of the 34th Annual Conference of the Association for Computational Linguistics}, Date-Modified = {2009-11-14 18:56:13 +0100}, Keywords = {statistical machine translation}, Month = Jun, Pages = {152--158}, Title = {A Polynomial-Time Algorithm for Statistical Machine Translation}, Year = 1996} @inproceedings{Wubben+09, Author = {S. Wubben and A. {Van den Bosch}}, Booktitle = {Proceedings of the Eighth International Conference on Computational Semantics (IWCS-8)}, Date-Added = {2010-01-02 19:21:43 +0100}, Date-Modified = {2010-09-14 13:03:17 +0200}, Editor = {H.C. Bunt and V. Petukhova and S. 
Wubben}, Keywords = {ilk, semantic relatedness, Wikipedia, Conceptnet, Wordnet, memphix, vici}, Pages = {355--359}, Title = {A semantic relatedness metric based on free link structure}, Year = {2009}, Bdsk-Url-1 = {http://ilk.uvt.nl/downloads/pub/papers/freelinkstructure.pdf}} @inproceedings{Wubben+09b, Address = {Athens, Greece}, Author = {S. Wubben and A. {Van den Bosch} and E. Krahmer and E. Marsi}, Booktitle = {Proceedings of the 12th European Workshop on Natural Language Generation (ENLG 2009)}, Date-Added = {2010-01-02 19:28:47 +0100}, Date-Modified = {2010-09-14 13:03:23 +0200}, Keywords = {ilk, paraphrasing, headlines, clustering, memphix, vici}, Pages = {122--125}, Title = {Clustering and matching headlines for automatic paraphrase acquisition}, Year = {2009}, Bdsk-Url-1 = {http://aclweb.org/anthology-new/W/W09/W09-0621.pdf}} @inproceedings{Wulf96, Author = {D. Wulf}, Booktitle = {Proceedings of the Twelfth Northwest Linguistics Conference. Working Papers in Linguistics}, Pages = {239--254}, Title = {An Analogical Approach to Plural Formation in German}, Volume = 14, Year = 1996} @inproceedings{Yamada+01, Author = {K. Yamada and K. Knight}, Booktitle = {Proceedings of 39th Annual Meeting of the Association for Computational Linguistics}, Date-Added = {2009-11-15 14:26:30 +0100}, Date-Modified = {2009-11-15 14:26:41 +0100}, Pages = {523--530}, Title = {A Syntax-based Statistical Translation Model}, Year = {2001}} @inproceedings{Yang+99b, Author = {Yang, Y. 
and Liu, X.}, Booktitle = {Proceedings of the 22nd Annual International ACM SIGIR Conference on Research and Development in Information Retrieval}, Date-Added = {2009-11-15 14:26:46 +0100}, Date-Modified = {2012-06-06 06:51:33 +0000}, Pages = {42--49}, Title = {A re-examination of text categorization methods}, Year = {1999}} @inproceedings{Yarowski95, Address = {Cambridge, MA}, Author = {Yarowski, D.}, Booktitle = {ACL33}, Date-Modified = {2009-09-06 20:44:47 +0200}, Keywords = {wsd, word sense disambiguation, unsupervised learning, boosting}, Pages = {189--196}, Title = {Unsupervised Word Sense Disambiguation Rivaling Supervised Methods}, Year = 1995} @inproceedings{Yarowsky+00, Address = {San Francisco, CA}, Author = {Yarowsky, D. and R. Wicentowski}, Booktitle = {Proceedings of ACL-2000}, Pages = {207--216}, Publisher = {Morgan Kaufmann}, Title = {Minimally supervised morphological analysis by multimodal alignment}, Year = 2000} @inproceedings{Yarowsky+01, Address = {San Francisco, CA}, Author = {Yarowsky, D. and G. Ngai}, Booktitle = {Proceedings of NAACL-2001}, Pages = {200--207}, Publisher = {Morgan Kaufmann}, Title = {Inducing Multilingual POS Taggers and NP Bracketers via Robust Projection Across Aligned Corpora}, Year = 2001} @article{Yarowsky00, Author = {Yarowsky, D.}, Date-Modified = {2009-09-06 20:44:55 +0200}, Journal = {Computers and the Humanities}, Keywords = {wsd, word sense disambiguation}, Number = {1/2}, Pages = {179-186}, Title = {Hierarchical Decision Lists for Word Sense Disambiguation}, Volume = 34, Year = 2000} @inproceedings{Yarowsky92, Author = {D. Yarowsky}, Booktitle = Coling, Date-Modified = {2009-09-06 20:45:03 +0200}, Keywords = {wsd, word sense disambiguation}, Pages = {454--460}, Title = {Word sense disambiguation using statistical models of {Roget's} categories trained on large corpora}, Year = 1992} @inproceedings{Yarowsky93, Author = {D. 
Yarowsky}, Booktitle = {Proceedings of the ARPA Workshop on Human Language Technology}, Date-Modified = {2009-09-06 20:45:12 +0200}, Keywords = {wsd, word sense disambiguation}, Title = {One sense per collocation}, Year = 1993} @inproceedings{Yarowsky94, Author = {D. Yarowsky}, Booktitle = ACL, Pages = {88--95}, Title = {Decision lists for lexical ambiguity resolution: application to accent restoration in {Spanish} and {French}}, Year = 1994} @inproceedings{Yarowsky95, Author = {D. Yarowsky}, Booktitle = {Proceedings of ACL-95}, Comment = {Not fully supervised}, Date-Modified = {2009-09-06 20:45:18 +0200}, Keywords = {wsd, word sense disambiguation}, Title = {Unsupervised word sense disambiguation rivaling supervised methods}, Year = 1995} @inproceedings{Yeh00, Address = {{Saarbr\"{u}cken}, Germany}, Author = {A. Yeh}, Booktitle = {Proceedings of the 18th International Conference on Computational Linguistics}, Pages = {1146--1150}, Title = {Comparing two trainable grammatical relations finders}, Year = 2000} @article{Young+94, Author = {S. J. Young and P. C. Woodland}, Date-Added = {2010-01-29 15:03:06 +0100}, Date-Modified = {2010-01-29 15:03:14 +0100}, Journal = {Computer Speech and Language}, Number = {4}, Pages = {369--394}, Title = {State clustering in {HMM}-based continuous speech recognition}, Volume = {8}, Year = {1994}} @phdthesis{Yuret98, Author = {D. Yuret}, Month = {May}, School = {EECS Department, MIT}, Title = {Discovery of Linguistic Relations Using Lexical Attraction}, Year = 1998} @phdthesis{Yvon96, Author = {F. Yvon}, School = {Ecole Nationale Sup\'{e}rieure des T\'{e}l\'{e}communications, Paris}, Title = {Prononcer par analogie: motivation, formalisation et \'{e}valuation}, Year = 1996} @inproceedings{Zadrozny+01, Author = {Zadrozny, B. and Elkan, C.}, Booktitle = {Proceedings of the Eighteenth International Conference on Machine Learning}, Date-Added = {2009-11-15 14:27:00 +0100}, Date-Modified = {2009-11-15 14:27:09 +0100}, Editor = {C.E. 
Brodley and A. Danyluk}, Pages = {609--616}, Title = {{Obtaining calibrated probability estimates from decision trees and naive Bayesian classifiers}}, Year = {2001}} @inproceedings{Zadrozny+02, Author = {Zadrozny, B. and Elkan, C.}, Booktitle = {Proceedings of the eighth ACM SIGKDD international conference on Knowledge discovery and data mining}, Date-Added = {2009-11-15 14:27:00 +0100}, Date-Modified = {2009-11-15 14:27:20 +0100}, Pages = {694--699}, Title = {Transforming classifier scores into accurate multiclass probability estimates}, Year = {2002}} @article{Zajic+07, Author = {David Zajic and Bonnie J. Dorr and Jimmy Lin and Richard Schwartz}, Date-Added = {2009-11-15 14:36:44 +0100}, Date-Modified = {2009-11-15 14:36:53 +0100}, Journal = {Information Processing Management}, Number = {6}, Pages = {1549--1570}, Title = {Multi-candidate reduction: Sentence compression as a tool for document summarization tasks}, Volume = {43}, Year = {2007}} @inproceedings{Zanchetta05, Address = {Birmingham, UK}, Author = {Eros Zanchetta and Marco Baroni}, Booktitle = {Proceedings of the Corpus Linguistics 2005 conference}, Keyword = {NLP; corpus}, Title = {Morph-it!: a free corpus-based morphological resource for the Italian language}, Year = 2005} @inproceedings{Zavrel+00, Author = {J. Zavrel and W. Daelemans}, Booktitle = {Proceedings of the Second International Conference on Language Resources and Evaluation (LREC 2000)}, Date-Modified = {2010-01-02 21:35:35 +0100}, Keywords = {ilk, part-of-speech tagging}, Pages = {17--20}, Title = {Bootstrapping a Tagged Corpus through Combination of Existing Heterogeneous Taggers}, Year = 2000} @inproceedings{Zavrel+00b, Address = {Athens, Greece}, Author = {J. Zavrel and P. Berck and W. 
Lavrijssen}, Booktitle = {Proceedings of the workshop Information Extraction meets Corpus Linguistics}, Date-Modified = {2010-09-14 13:01:15 +0200}, Keywords = {ilk, text classification, information extraction, stil}, Title = {Information extraction by text classification: Corpus mining for features}, Year = 2000} @inproceedings{Zavrel+03, Address = {Heidelberg, Germany}, Author = {J. Zavrel and W. Daelemans}, Booktitle = {Text Mining, Theoretical Aspects and Applications}, Date-Modified = {2010-01-02 21:35:14 +0100}, Editor = {J. Franke, G. Nakhaeizadeh and I. Renz}, Keywords = {ilk, information extraction, text mining}, Pages = {33--54}, Publisher = {Springer Physica-Verlag}, Title = {Feature-rich memory-based classification for shallow {NLP} and information extraction}, Year = 2003, Bdsk-Url-1 = {http://www.cnts.ua.ac.be/~walter/papers/2003/zd03.pdf}} @inproceedings{Zavrel+94, Address = {{Dublin}}, Author = {J. Zavrel and J. Veenstra}, Booktitle = {Proceedings of third international conference on the Cognitive Science of NLP}, Title = {A Data-Driven Start of Lexicon and Syntax}, Year = 1994} @inproceedings{Zavrel+95, Address = {{Groningen}}, Author = {J. Zavrel and J. Veenstra}, Booktitle = {Proceedings of Groningen Assembly on Language Acquisition (GALA95)}, Editor = {F. Wijnen and C. Koster}, Title = {The language environment and syntactic word class acquisition}, Year = 1995} @inproceedings{Zavrel+96, Author = {J. Zavrel and J. Veenstra}, Booktitle = {Proceedings of the Groningen Assembly on Language Acquisition (GALA '95)}, Editor = {Koster C. and Wijnen F.}, Title = {The language environment and syntactic word class acquisition}, Year = 1996} @inproceedings{Zavrel+97, Author = {J. Zavrel and W. 
Daelemans}, Booktitle = {Proceedings of the 35th Annual Meeting of the Association for Computational Linguistics}, Date-Modified = {2010-01-02 21:34:39 +0100}, Keywords = {ilk}, Pages = {436--443}, Title = {Memory-Based Learning: Using Similarity for Smoothing}, Year = 1997} @inproceedings{Zavrel+97b, Address = {{ACL, Madrid}}, Author = {J. Zavrel and W. Daelemans and J. Veenstra}, Booktitle = {Proceedings of the Workshop on Computational Language Learning (CoNLL'97)}, Date-Modified = {2010-01-02 21:34:34 +0100}, Editor = {M. Ellison}, Keywords = {ilk, PP attachment, memory-based learning}, Title = {Resolving {PP} Attachment Ambiguities with Memory-Based Learning}, Year = 1997} @inproceedings{Zavrel+99, Author = {J. Zavrel and W. Daelemans}, Booktitle = {VI Simposio Internacional de Comunicacion Social}, Date-Modified = {2010-01-02 21:34:29 +0100}, Keywords = {ilk, memory-based tagging, memory-based learning, memory-based language processing}, Location = {Santiago de Cuba, Cuba}, Pages = {590--597}, Title = {Recent Advances in Memory-Based Part-of-Speech Tagging}, Year = 1999} @inproceedings{Zavrel97, Address = {Tilburg}, Author = {J. Zavrel}, Booktitle = {Proceedings of the 7th Belgian-Dutch Conference on Machine Learning}, Date-Modified = {2010-01-02 21:34:25 +0100}, Editor = {W. Daelemans and P. Flach and A. {Van den Bosch}}, Keywords = {ilk, k-NN}, Pages = {139--148}, Title = {An Empirical Re-Examination of Weighted Voting for {$k$-{\sc nn}}}, Year = 1997} @inproceedings{Zechner+98, Address = {Montreal}, Author = {K. Zechner and A. Waibel}, Booktitle = {COLING/ACL-98}, Date-Added = {2010-01-29 15:02:28 +0100}, Date-Modified = {2010-02-12 23:58:06 +0100}, Organization = {ACL}, Pages = {1453--1459}, Title = {Using Chunk Based Partial Parsing of Spontaneous Speech in Unrestricted Domains for Reducing Word Error Rate in Speech Recognition}, Year = {1998}} @inproceedings{Zelle+96, Author = {J. M. Zelle and R. J. 
Mooney}, Booktitle = AAAI, Pages = {1050--1055}, Title = {Learning to Parse Database queries using inductive logic proramming}, Year = 1996} @inproceedings{Zens04, Address = {Boston, MA}, Author = {R. Zens and H. Ney}, Booktitle = {Proceedings of HLT-NAACL 2004}, Pages = {257-264}, Title = {Improvements in phrase-based statistical machine translation}, Year = 2004} @inproceedings{Zernik+90, Author = {U. Zernik and P. Jacobs}, Booktitle = Coling, Pages = {34--39}, Title = {Tagging for Learning: Collecting Thematic Relations from Corpus}, Volume = 1, Year = 1990} @inproceedings{Zernik89, Author = {U. Zernik}, Booktitle = IJCAI, Title = {Lexicon acquisition: Learning from corpus by capitalizing on lexical categories}, Year = 1989} @book{Zernik91, Address = {Hillsdale, NJ}, Editor = {U. Zernik}, Publisher = {Lawrence Erlbaum Associates}, Title = {Lexical Acquisition: Exploiting On-Line Resources to Build a Lexicon}, Year = 1991} @article{Zhang+02, Author = {Zhang, T. and Damerau, F. and Johnson, D.}, Date-Added = {2009-11-15 14:27:26 +0100}, Date-Modified = {2009-11-15 14:28:47 +0100}, Journal = {Journal of Machine Learning Research}, Pages = {615-637}, Title = {Text chunking based on a generalization of {Winnow}}, Volume = {2}, Year = {2002}} @inproceedings{Zhang+03, Author = {T. Zhang and D. Johnson}, Booktitle = {Proceedings of the seventh Conference on Natural Language Learning at HLT-NAACL 2003}, Date-Added = {2009-11-15 14:27:26 +0100}, Date-Modified = {2009-11-15 14:29:08 +0100}, Editor = {W. Daelemans and M. 
Osborne}, Pages = {204--207}, Title = {A Robust Risk Minimization based Named Entity Recognition System}, Year = {2003}} @proceedings{Zhang+04, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {IRI}, Date-Added = {2010-02-10 20:37:02 +0100}, Date-Modified = {2010-02-10 20:37:12 +0100}, Editor = {Du Zhang and {\'E}ric Gr{\'e}goire and Doug DeGroot}, Isbn = {0-7803-8819-4}, Publisher = {IEEE Systems, Man, and Cybernetics Society}, Title = {Proceedings of the 2004 IEEE International Conference on Information Reuse and Integration, IRI - 2004, November 8-10, 2004, Las Vegas Hilton, Las Vegas, NV, USA}, Year = {2004}} @inproceedings{Zhang+06, Author = {Zhang, M. and Zhang, J. and Su, J.}, Booktitle = {Proceedings of the main conference on Human Language Technology Conference of the North American Chapter of the Association of Computational Linguistics}, Date-Added = {2009-11-15 14:27:26 +0100}, Date-Modified = {2009-11-15 14:28:58 +0100}, Pages = {288--295}, Title = {Exploring syntactic features for relation extraction using a convolution tree kernel}, Year = {2006}} @inproceedings{Zhang92, Author = {J. 
Zhang}, Booktitle = {Proceedings of the International Machine Learning Conference 1992}, Date-Modified = {2008-07-23 15:58:07 +0200}, Keywords = {typicality, instance-based learning}, Pages = {470--479}, Title = {Selecting typical instances in instance-based learning}, Year = 1992} @inproceedings{Zhao+09, Address = {Suntec, Singapore}, Author = {Zhao, Shiqi and Lan, Xiang and Liu, Ting and Li, Sheng}, Booktitle = {Proceedings of the Joint Conference of the 47th Annual Meeting of the ACL and the 4th International Joint Conference on Natural Language Processing of the AFNLP}, Date-Added = {2009-11-15 14:36:57 +0100}, Date-Modified = {2009-11-15 14:37:15 +0100}, Keywords = {paraphrasing}, Month = {August}, Pages = {834--842}, Publisher = {Association for Computational Linguistics}, Title = {Application-driven Statistical Paraphrase Generation}, Year = {2009}} @inproceedings{Zho+98, Address = {Montreal}, Author = {GuoDong Zhou and KimTeng Lua}, Booktitle = {COLING/ACL-98}, Date-Added = {2010-01-29 15:02:09 +0100}, Date-Modified = {2010-01-29 15:02:16 +0100}, Pages = {1465--1471}, Publisher = {ACL}, Title = {Word Association and {MI}-Trigger-based Language Modelling}, Year = {1998}} @inproceedings{Zhou95, Author = {J. Zhou and P. Dapkus}, Booktitle = {Proceedings of the Third Workshop on Very Large Corpora}, Pages = {131--147}, Title = {Automatic suggestion of significant terms for a predefined topic}, Year = 1995} @inproceedings{Zhu+04, Address = {San Jose, California}, Author = {X. Zhu and X. Wu and Y. Yang}, Booktitle = {Proceedings of the 19th National Conference on Artificial Intelligence (AAAI-04)}, Month = {July}, Pages = {378--383}, Title = {Error Detection and Impact-Sensitive Instance Ranking in Noisy Datasets}, Year = 2004} @book{Zimmerman91, Author = {H. J. Zimmerman}, Publisher = {Kluwer}, Title = {Fuzzy Set Theory and Its Applications}, Year = 1991} @book{Zipf35, Address = {Cambridge, MA}, Author = {G. K. 
Zipf}, Note = {Second paperback edition, 1968}, Publisher = MIT, Title = {The psycho-biology of language: an introduction to dynamic philology}, Year = 1935} @book{Zipf49, Address = {Cambridge, MA}, Author = {George Kingsley Zipf}, Date-Added = {2010-01-29 15:02:02 +0100}, Date-Modified = {2010-01-29 15:02:02 +0100}, Publisher = {Addison--Wesley}, Title = {Human Behavior and the Principle of Least Effort}, Year = {1949}} @article{Zobel+95, Author = {Justin Zobel and Philip Dart}, Date-Added = {2010-01-29 15:01:45 +0100}, Date-Modified = {2010-01-29 15:01:54 +0100}, Issn = {0038-0644}, Journal = {Software -- Practice and Experience}, Number = {3}, Pages = {331--345}, Publisher = {John Wiley \& Sons, Inc.}, Title = {Finding approximate matches in large lexicons}, Volume = {25}, Year = {1995}} @article{Zollmann+05, Address = {Germany}, Author = {Zollmann, A. and Sima'an, K.}, Citeulike-Article-Id = {4480457}, Issn = {1430-189X}, Journal = {Journal of Automata, Languages and Combinatorics}, Number = {2-3}, Pages = {367--389}, Posted-At = {2009-05-06 18:18:05}, Priority = {2}, Title = {A consistent and efficient estimator for data-oriented parsing}, Url = {http://www.cs.cmu.edu/\~{}zollmann/publications/jalc2005\_dopstar\_paper.pdf}, Volume = {10}, Year = {2005}, Bdsk-Url-1 = {http://www.cs.cmu.edu/%5C~%7B%7Dzollmann/publications/jalc2005%5C_dopstar%5C_paper.pdf}} @inproceedings{Zwicky72, Author = {A. Zwicky}, Booktitle = {CLS-72}, Date-Added = {2010-01-29 15:01:17 +0100}, Date-Modified = {2010-01-29 15:01:26 +0100}, Month = {April}, Pages = {607--615}, Publisher = {University of Chicago}, Title = {{On Casual Speech}}, Year = {1972}} @misc{Leech+94, Author = {G. Leech and R. Garside and M. Bryant}, Title = {Claws4: The Tagging Of The British National Corpus}, Year = {1994}} @phdthesis{Noklestad09, Author = {A. 
N{\o}klestad}, Date-Modified = {2011-04-25 23:45:49 +0200}, School = {University of Oslo}, Title = {A Machine Learning Approach to Anaphora Resolution Including Named Entity Recognition, PP Attachment Disambiguation, and Animacy Detection}, Year = {2009}} @inproceedings{Steinbach+00, Author = {M. Steinbach and G. Karypis and V. Kumar}, Booktitle = {KDD Workshop on Text Mining}, Date-Modified = {2011-06-19 23:08:42 +0200}, Title = {A comparison of document clustering techniques}, Year = {2000}} @inproceedings{Seginer07, Address = {Prague, Czech Republic}, Author = {Y. Seginer}, Booktitle = {Proceedings of the 45th Annual Meeting of the Association of Computational Linguistics}, Month = {June}, Pages = {384--391}, Publisher = {Association for Computational Linguistics}, Title = {Fast Unsupervised Incremental Parsing}, Url = {http://www.aclweb.org/anthology/P07-1049}, Year = {2007}, Bdsk-Url-1 = {http://www.aclweb.org/anthology/P07-1049}} @phdthesis{Feist08, Address = {Auckland, NZ}, Author = {J.M. Feist}, Date-Modified = {2011-06-19 16:14:52 +0200}, School = {The University of Auckland}, Title = {The order of premodifiers in English nominal phrases}, Year = 2008} @inproceedings{Mitchell09, Acmid = {1610203}, Address = {Stroudsburg, PA, USA}, Author = {Mitchell, Margaret}, Booktitle = {Proceedings of the 12th European Workshop on Natural Language Generation}, Location = {Athens, Greece}, Numpages = {8}, Pages = {50--57}, Publisher = {Association for Computational Linguistics}, Series = {ENLG '09}, Title = {Class-based ordering of prenominal modifiers}, Url = {http://portal.acm.org/citation.cfm?id=1610195.1610203}, Year = {2009}, Bdsk-Url-1 = {http://portal.acm.org/citation.cfm?id=1610195.1610203}} @inproceedings{DeJong83, Address = {Dordrecht, The Netherlands}, Author = {F.M.G. {De Jong}}, Booktitle = {Linguistics in the Netherlands}, Date-Modified = {2011-07-24 19:51:51 +0000}, Editor = {H. Bennis and W.U.S. 
{Van Lessen Kloeke}}, Keywords = {numerals}, Pages = {95--104}, Publisher = {Foris}, Title = {Numerals as determiners}, Year = {1983}} @inproceedings{DeJong83b, Address = {Dordrecht, The Netherlands}, Author = {F.M.G. {De Jong}}, Booktitle = {GLOT 6}, Date-Modified = {2011-07-24 19:50:35 +0000}, Pages = {229--246}, Title = {Sommige niet, andere wel; de verklaring van een raadselachtig verschil.}, Year = {1983}} LanguageMachines-timbl-642727d/docs/texfiles/mble-method.eps000077500000000000000000000276621451477526200240110ustar00rootroot00000000000000%!PS-Adobe-2.0 EPSF-1.2 %%DocumentFonts: Times-Bold Times-Italic Times-Roman %%Pages: 1 %%BoundingBox: 125 280 465 617 %%EndComments /IdrawDict 53 dict def IdrawDict begin /reencodeISO { dup dup findfont dup length dict begin { 1 index /FID ne { def }{ pop pop } ifelse } forall /Encoding ISOLatin1Encoding def currentdict end definefont } def /ISOLatin1Encoding [ /.notdef/.notdef/.notdef/.notdef/.notdef/.notdef/.notdef/.notdef /.notdef/.notdef/.notdef/.notdef/.notdef/.notdef/.notdef/.notdef /.notdef/.notdef/.notdef/.notdef/.notdef/.notdef/.notdef/.notdef /.notdef/.notdef/.notdef/.notdef/.notdef/.notdef/.notdef/.notdef /space/exclam/quotedbl/numbersign/dollar/percent/ampersand/quoteright /parenleft/parenright/asterisk/plus/comma/minus/period/slash /zero/one/two/three/four/five/six/seven/eight/nine/colon/semicolon /less/equal/greater/question/at/A/B/C/D/E/F/G/H/I/J/K/L/M/N /O/P/Q/R/S/T/U/V/W/X/Y/Z/bracketleft/backslash/bracketright /asciicircum/underscore/quoteleft/a/b/c/d/e/f/g/h/i/j/k/l/m /n/o/p/q/r/s/t/u/v/w/x/y/z/braceleft/bar/braceright/asciitilde /.notdef/.notdef/.notdef/.notdef/.notdef/.notdef/.notdef/.notdef /.notdef/.notdef/.notdef/.notdef/.notdef/.notdef/.notdef/.notdef /.notdef/dotlessi/grave/acute/circumflex/tilde/macron/breve /dotaccent/dieresis/.notdef/ring/cedilla/.notdef/hungarumlaut /ogonek/caron/space/exclamdown/cent/sterling/currency/yen/brokenbar 
/section/dieresis/copyright/ordfeminine/guillemotleft/logicalnot /hyphen/registered/macron/degree/plusminus/twosuperior/threesuperior /acute/mu/paragraph/periodcentered/cedilla/onesuperior/ordmasculine /guillemotright/onequarter/onehalf/threequarters/questiondown /Agrave/Aacute/Acircumflex/Atilde/Adieresis/Aring/AE/Ccedilla /Egrave/Eacute/Ecircumflex/Edieresis/Igrave/Iacute/Icircumflex /Idieresis/Eth/Ntilde/Ograve/Oacute/Ocircumflex/Otilde/Odieresis /multiply/Oslash/Ugrave/Uacute/Ucircumflex/Udieresis/Yacute /Thorn/germandbls/agrave/aacute/acircumflex/atilde/adieresis /aring/ae/ccedilla/egrave/eacute/ecircumflex/edieresis/igrave /iacute/icircumflex/idieresis/eth/ntilde/ograve/oacute/ocircumflex /otilde/odieresis/divide/oslash/ugrave/uacute/ucircumflex/udieresis /yacute/thorn/ydieresis ] def /Times-Bold reencodeISO def /Times-Italic reencodeISO def /Times-Roman reencodeISO def /arrowHeight 8 def /arrowWidth 4 def /none null def /numGraphicParameters 17 def /stringLimit 65535 def /Begin { save numGraphicParameters dict begin } def /End { end restore } def /SetB { dup type /nulltype eq { pop false /brushRightArrow idef false /brushLeftArrow idef true /brushNone idef } { /brushDashOffset idef /brushDashArray idef 0 ne /brushRightArrow idef 0 ne /brushLeftArrow idef /brushWidth idef false /brushNone idef } ifelse } def /SetCFg { /fgblue idef /fggreen idef /fgred idef } def /SetCBg { /bgblue idef /bggreen idef /bgred idef } def /SetF { /printSize idef /printFont idef } def /SetP { dup type /nulltype eq { pop true /patternNone idef } { dup -1 eq { /patternGrayLevel idef /patternString idef } { /patternGrayLevel idef } ifelse false /patternNone idef } ifelse } def /BSpl { 0 begin storexyn newpath n 1 gt { 0 0 0 0 0 0 1 1 true subspline n 2 gt { 0 0 0 0 1 1 2 2 false subspline 1 1 n 3 sub { /i exch def i 1 sub dup i dup i 1 add dup i 2 add dup false subspline } for n 3 sub dup n 2 sub dup n 1 sub dup 2 copy false subspline } if n 2 sub dup n 1 sub dup 2 copy 2 copy false 
subspline patternNone not brushLeftArrow not brushRightArrow not and and { ifill } if brushNone not { istroke } if 0 0 1 1 leftarrow n 2 sub dup n 1 sub dup rightarrow } if end } dup 0 4 dict put def /Circ { newpath 0 360 arc patternNone not { ifill } if brushNone not { istroke } if } def /CBSpl { 0 begin dup 2 gt { storexyn newpath n 1 sub dup 0 0 1 1 2 2 true subspline 1 1 n 3 sub { /i exch def i 1 sub dup i dup i 1 add dup i 2 add dup false subspline } for n 3 sub dup n 2 sub dup n 1 sub dup 0 0 false subspline n 2 sub dup n 1 sub dup 0 0 1 1 false subspline patternNone not { ifill } if brushNone not { istroke } if } { Poly } ifelse end } dup 0 4 dict put def /Elli { 0 begin newpath 4 2 roll translate scale 0 0 1 0 360 arc patternNone not { ifill } if brushNone not { istroke } if end } dup 0 1 dict put def /Line { 0 begin 2 storexyn newpath x 0 get y 0 get moveto x 1 get y 1 get lineto brushNone not { istroke } if 0 0 1 1 leftarrow 0 0 1 1 rightarrow end } dup 0 4 dict put def /MLine { 0 begin storexyn newpath n 1 gt { x 0 get y 0 get moveto 1 1 n 1 sub { /i exch def x i get y i get lineto } for patternNone not brushLeftArrow not brushRightArrow not and and { ifill } if brushNone not { istroke } if 0 0 1 1 leftarrow n 2 sub dup n 1 sub dup rightarrow } if end } dup 0 4 dict put def /Poly { 3 1 roll newpath moveto -1 add { lineto } repeat closepath patternNone not { ifill } if brushNone not { istroke } if } def /Rect { 0 begin /t exch def /r exch def /b exch def /l exch def newpath l b moveto l t lineto r t lineto r b lineto closepath patternNone not { ifill } if brushNone not { istroke } if end } dup 0 4 dict put def /Text { ishow } def /idef { dup where { pop pop pop } { exch def } ifelse } def /ifill { 0 begin gsave patternGrayLevel -1 ne { fgred bgred fgred sub patternGrayLevel mul add fggreen bggreen fggreen sub patternGrayLevel mul add fgblue bgblue fgblue sub patternGrayLevel mul add setrgbcolor eofill } { eoclip originalCTM setmatrix pathbbox /t exch def 
/r exch def /b exch def /l exch def /w r l sub ceiling cvi def /h t b sub ceiling cvi def /imageByteWidth w 8 div ceiling cvi def /imageHeight h def bgred bggreen bgblue setrgbcolor eofill fgred fggreen fgblue setrgbcolor w 0 gt h 0 gt and { l b translate w h scale w h true [w 0 0 h neg 0 h] { patternproc } imagemask } if } ifelse grestore end } dup 0 8 dict put def /istroke { gsave brushDashOffset -1 eq { [] 0 setdash 1 setgray } { brushDashArray brushDashOffset setdash fgred fggreen fgblue setrgbcolor } ifelse brushWidth setlinewidth originalCTM setmatrix stroke grestore } def /ishow { 0 begin gsave fgred fggreen fgblue setrgbcolor printFont printSize scalefont setfont /descender 0 printFont /FontBBox get 1 get printFont /FontMatrix get transform exch pop def /vertoffset 1 printSize sub descender sub def { 0 vertoffset moveto show /vertoffset vertoffset printSize sub def } forall grestore end } dup 0 2 dict put def /patternproc { 0 begin /patternByteLength patternString length def /patternHeight patternByteLength 8 mul sqrt cvi def /patternWidth patternHeight def /patternByteWidth patternWidth 8 idiv def /imageByteMaxLength imageByteWidth imageHeight mul stringLimit patternByteWidth sub min def /imageMaxHeight imageByteMaxLength imageByteWidth idiv patternHeight idiv patternHeight mul patternHeight max def /imageHeight imageHeight imageMaxHeight sub store /imageString imageByteWidth imageMaxHeight mul patternByteWidth add string def 0 1 imageMaxHeight 1 sub { /y exch def /patternRow y patternByteWidth mul patternByteLength mod def /patternRowString patternString patternRow patternByteWidth getinterval def /imageRow y imageByteWidth mul def 0 patternByteWidth imageByteWidth 1 sub { /x exch def imageString imageRow x add patternRowString putinterval } for } for imageString end } dup 0 12 dict put def /min { dup 3 2 roll dup 4 3 roll lt { exch } if pop } def /max { dup 3 2 roll dup 4 3 roll gt { exch } if pop } def /arrowhead { 0 begin transform originalCTM 
itransform /taily exch def /tailx exch def transform originalCTM itransform /tipy exch def /tipx exch def /dy tipy taily sub def /dx tipx tailx sub def /angle dx 0 ne dy 0 ne or { dy dx atan } { 90 } ifelse def gsave originalCTM setmatrix tipx tipy translate angle rotate newpath 0 0 moveto arrowHeight neg arrowWidth 2 div lineto arrowHeight neg arrowWidth 2 div neg lineto closepath patternNone not { originalCTM setmatrix /padtip arrowHeight 2 exp 0.25 arrowWidth 2 exp mul add sqrt brushWidth mul arrowWidth div def /padtail brushWidth 2 div def tipx tipy translate angle rotate padtip 0 translate arrowHeight padtip add padtail add arrowHeight div dup scale arrowheadpath ifill } if brushNone not { originalCTM setmatrix tipx tipy translate angle rotate arrowheadpath istroke } if grestore end } dup 0 9 dict put def /arrowheadpath { newpath 0 0 moveto arrowHeight neg arrowWidth 2 div lineto arrowHeight neg arrowWidth 2 div neg lineto closepath } def /leftarrow { 0 begin y exch get /taily exch def x exch get /tailx exch def y exch get /tipy exch def x exch get /tipx exch def brushLeftArrow { tipx tipy tailx taily arrowhead } if end } dup 0 4 dict put def /rightarrow { 0 begin y exch get /tipy exch def x exch get /tipx exch def y exch get /taily exch def x exch get /tailx exch def brushRightArrow { tipx tipy tailx taily arrowhead } if end } dup 0 4 dict put def /midpoint { 0 begin /y1 exch def /x1 exch def /y0 exch def /x0 exch def x0 x1 add 2 div y0 y1 add 2 div end } dup 0 4 dict put def /thirdpoint { 0 begin /y1 exch def /x1 exch def /y0 exch def /x0 exch def x0 2 mul x1 add 3 div y0 2 mul y1 add 3 div end } dup 0 4 dict put def /subspline { 0 begin /movetoNeeded exch def y exch get /y3 exch def x exch get /x3 exch def y exch get /y2 exch def x exch get /x2 exch def y exch get /y1 exch def x exch get /x1 exch def y exch get /y0 exch def x exch get /x0 exch def x1 y1 x2 y2 thirdpoint /p1y exch def /p1x exch def x2 y2 x1 y1 thirdpoint /p2y exch def /p2x exch def x1 y1 x0 
y0 thirdpoint p1x p1y midpoint /p0y exch def /p0x exch def x2 y2 x3 y3 thirdpoint p2x p2y midpoint /p3y exch def /p3x exch def movetoNeeded { p0x p0y moveto } if p1x p1y p2x p2y p3x p3y curveto end } dup 0 17 dict put def /storexyn { /n exch def /y n array def /x n array def n 1 sub -1 0 { /i exch def y i 3 2 roll put x i 3 2 roll put } for } def %%EndProlog %I Idraw 9 Grid 8 %%Page: 1 1 Begin %I b u %I cfg u %I cbg u %I f u %I p u %I t [ .8 0 0 .8 0 0 ] concat /originalCTM matrix currentmatrix def Begin %I Rect %I b 65535 1 0 0 [] 0 SetB %I cfg Black 0 0 0 SetCFg %I cbg White 1 1 1 SetCBg none SetP %I p n %I t [ 1 0 0 1 148 197 ] concat %I 291 4 291 5 Rect End Begin %I Text %I cfg Black 0 0 0 SetCFg %I f *-times-bold-r-*-140-* Times-Bold 14 SetF %I t [ 1 0 0 1 328 734 ] concat %I [ (EXAMPLES) ] Text End Begin %I Text %I cfg Black 0 0 0 SetCFg %I f *-times-bold-r-*-140-* Times-Bold 14 SetF %I t [ 1 0 0 1 345 439 ] concat %I [ (CASES) ] Text End Begin %I Line %I b 65535 1 0 1 [] 0 SetB %I cfg Black 0 0 0 SetCFg %I cbg White 1 1 1 SetCBg none SetP %I p n %I t [ 1 0 0 1 148 197 ] concat %I 221 510 221 261 Line End Begin %I Text %I cfg Black 0 0 0 SetCFg %I f *-times-bold-r-*-140-* Times-Bold 14 SetF %I t [ 1 0 0 1 179 438 ] concat %I [ (INPUT) ] Text End Begin %I Text %I cfg Black 0 0 0 SetCFg %I f *-times-bold-r-*-140-* Times-Bold 14 SetF %I t [ 1 0 0 1 498 438 ] concat %I [ (OUTPUT) ] Text End Begin %I Line %I b 65535 1 0 1 [] 0 SetB %I cfg Black 0 0 0 SetCFg %I cbg White 1 1 1 SetCBg none SetP %I p n %I t [ 1 0 0 1 148 197 ] concat %I 91 231 191 231 Line End Begin %I Line %I b 65535 1 0 1 [] 0 SetB %I cfg Black 0 0 0 SetCFg %I cbg White 1 1 1 SetCBg none SetP %I p n %I t [ 1 0 0 1 148 197 ] concat %I 260 231 320 231 Line End Begin %I Text %I cfg Black 0 0 0 SetCFg %I f *-times-medium-i-*-140-* Times-Italic 14 SetF %I t [ 1 0 0 1 286 403 ] concat %I [ (Similarity-Based Reasoning) ] Text End Begin %I Text %I cfg Black 0 0 0 SetCFg %I f *-times-medium-i-*-140-* 
Times-Italic 14 SetF %I t [ 1 0 0 1 384 641 ] concat %I [ (Storage) (Computation of Metrics) ] Text End Begin %I Rect %I b 65535 1 0 1 [] 0 SetB %I cfg Black 0 0 0 SetCFg %I cbg White 1 1 1 SetCBg none SetP %I p n %I t [ 1 0 0 1 148 197 ] concat %I 12 161 430 571 Rect End Begin %I Text %I cfg Black 0 0 0 SetCFg %I f *-times-medium-r-*-140-* Times-Roman 14 SetF %I t [ 1 0 0 1 170 757 ] concat %I [ (Memory-Based) (Learning) (Architecture) ] Text End Begin %I Text %I cfg Black 0 0 0 SetCFg %I f *-times-medium-r-*-140-* Times-Roman 14 SetF %I t [ 1 0 0 1 495 714 ] concat %I [ (Learning) ] Text End Begin %I Text %I cfg Black 0 0 0 SetCFg %I f *-times-medium-r-*-140-* Times-Roman 14 SetF %I t [ 1 0 0 1 176 386 ] concat %I [ (Performance) ] Text End End %I eop showpage %%Trailer end LanguageMachines-timbl-642727d/docs/texfiles/pos-neg.eps000077500000000000000000005251041451477526200231560ustar00rootroot00000000000000%!PS-Adobe-3.0 EPSF-3.0 %%HiResBoundingBox: 0.000000 0.000000 244.500000 218.001007 %APL_DSC_Encoding: UTF8 %%Title: (Unknown) %%Creator: (Unknown) %%CreationDate: (Unknown) %%For: (Unknown) %%DocumentData: Clean7Bit %%LanguageLevel: 2 %%Pages: 1 %%BoundingBox: 0 0 244 218 %%EndComments %%BeginProlog %%BeginFile: cg-pdf.ps %%Copyright: Copyright 2000-2002 Apple Computer Incorporated. %%Copyright: All Rights Reserved. currentpacking true setpacking /cg_md 140 dict def cg_md begin /L3? 
languagelevel 3 ge def /bd{bind def}bind def /ld{load def}bd /xs{exch store}bd /xd{exch def}bd /cmmtx matrix def mark /sc/setcolor /scs/setcolorspace /dr/defineresource /fr/findresource /T/true /F/false /d/setdash /w/setlinewidth /J/setlinecap /j/setlinejoin /M/setmiterlimit /i/setflat /rc/rectclip /rf/rectfill /rs/rectstroke /f/fill /f*/eofill /sf/selectfont /s/show /xS/xshow /yS/yshow /xyS/xyshow /S/stroke /m/moveto /l/lineto /c/curveto /h/closepath /n/newpath /q/gsave /Q/grestore counttomark 2 idiv {ld}repeat pop /SC{ /ColorSpace fr scs }bd /cgmtx matrix def /sdmtx{cgmtx currentmatrix pop}bd /CM {cgmtx setmatrix}bd /cm {cmmtx astore CM concat}bd /W{clip newpath}bd /W*{eoclip newpath}bd statusdict begin product end dup (HP) anchorsearch{ pop pop pop true }{ pop (hp) anchorsearch{ pop pop true }{ pop false }ifelse }ifelse { { { pop pop (0)dup 0 4 -1 roll put F charpath }cshow } }{ {F charpath} }ifelse /cply exch bd /cps {cply stroke}bd /pgsave 0 def /bp{/pgsave save store}bd /ep{pgsave restore showpage}def /re{4 2 roll m 1 index 0 rlineto 0 exch rlineto neg 0 rlineto h}bd /scrdict 10 dict def /scrmtx matrix def /patarray 0 def /createpat{patarray 3 1 roll put}bd /makepat{ scrmtx astore pop gsave initgraphics CM patarray exch get scrmtx makepattern grestore setpattern }bd /cg_BeginEPSF{ userdict save/cg_b4_Inc_state exch put userdict/cg_endepsf/cg_EndEPSF load put count userdict/cg_op_count 3 -1 roll put countdictstack dup array dictstack userdict/cg_dict_array 3 -1 roll put 3 sub{end}repeat /showpage {} def 0 setgray 0 setlinecap 1 setlinewidth 0 setlinejoin 10 setmiterlimit [] 0 setdash newpath false setstrokeadjust false setoverprint }bd /cg_EndEPSF{ countdictstack 3 sub { end } repeat cg_dict_array 3 1 index length 3 sub getinterval {begin}forall count userdict/cg_op_count get sub{pop}repeat userdict/cg_b4_Inc_state get restore F setpacking }bd /cg_biproc{currentfile/RunLengthDecode filter}bd /cg_aiproc{currentfile/ASCII85Decode filter/RunLengthDecode filter}bd 
/ImageDataSource 0 def L3?{ /cg_mibiproc{pop pop/ImageDataSource{cg_biproc}def}bd /cg_miaiproc{pop pop/ImageDataSource{cg_aiproc}def}bd }{ /ImageBandMask 0 def /ImageBandData 0 def /cg_mibiproc{ string/ImageBandMask xs string/ImageBandData xs /ImageDataSource{[currentfile/RunLengthDecode filter dup ImageBandMask/readstring cvx /pop cvx dup ImageBandData/readstring cvx/pop cvx]cvx bind}bd }bd /cg_miaiproc{ string/ImageBandMask xs string/ImageBandData xs /ImageDataSource{[currentfile/ASCII85Decode filter/RunLengthDecode filter dup ImageBandMask/readstring cvx /pop cvx dup ImageBandData/readstring cvx/pop cvx]cvx bind}bd }bd }ifelse /imsave 0 def /BI{save/imsave xd mark}bd /EI{imsave restore}bd /ID{ counttomark 2 idiv dup 2 add dict begin {def} repeat pop /ImageType 1 def /ImageMatrix[Width 0 0 Height neg 0 Height]def currentdict dup/ImageMask known{ImageMask}{F}ifelse exch L3?{ dup/MaskedImage known { pop << /ImageType 3 /InterleaveType 2 /DataDict currentdict /MaskDict << /ImageType 1 /Width Width /Height Height /ImageMatrix ImageMatrix /BitsPerComponent 1 /Decode [0 1] currentdict/Interpolate known {/Interpolate Interpolate}if >> >> }if }if exch {imagemask}{image}ifelse end }bd /cguidfix{statusdict begin mark version end {cvr}stopped{cleartomark 0}{exch pop}ifelse 2012 lt{dup findfont dup length dict begin {1 index/FID ne 2 index/UniqueID ne and {def} {pop pop} ifelse}forall currentdict end definefont pop }{pop}ifelse }bd /t_array 0 def /t_i 0 def /t_c 1 string def /x_proc{ exch t_array t_i get add exch moveto /t_i t_i 1 add store }bd /y_proc{ t_array t_i get add moveto /t_i t_i 1 add store }bd /xy_proc{ t_array t_i 2 copy 1 add get 3 1 roll get 4 -1 roll add 3 1 roll add moveto /t_i t_i 2 add store }bd /sop 0 def /cp_proc/x_proc ld /base_charpath { /t_array xs /t_i 0 def { t_c 0 3 -1 roll put currentpoint t_c cply sop cp_proc }forall /t_array 0 def }bd /sop/stroke ld /nop{}def /xsp/base_charpath ld /ysp{/cp_proc/y_proc ld base_charpath/cp_proc/x_proc ld}bd 
/xysp{/cp_proc/xy_proc ld base_charpath/cp_proc/x_proc ld}bd /xmp{/sop/nop ld /cp_proc/x_proc ld base_charpath/sop/stroke ld}bd /ymp{/sop/nop ld /cp_proc/y_proc ld base_charpath/sop/stroke ld}bd /xymp{/sop/nop ld /cp_proc/xy_proc ld base_charpath/sop/stroke ld}bd /refnt{ findfont dup length dict copy dup /Encoding 4 -1 roll put definefont pop }bd /renmfont{ findfont dup length dict copy definefont pop }bd L3? dup dup{save exch}if /Range 0 def /Domain 0 def /Encode 0 def /Decode 0 def /Size 0 def /DataSource 0 def /mIndex 0 def /nDomain 0 def /ival 0 def /val 0 def /nDomM1 0 def /sizem1 0 def /srcEncode 0 def /srcDecode 0 def /nRange 0 def /d0 0 def /r0 0 def /di 0 def /ri 0 def /a0 0 def /a1 0 def /r1 0 def /r2 0 def /dx 0 def /Nsteps 0 def /sh3tp 0 def /ymax 0 def /ymin 0 def /xmax 0 def /xmin 0 def /min { 2 copy gt {exch pop}{pop}ifelse }bd /max { 2 copy lt {exch pop}{pop}ifelse }bd /inter { 1 index sub 5 2 roll 1 index sub 3 1 roll sub 3 1 roll div mul add }bd /setupFunEvalN { begin /nDomM1 Domain length 2 idiv 1 sub store /sizem1[ 0 1 nDomM1 { Size exch get 1 sub }for ]store /srcEncode currentdict/Encode known { Encode }{ [ 0 1 nDomM1 { 0 sizem1 3 -1 roll get }for ] }ifelse store /srcDecode currentdict/Decode known {Decode}{Range}ifelse store /nRange Range length 2 idiv store end }bd /FunEvalN { begin nDomM1 -1 0 { 2 mul/mIndex xs Domain mIndex get max Domain mIndex 1 add get min Domain mIndex get Domain mIndex 1 add get srcEncode mIndex get srcEncode mIndex 1 add get inter round cvi 0 max sizem1 mIndex 2 idiv get min nDomM1 1 add 1 roll }for nDomM1 1 add array astore/val xs nDomM1 0 gt { 0 nDomM1 -1 0 { dup 0 gt { /mIndex xs val mIndex get 1 index add Size mIndex 1 sub get mul add }{ val exch get add }ifelse }for }{ val 0 get }ifelse nRange mul /ival xs 0 1 nRange 1 sub { dup 2 mul/mIndex xs ival add DataSource exch get 0 255 srcDecode mIndex 2 copy get 3 1 roll 1 add get inter Range mIndex get max Range mIndex 1 add get min }for end }bd /sh2 { /Coords load 
aload pop 3 index 3 index translate 3 -1 roll sub 3 1 roll exch sub 2 copy dup mul exch dup mul add sqrt dup scale atan rotate /Function load setupFunEvalN clippath {pathbbox}stopped {0 0 0 0}if newpath /ymax xs /xmax xs /ymin xs /xmin xs currentdict/Extend known { /Extend load 0 get { /Domain load 0 get /Function load FunEvalN sc xmin ymin xmin abs ymax ymin sub rectfill }if }if /dx/Function load/Size get 0 get 1 sub 1 exch div store gsave /di ymax ymin sub store /Function load dup /Domain get dup 0 get exch 1 get 2 copy exch sub dx mul exch { 1 index FunEvalN sc 0 ymin dx di rectfill dx 0 translate }for pop grestore currentdict/Extend known { /Extend load 1 get { /Domain load 1 get /Function load FunEvalN sc 1 ymin xmax 1 sub abs ymax ymin sub rectfill }if }if }bd /shp { 4 copy dup 0 gt{ 0 exch a1 a0 arc }{ pop 0 moveto }ifelse dup 0 gt{ 0 exch a0 a1 arcn }{ pop 0 lineto }ifelse fill dup 0 gt{ 0 exch a0 a1 arc }{ pop 0 moveto }ifelse dup 0 gt{ 0 exch a1 a0 arcn }{ pop 0 lineto }ifelse fill }bd /calcmaxs { xmin dup mul ymin dup mul add sqrt xmax dup mul ymin dup mul add sqrt xmin dup mul ymax dup mul add sqrt xmax dup mul ymax dup mul add sqrt max max max }bd /sh3 { /Coords load aload pop 5 index 5 index translate 3 -1 roll 6 -1 roll sub 3 -1 roll 5 -1 roll sub 2 copy dup mul exch dup mul add sqrt /dx xs 2 copy 0 ne exch 0 ne or { exch atan rotate }{ pop pop }ifelse /r2 xs /r1 xs /Function load dup/Size get 0 get 1 sub /Nsteps xs setupFunEvalN dx r2 add r1 lt{ 0 }{ dx r1 add r2 le { 1 }{ r1 r2 eq { 2 }{ 3 }ifelse }ifelse }ifelse /sh3tp xs clippath {pathbbox}stopped {0 0 0 0}if newpath /ymax xs /xmax xs /ymin xs /xmin xs dx dup mul r2 r1 sub dup mul sub dup 0 gt { sqrt r2 r1 sub atan /a0 exch 180 exch sub store /a1 a0 neg store }{ pop /a0 0 store /a1 360 store }ifelse currentdict/Extend known { /Extend load 0 get r1 0 gt and { /Domain load 0 get/Function load FunEvalN sc { { dx 0 r1 360 0 arcn xmin ymin moveto xmax ymin lineto xmax ymax lineto xmin ymax lineto xmin 
ymin lineto eofill } { r1 0 gt{0 0 r1 0 360 arc fill}if } { 0 r1 xmin abs r1 add neg r1 shp } { r2 r1 gt{ 0 r1 r1 neg r2 r1 sub div dx mul 0 shp }{ 0 r1 calcmaxs dup r2 add dx mul dx r1 r2 sub sub div neg exch 1 index abs exch sub shp }ifelse } }sh3tp get exec }if }if /d0 0 store /r0 r1 store /di dx Nsteps div store /ri r2 r1 sub Nsteps div store /Function load /Domain load dup 0 get exch 1 get 2 copy exch sub Nsteps div exch { 1 index FunEvalN sc d0 di add r0 ri add d0 r0 shp { d0 0 r0 a1 a0 arc d0 di add 0 r0 ri add a0 a1 arcn fill d0 0 r0 a0 a1 arc d0 di add 0 r0 ri add a1 a0 arcn fill }pop /d0 d0 di add store /r0 r0 ri add store }for pop currentdict/Extend known { /Extend load 1 get r2 0 gt and { /Domain load 1 get/Function load FunEvalN sc { { dx 0 r2 0 360 arc fill } { dx 0 r2 360 0 arcn xmin ymin moveto xmax ymin lineto xmax ymax lineto xmin ymax lineto xmin ymin lineto eofill } { xmax abs r1 add r1 dx r1 shp } { r2 r1 gt{ calcmaxs dup r1 add dx mul dx r2 r1 sub sub div exch 1 index exch sub dx r2 shp }{ r1 neg r2 r1 sub div dx mul 0 dx r2 shp }ifelse } } sh3tp get exec }if }if }bd /sh { begin /ShadingType load dup dup 2 eq exch 3 eq or { gsave newpath /ColorSpace load scs currentdict/BBox known { /BBox load aload pop 2 index sub 3 index 3 -1 roll exch sub exch rectclip }if 2 eq {sh2}{sh3}ifelse grestore }{ pop (DEBUG: shading type unimplemented\n)print flush }ifelse end }bd {restore}if not dup{save exch}if L3?{ /sh/shfill ld /csq/clipsave ld /csQ/cliprestore ld }if {restore}if end setpacking %%EndFile %%EndProlog %%BeginSetup %%EndSetup %%Page: 1 1 %%PageBoundingBox: 0 0 244 218 %%BeginPageSetup cg_md begin bp sdmtx %RBIBeginFontSubset: HUCPDH+Helvetica %!PS-TrueTypeFont-1.0000-0.0000-2 14 dict begin/FontName /HUCPDH+Helvetica def /PaintType 0 def /Encoding 256 array 0 1 255{1 index exch/.notdef put}for dup 33 /t put dup 34 /r put dup 35 /u put dup 36 /e put dup 37 /space put dup 38 /c put dup 39 /l put dup 40 /a put dup 41 /s put dup 42 /p put dup 43 /d 
put dup 44 /i put dup 45 /T put dup 46 /P put dup 47 /N put dup 48 /F put readonly def 42/FontType resourcestatus{pop pop false}{true}ifelse %APLsfntBegin {currentfile 0(%APLsfntEnd\n)/SubFileDecode filter flushfile}if /FontType 42 def /FontMatrix matrix def /FontBBox[2048 -342 1 index div -914 2 index div 2036 3 index div 2100 5 -1 roll div]cvx def /sfnts [< 74727565000900000000000063767420000000000000009C000003626670676D000000000000040000000322676C7966000000000000072400000C9A6865616400000000000013C0000000386868656100000000000013F800000024686D7478000000000000141C000000446C6F63610000000000001460000000246D6178700000000000001484000000207072657000000000000014A4000003BB05C0001005BD00280580001A042F001F0000FFD90000FFDA0000FFD9FE55FFE605C70010FE6DFFF1033B000000B9000000B902FE3F3C00C0008D009B00AF000600A800C00028005E009800C9016A00B9015C00B400D6011E002E0080000400B8004C00CC01FFFFD1006600A400AF007400C2009500B1000C0028006D0015004C008E0125FF7A000C0040004C00620084FFA200240038008600BD0039005E008E00EDFFA9FFB300400052005500AA00AB00C200CB012302B10413FFAEFFE4000800510074008400AA00D1FF4CFFAF0012002C004200500051008400BE012503DAFF680018003B0098009C009F00A100C100EC018201B4FF68FF76FFD0FFE100020018001C00530053007D01B401E103AF0486FF9CFFEAFFFE001F0028002A00520060009300A300AA00AF00AF00C001000145016B0174019301950240028202B404850517FEFD00060029004700470048006F008800B400B900C400F200F901EF02180310037403C5FF35FFF3000B004B004C0052005500650076007600870087008E00AB00BB0106013001430150017D0194019501D3022A025502580277027802E6034E035C037903D3047304B2058C0598060BFEF5FFBBFFC7FFD50017001D005B0072007E009C00C200D000F400FA01030106011C0125013B0142015E015E0180019B02B901A101B9025001C001D002AA01DF01E301EF01FB0205020C0215022B0274029302AB02C202CE03690395039903DF03F5043E050205A105E5062507DBFE62FE89FECEFF3BFFE1FFF800030008002100390042004E005F0061006F00700034007F008E00AD00AD00AF00BD00C400C500C900C900C900E3011C00ED00F800F901000112011A0132014D014D014E014F01660169019E01BA01BA01BE01E301EF01F602000200020902110217021C0253026202
6D028002D50280031B032A034A035A03AF03AF03C803D603FB03FB04050413041504470449008C046D049A049A04A604A804B204CF0539053E054E055605800589058C036305D105D6067E068E06B206EF06F00728074C076F078C00B400C900C000C10000000000000000000000000004012400AF0032006E0063014401620096014301A10161008A00740064018801EF01700028FF5D037E0347023000AA00BE007B0062009A007D0089035C00A1FFD803AA00D70093006C0000008000A70442001D0597001D008200300000 40292A292827262524232221201F1E1D1C1B1A191817161514131211100D0C0B0A090807060504030201002C4523466020B02660B004262348482D2C452346236120B02661B004262348482D2C45234660B0206120B04660B004262348482D2C4523462361B0206020B02661B02061B004262348482D2C45234660B0406120B06660B004262348482D2C4523462361B0406020B02661B04061B004262348482D2C0110203C003C2D2C20452320B0CD442320B8015A51582320B08D44235920B0ED51582320B04D44235920B09051582320B00D44235921212D2C20204518684420B001602045B04676688A4560442D2C01B9400000000A2D2C00B9000040000B2D2C2045B00043617D6818B0004360442D2C45B01A234445B01923442D2C2045B00325456164B050515845441B2121592D2C20B0032552582359212D2C69B04061B0008B0C6423648BB8400062600C642364615C58B0036159B002602D2C45B0112BB0172344B0177AE5182D2C45B0112BB01723442D2C45B0112BB017458CB0172344B0177AE5182D2CB002254661658A46B040608B482D2CB0022546608A46B040618C482D2C4B53205C58B002855958B00185592D2C20B0032545B019236A4445B01A23444565234520B00325606A20B009234223688A6A606120B0005258B21A401A4523614459B0005058B219401945236144592D2CB9187E3B210B2D2CB92D412D410B2D2CB93B21187E0B2D2CB93B21E7830B2D2CB92D41D2C00B2D2CB9187EC4E00B2D2C4B525845441B2121592D2C0120B003252349B04060B0206320B000525823B002253823B002256538008A63381B212121212159012D2C456920B00943B0022660B00325B005254961B0805358B21940194523616844B21A401A4523606A44B209191A45652345604259B00943608A103A2D2C01B005251023208AF500B0016023EDEC2D2C01B005251023208AF500B0016123EDEC2D2C01B0062510F500EDEC2D2C20B001600110203C003C2D2C20B001610110203C003C2D2C764520B003254523616818236860442D2C7645B00325452361682318456860442D2C7645B0032545616823452361442D2C4569B014B0324B505
821B0205961442D0000000200A10000052F05BD00030007003E402105062F02010004072F03000A05042F0303021A0906072F01001908098821637B182B2B4EF43C4DFD3C4E10F63C4D10FD3C003F3CFD3C3F3CFD3C31303311211127112111A1048EB8FCE205BDFA43B8044DFBB300000100AF000004AA05BD000900394018071E040409031E0100020908066B011A0B03082500190A0BB80157B32195DC182B2B4EF44DFD3C4E10F64DE4003F3F3CED12392FFD313013211521112115211123AF03FBFCCC02D1FD2FC705BDB4FE42AFFD64000002009C0000052A05BD0009000A00764014 3701380602070117012701470158020507010202B8019B4017120606070207080301020806080A030A0A010608020302B8019BB505041A0C0708B8019B400A0900190B0CA0217670182B2B4EF43C4DFD3C4E10F63C4DFD3C11123939392F003F3F3C3F3C12393904872E2B877DC53130005D015D13330111331123011123019CEB02E6BDDFFD0FBE023A05BDFB5A04A6FA4304A5FB5B05BD000200AF000004F805BD000A001400614035690C6912790C7A12044814581468147A140407081E1110100A010E0F1E0100020A080206120C0409141431041A160F092500191516B8010BB3219589182B2B4EF44DFD3C4E10F64DFD11121739003F3F3CFD3C1012392F3CFD3C015D31305D132132161514062321112300272623211121323635AF0295C4F0D6DEFE32C70380784273FE74018C86A705BDDDC8ACFFFD9304B93A1FFE0372900000010021000004C905BD00070034401A01061E00070204080917171A00FB0203250504FB0619088C5E182B4E10F44DF43CFD3CF44E456544E6003F3F3C4DFD3C3130011521112311213504C9FE11CAFE1105BDAFFAF2050EAF0000030052FFDC04470449000F003B003C00DD40382A30010A100B1B0C1C2733481069096A10073908120C09031B320724091D100C1D3B2B022E293BB73B023B322A2512100705081C2722171CB8018A4023171D1F07271D2E0B021D350B3C073C3C1C1407292AA8241A3E1B291C4A0F2738193D3EBC0197002100B9019600182B2B4EF44DEDF4ED4E10F64DE4FDC412392F003F3FED3FED3FEDED1239111217395D1112392EED2EED01111239111739313043794028363715220001192501360F2100181E1B21001620142101212200370221001A1D1721011521172101002B2B2B01103C2B2B2B2B818181005D015D2416333237363D010E010F0106070615013637363534262322070607233E01333217161511141633323637150E0123222726270E012322263534363713010E724E5F59962168326D62315301B43E150C837A8D3B210AA805F7A3BD767517250C1E112A2C265D2A160937CE7C95BDBA978ACF5A2C49
A691151C060E0D1C2F67016C082C182D5C534C2A53C69B484898FD971C220303850C06422340486AB58895A41301E4000002003BFFE103D0044E001A001B00A7402FA719019818A808AA18034A08119B14030314061D1A070D1D140B1B071B1B1710271201032702111A1D0A2717191C1DB80107B321727D182B2B4EF44DED4E10F63C4DED3939ED12392F003F3FED3FED12392F10ED313043794034001908250C150A26000E1310260112110F1007190A26000500032101010204030B160D26000F120D2600091806260104010621012B2B2B2B01103C103C2B2B103C103C2B2B2B81005D015D001617232E012322070615141633323637330E01232202 351000330702D6E317AF10727EAC4A308892708319AF1EF0BBD2FA0112D41C044EB0D76383A86DA0A1DC8977D5C50133E6011A013A0500020038FFDA03ED05C2000B001D00774032370E470E570EA704A91B05250814020F1D1000081D1D07130A021D170B052E132E102911121A1F0B271A191E1F87217242182B2B4EF44DED4E10F63C4DFDE4E4003FED3F3FED3F1139113931304379401A181C090A000101180B2600091C0B260000190226000A1B0826012B2B012B2B818181005D1216333236353426232206150017161711331123350E0123220035341233F692A17DA1A67A88A9018A53303DADA23FAC6FB3FEFAEFDE015FE8D7C9CBC3D0CA0237341E4B021DFA3E956358012DFAEA015700030048FFDA041A0449001C00240025010C40799708991AA71F03050E020F0514150E120F1514400C401408291A014B0BB603C701C603C71BD808D909D61FD823E817E8230BC711C712025C080521240F9A161D243906070716211D1C070A1D160B2507971CA71CB71CD71C0425160F251C05190A0C07110E270F1D27051A27242E072719192627D421A65D182B2B4EF44DFDE44E10F64DEDD4FD391239391112393912392F5D003F3FED3FED12392F3CFD3C10ED1112393130437940460023040503050205010504061F26111012101310141004060C25221B24260020001D26011E1D09170726000B150E26010D0E231A2126011E0521260108180A26000D100A2600002B2B2B2B01103C2B2B103C2B2B2B2A2B2A8101715D00715D5D00161716171615211E013332373637330E01070607062322001110003301262726232206070102B4D638361210FCEF0590978D543014B1074F3152794152C8FEEA0118E2011F0B284AAD7CA805012304476B55516C4AA2A3C55D36473B912E501C100123010601020142FE26754682B38A01DC00000200840000013B05BD000300070036401C07E50400010006030A0917171A06010229070300190809AA216242182B2B4EF43C4DC4FD3CC44E456544E6003F3F3C3F4DED31301333112
31133152384B7B7B7B7042AFBD605BDCC000100890000013D05BD0003002940150000030A0517171A0102290003190405AA216242182B2B4EF43C4DFD3C4E456544E6003F3F31301333112389B4B405BDFA4300020076FE5504250449000E00220074402CA908A717022808201C110E061D15070F060E1D1C0B220E0227181A240A2E102E2129220F1923248721BD5D182B2B4EF43C4DFDE4E44E10F64DED003F3FED3F3FED1139123931304379401C161B00051A260426001B022601051602260101190E260003170626012B2B012B2B2B2B8181005D243635342726232207061514171633013315363736333212111007062322272627112302C6A72546BABB45252546BAFE2EAF36405B7BB6FEB7749A7952303BB4 79D3D2805CB1BB649A7C57A603B18E49283CFEE9FEFDFEA2965F351E49FDDD00000100890000029204470011004F40262703260D37034704040E0810020E0911090C270805070006110A081A13012E10291100191213B80145B321627E182B2B4EF43C4DFDE44E10E6003F3F4D3FC4FDC411123939011112393130005D1333153E0133321617152E0123220615112389AB15A46B05181D101B108892B4042FB9369B0203BE0302AF72FD980000020042FFD703B6044B002E002F012E408F38099805961299149815982A062824252736214621472447275624572766246726790C790D790E7623742474257426A61EA82C1303000B15052D042E13001A151B171C18152D142E280F0B6908262536250225220D0A042B1318C61C1D1307041D2E9A2B0B2F07090E100207002F212F1A1F18161827173E28260727281A310E1F27103E00272E193031B221A65D182B2B4EF44DEDF4FD394E10F64DFD3910F4FD3911123939392F111239113939003F3FEDED3FEDED111217397131304379404C012D022615251A26210E1F21000926072101032C002100052A0721011D121F21001B14182101200F22210021220E0D08270A21012625090A012D04210006290421001E111C210119161C2101002B2B2B2B103C103C2B103C103C2B012B2B2B2B2B2B2B2B2B81005D5D015D13161716333236353427262F01262726353436333217160723262726232206151417161F011617161514062322262701EF082544A864983D27738F894174DBB9F26B4302AA05263E99666945284E77C24269D9DEEFC70701B701505A3057575B4524161D24222A498198BC8E5A683D32474E40462A19131D2F2C45948FD0D9A002F900010017FFEF0209055A00180052B50D2E0AC00E01B8013F40250416391703060E0A111A17171A0301062900150E150F031F030203FC1619191AFC21677D182B2B4EF44DFD5D39C42F3CFD3C104E456544E6002F3F3F3C4DFD3CED10FDE431301333113315
231114171633323637150E012322263511233533A8B6ABAB2615310D1E141F43277E5A9191055AFED593FD4538130B01028E0908816702C59300020080FFE303DE044900170018005E403AB814C81402091308141913191428067703D707070800050E0A00060D0A051D120B180718180B160D2E0A290C0B1A1A01291619191AD2216242182B2B4EF44DED4E10F63C4DFDE41112392F003F3FED3F3F3C391112393130005D015D0111141716333237363511331123370607062322272635112501381A3083BC4425B4AA0223346793E5532D01AF042FFD39523460A85A9D020EFBD19E3D2A5499528902D81A000000000100000000000073F8B13B5F0F3CF501010800000000015F4E858000000000B53F1B40FEAAFC6E07F40834000000090001000000000000000100000629FE29 0000081FFEAAFEB307F400010000000000000000000000000000001105C700A10239000004E300AF05C7009C055600AF04E30021047300520400003B047300380473004801C7008401C700890473007602AA0089040000420239001704730080000000330033006500BA0110013D0207028902F503BE03EB040C047E04C405A405F3064D00010000001100530007005B0006000200100010002B000007E80161000600014118008001A6009001A600A001A600030069018B0079018B0089018B0099018B00040089018B0099018B00A9018B00B9018BB2040840BA0179001A014A400B041F5414191F180A0B1FD2B80106B49E1FD918E3BB0119000D00E10119B20D0009410A01A0019F0064001F01A50025017A00480028019AB3296C1F60410A01A9007001A9008001A90003008001A9000101A9B21E321FBE012C00250401001F0126001E0401B61FE7312D1FE531B80201B21FC227B80401B21FC11EB80201400F1FC01D9E1FBF1D671FBE1D671FAB27B80401B21FAA29B80401B61FA91D6C1F931EB8019AB21F921DB80101B21F911DB80101B21F751DB80201B61F6D29961F6431B8019AB21F4C96B802ABB21F391DB80156400B1F3638211F351DE41F2F27B80801400B1F2D1D4C1F2A31CD1F241DB802ABB21F201EB8012540111F1C1D931F3A1D4C1F1E1D45273A1D4527BB01AA019B002A019BB2254A1FBA019B0025017AB349293896B8017BB348283125B8017A403648289629482725294C1F252946272729482756C80784075B07410732072B072807260721071B071408120810080E080C080A08080807B801ACB23F1F06BB01AB003F001F01ABB308060805B801AEB23F1F04BB01AD003F001F01ADB70804080208000814B8FFE0B40000010014B801ABB41000000100B801ABB606100000010006B801ADB300000100B801AD401F0400000100041000000100100200000100020000000
1000002010802004A00B0018DB806008516763F183F123E113946443E113946443E113946443E113946443E113946443E11394660443E11394660443E11394660442B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B18011DB0964B5358B0AA1D59B0324B5358B0FF1D592B2B2B2B2B2B2B2B182B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B74752B2B2B65422B2B4B5279B376706A66456523456023456560234560B08B766818B080622020B16A704565234520B003266062636820B003266165B070236544B06A234420B176664565234520B003266062636820B003266165B066236544B0762344B10066455458B166406544B27640764523614459B36242725D456523456023456560234560B089766818B080622020B172424565234520B003266062636820B003266165B0422365 44B072234420B1625D4565234520B003266062636820B003266165B05D236544B0622344B1005D455458B15D406544B262406245236144592B2B2B2B456953427374B8019A2045694B20B02853B049515A58B020615944B801A6204569447500 00>] def /CharStrings 17 dict dup begin /.notdef 0 def /space 1 def /F 2 def /N 3 def /P 4 def /T 5 def /a 6 def /c 7 def /d 8 def /e 9 def /i 10 def /l 11 def /p 12 def /r 13 def /s 14 def /t 15 def /u 16 def end readonly def currentdict dup/FontName get exch definefont pop end %APLsfntEnd 42/FontType resourcestatus{pop pop true}{false}ifelse {currentfile 0(%APLT1End\n)/SubFileDecode filter flushfile}if /FontType 1 def /FontMatrix [ 0.00048828125 0 0 0.00048828125 0 0 ] def /FontBBox{-342 -914 2036 2100}def /UniqueID 4045371 def currentdict currentfile eexec 
54544758EC884CF30C3CD503CEDBFF3839C47C3C3333173232E3FDBFF439491DB843E1924E63AA7726BBB0485AB56D93D8C0906F647A47162891E73FFC2A9873C4B1EAC5EEBDFFC4D06084FBD84139DF4583C6E259D10699944D1068C9C45667DCCCFB9B7EA01B606435EDCBD273ABAC093D14085CCBAC149BD7382E842CFE0D7FE4FD2EF589A2471F6074A80A8B675C2F7A50D63AC1EF90D787BADD11633CB01CF6EE3B37AAF9078A69AC4740E9B6525D78BBD839551A1CB80DB8682FA5E87591BBD6EE8B946063A2A58D9CA3685AB305495DC5FB5747EB8A9A059C4976C0FE4EEAB1D56FF47F1E9664ED9F4A7DAB763AF92B2F6CF2FA7DEC24710E0B9096E30F772BA7FEA9BDBE496C42ED2CEB58F54E80BDF57CE7B4DB6CCFE7182F43BF93CCA0767AF95D62C5D2C3DC6AE1E6D139F51A2C63432117F1714C5566572EE9967A715420ABDCD1D7BD74F8450B89965FCC81C6ACA565C5F3CCF91D430D1F953E4F1A645300A98DD8C47CD64555F08F422340A85404EAE0D3229C4F9336B9470CACBD6BBF3395104750A915CC6EAAC197668267B8C62D2764C8CD69FD937CA3C924D997A0EDE7964BEB9EA2F92EF70C5E5DA0AA5567765E71F2B911B3C5586B741EEB93F3C73016EC16BFF283758900903D203992EFC8BAFAF13579C602F38C91B322782D282AC603D1F5A452D1E7AE90B2FF78B151C731C717F5B5A2EDB414C7411DC55C9F11252DB02F5559851CCD7D7CB91F73C4FE2797104F2A7A4B0F92CC5F8FC75B9E037F311CB425CC75313E5F79F6A8C8DB2D2F44546E17686656E54AB2EA61DCC8CCDE9AA90B41909B5B684DF8D176A01DB5C96971FD8CE4A7887EDAB3F9C6BEBC7BDADBD4D6F2A95A091978330458763D446A1F2DD5421BBBAFA0925374C00CA6BF911D0534C3F222546FF3B707D3934CDF36A8190C3B59B529FF4DBC9516B6E2B1C5FB488AA1DB843A49B9DBDD3638A6A1D908A87D3B1A399C14870827F1D517C21705CE98353E08620C6841D5D027FA791C845DC7D65BFFD1321ADEED78BB6C9D0DEDC6198960816AF07D4B525E78FD109B9CD53DB5606FEBC2026E4BFA02E2700EA52847DB1B90597A85FA133C3137B73989ED47B112943419AA982EC5AAA878EAAEFB3E5C83CA3FCBDD6EAF8492A2E347EDBF91AD9F419AE4534BD00890980D6CFE7FEDC82C3F135C9C1EEA6630A458EC0F8CA7298432AD967D5DEDC5A3CC3B3330DBB662558F23A2A398C36F7C838E5C688497D7BC94BBB6989E88AC0731D1163AB2453E32A21DDEB7CC1B73816C9D4D6908B801FE30BCC4314F3FF62D6C66477419A34E470FEF957F1F105ABC68A501356F19A3E9F05860CB5A473742A5EAF44000CF4538DBAE3914A59380B0B889295C6106E6FCAAF32EC9DEE5564FE2C3C3A6
9D943AA5622A99071A25A8A0B6ECAFDB07162F3D1DA2FABE 9A12934DA3FF393FC3815D484B8774AE365780F8FC45CC7C0930E86EECF82FF20D04E1F63B07C137ABAF10337BD7FCFD1B489216F3BDC00B40654860A50B9600FD4529F8169071B73BF80D181DC539C6CB8DB7CE885615DFF6FB6E8E9C265B29CCEC93EB8D21DDFEF9C836C0B39BE860637E0A55966E94805B913DD032EC2ED31433BC310F38C221CB418914A24532F8FB91A202F4FDF36A4874B1CED2FE767AE676636634C22AEC718D504760FFCD5266C8A820464D46D1F14F5200F00007503EA560930066E6FE0685AE48B3B4D13A083D850FCDD44CCF290520818E67AB0080896883C9D55B041D879722C4D5A894C9684AC3B52697947E1282A75E318DBC675ED900281CC7C4F88BE51FEB4425F29EB4A1DEE5E38EC264CDCD4EC94A7D922FB345577775F4F590AC7901AE5F03F98F8D863123020FB3976164C2EA5AF6B187D2E61ACE100B6B5B6545176D4D64DCEC5ACF451DDC99861D09501FA6D04FAADDA5AA7DC47F71BBE0DE3C8302D08A28C16ECABBBF0C94D7261A217859584C3CD38D640AA66C1B8FDC827542AAB36DFFF06500127475EB8992A3D34F8C623E0D93F356127EF956588C3418061813F48BEA13B9A1BE35328B1E412921644E2A9DF4FBFAA528E998B2F8E848AA4D39DA6287C76109AC914338E86665652E98F9713549D8A691F4A87C715D8C17CB7E507F70D69608BE44B98E8B893BBDD4F773803B8545D1220FFDEAC9206FBEB2B3815DA69FE8E73C5C93BD0E38A90E965449AFBE531A37CBEA45CBEF82CC486A5D526493985E49F6D091F2F94C8E6D7760BD6E78F974612C2BEA0442CDAA0F0D02BFA8234AFDC95EEC1558F8E8FC2853333A9E536A48195625CD3E2DD5B9417CF661C71AF1DD6998334D14D115388A833471A413FBDD1D6A994E25A2357FFD6D936989FDEA12291EEE1B6CF64EC5BD5983BEA7800AC5A15C11DDC0B3EB6A6E47E65A8D94074BAE6F3873E7A219267A573A82AD7CA68A6BDC7D7E304A1062FDB88F7E9985C5E04D8B9F25B2A44625421ECD334110261D57C51D51B97B9DB233FCCCACB16738E1E1972F75FA16EC40D8CCB730E1483888FD09A8815E0C8ADC79A698837582DF9FEF36076E03BE45A36D4AC60B65654741ED8A2A53114C94E890E0884B8864CD54436F34A8D6CCD86BA0ADE09453D26B44D4A5E1C0800FA519EAC64858D5912B8258E3C988FA51928DF0FE1BB1BA34BDB6DFC33D710E856933C131E2406DEB33973AC80618D3A8929211244D862F9550313E116BC14A0449A2565495B7A188BD118BE76AA76109332466C67B3D3BD2F161C56592B84411EE9EBA966742B2A14B4BEE4281A8A3C00A9A9870D69259B13E0E52AE352283A522EE25619EAB7
B35A9980A5615E41129A50B62350783753B65965D5315C5F775CC9999D0B1FF9184D73B9525F2166A69E806A57A40EA51 9683D62F5EA60CB2738B8709E90900451C7000D582260C407B5F458507714CDD1E25FDCBC713542C1139371758063DEDB0CCF2BE3C99959D1DF36712295B391E65D22455EE32E8117C36AA7A1A0F2A6BE5AFDC0308749323D1925954ACD2354B3D2489477F96AB3B292504D17AC2A819279B821844A008268B222C9295617B1184067A904F078EB15E024351FA203F58BE39B5D9A5A5B0C8C354A25738FA0909043BC980C4A505E08478E17F82C52D54B125EF2FDEB86F347E3BC7B4767873F1D426DF938D5F4B356A4FB994DD2434363DC119FA0543CA5DBE5C2BDB0CE05290D9402D4276EA46068859847F7936DAB6ACE0A25A3CCC0F0104AAF8E34118016305033701D15B870EBA2D26385AE3072FD9EC9E589F0A0C1AECB7124BA2B5CB4E7F68157940F5F5990C2302CBD4C148E365E5D8753407C5F6FDA66FF7697FF7B8EC854CC84E05EAED82ABFD9E4C4790D717A381231129C36E914F2F877E57BEE4495CDE13592D16209ABDD73574F3AB6903C13B01D0C9637574DAC9F9D1650DECEFAB1E6AA3E9716864187F76F8E39D99D730B251C00F91010D6E9288B7210C604308BFACEF1F2F33BFDA13188E007677F7C4B152964ECD31E9B198163D31CD917D3760C3E216290BFE3BA896F7E607287BD634F081B0BE305A6E19B46D700231EE697073058105A0E3D138398A342128D9E2A32ACD7FA306F189E8EC43DBD8DFC2C99C9F5C1920E5BF91900E56546AC0CB48B179C4573FCCB9412C3CF63A25FB8BF16807087C5E0CBC5508CAC709F3E2457D04E754963658E701CA5B740A629F2CE1B1D64ABE 0000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000 cleartomark end %APLT1End %RBIEndFontSubset /HUCPDH+Helvetica cguidfix /F1.1/HUCPDH+Helvetica renmfont %RBIBeginFontSubset: RPMHUL+Helvetica-Oblique %!PS-TrueTypeFont-1.0000-0.0000-2 14 dict begin/FontName 
/RPMHUL+Helvetica-Oblique def /PaintType 0 def /Encoding 256 array 0 1 255{1 index exch/.notdef put}for dup 33 /t put dup 34 /r put dup 35 /u put dup 36 /e put dup 37 /space put dup 38 /p put dup 39 /o put dup 40 /s put dup 41 /i put dup 42 /v put dup 43 /n put dup 44 /g put dup 45 /a put dup 46 /f put dup 47 /l put dup 48 /c put readonly def 42/FontType resourcestatus{pop pop false}{true}ifelse %APLsfntBegin {currentfile 0(%APLsfntEnd\n)/SubFileDecode filter flushfile}if /FontType 42 def /FontMatrix matrix def /FontBBox[2048 -342 1 index div -914 2 index div 2036 3 index div 2100 5 -1 roll div]cvx def /sfnts [< 74727565000900000000000063767420000000000000009C000003626670676D000000000000040000000322676C7966000000000000072400000E6668656164000000000000158C000000386868656100000000000015C400000024686D747800000000000015E8000000446C6F6361000000000000162C000000246D617870000000000000165000000020707265700000000000001670000003BB05C0001005BD00280580001A042F001F0000FFD90000FFDA0000FFD9FE55FFE605C70010FE6DFFF1033B000000B9000000B902FE3F3C00C0008D009B00AF000600A800C00028005E009800C9016A00B9015C00B400D6011E002E0080000400B8004C00CC01FFFFD1006600A400AF007400C2009500B1000C0028006D0015004C008E0125FF7A000C0040004C00620084FFA200240038008600BD0039005E008E00EDFFA9FFB300400052005500AA00AB00C200CB012302B10413FFAEFFE4000800510074008400AA00D1FF4CFFAF0012002C004200500051008400BE012503DAFF680018003B0098009C009F00A100C100EC018201B4FF68FF76FFD0FFE100020018001C00530053007D01B401E103AF0486FF9CFFEAFFFE001F0028002A00520060009300A300AA00AF00AF00C001000145016B0174019301950240028202B404850517FEFD00060029004700470048006F008800B400B900C400F200F901EF02180310037403C5FF35FFF3000B004B004C0052005500650076007600870087008E00AB00BB0106013001430150017D0194019501D3022A025502580277027802E6034E035C037903D3047304B2058C0598060BFEF5FFBBFFC7FFD50017001D005B0072007E009C00C200D000F400FA01030106011C0125013B0142015E015E0180019B02B901A101B9025001C001D002AA01DF01E301EF01FB0205020C0215022B0274029302AB02C202CE03690395039903DF03F5
043E050205A105E5062507DBFE62FE89FECEFF3BFFE1FFF800030008002100390042004E005F0061006F00700034007F008E00AD00AD00AF00BD00C400C500C900C900C900E3011C00ED00F800F901000112011A0132014D014D014E014F01660169019E01BA01BA01BE01E301EF01F602000200020902110217021C02530262026D028002D50280031B032A034A035A03AF03AF03C803D603FB03FB04050413041504470449008C046D049A049A04A604A804B204CF0539053E054E055605800589058C036305D105D6067E068E06B206EF06F00728074C076F078C00B400C900C000C10000000000000000000000000004012400AF0032006E0063014401620096014301A10161008A00740064018801EF01700028FF5D037E0347023000AA00BE007B0062009A007D0089035C00A1FFD803AA00D70093006C0000008000A70442001D0597001D008200300000 40292A292827262524232221201F1E1D1C1B1A191817161514131211100D0C0B0A090807060504030201002C4523466020B02660B004262348482D2C452346236120B02661B004262348482D2C45234660B0206120B04660B004262348482D2C4523462361B0206020B02661B02061B004262348482D2C45234660B0406120B06660B004262348482D2C4523462361B0406020B02661B04061B004262348482D2C0110203C003C2D2C20452320B0CD442320B8015A51582320B08D44235920B0ED51582320B04D44235920B09051582320B00D44235921212D2C20204518684420B001602045B04676688A4560442D2C01B9400000000A2D2C00B9000040000B2D2C2045B00043617D6818B0004360442D2C45B01A234445B01923442D2C2045B00325456164B050515845441B2121592D2C20B0032552582359212D2C69B04061B0008B0C6423648BB8400062600C642364615C58B0036159B002602D2C45B0112BB0172344B0177AE5182D2C45B0112BB01723442D2C45B0112BB017458CB0172344B0177AE5182D2CB002254661658A46B040608B482D2CB0022546608A46B040618C482D2C4B53205C58B002855958B00185592D2C20B0032545B019236A4445B01A23444565234520B00325606A20B009234223688A6A606120B0005258B21A401A4523614459B0005058B219401945236144592D2CB9187E3B210B2D2CB92D412D410B2D2CB93B21187E0B2D2CB93B21E7830B2D2CB92D41D2C00B2D2CB9187EC4E00B2D2C4B525845441B2121592D2C0120B003252349B04060B0206320B000525823B002253823B002256538008A63381B212121212159012D2C456920B00943B0022660B00325B005254961B0805358B21940194523616844B21A401A4523606A44B209191A45652345604259B00943608A103A2D2
C01B005251023208AF500B0016023EDEC2D2C01B005251023208AF500B0016123EDEC2D2C01B0062510F500EDEC2D2C20B001600110203C003C2D2C20B001610110203C003C2D2C764520B003254523616818236860442D2C7645B00325452361682318456860442D2C7645B0032545616823452361442D2C4569B014B0324B505821B0205961442D0000000200A10000052F05BD00030007003E402105062F02010004072F03000A05042F0303021A0906072F01001908098821637B182B2B4EF43C4DFD3C4E10F63C4D10FD3C003F3CFD3C3F3CFD3C31303311211127112111A1048EB8FCE205BDFA43B8044DFBB30000030052FFDC04470449000F003B003C00DD40382A30010A100B1B0C1C2733481069096A10073908120C09031B320724091D100C1D3B2B022E293BB73B023B322A2512100705081C2722171CB8018A4023171D1F07271D2E0B021D350B3C073C3C1C1407292AA8241A3E1B291C4A0F2738193D3EBC01 97002100B9019600182B2B4EF44DEDF4ED4E10F64DE4FDC412392F003F3FED3FED3FEDED1239111217395D1112392EED2EED01111239111739313043794028363715220001192501360F2100181E1B21001620142101212200370221001A1D1721011521172101002B2B2B01103C2B2B2B2B818181005D015D2416333237363D010E010F0106070615013637363534262322070607233E01333217161511141633323637150E0123222726270E012322263534363713010E724E5F59962168326D62315301B43E150C837A8D3B210AA805F7A3BD767517250C1E112A2C265D2A160937CE7C95BDBA978ACF5A2C49A691151C060E0D1C2F67016C082C182D5C534C2A53C69B484898FD971C220303850C06422340486AB58895A41301E4000002003BFFE103D0044E001A001B00A7402FA719019818A808AA18034A08119B14030314061D1A070D1D140B1B071B1B1710271201032702111A1D0A2717191C1DB80107B321727D182B2B4EF44DED4E10F63C4DED3939ED12392F003F3FED3FED12392F10ED313043794034001908250C150A26000E1310260112110F1007190A26000500032101010204030B160D26000F120D2600091806260104010621012B2B2B2B01103C103C2B2B103C103C2B2B2B81005D015D001617232E012322070615141633323637330E01232202351000330702D6E317AF10727EAC4A308892708319AF1EF0BBD2FA0112D41C044EB0D76383A86DA0A1DC8977D5C50133E6011A013A0500030048FFDA041A0449001C00240025010C40799708991AA71F03050E020F0514150E120F1514400C401408291A014B0BB603C701C603C71BD808D909D61FD823E817E8230BC711C712025C080521240F9A161D243906070716211D1C070A
1D160B2507971CA71CB71CD71C0425160F251C05190A0C07110E270F1D27051A27242E072719192627D421A65D182B2B4EF44DFDE44E10F64DEDD4FD391239391112393912392F5D003F3FED3FED12392F3CFD3C10ED1112393130437940460023040503050205010504061F26111012101310141004060C25221B24260020001D26011E1D09170726000B150E26010D0E231A2126011E0521260108180A26000D100A2600002B2B2B2B01103C2B2B103C2B2B2B2A2B2A8101715D00715D5D00161716171615211E013332373637330E01070607062322001110003301262726232206070102B4D638361210FCEF0590978D543014B1074F3152794152C8FEEA0118E2011F0B284AAD7CA805012304476B55516C4AA2A3C55D36473B912E501C100123010601020142FE26754682B38A01DC000001001C0000021705D20017004D402B071D060A1D03010F1439160D06120A1917171A0E0D1129171207120F0E1F0E020EFC14191819FC21677E182B2B4EF44DFD5D39C42F3CFD3C104E4565 44E6003F3F3C4DFD3C3FEDD4ED313012373633321617152E012322061533152311231123353335B5233FB41124171C190B5220B2B4B295950542345C0202A4020155AE8EFC64039C8EA80003003DFE3B03E80449001F002D002E00B7404D36144908490958085909880CA91BA81DA927A62BB91B0B4008031622290EC40A221D1F070406291D190A121D0A0F2E072E2E051C032E162E2D29051A300C0E270D3E26271C192F3087217242182B2B4EF44DEDF4ED394E10F64DFDE4F51112392F003F3FED3FED3F3FED10ED1112393931304379402C23281A1E0B1124251026231E262600281A262600110B0E21000F0E0C0D251D222601271B2926000F0C122100002B2B2B01103C103C2B2B2B2B2B818181005D00171617353311140706212226273316171633323736270E0123222411100033002623220706151416333237363501027C5E3335A63C70FEC9ADEC0EB70D273D83CF40260336987DAEFEFB0107BA0144A47FBE4625937CC24F2CFED104423E234387FC32CC76DA9BA548273C9256DD5250F7011D010D012EFEA1C0B25F9AB5BDAF6384022D000200840000013B05BD000300070036401C07E50400010006030A0917171A06010229070300190809AA216242182B2B4EF43C4DC4FD3CC44E456544E6003F3F3C3F4DED3130133311231133152384B7B7B7B7042AFBD605BDCC000100890000013D05BD0003002940150000030A0517171A0102290003190405AA216242182B2B4EF43C4DFD3C4E456544E6003F3F31301333112389B4B405BDFA4300020084000003ED04490019001A005E4031B706C706020406140627147606740705140C021418101D05070006180B0A1A071A1A0
00C29091A1C012E18291900191B1CB80106B3216242182B2B4EF43C4DFDE44E10F64DED12392F003F3F3C3F3FED1139390112393130005D015D1333153E01333217161511231134272623220706070E011511230184AB4CAA68E4502CB71D307E40294A382D1BB401A7042F985E529F57A2FD5102A3623C640D1642357169FDCF0449000003003BFFD90421044E000C0018001900904033980896109916A504A808A610A916B808C808D704E50EE9140C3A08061D18070C1D120B190719191502270F1A1B092715191A1BB80109B321725D182B2B4EF44DED4E10F64DED12392F003F3FED3FED31304379402C001704260B1309260000110226010717092600050D0226010A140C260001100C26000816062601030E0626012B2B2B2B012B2B2B2B2B81005D241235342726232206151416331200111002212200351000330702E085304CBAA59696A3D6011EFCFEF7DDFEFC0112E70674010FA6965E94FCB2ABE403DAFEECFEF4FEFDFEAE012BFC010E01400500020076FE5504250449000E00220074402CA908A717022808201C110E061D15070F060E 1D1C0B220E0227181A240A2E102E2129220F1923248721BD5D182B2B4EF43C4DFDE4E44E10F64DED003F3FED3F3FED1139123931304379401C161B00051A260426001B022601051602260101190E260003170626012B2B012B2B2B2B8181005D243635342726232207061514171633013315363736333212111007062322272627112302C6A72546BABB45252546BAFE2EAF36405B7BB6FEB7749A7952303BB479D3D2805CB1BB649A7C57A603B18E49283CFEE9FEFDFEA2965F351E49FDDD00000100890000029204470011004F40262703260D37034704040E0810020E0911090C270805070006110A081A13012E10291100191213B80145B321627E182B2B4EF43C4DFDE44E10E6003F3F4D3FC4FDC411123939011112393130005D1333153E0133321617152E0123220615112389AB15A46B05181D101B108892B4042FB9369B0203BE0302AF72FD980000020042FFD703B6044B002E002F012E408F38099805961299149815982A062824252736214621472447275624572766246726790C790D790E7623742474257426A61EA82C1303000B15052D042E13001A151B171C18152D142E280F0B6908262536250225220D0A042B1318C61C1D1307041D2E9A2B0B2F07090E100207002F212F1A1F18161827173E28260727281A310E1F27103E00272E193031B221A65D182B2B4EF44DEDF4FD394E10F64DFD3910F4FD3911123939392F111239113939003F3FEDED3FEDED111217397131304379404C012D022615251A26210E1F21000926072101032C002100052A0721011D121F21001B14182101200F22210021220E0D0827
0A21012625090A012D04210006290421001E111C210119161C2101002B2B2B2B103C103C2B103C103C2B012B2B2B2B2B2B2B2B2B81005D5D015D13161716333236353427262F01262726353436333217160723262726232206151417161F011617161514062322262701EF082544A864983D27738F894174DBB9F26B4302AA05263E99666945284E77C24269D9DEEFC70701B701505A3057575B4524161D24222A498198BC8E5A683D32474E40462A19131D2F2C45948FD0D9A002F900010017FFEF0209055A00180052B50D2E0AC00E01B8013F40250416391703060E0A111A17171A0301062900150E150F031F030203FC1619191AFC21677D182B2B4EF44DFD5D39C42F3CFD3C104E456544E6002F3F3F3C4DFD3CED10FDE431301333113315231114171633323637150E012322263511233533A8B6ABAB2615310D1E141F43277E5A9191055AFED593FD4538130B01028E0908816702C59300020080FFE303DE044900170018005E403AB814C81402091308141913191428067703D707070800050E0A00060D0A051D120B180718180B160D2E0A290C0B1A1A01291619191AD2216242182B2B4EF44DED4E10F63C 4DFDE41112392F003F3FED3F3F3C391112393130005D015D0111141716333237363511331123370607062322272635112501381A3083BC4425B4AA0223346793E5532D01AF042FFD39523460A85A9D020EFBD19E3D2A5499528902D81A000001000B000003EA042F00060102402E4201C5010200670068026803670687048805A700A802084700480245044A0586048905C704C80508492873280708B80109B321677E182B2B4B5279B8FF70B40105042004B80183B703036D1202010205B80183401E06066D120000010506040301010502030603000605040A0817171A03AF02BA018400000184B301AF0619194EF4184DFDE0E0FD194E456544E618003F3C3F173C1239011112391239074D2E2B104EE44D072E2B104EE44D2B4B51794025022912030304002912060605010502030603000605040A0817171A020403AF050001AF0619194EF4184DFD3939FD3939194E456544E618003F3C3F173C12390507102B07102B313001715D005D7113090133012301DC011E012BC5FE6CC0FE75042FFC980368FBD1042F0000000100000000000073F8B13B5F0F3CF501010800000000015F4E858000000000B53F1B40FEAAFC6E07F40834000000090001000000000000000100000629FE290000081FFEAAFEB307F400010000000000000000000000000000001105C700A102390000047300520400003B047300480239001C0473003D01C7008401C70089047300840473003B0473007602AA00890400004202390017047300800400000B00000033003300FD017F024
80293033A0367038803E4045C04CE051405F40643069D073300010000001100530007005B0006000200100010002B000007E80161000600014118008001A6009001A600A001A600030069018B0079018B0089018B0099018B00040089018B0099018B00A9018B00B9018BB2040840BA0179001A014A400B041F5414191F180A0B1FD2B80106B49E1FD918E3BB0119000D00E10119B20D0009410A01A0019F0064001F01A50025017A00480028019AB3296C1F60410A01A9007001A9008001A90003008001A9000101A9B21E321FBE012C00250401001F0126001E0401B61FE7312D1FE531B80201B21FC227B80401B21FC11EB80201400F1FC01D9E1FBF1D671FBE1D671FAB27B80401B21FAA29B80401B61FA91D6C1F931EB8019AB21F921DB80101B21F911DB80101B21F751DB80201B61F6D29961F6431B8019AB21F4C96B802ABB21F391DB80156400B1F3638211F351DE41F2F27B80801400B1F2D1D4C1F2A31CD1F241DB802ABB21F201EB8012540111F1C1D931F3A1D4C1F1E1D45273A1D4527BB01AA019B002A019BB2254A1FBA019B0025017AB349293896B8017BB348283125B8017A403648289629482725294C1F252946272729482756C80784075B07410732072B07 2807260721071B071408120810080E080C080A08080807B801ACB23F1F06BB01AB003F001F01ABB308060805B801AEB23F1F04BB01AD003F001F01ADB70804080208000814B8FFE0B40000010014B801ABB41000000100B801ABB606100000010006B801ADB300000100B801AD401F04000001000410000001001002000001000200000001000002010802004A00B0018DB806008516763F183F123E113946443E113946443E113946443E113946443E113946443E11394660443E11394660443E11394660442B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B18011DB0964B5358B0AA1D59B0324B5358B0FF1D592B2B2B2B2B2B2B2B182B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B74752B2B2B65422B2B4B5279B376706A66456523456023456560234560B08B766818B080622020B16A704565234520B003266062636820B003266165B070236544B06A234420B176664565234520B003266062636820B003266165B066236544B0762344B10066455458B166406544B27640764523614459B36242725D456523456023456560234560B089766818B080622020B172424565234520B003266062636820B003266165B042236544B072234420B1625D4565234520B003266062636820B003266165B05D236544B0622344B1005D455458B15D406544B262406245236144592B2B2B2B456953427374B8019A2045694B20B02853B049515A58B0
20615944B801A6204569447500 00>] def /CharStrings 17 dict dup begin /.notdef 0 def /space 1 def /a 2 def /c 3 def /e 4 def /f 5 def /g 6 def /i 7 def /l 8 def /n 9 def /o 10 def /p 11 def /r 12 def /s 13 def /t 14 def /u 15 def /v 16 def end readonly def currentdict dup/FontName get exch definefont pop end %APLsfntEnd 42/FontType resourcestatus{pop pop true}{false}ifelse {currentfile 0(%APLT1End\n)/SubFileDecode filter flushfile}if /FontType 1 def /FontMatrix [ 0.00048828125 0 0 0.00048828125 0 0 ] def /FontBBox{-342 -914 2036 2100}def /UniqueID 4045371 def currentdict currentfile eexec 54544758EC884CF30C3CD503CEDBFF3839C47C3C3333173232E3FDBFF439491DB843E1924E63AA7726BBB0485AB56D93D8C0906F647A47162891E73FFC2A9873C4B1EAC5EEBDFFC4D06084FBD84139DF4583C6E259D10699944D1068C9C45667DCCCFB9B7EA01B606435EDCBD273ABAC093D14085CCBAC149BD7382E842CFE0D7FE4FD2EF589A2471F6074A80A8B675C2F7A50D63AC1EF90D787BADD11633CB01CF6EE3B37AAF9078A69AC4740E9B6525D78BBD839551A1CB80DB8682FA5E87591BBD6EE8B946063A2A58D9CA3685AB305495DC5FB5747EB8A9A059C4976C0FE4EEAB1D56FF47F1E9664ED9F4A7DAB763AF92B2F6CF2FA7DEC24710E0B9096E30F772BA7FEA9BDBE496C42ED2CEB58F54E80BDF57CE7B4DB6CCFE7182F43BF93CCA0767AF95D62C5D2C3DC6AE1E6D139F51A2C63432117F1714C5566572EE9967A715420ABDCD1D7BD74F8450B89965FCC81C6ACA565C5F3CCF91D430D1F953E4F1A645300A98DD8C47CD64555F08F422340A85404EAE0D3229C4F9336B9470CACBD6BBF3395104750A915CC6EAAC197668267B8C62D2764C8CD69FD937CA3C924D997A0EDE7964BEB9EA2F92EF70C5E5DA0AA5567765E71F2B911B3C5586B741EEB93F3C73016EC16BFF283758900903D203992EFC8BAFAF13579C602F38C91B322782D282AC603D1F5A452D1E7AE90B2FF78B151C731C717F5B5A2EDB414C7411DC55C9F11252DB02F5559851CCD7D7CB91F73C4FE2797104F2A7A4B0F92CC5F8FC75B9E037F311CB425CC75313E5F79F6A8C8DB2D2F44546E17686656E54AB2EA61DCC8CCDE9AA90B41909B5B684DF8D176A01DB5EE3F8A51409586DB6678F1BE9481D766DE364A7211FDDFC1FECEAFB3CBE0471BF99E59B2EC74CBD51FBD8D15DEADB0354802A61B0D71D7E5FF888F681C9E0164438C1275B945C082A0E58B8C1CBCB02806FEF755E8A794599D186B8CE97C941D6123E822BC702B06CB5
840D66065E9DD714A5E7D86FB810A7B5D79F8577D8DED4A6C392933464C8D3F72968FB8635368E3B3F7E5C4206AEF6A37330ADE67B806C34E9FCBC6A97B03A5747D7102F65AC25C782FE107E76DF334C63A9D54A74624295087434C0D9263BE248F548CD249B2BE0AF6A97E8E0154F2EDACF8798EC16D06821D3D0E5D569463AB8C64F33584A6A72DECF789197F10856072EE5A1D62F33144366BEBAD4BEF50A4669A2379787F1199118FAE55B1713092CD8D2A7821391FE6A57CA616E571457C0C30D098E27C9AF8DEC8D641795DDB03230927CD2288C364A332C9C4ECB7325AF3E09AF4CD53DDC2881A08F44BBC15716703EFB11543091D590E7D9D6CED3DBC05A4B9FABE9154BAAD67F7C0CEE4805C483243E25703048836C71D938B707B7E467758AD0F12CF3E291ECA57B220CC8FC6F11AB191C15370E553EBDC156F3AB2 6C53E81D7F96DD6A8797CBE5C5E64A0D06495DF4E03FEA4AD959FE5CB1EA693E1E8908DD81610D38C06C8E5F95A5DF6CD19CD2BCEFA460BE766B43CF7CBF14734145DCA871388B496A9A3F7A02D0B9BA535730390B84325403ECDA389224285CBEEF3AE561A64CACE5FF5D7A22D1529EBC001057B646A9B3BC207148F86E819A87245EAD191F0D0F86B6C555783C81C509CEFA5191725B26C23EDB0331E92D6A2EE987080557E14CD51D0A3DF3837E6B83658BEA2A716B16EAA1341E590F658AF490015EF5BCBB5EA509D42F7E421D207057D5704E3101D84A3C32A5DA904C31A1CF7C62799695F91E3DE1FB24C4302A3B168151F14F76C817E0B4AF847D18B805569CC6B32CE36B200745C8DED4997F8515F2008BE31D1A416873E8C8B74E92F9B90E6F466C4CE12B40AD09A7F81FBFFEEAC96348C3EAD3CBCB60E8C39640E7074AD4902B28C4581EE11FEAFEC978EB34C281C4B7BD7171318CDAA38CC1C0C8848C7288AD3AA6CC50390864C933614D00F5546C700CF2456724992151E397D64EED61FF41BD38B41B79CDB1CB74F783C4EDBB5482DAA1C5F451291F0CBBB79FC0D12BE37369CD894D00E0E005D648073AA5DD955D465C5A2CEBF1E23FFDB233A8DB39BEB3A73C3B9556EBC937FB4721A5EB35E81B55E19A0948331BB5B773DA5CCC9E003CB5AB5F03B823F24769D05E3DD16AE92D05857E1E4E299B1E1A5208F7ECA81CEC6971B2812194C8EEB4D55F375F07191510DC5F0999D10F45C217446D792809D500426D63EB255BDD5C3C628BA466C917E38C221606F1E9CB352B7F26CD0F254CB033E90EEFA3C8FBAC439DB699329DF5CADD1F92C69885900F3B70776F5115540A88702F77EAE6143AF4516DAA68D7A540163631B740F80391C17A3BA67F80916B7E4961E6E1F6430E0AA031EDFB2AC04CAC0865F710B05068CAA74D938C62FC2A7A
5A5B46B8AEDFC7B82555EF0CDE85A61041B1675AE2F36EF5B41277C6DD5E7F43440FF682D2D972D1715A9BA47DC260E187EB0F556C0B1004ECB69630512F02BED16CFD0D066A8927F1DDE8EDA087A7D672C84711A0F50FB8C0303CDB3034FCA419679D55D051E8A719CF1C84FD00F4558EEE5E7FD4FECC71E7E77779DD7735F4C93B928658A588FCB138810528B8FB723834D19110713584E8D67AB7E8ADC49145D2DF35AA7D110FFA79B50BCBBD5C14007384A2548A0B001C881611325FD7648173F1788142F9C0F78F68667B397444DB53D6E6B1F49343DCFFED82EFB734F9454FA3C0BFAA5B0F4A9887C83A4D983F77E27310EB3E1D288252D6779B47EFAE23EFE87995C389D7E98B32A94A4EA4EC8B430DDD4F7578043176C9E4F1E722655EB81A0A1E5DFB69FAB6F2A97F5122CD3ECDCB0319F3230714B9178213C29A5ED0AC302CB4E561D9EABD5D0DEB6996D7549F4D837AFC8688C1 26667E7BD4E1B55A038A2F0018EC664C5BD36D1AC545E3D7248D73D6F5635D2C9F6C24B80B350E4ACEFAA9413F287E5D521E66FDA5D243C159C3D6AF6F0AEC6FDFD54D1DCB6A2BDB6402D09C86537264D5378D387C4FABD81E72155EE0D6C876396D1541C322063D7796D2BA9C8B04555E0FC2A5B36BE27B3504AF14018E9870C8328897B7E4B41ACECEA47CB350547D2DED1536B7F4E7010260BB33296CB4D8FDC6A76B6C97F9B1943E5E72604DF0F1F87CB8484C197DE1679C1468517BEDFEFA42B3723EC168F803426B9F80CD656F156FE7623CCF6194AF6B43253EA4BC8D0BDCF0D51DC896720F36C5F3A4ED709AF5077056A23DE5AE650CD75D66EAC8D70B2B9672105D84C37BB85B2809781038789134103A2AE93A0A80C2F1CEFC359A27A9E01A21B6F81C6AE98DAE20BB06A7E9206B0A0AEF19FC5D929138F01B335142463ED56A1CCD379BEFEE604728BA5D77E90B08BEFE889DB9497B4EA51EEBBD0DB5249983A5600DD05E8F316E69AD3A6401EBD5BCFD2C6DA0C3E611DC422D858F382C235A5A3697B4AC15C608E8BE22EEABE1E754E905D8AA00E0C12E33ABB4BF31F6AE587952F70F5F2888979B100BEC68709926574E5B592A9B6385ADED58D6614CE5A1D67B8AA1AE6915F908232010F3A9403A74DE2E527ED6888D3B709F47B74135BA300AE02B0B371C31FCE7C4DC738ADD021171B5DF8ED4AAE34B7BE970ACCA4035DF3E2739ADB9D7921355B8B8C3131BBB05B41ADEE7053719F6F7B012075B0818D9BDB6E0301FA680F8DFF8EDD04874FBE6E1EE16BEB82C60B14367C84913AAD73F5DD9E752C94C534C31287D7805D7F2A06C4C91FD67C525CCCA44B99EB7E62295652EF7CD394890DEAD7052117C94F1099B0939A82ACD6E9B94CE2542FDFD6B22CED06A2C72F9A2015
28C754E3C87AE82590A8AB458C43C3D6581DED40E9CEE40D2D5BB0C5035693812EA58089F9E10BDC4C4E613728DCE209B440D6CD2C46C8044A8883C630D84BF656CCF4962DBF70A618235F4AA42350138FAEA8AFAE282C2ABC71CE291325650436E32C00E2EB128D551E5651A01F5D646DCBD2FF7B7E1A6FFC1466B2C935D66AC218E0C606E6A230E0346965E5A9D5F95CF1B59 0000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000 cleartomark end %APLT1End /RPMHUL+Helvetica-Oblique /RPMHUL+Helvetica-Oblique findfont dup length dict begin { 1 index /FID ne {def}{pop pop} ifelse } forall FontMatrix [1 0 0.212557 1 0 0] matrix concatmatrix /FontMatrix exch def currentdict end definefont pop %RBIEndFontSubset /RPMHUL+Helvetica-Oblique cguidfix /F2.1/RPMHUL+Helvetica-Oblique renmfont [ /CIEBasedABC 4 dict dup begin /WhitePoint [ 0.9505 1.0000 1.0891 ] def /DecodeABC [ { 1.8008 exp } bind { 1.8008 exp } bind { 1.8008 exp } bind ] def /MatrixABC [ 0.4294 0.2332 0.0202 0.3278 0.6737 0.1105 0.1933 0.0938 0.9580 ] def /RangeLMN [ 0.0 0.9505 0.0 1.0000 0.0 1.0891 ] def end ] /Cs1 exch/ColorSpace dr pop [ /CIEBasedA 5 dict dup begin /WhitePoint [ 0.9505 1.0000 1.0891 ] def /DecodeA { { 1.8008 exp } bind exec} bind def /MatrixA [ 0.9642 1.0000 0.8249 ] def /RangeLMN [ 0.0 2.0000 0.0 2.0000 0.0 2.0000 ] def /DecodeLMN [ { 0.9857 mul} bind { 1.0000 mul} bind { 1.3202 mul} bind ] def end ] /Cs2 exch/ColorSpace dr pop %%EndPageSetup /Cs1 SC q 0 0 244.5 218.00101 rc 93 0 0 93 79.5 86.501007 cm BI /Width 186 /Height 186 /BitsPerComponent 8 /Decode[ 0 1 0 1 0 1 ] /DataSource cg_aiproc ID JcC<$ JcC<$ 
e,P"!JcC<$JcF4!JcC<$ JcC<$ e,P"!JcC<$JcF4!JcC<$ JcC<$ e,QlVJcCaqtU0dq"jmbrquTj !<)fo gAg+2qu6BhrqucmqtgXqu-NlqtgjSRtg iofZ4/gA]e)f`'M"f)+!K eEtb:eA'N1ebdqsfDaG%g&B_)h#?./hu;R5ir8'=kPj`Fm/HGPnc&([ o^i.Urq??drqcWkrqucorr20^ m/R(aqu-NkrqcWhrqHEarpp'XrpKdOroj@Cro3q7rn[S.rn7;%rmh"srmL\jps+H8 lHOn)]Zp5sqp>>jrmUksrmq)%rnIG.rnm_7roO.Crp0ROrpTsZ nc87[o`"Uaq#:-hqtp?k rp9[Os8N#srVl`oqYpibl5caao9B\`q@<% `:1]r`59Ic`W!mVaSs<\b5TWacMl2idf.bqfDaM'h>Z@3jSnBBli-;Q nF-GIrpp']rqHEfrqcWlrr)iq m/QGPrr2oqrql]krqHEbrq$-ZrpKdOroa:Aro!e2rn.5"rmC_jrlb;^rl4rUrkeZMrkJHHpq20i mCRq]^Us9RrkABHrkSNMrl"fUrlG)^rm(Mjrm^r"rnRM2roF(Arp'UQ mJu\Snc&.Zp&=^bqYpBkrVlf_ s6p!err2lqr;QQlq>U-fo`"C[mf)POkl0ZBhu;C0f)F+scMl#da8X$V_#D(I]DfA?\,Ni8[/RE2Z@T:G Z%90mZ2V'.Zi7?2[f3c8\c05?^AbqI`;[gVbPofddf.hsg]$+0jSn?D lg"Q:rpKdUrq-3arqZQirqucorr2Bd n,NCdrr)iorqcWgrq69]rpTjProj@Bro!e1rmq(qrlkA_rkn`Mrk/6?rjDa2rilC(riH+!ri(IH kGNm/\YoG+ri5t!riQ1(rj)O2rj_s?rkJHMrlG)_rm:YqrnIG1ro=+D kQ'iGli-APo)A=]p\t$gr;QZo rpTmUs8N#srVl]nq>U-fo`"C[mJcALjo43;h#>n(dJh>g`W![P]Df8Grg*POrg`t[rhBChri5t$rj;[8rkJHOrlkAirn.5+ !TN)=ir8-?li-APo)A@^q#:-hrVlfb s6p!erVl`oqYp6go`"C[mf)JMjo40:gA]V$cMkoa_#CnDZi7*+VuEIjT)P2XQN!*IOT(7=MuJV5Lk1?H LOk5nL]3,/MZ/P5NW,%=PQ$mIRf8oXU].1jXo>[+\c0DDaSsKaeGe/& hqnFkroO.DrpKdUrq-3arqcWlrr);)h]P[Xdrc%jWrcS3brd4Wore19.rf@&Drg`t_ri5t'rk/6Krlb;h !SQ-+g&Bn.jo4NDmf)eVpAXmequ6Q_ s6p!erVl]nq#9scnc%qTkl0WAh>Z%*d/M,c_>^tDY5Y$"MZ/_:QiDaSsNe f@L#Srn[S6roj@Krpg!\rqQKirr)=re19"rcnE`rbqdOrb2:BraYq9ra::a m9bEO^L-tJraGe9ral(BrbMLOrcA'`rdOj"reg]=rgNh\ri?%+rkABOrltPp eH"/!h#?=4kl0rJnc&1[q#:0i rU9dTs8Murqu6?ho`"C[li-&GiVqR1e,ISj`;[FKZMpd$TDk/UNrFh5JGsinF8g(VCAqiE@fBa6?2e(->(KiD =b0_j=oMV(>Q.q-?iFO6B)ZQEDZ4_VH2`?nL]3>5QN!NUVuF"$]`,kKbQ62A !7Ur$ro*k=rp0RRrq-3arql]n n,MkVrr)imrqHEarp]pQroO.:rn7;!rlY5Wrji$4rh]Uarfd>=re(2trcS3YrbDFDraGe4r`];'r`9"uJQ2#9 JQ0B`r`/qur`K/'ra,S4ral(DrbqdYrd=]treg]=rg`tarilC4rkeZW !RfBrdf.r!hu;a:li-DQo`"UaqYpH^ s6p!erVlZmp\sgan,DYPjo40:f`'7saSs!S\,NN/UAgS[OT(%7J,X]lEW0bQB)Z6QGlE3lL]3D7RJrr[Xo>g/_#DCU 
d*Md>rn.5-roO.FrpTjYrqHEgrr)?& JP<^Mr_EGgr_i_qr`T5+raPk?rbh^Xrd=^!rf-oCrh9=mrjMg@rl>,e ciDGlfDaY+jo4QEn,DqXp\t$g rU9dTs8MurqYp6goD\1WlMflDh>Z")ci1r`^AbM=WrAXiQ2Z^@KDp2rErKnSAc?'9>5hS%;Z9Jk9`@Z`8V'G" 8:a=H8H)3[9)_N`:]=8k&@K's>E;k%[J,Y9'P5_!NW;a.&]`,kN cHZF8rmh#'ro="BrpKdWrqHEgrr)_/r`/qor_3;_r^QlTr^26' m6Z@j^I%oer^?`Tr^m)_r_WSor`];/rb)4JrcS3hreLK8rg`tcrj)O9rl"o` c2c/hec+A'j8S5hP$;#X/f8cD3Y7/fRQ 6@hDk 5_22;5lO+L6iKLQ7fH!Y9`@lf-%OoCmMVuF"$]`,kN cHZF8rmh#'ro="BrpKdWrqHEgrr)iq o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ 
m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 
5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf 
JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 
5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ 
m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 
5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf 
JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 
5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ 
m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 
5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oqrqcWgrq$-WrosFBrnRM'rm(M^rk/69rhoacrfR28rdOihrbh^Jra>_/r`/qor_3;_r^QlTr^6ZO JO&]i JO%(;r^-TOr^?`Tr^m)_r_WSor`];/rb)4JrcS3hreLK8rg`tcrj)O9rl"o` c2c/hec+A'j8S5hP$;#X/f8cD3Y7/fQ) 6L@*j6GGkd6iKLQ7fH!Y9`@lf-%OoCmMVuF"$]`,kN cHZF8rmh#'ro="BrpKdWrqHEgrr)Z")cMkf^]`,8:WW&IfPQ$I=Jc9onEW0_PA,]d5=T27u;#X/f9)_A4 7dZL7lMg5NoD\L`qYpH^ s6p!erVlZmp\sd`n,DVOjo4-9fDa.raSrsR[f3B-U&LAWNW+Y2I/\9fDZ4>K@fB[4=oMG#;Z9Jk:4Z+( 9S#mM9`@fd:]=8krn%/+roO.ErpTjXrqHEgrr),e ciDGlf`'e-jo4TFn,DtYp\t$g rU9dTs8MurqYp6go`"@Zli-#Fhu;=.df.Dg_Z%+FYl:KuSGn`OMZ/8-I/\9fDuOMNAc?-;?N+.-=T2A#5hk-@K'j;CAr/NFoHdfKDpc-PQ%*OV>d^u\c0JFaoTr= !7Ll"rnm_:rp0RQrq-3arqcWm n,MkVrr)inrqHEarpg!RroX4=rn@A#rlkA[rk/69rhoafrg*PDreLK'rd"Karbh^Mrau.=ra5Y1r`fA*JQ_AC JQ]`jr`];*ra#M1raPk=rbDFMrcA'ardb!'rf6uDrh'1frj)O9rl"f[ !RoKte,J)#iVr!=li-GRo`"Uaqu6Q_ s6p!erVl]nq#9pbnc%nSkl0T@h#>k'cMki_^AbP>X8\jmRf8NMMZ/;.If=TkF8g+WC]8#HAc?3=@K'Yc ?gV3O?b]tI@/aU6A,^'=B`;fHDuOhWGlE0kK`6l.PQ%$MUAh4m[Jmo>`r=6a e^XZMrnRM4roj@Jrpg![rqQKirr)ZI6kl0uKnc&4\q#:0i rU9dTs8Murqu6EjpAXX^mf)JMjo40:g&BG!bPoHZ]`,59WrAdmSGnfQO8b%9K`6K#HiA9hFoHF\E;jd, D#QrjMg0rhoahrgWnPrfI,:reLK)rdXoprd+QgrcWj8 l?.Z#]QO3srcnEgrd=]prdt-)reg]:rg!JPrh9=hric=0rkJHQrm(Mm 
!Sc?/g]$.1k5O]Gn,DqXpAXmer;QZ` s6p!erVl`oqYp6go`"@Zm/H5Jir7d5fDa1sbPoEY]Df/9Xo>6tU&LS]QN!'HNrFq8L]2u+K)U?#It<(6 IXus\If=irJc:9#K`6c+MZ/Y8P5^dHSGo5]VuEgtZi7T9_u@jYdJh_u h;&(ero3q@rp9XRrq-3arqcWlrr)Crf-o8reg]2JVWNG JVUmnreUQ2repc8rfI,Crg*PPrgs+arhoatrj2U7rkSNSrm(Mmrn7D1 hZ2R5jo4NDmf)bUo`"UaqYpEl rU9dTs8N#srVlZmq#:!do)A+Wli-)Hir7d5f`'=ubl5T\^](eC[/R9.WrAmpUAgeaSGnrUQN!-JPQ$aEOb&V] OF`M.OT(CAP5^[EPl@!JRJr`UTDkSaVZ*UpYPtp.\c0AC`W"*\df.nuh#ZNp !94"@rp0RPrpp'^rqQKhrr)iq n,MkVrr2oqrql]jrqHEarpg!TrosFDro!e2rmq(rrlb;[rkSNFrjMg4riZ7$rhoakrhBCcrh'1\qj`5, m@/Zr^RP+jrgj%\rh9=crhTOkri?%$rj2U4rk&0Frl+l[rmC_rrnRM2 !TiDCjo4NDmJcYTo`"Uaq>U9jrVlfb s6p!err2lqr;QQlp\sjbo)A+Wli-,IjSn*:g]#e'd/M8gaSs*V^](nF\Gio8ZMq*-Xo>F$WrAuK Vrst/Vn&`)WW&psXT#@$Yl:s-[Jm]8]`,\F_u@aVbl5ugf)FG'i;Vd< ki`$1rp0RPrpp']rqHEgrqucorr2Bd n,NCdrr)iorqcWirqHEarpg!Urp0RIroF(9rnIG'rmLekrlb;]rl"fOrkABDrji$;rjMg5rj2U0J[+LF J[)kmrj)O0rj;[5rjVm;rk&0DrkSNOrl>#]rm(Mkrmq)'rnm_9roaCK lN$8Mmf)bUo`"Uaq>U6ir;QZo rpTmRs8N#srVl]nqYp6gp&=R^nc%tUli-)HjSn*:h#>t*eGdkpcMl#daSs3Y`;[XQ_#D+J^A,?l ]C!4]]>(uQ]`,YE^])%J_Z%LQ`r='YbPofddJhYpf`'\*i;Vg:kPjcJ md:)Crpg!Zrq69brqcWkrr)iq m/QGPrr2oqrquclrqZQfrq69]rp]pTrp0RIroF(:rn[S-rn%/!rmLemrm(MerlY5^rlG)You2L# nAKmr_Sl,drl4rYrlP/^rlkAerm:Ymrm^r!rn@A-ro!e:roj@IrpBgV n,VtWo)A=]p\t!fqYpElrVlf_ s6TdbrqlZnqu6Hkq#:$eo`"F\nG_kTli-,Ijo49=hu;I2g]#n*f)F5!e,Iemd.kn; c0EQ)c+MZ=2ir8'=kl0oImf)\V o'ueOrq-3arqQKhrqlTkrr29a l2Ub^rr)iorql]krqQKdrq-3\rpg!Urp9XKroa:Bro3q8rnm_2rnRM,rn7;'rn%%uqpg8I mF6^:^XW/2qptburn.5'rn@A,rn[S2ro!e8roO.BrosFKrpKdUrpg*^ oDnO_pAXjdqYpBkr;QZo rosIIs8N#prVl`oqYp^ guR7AgpZ#;h>Z:.hu;R5ir8!;jSn9?kl0iGli-;Nn,DkVoD\@_ p\+[^rqQKgrqcWlqu-Nn l2ToGrr2fnrquZirqQKerq69_rpp'YrpTjRrp9XMrosFGroa:CqrRh9pcp&=U_o)A1YnGDYQmJcJLlhfuEl%/'] k^hs.kkjWAlMKoHm/HDLmf)_Tnc&+Yo`"O\p\ssh qYL0erquZlqu5aX j8]#Uqu-NlrqlThrqQBbrq60\rq$-[qsj[TqsT+( hX5s_YjVDWrp]gTrpp'[qt0m\qtL*bqt^?j qZ-Nmr;6Hi ro=%:s82fjrVl`oqtp6hq>U0dp\sm`p%\1So7?Jn nq$A?o(_hOo_\=\pA=U`q#:*dqYpBkquQcp qYgC\qD[f:pX bl31mT)RgN e,P"!JcC<$JcF4!JcC<$ JcC<$ 
e,P"!JcC<$JcF4!JcC<$ JcC<$ e,P"!JcC<$JcF4!J,~> %APLeod EI 93 0 0 93 151.5 14.501007 cm BI /Width 186 /Height 186 /BitsPerComponent 8 /Decode[ 0 1 0 1 0 1 ] /DataSource cg_aiproc ID mm-k?r^HfWr^m)br_`Yrr`];3rb)4MrcS3lreCE:rgEbariQ14rkSNWrm:YurndY:rosFPrq$-arqZHi JcG?A JcC<$ e,Sl%r^?`Sr^d#[r_NMjr`B)&rac"@rc.p[rdt-(rfmDMri#gurk/6GrlkAgrn@A.roX4Erpg!ZrqQKgqu)*G oDaCAJcF4!mm-k?r^HfWr^m)br_`Yrr`];3rb)4MrcS3lreCE:rgEbariQ14rkSNWrm:YurndY:rosFPrq$-arqZHi JcG?A JcC<$ e,Sl%r^?`Sr^d#[r_NMjr`B)&rac"@rc.p[rdt-(rfmDMri#gurk/6GrlkAgrn@A.roX4Erpg!ZrqQKgqu)*G oDaCAJcF4!og&C@r^?`Sr^d#[r_NMjr`B)&rac"@rc.p[rdt-(rfmDMri#gurk/6GrlkAgrn@A.roX4Erpg!ZrqQKg qu)*G oDaCAJcF4!og&C@r^?`Sr^d#\r_NMjr`K/'rac"@rc.p[rdt-(rg!JOri#gurk/6Frlb;ern7;,roO.Crp]pXrqHEe qtu$E oDX=?T)RgN e,Sl)r^ZrYr_*5br_WSlr`];+rau.DrcA'_re(3*rfmDLri#gurk&0Erlb;ern7;,roO.CrpTjVrq??cqtksB oDF1;Zi&b[q#:*h e,Su-r^QlWr^d#_r_EGir_rf#ra,S9rb2:Qrce?oreUQ;rg<\`riQ12rkABRrlkAnrn@A2roO.HrpTjYrq60a Jbo!7 Jbl):qtp0dp&4gf !<1LJ mn!=Kr_NMir`/r!ra,S3rb)4FrcS3cre(3*rg*POri,n"rj_s>rlG)]rmh##ro!e9rp'LLrp]pWqt5O6 oCdb/\b>CaqtU0dq"jmbrquTj !<)fo gAgV5r_rl+lWrmUktrn[S2roX4Crp9XOqsf7. oC@J'[ds4bq"4IVrq?6`rqQKgqtgsYr=T2M'>lJ:5BDu`HDuP(^IK#$$MZ0"BS,T;aX8]U-])KYIa8XHbdf.u"h#?=4jSnBBl20k! 
mdojtm]c-Vn,DhRnc&+YoDA1Zp&"L_q"smg qtgBjrquZlqu5XU mns'_r`&bsra#M0ral(Arb_XRrd4WlreLK2rg*POrhf[orj;[6rk\TPrlkAfrmq)$rndY3ro3q;qr`Oo oB:bh\`iqTqs+(Erp9XOqsOIQrpg!Yrq-3_qtL*c !W)ZkqYpEirVQTV s707j=T2J&>5MY*A,^'=CAr#JF8g=]IK"crM>iJ5R/WQRVuE^q[/RN5^])+LaSs?]df._pf`'V(h#?.,iIU"[ idp+"iqqd8jSS'U6ir;6Hl rosIOAH$3?BDuTDC&VrJD>nSTFT-UcI/\ctLAm23OT(XHRf9/_VZ*duZ2V<5]DfVF_Z%UTaSsE_cMl/hd/2:C eFV1@eA'N1ebdqsfDaG%g&B_)h#?./hu;R5ir8'=kPj`Fm/HGPnc&([ o^i.Urq??drqcWkrqucorr20^ nn7`2rbVRNrbh^RrcJ-_rd=]nre:?,rf-o!!K`6o/N;f">PQ%!LS,8u[X8]4"Z2V'.\,Ni8]DfJ?^AGbG`;[Z* `:Lou`59Ic`W!mVaSs<\b5TWacMl2idf.bqfDaM'h>Z@3jSnBBli-;Q nF-GIrpp']rqHEfrqcWlrr)iq m/QNsqfi$ard"Kjrd4NmreCE-qht?7rg<\Qrh07aqkj7krilC,qm6:2rjMg9rjhp;J\1Nc J\/\/pq6U?rkJHJrkeZQrl4rYrlb;drmC_prn.5*ro!e:roa:H !Uf@Umf)bUoD\F^p\t$gqu6Qn rp9[OK)U>uK`6`*L\lr*NW,.@P5CUCRf8rYT`1bdU\gtgW;`dnX8B(!YPt\U Z0efIZ+R@@ZMq30[/RN5\,Nr;]DfPD_#D:Oa8X9]cMl8kf)FG'hu;^9kQ1#8 !:0XPrpg![rqHEfrqcWlrr)iq n,Ms5reLB*reUQ/qhk90qiCWrf;Vl m>um\^QAGWrf[8ErfmDJrgEbUrh'1arhf[pric=.rji$Crl+l\rmLeurnRV7 iW/!;kPjfHn,DnWpAXjdqYpHm rpTmUW;`dqW;`dnVuERmVuEOlVYd.fTDkM_SGo#WQi<[+\c0DDaSsKaeGe/& hqnFkroO.DrpKdUrq-3arqcWlrr)repc4reCE+qh"ftpjW3jJU[!8 JUY7\rdXourdt-&reLK1rf6u@rg*PRrh9=hriQ1+rk&0Irlb;grmq2, h#Q:1ir80@m/HMRo`"UaqYpEl rU9dT]`,PB])K8>\Gir6[/R?0YPtX&X8]$rV#I"cS,S`QP5^L@N;eV3LAlc'JGsrqHiA9hGQ)dbFoHK6 FmX=&FhDkrG5cacHN&U9j rU9dTbl5fbao9B\`r_4K]DfA?[JmH1X8]'sT`1DZQ2ZjDM>i8/Jc:&rGQ)aaEW0kTC]8)JBn9mZ Aq=R/B)?6?CAr#JDZ4YTFoH[cIf>$"MZ/_:QiDaSsNe f@L#Srn[S6roj@Krpg!\rqQKirr)raPk7JR@\L JR?0!ra>_6raYq=rb2:HrbqdWrcnEkre19.rfd>MrhBCmrjDa>rl4r_ !S,^#ec+A'ir80@m/HPSo`"Xbqu6Q_ s6n\@g&BV&f)F.tdJhAhao99Y_#CtF\,NW2WrA^kRJrELN;eP1JGsinFoH=YCAqlFAH$!9?N+4/>Q.fW =m]@C=he,=>5hb*?2e41@fC!=CAr,MF8gIaJGt?'NrG@DTDkbfZMqN9`;[s] e'eOrh]Uurji$FrlP8i dK%_pg&Bn.jo4TFnG`(Zp\t$g rU9dTk5OKAir7j7h>Z(+ec+"rbl5Z^_#CnDZi7$)UAgS[OoC7;K)U,rFT-.VB`;QA?iF=0=T2;!;uT\o:k;F0 :Ou_0r`K/!r_`PfJP>?& JP<^Mr_EGgr_i_qr`T5+raPk?rbh^Xrd=^!rf-oCrh9=mrjMg@rl>,e ciDGlfDaY+jo4QEn,DqXp\t$g 
rU9dTnc&"Vm/H8Kjo46sAj9`@Z`8V'G" 8:a=H8H)3[9)_N`:]=8k^tDYPt?sRf8EJLAlZ$FoH=YBDu<<>Q.\&;Z9Gj9)_B\7fGf, 6g[6l6bc"f7/f[T8H)6\:B"/j&@K's>E;k%[J,Y9'P5_!NW;a.&]`,kN cHZF8rmh#'ro="BrpKdWrqHEgrr)Kir7^3eGd_l`;[CJZ2UX"SGnZML]2c%FoH:XBDu<<>5hP$;#X/f8cD3Y7/fRQ 6@hDk 5_22;5lO+L6iKLQ7fH!Y9`@lf-%OoCmMVuF"$]`,kN cHZF8rmh#'ro="BrpKdWrqHEgrr)iq o)J:ZrquclrqQKbrp]pSroa:>rnIG$rlkAZrjr*5rhf[arfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d 
s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp 
o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl 
rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg 
o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d 
s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp 
o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl 
rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg 
o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d 
s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp 
o)J:\rr2oprqcWfrpp'Vroj@@rnRM&rltG\rk&07rhoabrfI,7rdOihrb_XHra5Y-r`&kmr_3;^r^QlSr^)0$ m6H4f^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s763hrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg o)J^grquclrq??^rpKdLro3q4rm^qlrl"fJriuI#rgWnKreLK%rcS3Wrau.:r`T5"r_NMer^d#Xr^6ZO JO&Tf JO$t8r^-TOr^QlXr_3;er`&l"ra5Y:rb_XWrdOj%rfI,Krhob#rk&0J !R8jhc2Q5lh#?=4kl1#Lo)A@^qYpEl rpp*[s8N#sr;QQlpAXX^mf)GLir7a4eGd_l`;[CJZ2U[#S,SNKL]2c%FoH7WB)Z0:=oMD":]=&e8H)*X6iKE' 5j^gf5efS`6N0@O7fGsX9`@ie<;p&"?iF[:D>n\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J:\rr2oqrqcWgrq$-WrosFBrnRM'rm(M^rk/69rhoacrfR28rdOihrbh^Jra>_/r`/qor_3;_r^QlTr^6ZO JO&]i JO%(;r^-TOr^?`Tr^m)_r_WSor`];/rb)4JrcS3hreLK8rg`tcrj)O9rl"o` c2c/hec+A'j8S5hP$;#X/f8cD3Y7/fQ) 6L@*j6GGkd6iKLQ7fH!Y9`@lf-%OoCmMVuF"$]`,kN cHZF8rmh#'ro="BrpKdWrqHEgrr)Z")cMkf^]`,8:WW&IfPQ$I=Jc9onEW0_PA,]d5=T27u;#X/f9)_A4 7dZL7lMg5NoD\L`qYpH^ s6p!erVlZmp\sd`n,DVOjo4-9fDa.raSrsR[f3B-U&LAWNW+Y2I/\9fDZ4>K@fB[4=oMG#;Z9Jk:4Z+( 9S#mM9`@fd:]=8krn%/+roO.ErpTjXrqHEgrr),e ciDGlf`'e-jo4TFn,DtYp\t$g rU9dTs8MurqYp6go`"@Zli-#Fhu;=.df.Dg_Z%+FYl:KuSGn`OMZ/8-I/\9fDuOMNAc?-;?N+.-=T2A#5hk-@K'j;CAr/NFoHdfKDpc-PQ%*OV>d^u\c0JFaoTr= !7Ll"rnm_:rp0RQrq-3arqcWm n,MkVrr)inrqHEarpg!RroX4=rn@A#rlkA[rk/69rhoafrg*PDreLK'rd"Karbh^Mrau.=ra5Y1r`fA*JQ_AC JQ]`jr`];*ra#M1raPk=rbDFMrcA'ardb!'rf6uDrh'1frj)O9rl"f[ !RoKte,J)#iVr!=li-GRo`"Uaqu6Q_ s6p!erVl]nq#9pbnc%nSkl0T@h#>k'cMki_^AbP>X8\jmRf8NMMZ/;.If=TkF8g+WC]8#HAc?3=@K'Yc ?gV3O?b]tI@/aU6A,^'=B`;fHDuOhWGlE0kK`6l.PQ%$MUAh4m[Jmo>`r=6a e^XZMrnRM4roj@Jrpg![rqQKirr)ZI6kl0uKnc&4\q#:0i rU9dTs8Murqu6EjpAXX^mf)JMjo40:g&BG!bPoHZ]`,59WrAdmSGnfQO8b%9K`6K#HiA9hFoHF\E;jd, D#QrjMg0rhoahrgWnPrfI,:reLK)rdXoprd+QgrcWj8 l?.Z#]QO3srcnEgrd=]prdt-)reg]:rg!JPrh9=hric=0rkJHQrm(Mm !Sc?/g]$.1k5O]Gn,DqXpAXmer;QZ` s6p!erVl`oqYp6go`"@Zm/H5Jir7d5fDa1sbPoEY]Df/9Xo>6tU&LS]QN!'HNrFq8L]2u+K)U?#It<(6 
IXus\If=irJc:9#K`6c+MZ/Y8P5^dHSGo5]VuEgtZi7T9_u@jYdJh_u h;&(ero3q@rp9XRrq-3arqcWlrr)Crf-o8reg]2JVWNG JVUmnreUQ2repc8rfI,Crg*PPrgs+arhoatrj2U7rkSNSrm(Mmrn7D1 hZ2R5jo4NDmf)bUo`"UaqYpEl rU9dTs8N#srVlZmq#:!do)A+Wli-)Hir7d5f`'=ubl5T\^](eC[/R9.WrAmpUAgeaSGnrUQN!-JPQ$aEOb&V] OF`M.OT(CAP5^[EPl@!JRJr`UTDkSaVZ*UpYPtp.\c0AC`W"*\df.nuh#ZNp !94"@rp0RPrpp'^rqQKhrr)iq n,MkVrr2oqrql]jrqHEarpg!TrosFDro!e2rmq(rrlb;[rkSNFrjMg4riZ7$rhoakrhBCcrh'1\qj`5, m@/Zr^RP+jrgj%\rh9=crhTOkri?%$rj2U4rk&0Frl+l[rmC_rrnRM2 !TiDCjo4NDmJcYTo`"Uaq>U9jrVlfb s6p!err2lqr;QQlp\sjbo)A+Wli-,IjSn*:g]#e'd/M8gaSs*V^](nF\Gio8ZMq*-Xo>F$WrAuK Vrst/Vn&`)WW&psXT#@$Yl:s-[Jm]8]`,\F_u@aVbl5ugf)FG'i;Vd< ki`$1rp0RPrpp']rqHEgrqucorr2Bd n,NCdrr)iorqcWirqHEarpg!Urp0RIroF(9rnIG'rmLekrlb;]rl"fOrkABDrji$;rjMg5rj2U0J[+LF J[)kmrj)O0rj;[5rjVm;rk&0DrkSNOrl>#]rm(Mkrmq)'rnm_9roaCK lN$8Mmf)bUo`"Uaq>U6ir;QZo rpTmRs8N#srVl]nqYp6gp&=R^nc%tUli-)HjSn*:h#>t*eGdkpcMl#daSs3Y`;[XQ_#D+J^A,?l ]C!4]]>(uQ]`,YE^])%J_Z%LQ`r='YbPofddJhYpf`'\*i;Vg:kPjcJ md:)Crpg!Zrq69brqcWkrr)iq m/QGPrr2oqrquclrqZQfrq69]rp]pTrp0RIroF(:rn[S-rn%/!rmLemrm(MerlY5^rlG)You2L# nAKmr_Sl,drl4rYrlP/^rlkAerm:Ymrm^r!rn@A-ro!e:roj@IrpBgV n,VtWo)A=]p\t!fqYpElrVlf_ s6TdbrqlZnqu6Hkq#:$eo`"F\nG_kTli-,Ijo49=hu;I2g]#n*f)F5!e,Iemd.kn; c0EQ)c+MZ=2ir8'=kl0oImf)\V o'ueOrq-3arqQKhrqlTkrr29a l2Ub^rr)iorql]krqQKdrq-3\rpg!Urp9XKroa:Bro3q8rnm_2rnRM,rn7;'rn%%uqpg8I mF6^:^XW/2qptburn.5'rn@A,rn[S2ro!e8roO.BrosFKrpKdUrpg*^ oDnO_pAXjdqYpBkr;QZo rosIIs8N#prVl`oqYp^ guR7AgpZ#;h>Z:.hu;R5ir8!;jSn9?kl0iGli-;Nn,DkVoD\@_ p\+[^rqQKgrqcWlqu-Nn l2ToGrr2fnrquZirqQKerq69_rpp'YrpTjRrp9XMrosFGroa:CqrRh9pcp&=U_o)A1YnGDYQmJcJLlhfuEl%/'] k^hs.kkjWAlMKoHm/HDLmf)_Tnc&+Yo`"O\p\ssh qYL0erquZlqu5aX j8]#Uqu-NlrqlThrqQBbrq60\rq$-[qsj[TqsT+( hX5s_YjVDWrp]gTrpp'[qt0m\qtL*bqt^?j qZ-Nmr;6Hi ro=%:s82fjrVl`oqtp6hq>U0dp\sm`p%\1So7?Jn nq$A?o(_hOo_\=\pA=U`q#:*dqYpBkquQcp qYgC\qD[f:pX bl31mT)RgN e,P"!JcC<$JcF4!JcC<$ JcC<$ e,P"!JcC<$JcF4!JcC<$ JcC<$ e,P"!JcC<$JcF4!J,~> %APLeod EI 93 0 0 93 151.5 86.501007 cm BI /Width 186 /Height 186 
/BitsPerComponent 8 /Decode[ 0 1 0 1 0 1 ] /DataSource cg_aiproc ID JcC<$ JcC<$ e,P"!JcC<$JcF4!JcC<$ JcC<$ e,P"!JcC<$JcF4!j8J$;JcEF` JcC<$ e,SJ,j8SWLJc iV\9)Zi&b[q#:*h e,SJ'o(i:[rqZ?arqcWiqtg!]Jbmpn Jbl):qtp0dp&4gf !<1LJ j7E0?rq60\rq$-^rq?-Zrq#pXrq-3`q"=OXqtG[8 kOsK#\b>CaqtU0dq"jmbrquTj !<)fo gAg4'rpp'YrpojSrpp'YqssXSrp]pWrpp'Yrq#^Orpt^3 jRRlm[ds4bq"4IVrq?6`rqQKgqtgpKj !;lcnqu-Ek j8\T1p#l57roj%e)h>Z=2h>Z=,hY#e*iV;@1iVqd7iqVJ` iof(pt>Grrn72'qq:u(qqM,(J_]GD J_[]hrn[S1qqqD2ro3q;roF(?roj@Grp0RNrpTjVrq$-] !VcU6ir;6Hl rosIOd/25id/MAjd/MAgdf.VmchPoed/MJmdJhShdJMAee,IdH eD\o.eA'N1ebdqsfDaG%g&B_)h#?./hu;R5ir8'=kPj`Fm/HGPnc&([ o^i.Urq??drqcWkrqucorr20^ n'(dUqoo&aqoeu`prr`^qoo&^rlb2]rlP/arlk8^qonr\qoo&arlfr< mEL4,]Zp5sqp>>jrmUksrmq)%rnIG.rnm_7roO.Crp0ROrpTsZ nc87[o`"Uaq#:-hqtp?k rp9[R_#D1L_#D1L_#(tF_#D4M_>_=N_#D4M_#D1L_#D1F_#D4M_>_@F_Z%CK_u@W+ `:1]r`59Ic`W!mVaSs<\b5TWacMl2idf.bqfDaM'h>Z@3jSnBBli-;Q nF-GIrpp']rqHEfrqcWlrr)iq m/QOerk8!;rjr*Ark&0Crk&0Crk&0Brk&0Bqmud;rjr*=rk&'=pppC8rjhp;J\1<] J\/\/pq6U?rkJHJrkeZQrl4rYrlb;drmC_prn.5*ro!e:roa:H !Uf@Umf)bUoD\F^p\t$gqu6Qn rp9[RXnAjlYPt['Z2V!,Z2V!,YPt^(YPt['Yl:j*YkYF$Yl:g&Z2:a(Yl:hW Z0efIZ+R@@ZMq30[/RN5\,Nr;]DfPD_#D:Oa8X9]cMl8kf)FG'hu;^9kQ1#8 !:0XPrpg![rqHEfrqcWlrr)iq n,MjTri5ssri5Ogpo==lri5sqri#gori#gnrhoamrhoXjrh]Cepn`P> mAGN5\YoG+ri5t!riQ1(rj)O2rj_s?rkJHMrlG)_rm:YqrnIG1ro=+D kQ'iGli-APo)A=]p\t$gr;QZo rpTmRRJrTNRJWHPSGo#WSGo#WSc5/YSc5,XSc5,XSGnuVSc5,XSGScPRJWHPR/<>' S*c\rS%kHiSGo)YTDkP`UAgtfVuEarY5Yd,[f3o<_#D=PbPoogf)FJ(huVs$ !9O4ErpB^Trq-3arqZQjrr)iq n,MO7rfd>Irfm;Grg3VLrg3VLrg3VKrg*PJrg!JHrg!JGrfd>Drf[8BrfR2?rf@&=qi?;i m>um\^QAGWrf[8ErfmDJrgEbUrh'1arhf[pric=.rji$Crl+l\rmLeurnRV7 iW/!;kPjfHn,DnWpAXjdqYpHm rpTmXLAlu-L&Qi+L]3#)M#N;2M>iJ5MZ/V7MuJ_8MuJ\7M>iJ5M>iG4MZ/J3L]3,/LAlu-L%pE%Kn5$H LOk5nL]3,/MZ/P5NW,%=PQ$mIRf8oXU].1jXo>[+\c0DDaSsKaeGe/& hqnFkroO.DrpKdUrq-3arqcWlrr)U9j rU9dWBDuTABDuZ@C&VuKD#SDQDuOhWEW1(ZErL.ZE;jqXE;jkSDYn5HB_Z3:AG]qh B(KVcB"qsTBDuZFC]85NEW1+[H2`9lK`6l.OT(UGT)PVdY5Ys1_>_OTciMbM !8%5+ro="ArpB^Trq69crql]n 
n,Mieqdob=ral(Arau%Brb_XNrc%jSrc.pTrbqdPrbq[Mqe?%Arakt:raGe6ra5>)JRA"U JR?0!ra>_6raYq=rb2:HrbqdWrcnEkre19.rfd>MrhBCmrjDa>rl4r_ !S,^#ec+A'ir80@m/HPSo`"Xbqu6Q_ s6j.g>Q.n,?2e:3@/a^9A,^*>AH$?CB`;fHB`;cGB`;cGBDuTAAH$$:@/F7,>5h[t=FjWH =b0_j=oMV(>Q.q-?iFO6B)ZQEDZ4_VH2`?nL]3>5QN!NUVuF"$]`,kKbQ62A !7Ur$ro*k=rp0RRrq-3arql]n n,Mr[pfRN"r`]2*qdBD6raYq>ral(Brau.Crac"@raYqOrh]Uurji$FrlP8i dK%_pg&Bn.jo4TFnG`(Zp\t$g rU9dT;>X8gQGlE3lL]3D7RJrr[Xo>g/_#DCU d*Md>rn.5-roO.FrpTjYrqHEgrr)V/r`];(r`9"ur_i_mqbI,`r_3;`qb)K6 o1X^/]M/<"r_NMkr`/r#ra#M4rb2:KrcS3fre:?2rg*PWriH+-rkJHR !RT0nd/MYrhYuX9lMg8OoD\L`qYpH^ s6iPP9`%]a<;Tht>Q.h*?iF=0?iF=0?Mdt(=8Pnp:]=,g9E%Q\8GG]+ 8FoB'8A@^p8cD?]9`@ie;Z9_r>5hn.Ac?NFErLCaKDpf.Q2[ETWrBC)^Ac+Q cd)U;rmq))roF(DrpKdWrqHEgrr)sSp<;p)#=T2V*>5hb*=T2P%=8l5!<;o\m:B!oc9)_?[7f,UK6@hMq 6\.V>6iKLQ7fGsX9)_Tb;Z9_r>Q/(1B`;uMG5d!jM#NS:SGoJdZi7W:`<"90 !7(SornRM5rosFMrq$-`rqcWm n,MiEqaUQUr^luZr_WSir`/qsr`K/$r`oG*r`fA'r`]2"r_reor_EGer_!/\qaLBKpd'Ht o0S!p^I%oer^?`Tr^m)_r_WSor`];/rb)4JrcS3hreLK8rg`tcrj)O9rl"o` c2c/hec+A'j8S$5hq/BDufJFoHjhL]3G8SGoGcZMqN9`<"90 !7(SornRM5rosFMrq$-`rqcWmrr2Kg og&LCr^$EKqaLBRqb@&gr_i_rr`/r$r`B)%r`9##r`/qur_i_mr_5hV&=oMJ$=T2>"s;h9`@W_8,bsV7/fOP62j.H5Cl)k 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg nj*1=qa:6LqapZZr_reor`9"ur`];&r`T5$r`K/"r`9"rr_`Yhr_3;_r^ZrVr^?`Pr^$NKq`fWs o0@jl^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s7/GS5PmhF7Jf[R:&\)j;Z9_rn\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J8Gr]g9IqaLBRqb@&gr_i_rr`/r$r`B)%r`9##r`/qur_i_mr_5hV&=oMJ$=T2>"s;h9`@W_8,bsV7/fOP62j.H5Cl)k 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg nj*1=qa:6LqapZZr_reor`9"ur`];&r`T5$r`K/"r`9"rr_`Yhr_3;_r^ZrVr^?`Pr^$NKq`fWs o0@jl^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s7/GS5PmhF7Jf[R:&\)j;Z9_rn\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp 
o)J8Gr]g9IqaLBRqb@&gr_i_rr`/r$r`B)%r`9##r`/qur_i_mr_5hV&=oMJ$=T2>"s;h9`@W_8,bsV7/fOP62j.H5Cl)k 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg nj*1=qa:6LqapZZr_reor`9"ur`];&r`T5$r`K/"r`9"rr_`Yhr_3;_r^ZrVr^?`Pr^$NKq`fWs o0@jl^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s7/GS5PmhF7Jf[R:&\)j;Z9_rn\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J8Gr]g9IqaLBRqb@&gr_i_rr`/r$r`B)%r`9##r`/qur_i_mr_5hV&=oMJ$=T2>"s;h9`@W_8,bsV7/fOP62j.H5Cl)k 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg nj*1=qa:6LqapZZr_reor`9"ur`];&r`T5$r`K/"r`9"rr_`Yhr_3;_r^ZrVr^?`Pr^$NKq`fWs o0@jl^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s7/GS5PmhF7Jf[R:&\)j;Z9_rn\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J8Gr]g9IqaLBRqb@&gr_i_rr`/r$r`B)%r`9##r`/qur_i_mr_5hV&=oMJ$=T2>"s;h9`@W_8,bsV7/fOP62j.H5Cl)k 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg nj*1=qa:6LqapZZr_reor`9"ur`];&r`T5$r`K/"r`9"rr_`Yhr_3;_r^ZrVr^?`Pr^$NKq`fWs o0@jl^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s7/GS5PmhF7Jf[R:&\)j;Z9_rn\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J8Gr]g9IqaLBRqb@&gr_i_rr`/r$r`B)%r`9##r`/qur_i_mr_5hV&=oMJ$=T2>"s;h9`@W_8,bsV7/fOP62j.H5Cl)k 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg nj*1=qa:6LqapZZr_reor`9"ur`];&r`T5$r`K/"r`9"rr_`Yhr_3;_r^ZrVr^?`Pr^$NKq`fWs o0@jl^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s7/GS5PmhF7Jf[R:&\)j;Z9_rn\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J8Gr]g9IqaLBRqb@&gr_i_rr`/r$r`B)%r`9##r`/qur_i_mr_5hV&=oMJ$=T2>"s;h9`@W_8,bsV7/fOP62j.H5Cl)k 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg 
nj*1=qa:6LqapZZr_reor`9"ur`];&r`T5$r`K/"r`9"rr_`Yhr_3;_r^ZrVr^?`Pr^$NKq`fWs o0@jl^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s7/GS5PmhF7Jf[R:&\)j;Z9_rn\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J8Gr]g9IqaLBRqb@&gr_i_rr`/r$r`B)%r`9##r`/qur_i_mr_5hV&=oMJ$=T2>"s;h9`@W_8,bsV7/fOP62j.H5Cl)k 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg nj*1=qa:6LqapZZr_reor`9"ur`];&r`T5$r`K/"r`9"rr_`Yhr_3;_r^ZrVr^?`Pr^$NKq`fWs o0@jl^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s7/GS5PmhF7Jf[R:&\)j;Z9_rn\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J8Gr]g9IqaLBRqb@&gr_i_rr`/r$r`B)%r`9##r`/qur_i_mr_5hV&=oMJ$=T2>"s;h9`@W_8,bsV7/fOP62j.H5Cl)k 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg nj*1=qa:6LqapZZr_reor`9"ur`];&r`T5$r`K/"r`9"rr_`Yhr_3;_r^ZrVr^?`Pr^$NKq`fWs o0@jl^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s7/GS5PmhF7Jf[R:&\)j;Z9_rn\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J8Gr]g9IqaLBRqb@&gr_i_rr`/r$r`B)%r`9##r`/qur_i_mr_5hV&=oMJ$=T2>"s;h9`@W_8,bsV7/fOP62j.H5Cl)k 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg nj*1=qa:6LqapZZr_reor`9"ur`];&r`T5$r`K/"r`9"rr_`Yhr_3;_r^ZrVr^?`Pr^$NKq`fWs o0@jl^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s7/GS5PmhF7Jf[R:&\)j;Z9_rn\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J8Gr]g9IqaLBRqb@&gr_i_rr`/r$r`B)%r`9##r`/qur_i_mr_5hV&=oMJ$=T2>"s;h9`@W_8,bsV7/fOP62j.H5Cl)k 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg nj*1=qa:6LqapZZr_reor`9"ur`];&r`T5$r`K/"r`9"rr_`Yhr_3;_r^ZrVr^?`Pr^$NKq`fWs o0@jl^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s7/GS5PmhF7Jf[R:&\)j;Z9_rn\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp 
o)J8Gr]g9IqaLBRqb@&gr_i_rr`/r$r`B)%r`9##r`/qur_i_mr_5hV&=oMJ$=T2>"s;h9`@W_8,bsV7/fOP62j.H5Cl)k 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg nj*1=qa:6LqapZZr_reor`9"ur`];&r`T5$r`K/"r`9"rr_`Yhr_3;_r^ZrVr^?`Pr^$NKq`fWs o0@jl^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s7/GS5PmhF7Jf[R:&\)j;Z9_rn\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J8Gr]g9IqaLBRqb@&gr_i_rr`/r$r`B)%r`9##r`/qur_i_mr_5hV&=oMJ$=T2>"s;h9`@W_8,bsV7/fOP62j.H5Cl)k 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg nj*1=qa:6LqapZZr_reor`9"ur`];&r`T5$r`K/"r`9"rr_`Yhr_3;_r^ZrVr^?`Pr^$NKq`fWs o0@jl^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s7/GS5PmhF7Jf[R:&\)j;Z9_rn\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J8Gr]g9IqaLBRqb@&gr_i_rr`/r$r`B)%r`9##r`/qur_i_mr_5hV&=oMJ$=T2>"s;h9`@W_8,bsV7/fOP62j.H5Cl)k 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg nj*1=qa:6LqapZZr_reor`9"ur`];&r`T5$r`K/"r`9"rr_`Yhr_3;_r^ZrVr^?`Pr^$NKq`fWs o0@jl^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s7/GS5PmhF7Jf[R:&\)j;Z9_rn\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J8Gr]g9IqaLBRqb@&gr_i_rr`/r$r`B)%r`9##r`/qur_i_mr_5hV&=oMJ$=T2>"s;h9`@W_8,bsV7/fOP62j.H5Cl)k 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg nj*1=qa:6LqapZZr_reor`9"ur`];&r`T5$r`K/"r`9"rr_`Yhr_3;_r^ZrVr^?`Pr^$NKq`fWs o0@jl^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s7/GS5PmhF7Jf[R:&\)j;Z9_rn\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J8Gr]g9IqaLBRqb@&gr_i_rr`/r$r`B)%r`9##r`/qur_i_mr_5hV&=oMJ$=T2>"s;h9`@W_8,bsV7/fOP62j.H5Cl)k 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg 
nj*1=qa:6LqapZZr_reor`9"ur`];&r`T5$r`K/"r`9"rr_`Yhr_3;_r^ZrVr^?`Pr^$NKq`fWs o0@jl^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s7/GS5PmhF7Jf[R:&\)j;Z9_rn\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J8Gr]g9IqaLBRqb@&gr_i_rr`/r$r`B)%r`9##r`/qur_i_mr_5hV&=oMJ$=T2>"s;h9`@W_8,bsV7/fOP62j.H5Cl)k 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg nj*1=qa:6LqapZZr_reor`9"ur`];&r`T5$r`K/"r`9"rr_`Yhr_3;_r^ZrVr^?`Pr^$NKq`fWs o0@jl^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s7/GS5PmhF7Jf[R:&\)j;Z9_rn\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J8Gr]g9IqaLBRqb@&gr_i_rr`/r$r`B)%r`9##r`/qur_i_mr_5hV&=oMJ$=T2>"s;h9`@W_8,bsV7/fOP62j.H5Cl)k 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg nj*1=qa:6LqapZZr_reor`9"ur`];&r`T5$r`K/"r`9"rr_`Yhr_3;_r^ZrVr^?`Pr^$NKq`fWs o0@jl^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s7/GS5PmhF7Jf[R:&\)j;Z9_rn\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J8Gr]g9IqaLBRqb@&gr_i_rr`/r$r`B)%r`9##r`/qur_i_mr_5hV&=oMJ$=T2>"s;h9`@W_8,bsV7/fOP62j.H5Cl)k 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg nj*1=qa:6LqapZZr_reor`9"ur`];&r`T5$r`K/"r`9"rr_`Yhr_3;_r^ZrVr^?`Pr^$NKq`fWs o0@jl^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s7/GS5PmhF7Jf[R:&\)j;Z9_rn\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J8Gr]g9IqaLBRqb@&gr_i_rr`/r$r`B)%r`9##r`/qur_i_mr_5hV&=oMJ$=T2>"s;h9`@W_8,bsV7/fOP62j.H5Cl)k 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg nj*1=qa:6LqapZZr_reor`9"ur`];&r`T5$r`K/"r`9"rr_`Yhr_3;_r^ZrVr^?`Pr^$NKq`fWs o0@jl^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s7/GS5PmhF7Jf[R:&\)j;Z9_rn\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp 
o)J8Gr]g9IqaLBRqb@&gr_i_rr`/r$r`B)%r`9##r`/qur_i_mr_5hV&=oMJ$=T2>"s;h9`@W_8,bsV7/fOP62j.H5Cl)k 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg nj*1=qa:6LqapZZr_reor`9"ur`];&r`T5$r`K/"r`9"rr_`Yhr_3;_r^ZrVr^?`Pr^$NKq`fWs o0@jl^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s7/GS5PmhF7Jf[R:&\)j;Z9_rn\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J8Gr]g9IqaLBRqb@&gr_i_rr`/r$r`B)%r`9##r`/qur_i_mr_5hV&=oMJ$=T2>"s;h9`@W_8,bsV7/fOP62j.H5Cl)k 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg nj*1=qa:6LqapZZr_reor`9"ur`];&r`T5$r`K/"r`9"rr_`Yhr_3;_r^ZrVr^?`Pr^$NKq`fWs o0@jl^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s7/GS5PmhF7Jf[R:&\)j;Z9_rn\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J8Gr]g9IqaLBRqb@&gr_i_rr`/r$r`B)%r`9##r`/qur_i_mr_5hV&=oMJ$=T2>"s;h9`@W_8,bsV7/fOP62j.H5Cl)k 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg nj*1=qa:6LqapZZr_reor`9"ur`];&r`T5$r`K/"r`9"rr_`Yhr_3;_r^ZrVr^?`Pr^$NKq`fWs o0@jl^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s7/GS5PmhF7Jf[R:&\)j;Z9_rn\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J8Gr]g9IqaLBRqb@&gr_i_rr`/r$r`B)%r`9##r`/qur_i_mr_5hV&=oMJ$=T2>"s;h9`@W_8,bsV7/fOP62j.H5Cl)k 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg nj*1=qa:6LqapZZr_reor`9"ur`];&r`T5$r`K/"r`9"rr_`Yhr_3;_r^ZrVr^?`Pr^$NKq`fWs o0@jl^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s7/GS5PmhF7Jf[R:&\)j;Z9_rn\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J8Gr]g9IqaLBRqb@&gr_i_rr`/r$r`B)%r`9##r`/qur_i_mr_5hV&=oMJ$=T2>"s;h9`@W_8,bsV7/fOP62j.H5Cl)k 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg 
nj*1=qa:6LqapZZr_reor`9"ur`];&r`T5$r`K/"r`9"rr_`Yhr_3;_r^ZrVr^?`Pr^$NKq`fWs o0@jl^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s7/GS5PmhF7Jf[R:&\)j;Z9_rn\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J8Gr]g9IqaLBRqb@&gr_i_rr`/r$r`B)%r`9##r`/qur_i_mr_5hV&=oMJ$=T2>"s;h9`@W_8,bsV7/fOP62j.H5Cl)k 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg nj*1=qa:6LqapZZr_reor`9"ur`];&r`T5$r`K/"r`9"rr_`Yhr_3;_r^ZrVr^?`Pr^$NKq`fWs o0@jl^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s7/GS5PmhF7Jf[R:&\)j;Z9_rn\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J8Gr]g9IqaLBRqb@&gr_i_rr`/r$r`B)%r`9##r`/qur_i_mr_5hV&=oMJ$=T2>"s;h9`@W_8,bsV7/fOP62j.H5Cl)k 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg nj*1=qa:6LqapZZr_reor`9"ur`];&r`T5$r`K/"r`9"rr_`Yhr_3;_r^ZrVr^?`Pr^$NKq`fWs o0@jl^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s7/GS5PmhF7Jf[R:&\)j;Z9_rn\WIf>-%OT(aKVuEt#]Df_L c-675rm^r&ro3q@rpKdVrq??frqucp o)J8Gr]g9IqaLBRqb@&gr_i_rr`/r$r`B)%r`9##r`/qur_i_mr_5hV&=oMJ$=T2>"s;h9`@W_8,bsV7/fOP62j.H5Cl)k 5_2285lO+L6iKRS8H)<^:]=>m=oMe-B)ZZHFoHjhL]3D7S,T>bZ2VB7_u\-- !6tMmrnRM4roj@Lrpp'^rqcWlrr2Kg nj*1=qa:6LqapZZr_reor`9"ur`];&r`T5$r`K/"r`9"rr_`Yhr_3;_r^ZrVr^?`Pr^$NKq`fWs o0@jl^Hhcbr^6ZSr^d#^r_NMmr`T5-rau.HrcS3hreLK7rgWnbriuI7rkni^ blH#feGe8&ir80@mf)eVpAXpfr;Q]d s7JYV6N0:J6i0=L8c)6];uT\o5hV&=oMJ$=T2;!5hV&=oMJ$=T2;!d^u])KSJ c-675rm^r&ro="BrpKdWrqHEgrr)nYVI/\m"NrGFFV>d[t\c0DG bfg(2rm^r%ro*k?rpB^VrqHEgrr)lJ.1?2e10>Q.h*=8l>$<;ohq:]=,g9E%Q_8H)-V7K,]+ 7I5ht0B)ZZHFT-^fL&R/4Rf92`Z2V<5_?%j' !6bAirnIG3roj@Krpp'^rqcWm n,MiRqbm;jr`B)"r`T5(r`oG+ra5Y1ra>V0r`fA-r`K/&r`9"ur_i_mr_EGer_!/]qagKOJOf<% JOdIFr^d#\r_3;cr_WSnr`T5+raYq@rbqdZrdOj%rfR2Jrhf\!rji$F !QrObb5Tigh#?=4l2L)LoD\I_qYpH^ s707d=T2M'=o2J'?N+70@K'[6A,]p9@fBa6@K'R3?iF7.>Q.b(X,f9`@]^8bbi- 
8b5N)8\[jr9)_K_9`@ie;Z9\q=oM_+A,^0@E;k(\If>*$OT(^JV>dXs])KPI bKBn/rmUl#ro!e>rpB^Urq69err)_8raGe:raYq=raGe8ra5Y4r`oG.r`];'r`/qsqbd5cr__:rbMLRrd"KnreUQ9rgNh`rilC5rk\][ bQ,ldeGe2$i;Vs>mf)eVp&=jf rU9dNAH$0>AH$0>A,Bm;B`;WCC&VcEB_uB<@K'X5?N+.-=oMJ$sAg:B!ue9S#q* 9n?$R:&[oe:]=5j;Z9_r=T2V*@K'j;C]8ARGlE9nL]3J9Rf92`YPu*3_#_^$ !6Y;hrn@A1roa:Irpp'^rqcWl n,Mrkrb21@rb;@Fqe,e?rbDFGrbVIGqe,e:qdKJ1ra#M+r`K/#r`/hor_iViJPkf3 JPj0ZqbmDnr`9#$r`oG1rac"BrbqdYrd4Wsreg]=rg`tcrilC6rkec[ b5f`bdJho"iVr$>m/HVUpAXpf r9s[PEW0qVEW0qJEVj_PD>n>MCAVWBA,]s7?iF:/=o28!<;ohn;Z9Mi;1VX: ;Lq`Y;Z9Yp5hh,?iFR7CAr/NFT-UcK)UW+P5^pLVZ*du\Gj8E bKBn/rmC`"rnm_;rp0RSrq69drql0a l#luIrcA']rcA']rcA'\rcA'Yrc%jVrb_XNqe?%Aral(_8rb2:Irc.p^rdFd"repc@rgj%drilC5rkJQZ blH#fdf/##i;Vp=m/HPSp&=ge qsXRRK):&sJGt#sJGt&tIJ\EhH2DdaErL"VD#S2KBDuNBA,]p9?iF:/>5h\"=8l8"<.S': 5hh,?iFR7B)ZWGE;k(\I/\ctM>i\;Rf92`YPu*3^]DR! !6P5ern@A/roj@Krpg!\rqcWk n,Ms5reLK*qh4s"re19&rdk&urdaurrd=]krcnEbrcA'Wrb_XMrb2:Crac";ra>_3ra#D*r`];(JQV2> JQTZhr`];*ra#M0ra>_8rb)4FrbqdWrd+QlreLK3rg!JRri#h#rjr*F !QrObb5TiggA^%0kl0rJnc&4\qYpB\ s6l3LP5^OANrG+:N;e_6M#N,-KDpE#IK"KjG5cO]DuOSPC&V`DAH$$:?iFC2>Q.e)=T2BQ $=oMY)?N+@3Ac?BBD#SDQGQ*!hK`6i-Pl@0OVZ*^s])KJG b/s_,rmC_urnm_:rp0RQrq$-crql0a n!X0Arg!AFrfd5Arf6u9reg]1re19%rdFckrc\9\rbqdPrbDFFral(V0r`fA)r`XkU lU9[ s6locVu*@jU]-qcT)P5YRJrNOPQ$XBN;eV3K)U/sHN&'dEW0kTC]7uGAc?-;?i+.+=o2;"=FjQC =+OGi=8Q,!=oMY)>lJ.1@K'mDreg]0rdk&srd"Kdrc.pTrb;@Frac";ra>_4r`oG,r`];(JQhGE JQf]ir`fA.ra5Y6rac"ArbMLQrcA'drdXp(rf6uErgj%gric=6rkSWZ bQ,lddf.r!hYu^;lMgARp&=dd qsXRX]_f>?])K5=[f3]6Yl:g)X8]'sU]-qcRf8ZQOoC@>K`6N$H2`!dE;j_RC&VcE@fBg5>lIn'=8l6O $=oMY)?iFI4Ac??ADZ4YTGlE*iLAm)0Q2[rb;@NrcA'brdOj%rf-oDrgj%grilC8rkec] blH#fdJhr#hu;j=li-JSp&=ddqtpBc s7OV5bPoT^a8/IK"NkF8g1YB`;WC@K'X5>5MG$=8l1u;Z9UG ;XdS8;Sl?/;uTbn=T2P(>lJ43AH$EEDZ4n[I/\j!MZ/n?S,TDdYPu05^]DX% !6bAfrn@A-roa:Grpp'\rqQKhqu69g p!ET^qoo&\rl+lQrkJHFrj_s3riZ6trhTO^rg!JAreUQ(rd=]erc.pPrb2:@ra>_0r`];'r`B)!r`"GI m8JR7^Jk,3r`8nur`oG/raYq>rb_XRrd"KjreCE2rg3VSri5t$rk8U6f 
rq6_1J\Gio8XT#-sSc5&VNrFq8J,XipF8g.XC&V`D?iFC2>5hV&<;obo;#X7A :$kZ+9tsF%:]=2i;Z9Yp=oMY)@K'd9CAr&KG5cgeK`6i-QN!BQWW'("]DfSH c-675rmLf!rn[S:rp'LQrq-3crqcNk p&F^Aqq_/'rmq(urm(Merl"fOrk/6=ric=%rh07\rfI,_3r`T5%r_reor_NMgJPGN+ JPEmRr_EGgr_reqr`T5)ra>_8rbDFMrc\9fre:?.rg*PQri5t%rk/6H !R8jhc2Q/jg&Bk-kl0lHo)A=]q>U6f rq6ir7p9h>Z4/eGdnqbl5]__#D"GZMq$+UAgb`P5^F>KDpB"F8g1YBDuH@?N+4/krg`tdriZ76rkSWZ bQ,ldd/Mi"hu;j=li-MTp&=ddqYU9b s7PpZnG_eRli-&GjSn'9g]#_%d/M#`_Z%(E[/R$'U]-PXO8ab1If=EfDuOGLAc>p5=T27u;>s8d9)_D5 7IlJ@7C&W,OFoI!lLAmG:Rf9;cY5Z*5^]D[' !6kGgrnIG0roa:Hrpp'\rqZQiqu69g p%/(OrpKdQroj@Dro!e3rn%.trl>#Urj_s8ri,mnrg!JFre19#rcS3Zrb)4?r`oG+r_renr_U6f rq6lIn*;>sAj9`@]a7fGjU 7"I_p 6\.VA6iKLQ7K,dU9E%Wa:]=5j=oM\*AH$3?F8g@^KDp]+PQ$sKWW'("]DfVI c-675rmC`#rnm_U0goD\:Zl2KiEhYu7.d/M8g_#D"GXo>9uRJrHMLAlf(FoHC[BDuH@>5hV&;>sAj8cD9[7K,]+ 5j^gf5efS`7/fXS8H)3[:]=5j=8lD&AH$6@EW1+[K)UN(Pl@*MW;`pu]`,\I bfg(2rm:YurndY:rosFPrq$-arqZHi p&F^bqu-NirqQKarpg!ProX4:rn@@urlkAWrk/64ri#garfmD:rdt,lrc.pMrac"3r`B(rr_NMbr^d#Wr^;<& m6H4f^Hhcdr^HfWr^m)br_`Yrr`];3rb)4MrcS3lreCE:rgEbariQ14rkJQY bQ,ldd/MbuhYu[:l2L5PoD\Oaq>:-` s7QEhrVlWlq#9manc%ePk5O6:gA]Iubl5EW]`,&4W;`4aPl?F:K)TolEW0VMAH#d3=8l(r:]sYr>5i(3BDuoMFoI!lLAmG:RJs/aXo?!4^]D[' !6kGhrn@A.roX4Erpg!ZrqQKgqu-3f p&FpfrqZQgrq$-ZrosFErndY.rm:YgrkSNGriQ0urgEbMreCE(rcS3[rb)4@r`];&r_`Yjr^m)[r^HfS JO&Tf JO$t8r^?`Sr^d#[r_NMjr`B)&rac"@rc.p[rdt-(rfmDMri#gurk/6G !R/afbl5uggA]t.k5OWEnc&.Zq#:*d rUp2@~> %APLeod EI 93 0 0 93 79.5 14.501007 cm BI /Width 186 /Height 186 /BitsPerComponent 8 /Decode[ 0 1 0 1 0 1 ] /DataSource cg_aiproc ID p&FpfrqZQgrq$-ZrosFErndY.rm:YgrkSNGriQ0urgEbMreCE(rcS3[rb)4@r`];&r_`Yjr^m)[r^HfS JO&Tf JO%1>q`k'Hr^6QNr^d#[r_*5ar_`Ymr`/qumo]Qir`&ts ;?0Vo;>s8g9E%N^8GbgS6iKFL 5P7;7s82coq>U0goD\:Zl2KiEhYu7.d/M8g_#D"GXo>9uRJrHMLAlf(FoHC[BDuH@>5hV&;>sAj8cD9[7K,]+ 5j^gf5fH"c5Q4"K6i0:N8H)3[9E%Wa;>sJm5l3eF6N0@L7/faV8cDB^9`@og;Z9\qjAk9`@]a8H)-V7/fOP62Nq? 
5P>'RrVlWlq#9manc%ePk5O6:gA]Iubl5EW]`,&4W;`7bQ2ZO;K)TolE;jJKA,]X1=8l%q:B!ia8,bpU6i00# 5P%*j5K,ka5lO(K6N0@O7/f^U8Gc'Z;#X>k<;oqq=Sl7u=8l5!;uoin rDEGcr^d#Yr^HfSr^-TMq`sg= p&FpgrqcWirq-3\rp'LGrnm_0rmC_irk\TIriZ7"rgEbLreCE(rc\9]rb2:Br`fA)r_`Yjr_3;`r^ZrW JOAfl JO@CDqa19Mr^HfUqag]Xr_s;h9`@T[8,c!W7/fRB 6M:BUrr2cnq>U$co)@qRkPjB_#c2PQY]Deu4WW&:aPQ$C;KDp&nF8feNAH#j5=oM:t:]=&e9E%E[7K,]+ 6h!Ho6cDFl6Mj.L7/f^R8H)3[:&[rf;#XAirnIG!rlkAVrk&05ri5sdrg*P=rdt,nrc8!Orac"6r`fA$r_i_ir_32[JOSrp JOROHqaCU$co)@tSkPjBs;b9`@T^8,c'Y 8+fFGs82fpqYpi/,GlDjbCAqlF@/aL3=8l8";>sDk9`@\9 8aT*#8]=9u8H)3[9)D9Y:B",i;YsDl=8l7h=oMP&urc\9YrbDFBra>_0r`T5"r_relr_A#= m7i.+`D->*peCW]r_WSkr_i_or`9#"r`T,%r`f/$pfRVu sDh:A@NR 9D/>^rr2cnq>U$cnc%hQkPj<:g&BG!c2PNX]Deu4WW&CdQMua?K`6;sG5c=WCAq`B@K'L1=oMD";Z9Ml:Ou7- 9n?$X:&@W]:&[oe:]=2i;YsDlb mnEESqu6TkrqZQdrpp'Sroj@=rn@A!rlkAWrk8<7ri5sfrg3VCreCE"rd"K^rb_XHraYq6r`o>&r`9"uJQ(r7 JQ'NdohtH]r`/r!r`K/&r`]2'r`o,#qcX#& =9)J&=8Q%t;Z9Vl;>sJ[ ;>'tdrr2cnq#9pbo)@qRkPj<:gA]FtbPoPhP%<;TUF ;XdS8;SQ-/;#X>h<;oep;uTbn5hb$>5hb*>5hb*>5h_, =BPN(r`9"ur_reor`&kqqbd>jr_`>d p&Fpgrql]jrq69]rp0RIrnm_1rmC_irkn`KrilC%rgj%Trf-o4rdOimrcA'Xrb;@DraYq9r`oG,qcNi! JQD8@ JQBEar`/hrr`B)%r`K&$qcj&)r`fA-r`equ !F&e,>PhP"=8Q"s<;onp <:s8as82fpqu6Ejp&=O]li-&Ghu;C0d/M8g_#CqEYl:X$SGnfQMZ/A0I/\BiEW0kTBDuKA@/aO4>Q.e)=T2BQ 5h\(>5MP'>5hb'>lIq+>6.o2 rEB2&r`]2#oiCri oiCP]rql]jrq69]rp'LGrndY-rmLeirk\THric=$rgj%Trf6u7rdXoprcA'YrbMLHrac";ra5Y2r`aqW l<8aB^KUV=r`B)%r`K/&r`];(r`]2'qcs#(r`oG.r`oG. 
q-B JQK]ir`K/"pf@/kr`K&#r`];*r`])$r`])% !ErY,>5hb*=oMV(=oMV"=T2Cj =RZ:hqu6Ejo`"C[li-)Hhu;@/dJh>g_>_+HYPtL"Sc4rSN;eS2If=ZmF8g+WCAqlFA,]s:?N+4,>CfrE =b0_j=o2>#Q.n,>5MP'?2e%,?2e(->QJ&5 rEK8)r`fA(r`Snq mo]&Yrql]krq$-Yrp0RFrnm_.rmC_frkSNEriQ0srgWnOre^W-rd=]hrc%jQrb2:Bra>_3r`fA)r`K/$JQD/= JQBNdqc3Vpqc3VrpfI>qr`])"r`fA)r`oG+r`]D, =o_b*>PhV'=T2J#=8l>$ =798[s8MopqYp0enc%nSkl0N>gA]P"b5T6V])Jr5W;`@eQ2ZaAL]2`$HN&!bDuOMNBDuE?@/aL3?2e(->(KfC =FjSh=T2Cp=T2J&=T2M'=oMY)>52>$>5he+>Q.q->Q.n/ >$:o0r`o>(qcWo"r`T5% lr``Vrql]krq69\rpB^Krnm_/rm:YdrkSNDricra5Y1r`];(r`B)"JQ;); JQ9Hbr`&ksr`&ksr`&ktr`&ktr`Au!r`];(r`];(r`];*ra#M-r`fJ- =o_b*>kM7s=Sl1g =RZ:hqu6Hkp&=L\m/H2Ii;VL1df.Mj_#CqEYl:R"Sc4oRMuJG0IK"KjEW0eRB`;N@@/aI2>Q.e)=+OE; 5hY'>Q.h*>lIq+>lIq+>lIq) >Q%e*=T2M$=8Q"^ g_#CkCYPtEuRf8KLM>i,+HN&$cDuOMNA,]j7?2e%,=8l5!<.Rs4 ;Lq`b;Ys>j:]"#d;Z9Vo;YsJn=T2D!=T2M'=oMY)>5he+ =o;J%>5hY!=8Q"s <:!WRs8MrqqYp3foD\+UkPjH>g]#V"bPoEY]Df&6VuE4cPQ$I=KDp5sFoH=YC&VZB@/aF1=oMJ$k;Z9Pm;Z9Vo5hb*>5hb- ='#B'r`T,!qc!Akqbd>k nl"rVrquclrq??^rp9XIro*k1rmLegrk\TEric";Z9Ml:B!t= 9^PN)9Z9^#9D_E[:&[ug:]=5j;YsJn=T2D$>5hV&=oMP&>Q.b(=ohZ) !*9)#r_reqqbmDir_NMfr_DoU n,NCcrqcWerpp'Uroj@>rn@A#rlb;Xrjr*3rh]U_rfI,6rdOijrc%jMraYq5r`T5#r_i_kr_3;ar^q`5 m7Dk#`C]hqqapcZr_*,_r_NMir_reqr`B)!r`T5$r`T5%r`T5&r`K8% sDh:B!ub 9_(jFs8MurqYp3foD\.Vl2K]Ah#>b$b5T9W\c/i4VZ*"_OoC.8If=NiDuOGLA,]d5=oMD";#X2g9`@Z`8:a:u 7tF1L8,,RN8Gc!X9`@c`;#XAl;uThs5hS%=92?! 
!)rkrr_i_mqbI,ar_*,\ mn*!Jrr)imrq??^rpB^Kro3q3rmLehrkeZFriuI"rgNhIre:?"rcJ-Vrau.cq_OT("6If=KhD>n/H@/a@/sJj<;p#!k'cMkf^]`,59VuE4cOoC+7IK"?fD>n/H?iF4-<;o\m9E%K]7K,[R6N09% 5O^mg5K,kd55RVE62j4M7/fXS8,c*Z9`@cc;>sJmQ._'>5hP$h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_22>5l3eF5lO(K6N0CP7K,gV8cDE_:&\#h;Z9_rsJm5hP$q`k'Fr^$NMr^?`Sr^ZrZr_3;cr_`Ymr`9"ur`K/#r`T5%r`];$r`0%t ;?0Vo;u9;d8c(sR6iK=@ 5k=sSrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_22>5l3eF5lO(K6N0CP7K,gV8cDE_:&\#h;Z9_rsJm5hP$q`k'Fr^$NMr^?`Sr^ZrZr_3;cr_`Ymr`9"ur`K/#r`T5%r`];$r`0%t ;?0Vo;u9;d8c(sR6iK=@ 5k=sSrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_22>5l3eF5lO(K6N0CP7K,gV8cDE_:&\#h;Z9_rsJm5hP$q`k'Fr^$NMr^?`Sr^ZrZr_3;cr_`Ymr`9"ur`K/#r`T5%r`];$r`0%t ;?0Vo;u9;d8c(sR6iK=@ 5k=sSrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_22>5l3eF5lO(K6N0CP7K,gV8cDE_:&\#h;Z9_rsJm5hP$q`k'Fr^$NMr^?`Sr^ZrZr_3;cr_`Ymr`9"ur`K/#r`T5%r`];$r`0%t ;?0Vo;u9;d8c(sR6iK=@ 5k=sSrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_22>5l3eF5lO(K6N0CP7K,gV8cDE_:&\#h;Z9_rsJm5hP$q`k'Fr^$NMr^?`Sr^ZrZr_3;cr_`Ymr`9"ur`K/#r`T5%r`];$r`0%t ;?0Vo;u9;d8c(sR6iK=@ 5k=sSrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_22>5l3eF5lO(K6N0CP7K,gV8cDE_:&\#h;Z9_rsJm5hP$q`k'Fr^$NMr^?`Sr^ZrZr_3;cr_`Ymr`9"ur`K/#r`T5%r`];$r`0%t ;?0Vo;u9;d8c(sR6iK=@ 5k=sSrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_22>5l3eF5lO(K6N0CP7K,gV8cDE_:&\#h;Z9_rsJm5hP$q`k'Fr^$NMr^?`Sr^ZrZr_3;cr_`Ymr`9"ur`K/#r`T5%r`];$r`0%t ;?0Vo;u9;d8c(sR6iK=@ 5k=sSrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_22>5l3eF5lO(K6N0CP7K,gV8cDE_:&\#h;Z9_rsJm5hP$q`k'Fr^$NMr^?`Sr^ZrZr_3;cr_`Ymr`9"ur`K/#r`T5%r`];$r`0%t ;?0Vo;u9;d8c(sR6iK=@ 5k=sSrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 
5_22>5l3eF5lO(K6N0CP7K,gV8cDE_:&\#h;Z9_rsJm5hP$q`k'Fr^$NMr^?`Sr^ZrZr_3;cr_`Ymr`9"ur`K/#r`T5%r`];$r`0%t ;?0Vo;u9;d8c(sR6iK=@ 5k=sSrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_22>5l3eF5lO(K6N0CP7K,gV8cDE_:&\#h;Z9_rsJm5hP$q`k'Fr^$NMr^?`Sr^ZrZr_3;cr_`Ymr`9"ur`K/#r`T5%r`];$r`0%t ;?0Vo;u9;d8c(sR6iK=@ 5k=sSrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_22>5l3eF5lO(K6N0CP7K,gV8cDE_:&\#h;Z9_rsJm5hP$q`k'Fr^$NMr^?`Sr^ZrZr_3;cr_`Ymr`9"ur`K/#r`T5%r`];$r`0%t ;?0Vo;u9;d8c(sR6iK=@ 5k=sSrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_22>5l3eF5lO(K6N0CP7K,gV8cDE_:&\#h;Z9_rsJm5hP$q`k'Fr^$NMr^?`Sr^ZrZr_3;cr_`Ymr`9"ur`K/#r`T5%r`];$r`0%t ;?0Vo;u9;d8c(sR6iK=@ 5k=sSrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_22>5l3eF5lO(K6N0CP7K,gV8cDE_:&\#h;Z9_rsJm5hP$q`k'Fr^$NMr^?`Sr^ZrZr_3;cr_`Ymr`9"ur`K/#r`T5%r`];$r`0%t ;?0Vo;u9;d8c(sR6iK=@ 5k=sSrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_22>5l3eF5lO(K6N0CP7K,gV8cDE_:&\#h;Z9_rsJm5hP$q`k'Fr^$NMr^?`Sr^ZrZr_3;cr_`Ymr`9"ur`K/#r`T5%r`];$r`0%t ;?0Vo;u9;d8c(sR6iK=@ 5k=sSrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_22>5l3eF5lO(K6N0CP7K,gV8cDE_:&\#h;Z9_rsJm5hP$q`k'Fr^$NMr^?`Sr^ZrZr_3;cr_`Ymr`9"ur`K/#r`T5%r`];$r`0%t ;?0Vo;u9;d8c(sR6iK=@ 5k=sSrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_22>5l3eF5lO(K6N0CP7K,gV8cDE_:&\#h;Z9_rsJm5hP$q`k'Fr^$NMr^?`Sr^ZrZr_3;cr_`Ymr`9"ur`K/#r`T5%r`];$r`0%t ;?0Vo;u9;d8c(sR6iK=@ 5k=sSrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_22>5l3eF5lO(K6N0CP7K,gV8cDE_:&\#h;Z9_rsJm5hP$q`k'Fr^$NMr^?`Sr^ZrZr_3;cr_`Ymr`9"ur`K/#r`T5%r`];$r`0%t ;?0Vo;u9;d8c(sR6iK=@ 5k=sSrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_22>5l3eF5lO(K6N0CP7K,gV8cDE_:&\#h;Z9_rsJm5hP$q`k'Fr^$NMr^?`Sr^ZrZr_3;cr_`Ymr`9"ur`K/#r`T5%r`];$r`0%t 
;?0Vo;u9;d8c(sR6iK=@ 5k=sSrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_22>5l3eF5lO(K6N0CP7K,gV8cDE_:&\#h;Z9_rsJm5hP$q`k'Fr^$NMr^?`Sr^ZrZr_3;cr_`Ymr`9"ur`K/#r`T5%r`];$r`0%t ;?0Vo;u9;d8c(sR6iK=@ 5k=sSrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_22>5l3eF5lO(K6N0CP7K,gV8cDE_:&\#h;Z9_rsJm5hP$q`k'Fr^$NMr^?`Sr^ZrZr_3;cr_`Ymr`9"ur`K/#r`T5%r`];$r`0%t ;?0Vo;u9;d8c(sR6iK=@ 5k=sSrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_22>5l3eF5lO(K6N0CP7K,gV8cDE_:&\#h;Z9_rsJm5hP$q`k'Fr^$NMr^?`Sr^ZrZr_3;cr_`Ymr`9"ur`K/#r`T5%r`];$r`0%t ;?0Vo;u9;d8c(sR6iK=@ 5k=sSrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_22>5l3eF5lO(K6N0CP7K,gV8cDE_:&\#h;Z9_rsJm5hP$q`k'Fr^$NMr^?`Sr^ZrZr_3;cr_`Ymr`9"ur`K/#r`T5%r`];$r`0%t ;?0Vo;u9;d8c(sR6iK=@ 5k=sSrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_22>5l3eF5lO(K6N0CP7K,gV8cDE_:&\#h;Z9_rsJm5hP$q`k'Fr^$NMr^?`Sr^ZrZr_3;cr_`Ymr`9"ur`K/#r`T5%r`];$r`0%t ;?0Vo;u9;d8c(sR6iK=@ 5k=sSrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_22>5l3eF5lO(K6N0CP7K,gV8cDE_:&\#h;Z9_rsJm5hP$q`k'Fr^$NMr^?`Sr^ZrZr_3;cr_`Ymr`9"ur`K/#r`T5%r`];$r`0%t ;?0Vo;u9;d8c(sR6iK=@ 5k=sSrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_22>5l3eF5lO(K6N0CP7K,gV8cDE_:&\#h;Z9_rsJm5hP$q`k'Fr^$NMr^?`Sr^ZrZr_3;cr_`Ymr`9"ur`K/#r`T5%r`];$r`0%t ;?0Vo;u9;d8c(sR6iK=@ 5k=sSrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_22>5l3eF5lO(K6N0CP7K,gV8cDE_:&\#h;Z9_rsJm5hP$q`k'Fr^$NMr^?`Sr^ZrZr_3;cr_`Ymr`9"ur`K/#r`T5%r`];$r`0%t ;?0Vo;u9;d8c(sR6iK=@ 5k=sSrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 5_22>5l3eF5lO(K6N0CP7K,gV8cDE_:&\#h;Z9_rsJm5hP$q`k'Fr^$NMr^?`Sr^ZrZr_3;cr_`Ymr`9"ur`K/#r`T5%r`];$r`0%t ;?0Vo;u9;d8c(sR6iK=@ 5k=sSrr2ipqYp3fo)A(Vkl0T@h#>h&c2PZ\]Df)7VuE1bOT(%7If=KhD>n/H?iF4-<;o\m9`@T^7fGdS6@hDh 
5_22>5l3eF5lO(K6N0CP7K,gV8cDE_:&\#h;Z9_rsJm5hP$_/r`/qor_3;_r^QlTr^6ZO JO&]i JO%1>r]g9Gqa19Mr^HfWr_!/`r_E>gr`9"ur`K/$r`fA'r`];%r`9,! ;uKYo:&[oe9)_E]7fGjR6iKCN 6M3V4s8MurqYp6goD\1Wl2K`Bh#>k'cMkf^]`,59VuE4cOoC.8If=KhDZ4;J@/a@/% ;ufnsZ")cMkf^]`,8:WW&IfPQ$I=Jc9onEW0_PA,]d5=T27u;#X/f9)_A4 7dXAm=T2M'?2e%,?N+1.?2e"+>6.c* !*9)#r`&kqqbR)_r^m)] mmupIrr)imrqHE`rpKdNroF(7rmq(prl4rOrj;[)rh'1Trepc.rd+QarbVRFra5Y.r`9"rr_NMer_!/]JOo0! JOmaNpdb3Pr_!/_r_EGgqbm;mqcj&+r`oG0r`oG0r`];* !EiS(=o21q;>!WO 8al]YrVlZmp\sd`n,DVOjo4-9fDa.raSrsR[f3B-U&LAWNW+Y2I/\9fDZ4>K@fB[4=oMG#;Z9Jk:4Z+( 9S#mV9`%K^9)_N`9D_K`;>sJm[754qc`ksr_reqqbR2f nk\`Srr)imrqHEarpTjProO.:rn.4srlG)SrjVm/rhBC[rfI,7rdXolrc.pQrau.Q._'=Sl+q ;X[KOs8MurqYp6go`"@Zli-#Fhu;=.df.Dg_Z%+FYl:KuSGn`OMZ/8-I/\9fDuOMNAc?-;?N+.-=T2A#lJ%.@/aU6AH$*B)Z<>AH$!9@K'R3>l.ao >4;LjrVl]nq#9pbnc%nSkl0T@h#>k'cMki_^AbP>X8\jmRf8NMMZ/;.If=TkF8g+WC]8#HAc?3=@K'Yc ?gV3O?c?CF?N+=2@/aU3A,^'=B)?<>D#SAPD#SAPDZ4YTDZ4SU Chs)frb_OIrau.Bral(@qdo5, n,NCcrql]irq69\rpB^Kro="6rn%.rrlG)Trj_s1rhf[drg3VGreg].rdOilrcS3[rbh^NrbD=CJS+:] JS)l5qdoY8ph'D:qeQ(Hrc.pVrcA'XrcA'ZrcJ-Zrc8*Y DZF_TE;j\QD>7iECAVZC B_#g)s8Murqu6EjpAXX^mf)JMjo40:g&BG!bPoHZ]`,59WrAdmSGnfQO8b%9K`6K#HiA9hFoHF\E;jd, DZ%*dJh>g_u@:IZi7*+VuEChSGniRP5^L@MZ/D1K`6T&Jc:.M Id2B5I_U@2I/&-hI/ABiJGXrtL&Q`(LAli&LAm&/LAlu0 KnTM\reCE*re:?(re19&re(*! mt1$.rr)iorqcWgrq-3[rpKdMroO.:rn@A$rm(MarkSNDrj2U+rhoajrgs+Xrg*PIrfI,=repc5reQ,\ lA'qG^PDfBpkJd%reLK-reUQ/re^W3repc4rf$i5rf$i7rf-o8rf$r9 M?&P5N;e\5MYi5/L&Ql,LAlo+ L[p2gs8N#srVlZmq#:!do)A+Wli-)Hir7d5f`'=ubl5T\^](eC[/R9.WrAmpUAgeaSGnrUQN!-JPQ$aEOb&V] OF`M.OSb+O'Y5YX(Yl:m. 
Ycn,3riuI)poj@n n#bsVrr2oqrql]krqHEbrq$-ZrpKdOroa:Aro!e2rn.5"rmC_jrlb;^rl4rUrkeZMrkJHHpq20i mCRq]^UsBSrji$>ppp:5rk&0>rjr*>qmud>rk&0Brk&0C !P>rD]DfMC]DfJB](N`8 ^$iW#s8N#srVl`oqYpibl5caao9B\`q@<% `:1]r`59Il`V[RP_=b\E_u@LO_YCqG_>_7L_>_7L_Z%@M_?%Kh !5\ZOqnW*FrkSNLrkSNL n%e;irr2fnrql]krqQKerq-3\rp]pTrp0RIroO.=rnm_2rnIG*rmq)!rmUkpqp>,`J^*K) J^(sSrlkAdqonr\qonr[rlkAdrlP&[rlb;`qoo&a !R/dabl5labPoc`bl5lU bj#0,rr2lqr;QTmqYp9hpAX^`o)A1Ymf)SPl2KlFjo49=i;VX5h>Z4/gA]e)f`'M"f)+!K eEtb:e?d[(deM;gdeM5hdf.Yndf.Vgd/hS> !71YkqpPJirm:Yjrm:Pi n'L>!rr2fnrquclrqZQgrqHEarq$-ZrpTjRrp0RJroj@CroF(=ro3q8qqqD0rnW.\ jP5=A\_R2Purn@J- g@s>#g]$"-g]$"* g[EkYs8N#prVl`lqYp9hp\sjbo`"F\nc&"VmJcJOli-/Jkl0cEk549>jSRtg iof n_4$?rqQHkr;6BdqYp %APLeod EI /Cs2 SC 0 sc 1 0 0 -1 163.5 209.87601 cm /F1.1[ 12 0 0 -12 0 0]sf -25.675781 4.5 m (!"#$%&'\(\)\))[ 3.333984 3.996094 6.673828 6.673828 3.333984 6.000000 2.666016 6.673828 6.000000 6.000000 ] xS 1 0 0 -1 33 101.87601 cm -24.682617 -2.5 m (*"$+,&!$+)[ 6.673828 3.996094 6.673828 6.673828 2.666016 6.000000 3.333984 6.673828 6.673828 ] xS 24.682617 -2.5 m (%)s -13.669922 11.5 m (&'\(\)\))[ 6.000000 2.666016 6.673828 6.000000 6.000000 ] xS 0.60000002 i /Cs1 SC 1 1 1 sc CM 90 173.00101 m 162 173.00101 l 162 101.00101 l 90 101.00101 l h 90 173.00101 m f 1 J 1 j 1 M 0 0 0 sc 1 0 0 -1 -126 317.00101 cm 216 144 m 288 144 l 288 216 l 216 216 l h 216 144 m S /Cs2 SC 0 sc 0 i 1 0 0 -1 126 137.00101 cm -7.6669922 -1 m (-.)[ 7.330078 8.003906 ] xS /F2.1[ 9 0 0 -9 0 0]sf -26.512207 11 m (!"#$%&'\(\)!\)*$\()[ 2.500488 2.997070 5.005371 5.005371 2.500488 5.005371 5.005371 4.500000 1.999512 2.500488 1.999512 4.500000 5.005371 4.500000 ] xS 0.60000002 i /Cs1 SC 1 1 1 sc CM 162 101.00101 m 234 101.00101 l 234 29.001007 l 162 29.001007 l h 162 101.00101 m f 0 0 0 sc 1 0 0 -1 -126 317.00101 cm 288 216 m 360 216 l 360 288 l 288 288 l h 288 216 m S /Cs2 SC 0 sc 0 i 1 0 0 -1 198 65.001007 cm /F1.1[ 12 0 0 -12 0 0]sf -7.9980469 -1 m (-/)[ 7.330078 8.666016 ] xS /F2.1[ 9 0 0 -9 0 0]sf 
-28.267822 11 m (!"#$%+$,-!\)*$\()[ 2.500488 2.997070 5.005371 5.005371 2.500488 5.005371 5.005371 5.005371 5.005371 2.500488 1.999512 4.500000 5.005371 4.500000 ] xS 0.60000002 i /Cs1 SC 1 1 1 sc CM 162 173.00101 m 234 173.00101 l 234 101.00101 l 162 101.00101 l h 162 173.00101 m f 0 0 0 sc 1 0 0 -1 -126 317.00101 cm 288 144 m 360 144 l 360 216 l 288 216 l h 288 144 m S /Cs2 SC 0 sc 0 i 1 0 0 -1 198 137.00101 cm /F1.1[ 12 0 0 -12 0 0]sf -7.6669922 -1 m (0.)[ 7.330078 8.003906 ] xS /F2.1[ 9 0 0 -9 0 0]sf -28.263428 11 m (.-/\($%&'\(\)!\)*$\()[ 2.500488 5.005371 1.999512 4.500000 5.005371 2.500488 5.005371 5.005371 4.500000 1.999512 2.500488 1.999512 4.500000 5.005371 4.500000 ] xS 0.60000002 i /Cs1 SC 1 1 1 sc CM 90 101.00101 m 162 101.00101 l 162 29.001007 l 90 29.001007 l h 90 101.00101 m f 0 0 0 sc 1 0 0 -1 -126 317.00101 cm 216 216 m 288 216 l 288 288 l 216 288 l h 216 216 m S /Cs2 SC 0 sc 0 i 1 0 0 -1 126 65.001007 cm /F1.1[ 12 0 0 -12 0 0]sf -7.9980469 -1 m (0/)[ 7.330078 8.666016 ] xS /F2.1[ 9 0 0 -9 0 0]sf -30.019043 11 m (.-/\($%+$,-!\)*$\()[ 2.500488 5.005371 1.999512 4.500000 5.005371 2.500488 5.005371 5.005371 5.005371 5.005371 2.500488 1.999512 4.500000 5.005371 4.500000 ] xS 1 0 0 -1 122.5 189.87601 cm /F2.1[ 10 0 0 -10 0 0]sf -16.953125 4.5 m (&'\(\)!\)*$)[ 5.561523 5.561523 5.000000 2.221680 2.778320 2.221680 5.000000 5.561523 ] xS 1 0 0 -1 193.5 189.87601 cm -18.903809 4.5 m (+$,-!\)*$)[ 5.561523 5.561523 5.561523 5.561523 2.778320 2.221680 5.000000 5.561523 ] xS 1 0 0 -1 58 136.87601 cm -15.280762 4.5 m (0'""$0!)[ 5.000000 5.561523 3.330078 3.330078 5.561523 5.000000 2.778320 ] xS 1 0 0 -1 60 64.876007 cm -19.172363 4.5 m (\)+0'""$0!)[ 2.221680 5.561523 5.000000 5.561523 3.330078 3.330078 5.561523 5.000000 2.778320 ] xS 1 0 0 -1 126 8.8760071 cm /F1.1[ 12 0 0 -12 0 0]sf -4.0019531 4.5 m (.)s 1 0 0 -1 198 8.8760071 cm -4.3330078 4.5 m (/)s ep end %%Trailer %%EOF 
LanguageMachines-timbl-642727d/docs/texfiles/roc-auc.eps000077500000000000000000001055051451477526200231360ustar00rootroot00000000000000%!PS-Adobe-3.0 EPSF-3.0 %%HiResBoundingBox: 0.000000 0.000000 292.989014 267.126007 %APL_DSC_Encoding: UTF8 %%Title: (Unknown) %%Creator: (Unknown) %%CreationDate: (Unknown) %%For: (Unknown) %%DocumentData: Clean7Bit %%LanguageLevel: 2 %%Pages: 1 %%BoundingBox: 0 0 292 267 %%EndComments %%BeginProlog %%BeginFile: cg-pdf.ps %%Copyright: Copyright 2000-2002 Apple Computer Incorporated. %%Copyright: All Rights Reserved. currentpacking true setpacking /cg_md 140 dict def cg_md begin /L3? languagelevel 3 ge def /bd{bind def}bind def /ld{load def}bd /xs{exch store}bd /xd{exch def}bd /cmmtx matrix def mark /sc/setcolor /scs/setcolorspace /dr/defineresource /fr/findresource /T/true /F/false /d/setdash /w/setlinewidth /J/setlinecap /j/setlinejoin /M/setmiterlimit /i/setflat /rc/rectclip /rf/rectfill /rs/rectstroke /f/fill /f*/eofill /sf/selectfont /s/show /xS/xshow /yS/yshow /xyS/xyshow /S/stroke /m/moveto /l/lineto /c/curveto /h/closepath /n/newpath /q/gsave /Q/grestore counttomark 2 idiv {ld}repeat pop /SC{ /ColorSpace fr scs }bd /cgmtx matrix def /sdmtx{cgmtx currentmatrix pop}bd /CM {cgmtx setmatrix}bd /cm {cmmtx astore CM concat}bd /W{clip newpath}bd /W*{eoclip newpath}bd statusdict begin product end dup (HP) anchorsearch{ pop pop pop true }{ pop (hp) anchorsearch{ pop pop true }{ pop false }ifelse }ifelse { { { pop pop (0)dup 0 4 -1 roll put F charpath }cshow } }{ {F charpath} }ifelse /cply exch bd /cps {cply stroke}bd /pgsave 0 def /bp{/pgsave save store}bd /ep{pgsave restore showpage}def /re{4 2 roll m 1 index 0 rlineto 0 exch rlineto neg 0 rlineto h}bd /scrdict 10 dict def /scrmtx matrix def /patarray 0 def /createpat{patarray 3 1 roll put}bd /makepat{ scrmtx astore pop gsave initgraphics CM patarray exch get scrmtx makepattern grestore setpattern }bd /cg_BeginEPSF{ userdict save/cg_b4_Inc_state exch put 
userdict/cg_endepsf/cg_EndEPSF load put count userdict/cg_op_count 3 -1 roll put countdictstack dup array dictstack userdict/cg_dict_array 3 -1 roll put 3 sub{end}repeat /showpage {} def 0 setgray 0 setlinecap 1 setlinewidth 0 setlinejoin 10 setmiterlimit [] 0 setdash newpath false setstrokeadjust false setoverprint }bd /cg_EndEPSF{ countdictstack 3 sub { end } repeat cg_dict_array 3 1 index length 3 sub getinterval {begin}forall count userdict/cg_op_count get sub{pop}repeat userdict/cg_b4_Inc_state get restore F setpacking }bd /cg_biproc{currentfile/RunLengthDecode filter}bd /cg_aiproc{currentfile/ASCII85Decode filter/RunLengthDecode filter}bd /ImageDataSource 0 def L3?{ /cg_mibiproc{pop pop/ImageDataSource{cg_biproc}def}bd /cg_miaiproc{pop pop/ImageDataSource{cg_aiproc}def}bd }{ /ImageBandMask 0 def /ImageBandData 0 def /cg_mibiproc{ string/ImageBandMask xs string/ImageBandData xs /ImageDataSource{[currentfile/RunLengthDecode filter dup ImageBandMask/readstring cvx /pop cvx dup ImageBandData/readstring cvx/pop cvx]cvx bind}bd }bd /cg_miaiproc{ string/ImageBandMask xs string/ImageBandData xs /ImageDataSource{[currentfile/ASCII85Decode filter/RunLengthDecode filter dup ImageBandMask/readstring cvx /pop cvx dup ImageBandData/readstring cvx/pop cvx]cvx bind}bd }bd }ifelse /imsave 0 def /BI{save/imsave xd mark}bd /EI{imsave restore}bd /ID{ counttomark 2 idiv dup 2 add dict begin {def} repeat pop /ImageType 1 def /ImageMatrix[Width 0 0 Height neg 0 Height]def currentdict dup/ImageMask known{ImageMask}{F}ifelse exch L3?{ dup/MaskedImage known { pop << /ImageType 3 /InterleaveType 2 /DataDict currentdict /MaskDict << /ImageType 1 /Width Width /Height Height /ImageMatrix ImageMatrix /BitsPerComponent 1 /Decode [0 1] currentdict/Interpolate known {/Interpolate Interpolate}if >> >> }if }if exch {imagemask}{image}ifelse end }bd /cguidfix{statusdict begin mark version end {cvr}stopped{cleartomark 0}{exch pop}ifelse 2012 lt{dup findfont dup length dict begin {1 index/FID ne 2 
index/UniqueID ne and {def} {pop pop} ifelse}forall currentdict end definefont pop }{pop}ifelse }bd /t_array 0 def /t_i 0 def /t_c 1 string def /x_proc{ exch t_array t_i get add exch moveto /t_i t_i 1 add store }bd /y_proc{ t_array t_i get add moveto /t_i t_i 1 add store }bd /xy_proc{ t_array t_i 2 copy 1 add get 3 1 roll get 4 -1 roll add 3 1 roll add moveto /t_i t_i 2 add store }bd /sop 0 def /cp_proc/x_proc ld /base_charpath { /t_array xs /t_i 0 def { t_c 0 3 -1 roll put currentpoint t_c cply sop cp_proc }forall /t_array 0 def }bd /sop/stroke ld /nop{}def /xsp/base_charpath ld /ysp{/cp_proc/y_proc ld base_charpath/cp_proc/x_proc ld}bd /xysp{/cp_proc/xy_proc ld base_charpath/cp_proc/x_proc ld}bd /xmp{/sop/nop ld /cp_proc/x_proc ld base_charpath/sop/stroke ld}bd /ymp{/sop/nop ld /cp_proc/y_proc ld base_charpath/sop/stroke ld}bd /xymp{/sop/nop ld /cp_proc/xy_proc ld base_charpath/sop/stroke ld}bd /refnt{ findfont dup length dict copy dup /Encoding 4 -1 roll put definefont pop }bd /renmfont{ findfont dup length dict copy definefont pop }bd L3? 
dup dup{save exch}if /Range 0 def /Domain 0 def /Encode 0 def /Decode 0 def /Size 0 def /DataSource 0 def /mIndex 0 def /nDomain 0 def /ival 0 def /val 0 def /nDomM1 0 def /sizem1 0 def /srcEncode 0 def /srcDecode 0 def /nRange 0 def /d0 0 def /r0 0 def /di 0 def /ri 0 def /a0 0 def /a1 0 def /r1 0 def /r2 0 def /dx 0 def /Nsteps 0 def /sh3tp 0 def /ymax 0 def /ymin 0 def /xmax 0 def /xmin 0 def /min { 2 copy gt {exch pop}{pop}ifelse }bd /max { 2 copy lt {exch pop}{pop}ifelse }bd /inter { 1 index sub 5 2 roll 1 index sub 3 1 roll sub 3 1 roll div mul add }bd /setupFunEvalN { begin /nDomM1 Domain length 2 idiv 1 sub store /sizem1[ 0 1 nDomM1 { Size exch get 1 sub }for ]store /srcEncode currentdict/Encode known { Encode }{ [ 0 1 nDomM1 { 0 sizem1 3 -1 roll get }for ] }ifelse store /srcDecode currentdict/Decode known {Decode}{Range}ifelse store /nRange Range length 2 idiv store end }bd /FunEvalN { begin nDomM1 -1 0 { 2 mul/mIndex xs Domain mIndex get max Domain mIndex 1 add get min Domain mIndex get Domain mIndex 1 add get srcEncode mIndex get srcEncode mIndex 1 add get inter round cvi 0 max sizem1 mIndex 2 idiv get min nDomM1 1 add 1 roll }for nDomM1 1 add array astore/val xs nDomM1 0 gt { 0 nDomM1 -1 0 { dup 0 gt { /mIndex xs val mIndex get 1 index add Size mIndex 1 sub get mul add }{ val exch get add }ifelse }for }{ val 0 get }ifelse nRange mul /ival xs 0 1 nRange 1 sub { dup 2 mul/mIndex xs ival add DataSource exch get 0 255 srcDecode mIndex 2 copy get 3 1 roll 1 add get inter Range mIndex get max Range mIndex 1 add get min }for end }bd /sh2 { /Coords load aload pop 3 index 3 index translate 3 -1 roll sub 3 1 roll exch sub 2 copy dup mul exch dup mul add sqrt dup scale atan rotate /Function load setupFunEvalN clippath {pathbbox}stopped {0 0 0 0}if newpath /ymax xs /xmax xs /ymin xs /xmin xs currentdict/Extend known { /Extend load 0 get { /Domain load 0 get /Function load FunEvalN sc xmin ymin xmin abs ymax ymin sub rectfill }if }if /dx/Function load/Size get 0 get 
1 sub 1 exch div store gsave /di ymax ymin sub store /Function load dup /Domain get dup 0 get exch 1 get 2 copy exch sub dx mul exch { 1 index FunEvalN sc 0 ymin dx di rectfill dx 0 translate }for pop grestore currentdict/Extend known { /Extend load 1 get { /Domain load 1 get /Function load FunEvalN sc 1 ymin xmax 1 sub abs ymax ymin sub rectfill }if }if }bd /shp { 4 copy dup 0 gt{ 0 exch a1 a0 arc }{ pop 0 moveto }ifelse dup 0 gt{ 0 exch a0 a1 arcn }{ pop 0 lineto }ifelse fill dup 0 gt{ 0 exch a0 a1 arc }{ pop 0 moveto }ifelse dup 0 gt{ 0 exch a1 a0 arcn }{ pop 0 lineto }ifelse fill }bd /calcmaxs { xmin dup mul ymin dup mul add sqrt xmax dup mul ymin dup mul add sqrt xmin dup mul ymax dup mul add sqrt xmax dup mul ymax dup mul add sqrt max max max }bd /sh3 { /Coords load aload pop 5 index 5 index translate 3 -1 roll 6 -1 roll sub 3 -1 roll 5 -1 roll sub 2 copy dup mul exch dup mul add sqrt /dx xs 2 copy 0 ne exch 0 ne or { exch atan rotate }{ pop pop }ifelse /r2 xs /r1 xs /Function load dup/Size get 0 get 1 sub /Nsteps xs setupFunEvalN dx r2 add r1 lt{ 0 }{ dx r1 add r2 le { 1 }{ r1 r2 eq { 2 }{ 3 }ifelse }ifelse }ifelse /sh3tp xs clippath {pathbbox}stopped {0 0 0 0}if newpath /ymax xs /xmax xs /ymin xs /xmin xs dx dup mul r2 r1 sub dup mul sub dup 0 gt { sqrt r2 r1 sub atan /a0 exch 180 exch sub store /a1 a0 neg store }{ pop /a0 0 store /a1 360 store }ifelse currentdict/Extend known { /Extend load 0 get r1 0 gt and { /Domain load 0 get/Function load FunEvalN sc { { dx 0 r1 360 0 arcn xmin ymin moveto xmax ymin lineto xmax ymax lineto xmin ymax lineto xmin ymin lineto eofill } { r1 0 gt{0 0 r1 0 360 arc fill}if } { 0 r1 xmin abs r1 add neg r1 shp } { r2 r1 gt{ 0 r1 r1 neg r2 r1 sub div dx mul 0 shp }{ 0 r1 calcmaxs dup r2 add dx mul dx r1 r2 sub sub div neg exch 1 index abs exch sub shp }ifelse } }sh3tp get exec }if }if /d0 0 store /r0 r1 store /di dx Nsteps div store /ri r2 r1 sub Nsteps div store /Function load /Domain load dup 0 get exch 1 get 2 copy exch sub 
Nsteps div exch { 1 index FunEvalN sc d0 di add r0 ri add d0 r0 shp { d0 0 r0 a1 a0 arc d0 di add 0 r0 ri add a0 a1 arcn fill d0 0 r0 a0 a1 arc d0 di add 0 r0 ri add a1 a0 arcn fill }pop /d0 d0 di add store /r0 r0 ri add store }for pop currentdict/Extend known { /Extend load 1 get r2 0 gt and { /Domain load 1 get/Function load FunEvalN sc { { dx 0 r2 0 360 arc fill } { dx 0 r2 360 0 arcn xmin ymin moveto xmax ymin lineto xmax ymax lineto xmin ymax lineto xmin ymin lineto eofill } { xmax abs r1 add r1 dx r1 shp } { r2 r1 gt{ calcmaxs dup r1 add dx mul dx r2 r1 sub sub div exch 1 index exch sub dx r2 shp }{ r1 neg r2 r1 sub div dx mul 0 dx r2 shp }ifelse } } sh3tp get exec }if }if }bd /sh { begin /ShadingType load dup dup 2 eq exch 3 eq or { gsave newpath /ColorSpace load scs currentdict/BBox known { /BBox load aload pop 2 index sub 3 index 3 -1 roll exch sub exch rectclip }if 2 eq {sh2}{sh3}ifelse grestore }{ pop (DEBUG: shading type unimplemented\n)print flush }ifelse end }bd {restore}if not dup{save exch}if L3?{ /sh/shfill ld /csq/clipsave ld /csQ/cliprestore ld }if {restore}if end setpacking %%EndFile %%EndProlog %%BeginSetup %%EndSetup %%Page: 1 1 %%PageBoundingBox: 0 0 292 267 %%BeginPageSetup cg_md begin bp sdmtx %RBIBeginFontSubset: NTZMIF+Helvetica %!PS-TrueTypeFont-1.0000-0.0000-2 14 dict begin/FontName /NTZMIF+Helvetica def /PaintType 0 def /Encoding 256 array 0 1 255{1 index exch/.notdef put}for dup 33 /f put dup 34 /a put dup 35 /l put dup 36 /s put dup 37 /e put dup 38 /space put dup 39 /p put dup 40 /o put dup 41 /i put dup 42 /t put dup 43 /v put dup 44 /r put dup 45 /u put dup 46 /zero put dup 47 /period put dup 48 /five put dup 49 /one put readonly def 42/FontType resourcestatus{pop pop false}{true}ifelse %APLsfntBegin {currentfile 0(%APLsfntEnd\n)/SubFileDecode filter flushfile}if /FontType 42 def /FontMatrix matrix def /FontBBox[2048 -342 1 index div -914 2 index div 2036 3 index div 2100 5 -1 roll div]cvx def /sfnts [< 
74727565000900000000000063767420000000000000009C000003626670676D000000000000040000000322676C7966000000000000072400000DE46865616400000000000015080000003868686561000000000000154000000024686D74780000000000001564000000486C6F636100000000000015AC000000266D61787000000000000015D4000000207072657000000000000015F4000003BB05C0001005BD00280580001A042F001F0000FFD90000FFDA0000FFD9FE55FFE605C70010FE6DFFF1033B000000B9000000B902FE3F3C00C0008D009B00AF000600A800C00028005E009800C9016A00B9015C00B400D6011E002E0080000400B8004C00CC01FFFFD1006600A400AF007400C2009500B1000C0028006D0015004C008E0125FF7A000C0040004C00620084FFA200240038008600BD0039005E008E00EDFFA9FFB300400052005500AA00AB00C200CB012302B10413FFAEFFE4000800510074008400AA00D1FF4CFFAF0012002C004200500051008400BE012503DAFF680018003B0098009C009F00A100C100EC018201B4FF68FF76FFD0FFE100020018001C00530053007D01B401E103AF0486FF9CFFEAFFFE001F0028002A00520060009300A300AA00AF00AF00C001000145016B0174019301950240028202B404850517FEFD00060029004700470048006F008800B400B900C400F200F901EF02180310037403C5FF35FFF3000B004B004C0052005500650076007600870087008E00AB00BB0106013001430150017D0194019501D3022A025502580277027802E6034E035C037903D3047304B2058C0598060BFEF5FFBBFFC7FFD50017001D005B0072007E009C00C200D000F400FA01030106011C0125013B0142015E015E0180019B02B901A101B9025001C001D002AA01DF01E301EF01FB0205020C0215022B0274029302AB02C202CE03690395039903DF03F5043E050205A105E5062507DBFE62FE89FECEFF3BFFE1FFF800030008002100390042004E005F0061006F00700034007F008E00AD00AD00AF00BD00C400C500C900C900C900E3011C00ED00F800F901000112011A0132014D014D014E014F01660169019E01BA01BA01BE01E301EF01F602000200020902110217021C02530262026D028002D50280031B032A034A035A03AF03AF03C803D603FB03FB04050413041504470449008C046D049A049A04A604A804B204CF0539053E054E055605800589058C036305D105D6067E068E06B206EF06F00728074C076F078C00B400C900C000C10000000000000000000000000004012400AF0032006E0063014401620096014301A10161008A00740064018801EF01700028FF5D037E0347023000AA00BE007B0062009A007D0089035C00A1FFD803AA00D7
0093006C0000008000A70442001D0597001D008200300000 40292A292827262524232221201F1E1D1C1B1A191817161514131211100D0C0B0A090807060504030201002C4523466020B02660B004262348482D2C452346236120B02661B004262348482D2C45234660B0206120B04660B004262348482D2C4523462361B0206020B02661B02061B004262348482D2C45234660B0406120B06660B004262348482D2C4523462361B0406020B02661B04061B004262348482D2C0110203C003C2D2C20452320B0CD442320B8015A51582320B08D44235920B0ED51582320B04D44235920B09051582320B00D44235921212D2C20204518684420B001602045B04676688A4560442D2C01B9400000000A2D2C00B9000040000B2D2C2045B00043617D6818B0004360442D2C45B01A234445B01923442D2C2045B00325456164B050515845441B2121592D2C20B0032552582359212D2C69B04061B0008B0C6423648BB8400062600C642364615C58B0036159B002602D2C45B0112BB0172344B0177AE5182D2C45B0112BB01723442D2C45B0112BB017458CB0172344B0177AE5182D2CB002254661658A46B040608B482D2CB0022546608A46B040618C482D2C4B53205C58B002855958B00185592D2C20B0032545B019236A4445B01A23444565234520B00325606A20B009234223688A6A606120B0005258B21A401A4523614459B0005058B219401945236144592D2CB9187E3B210B2D2CB92D412D410B2D2CB93B21187E0B2D2CB93B21E7830B2D2CB92D41D2C00B2D2CB9187EC4E00B2D2C4B525845441B2121592D2C0120B003252349B04060B0206320B000525823B002253823B002256538008A63381B212121212159012D2C456920B00943B0022660B00325B005254961B0805358B21940194523616844B21A401A4523606A44B209191A45652345604259B00943608A103A2D2C01B005251023208AF500B0016023EDEC2D2C01B005251023208AF500B0016123EDEC2D2C01B0062510F500EDEC2D2C20B001600110203C003C2D2C20B001610110203C003C2D2C764520B003254523616818236860442D2C7645B00325452361682318456860442D2C7645B0032545616823452361442D2C4569B014B0324B505821B0205961442D0000000200A10000052F05BD00030007003E402105062F02010004072F03000A05042F0303021A0906072F01001908098821637B182B2B4EF43C4DFD3C4E10F63C4D10FD3C003F3CFD3C3F3CFD3C31303311211127112111A1048EB8FCE205BDFA43B8044DFBB300000100AF0000018000DA000300264013012A030A0517171A01640019040564216365182B2B4EF44DFD4E456544E6003F4DED313037331523AFD1D1DADA0000020040FFD9041
C0598000F001C00714017870501460815350F051C35070D1238036F18380B1E471D1076C418D4EDFDED003FED3FED3130 43794034001B0D2601251A2609250526160E18280014001228011B081828001006122801170C1528011302152801190A1C280011041C28002B2B2B2B012B2B2B2B2B2B2B2B2B81005D001716111007022120272611343712211236113402232202111417163303407C60577EFEE2FEFE7E693F7601358AA678AD9F932F48AE0598E5B1FECCFEDCBFFEEEE0BB013BF4AF0146FAE5F80152F4013BFED5FEDDDB85CB00000100C4000002D5059200080023B10801B80133400C0404070C04079605000A47091076C418C4D5FD39003F3FF4CD313013353E013733112311C4C39A268EC003F68A1359A6FA6E03F600010042FFDC041C0580002000BB402B4812881F02390C461357136713043A080C0D07000C0A0F0E0E75121313120E0F201213070013170A17350AB80122401A0D7F0E0E1D123A0F04043520C71D0D1107381A6F0038202247211076C418D4EDF5EDC4003FEDED3FFD12392FE4F4ED1112390111123939123939872E2B7D10C5001239011112393931301843794028181F01090226031E00280001001F20051C0728010918072801011F042800061B04280008190A2801002B2B2B012B2B103C103C2B2B8181005D5D131617163332363534262322060727132115210336373633320415140221222427FD127D4054A09AB7805D852F9C6D02E8FD9F3D322D5069C50112FBFEEDAFFEF310016D9A3B1ECC7C96A44840090303AEFE72261321FEC3CBFECAC5CC00030052FFDC04470449000F003B003C00DD40382A30010A100B1B0C1C2733481069096A10073908120C09031B320724091D100C1D3B2B022E293BB73B023B322A2512100705081C2722171CB8018A4023171D1F07271D2E0B021D350B3C073C3C1C1407292AA8241A3E1B291C4A0F2738193D3EBC0197002100B9019600182B2B4EF44DEDF4ED4E10F64DE4FDC412392F003F3FED3FED3FEDED1239111217395D1112392EED2EED01111239111739313043794028363715220001192501360F2100181E1B21001620142101212200370221001A1D1721011521172101002B2B2B01103C2B2B2B2B818181005D015D2416333237363D010E010F0106070615013637363534262322070607233E01333217161511141633323637150E0123222726270E012322263534363713010E724E5F59962168326D62315301B43E150C837A8D3B210AA805F7A3BD767517250C1E112A2C265D2A160937CE7C95BDBA978ACF5A2C49A691151C060E0D1C2F67016C082C182D5C534C2A53C69B484898FD971C220303850C06422340486AB58895A41301E40000030048FFDA041A0449001C0024002501
0C40799708991AA71F03050E020F0514150E120F1514400C401408291A014B0BB603C701C603C71BD808D909D61FD823E817E8230BC711C712025C080521240F9A161D243906070716 211D1C070A1D160B2507971CA71CB71CD71C0425160F251C05190A0C07110E270F1D27051A27242E072719192627D421A65D182B2B4EF44DFDE44E10F64DEDD4FD391239391112393912392F5D003F3FED3FED12392F3CFD3C10ED1112393130437940460023040503050205010504061F26111012101310141004060C25221B24260020001D26011E1D09170726000B150E26010D0E231A2126011E0521260108180A26000D100A2600002B2B2B2B01103C2B2B103C2B2B2B2A2B2A8101715D00715D5D00161716171615211E013332373637330E01070607062322001110003301262726232206070102B4D638361210FCEF0590978D543014B1074F3152794152C8FEEA0118E2011F0B284AAD7CA805012304476B55516C4AA2A3C55D36473B912E501C100123010601020142FE26754682B38A01DC000001001C0000021705D20017004D402B071D060A1D03010F1439160D06120A1917171A0E0D1129171207120F0E1F0E020EFC14191819FC21677E182B2B4EF44DFD5D39C42F3CFD3C104E456544E6003F3F3C4DFD3C3FEDD4ED313012373633321617152E012322061533152311231123353335B5233FB41124171C190B5220B2B4B295950542345C0202A4020155AE8EFC64039C8EA8000200840000013B05BD000300070036401C07E50400010006030A0917171A06010229070300190809AA216242182B2B4EF43C4DC4FD3CC44E456544E6003F3F3C3F4DED3130133311231133152384B7B7B7B7042AFBD605BDCC000100890000013D05BD0003002940150000030A0517171A0102290003190405AA216242182B2B4EF43C4DFD3C4E456544E6003F3F31301333112389B4B405BDFA430003003BFFD90421044E000C0018001900904033980896109916A504A808A610A916B808C808D704E50EE9140C3A08061D18070C1D120B190719191502270F1A1B092715191A1BB80109B321725D182B2B4EF44DED4E10F64DED12392F003F3FED3FED31304379402C001704260B1309260000110226010717092600050D0226010A140C260001100C26000816062601030E0626012B2B2B2B012B2B2B2B2B81005D241235342726232206151416331200111002212200351000330702E085304CBAA59696A3D6011EFCFEF7DDFEFC0112E70674010FA6965E94FCB2ABE403DAFEECFEF4FEFDFEAE012BFC010E01400500020076FE5504250449000E00220074402CA908A717022808201C110E061D15070F060E1D1C0B220E0227181A240A2E102E2129220F1923248721BD5D182B2B4EF43C4DF
DE4E44E10F64DED003F3FED3F3FED1139123931304379401C161B00051A260426001B022601051602260101190E260003170626012B2B012B2B2B2B8181005D24363534272623220706151417163301331536373633321211100706232227262711 2302C6A72546BABB45252546BAFE2EAF36405B7BB6FEB7749A7952303BB479D3D2805CB1BB649A7C57A603B18E49283CFEE9FEFDFEA2965F351E49FDDD00000100890000029204470011004F40262703260D37034704040E0810020E0911090C270805070006110A081A13012E10291100191213B80145B321627E182B2B4EF43C4DFDE44E10E6003F3F4D3FC4FDC411123939011112393130005D1333153E0133321617152E0123220615112389AB15A46B05181D101B108892B4042FB9369B0203BE0302AF72FD980000020042FFD703B6044B002E002F012E408F38099805961299149815982A062824252736214621472447275624572766246726790C790D790E7623742474257426A61EA82C1303000B15052D042E13001A151B171C18152D142E280F0B6908262536250225220D0A042B1318C61C1D1307041D2E9A2B0B2F07090E100207002F212F1A1F18161827173E28260727281A310E1F27103E00272E193031B221A65D182B2B4EF44DEDF4FD394E10F64DFD3910F4FD3911123939392F111239113939003F3FEDED3FEDED111217397131304379404C012D022615251A26210E1F21000926072101032C002100052A0721011D121F21001B14182101200F22210021220E0D08270A21012625090A012D04210006290421001E111C210119161C2101002B2B2B2B103C103C2B103C103C2B012B2B2B2B2B2B2B2B2B81005D5D015D13161716333236353427262F01262726353436333217160723262726232206151417161F011617161514062322262701EF082544A864983D27738F894174DBB9F26B4302AA05263E99666945284E77C24269D9DEEFC70701B701505A3057575B4524161D24222A498198BC8E5A683D32474E40462A19131D2F2C45948FD0D9A002F900010017FFEF0209055A00180052B50D2E0AC00E01B8013F40250416391703060E0A111A17171A0301062900150E150F031F030203FC1619191AFC21677D182B2B4EF44DFD5D39C42F3CFD3C104E456544E6002F3F3F3C4DFD3CED10FDE431301333113315231114171633323637150E012322263511233533A8B6ABAB2615310D1E141F43277E5A9191055AFED593FD4538130B01028E0908816702C59300020080FFE303DE044900170018005E403AB814C81402091308141913191428067703D707070800050E0A00060D0A051D120B180718180B160D2E0A290C0B1A1A01291619191AD2216242182B2B4EF44DED4E10F63C4DFDE41112392F00
3F3FED3F3F3C391112393130005D015D0111141716333237363511331123370607062322272635112501381A3083BC4425B4AA0223346793E5532D01AF042FFD39523460A85A9D020EFBD19E3D2A5499528902D81A000001000B000003EA042F00060102402E4201C5010200670068026803670687048805A700 A802084700480245044A0586048905C704C80508492873280708B80109B321677E182B2B4B5279B8FF70B40105042004B80183B703036D1202010205B80183401E06066D120000010506040301010502030603000605040A0817171A03AF02BA018400000184B301AF0619194EF4184DFDE0E0FD194E456544E618003F3C3F173C1239011112391239074D2E2B104EE44D072E2B104EE44D2B4B51794025022912030304002912060605010502030603000605040A0817171A020403AF050001AF0619194EF4184DFD3939FD3939194E456544E618003F3C3F173C12390507102B07102B313001715D005D7113090133012301DC011E012BC5FE6CC0FE75042FFC980368FBD1042F000100000000000073F8B13B5F0F3CF501010800000000015F4E858000000000B53F1B40FEAAFC6E07F40834000000090001000000000000000100000629FE290000081FFEAAFEB307F400010000000000000000000000000000001205C700A102390000023900AF04730040047300C40473004204730052047300480239001C01C7008401C700890473003B0473007602AA00890400004202390017047300800400000B000000330033005200BF00E401770241030A0355038203A3041B048D04D305B30602065C06F2000000010000001200530007005B0006000200100010002B000007E80161000600014118008001A6009001A600A001A600030069018B0079018B0089018B0099018B00040089018B0099018B00A9018B00B9018BB2040840BA0179001A014A400B041F5414191F180A0B1FD2B80106B49E1FD918E3BB0119000D00E10119B20D0009410A01A0019F0064001F01A50025017A00480028019AB3296C1F60410A01A9007001A9008001A90003008001A9000101A9B21E321FBE012C00250401001F0126001E0401B61FE7312D1FE531B80201B21FC227B80401B21FC11EB80201400F1FC01D9E1FBF1D671FBE1D671FAB27B80401B21FAA29B80401B61FA91D6C1F931EB8019AB21F921DB80101B21F911DB80101B21F751DB80201B61F6D29961F6431B8019AB21F4C96B802ABB21F391DB80156400B1F3638211F351DE41F2F27B80801400B1F2D1D4C1F2A31CD1F241DB802ABB21F201EB8012540111F1C1D931F3A1D4C1F1E1D45273A1D4527BB01AA019B002A019BB2254A1FBA019B0025017AB349293896B8017BB348283125B8017A403648289629482725294
C1F252946272729482756C80784075B07410732072B072807260721071B071408120810080E080C080A08080807B801ACB23F1F06BB01AB003F001F01ABB308060805B801AEB23F1F04BB01AD003F001F01ADB70804080208000814B8FFE0B40000010014B801ABB41000000100B801ABB606100000010006B801ADB300000100B801AD401F04000001000410000001001002 000001000200000001000002010802004A00B0018DB806008516763F183F123E113946443E113946443E113946443E113946443E113946443E11394660443E11394660443E11394660442B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B18011DB0964B5358B0AA1D59B0324B5358B0FF1D592B2B2B2B2B2B2B2B182B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B74752B2B2B65422B2B4B5279B376706A66456523456023456560234560B08B766818B080622020B16A704565234520B003266062636820B003266165B070236544B06A234420B176664565234520B003266062636820B003266165B066236544B0762344B10066455458B166406544B27640764523614459B36242725D456523456023456560234560B089766818B080622020B172424565234520B003266062636820B003266165B042236544B072234420B1625D4565234520B003266062636820B003266165B05D236544B0622344B1005D455458B15D406544B262406245236144592B2B2B2B456953427374B8019A2045694B20B02853B049515A58B020615944B801A6204569447500 00>] def /CharStrings 18 dict dup begin /.notdef 0 def /space 1 def /period 2 def /zero 3 def /one 4 def /five 5 def /a 6 def /e 7 def /f 8 def /i 9 def /l 10 def /o 11 def /p 12 def /r 13 def /s 14 def /t 15 def /u 16 def /v 17 def end readonly def currentdict dup/FontName get exch definefont pop end %APLsfntEnd 42/FontType resourcestatus{pop pop true}{false}ifelse {currentfile 0(%APLT1End\n)/SubFileDecode filter flushfile}if /FontType 1 def /FontMatrix [ 0.00048828125 0 0 0.00048828125 0 0 ] def /FontBBox{-342 -914 2036 2100}def /UniqueID 4045371 def currentdict currentfile eexec 
54544758EC884CF30C3CD503CEDBFF3839C47C3C3333173232E3FDBFF439491DB843E1924E63AA7726BBB0485AB56D93D8C0906F647A47162891E73FFC2A9873C4B1EAC5EEBDFFC4D06084FBD84139DF4583C6E259D10699944D1068C9C45667DCCCFB9B7EA01B606435EDCBD273ABAC093D14085CCBAC149BD7382E842CFE0D7FE4FD2EF589A2471F6074A80A8B675C2F7A50D63AC1EF90D787BADD11633CB01CF6EE3B37AAF9078A69AC4740E9B6525D78BBD839551A1CB80DB8682FA5E87591BBD6EE8B946063A2A58D9CA3685AB305495DC5FB5747EB8A9A059C4976C0FE4EEAB1D56FF47F1E9664ED9F4A7DAB763AF92B2F6CF2FA7DEC24710E0B9096E30F772BA7FEA9BDBE496C42ED2CEB58F54E80BDF57CE7B4DB6CCFE7182F43BF93CCA0767AF95D62C5D2C3DC6AE1E6D139F51A2C63432117F1714C5566572EE9967A715420ABDCD1D7BD74F8450B89965FCC81C6ACA565C5F3CCF91D430D1F953E4F1A645300A98DD8C47CD64555F08F422340A85404EAE0D3229C4F9336B9470CACBD6BBF3395104750A915CC6EAAC197668267B8C62D2764C8CD69FD937CA3C924D997A0EDE7964BEB9EA2F92EF70C5E5DA0AA5567765E71F2B911B3C5586B741EEB93F3C73016EC16BFF283758900903D203992EFC8BAFAF13579C602F38C91BC64A62485D7C863002D39D4194FF17CC8AA420E752C449BD1094F4D1F95E92330FAC9EC5D2BB0F3D950D0D16D8276992C645A639E47286D415864D7E15A32D4F23E2A822D38CE6D0ABB183BEDA6D69A501F87E4D4694FFF59C09B82464A76F4B032EBC84BFA042CDC676C5699C0C0FA405A774B90D04D93EBACA046DF1E890B37511C9BF05FACB5D1D5DA5EB2FB80CE9C7A9BAD4973C846FD64CE64755615771C50249CFFF5B0C699F5134833CF3FB3371439CD5011C20DBAC8F4C38D192EF261EC7D6523FDCD23EEBA5AA9561ABC8F969AAD496D5719EE110B8F55E95E7E5F7C4AA58FFDC3EF25891523FEF64F28AA3EFD70954DAF5DF3C63352D328C83AF5126FFEFDBB00151251FF9DA997F2B5F37E8AFB97314E06A4DDB7AEAECB7FE97B6BC6C438086F6491CE83508A0747479DBD797BBFFCF2BA5C7B972F3CC243AEF4CB9B8753498EFC77B888DA80FFD2B6627C0CEC0BD48204A325816A5B00ABFA3A8DBB6C66FAE83570BCAAE73427C690058D60DB151701B470929FDDBC993A059B849E74F10CA290DA29B32E2C8139E10F0138B0D99E8E8F1F6E05D2F83E29CFE3C17D57852DCD8B1E62A1594D44931955CB0C5846F53103AEB596EE1B77B9FB0C1C1EDB8657597D52460F42AB2254460F9D00E6A3BCFAA3E0E081C8FB35FC963E39C4FC5EA3E96A6A780C37D2FCBDCB059E38C3E9F8EE34C38C96BE4889C1D8D93AADFCB34960A645
B52AC08C23EF0478CEB9EDCF64003DD35D9CA5EC6B75246A 20A654BCC23643E8ED01C05BE66F129618A21F268657BD4B15278A7498C9C3F62AF9A17CC7068B78124247F6F8D63471D6A821307285033957E033127A6FF5901EA63C149A017266810D3C04AE82089A92767AE6D671486333E220CD66664B26AF018B34CE5CA8F4931C28ECE352846EC355F676FB48E231A6FB1F6E40D941C6E7C0A074590BFF86CB475DAFD22217F88B5802AF5E31A67E3A0086314352EBB81C8C7B451F5D2E2B2E0EC6A55A8B31B9188CFB2E0FF32EA10B02DC7AF9A7638F1AD5892AAC82A068E900DCEE87A8B3634792138C39DD1D1D6059E27722F8496F6BD2651B3B58A4ED1BC6D57812E3756F4CC285977A8E7A2A8E7BA975C9E1377592F79BBDC4985D155C58E367528A69A9A919CF0965ADF9CC061C0ACCAEB60C832E4E78E0A42210C40D9F8FCF7EFEE149069C102D2BD5D809C0F0CA1DFE63C9C521CE20CE5B826367580D4011268DDA5245F0543BEBFDC575B475BFC8FE1FF17C994692FB572661E55340B3D47FA7D011864EDB7F0D6C6C226833221A727F6ED589381FB300E6AE413141EE76E7CDB88C0F13BFB657A2551ADE2721EEFD3259E36DDB9DA66D1C528A2CCE227DD0D8266F35B070870F403DA851ABB97C15B6C40D4DB5B9E9FBDED7A37B5F2AA2C1B3AE559D5C1B26EFDB923D58E86CD51A12055F804A655C8DC478AF36C8377D4821C34A00F3EC8656E45BAB0755B89DAC70C423B3AF95FA4A1851E1E9ED73471CB82625C111068325EE4C920C2133C52B4700FD584117B84B1DF27F1FFC0AC2317BA118CAE9B9E7FFE012B9D413BA00BDB936B349C51A7529C5708C019A3770BBD07B0F247852100D035289BCFBA3A57A2430F928B19BBEA88997447879B7CE446914CA57F263E5D5033A2E6046A635DF2D7B24FB634AAC4244DE44637B635ACB2D0F2B98B3682F214B24C58993B0D821067D7E92547DDF48102CE123814750D879D491AB9632BF8B9094272AABBCB4B9611FE0F1A9F47A320BA45BCD5821DE1A6FE26AD3ED8FCC39D98093B5EB8D48435CF79E19DAE82D339EE7EDD31995974AD599316ED2E34142E91EF9AE48028DEE883B8D3CAE0047A560AE3678B4524BA977FAB23301A623E1925628706BE5DAC930D03151E8FB694704B1ABB054751A7F33B516F5D80F4FABAE242526AE298A1729AA5292E1BEE3DD94AA08A68E7938A8563A6D1ABC2E75C53163D973B0E7CFD672DCA2377A1473439DFC5F9AEFE94549095E6FDE6D875669A5BD6050A95532CC7090D973629044D9A1C0AF01EB90ED4F2AE87D6DE94A3CA1E3C5D6ECB82FBEBE7390438B8FA61B744C8EFB883F737252FB925C9AFF25C96DA486DADA30B4B35E91F997789729EF2EA18BE92F345CA07F852E8
4497E902EEA510EF9B54570614A80DEA9B855F2E8CDAD54E74E15A9C7CAFE82EF1502628B0C1C8D10F4EC470A9C402DDA 05DD5F70FDB406E6E018020308EFAD3B77E314D66E9B512EED08B73679CA4417442BED4ED14183F7A93647DD6A9CD8A60288686D368A4F2C4E2A85C222D6708BA15A7C04DB0E41F9EF7A02D1051BB5233975043CD05DD3086566320134B151B64CBAF9B207F52E6B62824698943BA20560E65352652F15A75945893CD4CE86F46F6DE7C113BCE630A4FB286ACBE01FA20D4C0E725CAB5F305825DE338EA1E1625680EB80B00CAA471744BB810604537FD90085109A896A78FFCCBE710671E0B09877CB905740830AC7A99AE8779B25A54F79479C66A105C77F47DD2856440999408AEA27D239662B9CF55DD46D5D65631C934FA0A5439044D75A6D2C6BE999E0AA42AB2732F3229C53D31E7536F642B5501E716735F5CD0B5B4264BC2644141EA6635AE9503D653DDC396F3DCA6AA1A905B4874EE4A5FFA66338189C9C405899630F03BD96C247F6978952A4F7DC96125D438E903843B3CFCB283EC4BDB3FC1E3FB4FDE49172B2317438273712EE9D9312E0F173FC45FB389868BC76F7F531C81C2B6DF94E0AE7E192D7FEB351DF144952A0CA4D55C3F2282E028F39F1137E75D85B7B56BBA98D6CFEE3921F6106BA0D293E1DC05367D6418607A1504423422A8C4D0701AB5DBC2CB87374E0ACE6A38C9E2412AA5CCABECEE4CEEE9B7EE1FE9052919F2FAC34CABCB82187F776DCC5B576E1DDDC66418E16FB13353311E2D65316B50F523022C5FD131011E50422DCF4F28D6E9405B3FDB9118E66D9E2F25C03D28B0D76D40437A40EA67083E45EA24340FABAC7DB92CBEF750639E6925A4781612EE41B7B19ECE8186A1E2BA67BB8AAF2EC73325794D92923A56211425B484ABC84FEFC0DEC22263FCAE718D7FE83F1FC8CBECAE2D23F7A1EF97E87E6AFE8DF9E98AA7B9CDABC464D280F6DDD4F4DFA113C103791485B30760EBAE55414AEE91447B9FECCCC8A74FD6B8E7FEC3FCF39D1566864AE850E33508E69C4DA9FC96D5E7882B7F16E5FC7EE558B1BD39B8895E40485A979F2C35A13BD89353BEA8B4F1EA772A8486381EB553277F80839A4B9732E7180AD5450 0000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000 
0000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000 cleartomark end %APLT1End %RBIEndFontSubset /NTZMIF+Helvetica cguidfix /F1.1/NTZMIF+Helvetica renmfont [ /CIEBasedABC 4 dict dup begin /WhitePoint [ 0.9505 1.0000 1.0891 ] def /DecodeABC [ { 1.8008 exp } bind { 1.8008 exp } bind { 1.8008 exp } bind ] def /MatrixABC [ 0.4294 0.2332 0.0202 0.3278 0.6737 0.1105 0.1933 0.0938 0.9580 ] def /RangeLMN [ 0.0 0.9505 0.0 1.0000 0.0 1.0891 ] def end ] /Cs1 exch/ColorSpace dr pop [ /CIEBasedA 5 dict dup begin /WhitePoint [ 0.9505 1.0000 1.0891 ] def /DecodeA { { 1.8008 exp } bind exec} bind def /MatrixA [ 0.9642 1.0000 0.8249 ] def /RangeLMN [ 0.0 2.0000 0.0 2.0000 0.0 2.0000 ] def /DecodeLMN [ { 0.9857 mul} bind { 1.0000 mul} bind { 1.3202 mul} bind ] def end ] /Cs2 exch/ColorSpace dr pop %%EndPageSetup 1 J 1 j 0.60000002 i /Cs1 SC 0 0 0 sc q 0 0 292.98901 267.12601 rc 1 0 0 -1 -87.0009 334.1257 cm 144 72 m 144 288 l 360 288 l S /Cs2 SC 0 sc 0 i 1 0 0 -1 174.4991 8.0007019 cm /F1.1[ 12 0 0 -12 0 0]sf -46.69043 4.5 m (!"#$%&'\($\)*\)+%&,"*%)[ 3.333984 6.673828 2.666016 6.000000 6.673828 3.333984 6.673828 6.673828 6.000000 2.666016 3.333984 2.666016 6.000000 6.673828 3.333984 3.996094 6.673828 3.333984 6.673828 ] xS 0.00000001 1 1 -0.00000001 7.9990997 161.0007 cm -44.355469 4.5 m (*,-%&'\($\)*\)+%&,"*%)[ 3.333984 3.996094 6.673828 6.673828 3.333984 6.673828 6.673828 6.000000 2.666016 3.333984 2.666016 6.000000 6.673828 3.333984 3.996094 6.673828 3.333984 6.673828 ] xS 0.60000002 i /Cs1 SC 0 0 0 sc 1 0 0 -1 -87.0009 334.1257 cm 144 288 m 144 297 l S 252 288 m 252 297 l S 360 288 m 360 297 l S 135 288 m 144 288 l S 135 180 m 144 180 l S 135 72 m 144 72 l S /Cs2 SC 0 sc 0 i 1 0 0 -1 61.9991 25.125702 cm -8.3408203 4.5 m (./.)[ 6.673828 3.333984 6.673828 ] xS 1 0 0 -1 169.9991 25.125702 cm -8.3408203 4.5 m (./0)[ 6.673828 3.333984 6.673828 
] xS 1 0 0 -1 277.99908 25.125702 cm -8.3408203 4.5 m (1/.)[ 6.673828 3.333984 6.673828 ] xS 1 0 0 -1 34.9991 43.125702 cm -8.3408203 4.5 m (./.)[ 6.673828 3.333984 6.673828 ] xS 1 0 0 -1 34.9991 152.1257 cm -8.3408203 4.5 m (./0)[ 6.673828 3.333984 6.673828 ] xS 1 0 0 -1 34.9991 259.1257 cm -8.3408203 4.5 m (1/.)[ 6.673828 3.333984 6.673828 ] xS 0.60000002 i /Cs1 SC 0.80000001 0.80000001 0.80000001 sc CM 132.9991 222.1257 m 273.99908 262.1257 l 273.99908 46.125702 l 56.9991 46.125702 l 56.9991 46.125702 l h 132.9991 222.1257 m f 1 M 0 0 0 sc 1 0 0 -1 -87.0009 334.1257 cm 220 112 m 361 72 l 361 288 l 144 288 l 144 288 l h 220 112 m S CM 136.68108 224.80768 m 138.43845 223.05032 138.43845 220.20108 136.68108 218.44373 c 134.92372 216.68636 132.07448 216.68636 130.31712 218.44373 c 128.55975 220.20108 128.55975 223.05032 130.31712 224.80768 c 132.07448 226.56505 134.92372 226.56505 136.68108 224.80768 c f 10 M 1 0 0 -1 -87.0009 334.1257 cm 223.68198 109.31802 m 225.43935 111.07538 225.43935 113.92462 223.68198 115.68198 c 221.92462 117.43935 219.07538 117.43935 217.31802 115.68198 c 215.56065 113.92462 215.56065 111.07538 217.31802 109.31802 c 219.07538 107.56065 221.92462 107.56065 223.68198 109.31802 c S [ 4 4 ] 0 d 145 288 m 361 72 l 361 72 l S ep end %%Trailer %%EOF LanguageMachines-timbl-642727d/docs/timbl.1000066400000000000000000000155771451477526200204500ustar00rootroot00000000000000.TH timbl 1 "2017 November 9" .SH NAME timbl \- Tilburg Memory Based Learner .SH SYNOPSIS timbl [options] timbl \-f data\-file \-t test\(hyfile .SH DESCRIPTION TiMBL is an open source software package implementing several memory\(hybased learning algorithms, among which IB1\(hyIG, an implementation of k\(hynearest neighbor classification with feature weighting suitable for symbolic feature spaces, and IGTree, a decision\(hytree approximation of IB1\(hyIG. All implemented algorithms have in common that they store some representation of the training set explicitly in memory. 
During testing, new cases are classified by extrapolation from the most similar stored cases. .SH OPTIONS .B \-a or .B \-a .RS determines the classification algorithm. Possible values are: .B 0 or .B IB the IB1 (k\(hyNN) algorithm (default) .B 1 or .B IGTREE a decision\(hytree\(hybased approximation of IB1 .B 2 or .B TRIBL a hybrid of IB1 and IGTREE .B 3 or .B IB2 an incremental editing version of IB1 .B 4 or .B TRIBL2 a non\(hyparameteric version of TRIBL .RE .B \-b n .RS number of lines used for bootstrapping (IB2 only) .RE .B \-B n .RS number of bins used for discretization of numeric feature values (Default B=20) .RE .BR \-\-Beam = .RS limit +v db output to n highest\(hyvote classes .RE .BR \-\-clones = .RS number f threads to use for parallel testing .RE .B \-c n .RS clipping frequency for prestoring MVDM matrices .RE .B +D .RS store distributions on all nodes (necessary for using +v db with IGTree, but wastes memory otherwise) .RE .B \-\-Diversify .RS rescale weight (see docs) .RE .B \-d val .RS weigh neighbors as function of their distance: Z : equal weights to all (default) ID : Inverse Distance IL : Inverse Linear ED:a : Exponential Decay with factor a (no whitespace!) ED:a:b : Exponential Decay with factor a and b (no whitespace!) .RE .B \-e n .RS estimate time until n patterns tested .RE .B \-f file .RS read from data file 'file' OR use filenames from 'file' for cross validation test .RE .B \-F format .RS assume the specified input format (Compact, C4.5, ARFF, Columns, Binary, Sparse ) .RE .B \-G normalization .RS normalize distributions (+v db option only) Supported normalizations are: .B Probability or .B 0 normalize between 0 and 1 .BR addFactor : or .BR 1 : add f to all possible targets, then normalize between 0 and 1 (default f=1.0). 
.B logProbability or .B 2 Add 1 to the target Weight, take the 10Log and then normalize between 0 and 1 .RE .B +H or .B \-H .RS write hashed trees (default +H) .RE .B \-i file .RS read the InstanceBase from 'file' (skips phase 1 & 2 ) .RE .B \-I file .RS dump the InstanceBase in 'file' .RE .B \-k n .RS search 'n' nearest neighbors (default n = 1) .RE .B \-L n .RS set value frequency threshold to back off from MVDM to Overlap at level n .RE .B \-l n .RS fixed feature value length (Compact format only) .RE .B \-m string .RS use feature metrics as specified in 'string': The format is : GlobalMetric:MetricRange:MetricRange e.g.: mO:N3:I2,5\-7 C: cosine distance. (Global only. numeric features implied) D: dot product. (Global only. numeric features implied) DC: Dice coefficient O: weighted overlap (default) E: Euclidian distance L: Levenshtein distance M: modified value difference J: Jeffrey divergence S: Jensen\(hyShannon divergence N: numeric values I: Ignore named values .RE .BR \-\-matrixin =file .RS read ValueDifference Matrices from file 'file' .RE .BR \-\-matrixout =file .RS store ValueDifference Matrices in 'file' .RE .B \-n file .RS create a C4.5\-style names file 'file' .RE .B \-M n .RS size of MaxBests Array .RE .B \-N n .RS number of features (default 2500) .RE .B \-o s .RS use s as output filename .RE .BR \-\-occurrences = .RS The input file contains occurrence counts (at the last position) value can be one of: .B train , .B test or .B both .RE .B \-O path .RS save output using 'path' .RE .B \-p n .RS show progress every n lines (default p = 100,000) .RE .B \-P path .RS read data using 'path' .RE .B \-q n .RS set TRIBL threshold at level n .RE .B \-R n .RS solve ties at random with seed n .RE .B \-s .RS use the exemplar weights from the input file .RE .B \-s0 .RS ignore the exemplar weights from the input file .RE .B \-T n .RS use feature n as the class label. 
(default: the last feature) .RE .B \-t file .RS test using 'file' .RE .B \-t leave_one_out .RS test with the leave\(hyone\(hyout testing regimen (IB1 only). you may add \-\-sloppy to speed up leave\(hyone\(hyout testing (but see docs) .RE .B \-t cross_validate .RS perform cross\(hyvalidation test (IB1 only) .RE .B \-t @file .RS test using files and options described in 'file' Supported options: d e F k m o p q R t u v w x % \- .RE .B \-\-Treeorder =value n .RS ordering of the Tree: DO: none GRO: using GainRatio IGO: using InformationGain 1/V: using 1/# of Values G/V: using GainRatio/# of Valuess I/V: using InfoGain/# of Valuess X2O: using X\(hysquare X/V: using X\(hysquare/# of Values SVO: using Shared Variance S/V: using Shared Variance/# of Values GxE: using GainRatio * SplitInfo IxE: using InformationGain * SplitInfo 1/S: using 1/SplitInfo .RE .B \-u file .RS read value\(hyclass probabilities from 'file' .RE .B \-U file .RS save value\(hyclass probabilities in 'file' .RE .B \-V .RS Show VERSION .RE .B +v level or .B \-v level .RS set or unset verbosity level, where level is: s: work silently o: show all options set b: show node/branch count and branching factor f: show calculated feature weights (default) p: show value difference matrices e: show exact matches as: show advanced statistics (memory consuming) cm: show confusion matrix (implies +vas) cs: show per\(hyclass statistics (implies +vas) cf: add confidence to output file (needs \-G) di: add distance to output file db: add distribution of best matched to output file md: add matching depth to output file. k: add a summary for all k neigbors to output file (sets \-x) n: add nearest neigbors to output file (sets \-x) You may combine levels using '+' e.g. 
+v p+db or \-v o+di .RE .B \-w n .RS weighting 0 or nw: no weighting 1 or gr: weigh using gain ratio (default) 2 or ig: weigh using information gain 3 or x2: weigh using the chi\(hysquare statistic 4 or sv: weigh using the shared variance statistic 5 or sd: weigh using standard deviation. (all features must be numeric) .RE .B \-w file .RS read weights from 'file' .RE .B \-w file:n .RS read weight n from 'file' .RE .B \-W file .RS calculate and save all weights in 'file' .RE .B +% or .B \-% .RS do or don't save test result (%) to file .RE .B +x or .B \-x .RS do or don't use the exact match shortcut (IB1 and IB2 only, default is \-x) .RE .BR \-X " file" .RS dump the InstanceBase as XML in 'file' .RE .SH BUGS possibly .SH AUTHORS Ko van der Sloot Timbl@uvt.nl Antal van den Bosch Timbl@uvt.nl .SH SEE ALSO .BR timblserver (1) LanguageMachines-timbl-642727d/include/000077500000000000000000000000001451477526200177335ustar00rootroot00000000000000LanguageMachines-timbl-642727d/include/Makefile.am000066400000000000000000000000771451477526200217730ustar00rootroot00000000000000# $Id$ # $URL$ AUTOMAKE_OPTIONS = foreign SUBDIRS = timbl LanguageMachines-timbl-642727d/include/timbl/000077500000000000000000000000001451477526200210425ustar00rootroot00000000000000LanguageMachines-timbl-642727d/include/timbl/.gitignore000066400000000000000000000000031451477526200230230ustar00rootroot00000000000000*~ LanguageMachines-timbl-642727d/include/timbl/BestArray.h000066400000000000000000000047541451477526200231210ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. 
timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #ifndef TIMBL_BESTARRAY_H #define TIMBL_BESTARRAY_H #include #include #include #include "unicode/unistr.h" #include "libxml/parser.h" #include "ticcutils/json.hpp" #include "timbl/Targets.h" namespace Timbl { class neighborSet; class BestRec { friend std::ostream& operator<< ( std::ostream&, const BestRec * ); public: BestRec(); BestRec( const BestRec& ) = delete; // forbid copies BestRec& operator=( const BestRec& ) = delete; // forbid copies ~BestRec(); size_t totalBests() const { return aggregateDist.totalSize(); }; double bestDistance; ClassDistribution aggregateDist; std::vector bestDistributions; std::vector bestInstances; private: }; class BestArray { friend std::ostream& operator<< ( std::ostream&, const BestArray& ); public: BestArray(): _storeInstances(false), _showDi(false), _showDb(false), size(0), maxBests(0) {}; ~BestArray(); void init( unsigned int, unsigned int, bool, bool, bool ); double addResult( double, const ClassDistribution *, const icu::UnicodeString& ); void initNeighborSet( neighborSet& ) const; void addToNeighborSet( neighborSet& , size_t ) const; xmlNode *toXML() const; nlohmann::json to_JSON() const; nlohmann::json record_to_json( const BestRec *, size_t ) const; private: bool _storeInstances; bool _showDi; bool _showDb; unsigned int size; unsigned int maxBests; std::vector bestArray; }; } #endif // TIMBL_BESTARRAY_H LanguageMachines-timbl-642727d/include/timbl/Choppers.h000066400000000000000000000127471451477526200230110ustar00rootroot00000000000000#ifndef TIMBL_CHOPPERS_H 
#define TIMBL_CHOPPERS_H /* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #include #include #include // for ostream #include // for vector #include "unicode/unistr.h" #include "unicode/ustream.h" #include "timbl/Types.h" namespace Timbl{ static const icu::UnicodeString DefaultSparseString = "0.0000E-17"; class Chopper { public: Chopper(): vSize(0) {}; virtual ~Chopper() {}; virtual bool chop( const icu::UnicodeString&, size_t ) = 0; const icu::UnicodeString& operator[]( int i ) const { return choppedInput[i]; } const icu::UnicodeString& getField( size_t i ) const { return choppedInput[i]; }; virtual double getExW() const { return -1; }; virtual int getOcc() const { return 1; }; virtual icu::UnicodeString getString() const = 0; void print( std::ostream& os ){ os << getString(); }; void swapTarget( size_t target_pos ){ icu::UnicodeString tmp = choppedInput[target_pos]; for ( size_t i = target_pos+1; i < vSize; ++i ){ choppedInput[i-1] = choppedInput[i]; } choppedInput[vSize-1] = tmp; } static Chopper *create( InputFormatType , bool, int, bool ); static InputFormatType getInputFormat( const icu::UnicodeString&, bool=false ); static size_t countFeatures( const icu::UnicodeString&, 
InputFormatType, int, bool=false ); protected: virtual void init( const icu::UnicodeString&, size_t, bool ); size_t vSize; icu::UnicodeString strippedInput; std::vector choppedInput; }; class ExChopper: public virtual Chopper { public: ExChopper(): Chopper(), exW(-1.0) {}; double getExW() const override { return exW; }; protected: void init( const icu::UnicodeString&, size_t, bool ) override; double exW; }; class OccChopper: public virtual Chopper { public: OccChopper(): Chopper(), occ(-1) {}; int getOcc() const override { return occ; }; protected: void init( const icu::UnicodeString&, size_t, bool ) override; int occ; }; class C45_Chopper : public virtual Chopper { public: bool chop( const icu::UnicodeString&, size_t ) override; icu::UnicodeString getString() const override; }; class C45_ExChopper : public C45_Chopper, public ExChopper { }; class C45_OccChopper : public C45_Chopper, public OccChopper { }; class ARFF_Chopper : public C45_Chopper { public: bool chop( const icu::UnicodeString&, size_t ) override; }; class ARFF_ExChopper : public C45_ExChopper { }; class ARFF_OccChopper : public C45_OccChopper { }; class Bin_Chopper : public virtual Chopper { public: bool chop( const icu::UnicodeString&, size_t ) override; icu::UnicodeString getString() const override; }; class Bin_ExChopper : public Bin_Chopper, public ExChopper { }; class Bin_OccChopper : public Bin_Chopper, public OccChopper { }; class Compact_Chopper : public virtual Chopper { public: explicit Compact_Chopper( int L ): fLen(L){}; bool chop( const icu::UnicodeString&, size_t ) override; icu::UnicodeString getString() const override; private: int fLen; Compact_Chopper(); }; class Compact_ExChopper : public Compact_Chopper, public ExChopper { public: explicit Compact_ExChopper( int L ): Compact_Chopper( L ){}; private: Compact_ExChopper(); }; class Compact_OccChopper : public Compact_Chopper, public OccChopper { public: explicit Compact_OccChopper( int L ): Compact_Chopper( L ){}; private: 
Compact_OccChopper(); }; class Columns_Chopper : public virtual Chopper { public: bool chop( const icu::UnicodeString&, size_t ) override; icu::UnicodeString getString() const override; }; class Columns_ExChopper : public Columns_Chopper, public ExChopper { }; class Columns_OccChopper : public Columns_Chopper, public OccChopper { }; class Tabbed_Chopper : public virtual Chopper { public: bool chop( const icu::UnicodeString&, size_t ) override; icu::UnicodeString getString() const override; }; class Tabbed_ExChopper : public Tabbed_Chopper, public ExChopper { }; class Tabbed_OccChopper : public Tabbed_Chopper, public OccChopper { }; class Sparse_Chopper : public virtual Chopper { public: bool chop( const icu::UnicodeString&, size_t ) override; icu::UnicodeString getString() const override; }; class Sparse_ExChopper : public Sparse_Chopper, public ExChopper { }; class Sparse_OccChopper : public Sparse_Chopper, public OccChopper { }; } #endif // TIMBL_CHOPPERS_H LanguageMachines-timbl-642727d/include/timbl/Common.h000066400000000000000000000035621451477526200224510ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . 
For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #ifndef TIMBL_COMMON_H #define TIMBL_COMMON_H #include #include #include #include #include // for isspace #include // for string namespace Common { const double Epsilon = std::numeric_limits::epsilon(); // smallest x so that 1+x != 1 const int DEFAULT_MAX_FEATS = 2500; // default maximun number of Features std::string Version(); std::string VersionName(); std::string BuildInfo(); std::string VersionInfo( bool ); // obsolete inline int look_ahead( std::istream &is ){ while( is ){ int nc=is.peek(); if ( !isspace(nc) ) return nc; is.get(); } return -1; } inline void skip_spaces( std::istream &is ){ while( is ){ int nc=is.peek(); if ( !isspace(nc) ) return; is.get(); } } inline double Log2(double number){ // LOG base 2. if ( fabs(number) < Epsilon) return(0.0); return log2(number); } } #endif LanguageMachines-timbl-642727d/include/timbl/Features.h000066400000000000000000000164431451477526200230010ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . 
For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #ifndef TIMBL_FEATURES_H #define TIMBL_FEATURES_H #include #include #include #include "timbl/MsgClass.h" #include "timbl/Matrices.h" #include "ticcutils/Unicode.h" namespace Hash { class UnicodeHash; } namespace Timbl { class ValueClass; class TargetValue; class Targets; class metricClass; class SparseValueProbClass { friend std::ostream& operator<< ( std::ostream&, SparseValueProbClass * ); public: typedef std::map< size_t, double > IDmaptype; typedef IDmaptype::const_iterator IDiterator; explicit SparseValueProbClass( size_t d ): dimension(d) {}; void Assign( const size_t i, const double d ) { vc_map[i] = d; }; void Clear() { vc_map.clear(); }; IDiterator begin() const { return vc_map.begin(); }; IDiterator end() const { return vc_map.end(); }; private: IDmaptype vc_map; size_t dimension; }; enum FeatVal_Stat { Unknown, Singleton, SingletonNumeric, NumericValue, NotNumeric }; class FeatureValue: public ValueClass { friend class Feature; friend class Feature_List; friend struct D_D; public: explicit FeatureValue( const icu::UnicodeString& ); FeatureValue( const icu::UnicodeString&, size_t ); ~FeatureValue(); void ReconstructDistribution( const ClassDistribution& vd ) { TargetDist.Merge( vd ); _frequency = TargetDist.totalSize(); }; bool isUnknown() const { return _index == 0; }; SparseValueProbClass *valueClassProb() const { return ValueClassProb; }; private: SparseValueProbClass *ValueClassProb; ClassDistribution TargetDist; }; class Feature: public MsgClass { friend class MBLClass; friend class Feature_List; public: explicit Feature( Hash::UnicodeHash *T ); ~Feature(); bool Ignore() const { return ignore; }; void Ignore( const bool val ){ ignore = val; }; bool setMetricType( const MetricType ); MetricType getMetricType() const; double Weight() const { return weight; }; void SetWeight( const double w ) { weight = w; }; double 
InfoGain() const { return info_gain; }; void InfoGain( const double w ){ info_gain = w; }; double SplitInfo() const { return split_info; }; void SplitInfo( const double w ){ split_info = w; }; double GainRatio() const { return gain_ratio; }; void GainRatio( const double w ){ gain_ratio = w; }; double ChiSquare() const { return chi_square; }; void ChiSquare( const double w ){ chi_square = w; }; double SharedVariance() const { return shared_variance; }; void SharedVariance( const double w ){ shared_variance = w; }; double StandardDeviation() const { return standard_deviation; }; void StandardDeviation( const double w ){ standard_deviation = w; }; double Min() const { return n_min; }; void Min( const double val ){ n_min = val; }; double Max() const { return n_max; }; void Max( const double val ){ n_max = val; }; double fvDistance( const FeatureValue *, const FeatureValue *, size_t=1 ) const; FeatureValue *add_value( const icu::UnicodeString&, TargetValue *, int=1 ); FeatureValue *add_value( size_t, TargetValue *, int=1 ); FeatureValue *Lookup( const icu::UnicodeString& ) const; bool decrement_value( FeatureValue *, const TargetValue * ); bool increment_value( FeatureValue *, const TargetValue * ); size_t EffectiveValues() const; size_t TotalValues() const; bool isNumerical() const; bool isStorableMetric() const; bool AllocSparseArrays( size_t ); void InitSparseArrays(); bool ArrayRead(){ return vcpb_read; }; bool matrixPresent( bool& ) const; size_t matrix_byte_size() const; bool store_matrix( int = 1 ); void clear_matrix(); bool fill_matrix( std::istream& ); void print_matrix( std::ostream&, bool = false ) const; void print_vc_pb_array( std::ostream& ) const; bool read_vc_pb_array( std::istream & ); FeatVal_Stat prepare_numeric_stats(); void Statistics( double, const Targets&, bool ); void NumStatistics( double, const Targets&, int, bool ); void ClipFreq( size_t f ){ matrix_clip_freq = f; }; size_t ClipFreq() const { return matrix_clip_freq; }; SparseSymetricMatrix 
*metric_matrix; private: Feature( const Feature& ); Feature& operator=( const Feature& ); Hash::UnicodeHash *TokenTree; metricClass *metric; bool ignore; bool numeric; bool vcpb_read; enum ps_stat{ ps_undef, ps_failed, ps_ok, ps_read }; enum ps_stat PrestoreStatus; MetricType Prestored_metric; void delete_matrix(); double entropy; double info_gain; double split_info; double gain_ratio; double chi_square; double shared_variance; double standard_deviation; size_t matrix_clip_freq; std::vector n_dot_j; std::vector n_i_dot; double n_min; double n_max; double weight; void Statistics( double ); void NumStatistics( std::vector&, double ); void ChiSquareStatistics( std::vector&, const Targets& ); void ChiSquareStatistics( const Targets& ); void SharedVarianceStatistics( const Targets&, int ); void StandardDeviationStatistics(); std::vector values_array; std::unordered_map< size_t, FeatureValue *> reverse_values; bool is_reference; }; class Feature_List: public MsgClass { friend class MBLClass; public: Feature_List(): _eff_feats(0), _num_of_feats(0), _num_of_num_feats(0), _feature_hash(0), _is_reference(false) { } explicit Feature_List( Hash::UnicodeHash *hash ): Feature_List() { _feature_hash = hash; } Feature_List &operator=( const Feature_List& ); ~Feature_List(); void init( size_t, const std::vector& ); Hash::UnicodeHash *hash() const { return _feature_hash; }; size_t effective_feats(){ return _eff_feats; }; Feature *operator[]( size_t i ) const { return feats[i]; }; void write_permutation( std::ostream & ) const; void calculate_permutation( const std::vector& ); size_t _eff_feats; size_t _num_of_feats; size_t _num_of_num_feats; std::vector feats; std::vector perm_feats; std::vector permutation; private: Hash::UnicodeHash *_feature_hash; bool _is_reference; }; } // namespace Timbl #endif // TIMBL_FEATURES_H LanguageMachines-timbl-642727d/include/timbl/GetOptClass.h000066400000000000000000000060361451477526200234100ustar00rootroot00000000000000/* Copyright (c) 1998 - 
2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #ifndef TIMBL_GETOPTCLASS_H #define TIMBL_GETOPTCLASS_H #include #include namespace TiCC { class CL_Options; } namespace Timbl { class TimblExperiment; class GetOptClass: public MsgClass { public: explicit GetOptClass( const TiCC::CL_Options& ); GetOptClass& operator=( const GetOptClass& ) = delete; // forbid copies virtual ~GetOptClass(); GetOptClass *Clone( std::ostream * = 0 ) const; bool parse_options( const TiCC::CL_Options&, const int=0 ); void set_default_options( const int=0 ); bool definitive_options( TimblExperiment * ); AlgorithmType Algo() const { return local_algo; }; int MaxFeatures() const { return MaxFeats; }; VerbosityFlags getVerbosity() { return myVerbosity; }; private: GetOptClass( const GetOptClass& ); AlgorithmType local_algo; MetricType local_metric; OrdeningType local_order; WeightType local_weight; InputFormatType LocalInputFormat; DecayType local_decay; double local_decay_alfa; double local_decay_beta; normType local_normalisation; double local_norm_factor; int MaxFeats; int target_pos; int no_neigh; int mvd_limit; int estimate; int maxbests; int clip_freq; int clones; int BinSize; int BeamSize; int bootstrap_lines; int f_length; int 
local_progress; int seed; int threshold; int igThreshold; VerbosityFlags myVerbosity; bool opt_init; bool opt_changed; bool do_exact; bool do_hashed; bool min_present; bool N_present; bool keep_distributions; bool do_sample_weights; bool do_ignore_samples; bool do_ignore_samples_test; bool do_query; bool do_all_weights; bool do_sloppy_loo; bool do_silly; bool do_diversify; std::vectormetricsArray; std::ostream *parent_socket_os; std::string inPath; std::string outPath; int occIn; void Error( const std::string& ) const override; inline bool parse_range( std::string&, std::string::iterator&, MetricType ); inline bool parse_metrics( const std::string&, MetricType& ); }; } #endif LanguageMachines-timbl-642727d/include/timbl/IBtree.h000066400000000000000000000237111451477526200223710ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . 
For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #ifndef TIMBL_IBTREE_H #define TIMBL_IBTREE_H #include #include "ticcutils/XMLtools.h" #include "timbl/MsgClass.h" //#define IBSTATS namespace Hash { class UnicodeHash; } namespace Timbl { class IB_InstanceBase; class IG_InstanceBase; class TRIBL_InstanceBase; class TRIBL2_InstanceBase; class Feature; class FeatureValue; class Instance; class Feature_List; class Targets; class TargetValue; class ClassDistribution; class WClassDistribution; class IBtree { friend class InstanceBase_base; friend class IB_InstanceBase; friend class IG_InstanceBase; friend class TRIBL_InstanceBase; friend class TRIBL2_InstanceBase; friend std::ostream &operator<<( std::ostream&, const IBtree& ); friend std::ostream &operator<<( std::ostream&, const IBtree * ); friend xmlNode *to_xml( IBtree *pnt ); friend int count_next( const IBtree * ); public: const TargetValue* targetValue() const { return TValue; }; private: FeatureValue *FValue; const TargetValue *TValue; ClassDistribution *TDistribution; IBtree *link; IBtree *next; IBtree(); explicit IBtree( FeatureValue * ); IBtree( const IBtree& ) = delete; // forbid copies IBtree& operator=( const IBtree& ) = delete; // forbid copies ~IBtree(); IBtree *Reduce( const TargetValue *, unsigned long&, long ); #ifdef IBSTATS static inline IBtree *add_feat_val( FeatureValue *, unsigned int&, IBtree *&, unsigned long& ); #else static inline IBtree *add_feat_val( FeatureValue *, IBtree *&, unsigned long& ); #endif inline ClassDistribution *sum_distributions( bool ); inline IBtree *make_unique( const TargetValue *, unsigned long& ); void cleanDistributions(); void re_assign_defaults( bool, bool ); void assign_defaults( bool, bool, size_t ); void redo_distributions(); void countBranches( unsigned int, std::vector&, std::vector& ); const ClassDistribution *exact_match( const Instance& ) const; protected: const IBtree 
*search_node( const FeatureValue * ) const; }; typedef std::unordered_map FI_map; class InstanceBase_base: public MsgClass { friend class IG_InstanceBase; friend class TRIBL_InstanceBase; friend class TRIBL2_InstanceBase; InstanceBase_base( const InstanceBase_base& ) = delete; // forbid copies InstanceBase_base& operator=( const InstanceBase_base& ) = delete; // forbid copies friend std::ostream& operator<<( std::ostream &os, const InstanceBase_base& ); friend std::ostream& operator<<( std::ostream &os, const InstanceBase_base * ); public: InstanceBase_base( size_t, unsigned long&, bool, bool ); virtual ~InstanceBase_base( void ); void AssignDefaults( void ); void RedoDistributions(); bool AddInstance( const Instance& ); void RemoveInstance( const Instance& ); void summarizeNodes( std::vector&, std::vector& ); virtual bool MergeSub( InstanceBase_base * ); const ClassDistribution *ExactMatch( const Instance& I ) const { return InstBase->exact_match( I ); }; virtual const ClassDistribution *InitGraphTest( std::vector&, const std::vector *, const size_t, const size_t ); virtual const ClassDistribution *NextGraphTest( std::vector&, size_t& ); unsigned long int GetDistSize( ) const { return NumOfTails; }; virtual const ClassDistribution *IG_test( const Instance& , size_t&, bool&, const TargetValue *& ); virtual IB_InstanceBase *TRIBL_test( const Instance& , size_t, const TargetValue *&, const ClassDistribution *&, size_t& ); virtual IB_InstanceBase *TRIBL2_test( const Instance& , const ClassDistribution *&, size_t& ); bool read_hash( std::istream&, Hash::UnicodeHash&, Hash::UnicodeHash& ) const; virtual InstanceBase_base *Copy() const = 0; virtual InstanceBase_base *clone() const = 0; void Save( std::ostream&, bool=false ); void Save( std::ostream&, const Hash::UnicodeHash&, const Hash::UnicodeHash&, bool=false ); void toXML( std::ostream& ); void printStatsTree( std::ostream&, unsigned int startLevel ); virtual bool ReadIB( std::istream&, Feature_List&, Targets&, int 
); virtual bool ReadIB_hashed( std::istream&, Feature_List&, Targets&, int ); virtual void Prune( const TargetValue *, long = 0 ); virtual bool IsPruned() const { return false; }; void CleanPartition( bool ); unsigned long int GetSizeInfo( unsigned long int&, double & ) const; const ClassDistribution *TopDist() const { return TopDistribution; }; bool HasDistributions() const; const TargetValue *TopTarget( bool & ); bool PersistentD() const { return PersistentDistributions; }; unsigned long int nodeCount() const { return ibCount;} ; size_t depth() const { return Depth;} ; const IBtree *instBase() const { return InstBase; }; #ifdef IBSTATS std::vector mismatch; #endif protected: bool DefAss; bool DefaultsValid; bool Random; bool PersistentDistributions; int Version; ClassDistribution *TopDistribution; WClassDistribution *WTop; const TargetValue *TopT; FI_map fast_index; bool tiedTop; IBtree *InstBase; IBtree *LastInstBasePos; std::vector RestartSearch; std::vector SkipSearch; std::vector InstPath; unsigned long int& ibCount; size_t Depth; unsigned long int NumOfTails; IBtree *read_list( std::istream&, Feature_List&, Targets&, int ); IBtree *read_local( std::istream&, Feature_List&, Targets&, int ); IBtree *read_list_hashed( std::istream&, Feature_List&, Targets&, int ); IBtree *read_local_hashed( std::istream&, Feature_List&, Targets&, int ); void write_tree( std::ostream &os, const IBtree * ) const; void write_tree_hashed( std::ostream &os, const IBtree * ) const; bool read_IB( std::istream&, Feature_List& , Targets&, int ); bool read_IB_hashed( std::istream&, Feature_List& , Targets&, int ); void fill_index(); const IBtree *fast_search_node( const FeatureValue * ); }; class IB_InstanceBase: public InstanceBase_base { public: IB_InstanceBase( size_t size, unsigned long& cnt, bool rand ): InstanceBase_base( size, cnt, rand , false ), offSet(0), effFeat(0), testInst(0) {}; IB_InstanceBase *Copy() const override; IB_InstanceBase *clone() const override; const 
ClassDistribution *InitGraphTest( std::vector&, const std::vector *, const size_t, const size_t ) override; const ClassDistribution *NextGraphTest( std::vector&, size_t& ) override; private: size_t offSet; size_t effFeat; const std::vector *testInst; }; class IG_InstanceBase: public InstanceBase_base { public: IG_InstanceBase( size_t size, unsigned long& cnt, bool rand, bool pruned, bool keep_dists ): InstanceBase_base( size, cnt, rand, keep_dists ), Pruned( pruned ) {}; IG_InstanceBase *clone() const override; IG_InstanceBase *Copy() const override; void Prune( const TargetValue *, long = 0 ) override; void specialPrune( const TargetValue * ); bool IsPruned() const override { return Pruned; }; const ClassDistribution *IG_test( const Instance& , size_t&, bool&, const TargetValue *& ) override; bool ReadIB( std::istream&, Feature_List&, Targets&, int ) override; bool ReadIB_hashed( std::istream&, Feature_List&, Targets&, int ) override; bool MergeSub( InstanceBase_base * ) override; protected: bool Pruned; }; class TRIBL_InstanceBase: public InstanceBase_base { public: TRIBL_InstanceBase( size_t size, unsigned long& cnt, bool rand, bool keep_dists ): InstanceBase_base( size, cnt, rand, keep_dists ), Threshold(0) {}; TRIBL_InstanceBase *clone() const override; TRIBL_InstanceBase *Copy() const override; IB_InstanceBase *TRIBL_test( const Instance&, size_t, const TargetValue *&, const ClassDistribution *&, size_t& ) override; private: IB_InstanceBase *IBPartition( IBtree * ) const; void AssignDefaults( size_t ); size_t Threshold; }; class TRIBL2_InstanceBase: public InstanceBase_base { public: TRIBL2_InstanceBase( size_t size, unsigned long& cnt, bool rand, bool keep_dists ): InstanceBase_base( size, cnt, rand, keep_dists ) { }; TRIBL2_InstanceBase *clone() const override; TRIBL2_InstanceBase *Copy() const override; IB_InstanceBase *TRIBL2_test( const Instance& , const ClassDistribution *&, size_t& ) override; private: IB_InstanceBase *IBPartition( IBtree * ) const; }; 
} #endif LanguageMachines-timbl-642727d/include/timbl/Instance.h000066400000000000000000000037411451477526200227640ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #ifndef TIMBL_INSTANCE_H #define TIMBL_INSTANCE_H #include "ticcutils/Unicode.h" #include "timbl/Targets.h" #include "timbl/Features.h" namespace Hash { class UnicodeHash; } namespace Timbl { class TargetValue; class FeatureValue; class Instance { friend std::ostream& operator<<(std::ostream&, const Instance& ); friend std::ostream& operator<<(std::ostream&, const Instance * ); public: Instance(); explicit Instance( size_t s ): Instance() { Init( s ); }; Instance( const Instance& ) = delete; // inhibit copies Instance& operator=( const Instance& ) = delete; // inhibit copies ~Instance(); void Init( size_t ); void clear(); double ExemplarWeight() const { return sample_weight; }; void ExemplarWeight( const double sw ){ sample_weight = sw; }; int Occurrences() const { return occ; }; void Occurrences( const int o ) { occ = o; }; size_t size() const { return FV.size(); }; std::vector FV; TargetValue *TV; private: double sample_weight; // relative weight int occ; }; } #endif 
LanguageMachines-timbl-642727d/include/timbl/MBLClass.h000066400000000000000000000223421451477526200226160ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #ifndef TIMBL_MBLCLASS_H #define TIMBL_MBLCLASS_H #include "timbl/Instance.h" #include "timbl/BestArray.h" #include "timbl/neighborSet.h" #include "timbl/Options.h" typedef struct _xmlNode xmlNode; namespace Hash { class UnicodeHash; } namespace Timbl { using namespace Common; class InstanceBase_base; class TesterClass; class Chopper; class neighborSet; class MBLClass: public MsgClass { public: bool SetOption( const std::string& ); xmlNode *settingsToXml() const; nlohmann::json settings_to_JSON() const; bool ShowWeights( std::ostream& ) const; bool Verbosity( VerbosityFlags v ) const { return verbosity & v; }; void SetVerbosityFlag( VerbosityFlags v ) { verbosity |= v; }; void ResetVerbosityFlag( VerbosityFlags v ) { verbosity &= ~v; }; bool MBLInit() const { return MBL_init; }; void MBLInit( bool b ) { MBL_init = b; }; bool ExpInvalid( bool b = true ) const { if ( err_cnt > 0 ){ if ( b ){ InvalidMessage(); } return true; } else return false; }; WeightType CurrentWeighting() const { return Weighting; }; 
InputFormatType InputFormat() const { return input_format; }; bool connectToSocket( std::ostream *, bool = false ); std::ostream *sock_os; bool sock_is_json; mutable nlohmann::json last_error; int getOcc() const { return doOcc; }; protected: explicit MBLClass( const std::string& = "" ); void init_options_table( size_t ); MBLClass& operator=( const MBLClass& ); enum PhaseValue { TrainWords, LearnWords, TestWords, TrainLearnWords }; friend std::ostream& operator<< ( std::ostream&, const PhaseValue& ); enum IB_Stat { Invalid, Normal, Pruned }; bool writeArrays( std::ostream& ); bool readArrays( std::istream& ); bool writeMatrices( std::ostream& ) const; bool readMatrices( std::istream& ); bool writeWeights( std::ostream& ) const; bool readWeights( std::istream&, WeightType ); bool writeNamesFile( std::ostream& ) const; bool ShowOptions( std::ostream& ) const; bool ShowSettings( std::ostream& ) const; void writePermutation( std::ostream& ) const; void LearningInfo( std::ostream& ); virtual ~MBLClass(); void Initialize( size_t ); bool PutInstanceBase( std::ostream& ) const; VerbosityFlags get_verbosity() const { return verbosity; }; void set_verbosity( VerbosityFlags v ) { verbosity = v; }; const Instance *chopped_to_instance( PhaseValue ); bool Chop( const icu::UnicodeString& ); bool HideInstance( const Instance& ); bool UnHideInstance( const Instance& ); icu::UnicodeString formatInstance( const std::vector&, const std::vector&, size_t, size_t ) const; bool setInputFormat( const InputFormatType ); size_t countFeatures( const icu::UnicodeString&, const InputFormatType ) const; InputFormatType getInputFormat( const icu::UnicodeString& ) const; size_t examineData( const std::string& ); void time_stamp( const char *, int =-1 ) const; void TestInstance( const Instance& , InstanceBase_base * = NULL, size_t = 0 ); icu::UnicodeString get_org_input( ) const; const ClassDistribution *ExactMatch( const Instance& ) const; void fillNeighborSet( neighborSet& ) const; void 
addToNeighborSet( neighborSet& ns, size_t n ) const; double getBestDistance() const; WClassDistribution *getBestDistribution( unsigned int =0 ); IB_Stat IBStatus() const; bool get_ranges( const std::string& ); size_t get_IB_Info( std::istream&, bool&, int&, bool&, std::string& ); size_t NumOfFeatures() const { return features._num_of_feats; }; size_t targetPos() const { return target_pos; }; size_t NumNumFeatures() const { return features._num_of_num_feats; }; size_t EffectiveFeatures() const { return features._eff_feats; }; void IBInfo( std::ostream& os ) const; void MatrixInfo( std::ostream& ) const; int RandomSeed() const { return random_seed; }; void Info( const std::string& ) const override; void Warning( const std::string& ) const override; void Error( const std::string& ) const override; void FatalError( const std::string& ) const override; size_t MaxFeats() const { return MaxFeatures; }; int Progress() const { return progress; }; void Progress( int p ){ progress = p; }; std::string extract_limited_m( size_t ); Targets targets; Feature_List features; InstanceBase_base *InstanceBase; std::ostream *mylog; std::ostream *myerr; size_t TRIBL_offset() const { return tribl_offset; }; unsigned int igOffset() const { return igThreshold; }; unsigned int IB2_offset() const { return ib2_offset; }; void IB2_offset( unsigned int n ) { ib2_offset = n; }; bool Do_Sloppy_LOO() const { return do_sloppy_loo; }; bool doSamples() const { return do_sample_weighting && !do_ignore_samples; }; bool Do_Exact() const { return do_exact_match; }; void Do_Exact( bool b ) { do_exact_match = b; }; void InitWeights(); void diverseWeights(); bool KeepDistributions() const { return keep_distributions; }; void KeepDistributions( bool f ){ keep_distributions = f; }; bool IsClone() const { return is_copy; }; void default_order(); void set_order(void); void calculatePermutation( const std::vector& ); void calculate_fv_entropy( bool ); bool recalculate_stats( Feature_List&, std::vector&, bool ); 
OptionTableClass Options; PhaseValue runningPhase; WeightType Weighting; metricClass *GlobalMetric; OrdeningType TreeOrder; size_t num_of_neighbors; bool dynamic_neighbors; DecayType decay_flag; std::string exp_name; Instance CurrInst; BestArray bestArray; size_t MaxBests; neighborSet nSet; decayStruct *decay; int beamSize; normType normalisation; double norm_factor; bool is_copy; bool is_synced; unsigned int ib2_offset; int random_seed; double decay_alfa; double decay_beta; bool MBL_init; bool tableFilled; MetricType globalMetricOption; bool do_diversify; bool initProbabilityArrays( bool ); void calculatePrestored(); void initDecay(); void initTesters(); Chopper *ChopInput; int F_length; private: size_t MaxFeatures; std::vector UserOptions; InputFormatType input_format; VerbosityFlags verbosity; size_t target_pos; int clip_factor; int Bin_Size; int progress; size_t tribl_offset; unsigned igThreshold; int mvd_threshold; bool do_sloppy_loo; bool do_exact_match; bool do_silly_testing; bool hashed_trees; bool need_all_weights; bool do_sample_weighting; bool do_ignore_samples; bool no_samples_test; bool keep_distributions; double DBEntropy; TesterClass *tester; int doOcc; bool chopExamples() const { return do_sample_weighting && !( runningPhase == TestWords && no_samples_test ); } bool chopOcc() const { switch( runningPhase ) { case TrainWords: case LearnWords: case TrainLearnWords: return doOcc == 1 || doOcc == 3; case TestWords: return doOcc > 1; default: return false; } }; void InvalidMessage() const ; void do_numeric_statistics( ); void test_instance( const Instance& , InstanceBase_base * = NULL, size_t = 0 ); void test_instance_sim( const Instance& , InstanceBase_base * = NULL, size_t = 0 ); void test_instance_ex( const Instance&, InstanceBase_base * = NULL, size_t = 0 ); bool allocate_arrays(); double RelativeWeight( unsigned int ) const; void writePermSpecial(std::ostream&) const; bool read_the_vals( std::istream& ); MBLClass( const MBLClass& ); }; inline 
std::ostream& operator<< ( std::ostream& os, const MBLClass::PhaseValue& ph ){ switch( ph ){ case MBLClass::TrainWords: os << "TrainWords"; break; case MBLClass::LearnWords: os << "LearnWords"; break; case MBLClass::TestWords: os << "TestWords"; break; case MBLClass::TrainLearnWords: os << "TrainlearnWords"; break; default: os << "unknown phase"; } return os; } bool empty_line( const icu::UnicodeString& , const InputFormatType ); } #endif // TIMBL_MBLCLASS_H LanguageMachines-timbl-642727d/include/timbl/Makefile.am000066400000000000000000000004301451477526200230730ustar00rootroot00000000000000# $Id$ # $URL$ pkginclude_HEADERS = Common.h GetOptClass.h IBtree.h Matrices.h \ Features.h Targets.h Instance.h \ MBLClass.h MsgClass.h BestArray.h \ StringOps.h TimblAPI.h Options.h \ TimblExperiment.h Types.h neighborSet.h Statistics.h \ Choppers.h Testers.h Metrics.h LanguageMachines-timbl-642727d/include/timbl/Matrices.h000066400000000000000000000067301451477526200227700ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . 
For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #ifndef TIMBL_MATRICES_H #define TIMBL_MATRICES_H template class SparseSymetricMatrix; template std::ostream& operator << (std::ostream&, const SparseSymetricMatrix& ); template class SparseSymetricMatrix { typedef std::map< Class, double > CDmap; typedef std::map< Class, CDmap > CCDmap; friend std::ostream& operator << <> ( std::ostream&, const SparseSymetricMatrix& ); public: void Clear() { my_mat.clear(); }; void Assign( Class i, Class j, double d ){ if ( i == j ) return; if ( i second.find(i); if ( it2 != it1->second.end() ){ return it2->second; } } } else { typename CCDmap::const_iterator it1 = my_mat.find(i); if ( it1 != my_mat.end() ){ typename CDmap::const_iterator it2 = it1->second.find(j); if ( it2 != it1->second.end() ){ return it2->second; } } } return 0.0; }; unsigned int NumBytes(void) const{ unsigned int tot = sizeof(std::map); typename CCDmap::const_iterator it1 = my_mat.begin(); while ( it1 != my_mat.end() ){ tot += sizeof(CDmap); typename CDmap::const_iterator it2 = it1->second.begin(); while ( it2 != it1->second.end() ){ tot += sizeof(double); ++it2; } ++it1; } return tot; }; SparseSymetricMatrix *copy(void) const{ SparseSymetricMatrix *res = new SparseSymetricMatrix(); typename CCDmap::const_iterator it1 = my_mat.begin(); while ( it1 != my_mat.end() ){ typename CDmap::const_iterator it2 = it1->second.begin(); while ( it2 != it1->second.end() ){ res->my_mat[it1->first][it2->first] = it2->second; ++it2; } ++it1; } return res; } private: CCDmap my_mat; }; template inline std::ostream& operator << (std::ostream& os, const SparseSymetricMatrix& m ){ typename SparseSymetricMatrix::CCDmap::const_iterator it1 = m.my_mat.begin(); while ( it1 != m.my_mat.end() ){ typename SparseSymetricMatrix::CDmap::const_iterator it2 = it1->second.begin(); while ( it2 != it1->second.end() ){ os << "[" << it1->first << ",\t" << 
it2->first << "] " << it2->second << std::endl; ++it2; } ++it1; } return os; } #endif // TIMBL_MATRICES_H LanguageMachines-timbl-642727d/include/timbl/Metrics.h000066400000000000000000000134161451477526200226260ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #ifndef TIMBL_METRICS_H #define TIMBL_METRICS_H #include #include namespace Timbl{ class FeatureValue; class metricClass { public: explicit metricClass( MetricType m ): _type(m){}; virtual ~metricClass() {}; MetricType type() const { return _type; }; virtual bool isSimilarityMetric() const = 0; virtual bool isNumerical() const = 0; virtual bool isStorable() const = 0; virtual double distance( const FeatureValue *, const FeatureValue *, size_t=1, double = 1.0 ) const = 0; virtual double get_max_similarity() const { throw std::logic_error( "get_max_similarity not implemented for " + TiCC::toString( _type ) ); } private: MetricType _type; }; metricClass *getMetricClass( MetricType ); class distanceMetricClass: public metricClass { public: explicit distanceMetricClass( MetricType m ): metricClass(m){}; virtual ~distanceMetricClass() {}; bool isSimilarityMetric() const override { return false; }; }; class 
OverlapMetric: public distanceMetricClass { public: OverlapMetric(): distanceMetricClass( Overlap ){}; bool isNumerical() const override { return false; }; bool isStorable() const override { return false; }; double distance( const FeatureValue *, const FeatureValue *, size_t, double ) const override; }; class NumericMetricClass: public distanceMetricClass { public: explicit NumericMetricClass( MetricType m ): distanceMetricClass( m ){}; virtual ~NumericMetricClass() {}; bool isNumerical() const override { return true; }; bool isStorable() const override { return false; }; }; class NumericMetric: public NumericMetricClass { public: NumericMetric(): NumericMetricClass( Numeric ){}; double distance( const FeatureValue *, const FeatureValue *, size_t, double ) const override; }; class EuclideanMetric: public NumericMetricClass { public: EuclideanMetric(): NumericMetricClass( Euclidean ){}; double distance( const FeatureValue *, const FeatureValue *, size_t, double ) const override; }; class ValueDiffMetric: public distanceMetricClass { public: ValueDiffMetric(): distanceMetricClass( ValueDiff ){}; bool isNumerical() const override { return false; }; bool isStorable() const override { return true; }; double distance( const FeatureValue *, const FeatureValue *, size_t, double ) const override; }; class DiceMetric: public distanceMetricClass { public: DiceMetric(): distanceMetricClass( Dice ){}; bool isNumerical() const override { return false; }; bool isStorable() const override { return true; }; double distance( const FeatureValue *, const FeatureValue *, size_t, double ) const override; }; class JeffreyMetric: public distanceMetricClass { public: JeffreyMetric(): distanceMetricClass( JeffreyDiv ){}; bool isNumerical() const override{ return false; }; bool isStorable() const override { return true; }; double distance( const FeatureValue *, const FeatureValue *, size_t, double ) const override; }; class JSMetric: public distanceMetricClass { public: JSMetric(): 
distanceMetricClass( JSDiv ){}; bool isNumerical() const override { return false; }; bool isStorable() const override { return true; }; double distance( const FeatureValue *, const FeatureValue *, size_t, double ) const override; }; class LevenshteinMetric: public distanceMetricClass { public: LevenshteinMetric(): distanceMetricClass( Levenshtein ){}; bool isNumerical() const override { return false; }; bool isStorable() const override { return true; }; double distance( const FeatureValue *, const FeatureValue *, size_t, double ) const override; }; class similarityMetricClass: public metricClass { public: explicit similarityMetricClass( MetricType m ): metricClass( m ){}; virtual ~similarityMetricClass() {}; bool isSimilarityMetric() const override { return true; }; bool isNumerical() const override { return true; }; bool isStorable() const override { return false; }; }; class CosineMetric: public similarityMetricClass { public: CosineMetric(): similarityMetricClass( Cosine ){}; double distance( const FeatureValue *, const FeatureValue *, size_t, double ) const override; double get_max_similarity() const override { return 1.0; }; }; class DotProductMetric: public similarityMetricClass { public: DotProductMetric(): similarityMetricClass( DotProduct ){}; double distance( const FeatureValue *, const FeatureValue *, size_t, double ) const override; double get_max_similarity() const override { return std::numeric_limits::max(); }; }; } #endif // TIMBL_METRICS_H LanguageMachines-timbl-642727d/include/timbl/MsgClass.h000066400000000000000000000025061451477526200227320ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. 
timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #ifndef TIMBL_MSGCLASS_H #define TIMBL_MSGCLASS_H namespace Timbl { class MsgClass { public: MsgClass(): err_cnt(0) {}; virtual ~MsgClass() {}; virtual void Info( const std::string& ) const; virtual void Warning( const std::string& ) const ; virtual void Error( const std::string& ) const ; virtual void FatalError( const std::string& ) const ; mutable int err_cnt; }; } #endif LanguageMachines-timbl-642727d/include/timbl/Options.h000066400000000000000000000351651451477526200226600ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . 
For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #ifndef TIMBL_OPTIONS_H #define TIMBL_OPTIONS_H #include #include #include #include #include "ticcutils/StringOps.h" namespace Timbl { class OptionClass { friend class OptionTableClass; public: explicit OptionClass( const std::string& n ): Name( n ) {}; virtual ~OptionClass() {}; virtual bool set_option( const std::string& ) = 0; virtual std::ostream& show_opt( std::ostream & ) const = 0; virtual std::ostream& show_full( std::ostream & ) const = 0; protected: const std::string Name; private: OptionClass(const OptionClass&); OptionClass& operator = (const OptionClass&); }; template class OptionClassT: public OptionClass { public: OptionClassT( const std::string& n, Type *tp, Type t ):OptionClass(n), Content(tp) { *Content = t; }; virtual bool set_option( const std::string& line ) override { Type T; bool result = TiCC::stringTo( line, T ); if ( result ) { *Content = T; } return result; }; virtual std::ostream& show_opt( std::ostream &os ) const override { os.width(20); os.setf( std::ios::left, std::ios::adjustfield ); os << Name << " : " << TiCC::toString(*Content); return os; }; virtual std::ostream& show_full( std::ostream &os ) const override { return show_opt( os ); }; private: Type *Content; OptionClassT(const OptionClassT&); OptionClassT& operator = (const OptionClassT&); }; typedef OptionClassT BoolOption; template <> inline std::ostream& OptionClassT::show_opt( std::ostream &os ) const { os.width(20); os.setf( std::ios::left, std::ios::adjustfield ); os.setf( std::ios::boolalpha ); os << Name << " : " << *Content; return os; } template <> inline std::ostream& OptionClassT::show_full( std::ostream &os ) const{ os.width(20); os.setf( std::ios::left, std::ios::adjustfield ); os.setf( std::ios::boolalpha ); os << Name << " : false or true [" << *Content << "]"; return os; } typedef OptionClassT VerbosityOption; template <> 
inline std::ostream& OptionClassT::show_full( std::ostream &os ) const { os.width(20); os.setf( std::ios::left, std::ios::adjustfield ); os << Name << " : " << TiCC::toString(*Content,true); return os; } typedef OptionClassT InputFormatOption; template <> inline std::ostream& InputFormatOption::show_full( std::ostream &os ) const { os.width(20); os.setf( std::ios::left, std::ios::adjustfield ); os << Name << " : {"; InputFormatType i = UnknownInputFormat; for ( ++i; i < MaxInputFormat-1; ++i ){ os << TiCC::toString(i) << ", "; } os << TiCC::toString(i) << "}, [ " << TiCC::toString(*Content) << "]"; return os; } typedef OptionClassT MetricOption; template <> inline std::ostream& OptionClassT::show_full( std::ostream &os )const { os.width(20); os.setf( std::ios::left, std::ios::adjustfield ); os << Name << " : {"; MetricType i = UnknownMetric; for ( ++i; i < MaxMetric-1; ++i ){ os << TiCC::toString(i) << ", "; } os << TiCC::toString(i) << "}, [ " << TiCC::toString(*Content) << "]"; return os; } typedef OptionClassT AlgorithmOption; template <> inline std::ostream& OptionClassT::show_full( std::ostream &os ) const { os.width(20); os.setf( std::ios::left, std::ios::adjustfield ); os << Name << " : {"; AlgorithmType i = Unknown_a; for ( ++i; i < Max_a-1; ++i ){ os << TiCC::toString(i) << ", "; } os << TiCC::toString(i) << "}, [ " << TiCC::toString(*Content) << "]"; return os; } typedef OptionClassT DecayOption; template <> inline std::ostream& DecayOption::show_full( std::ostream &os ) const { os.width(20); os.setf( std::ios::left, std::ios::adjustfield ); os << Name << " : {"; DecayType i = UnknownDecay; for ( ++i; i < MaxDecay-1; ++i ){ os << TiCC::toString(i) << ", "; } os << TiCC::toString(i) << "}, [ " << TiCC::toString(*Content) << "]"; return os; } typedef OptionClassT SmoothOption; template <> inline std::ostream& SmoothOption::show_full( std::ostream &os ) const { os.width(20); os.setf( std::ios::left, std::ios::adjustfield ); os << Name << " : {"; 
SmoothingType i = UnknownSmoothing; for ( ++i; i < MaxSmoothing-1; ++i ){ os << TiCC::toString(i) << ", "; } os << TiCC::toString(i) << "}, [ " << TiCC::toString(*Content) << "]"; return os; } typedef OptionClassT WeightOption; template <> inline std::ostream& OptionClassT::show_full( std::ostream &os ) const { os.width(20); os.setf( std::ios::left, std::ios::adjustfield ); os << Name << " : {"; WeightType i = Unknown_w; for ( ++i; i < Max_w-1; ++i ){ os << TiCC::toString(i) << ", "; } os << TiCC::toString(i) << "}, [ " << TiCC::toString(*Content) << "]"; return os; } typedef OptionClassT OrdeningOption; template <> inline std::ostream& OptionClassT::show_full( std::ostream &os ) const { os.width(20); os.setf( std::ios::left, std::ios::adjustfield ); os << Name << " : {"; OrdeningType i = UnknownOrdening; for ( ++i; i < MaxOrdening-1; ++i ){ os << TiCC::toString(i) << ", "; } os << TiCC::toString(i) << "}, [ " << TiCC::toString(*Content) << "]"; return os; } typedef OptionClassT NormalisationOption; template <> inline std::ostream& NormalisationOption::show_full( std::ostream &os ) const { os.width(20); os.setf( std::ios::left, std::ios::adjustfield ); os << Name << " : {"; normType i = unknownNorm; for ( ++i; i < maxNorm-1; ++i ){ os << TiCC::toString(i) << ", "; } os << TiCC::toString(i) << "}, [ " << TiCC::toString(*Content) << "]"; return os; } // // Array of options types // template class OptionArrayClass: public OptionClass { public: OptionArrayClass( const std::string& n, std::vector& ta, const size_t size ): OptionClass( n ), TA(ta), Size(size ){}; protected: std::vector& TA; size_t Size; private: OptionArrayClass(const OptionArrayClass&); OptionArrayClass& operator = (const OptionArrayClass&); }; class MetricArrayOption: public OptionArrayClass { public: MetricArrayOption( const std::string& n, std::vector& mp, MetricType& m, size_t s ): OptionArrayClass( n, mp, s ), def(m){ TA.resize(s,m); }; bool set_option( const std::string& line ) override; 
std::ostream& show_opt( std::ostream &os ) const override; std::ostream& show_full( std::ostream &os ) const override; private: const MetricType& def; }; inline bool MetricArrayOption::set_option( const std::string& line ){ MetricType m = UnknownMetric; size_t i=0; std::vector res; bool result = TiCC::split_at( line, res, "=" ) == 2 && TiCC::stringTo( res[1], m ) && TiCC::stringTo( res[0], i, 0, Size ); if ( result ){ TA[i] = m; } return result; } inline std::ostream& MetricArrayOption::show_opt( std::ostream &os ) const { os.width(20); os.setf( std::ios::left, std::ios::adjustfield ); os << Name << " : "; for ( size_t i=0; i < Size; i++ ){ if ( TA[i] != def ){ os << i << ":" << TiCC::toString(TA[i]) << ", "; } } return os; } inline std::ostream& MetricArrayOption::show_full( std::ostream &os ) const { os.width(20); os.setf( std::ios::left, std::ios::adjustfield ); os << Name << " : comma separated metricvalues, ["; bool first = true; for ( size_t i=0; i < Size; i++ ){ if ( TA[i] != def ){ if ( !first ){ os << ","; } else { first = false; } os << i << ":" << TiCC::toString(TA[i]); } } os << "]"; return os; } // // Limited Type, with min and maxVal // template class OptionClassLT: public OptionClass { public: OptionClassLT( const std::string& n, Type *tp, Type t, Type Min, Type Max ):OptionClass(n), Content( tp), minVal( Min ), maxVal( Max ) { *Content = t; }; virtual bool set_option( const std::string& line ) override { Type T; bool result = TiCC::stringTo( line, T, minVal, maxVal ); if ( result ) { *Content = T; } return result; }; virtual std::ostream& show_opt( std::ostream &os ) const override { os.width(20); os.setf( std::ios::showpoint ); os.setf( std::ios::left, std::ios::adjustfield ); os << Name << " : " << *Content; return os; }; virtual std::ostream& show_full( std::ostream &os ) const override { os.width(20); os.setf( std::ios::showpoint ); os.setf( std::ios::left, std::ios::adjustfield ); os << Name << " : { " << minVal << " - " << maxVal << "}, [" << 
*Content << "]"; return os; }; private: Type *Content; Type minVal; Type maxVal; OptionClassLT(const OptionClassLT&); OptionClassLT& operator = (const OptionClassLT&); }; typedef OptionClassLT IntegerOption; typedef OptionClassLT UnsignedOption; typedef OptionClassLT SizeOption; typedef OptionClassLT RealOption; enum SetOptRes { Opt_OK, Opt_Frozen, Opt_Unknown, Opt_Ill_Val}; struct ci_less { // case-independent (ci) compare_less binary function struct nocase_compare { bool operator() (const unsigned char& c1, const unsigned char& c2) const { return tolower (c1) < tolower (c2); } }; bool operator() (const std::string & s1, const std::string & s2) const { return std::lexicographical_compare (s1.begin(), s1.end(), // source range s2.begin(), s2.end(), // dest range nocase_compare()); // comparison } }; class OptionTableClass { public: OptionTableClass(): table_frozen(false){}; OptionTableClass( const OptionTableClass& ) = delete; // forbid copies OptionTableClass& operator=( const OptionTableClass& ) = delete; // forbid copies ~OptionTableClass(){ for ( const auto& it : global_table ){ delete it.second; } for ( const auto& it : runtime_table ){ delete it.second; } }; bool Add( OptionClass *opt ){ // std::cerr << "Table add: " << opt->Name << std::endl; runtime_table[opt->Name] = opt; return true; }; void FreezeTable(void); bool TableFrozen(void){ return table_frozen; }; SetOptRes SetOption( const std::string& ); void Show_Settings( std::ostream& ) const; void Show_Options( std::ostream& ) const; private: bool table_frozen; std::map runtime_table; std::map global_table; inline OptionClass *look_up( const std::string&, bool & ); }; inline void OptionTableClass::FreezeTable(void){ global_table = runtime_table; runtime_table.clear(); table_frozen = true; } inline void OptionTableClass::Show_Settings( std::ostream& os ) const{ for ( const auto& it: global_table ){ it.second->show_opt( os ) << std::endl; } for ( const auto& it: runtime_table ){ it.second->show_opt( os ) << 
std::endl; } } inline void OptionTableClass::Show_Options( std::ostream& os ) const { for ( const auto& it: global_table ){ it.second->show_full( os ) << std::endl; } for ( const auto& it: runtime_table ){ it.second->show_full( os ) << std::endl; } } inline void split_line( const std::string& line, std::string& name, std::string& value ){ std::vector results; size_t i = TiCC::split_at( line, results, ":" ); switch (i){ case 2: name = TiCC::trim(results[0]); // fallthrough case 1: value = TiCC::trim(results[1]); default: break; } } inline OptionClass *OptionTableClass::look_up( const std::string& option_name, bool &runtime ){ // std::cerr << "lookup: " << option_name << std::endl; const auto itr = runtime_table.find( option_name ); if ( itr != runtime_table.end() ){ runtime = true; // std::cerr << "FOUND: runtime= " << option_name << std::endl; return itr->second; } else { const auto itg = global_table.find( option_name ); if ( itg != global_table.end() ){ runtime = table_frozen; // std::cerr << "FOUND global= " << option_name << std::endl; return itg->second; } } return NULL; } inline SetOptRes OptionTableClass::SetOption( const std::string& line ){ SetOptRes result = Opt_OK; bool runtime = false; std::string option_name; std::string value; split_line( line, option_name, value ); OptionClass *option = look_up( option_name, runtime ); if ( option ){ if ( !runtime ){ result = Opt_Frozen; // may not be changed at this stage } else if ( !option->set_option( value ) ){ result = Opt_Ill_Val; // illegal value } } else { result = Opt_Unknown; // What the hell ??? 
} return result; } } #endif LanguageMachines-timbl-642727d/include/timbl/Statistics.h000066400000000000000000000050311451477526200233440ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #ifndef TIMBL_STATISTICS_H #define TIMBL_STATISTICS_H #include "timbl/MsgClass.h" namespace Timbl { class Targets; class TargetValue; class ConfusionMatrix: public MsgClass { size_t size; std::vector > mat; public: explicit ConfusionMatrix( size_t ); virtual ~ConfusionMatrix(); void Increment( const TargetValue*, const TargetValue* ); void Print( std::ostream&, const Targets& ) const; void FScore( std::ostream&, const Targets&, bool ) const; void merge( const ConfusionMatrix * ); }; class StatisticsClass { public: StatisticsClass(): _data(0), _skipped(0), _correct(0), _tieOk(0), _tieFalse(0), _exact(0) {}; void clear() { _data =0; _skipped = 0; _correct = 0; _tieOk = 0; _tieFalse = 0; _exact = 0; }; void addLine() { ++_data; } void addSkipped() { ++_skipped; } void addCorrect() { ++_correct; } void addTieCorrect() { ++_tieOk; } void addTieFailure() { ++_tieFalse; } void addExact() { ++_exact; } unsigned int dataLines() const { return _data; }; unsigned int skippedLines() const { 
return _skipped; }; unsigned int totalLines() const { return _data + _skipped; }; unsigned int testedCorrect() const { return _correct; }; unsigned int tiedCorrect() const { return _tieOk; }; unsigned int tiedFailure() const { return _tieFalse; }; unsigned int exactMatches() const { return _exact; }; void merge( const StatisticsClass& ); private: unsigned int _data; unsigned int _skipped; unsigned int _correct; unsigned int _tieOk; unsigned int _tieFalse; unsigned int _exact; }; } #endif LanguageMachines-timbl-642727d/include/timbl/StringOps.h000066400000000000000000000027341451477526200231510ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . 
For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #ifndef TIMBL_STRING_OPS_H #define TIMBL_STRING_OPS_H #include #include #include #include #include #include "unicode/unistr.h" namespace Timbl { bool compare_nocase( const std::string&, const std::string& ); bool compare_nocase_n( const std::string&, const std::string& ); icu::UnicodeString StrToCode( const icu::UnicodeString&, bool=true ); icu::UnicodeString CodeToStr( const icu::UnicodeString& ); std::string correct_path( const std::string&, const std::string&, bool = true ); } #endif LanguageMachines-timbl-642727d/include/timbl/Targets.h000066400000000000000000000166501451477526200226340ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . 
For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #ifndef TIMBL_TARGETS_H #define TIMBL_TARGETS_H #include #include #include #include "unicode/unistr.h" #include "timbl/MsgClass.h" #include "ticcutils/Unicode.h" namespace Hash { class UnicodeHash; } namespace Timbl { class ValueClass { public: ValueClass( const icu::UnicodeString& n, size_t i ): _name( n ), _index( i ), _frequency( 1 ) {}; ValueClass( const ValueClass& ) = delete; // forbid copies ValueClass& operator=( const ValueClass& ) = delete; // forbid copies virtual ~ValueClass() {}; void ValFreq( size_t f ){ _frequency = f; }; void IncValFreq( int f ){ _frequency += f; }; size_t ValFreq( ) const { return _frequency; }; void incr_val_freq(){ ++_frequency; }; void decr_val_freq(){ --_frequency; }; size_t Index() const { return _index; }; const icu::UnicodeString& name() const { return _name; }; const std::string name_string() const { return TiCC::UnicodeToUTF8(_name);}; // temporary for backward compatability const icu::UnicodeString& name_u() const { return _name; }; // HACK const std::string Name() const { return TiCC::UnicodeToUTF8(_name); }; // HACK // REMOVE ^^^^ friend std::ostream& operator<<( std::ostream& os, ValueClass const *vc ); protected: const icu::UnicodeString& _name; size_t _index; size_t _frequency; }; class TargetValue: public ValueClass { public: TargetValue( const icu::UnicodeString&, size_t ); }; class Targets: public MsgClass { friend class MBLClass; friend class WClassDistribution; friend class ConfusionMatrix; public: explicit Targets( Hash::UnicodeHash *T ): target_hash( T ), is_reference(false) {}; ~Targets(); Targets& operator=( const Targets& ); void init(); TargetValue *add_value( const icu::UnicodeString&, int freq = 1 ); TargetValue *add_value( size_t, int freq = 1 ); TargetValue *Lookup( const icu::UnicodeString& ) const; TargetValue *ReverseLookup( size_t ) const; bool decrement_value( 
TargetValue * ); bool increment_value( TargetValue * ); TargetValue *MajorityClass() const; size_t EffectiveValues() const; size_t TotalValues() const; size_t num_of_values() const { return values_array.size(); }; Hash::UnicodeHash *hash() const { return target_hash; }; private: Hash::UnicodeHash *target_hash; std::vector values_array; std::unordered_map< size_t, TargetValue *> reverse_values; bool is_reference; }; class Vfield{ friend class ClassDistribution; friend class WClassDistribution; friend std::ostream& operator<<( std::ostream&, const Vfield& ); friend std::ostream& operator<<( std::ostream&, const Vfield * ); public: Vfield( const TargetValue *val, int freq, double w ): value(val), frequency(freq), weight(w) {}; Vfield( const Vfield& in ): value(in.value), frequency(in.frequency), weight(in.weight) {}; Vfield& operator=( const Vfield& ) = delete; // forbid copies ~Vfield(){}; std::ostream& put( std::ostream& ) const; const TargetValue *Value() const { return value; }; void Value( const TargetValue *t ){ value = t; }; size_t Freq() const { return frequency; }; void IncFreq( int inc=1 ) { frequency += inc; }; void AddFreq( int f ) { frequency += f; weight += f; }; void DecFreq() { frequency -= 1; }; double Weight() const { return weight; }; void SetWeight( double w ){ weight = w; }; size_t Index(); protected: const TargetValue *value; size_t frequency; double weight; private: }; class WClassDistribution; class ClassDistribution{ friend std::ostream& operator<<( std::ostream&, const ClassDistribution& ); friend std::ostream& operator<<( std::ostream&, const ClassDistribution * ); friend class WClassDistribution; public: typedef std::map VDlist; typedef VDlist::const_iterator dist_iterator; ClassDistribution( ): total_items(0) {}; ClassDistribution( const ClassDistribution& ); virtual ~ClassDistribution(){ clear(); }; size_t totalSize() const{ return total_items; }; size_t size() const{ return distribution.size(); }; bool empty() const{ return 
distribution.empty(); }; void clear(); dist_iterator begin() const { return distribution.begin(); }; dist_iterator end() const { return distribution.end(); }; virtual const TargetValue* BestTarget( bool&, bool = false ) const; void Merge( const ClassDistribution& ); virtual void SetFreq( const TargetValue *, int, double=1.0 ); virtual bool IncFreq( const TargetValue *, size_t, double=1.0 ); void DecFreq( const TargetValue * ); static ClassDistribution *read_distribution( std::istream&, Targets&, bool ); static ClassDistribution *read_distribution_hashed( std::istream&, Targets&, bool ); const std::string DistToString() const; const std::string DistToStringW( int ) const; double Confidence( const TargetValue * ) const; virtual const std::string SaveHashed() const; virtual const std::string Save() const; bool ZeroDist() const { return total_items == 0; }; double Entropy() const; ClassDistribution *to_VD_Copy( ) const; virtual WClassDistribution *to_WVD_Copy() const; protected: virtual void DistToString( std::string&, double=0 ) const; virtual void DistToStringWW( std::string&, int ) const; const TargetValue* BestTargetN( bool &, bool = false ) const; const TargetValue* BestTargetW( bool &, bool = false ) const; virtual ClassDistribution *clone( ) const { return new ClassDistribution(); }; size_t total_items; VDlist distribution; }; class WClassDistribution: public ClassDistribution { public: WClassDistribution(): ClassDistribution() {}; const TargetValue* BestTarget( bool &, bool = false ) const override; void SetFreq( const TargetValue *, int, double ) override; bool IncFreq( const TargetValue *, size_t, double ) override; WClassDistribution *to_WVD_Copy( ) const override; const std::string SaveHashed() const override; const std::string Save() const override; void Normalize(); void Normalize_1( double, const Targets& ); void Normalize_2(); void MergeW( const ClassDistribution&, double ); private: void DistToString( std::string&, double=0 ) const override; void 
DistToStringWW( std::string&, int ) const override; WClassDistribution *clone() const override { return new WClassDistribution; }; }; } #endif // TINBL_TARGETS_H LanguageMachines-timbl-642727d/include/timbl/Testers.h000066400000000000000000000072331451477526200226510ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #ifndef TIMBL_TESTERS_H #define TIMBL_TESTERS_H namespace Timbl{ class metricTestFunction { public: virtual ~metricTestFunction(){}; virtual double test( const FeatureValue *, const FeatureValue *, const Feature * ) const = 0; }; class overlapTestFunction: public metricTestFunction { public: double test( const FeatureValue *FV, const FeatureValue *G, const Feature *Feat ) const override; }; class valueDiffTestFunction: public metricTestFunction { public: explicit valueDiffTestFunction( int t ): metricTestFunction(), threshold( t ) {}; double test( const FeatureValue *, const FeatureValue *, const Feature * ) const override; protected: int threshold; }; class TesterClass { public: TesterClass( const Feature_List& ); TesterClass( const TesterClass& ) = delete; // inhibit copies TesterClass& operator=( const TesterClass& ) = delete; // inhibit copies 
virtual ~TesterClass(){}; void init( const Instance&, size_t, size_t ); virtual size_t test( const std::vector&, size_t, double ) = 0; virtual double getDistance( size_t ) const = 0; protected: size_t _size; size_t effSize; size_t offSet; const std::vector *FV; const std::vector &features; const std::vector &permutation; std::vector permFeatures; std::vector distances; private: }; class DistanceTester: public TesterClass { public: DistanceTester( const Feature_List&, int ); ~DistanceTester(); double getDistance( size_t ) const override; size_t test( const std::vector&, size_t, double ) override; private: std::vector metricTest; }; class SimilarityTester: public TesterClass { public: explicit SimilarityTester( const Feature_List& pf ): TesterClass( pf ){}; ~SimilarityTester() {}; virtual size_t test( const std::vector&, size_t, double ) override = 0; protected: private: }; class CosineTester: public SimilarityTester { public: explicit CosineTester( const Feature_List& pf ): SimilarityTester( pf ){}; double getDistance( size_t ) const override; size_t test( const std::vector&, size_t, double ) override; private: }; class DotProductTester: public SimilarityTester { public: explicit DotProductTester( const Feature_List& pf ): SimilarityTester( pf ){}; double getDistance( size_t ) const override; size_t test( const std::vector&, size_t, double ) override; private: }; TesterClass* getTester( MetricType, const Feature_List&, int ); } #endif // TIMBL_TESTERS_H LanguageMachines-timbl-642727d/include/timbl/TimblAPI.h000066400000000000000000000135661451477526200226270ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. 
timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #ifndef TIMBL_API_H #define TIMBL_API_H #include #include #include "ticcutils/CommandLine.h" #include "timbl/Common.h" #include "timbl/Types.h" #include "timbl/Instance.h" #include "timbl/neighborSet.h" #include "timbl/TimblExperiment.h" namespace Timbl{ inline std::string Version() { return Common::Version(); } inline std::string VersionName() { return Common::VersionName(); } inline std::string BuildInfo() { return Common::BuildInfo(); } enum Algorithm { UNKNOWN_ALG, IB1, IB2, IGTREE, TRIBL, TRIBL2, LOO, CV }; enum Weighting { UNKNOWN_W, UD, NW, GR, IG, X2, SV, SD }; class TimblAPI { friend class TimblExperiment; public: TimblAPI( const TiCC::CL_Options&, const std::string& = "" ); TimblAPI( const std::string&, const std::string& = "" ); TimblAPI( const TimblAPI& ); ~TimblAPI(); bool isValid() const; bool Valid() const; TimblExperiment *grabAndDisconnectExp(){ TimblExperiment *res = 0; if ( Valid() ){ res = pimpl; pimpl = 0; } return res; } bool Prepare( const std::string& = "" ); bool CVprepare( const std::string& = "", Weighting = GR, const std::string& = "" ); bool Learn( const std::string& = "" ); bool Increment_u( const icu::UnicodeString& ); bool Increment( const std::string& ); bool Decrement_u( const icu::UnicodeString& ); bool Decrement( const std::string& ); bool Expand( const std::string& ); bool Remove( const std::string& ); bool Test( const std::string& = "", const std::string& = "", const std::string& = "" ); bool NS_Test( const std::string& = "", const std::string& = "" ); const 
TargetValue *Classify( const std::string& ); const TargetValue *Classify( const std::string&, const ClassDistribution *& ); const TargetValue *Classify( const std::string&, double& ); const TargetValue *Classify( const std::string&, const ClassDistribution *&, double& ); const TargetValue *Classify( const icu::UnicodeString& ); const TargetValue *Classify( const icu::UnicodeString&, const ClassDistribution *& ); const TargetValue *Classify( const icu::UnicodeString&, double& ); const TargetValue *Classify( const icu::UnicodeString&, const ClassDistribution *&, double& ); const neighborSet *classifyNS( const icu::UnicodeString& ); bool classifyNS( const icu::UnicodeString&, neighborSet& ); bool classifyNS( const std::string& in, neighborSet& st ){ return classifyNS( TiCC::UnicodeFromUTF8(in), st ); } const Instance *lastHandledInstance() const; const Targets& myTargets() const; bool Classify( const std::string&, std::string& ); bool Classify( const std::string&, std::string&, double& ); bool Classify( const std::string&, std::string&, std::string&, double& ); bool Classify( const icu::UnicodeString&, icu::UnicodeString& ); bool ShowBestNeighbors( std::ostream& ) const; size_t matchDepth() const; double confidence() const; bool matchedAtLeaf() const; std::string ExpName() const; static std::string VersionInfo( bool = false ); bool SaveWeights( const std::string& = "" ); bool GetWeights( const std::string& = "", Weighting = UNKNOWN_W ); double GetAccuracy(); Weighting CurrentWeighting() const; Weighting GetCurrentWeights( std::vector& ) const; bool WriteInstanceBase( const std::string& = "" ); bool WriteInstanceBaseXml( const std::string& = "" ); bool WriteInstanceBaseLevels( const std::string& = "", unsigned int=0 ); bool GetInstanceBase( const std::string& = "" ); bool WriteArrays( const std::string& = "" ); bool WriteMatrices( const std::string& = "" ); bool GetArrays( const std::string& = "" ); bool GetMatrices( const std::string& = "" ); bool WriteNamesFile( 
const std::string& = "" ); bool ShowWeights( std::ostream& ) const; bool ShowOptions( std::ostream& ) const; bool ShowSettings( std::ostream& ) const; bool ShowIBInfo( std::ostream& ) const; bool ShowStatistics( std::ostream& ) const; bool SetOptions( const std::string& ); bool SetIndirectOptions( const TiCC::CL_Options& ); bool SetThreads( int c ); std::string extract_limited_m( int ) const; Algorithm Algo() const; InputFormatType getInputFormat() const; size_t NumOfFeatures() const; static size_t Default_Max_Feats(); bool initExperiment(); private: TimblAPI(); TimblAPI& operator=( const TimblAPI& ); // forbid copies TimblExperiment *pimpl; bool i_am_fine; }; const std::string to_string( const Algorithm ); const std::string to_string( const Weighting ); bool string_to( const std::string&, Algorithm& ); bool string_to( const std::string&, Weighting& ); typedef ClassDistribution ValueDistribution; // for backward compatability typedef WClassDistribution WValueDistribution; // for backward compatability } #endif // TIMBL_API_H LanguageMachines-timbl-642727d/include/timbl/TimblExperiment.h000066400000000000000000000375561451477526200243430ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . 
For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #ifndef TIMBL_EXPERIMENT_H #define TIMBL_EXPERIMENT_H #include #include #include #include #include "ticcutils/XMLtools.h" #include "timbl/Statistics.h" #include "timbl/MsgClass.h" #include "timbl/MBLClass.h" namespace TiCC { class CL_Options; } namespace Timbl { extern const std::string timbl_short_opts; extern const std::string timbl_long_opts; extern const std::string timbl_serv_short_opts; extern const std::string timbl_indirect_opts; class TimblAPI; class ConfusionMatrix; class GetOptClass; class TargetValue; class Instance; class resultStore: public MsgClass { public: resultStore(): rawDist(0), dist(0), disposable(false), isTop(false), beam(0), norm(unknownNorm), factor(0.0), best_target(0), targets(0) {}; resultStore( const resultStore& ) = delete; // inhibit copies resultStore& operator=( const resultStore& ) = delete; // inhibit copies ~resultStore(); bool reset( int, normType, double, const Targets& ); void clear(); void addConstant( const ClassDistribution *, const TargetValue * ); void addTop( const ClassDistribution *, const TargetValue * ); void addDisposable( ClassDistribution *, const TargetValue * ); const WClassDistribution *getResultDist(); std::string getResult(); void prepare(); void normalize(); double confidence() const { if ( dist ){ return dist->Confidence( best_target ); } else { return 0.0; } }; double confidence( const TargetValue* tv ) const { if ( dist ){ return dist->Confidence( tv ); } else { return 0.0; } }; private: const ClassDistribution *rawDist; WClassDistribution *dist; bool disposable; bool isTop; int beam; normType norm; double factor; const TargetValue *best_target; const Targets *targets; std::string topCache; std::string resultCache; }; class fCmp { public: bool operator()( const FeatureValue* F, const FeatureValue* G ) const{ return F->Index() > G->Index(); } }; typedef std::map, fCmp> 
fileIndex; typedef std::map fileDoubleIndex; std::ostream& operator<< ( std::ostream&, const fileIndex& ); std::ostream& operator<< ( std::ostream&, const fileDoubleIndex& ); class threadData; class TimblExperiment: public MBLClass { friend class TimblAPI; friend class threadData; friend class threadBlock; public: virtual ~TimblExperiment(); virtual TimblExperiment *clone() const = 0; TimblExperiment& operator=( const TimblExperiment& ); virtual bool Prepare( const std::string& = "", bool = true, bool = false ); virtual bool CVprepare( const std::string& = "", WeightType = GR_w, const std::string& = "" ); virtual bool Increment( const icu::UnicodeString& ){ FatalError( "Increment" ); return false; }; virtual bool Decrement( const icu::UnicodeString& ){ FatalError( "Decrement" ); return false; }; virtual bool Expand( const std::string& ); virtual bool Remove( const std::string& ){ FatalError( "Remove" ); return false;}; virtual bool Test( const std::string&, const std::string& ); virtual bool NS_Test( const std::string&, const std::string& ); virtual void InitInstanceBase() = 0; virtual bool ReadInstanceBase( const std::string& ); virtual bool WriteInstanceBase( const std::string& ); bool chopLine( const icu::UnicodeString& ); bool WriteInstanceBaseXml( const std::string& ); bool WriteInstanceBaseLevels( const std::string&, unsigned int ); bool WriteNamesFile( const std::string& ) const; virtual bool Learn( const std::string& = "", bool = true ); int Estimate() const { return estimate; }; void Estimate( int e ){ estimate = e; }; int Clones() const { return numOfThreads; }; void Clones( int cl ) { numOfThreads = cl; }; void setOutPath( const std::string& s ){ outPath = s; }; TimblExperiment *CreateClient( int ) const; TimblExperiment *splitChild() const; bool SetOptions( int, const char *[] ); bool SetOptions( const std::string& ); bool SetOptions( const TiCC::CL_Options& ); bool IndirectOptions( const TiCC::CL_Options& ); bool ConfirmOptions(); GetOptClass 
*getOptParams() const { return OptParams; }; void setOptParams( GetOptClass *op ) { OptParams = op; }; bool WriteArrays( const std::string& ); bool GetArrays( const std::string& ); bool WriteMatrices( const std::string& ); bool GetMatrices( const std::string& ); bool SaveWeights( const std::string& ); bool GetWeights( const std::string&, WeightType ); bool GetCurrentWeights( std::vector& ); xmlNode *weightsToXML(); nlohmann::json weights_to_JSON(); bool ShowOptions( std::ostream& ); bool ShowSettings( std::ostream& ); xmlNode *settingsToXML(); nlohmann::json settings_to_JSON(); bool showBestNeighbors( std::ostream& ) const; xmlNode *bestNeighborsToXML() const; nlohmann::json best_neighbors_to_JSON() const; bool showStatistics( std::ostream& ) const; void showInputFormat( std::ostream& ) const; const std::string& ExpName() const { return exp_name; }; void setExpName( const std::string& s ) { exp_name = s; }; bool Classify( const std::string& , std::string&, std::string&, double& ); bool Classify( const icu::UnicodeString& , icu::UnicodeString& ); bool Classify( const icu::UnicodeString&, icu::UnicodeString&, icu::UnicodeString&, double& ); size_t matchDepth() const { return match_depth; }; double confidence() const { return bestResult.confidence(); }; bool matchedAtLeaf() const { return last_leaf; }; nlohmann::json classify_to_JSON( const std::string& ); nlohmann::json classify_to_JSON( const std::vector& ); virtual AlgorithmType Algorithm() const = 0; const TargetValue *Classify( const icu::UnicodeString& Line, const ClassDistribution *& db, double& di ){ const TargetValue *res = classifyString( Line, di ); if ( res ){ normalizeResult(); db = bestResult.getResultDist(); } return res; } const TargetValue *Classify( const icu::UnicodeString& Line ){ double dum_d; return classifyString( Line, dum_d ); } const TargetValue *Classify( const icu::UnicodeString& Line, const ClassDistribution *& db ){ double dum_d; const TargetValue *res = classifyString( Line, dum_d ); if 
( res ){ normalizeResult(); db = bestResult.getResultDist(); } return res; } const TargetValue *Classify( const icu::UnicodeString& Line, double& di ){ return classifyString( Line, di ); } const neighborSet *NB_Classify( const icu::UnicodeString& ); virtual void initExperiment( bool = false ); protected: TimblExperiment( const AlgorithmType, const std::string& = "" ); virtual bool checkLine( const icu::UnicodeString& ); virtual bool ClassicLearn( const std::string& = "", bool = true ); virtual const TargetValue *LocalClassify( const Instance&, double&, bool& ); virtual bool GetInstanceBase( std::istream& ) = 0; virtual void showTestingInfo( std::ostream& ); virtual bool checkTestFile(); bool learnFromFileIndex( const fileIndex&, std::istream& ); bool initTestFiles( const std::string&, const std::string& ); void show_results( std::ostream&, const double, const std::string&, const TargetValue *, const double ) ; void testInstance( const Instance&, InstanceBase_base *, size_t = 0 ); void normalizeResult(); const neighborSet *LocalClassify( const Instance& ); bool nextLine( std::istream &, icu::UnicodeString&, int& ); bool nextLine( std::istream &, icu::UnicodeString& ); bool skipARFFHeader( std::istream & ); void show_progress( std::ostream& os, time_t, unsigned int ); bool createPercFile( const std::string& = "" ) const; void show_speed_summary( std::ostream& os, const timeval& ) const; void show_ignore_info( std::ostream& os ) const; void show_weight_info( std::ostream& os ) const; void show_metric_info( std::ostream& os ) const; double sum_remaining_weights( size_t ) const; bool build_file_index( const std::string&, fileIndex& ); bool build_file_multi_index( const std::string&, fileDoubleIndex& ); bool Initialized; GetOptClass *OptParams; AlgorithmType algorithm; std::string CurrentDataFile; std::string WFileName; std::string outPath; std::string testStreamName; std::string outStreamName; std::ifstream testStream; std::ofstream outStream; unsigned long ibCount; 
ConfusionMatrix *confusionInfo; std::vector instances; StatisticsClass stats; resultStore bestResult; size_t match_depth; bool last_leaf; private: TimblExperiment( const TimblExperiment& ); int estimate; int numOfThreads; const TargetValue *classifyString( const icu::UnicodeString&, double& ); }; class IB1_Experiment: public TimblExperiment { public: IB1_Experiment( const size_t N = DEFAULT_MAX_FEATS, const std::string& s= "", const bool init = true ); bool Increment( const icu::UnicodeString& ) override; bool Decrement( const icu::UnicodeString& ) override; bool Remove( const std::string& ) override; AlgorithmType Algorithm() const override { return IB1_a; }; void InitInstanceBase() override; bool NS_Test( const std::string&, const std::string& ) override; protected: TimblExperiment *clone() const override { return new IB1_Experiment( MaxFeats(), "", false ); }; bool checkTestFile() override; bool checkLine( const icu::UnicodeString& ) override; bool Increment( const Instance& I ) { return UnHideInstance( I ); }; bool Decrement( const Instance& I ) { return HideInstance( I ); }; private: bool GetInstanceBase( std::istream& ) override; }; class IB2_Experiment: public IB1_Experiment { public: IB2_Experiment( size_t N, const std::string& s="" ): IB1_Experiment( N, s ) { IB2_offset( 0 ); }; bool Prepare( const std::string& = "", bool=false, bool=false ) override; bool Expand( const std::string& ) override; bool Remove( const std::string& ) override; bool Learn( const std::string& = "", bool = false ) override; AlgorithmType Algorithm() const override { return IB2_a; }; protected: bool checkTestFile() override; TimblExperiment *clone() const override { return new IB2_Experiment( MaxFeats() ); }; bool Expand_N( const std::string& ); bool show_learn_progress( std::ostream& os, time_t, size_t ); }; class LOO_Experiment: public IB1_Experiment { public: LOO_Experiment( int N, const std::string& s = "" ): IB1_Experiment( N, s ) { }; bool Test( const std::string&, const 
std::string& ) override; AlgorithmType Algorithm() const override { return LOO_a; }; bool ReadInstanceBase( const std::string& ) override; void initExperiment( bool = false ) override; protected: bool checkTestFile() override; void showTestingInfo( std::ostream& ) override; }; class CV_Experiment: public IB1_Experiment { public: CV_Experiment( int N = DEFAULT_MAX_FEATS, const std::string& s = "" ): IB1_Experiment( N, s ), CV_fileW(Unknown_w) { }; CV_Experiment( const CV_Experiment& ) = delete; // forbid copies CV_Experiment& operator=( const CV_Experiment& ) = delete; // forbid copies bool Learn( const std::string& = "", bool = true ) override; bool Prepare( const std::string& = "", bool=true, bool=false ) override; bool Test( const std::string&, const std::string& ) override; bool CVprepare( const std::string& = "", WeightType = GR_w, const std::string& = "" ) override; AlgorithmType Algorithm() const override { return CV_a; }; protected: bool checkTestFile() override; bool get_file_names( const std::string& ); private: std::vector FileNames; std::string CV_WfileName; std::string CV_PfileName; WeightType CV_fileW; }; class TRIBL_Experiment: public TimblExperiment { public: TRIBL_Experiment( const size_t N = DEFAULT_MAX_FEATS, const std::string& s = "", const bool init = true ): TimblExperiment( TRIBL_a, s ) { if ( init ) init_options_table( N ); }; void InitInstanceBase() override; protected: TimblExperiment *clone() const override { return new TRIBL_Experiment( MaxFeats(), "", false ); }; void showTestingInfo( std::ostream& ) override; bool checkTestFile() override; AlgorithmType Algorithm() const override { return TRIBL_a; }; bool checkLine( const icu::UnicodeString& ) override; const TargetValue *LocalClassify( const Instance&, double&, bool& ) override; private: bool GetInstanceBase( std::istream& ) override; }; class TRIBL2_Experiment: public TimblExperiment { public: TRIBL2_Experiment( const size_t N = DEFAULT_MAX_FEATS, const std::string& s = "", const bool 
init = true ): TimblExperiment( TRIBL2_a, s ) { if ( init ) init_options_table( N ); }; void InitInstanceBase() override; protected: TimblExperiment *clone() const override { return new TRIBL2_Experiment( MaxFeats(), "", false ); }; bool checkTestFile() override; AlgorithmType Algorithm() const override { return TRIBL2_a; }; bool checkLine( const icu::UnicodeString& ) override; const TargetValue *LocalClassify( const Instance& , double&, bool& ) override; private: bool GetInstanceBase( std::istream& ) override; }; class IG_Experiment: public TimblExperiment { public: IG_Experiment( const size_t N = DEFAULT_MAX_FEATS, const std::string& s = "", const bool init = true ): TimblExperiment( IGTREE_a, s ) { if ( init ) init_options_table( N ); }; AlgorithmType Algorithm() const override { return IGTREE_a; }; void InitInstanceBase() override; bool WriteInstanceBase( const std::string& ) override; bool ReadInstanceBase( const std::string& ) override; void initExperiment( bool = false ) override; bool Expand( const std::string& ) override { FatalError( "Expand not supported for IGTree" ); return false; }; protected: TimblExperiment *clone() const override{ return new IG_Experiment( MaxFeats(), "", false ); }; bool ClassicLearn( const std::string& = "", bool = true ) override; bool checkTestFile() override; void showTestingInfo( std::ostream& ) override; bool checkLine( const icu::UnicodeString& ) override; bool sanityCheck() const; const TargetValue *LocalClassify( const Instance&, double&, bool& ) override; private: bool GetInstanceBase( std::istream& ) override; }; } #endif // TIMBL_EXPERIMENT_H LanguageMachines-timbl-642727d/include/timbl/Types.h000066400000000000000000000305171451477526200223250ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License 
as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #ifndef TIMBL_TYPES_H #define TIMBL_TYPES_H #include #include #include #include #include #include #include #include #include "ticcutils/StringOps.h" #include "timbl/StringOps.h" namespace Timbl { enum InputFormatType { UnknownInputFormat, Compact, C4_5, Columns, Tabbed, ARFF, SparseBin, Sparse, MaxInputFormat }; inline InputFormatType& operator++( InputFormatType &I ){ return I = ( MaxInputFormat == I ) ? UnknownInputFormat : InputFormatType(I+1); } enum WeightType { Unknown_w, No_w, GR_w, IG_w, X2_w, SV_w, SD_w, UserDefined_w, Max_w }; inline WeightType& operator++( WeightType &W ){ return W = ( Max_w == W ) ? Unknown_w : WeightType(W+1); } enum AlgorithmType { Unknown_a, IB1_a, IB2_a, IGTREE_a, TRIBL_a, TRIBL2_a, LOO_a, CV_a, Max_a }; inline AlgorithmType& operator++( AlgorithmType &W ){ return W = ( Max_a == W ) ? Unknown_a : AlgorithmType(W+1); } enum MetricType { UnknownMetric, Ignore, Numeric, DotProduct, Cosine, Overlap, Levenshtein, Dice, ValueDiff, JeffreyDiv, JSDiv, Euclidean, MaxMetric }; inline MetricType& operator++( MetricType &W ){ return W = ( MaxMetric == W ) ? 
UnknownMetric : MetricType(W+1); } enum OrdeningType { UnknownOrdening, DataFile, NoOrder, GROrder, IGOrder, OneoverFeature, OneoverSplitInfo, GRoverFeature, IGoverFeature, GREntropyOrder, IGEntropyOrder, X2Order, SVOrder, SDOrder, X2overFeature, SVoverFeature, SDoverFeature, MaxOrdening }; inline OrdeningType& operator++( OrdeningType &W ){ return W = ( MaxOrdening == W ) ? UnknownOrdening : OrdeningType(W+1); } enum VerbosityFlags { NO_VERB=0, SILENT=1, OPTIONS=2, FEAT_W=4, VD_MATRIX=8, EXACT=16, DISTANCE=32, DISTRIB=64, NEAR_N=128, ADVANCED_STATS=256, CONF_MATRIX=512, CLASS_STATS=1024, CLIENTDEBUG=2048, ALL_K=4096, MATCH_DEPTH=8192, BRANCHING=16384, CONFIDENCE=32768, MAX_VERB }; inline VerbosityFlags operator~( VerbosityFlags V ){ return (VerbosityFlags)( ~(int)V ); } inline VerbosityFlags operator|( VerbosityFlags V1, VerbosityFlags V2 ){ return (VerbosityFlags)( (int)V1|(int)V2 ); } inline VerbosityFlags& operator|= ( VerbosityFlags& f, VerbosityFlags g ){ f = (f | g); return f; } inline VerbosityFlags operator& ( VerbosityFlags f, VerbosityFlags g ){ return (VerbosityFlags)((int)f & (int)g ); } inline VerbosityFlags& operator&= ( VerbosityFlags& f, VerbosityFlags g ){ f = (f & g); return f; } enum OptType { UnknownOpt, StringT, IntegerT, BooleanT, VerbosityT, IFormatT, AlgoT, MetricT, WeightT, OrdeningT, MaxOpt }; inline OptType& operator++( OptType &W ){ return W = ( MaxOpt == W ) ? UnknownOpt : OptType(W+1); } enum DecayType { UnknownDecay, Zero, InvDist, InvLinear, ExpDecay, MaxDecay }; inline DecayType& operator++( DecayType &W ){ return W = ( MaxDecay == W ) ? UnknownDecay : DecayType(W+1); } enum SmoothingType { UnknownSmoothing, Default, Lidstone, MaxSmoothing }; inline SmoothingType& operator++( SmoothingType &W ){ return W = ( MaxSmoothing == W ) ? 
UnknownSmoothing : SmoothingType(W+1); } enum normType { unknownNorm, noNorm, probabilityNorm, addFactorNorm, logProbNorm, maxNorm }; inline normType& operator++( normType &W ){ return W = ( maxNorm == W ) ? noNorm : normType(W+1); } extern const std::string DecayName[][2]; extern const std::string OrdeningName[][2]; extern const std::string WeightName[][2]; extern const std::string MetricName[][2]; extern const std::string InputFormatName[][2]; extern const std::string AlgorithmName[][2]; extern const std::string SmoothingName[][2]; extern const std::string VerbosityName[][2]; extern const std::string NormalisationName[][2]; WeightType charToWeig( char ); AlgorithmType charToAlg( char ); normType charToNorm( char ); } namespace TiCC { // // We create specializations of TiCC templates // the must be placed in the TiCC namespace (isn't it?) // using namespace Timbl; template <> inline DecayType stringTo( const std::string& str ) { DecayType d = UnknownDecay; for ( ++d; d < MaxDecay; ++d ){ if ( compare_nocase( str, DecayName[d][0] ) || compare_nocase( str, DecayName[d][1] ) ){ return d; } } throw( std::runtime_error( "conversion from string '" + str + "' to decayType failed" ) ); } template <> inline std::string toString( const DecayType& W, bool b ){ if ( b ){ return DecayName[W][1]; } else { return DecayName[W][0]; } } template <> inline OrdeningType stringTo( const std::string& str ) { OrdeningType d = UnknownOrdening; for ( ++d; d < MaxOrdening; ++d ){ if ( compare_nocase( str, OrdeningName[d][0] ) || compare_nocase( str, OrdeningName[d][1] ) ){ return d; } } throw( std::runtime_error( "conversion from string '" + str + "' to ordeningType failed" ) ); } template <> inline std::string toString( const OrdeningType& W, bool b ){ return OrdeningName[W][(b?1:0)]; } template <> inline MetricType stringTo( const std::string& str ) { MetricType d = UnknownMetric; for ( ++d; d < MaxMetric; ++d ){ if ( compare_nocase( str, MetricName[d][0] ) || compare_nocase( str, 
MetricName[d][1] ) ){ return d; } } throw( std::runtime_error( "conversion from string '" + str + "' to metricType failed" ) ); } template <> inline std::string toString( const MetricType& W, bool b ){ if ( b ) return MetricName[W][1]; else return MetricName[W][0]; } template <> inline WeightType stringTo( const std::string& str ) { WeightType w = Unknown_w; if ( str.length() == 1 && isdigit(str[0]) ){ w = charToWeig( str[0] ); } if ( w != Unknown_w ) return w; for ( ++w; w < Max_w; ++w ){ if ( compare_nocase( str, WeightName[w][0] ) || compare_nocase( str, WeightName[w][1] ) ){ return w; } } throw( std::runtime_error( "conversion from string '" + str + "' to weightType failed" ) ); } template <> inline std::string toString( const WeightType& W, bool b ){ if ( b ){ return WeightName[W][1]; } else { return WeightName[W][0]; } } template <> inline AlgorithmType stringTo( const std::string& str ) { AlgorithmType a = Unknown_a; if ( str.length() == 1 && isdigit(str[0]) ){ a = charToAlg( str[0] ); } if ( a != Unknown_a ) return a; for ( ++a; a < Max_a; ++a ){ if ( compare_nocase( str, AlgorithmName[a][0] ) || compare_nocase( str, AlgorithmName[a][1] ) ){ return a; } } throw( std::runtime_error( "conversion from string '" + str + "' to algorithmType failed" ) ); } template <> inline std::string toString( const AlgorithmType& a, bool b ){ if ( b ) return AlgorithmName[a][1]; else return AlgorithmName[a][0]; } template <> inline InputFormatType stringTo( const std::string& str ){ InputFormatType d = UnknownInputFormat; for ( ++d; d < MaxInputFormat; ++d ){ if ( compare_nocase( str, InputFormatName[d][0] ) || compare_nocase( str, InputFormatName[d][1] ) ){ return d; } } throw( std::runtime_error( "conversion from string '" + str + "' to weightType failed" ) ); } template <> inline std::string toString( const InputFormatType& i, bool b ){ if ( b ){ return InputFormatName[i][1]; } else { return InputFormatName[i][0]; } } template <> inline SmoothingType stringTo( const 
std::string& str ) { SmoothingType d = UnknownSmoothing; for ( ++d; d < MaxSmoothing; ++d ){ if ( compare_nocase( str, SmoothingName[d][0] ) || compare_nocase( str, SmoothingName[d][1] ) ){ return d; } } throw( std::runtime_error( "conversion from string '" + str + "' to smoothingType failed" ) ); } template <> inline std::string toString( const SmoothingType& s, bool b ){ if ( b ){ return SmoothingName[s][1]; } else { return SmoothingName[s][0]; } } template <> inline normType stringTo( const std::string& str ) { normType d = unknownNorm; if ( str.length() == 1 && isdigit(str[0]) ){ d = charToNorm( str[0] ); } if ( d != unknownNorm ) return d; for ( ++d; d < maxNorm; ++d ){ if ( compare_nocase( str, NormalisationName[d][0] ) || compare_nocase( str, NormalisationName[d][1] ) ){ return d; } } throw( std::runtime_error( "conversion from string '" + str + "' to normalisationType failed" ) ); } template <> inline std::string toString( const normType& s, bool b ){ if ( b ){ return NormalisationName[s][1]; } else { return NormalisationName[s][0]; } } inline bool string_to_verbflag( const std::string& line, VerbosityFlags &a ){ unsigned int i; for ( i=0; VerbosityName[i][0][0] != '\0'; i++ ) if ( compare_nocase( line, VerbosityName[i][0] ) || compare_nocase( line, VerbosityName[i][1] ) ){ if ( i==0 ){ a = NO_VERB; } else{ a = (VerbosityFlags)(1<<(i-1)); } return true; } return false; } template <> inline VerbosityFlags stringTo( const std::string& str ) { std::vector tmp; size_t cnt = TiCC::split_at( str, tmp, "+" ); VerbosityFlags V = NO_VERB; for ( size_t i=0; i < cnt; ++i ){ VerbosityFlags Flag; if ( string_to_verbflag( tmp[i], Flag ) ){ V |= Flag; } else { throw( std::runtime_error( "conversion from string '" + str + "' to verbosityFlag failed" ) ); } } return V; } inline std::string verbosity_to_string( int v, bool full ){ if ( v == 0 ) return VerbosityName[0][(full?1:0)]; else { std::string OutLine; bool first = true; for ( unsigned int i=1; VerbosityName[i][0][0] 
!= '\0'; ++i ) if ( v & (1<<(i-1)) ){ if (first) first = false; else OutLine += '+'; OutLine += VerbosityName[i][(full?1:0)]; } return OutLine; } } template <> inline std::string toString( const VerbosityFlags& v, bool full ){ return verbosity_to_string( (int)v, full ); } inline std::string toString( const AlgorithmType& a, bool b=false ){ return toString( a, b ); } inline std::string toString( const MetricType& a, bool b=false ){ return toString( a, b ); } inline std::string toString( const WeightType& a, bool b=false ){ return toString( a, b ); } inline std::string toString( const InputFormatType& a, bool b=false ){ return toString( a, b ); } inline std::string toString( const DecayType& a, bool b=false ){ return toString( a, b ); } inline std::string toString( const OrdeningType& a, bool b=false ){ return toString( a, b ); } inline std::string toString( const VerbosityFlags& a, bool b=false ){ return toString( a, b ); } } #endif LanguageMachines-timbl-642727d/include/timbl/neighborSet.h000066400000000000000000000072071451477526200234720ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . 
For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #ifndef TIMBL_NEIGHBORSET_H #define TIMBL_NEIGHBORSET_H #include "timbl/Types.h" namespace Timbl { class ClassDistribution; class WClassDistribution; class decayStruct { friend std::ostream& operator<<( std::ostream&, const decayStruct& ); friend std::ostream& operator<<( std::ostream&, const decayStruct * ); public: decayStruct():alpha(0),beta(0){}; decayStruct(double a, double b ):alpha(a),beta(b){}; virtual ~decayStruct(){}; virtual std::ostream& put( std::ostream& ) const = 0; virtual DecayType type() const = 0; double alpha; double beta; }; class zeroDecay: public decayStruct { public: zeroDecay():decayStruct(){}; std::ostream& put( std::ostream& ) const override; DecayType type() const override { return Zero;}; }; class invLinDecay: public decayStruct { public: invLinDecay():decayStruct(){}; std::ostream& put( std::ostream& ) const override; DecayType type() const override { return InvLinear;}; }; class invDistDecay: public decayStruct { public: invDistDecay():decayStruct(){}; std::ostream& put( std::ostream& ) const override; DecayType type() const override { return InvDist;}; }; class expDecay: public decayStruct { public: explicit expDecay( double alp ): decayStruct(alp,1.0){}; expDecay( double alp, double bet ): decayStruct(alp,bet){}; std::ostream& put( std::ostream& ) const override; DecayType type() const override { return ExpDecay;}; }; class neighborSet { friend std::ostream& operator<<( std::ostream&, const neighborSet& ); friend std::ostream& operator<<( std::ostream&, const neighborSet * ); friend class BestArray; public: neighborSet(); ~neighborSet(); neighborSet( const neighborSet& in ); neighborSet& operator=( const neighborSet& ); size_t size() const; void reserve( size_t ); void clear(); void truncate( size_t ); void merge( const neighborSet& ); double getDistance( size_t ) const; double bestDistance() 
const { return getDistance(0); }; const ClassDistribution *getDistribution( size_t ) const; WClassDistribution *bestDistribution( const decayStruct * =0, size_t =0 ) const ; double relativeWeight( const decayStruct *, size_t ) const; bool setShowDistance( bool b ) const { bool ret = showDistance; showDistance = b; return ret; } bool setShowDistribution( bool b ) const { bool ret = showDistribution; showDistribution = b; return ret; } private: mutable bool showDistance; mutable bool showDistribution; void push_back( double, const ClassDistribution & ); std::vector distances; std::vector distributions; }; } #endif LanguageMachines-timbl-642727d/m4/000077500000000000000000000000001451477526200166305ustar00rootroot00000000000000LanguageMachines-timbl-642727d/m4/.gitignore000066400000000000000000000001231451477526200206140ustar00rootroot00000000000000*~ libtool.m libtool.m4 ltoptions.m4 ltsugar.m4 ltversion.m4 lt~obsolete.m4 pkg.m4 LanguageMachines-timbl-642727d/m4/Makefile.am000066400000000000000000000000641451477526200206640ustar00rootroot00000000000000# $Id: $ # $URL: $ extra_DIST = ax_openmp.m4 pkg.m4LanguageMachines-timbl-642727d/m4/ac_osx_pkg.m4000066400000000000000000000040621451477526200212110ustar00rootroot00000000000000# osx_pkg.m4 - Macros to add OSX brew locations to pkg-config. -*- Autoconf -*- # serial 1 (pkg-config-0.24) # # Copyright © 2018 Ko van der Sloot # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. 
# # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. # # As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a # configuration script generated by Autoconf, you may include it under # the same distribution terms that you use for the rest of that program. # AC_OSX_PKG_ALL() # add all /opt/{package} directories to the PKG_CONFIG search path # ---------------------------------- AC_DEFUN([AC_OSX_PKG_ALL], [ case ${host_os} in linux*) # linux is wellbehaved ;; darwin*) # darwin isn't for i in `ls /usr/local/opt/` do if test -d "/usr/local/opt/$i/lib/pkgconfig" then export PKG_CONFIG_PATH="$PKG_CONFIG_PATH:/usr/local/opt/$i/lib/pkgconfig" fi done ;; esac ]) # AC_OSX_PKG_ALL([LIST_OF_PACKAGES]) # fore every packake in LIST_OF_PACKAGES, add the /opt/{package} directory # to the PKG_CONFIG search path # ---------------------------------- AC_DEFUN([AC_OSX_PKG], [ case ${host_os} in linux*) # linux is wellbehaved ;; darwin*) # darwin isn't for i in $* do if test -d "/usr/local/opt/$i/lib/pkgconfig" then export PKG_CONFIG_PATH="$PKG_CONFIG_PATH:/usr/local/opt/$i/lib/pkgconfig" fi done ;; esac ]) LanguageMachines-timbl-642727d/src/000077500000000000000000000000001451477526200170775ustar00rootroot00000000000000LanguageMachines-timbl-642727d/src/.gitignore000066400000000000000000000000731451477526200210670ustar00rootroot00000000000000*~ *.o *.lo *.la .deps .libs timbl simpletest* *.out *.log LanguageMachines-timbl-642727d/src/BestArray.cxx000066400000000000000000000243401451477526200215220ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public 
License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #include "timbl/BestArray.h" #include #include #include // for DBL_DIG, DBL_MAX #include // for max #include // for fabs #include // for UnicodeString #include // for operator<< #include // for icu #include "timbl/Common.h" #include "timbl/neighborSet.h" #include "ticcutils/XMLtools.h" #include "ticcutils/StringOps.h" #include "ticcutils/Unicode.h" #include "ticcutils/json.hpp" namespace Timbl { using namespace std; using namespace icu; using namespace Common; using namespace nlohmann; BestRec::BestRec(): bestDistance( 0.0 ) {} BestRec::~BestRec(){ for ( auto const& b : bestDistributions ){ delete b; } } BestArray::~BestArray(){ for ( auto const& b : bestArray ){ delete b; } } ostream& operator<< ( ostream& os, const BestRec *b ){ if ( b ){ os << b->aggregateDist.DistToString(); int OldPrec = os.precision(DBL_DIG-1); os.setf(ios::showpoint); os << "\t" << b->bestDistance; os.precision(OldPrec); os << endl; } else { os << "bestrec is null!" << endl; } return os; } void BestArray::init( unsigned int numN, unsigned int maxB, bool storeI, bool showDi, bool showDb ){ _storeInstances = storeI; _showDi = showDi; _showDb = showDb; maxBests = maxB; // When necessary, take a larger array. (initialy it has 0 length) // Also check if verbosity has changed and a BestInstances array // is required. 
// size_t S = size; size = numN; if ( S < size ){ bestArray.reserve( size ); for ( size_t k=S; k < size; ++k ) { bestArray.push_back( new BestRec() ); } } size_t penalty = 0; for ( const auto& best : bestArray ){ best->bestDistance = (DBL_MAX - numN) + penalty++; if ( best->bestInstances.empty() ){ if ( _storeInstances ){ best->bestInstances.reserve( maxBests ); best->bestDistributions.reserve( maxBests ); } } else { for ( auto const& bd : best->bestDistributions ){ delete bd; } best->bestInstances.clear(); best->bestDistributions.clear(); } best->aggregateDist.clear(); } } double BestArray::addResult( double Distance, const ClassDistribution *Distr, const UnicodeString& neighbor ){ // We have the similarity in Distance, and a num_of_neighbors // dimensional array with best similarities. // Check, and add/replace/move/whatever. // for ( unsigned int k = 0; k < size; ++k ) { BestRec *best = bestArray[k]; if (fabs(Distance - best->bestDistance) < Epsilon) { // Equal...just add to the end. // best->aggregateDist.Merge( *Distr ); if ( _storeInstances && best->bestInstances.size() < maxBests ){ best->bestInstances.push_back( neighbor ); best->bestDistributions.push_back( Distr->to_VD_Copy() ); } break; } // Check if better than bests[k], insert (or replace if // it's the lowest of the k bests). // /* Example (no_n = 3): k distance number 0 2 3 1 4 2 2 6 1 sim = 1 (dus beste) */ else if (Distance < best->bestDistance) { if (k == size - 1) { // // Replace. // best->bestDistance = Distance; if ( _storeInstances ){ for ( const auto& it : best->bestDistributions ){ delete it; } best->bestInstances.clear(); best->bestDistributions.clear(); best->bestInstances.push_back( neighbor ); best->bestDistributions.push_back( Distr->to_VD_Copy() ); } best->aggregateDist.clear(); best->aggregateDist.Merge( *Distr ); } else { // // Insert. First shift the rest up. 
// BestRec *keep = bestArray[size-1]; for ( size_t i = size - 1; i > k; i--) { bestArray[i] = bestArray[i-1]; } // i // // And now insert. // keep->bestDistance = Distance; if ( _storeInstances ){ for ( const auto& it :keep->bestDistributions ){ delete it; } keep->bestInstances.clear(); keep->bestDistributions.clear(); keep->bestInstances.push_back( neighbor ); keep->bestDistributions.push_back( Distr->to_VD_Copy() ); } keep->aggregateDist.clear(); keep->aggregateDist.Merge( *Distr ); bestArray[k] = keep; } break; } // Distance < fBest } // k return bestArray[size-1]->bestDistance; } void BestArray::initNeighborSet( neighborSet& ns ) const { ns.clear(); for ( auto const& best : bestArray ){ ns.push_back( best->bestDistance, best->aggregateDist ); } } void BestArray::addToNeighborSet( neighborSet& ns, size_t n ) const { ns.push_back( bestArray[n-1]->bestDistance, bestArray[n-1]->aggregateDist ); } xmlNode *BestArray::toXML() const { xmlNode *top = TiCC::XmlNewNode( "neighborset" ); size_t k = 0; for ( auto const& best : bestArray ){ ++k; if ( _storeInstances ){ size_t totalBests = best->totalBests(); if ( totalBests == 0 ){ break; // TRIBL algorithms do this! 
	}
	// Emit one <neighbors> element per k, carrying the distance and
	// the (possibly truncated) list of stored instances.
	xmlNode *nbs = TiCC::XmlNewChild( top, "neighbors" );
	TiCC::XmlSetAttribute( nbs, "k", TiCC::toString(k) );
	TiCC::XmlSetAttribute( nbs, "total", TiCC::toString(totalBests) );
	TiCC::XmlSetAttribute( nbs, "distance",
			       TiCC::toString( best->bestDistance ) );
	if ( maxBests < totalBests ){
	  // more neighbors exist than we stored; record the cap
	  TiCC::XmlSetAttribute( nbs, "limited", TiCC::toString( maxBests ) );
	}
	for ( unsigned int m=0; m < best->bestInstances.size(); ++m ){
	  xmlNode *nb = TiCC::XmlNewChild( nbs, "neighbor" );
	  TiCC::XmlNewTextChild( nb, "instance",
				 TiCC::UnicodeToUTF8(best->bestInstances[m]) );
	  if ( _showDb ){
	    // bestDistributions runs parallel to bestInstances
	    TiCC::XmlNewTextChild( nb, "distribution",
				   best->bestDistributions[m]->DistToString() );
	  }
	}
      }
      else {
	// no stored instances: emit only the aggregate distribution/distance
	if ( best->aggregateDist.ZeroDist() ){
	  break;
	}
	xmlNode *nbs = TiCC::XmlNewChild( top, "neighbors" );
	TiCC::XmlSetAttribute( nbs, "k", TiCC::toString(k) );
	if ( _showDb ){
	  TiCC::XmlNewTextChild( nbs, "distribution",
				 best->aggregateDist.DistToString() );
	}
	if ( _showDi ){
	  TiCC::XmlNewTextChild( nbs, "distance",
				 TiCC::toString(best->bestDistance) );
	}
      }
    }
    return top;
  }

  // Serialize one neighbor instance as a JSON object:
  //   { "instance": <utf8 string> [, "distribution": <db> ] }
  // The "distribution" key is omitted when db is empty.
  json neighbor_to_json( const UnicodeString& nb,
			 const string& db ){
    json result;
    result["instance"] = TiCC::UnicodeToUTF8(nb);
    if ( !db.empty() ){
      result["distribution"] = db;
    }
    return result;
  }

  // Serialize one BestRec (the record for the k-th neighbor distance)
  // as a JSON object. Returns an empty json when the record holds no data.
  json BestArray::record_to_json( const BestRec *best, size_t k ) const {
    json result;
    if ( _storeInstances ){
      size_t totalBests = best->totalBests();
      if ( totalBests > 0 ){ // TRIBL algorithms may yield 0 here!
result["k"] = k; result["total"] = totalBests; result["distance"] = best->bestDistance; if ( maxBests < totalBests ){ result["limited"] = maxBests; } if ( best->bestInstances.size() == 0 ){ } else if ( best->bestInstances.size() == 1 ){ string db; if ( _showDb ){ db = best->bestDistributions[0]->DistToString(); } result["neighbor"] = neighbor_to_json( best->bestInstances[0], db ); } else { json arr = json::array(); for ( unsigned int m=0; m < best->bestInstances.size(); ++m ){ string db; if ( _showDb ){ db = best->bestDistributions[m]->DistToString(); } arr.push_back( neighbor_to_json( best->bestInstances[m], db ) ); } result["neighbor"] = arr; } } } else { if ( !best->aggregateDist.ZeroDist() ){ result["k"] = k; if ( _showDb ){ result["distribution"] = best->aggregateDist.DistToString(); } if ( _showDi ){ result["distance"] = best->bestDistance; } } } return result; } json BestArray::to_JSON() const { json result; if ( bestArray.size() == 0 ){ return result; // empty } else if ( bestArray.size() == 1 ){ result = record_to_json( bestArray[0], 1 ); return result; } else { result = json::array(); size_t k = 0; for ( auto const& best : bestArray ){ result.push_back( record_to_json( best, ++k) ); } } return result; } ostream& operator<< ( ostream& os, const BestArray& bA ){ size_t k = 0; for ( auto const& best : bA.bestArray ){ ++k; if ( bA._storeInstances ){ size_t totalBests = best->totalBests(); if ( totalBests == 0 ){ break; // TRIBL algorithms do this! 
} os << "# k=" << k << ", " << totalBests << " Neighbor(s) at distance: "; int OldPrec = os.precision(DBL_DIG-1); os.setf(ios::showpoint); os << "\t" << best->bestDistance; os.precision(OldPrec); if ( bA.maxBests <= best->bestInstances.size() ){ os << " (only " << bA.maxBests << " shown)"; } os << endl; for ( unsigned int m=0; m < best->bestInstances.size(); ++m ){ os << "#\t" << best->bestInstances[m]; if ( bA._showDb ){ os << best->bestDistributions[m]->DistToString() << endl; } else { os << " -*-" << endl; } } } else { if ( best->aggregateDist.ZeroDist() ){ break; } os << "# k=" << k << "\t"; if ( bA._showDb ){ os << best->aggregateDist.DistToString(); } if ( bA._showDi ){ int OldPrec = os.precision(DBL_DIG-1); os.setf(ios::showpoint); os << best->bestDistance; os.precision(OldPrec); } os << endl; } } return os; } } LanguageMachines-timbl-642727d/src/CVExperiment.cxx000066400000000000000000000115541451477526200222020ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . 
For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #include #include #include #include "timbl/Common.h" #include "timbl/Types.h" #include "timbl/StringOps.h" #include "timbl/TimblExperiment.h" namespace Timbl { using namespace std; bool CV_Experiment::Prepare( const string& f, bool, bool ){ cerr << "CV prepare " << f << endl; return true; } bool CV_Experiment::CVprepare( const string& wgtFile, WeightType w, const string& probFile ){ CV_WfileName = wgtFile; CV_fileW = w; CV_PfileName = probFile; return true; } bool CV_Experiment::Learn( const string& f, bool ){ cerr << "CV Learn " << f << endl; return true; } bool CV_Experiment::checkTestFile(){ if ( !IB1_Experiment::checkTestFile() ){ return false; } else if ( doSamples() ){ FatalError( "Cannot Cross validate on a file with Examplar Weighting" ); return false; } else if ( Verbosity(FEAT_W) ){ LearningInfo( *mylog ); } return true; } bool CV_Experiment::get_file_names( const string& FileName ){ if ( !ExpInvalid() ){ size_t size = 0; ifstream file_names( FileName, ios::in ); if ( !file_names ){ Error( "Unable to read CV filenames from " + FileName ); return false; } string name; while ( getline( file_names, name ) ){ size_t tmp = examineData( name ); if ( tmp != 0 ){ if ( !Verbosity(SILENT) ){ *mylog << "Examine datafile '" << name << "' gave the following results:" << endl << "Number of Features: " << tmp << endl; showInputFormat( *mylog ); } FileNames.push_back(name); if ( size == 0 ){ size = tmp; } else { if ( tmp != size ) { Error( "mismatching number of features in file " + name + "of CV filelist " + FileName ); return false; } } } else { Error( "unable to determine number of features in file " + name + "of CV filelist " + FileName ); return false; } } if ( FileNames.size() < 3 ){ Error( "Not enough filenames found in CV filelist " + FileName + " at least 3 required" ); return false; } return true; } return false; } bool 
CV_Experiment::Test( const string& FileName, const string& OutFile ){ if ( !ConfirmOptions() ){ return false; } (void)OutFile; bool result = false; VerbosityFlags keep = get_verbosity(); set_verbosity( SILENT ); if ( get_file_names( FileName ) ){ *mylog << "Starting Cross validation test on files:" << endl; for ( const auto& name : FileNames ){ *mylog << name << endl; } size_t NumOfFiles = FileNames.size(); TimblExperiment::Prepare( FileNames[1], false ); TimblExperiment::Learn( FileNames[1], false ); for ( size_t filenum = 2; filenum < NumOfFiles; ++filenum ){ Expand( FileNames[filenum] ); } string outName; string percName; for ( size_t SkipFile = 0; SkipFile < NumOfFiles-1; ++SkipFile ) { outName = correct_path( FileNames[SkipFile], outPath, false ); outName += ".cv"; percName = outName; percName += ".%"; set_verbosity( keep ); if ( CV_WfileName != "" ){ GetWeights( CV_WfileName, CV_fileW ); } if ( !CV_PfileName.empty() ){ GetArrays( CV_PfileName ); } result = TimblExperiment::Test( FileNames[SkipFile], outName ); if ( result ){ result = createPercFile( percName ); } if ( !result ){ return false; } set_verbosity( SILENT ); Expand( FileNames[SkipFile] ); Remove( FileNames[SkipFile+1] ); } outName = correct_path( FileNames[NumOfFiles-1], outPath, false ); outName += ".cv"; percName = outName; percName += ".%"; set_verbosity( keep ); if ( CV_WfileName != "" ){ GetWeights( CV_WfileName, CV_fileW ); } if ( !CV_PfileName.empty() ){ GetArrays( CV_PfileName ); } result = TimblExperiment::Test( FileNames[NumOfFiles-1], outName ); if ( result ){ result = createPercFile( percName ); } } return result; } } LanguageMachines-timbl-642727d/src/Choppers.cxx000066400000000000000000000325121451477526200214110ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public 
License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #include "timbl/Choppers.h" #include // for isspace #include #include #include #include #include "ticcutils/StringOps.h" #include "ticcutils/Unicode.h" #include "ticcutils/PrettyPrint.h" #include "timbl/Types.h" using namespace std; using namespace icu; namespace Timbl{ Chopper *Chopper::create( InputFormatType IF, bool doEx, int fLen, bool doOcc ){ Chopper *result = 0; switch ( IF ){ case C4_5: if ( doOcc ){ result = new C45_OccChopper(); } else if ( doEx ){ result = new C45_ExChopper(); } else { result = new C45_Chopper(); } break; case ARFF: if ( doOcc ){ result = new ARFF_OccChopper(); } else if ( doEx ){ result = new ARFF_ExChopper(); } else { result = new ARFF_Chopper(); } break; case SparseBin: if ( doOcc ){ result = new Bin_OccChopper(); } else if ( doEx ){ result = new Bin_ExChopper(); } else { result = new Bin_Chopper(); } break; case Sparse: if ( doOcc ){ result = new Sparse_OccChopper(); } else if ( doEx ){ result = new Sparse_ExChopper(); } else { result = new Sparse_Chopper(); } break; case Columns: if ( doOcc ){ result = new Columns_OccChopper(); } else if ( doEx ){ result = new Columns_ExChopper(); } else { result = new Columns_Chopper(); } break; case Tabbed: if ( doOcc ){ result = new Tabbed_OccChopper(); } else if ( doEx ){ result = new Tabbed_ExChopper(); } else { result = new Tabbed_Chopper(); } break; case Compact: if ( doOcc ){ result = new 
Compact_OccChopper( fLen ); } else if ( doEx ) { result = new Compact_ExChopper( fLen ); } else { result = new Compact_Chopper( fLen ); } break; default: break; } return result; } void Chopper::init( const UnicodeString& s, size_t len, bool stripDot ) { vSize = len+1; choppedInput.resize(vSize); UnicodeString split = s; // cerr << " strip input:" << split << endl; // trim spaces at end split = TiCC::rtrim( split ); if ( stripDot ){ // now trim at most 1 trailing dot if ( split[split.length()-1] == '.' ){ split.remove( split.length()-1 ); } } // trim more spaces at end strippedInput = TiCC::rtrim( split ); // cerr << "stripped input:" << strippedInput << endl; } static UnicodeString extractWeight( const UnicodeString& buffer, UnicodeString& wght ) { // cerr << "extract weight from '" << buffer << "'" << endl; UnicodeString tmp = buffer; // first remove trailing whitespace and dots tmp = TiCC::rtrim( tmp, " ." ); // cerr << "step 1: '" << tmp << "'" << endl; int e_pos = tmp.length()-1; for ( ; e_pos >= 0; --e_pos ){ if ( tmp[e_pos] == ' ' || tmp[e_pos] == '\t' ){ break; } } if ( e_pos == 0 ){ wght = ""; } else { wght = UnicodeString( tmp, e_pos+1 ); tmp.remove( e_pos ); } tmp = TiCC::rtrim( tmp, "\t ." ); // cerr << "result='" << tmp << "' with weight: '" << wght << "'" << endl; return tmp; } static UnicodeString extractOcc( const UnicodeString& Buffer, UnicodeString& occ ) { return extractWeight( Buffer, occ ); } size_t Chopper::countFeatures( const UnicodeString& inBuffer, InputFormatType IF, int F_length, bool chopTail ) { size_t result = 0; UnicodeString buffer = inBuffer; if ( chopTail ){ UnicodeString dummy; buffer = extractWeight( buffer, dummy ); } size_t len = buffer.length(); switch ( IF ){ case ARFF: case C4_5: for ( int i=0; i < buffer.length(); ++i ){ if ( buffer[i] == ','){ ++result; } }; break; case Compact: if ( F_length == 0 ){ throw runtime_error( "-F Compact specified, but Feature Length not set." 
" (-l option)" ); } else { result = (len / F_length) - 1; } break; case Columns: { vector parts = TiCC::split( buffer ); result = parts.size() - 1; }; break; case Tabbed: { vector parts = TiCC::split_at( buffer, "\t" ); result = parts.size() - 1; }; break; default: throw logic_error( "CountFeatures: Illegal value in switch:" + TiCC::toString(IF) ); }; return result; } InputFormatType Chopper::getInputFormat( const UnicodeString& inBuffer, bool stripTail ) { InputFormatType IF = UnknownInputFormat; UnicodeString buffer = inBuffer; if ( stripTail ){ UnicodeString dummy; buffer = extractWeight( buffer, dummy ); } size_t len = buffer.length(); int c45Cnt = 0; int columnCnt = 0; for ( unsigned int i = 0; i < len; ++i ) { if ( buffer[i] == ',' ) { ++c45Cnt; } else if ( isspace( buffer[i] ) ){ ++columnCnt; while ( i < len && isspace( buffer[i+1] ) ) ++i; if ( i >= len-1 ){ // just trailing spaces! --columnCnt; } } } if ( columnCnt == 0 && c45Cnt == 0 ){ IF = Compact; } else if ( c45Cnt >= columnCnt ){ IF = C4_5; } else { IF = Columns; } return IF; } void ExChopper::init( const UnicodeString& s, size_t len, bool stripDot ) { UnicodeString split = s; vSize = len+1; choppedInput.resize(vSize); // trim trailing spaces split = TiCC::rtrim( split ); UnicodeString wght; split = extractWeight( split, wght ); if ( wght.isEmpty() ){ throw logic_error( "Missing sample weight" ); } else { double tmp; if ( !TiCC::stringTo( wght, tmp ) ){ throw runtime_error( "Wrong sample weight: '" + TiCC::UnicodeToUTF8(wght) + "'" ); } else { exW = tmp; } } if ( stripDot ){ // now trim at most 1 trailing dot if ( split[split.length()-1] == '.' 
){ split.remove( split.length()-1 ); } } // trim more trailing spaces strippedInput = TiCC::rtrim( split ); } void OccChopper::init( const UnicodeString& s, size_t len, bool stripDot ) { UnicodeString split = s; occ = 1; vSize = len+1; choppedInput.resize(vSize); // first trim trailing spaces split = TiCC::rtrim( split ); UnicodeString occS; // get occ split = extractOcc( split, occS ); if ( occS.isEmpty() ){ throw logic_error( "Missing occurrence" ); } else { int tmp; if ( !TiCC::stringTo( occS, tmp ) ){ throw runtime_error( "Wrong (non-integer) occurrence value: '" + TiCC::UnicodeToUTF8(occS) + "'" ); } else { occ = tmp; } } if ( stripDot ){ // now trim at most 1 trailing dot if ( split[split.length()-1] == '.' ){ split.remove( split.length()-1 ); } } // strip remaining trailing spaces strippedInput = TiCC::rtrim( split ); } using TiCC::operator<<; bool C45_Chopper::chop( const UnicodeString& InBuf, size_t len ){ // Function that takes a line, and chops it up into substrings, // which represent the feature-values and the target-value. init( InBuf, len, true ); vector splits = TiCC::split_at( strippedInput, "," ); size_t res = splits.size(); if ( res != vSize ){ return false; } for ( size_t i=0; i < res ; ++i ){ choppedInput[i] = StrToCode( splits[i] ); } // cerr << "Chopped input=" << choppedInput << endl; return true; } UnicodeString C45_Chopper::getString() const{ UnicodeString res; for ( const auto& part : choppedInput ) { res += CodeToStr( part ) + ","; } return res; } bool ARFF_Chopper::chop( const UnicodeString& InBuf, size_t len ){ // Lines look like this: // one, two, three , bla. // the termination dot is optional // WhiteSpace is skipped! return C45_Chopper::chop( InBuf, len ); } bool Bin_Chopper::chop( const UnicodeString& InBuf, size_t len ) { // Lines look like this: // 12, 25, 333, bla. 
// the termination dot is optional init( InBuf, len, true ); for ( size_t m = 0; m < vSize-1; ++m ){ choppedInput[m] = "0"; } vector parts = TiCC::split_exact_at( strippedInput, "," ); for ( auto const& p : parts ){ if ( &p == &parts.back() ){ choppedInput[vSize-1] = p; break; } size_t k; if ( !TiCC::stringTo( p, k ) ){ return false; } if ( k < 1 || k > vSize ){ return false; } else { choppedInput[k-1] = "1"; } } return true; } UnicodeString Bin_Chopper::getString() const { UnicodeString res; int i = 1; for ( const auto& part : choppedInput ){ if ( &part == &choppedInput.back() ){ break; } if ( part[0] == '1' ){ res += TiCC::toUnicodeString(i) + ","; } ++i; } res += choppedInput.back() + ","; return res; } bool Compact_Chopper::chop( const UnicodeString& InBuf, size_t leng ){ init( InBuf, leng, false ); // Lines look like this: // ====AKBVAK // v1v2v3v4tt // Get & add the target. // size_t len = strippedInput.length(); if ( len != vSize * fLen ){ return false; } size_t i = 0; for ( auto& part : choppedInput ){ size_t index = i * fLen; // Scan the value. // part.remove(); for ( int j = 0; j < fLen; ++j ) { part += strippedInput[index++]; } ++i; } return ( i == vSize ); // Enough? 
} UnicodeString Compact_Chopper::getString() const { UnicodeString res; for ( const auto& part : choppedInput ){ res += CodeToStr( part ); } return res; } bool Columns_Chopper::chop( const UnicodeString& InBuf, size_t len ){ // Lines look like this: // one two three bla init( InBuf, len, false ); vector splits = TiCC::split( strippedInput ); size_t res = splits.size(); if ( res != vSize ){ return false; } for ( size_t i=0; i < res ; ++i ){ choppedInput[i] = StrToCode( splits[i] ); } return true; } UnicodeString Columns_Chopper::getString() const { UnicodeString res = TiCC::join( choppedInput ); return res; } bool Tabbed_Chopper::chop( const UnicodeString& InBuf, size_t len ){ // Lines look like this: // oneTABtwoTAB TABthreeTABbla init( InBuf, len, false ); vector splits = TiCC::split_at( strippedInput, "\t" ); size_t res = splits.size(); if ( res != vSize ){ return false; } for ( size_t i=0; i < res ; ++i ){ choppedInput[i] = StrToCode( splits[i], false ); } return true; } UnicodeString Tabbed_Chopper::getString() const { UnicodeString res; for ( const auto& part : choppedInput ){ res += CodeToStr( part ) + "\t"; } return res; } bool Sparse_Chopper::chop( const UnicodeString& InBuf, size_t len ){ // Lines look like this: // (12,value1) (25,value2) (333,value3) bla. // the termination dot is optional init( InBuf, len, true ); for ( size_t m = 0; m < vSize-1; ++m ){ choppedInput[m] = DefaultSparseString; } choppedInput[vSize-1] = ""; vector entries = TiCC::split_at_first_of( strippedInput, "()" ); size_t num_ent = entries.size(); if ( num_ent < 1 ){ return false; } for ( const auto& ent : entries ){ --num_ent; vector parts = TiCC::split_at( ent, "," ); size_t num = parts.size(); if ( num != 2 ){ if ( num == 1 && num_ent == 0 ){ // the target has no ',' parts[0].trim(); choppedInput[vSize-1] = parts[0]; return !choppedInput[vSize-1].isEmpty(); } return false; } if ( num_ent == 0 ){ // missing a target! 
return false; } size_t index; if ( !TiCC::stringTo( parts[0], index ) ){ return false; } if ( index < 1 || index >= vSize ){ return false; } choppedInput[index-1] = StrToCode( parts[1] ); } return true; } UnicodeString Sparse_Chopper::getString() const { UnicodeString res; int i = 1; for ( const auto& part : choppedInput ){ if ( &part == &choppedInput.back() ){ break; } if ( part != DefaultSparseString ){ res += "(" + TiCC::toUnicodeString( i ) + ","; res += CodeToStr(part); res += ")"; } ++i; } res += choppedInput.back() + ","; return res; } } LanguageMachines-timbl-642727d/src/Common.cxx000066400000000000000000000025061451477526200210560ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . 
For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #include "timbl/Common.h" #include "config.h" using namespace std; namespace Common { string VersionInfo( bool full ){ // obsolete if ( full ){ return BuildInfo(); } else { return Version(); } } string Version() { return VERSION; } string VersionName() { return PACKAGE_STRING; } string BuildInfo() { return Version() + ", compiled on " + __DATE__ + ", " + __TIME__; } } LanguageMachines-timbl-642727d/src/Features.cxx000066400000000000000000000616771451477526200214220ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . 
For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #include #include #include #include // for sort() #include // for accumulate() #include // for fabs() #include "timbl/Common.h" #include "timbl/Types.h" #include "timbl/Metrics.h" #include "timbl/Matrices.h" #include "timbl/Instance.h" #include "ticcutils/Unicode.h" #include "ticcutils/UniHash.h" namespace Timbl { using namespace std; using namespace Common; using icu::UnicodeString; FeatureValue::FeatureValue( const UnicodeString& value, size_t hash_val ): ValueClass( value, hash_val ), ValueClassProb( 0 ) { } FeatureValue::FeatureValue( const UnicodeString& s ): ValueClass( s, 0 ), ValueClassProb(0){ _frequency = 0; } FeatureValue::~FeatureValue( ){ delete ValueClassProb; } Feature::Feature( Hash::UnicodeHash *T ): metric_matrix( 0 ), TokenTree(T), metric( 0 ), ignore( false ), numeric( false ), vcpb_read( false ), PrestoreStatus(ps_undef), Prestored_metric( UnknownMetric ), entropy( 0.0 ), info_gain (0.0), split_info(0.0), gain_ratio(0.0), chi_square(0.0), shared_variance(0.0), standard_deviation(0.0), matrix_clip_freq(10), n_min (0.0), n_max (0.0), weight(0.0), is_reference(false) {} Feature::Feature( const Feature& in ): MsgClass( in ){ *this = in; is_reference = true; } Feature& Feature::operator=( const Feature& in ){ if ( this != &in ){ metric_matrix = in.metric_matrix; metric = in.metric; PrestoreStatus = in.PrestoreStatus; Prestored_metric = in.Prestored_metric; ignore = in.ignore; numeric = in.numeric; vcpb_read = in.vcpb_read; entropy = in.entropy; info_gain = in.info_gain; split_info = in.split_info; gain_ratio = in.gain_ratio; chi_square = in.chi_square; shared_variance = in.shared_variance; standard_deviation = in.standard_deviation; matrix_clip_freq = in.matrix_clip_freq; n_dot_j = in.n_dot_j; n_i_dot = in.n_i_dot; n_min = in.n_min; n_max = in.n_max; weight = in.weight; values_array = in.values_array; 
reverse_values = in.reverse_values; TokenTree = in.TokenTree; } return *this; } void Feature::InitSparseArrays(){ if ( !is_reference ){ // Loop over all values. // for ( const auto& FV : values_array ){ size_t freq = FV->ValFreq(); FV->ValueClassProb->Clear(); if ( freq > 0 ){ // Loop over all present classes. // for ( const auto& tit : FV->TargetDist ){ FV->ValueClassProb->Assign( tit.second->Index(), tit.second->Freq()/(double)freq ); } } } } } size_t Feature::EffectiveValues() const { return count_if( values_array.begin(), values_array.end(), [&]( const FeatureValue* v ){ return (v->ValFreq() > 0); } ); } size_t Feature::TotalValues() const { return accumulate( values_array.begin(), values_array.end(), 0, [&]( size_t r, const FeatureValue *v ){ return r + v->ValFreq(); } ); } FeatureValue *Feature::Lookup( const UnicodeString& str ) const { FeatureValue *result = NULL; unsigned int hash_val = TokenTree->lookup( str ); if ( hash_val > 0 ) { auto const& it = reverse_values.find( hash_val ); if ( it != reverse_values.end() ){ result = it->second; } } return result; } FeatureValue *Feature::add_value( const UnicodeString& valstr, TargetValue *tv, int freq ){ unsigned int hash_val = TokenTree->hash( valstr ); // cerr << "hash(" << valstr << ") geeft: " << hash_val << endl; return add_value( hash_val, tv, freq ); } FeatureValue *Feature::add_value( size_t hash_val, TargetValue *tv, int freq ){ auto const& it = reverse_values.find( hash_val ); if ( it == reverse_values.end() ){ const UnicodeString& value = TokenTree->reverse_lookup( hash_val ); // cerr << "lookup(" << index << ") geeft: " << value << endl; // we want to store the singleton value for this index // so we MUST reverse lookup the index FeatureValue *fv = new FeatureValue( value, hash_val ); fv->ValFreq( freq ); reverse_values[hash_val] = fv; values_array.push_back( fv ); } else { it->second->IncValFreq( freq ); } FeatureValue *result = reverse_values[hash_val]; if ( tv ){ result->TargetDist.IncFreq(tv, 
freq ); } return result; } bool Feature::increment_value( FeatureValue *FV, const TargetValue *tv ){ bool result = false; if ( FV ){ FV->incr_val_freq(); if ( tv ){ FV->TargetDist.IncFreq(tv,1); } result = true; } return result; } bool Feature::decrement_value( FeatureValue *FV, const TargetValue *tv ){ bool result = false; if ( FV ){ FV->decr_val_freq(); if ( tv ){ FV->TargetDist.DecFreq(tv); } result = true; } return result; } bool Feature::AllocSparseArrays( size_t Dim ){ // Loop over all values. // for ( const auto& FV : values_array ){ // Loop over all classes. if ( FV->ValueClassProb == NULL ){ if ( !(FV->ValueClassProb = new SparseValueProbClass( Dim )) ){ return false; } } } return true; } bool Feature::isNumerical() const { if ( metric && metric->isNumerical() ){ return true; } else { return false; } } bool Feature::isStorableMetric() const { if ( metric && metric->isStorable() ){ return true; } else { return false; } } struct D_D { D_D(): dist(0), value(0.0) {}; explicit D_D( FeatureValue *fv ): value(0.0) { if ( !TiCC::stringTo( fv->name(), value ) ){ throw( logic_error("called DD with an non-numeric value" ) ); } dist = &fv->TargetDist; } ClassDistribution *dist; double value; }; bool dd_less( const D_D* dd1, const D_D* dd2 ){ return dd1->value < dd2->value; } void Feature::NumStatistics( vector& FVBin, double DBentropy ){ size_t BinSize = FVBin.size(); double Prob, FVEntropy; size_t TotalVals = TotalValues(); entropy = 0.0; vector ddv; size_t dd_len = values_array.size(); ddv.reserve( dd_len ); for ( const auto& FV : values_array ){ if ( FV->ValFreq() > 0 ){ ddv.push_back( new D_D( FV ) ); } } sort( ddv.begin(), ddv.end(), dd_less ); int num_per_bin = (int)floor( (double)dd_len / BinSize); size_t rest = dd_len - num_per_bin * BinSize; if ( rest ){ num_per_bin++; } int jj = 0; int cnt = 0; for ( const auto& it: ddv ){ FVBin[jj]->TargetDist.Merge( *it->dist ); if ( ++cnt >= num_per_bin ){ ++jj; if ( --rest == 0 ){ --num_per_bin; } cnt = 0; } } for ( auto 
const& it: ddv ){ delete it; } for ( size_t k=0; k < BinSize; k++ ){ FeatureValue *pnt = FVBin[k]; size_t Freq = pnt->TargetDist.totalSize(); pnt->ValFreq( Freq ); if ( Freq > 0 ){ // Entropy for this FV pair. // FVEntropy = 0.0; for ( const auto& it : pnt->TargetDist ){ Prob = it.second->Freq()/(double)Freq; FVEntropy += Prob * Log2(Prob); } entropy += -FVEntropy * Freq / (double)TotalVals; } } entropy = fabs( entropy ); // Info gain. // info_gain = DBentropy - entropy; // And the split info. // split_info = 0.0; for ( size_t l=0; l < BinSize; ++l ){ size_t Freq = FVBin[l]->ValFreq(); if ( Freq > 0 ){ Prob = Freq / (double)TotalVals; split_info += Prob * Log2(Prob); } } split_info = -split_info; // Gain ratio. // if ( fabs(split_info) FVBin(BinSize); for ( int i=0; i < BinSize; ++i ){ UnicodeString dumname = "dum" + TiCC::toUnicodeString( i ); FVBin[i] = new FeatureValue( dumname ); } NumStatistics( FVBin, DBentropy ); if ( full ){ ChiSquareStatistics( FVBin, Targs ); int cnt = 0; // count effective values in Bin for ( int i=0; i < BinSize; ++i ){ if ( FVBin[i]->ValFreq() > 0 ){ ++cnt; } } SharedVarianceStatistics( Targs, cnt ); } for ( const auto& it : FVBin ){ delete it; } } void Feature::Statistics( double DBentropy ){ size_t TotalVals = TotalValues(); entropy = 0.0; // Loop over the values. for ( const auto& fv : values_array ){ // Entropy for this FV pair. size_t Freq = fv->ValFreq(); if ( Freq > 0 ){ double FVEntropy = 0.0; for ( const auto& tit : fv->TargetDist ){ double Prob = tit.second->Freq() / (double)Freq; FVEntropy += Prob * Log2(Prob); } entropy += -FVEntropy * Freq / (double)TotalVals; } } entropy = fabs( entropy ); // Info. gain. // info_gain = DBentropy - entropy; if ( info_gain < 0.0 ){ info_gain = 0.0; } // And the split. info. // split_info = 0.0; for ( const auto& fv : values_array ){ double Prob = fv->ValFreq() / (double)TotalVals; if ( Prob > 0 ) { split_info += Prob * Log2(Prob); } } split_info = -split_info; // Gain ratio. 
// if ( fabs(split_info) < Epsilon ){ gain_ratio = 0.0; } else { gain_ratio = info_gain / split_info; } } void Feature::ChiSquareStatistics( vector& FVA, const Targets& Targs ){ size_t Num_Vals = FVA.size(); chi_square = 0.0; long int n_dot_dot = 0; size_t Size = Targs.num_of_values(); n_dot_j.resize(Size,0); n_i_dot.resize(Num_Vals,0); for ( size_t j = 0; j < Size; ++j ){ // ALL values should be zeroed n_dot_j[j] = 0; } for ( size_t i = 0; i < Num_Vals; ++i ){ n_i_dot[i] = 0; // ALL values should be zeroed FeatureValue *fv = FVA[i]; for ( const auto& tit : fv->TargetDist ){ n_dot_j[tit.second->Index()-1] += tit.second->Freq(); n_i_dot[i] += tit.second->Freq(); } n_dot_dot += n_i_dot[i]; } if ( n_dot_dot != 0 ){ for ( size_t m = 0; m < Num_Vals; ++m ){ FeatureValue *fv = FVA[m]; size_t n = 0; for ( const auto& it : fv->TargetDist ){ if ( n >= Size ){ break; } while ( n < it.second->Index()-1 ){ double tmp = ((double)n_dot_j[n++] * (double)n_i_dot[m]) / (double)n_dot_dot; chi_square += tmp; } if ( n == it.second->Index()-1 ){ double tmp = ((double)n_dot_j[n++] * (double)n_i_dot[m]) / (double)n_dot_dot; if ( fabs(tmp) > Epsilon){ chi_square += ( (tmp - it.second->Freq()) * (tmp - it.second->Freq()) ) / tmp; } } else { break; } } while ( n < Size ){ double tmp = ((double)n_dot_j[n++] * (double)n_i_dot[m]) / (double)n_dot_dot; chi_square += tmp; } } } } void Feature::ChiSquareStatistics( const Targets& Targs ){ chi_square = 0.0; long int n_dot_dot = 0; size_t Size = Targs.num_of_values(); size_t Num_Vals = values_array.size(); n_dot_j.resize(Size,0); n_i_dot.resize(Num_Vals,0); for ( size_t j = 0; j < Size; ++j ){ // ALL values should be zeroed n_dot_j[j] = 0; } int i = 0; for ( const auto& fv : values_array ){ n_i_dot[i] = 0; // ALL values should be zeroed for ( const auto& t_it : fv->TargetDist ){ long int fr = t_it.second->Freq(); n_dot_j[t_it.second->Index()-1] += fr; n_i_dot[i] += fr; } n_dot_dot += n_i_dot[i]; ++i; } if ( n_dot_dot != 0 ){ int m = 0; for ( const 
auto& fv : values_array ){ size_t n = 0; for ( const auto& t_it : fv->TargetDist ){ if ( n >= Size ){ break; } size_t id = t_it.second->Index()-1; long int fr = t_it.second->Freq(); while ( n < id ){ double tmp = ((double)n_dot_j[n++] * (double)n_i_dot[m]) / (double)n_dot_dot; chi_square += tmp; } if ( n == id ){ double tmp = ((double)n_dot_j[n++] * (double)n_i_dot[m]) / (double)n_dot_dot; if ( fabs(tmp) > Epsilon ){ chi_square += ( (tmp - fr ) * (tmp - fr ) ) / tmp; } } else { break; } } while ( n < Size ){ double tmp = ((double)n_dot_j[n++] * (double)n_i_dot[m]) / (double)n_dot_dot; chi_square += tmp; } ++m; } } } double Feature::fvDistance( const FeatureValue *F, const FeatureValue *G, size_t limit ) const { double result = 0.0; if ( F != G ){ bool dummy; if ( metric->isStorable() && matrixPresent( dummy ) && F->ValFreq() >= matrix_clip_freq && G->ValFreq() >= matrix_clip_freq ){ result = metric_matrix->Extract( F, G ); } else if ( metric->isNumerical() ) { result = metric->distance( F, G, limit, Max() - Min() ); } else { result = metric->distance( F, G, limit ); } } return result; } Feature_List &Feature_List::operator=( const Feature_List& l ){ if ( this != &l ){ _num_of_feats = l._num_of_feats; feats.resize(_num_of_feats); perm_feats.resize(_num_of_feats); permutation = l.permutation; _feature_hash = l._feature_hash; // shared ?? 
for ( unsigned int i=0; i < _num_of_feats; ++i ){ feats[i] = new Feature( *l.feats[i] ); } for ( unsigned int i=0; i < _num_of_feats; ++i ){ if ( l.perm_feats[i] ) { perm_feats[i] = feats[permutation[i]]; } else { perm_feats[i] = 0; } } _is_reference = true; _eff_feats = l._eff_feats; _num_of_num_feats = l._num_of_num_feats; } return *this; } Feature_List::~Feature_List(){ if ( !_is_reference ){ delete _feature_hash; } for ( const auto& it : feats ){ delete it; } feats.clear(); } void Feature_List::init( size_t size, const vector& UserOptions ) { _num_of_feats = size; _feature_hash = new Hash::UnicodeHash(); // all features share the same hash feats.resize(_num_of_feats,NULL); perm_feats.resize(_num_of_feats,NULL); for ( size_t i=0; i< _num_of_feats; ++i ){ feats[i] = new Feature( _feature_hash ); } _eff_feats = _num_of_feats; _num_of_num_feats = 0; // the user thinks about features running from 1 to _num_of_feats+1 // we know better, so shift the UserOptions one down. for ( size_t j = 0; j < _num_of_feats; ++j ){ MetricType m = UserOptions[j+1]; if ( m == Ignore ){ feats[j]->Ignore( true ); --_eff_feats; } else { feats[j]->setMetricType( m ); if ( feats[j]->isNumerical() ){ ++_num_of_num_feats; } } } } void Feature_List::write_permutation( ostream &os ) const { os << "< "; for ( const auto& it : permutation ){ os << it + 1; if ( &it != &permutation.back()) os << ", "; } os << " >"; } void Feature_List::calculate_permutation( const vector& W ){ vector WR = W; size_t IgnoredFeatures = 0; permutation.resize(_num_of_feats); for ( size_t j=0; j < _num_of_feats; ++j ){ permutation[j] = j; if ( feats[j]->Ignore() ){ WR[j] = -0.1; // To be shure that they are placed AFTER // those which are realy Zero IgnoredFeatures++; } } if ( IgnoredFeatures == _num_of_feats ){ Error( "All features seem to be ignored! 
Nothing to do" ); exit(1); } else { for ( size_t k=0; k < _num_of_feats; ++k ){ size_t Max = 0; for ( size_t m=1; m < _num_of_feats; ++m ){ if ( WR[m] > WR[Max] ){ Max = m; } } WR[Max] = -1; permutation[k] = Max; } } for ( size_t j=0; j < _num_of_feats; ++j ){ if ( j < _eff_feats ){ perm_feats[j] = feats[permutation[j]]; } else { perm_feats[j] = NULL; } } } Feature::~Feature(){ if ( !is_reference ){ delete_matrix(); delete metric; for ( const auto& it : values_array ){ delete it; } } reverse_values.clear(); } bool Feature::matrixPresent( bool& isRead ) const { isRead = false; if ( metric_matrix != 0 ){ if ( PrestoreStatus == ps_ok ){ return true; } else if ( PrestoreStatus == ps_read ){ isRead = true; return true; } } return false; } size_t Feature::matrix_byte_size() const { if ( metric_matrix ){ return metric_matrix->NumBytes(); } else { return 0; } } FeatVal_Stat Feature::prepare_numeric_stats(){ bool first = true; for ( const auto& fv : values_array ){ size_t freq = fv->ValFreq(); if ( freq > 0 ){ double tmp = -1; if ( !TiCC::stringTo( fv->name(), tmp ) ){ Warning( "a Non Numeric value '" + fv->name_string() + "' in Numeric Feature!" 
); return NotNumeric; } if ( first ){ first = false; n_min = tmp; n_max = tmp; } else if ( tmp < n_min ){ n_min = tmp; } else if ( tmp > n_max ){ n_max = tmp; } } } if ( fabs(n_max - n_min) < Epsilon ){ return SingletonNumeric; } else { return NumericValue; } } inline int min( int i1, int i2 ) { return (i1>i2?i2:i1); } inline size_t min( size_t i1, size_t i2 ) { return (i1>i2?i2:i1); } void Feature::SharedVarianceStatistics( const Targets& Targ, int eff_cnt ){ size_t NumInst = Targ.TotalValues(); int NumCats = Targ.EffectiveValues(); int k = min( NumCats, eff_cnt ) - 1; if ( k == 0 || NumInst == 0 ){ shared_variance = 0; } else { shared_variance = chi_square / (double)( NumInst * k ); } } void Feature::StandardDeviationStatistics( ){ double sum = 0.0; vector store( values_array.size() ); for ( unsigned int i=0; i < values_array.size(); ++i ){ const FeatureValue *FV = values_array[i]; double val = TiCC::stringTo( FV->name() ); store[i] = val; sum += val; } double total = 0.0; for ( unsigned int i=0; i < values_array.size(); ++i ){ double diff = sum - store[i]; total += diff*diff; } standard_deviation = sqrt( total / values_array.size() ); } void Feature::clear_matrix(){ if ( PrestoreStatus == ps_read ){ return; } else { delete_matrix(); } } void Feature::delete_matrix(){ if ( metric_matrix ){ metric_matrix->Clear(); delete metric_matrix; } metric_matrix = 0; PrestoreStatus = ps_undef; } bool Feature::setMetricType( const MetricType M ){ if ( !metric || M != metric->type() ){ delete metric; metric = getMetricClass(M); return true; } else { return false; } } MetricType Feature::getMetricType() const { return metric->type(); } bool Feature::store_matrix( int limit){ // // Store a complete distance matrix. 
// if ( PrestoreStatus == ps_read ){ return true; } if ( !metric_matrix ){ metric_matrix = new SparseSymetricMatrix(); } if ( PrestoreStatus != ps_failed && metric->isStorable( ) ) { try { for ( const auto& FV_i : values_array ){ for ( const auto& FV_j : values_array ){ if ( FV_i->ValFreq() >= matrix_clip_freq && FV_j->ValFreq() >= matrix_clip_freq && ( Prestored_metric != metric->type() || fabs(metric_matrix->Extract(FV_i,FV_j)) < Epsilon ) ){ double dist = metric->distance( FV_i, FV_j, limit ); metric_matrix->Assign( FV_i, FV_j, dist ); } } } } catch( ... ){ cout << "hit the ground!" << endl; PrestoreStatus = ps_failed; return false; }; PrestoreStatus = ps_ok; } if ( PrestoreStatus == ps_ok ){ Prestored_metric = metric->type(); } return true; } ostream& operator<< (std::ostream& os, SparseValueProbClass *VPC ){ if ( VPC ) { int old_prec = os.precision(); os.precision(3); os.setf( std::ios::fixed ); auto it = VPC->vc_map.begin(); for ( size_t k = 1; k <= VPC->dimension; ++k ){ os.setf(std::ios::right, std::ios::adjustfield); if ( it != VPC->vc_map.end() && it->first == k ){ os << "\t" << it->second; ++it; } else { os << "\t" << 0.0; } } os << setprecision( old_prec ); } else { os << "(Null SA)"; } return os; } void Feature::print_vc_pb_array( ostream &os ) const { for ( const auto& FV : values_array ){ if ( FV->ValueClassProb ){ os << FV << FV->ValueClassProb << endl; } } } bool Feature::read_vc_pb_array( istream &is ){ unsigned int Num = 0; bool first = true; // clear all existing arrays for ( const auto& FV : values_array ){ if ( FV->ValueClassProb ){ delete FV->ValueClassProb; FV->ValueClassProb = NULL; } } UnicodeString buf; while ( TiCC::getline( is, buf ) ){ if ( buf.length() < 8 ){ // "empty" line separates matrices break; } vector parts = TiCC::split( buf ); if ( first ){ Num = parts.size() - 1; first = false; } UnicodeString name = parts[0]; FeatureValue *FV = Lookup( name ); if ( !FV ){ Warning( "Unknown FeatureValue '" + TiCC::UnicodeToUTF8(name) + "' 
in file, (skipped) " ); continue; } else { FV->ValueClassProb = new SparseValueProbClass( Num ); for ( size_t i=0; i < Num; ++i ){ UnicodeString tname = parts[i+1]; double value; if ( !TiCC::stringTo( tname, value ) ){ Error( "Found illegal value '" + TiCC::UnicodeToUTF8(tname) + "'" ); return false; } else if ( value > Epsilon ) { FV->ValueClassProb->Assign( i, value ); } } } } // check if we've got all the values, assign a default if not so for ( const auto& FV : values_array ){ if ( FV->ValueClassProb == NULL ){ FV->ValueClassProb = new SparseValueProbClass( Num ); } } vcpb_read = true; return true; } bool Feature::fill_matrix( istream &is ) { if ( !metric_matrix ){ metric_matrix = new SparseSymetricMatrix(); } else { metric_matrix->Clear(); } UnicodeString line; while ( TiCC::getline(is,line) ){ if ( line.isEmpty() ){ break; } vector arr = TiCC::split_at( line, " " ); size_t num = arr.size(); double d; if ( num != 2 ){ Error( "wrong line in inputfile" ); return false; } else if ( arr[0].length() < 2 ){ Error( "wrong line in inputfile" ); return false; } else if ( !TiCC::stringTo( arr[1], d ) ) { Error( "wrong line in inputfile" ); return false; } else { UnicodeString stripped = UnicodeString( arr[0], 1,arr[0].length()-2) ; vector parts = TiCC::split_at( stripped, ",\t" ); if ( parts.size() != 2 ){ Error( "wrong line in inputfile" ); return false; } else { FeatureValue *F1 = Lookup(parts[0]); FeatureValue *F2 = Lookup(parts[1]); metric_matrix->Assign( F1, F2, d ); } } } PrestoreStatus = ps_read; return true; } void Feature::print_matrix( ostream &os, bool full ) const { // // Print the matrix. 
// int old_prec = os.precision(); ios::fmtflags old_flags = os.flags(); os.unsetf(std::ios_base::floatfield); if ( full ){ for ( const auto& FV_i : values_array ){ os.width(6); os.setf(ios::left, ios::adjustfield); os << FV_i << ":"; os.width(12); os.precision(3); os.setf(ios::right, ios::adjustfield); for ( const auto& FV_j : values_array ){ os.width(12); os.precision(3); os.setf(ios::right,ios::adjustfield ); if ( FV_i->ValFreq() < matrix_clip_freq || FV_j->ValFreq() < matrix_clip_freq ){ os << "*"; } else { os << metric_matrix->Extract(FV_i,FV_j); } } os << endl; } } else { os << *metric_matrix << endl; } os << setprecision( old_prec ); os.flags( old_flags ); } } // namespace Timbl LanguageMachines-timbl-642727d/src/GetOptClass.cxx000066400000000000000000000662771451477526200220350ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . 
For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #include #include #include #include "ticcutils/CommandLine.h" #include "timbl/Common.h" #include "timbl/Types.h" #include "timbl/Options.h" #include "timbl/MsgClass.h" #include "timbl/Metrics.h" #include "timbl/Instance.h" #include "timbl/GetOptClass.h" #include "timbl/TimblExperiment.h" using namespace std; namespace Timbl { void GetOptClass::set_default_options( int Max ){ local_algo = IB1_a; local_metric = UnknownMetric; local_order = UnknownOrdening; local_weight = Unknown_w; local_decay = Zero; local_decay_alfa = 1.0; local_decay_beta = 1.0; local_normalisation = unknownNorm; local_norm_factor = 1; no_neigh = 1; mvd_limit = 1; estimate = 0; maxbests = 500; BinSize = 0; BeamSize = 0; clip_freq = 10; clones = 1; bootstrap_lines = -1; local_progress = 100000; seed = -1; do_exact = false; do_hashed = true; min_present = false; keep_distributions = false; do_sample_weights = false; do_ignore_samples = false; do_ignore_samples_test = false; do_query = false; do_all_weights = false; do_sloppy_loo = false; do_silly = false; do_diversify = false; if ( MaxFeats == -1 ){ MaxFeats = Max; LocalInputFormat = UnknownInputFormat; // InputFormat and verbosity myVerbosity = NO_VERB; // are not reset! 
} target_pos = -1; metricsArray.resize(MaxFeats+1, UnknownMetric ); outPath = ""; occIn = 0; } GetOptClass::GetOptClass( const TiCC::CL_Options& opts ): LocalInputFormat( UnknownInputFormat ), MaxFeats(-1), target_pos(-1), f_length( 0 ), threshold( -1 ), igThreshold( -1 ), myVerbosity( NO_VERB ), opt_init( false ), opt_changed( false ), N_present( false ), parent_socket_os( 0 ) { int MaxF = DEFAULT_MAX_FEATS; string optie; if ( opts.is_present( 'N', optie ) ){ N_present = true; MaxF = TiCC::stringTo( optie ); } set_default_options( MaxF ); } GetOptClass::~GetOptClass( ){ } GetOptClass::GetOptClass( const GetOptClass& in ): MsgClass(in), local_algo( in.local_algo ), local_metric( in.local_metric ), local_order( in.local_order ), local_weight( in.local_weight ), LocalInputFormat( in.LocalInputFormat ), local_decay( in.local_decay ), local_decay_alfa( in.local_decay_alfa ), local_decay_beta( in.local_decay_beta ), local_normalisation( in.local_normalisation ), local_norm_factor( in.local_norm_factor ), MaxFeats( in.MaxFeats ), target_pos( in.target_pos ), no_neigh( in.no_neigh ), mvd_limit( in.mvd_limit ), estimate( in.estimate ), maxbests( in.maxbests ), clip_freq( in.clip_freq ), clones( in.clones ), BinSize( in.BinSize ), BeamSize( in.BeamSize ), bootstrap_lines( in.bootstrap_lines ), f_length( in.f_length ), local_progress( in.local_progress ), seed( in.seed ), threshold( in.threshold ), igThreshold( in.igThreshold ), myVerbosity( in.myVerbosity ), opt_init( in.opt_init ), opt_changed( in.opt_changed ), do_exact( in.do_exact ), do_hashed( in.do_hashed ), min_present( in.min_present ), N_present(false), keep_distributions( in.keep_distributions ), do_sample_weights( in.do_sample_weights ), do_ignore_samples( in.do_ignore_samples ), do_ignore_samples_test( in.do_ignore_samples_test ), do_query( in.do_query ), do_all_weights( false ), do_sloppy_loo( false ), do_silly( in.do_silly ), do_diversify( in.do_diversify ), metricsArray( in.metricsArray ), parent_socket_os( 
in.parent_socket_os ), outPath( in.outPath ), occIn( in.occIn ) { } GetOptClass *GetOptClass::Clone( ostream *sock_os ) const{ GetOptClass *result = new GetOptClass(*this); result->parent_socket_os = sock_os; return result; } void GetOptClass::Error( const string& out_line ) const { if ( parent_socket_os ){ *parent_socket_os << "ERROR { " << out_line << " }" << endl; } else { cerr << "Error:" << out_line << endl; } } bool GetOptClass::definitive_options( TimblExperiment *Exp ){ if ( opt_changed || !opt_init ){ opt_changed = false; bool first = !opt_init; opt_init = true; string optline; if ( first ){ // the following options can only be set once! // If you try it anyway, you should get a MblClass warning... if ( LocalInputFormat == SparseBin ){ if ( !N_present ){ Error( "Missing -N option, mandatory for -F Binary" ); return false; } } if ( LocalInputFormat == Sparse ){ if ( !N_present ){ Error( "Missing -N option, mandatory for -F Sparse" ); return false; } } if ( LocalInputFormat != UnknownInputFormat ){ optline = "INPUTFORMAT: " + TiCC::toString(LocalInputFormat); if ( !Exp->SetOption( optline ) ){ return false; } } if ( target_pos != -1 ){ optline = "TARGET_POS: " + TiCC::toString(target_pos-1); if ( !Exp->SetOption( optline ) ){ return false; } } if ( keep_distributions ){ optline = "KEEP_DISTRIBUTIONS: true"; if ( !Exp->SetOption( optline ) ){ return false; } } if ( do_sloppy_loo ){ if ( local_algo != LOO_a ){ Error( "sloppy only valid for LOO algorithm" ); return false; } else { optline = "DO_SLOPPY_LOO: true"; if ( !Exp->SetOption( optline ) ){ return false; } } } if ( do_silly ){ optline = "DO_SILLY: true"; if ( !Exp->SetOption( optline ) ){ return false; } } if ( do_diversify ){ optline = "DO_DIVERSIFY: true"; if ( !Exp->SetOption( optline ) ){ return false; } } if ( f_length > 0 ){ optline = "FLENGTH: " + TiCC::toString(f_length); if ( !Exp->SetOption( optline ) ){ return false; } } if ( local_weight != Unknown_w ){ optline = "WEIGHTING: " + 
TiCC::toString(local_weight); Exp->SetOption( optline ); } if ( do_all_weights ){ optline = "ALL_WEIGHTS: true"; Exp->SetOption( optline ); } optline = "MAXBESTS: " + TiCC::toString(maxbests); Exp->SetOption( optline ); if ( BinSize > 0 ){ optline = "BIN_SIZE: " + TiCC::toString(BinSize); Exp->SetOption( optline ); } if ( BeamSize > 0 ){ optline = "BEAM_SIZE: " + TiCC::toString(BeamSize); Exp->SetOption( optline ); } if ( local_algo == TRIBL_a && threshold < 0 ){ Error( "-q is missing for TRIBL algorithm" ); return false; } if ( threshold >= 0 ){ if ( local_algo != TRIBL_a ){ Error( "-q option only valid for TRIBL algorithm" ); return false; } if ( threshold == 0 ){ Error( "invalid -q option. Must be > 0 " ); return false; } optline = "TRIBL_OFFSET: " + TiCC::toString(threshold); Exp->SetOption( optline ); } if ( igThreshold > 0 ){ optline = "IG_THRESHOLD: " + TiCC::toString(igThreshold); Exp->SetOption( optline ); } if ( local_order != UnknownOrdening ){ optline = "TREE_ORDER: " + TiCC::toString(local_order); Exp->SetOption( optline ); } if ( !outPath.empty() ){ Exp->setOutPath( outPath ); } } //first if ( clones > 0 ){ Exp->Clones( clones ); } if ( estimate < 10 ){ Exp->Estimate( 0 ); } else { Exp->Estimate( estimate ); } if ( myVerbosity & CONFIDENCE ){ if ( local_normalisation == unknownNorm ){ Error( "Invalid option +vcf, while -G is missing!" ); return false; } } if ( myVerbosity & DISTRIB ){ if ( !keep_distributions && local_algo == IGTREE_a ){ myVerbosity &= ~DISTRIB; Error( "Invalid option +vdb, while +D is missing!" 
); return false; } } if ( myVerbosity & ALL_K ){ if ( local_algo == IGTREE_a ){ Error( "Invalid option +vk, impossible with IGtree algorithm" ); return false; } else if ( !(myVerbosity & DISTRIB) ){ // silently add +vdb when +vk is set myVerbosity |= DISTRIB; } } if ( myVerbosity & NEAR_N ){ if ( local_algo == IGTREE_a ){ Error( "Invalid option +vn, impossible with IGtree algorithm" ); return false; } } if ( myVerbosity & CONF_MATRIX || myVerbosity & CLASS_STATS ) myVerbosity |= ADVANCED_STATS; if ( do_exact ){ Exp->SetOption( "EXACT_MATCH: true" ); } else { Exp->SetOption( "EXACT_MATCH: false" ); } if ( do_hashed ) { Exp->SetOption( "HASHED_TREE: true" ); } else { Exp->SetOption( "HASHED_TREE: false" ); } if ( occIn > 0 && do_sample_weights ){ Error( "--occurrences and -s cannot be combined!" ); return false; } if ( occIn > 0 ){ Exp->SetOption( "HANDLE_OCCURRENCES: " + TiCC::toString(occIn) ); } else if ( do_sample_weights ){ Exp->SetOption( "EXEMPLAR_WEIGHTS: true" ); if ( do_ignore_samples ){ Exp->SetOption( "IGNORE_EXEMPLAR_WEIGHTS: true" ); } else { Exp->SetOption( "IGNORE_EXEMPLAR_WEIGHTS: false" ); } if ( do_ignore_samples_test ){ Exp->SetOption( "NO_EXEMPLAR_WEIGHTS_TEST: true" ); } else { Exp->SetOption( "NO_EXEMPLAR_WEIGHTS_TEST: false" ); } } else { Exp->SetOption( "EXEMPLAR_WEIGHTS: false" ); } if ( local_metric == UnknownMetric ){ // Ok, so NO defaults at all (API usage for instance) local_metric = Overlap; fill( metricsArray.begin(), metricsArray.end(), Overlap ); } optline = "GLOBAL_METRIC: " + TiCC::toString(local_metric); Exp->SetOption( optline ); if ( bootstrap_lines > 0 ){ optline = "IB2_OFFSET: " + TiCC::toString(bootstrap_lines); Exp->SetOption( optline ); } if ( local_normalisation != unknownNorm ){ optline = "NORMALISATION: " + TiCC::toString( local_normalisation ); Exp->SetOption( optline ); if ( local_normalisation == addFactorNorm ){ optline = "NORM_FACTOR: " + TiCC::toString( local_norm_factor ); Exp->SetOption( optline ); } } optline = 
"MVD_LIMIT: " + TiCC::toString(mvd_limit); Exp->SetOption( optline ); optline = "NEIGHBORS: " + TiCC::toString(no_neigh); if ( Exp->SetOption( optline ) ){ optline = "DECAY: " + TiCC::toString(local_decay); if ( Exp->SetOption( optline ) ){ optline = "DECAYPARAM_A: " + TiCC::toString(local_decay_alfa); if ( Exp->SetOption( optline ) ){ optline = "DECAYPARAM_B: " + TiCC::toString(local_decay_beta); if ( Exp->SetOption( optline ) ){ optline = "CLIP_FACTOR: " + TiCC::toString(clip_freq); if ( Exp->SetOption( optline ) ){ optline = "SEED: " + TiCC::toString(seed); if ( Exp->SetOption( optline ) ){ optline = "PROGRESS: " + TiCC::toString(local_progress); if ( Exp->SetOption( optline ) ){ optline = "VERBOSITY: " + TiCC::toString(myVerbosity); if ( Exp->SetOption( optline ) ){ for ( size_t i=0; i < metricsArray.size(); ++i ){ optline = "METRICS: " + TiCC::toString( i ) + "=" + TiCC::toString(metricsArray[i]); if (!Exp->SetOption( optline ) ){ Error( "changing metric is not possible at this stage" ); return false; } } if ( do_query ){ Exp->ShowSettings( cerr ); do_query = false; } return true; } } } } } } } } return false; } return true; } inline bool GetOptClass::parse_range( string& line, string::iterator& it, MetricType Value ){ size_t m; while( it != line.end() && *it != ':' ){ auto eit = it; while( eit != line.end() && isdigit( *eit ) ) ++eit; string tmp = string( it, eit ); size_t k; if ( TiCC::stringTo( tmp, k, 1, metricsArray.size() ) ){ if ( metricsArray[k] != UnknownMetric && metricsArray[k] != Value ){ Error( "metric of feature " + tmp + " is multiply changed!" 
); return false; } metricsArray[k] = Value; } else { Error( "illegal value in metric description: -m " + line ); return false; } it = eit; if ( it == line.end() ){ return true; } else if ( *it == ',' ){ ++it; } else if ( *it == '-' ){ ++it; eit = it; while( eit != line.end() && isdigit( *eit ) ) ++eit; tmp = string( it, eit ); m = TiCC::stringTo(tmp); if ( m == 0 || m > metricsArray.size() ){ Error( "illegal value in metric description: -m " + line ); return false; } it = eit; if ( it != line.end() && (*it != ',' && *it != ':' ) ){ Error( "illegal value in metric description: -m " + line ); return false; } if ( m < k ){ Error( "illegal value in metric description: -m " + line ); return false; } else { for ( size_t j=k+1; j <= m && j < metricsArray.size(); ++j ){ if ( metricsArray[j] != UnknownMetric && metricsArray[j] != Value ){ Error( "metric of feature " + TiCC::toString(j) + " is multiply changed!" ); return false; } metricsArray[j] = Value; } } if ( it != line.end() && *it == ',' ) { ++it; } } } return true; } inline bool GetOptClass::parse_metrics( const string& Mline, MetricType& Def ){ string line = TiCC::trim( Mline ); TiCC::to_upper( line ); auto p = line.begin(); if ( p != line.end() ){ switch ( *p++ ){ case 'O' : Def = Overlap; break; case 'J' : Def = JeffreyDiv; break; case 'S' : Def = JSDiv; break; case 'M' : Def = ValueDiff; break; case 'N' : Def = Numeric; break; case 'E' : Def = Euclidean; break; case 'D' : if ( p == line.end() || *p == ':' ){ Def = DotProduct; } else { if ( *p == 'C' ){ Def = Dice; ++p; } } break; case 'C' : Def = Cosine; break; case 'L' : Def = Levenshtein; break; case 'I' : Def = Ignore; break; default: Error( "illegal default value for metric: -m " + Mline ); return false; } if ( p == line.end() ){ // // only -m options, no further specifications // if ( Def == Ignore ){ Error( "Ignore without further specification for metric: -m " + Mline ); return false; } else { // set the defaults fill( metricsArray.begin(), 
metricsArray.end(), Def ); return true; } } else if ( *p != ':' ){ Error( "missing ':' after default value in -m option" ); return false; } else { // deviating options expected. reset the array fill( metricsArray.begin(), metricsArray.end(), UnknownMetric ); ++p; MetricType TmpMT; while( p != line.end() ){ switch ( *p ){ case 'O' : TmpMT = Overlap; break; case 'S' : TmpMT = JSDiv; break; case 'J' : TmpMT = JeffreyDiv; break; case 'D' : if ( *(p+1) && *(p+1) == 'C' ){ ++p; TmpMT = Dice; } else { Error( "illegal value in metric description: -m " + Mline ); return false; } break; case 'M' : TmpMT = ValueDiff; break; case 'E' : TmpMT = Euclidean; break; case 'N' : TmpMT = Numeric; break; case 'I' : TmpMT = Ignore; break; default: Error( "illegal value in metric description: -m " + Mline ); return false; } metricClass *tmpMC = getMetricClass(Def); if ( TmpMT != Ignore && tmpMC->isSimilarityMetric() ){ Error( "Similarity metric " + TiCC::toString( Def ) + " only accepts -I specifications: -m " + Mline ); delete tmpMC; return false; } delete tmpMC; ++p; if ( !parse_range( line, p, TmpMT ) ){ return false; } if ( p == line.end() ){ break; } if ( *p != ':' ){ Error( "missing ':' in metric description" ); return false; } else { ++p; } } if ( p != line.end() ){ Error( "illegal value in metric description: -m " + Mline ); return false; } else { // // set defaults for those still unset // replace( metricsArray.begin(), metricsArray.end(), UnknownMetric, Def ); } } return true; } else { return false; } } inline bool isBoolOrEmpty( const string& in, bool& val ){ if ( in.empty() ){ val = true; return true; } else { string s = TiCC::uppercase( in ); if ( s == "TRUE" || s == "YES" || s == "FALSE" || s == "NO" ){ val = ( s == "TRUE" || s == "YES" ); return true; } } return false; } bool GetOptClass::parse_options( const TiCC::CL_Options& opts, const int mode ){ opt_changed = true; // cerr << "options: " << opts << endl; // cerr << "mode: " << mode << endl; for ( auto const& curr_opt: 
opts ){ // cerr << "process " << curr_opt << endl; bool mood = curr_opt.get_mood(); bool longOpt = curr_opt.is_long(); string value = curr_opt.value(); char opt_char = curr_opt.opt_char(); string option = curr_opt.option(); try { // cerr << "try " << opt_char << endl; switch (opt_char) { case 'a': { AlgorithmType tmp_a = IB1_a; if ( !TiCC::stringTo( value, tmp_a ) ){ Error( "illegal -a value: " + value ); return false; } else if ( tmp_a != IB1_a ){ if ( local_algo == LOO_a || local_algo == CV_a ){ Error( "only IB1 algorithm is allowed for: " + TiCC::toString(local_algo) ); return false; } else { local_algo = tmp_a; } } } break; case 'b': bootstrap_lines = TiCC::stringTo( value ); if ( bootstrap_lines < 1 ){ Error( "illegal value for -b option: " + value ); return false; } break; case 'B': if ( longOpt ){ if ( option == "Beam" ){ if ( !TiCC::stringTo( value, BeamSize ) || BeamSize <= 0 ){ Error( "illegal value for --Beam option: " + value ); return false; } } } else if ( value.find("eam") != string::npos ){ Error( "invalid option: Did you mean '--B" + value + "'?" ); return false; } else { BinSize = TiCC::stringTo( value ); if ( BinSize <= 1 ){ Error( "illegal value for -B option: " + value ); return false; } } break; case 'c': if ( longOpt ){ if ( option == "clones" ){ if ( !TiCC::stringTo( value, clones ) || clones <= 0 ){ Error( "invalid value for --clones option: '" + value + "'" ); return false; } } } else { if ( !TiCC::stringTo( value, clip_freq ) || clip_freq < 0 ){ Error( "illegal value for -c option: " + value ); return false; } } break; case 'd': { string::size_type pos1 = value.find( ":" ); if ( pos1 == string::npos ){ pos1 = value.find_first_of( "0123456789" ); if ( pos1 != string::npos ){ if ( ! 
( TiCC::stringTo( string( value, 0, pos1 ), local_decay ) && TiCC::stringTo( string( value, pos1 ), local_decay_alfa ) ) ){ Error( "illegal value for -d option: " + value ); return false; } } else if ( !TiCC::stringTo( value, local_decay ) ){ Error( "illegal value for -d option: " + value ); return false; } } else { string::size_type pos2 = value.find( ':', pos1+1 ); if ( pos2 == string::npos ){ pos2 = value.find_first_of( "0123456789", pos1+1 ); if ( pos2 != string::npos ){ if ( ! ( TiCC::stringTo( string( value, 0, pos1 ), local_decay ) && TiCC::stringTo( string( value, pos2 ), local_decay_alfa ) ) ){ Error( "illegal value for -d option: " + value ); return false; } } else { Error( "illegal value for -d option: " + value ); return false; } } else { if ( ! ( TiCC::stringTo( string( value, 0, pos1 ), local_decay ) && TiCC::stringTo( string( value, pos1+1, pos2-pos1-1 ), local_decay_alfa ) && TiCC::stringTo( string( value, pos2+1 ), local_decay_beta ) ) ){ Error( "illegal value for -d option: " + value ); return false; } } } break; } case 'D': if ( longOpt ){ if ( option == "Diversify" ){ do_diversify = true; } else { Error( "invalid option: Did you mean '--Diversify' ?" 
); return false; } } else { keep_distributions = mood; } break; case 'e': if ( !TiCC::stringTo( value, estimate ) || estimate < 0 ){ Error( "illegal value for -e option: " + value ); return false; } break; case 'F': if ( !TiCC::stringTo( value, LocalInputFormat ) ){ Error( "illegal value for -F option: " + value ); return false; } break; case 'G': if ( value.empty() ){ local_normalisation = probabilityNorm; } else { string::size_type pos1 = value.find( ":" ); if ( pos1 == string::npos ){ local_normalisation = TiCC::stringTo( value ); local_norm_factor = 1; } else { local_normalisation = TiCC::stringTo( string( value, 0, pos1 ) ); if ( !TiCC::stringTo( string( value, pos1+1 ), local_norm_factor ) || local_norm_factor < Epsilon ){ Error( "illegal value for -G option: " + value ); return false; } } if ( local_normalisation == unknownNorm ){ Error( "illegal value for -G option: " + value ); return false; } } break; case 'H': do_hashed = mood; break; case 'k': if ( !TiCC::stringTo( value, no_neigh ) || no_neigh <= 0 ){ Error( "illegal value for -k option: " + value ); return false; } break; case 'l': if ( !TiCC::stringTo( value, f_length ) || f_length <= 0 ){ Error( "illegal value for -l option: " + value ); return false; } break; case 'L': { string::size_type pos1 = value.find( ":" ); if ( pos1 == string::npos ){ pos1 = value.find_first_of( "0123456789" ); if ( pos1 != string::npos ){ if ( !TiCC::stringTo( value, mvd_limit ) || mvd_limit <= 0 ){ Error( "illegal value for -L option: " + value ); return false; } } } else { if ( !TiCC::stringTo( string( value, pos1+1 ), mvd_limit ) || mvd_limit <= 0 ){ Error( "illegal value for -L option: " + value ); return false; } } break; } case 'm': if ( !parse_metrics( value, local_metric ) ){ return false; } break; case 'M': if ( !TiCC::stringTo( value, maxbests ) || maxbests <= 0 ){ Error( "illegal value for -M option: " + value ); return false; } break; case 'N': // skip previously parsed NumOfFeatures info. 
break; case 'O': outPath = value; break; case 'o': if ( longOpt ){ if ( option == "occurrences" ){ if ( value == "train" ){ occIn = 1; } else if ( value == "test" ){ occIn = 2; } else if ( value == "both" ){ occIn = 3; } else { Error( "invalid --ocurrences value. (expected train,test or both)" ); return false; } } } break; case 'p': local_progress = TiCC::stringTo( value ); break; case 'q': threshold = TiCC::stringTo( value ); break; case 'Q': do_query = true; break; case 'R': if ( !TiCC::stringTo( value, seed ) ){ Error( "Integer argument for Random Seed expected (-R option)" ); return false; } break; case 's': if ( longOpt ){ if ( option == "sloppy" ){ bool val; if ( !isBoolOrEmpty(value,val) ){ Error( "invalid value for sloppy: '" + value + "'" ); return false; } do_sloppy_loo = val; } else if ( option == "silly" ){ bool val; if ( !isBoolOrEmpty(value,val) ){ Error( "invalid value for silly: '" + value + "'" ); return false; } do_silly = val; } } else { //short opt, so -s if ( value.empty() ){ do_sample_weights = true; } else { int val; if ( TiCC::stringTo( value, val ) ){ if ( val == 0 ){ do_ignore_samples = true; do_ignore_samples_test = false; do_sample_weights = true; } else if ( val == 1 ){ do_ignore_samples_test = true; do_sample_weights = true; } } if ( !do_sample_weights) { Error( "invalid value for -s: '" + value + "' (maybe you meant --s" + value + " ?)" ); return false; } } } break; case 't': { AlgorithmType tmp_a = IB1_a; if ( compare_nocase( value, "leave_one_out" ) ){ tmp_a = LOO_a; } else if ( compare_nocase( value, "cross_validate" ) ){ tmp_a = CV_a; } if ( local_algo != IB1_a && tmp_a != IB1_a ){ Error( "only IB1 algorithm is allowed for: " + TiCC::toString(tmp_a) ); return false; } local_algo = tmp_a; } break; case 'T': { if ( longOpt ){ if ( option == "Threshold" ){ if ( !TiCC::stringTo(value, igThreshold ) || igThreshold < 0 ){ Error( "invalid value for Threshold: " + value ); return false; } } else if ( option == "Treeorder" ){ if ( 
!TiCC::stringTo( value, local_order ) ){ Error( "invalid value for Treeorder: " + value ); return false; } } } else if ( value.find("hreshold") != string::npos || value.find("reeorder") != string::npos ){ Error( "invalid option: Did you mean '--T" + value + "' ?" ); return false; } else if ( !TiCC::stringTo( value, target_pos ) || target_pos <= 0 ){ Error( "illegal value for -T option: " + value ); return false; } } break; case 'v':{ VerbosityFlags Flag = NO_VERB; if ( !TiCC::stringTo( value, Flag ) ){ Error( "illegal value for +/- v option: " + value ); return false; } else { if ( mode == 2 && ( !(Flag & (SILENT|DISTANCE|DISTRIB|NEAR_N|CONF_MATRIX) ) ) ){ Error( "-v option: " + TiCC::toString(Flag) + " is not allowed at this stage." ); return false; } else if ( Flag > 0 ){ if ( mood ){ myVerbosity |= Flag; } else { myVerbosity &= ~Flag; } } else { myVerbosity = NO_VERB; } } } break; case 'w': { if ( !TiCC::stringTo( value, local_weight ) ) return false; }; break; case 'W': { do_all_weights = true; }; break; case 'x': do_exact = mood; break; default: Warning( string("unhandled option: ") + opt_char + " " + value ); } } catch( std::runtime_error& err ) { Error( string("invalid value for option '-") + opt_char + "' (" + value + ")" ); return false; } } return true; } } LanguageMachines-timbl-642727d/src/IBprocs.cxx000066400000000000000000000307741451477526200211770ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #include #include #include #include #include "ticcutils/StringOps.h" #include "timbl/IBtree.h" #include "timbl/Common.h" #include "timbl/Types.h" #include "timbl/Instance.h" #include "timbl/MBLClass.h" using namespace std; namespace Timbl { bool MBLClass::HideInstance( const Instance& Inst ){ bool result = true; InstanceBase->RemoveInstance( Inst ); MBL_init = do_sloppy_loo; // must be only true if you are REALY sure for ( size_t i=0; i < EffectiveFeatures() && result; ++i ){ features.perm_feats[i]->clear_matrix(); if ( !features.perm_feats[i]->decrement_value( Inst.FV[i], Inst.TV ) ){ FatalError( "Unable to Hide an Instance!" ); result = false; } } if ( result ){ targets.decrement_value( Inst.TV ); } return result; } bool MBLClass::UnHideInstance( const Instance& Inst ){ bool result = true; InstanceBase->AddInstance( Inst ); MBL_init = do_sloppy_loo; // must be only true if you are REALY sure for ( size_t i=0; i < EffectiveFeatures() && result; ++i ){ features.perm_feats[i]->clear_matrix(); if ( !features.perm_feats[i]->increment_value( Inst.FV[i], Inst.TV ) ){ FatalError( "Unable to UnHide this Instance!" 
); result = false; } } if ( result ){ targets.increment_value( Inst.TV ); } return result; } MBLClass::IB_Stat MBLClass::IBStatus() const { if ( !InstanceBase ){ return Invalid; } else if (InstanceBase->IsPruned() ){ return Pruned; } else { return Normal; } } void MBLClass::IBInfo( ostream& os ) const { double Compres; unsigned long int CurSize; unsigned long int CurBytes; CurBytes = InstanceBase->GetSizeInfo( CurSize, Compres ); ios::fmtflags OldFlg = os.setf( ios::fixed, ios::floatfield ); int OldPrec = os.precision(2); os << "\nSize of InstanceBase = " << CurSize << " Nodes, (" << CurBytes << " bytes), " << Compres << " % compression" << endl; if ( Verbosity(BRANCHING) ) { vector terminals; vector nonTerminals; unsigned int summedNodes = 0; unsigned int endNodes = 0; os << "branching info:" << endl; os << " level | feature | nodes | nonterms | terminals | b-factor | b-factor-n" << endl; InstanceBase->summarizeNodes( terminals, nonTerminals ); unsigned int i = 0; auto nIt = nonTerminals.begin(); auto tIt = terminals.begin(); while ( nIt != nonTerminals.end() ){ endNodes += *tIt; int nodes; if ( i == 0 ){ nodes = 1; os << setw(8) << 0 << " |" << setw(8) << "top" << " |" << setw(10) << 1 << " |" << setw(10) << 1 << " |" << setw(10) << 0 << " |" << setw(10) << double(*nIt + *tIt) << " |" << setw(10) << double(*nIt) << endl; } else { nodes = *(nIt-1) + *(tIt-1); if ( nodes == 0 ){ break; } os << setw(8) << i << " |"<< setw(8) << features.permutation[i-1] + 1 << " |" << setw(10) << nodes << " |" << setw(10) << *(nIt-1) << " |" << setw(10) << *(tIt-1) << " |" << setw(10) << (*nIt + *tIt)/double(nodes) << " |" << setw(10) << (*nIt?(*nIt)/double(*(nIt-1)):0) << endl; } summedNodes += nodes; ++i; ++nIt; ++tIt; } os << "total: nodes = " << summedNodes << " endnodes = " << endNodes << " factor = " << summedNodes/double(endNodes) << endl; } os.precision( OldPrec ); os.setf( OldFlg ); } string string_tok( const string& s, string::size_type& pos, const string& seps ){ 
string::size_type b_pos = s.find_first_not_of( seps, pos ); if ( b_pos != string::npos ){ pos = s.find_first_of( seps, b_pos ); if ( pos == string::npos ){ return string( s, b_pos ); } else { return string( s, b_pos, pos - b_pos ); } } else { pos = string::npos; } return ""; } size_t MBLClass::get_IB_Info( istream& is, bool& Pruned, int& Version, bool& Hashed, string& range_buf ){ size_t result = 0; if ( ExpInvalid() ){ Error( "Can't retrieve Instance-Base\n" ); return result; } if ( Options.TableFrozen() || NumOfFeatures() != 0 ){ Warning( "unable to read an Instance Base while another" " experiment is already loaded" ); return result; } bool info_ok = true; size_t depth = 0; int version = -1; Hashed = false; range_buf = ""; string buffer; vector splits; bool more = ( look_ahead(is) == '#' && getline( is, buffer ) ); while ( info_ok && more ){ splits = TiCC::split( buffer ); size_t num = splits.size(); if ( num > 2 ){ if ( compare_nocase_n( "Status:", splits[1] ) ){ version = 2; if ( splits[2] == "pruned" ){ Pruned = true; } else if ( splits[2] == "complete" ){ Pruned = false; } else { Error( "Unknown Status Information in Instance-Base file." ); info_ok = false; } } else if ( compare_nocase_n( "Algorithm:", splits[1] ) ) { version = 1; if ( compare_nocase( splits[2], "IG-tree" ) ){ Pruned = true; } else if ( compare_nocase( splits[2], "MBL" ) ){ Pruned = false; } else { Error( "Unknown Algorithm Information in Instance-Base file." ); info_ok = false; } } else if ( compare_nocase_n( "Permutation:", splits[1] ) ){ if ( splits[2][0] != '<' ){ Error( "missing `<` while reading permutation" ); info_ok = false; } else if ( splits[num-1][0] != '>' ){ Error( "missing `>` while reading permutation" ); info_ok = false; } else { string perms; for ( size_t i=3; i < num-1; ++i ){ perms = perms + splits[i]; // Maybe we could use splits directly? 
} bool excl = false; features._eff_feats = 0; size_t i = 0; string::size_type pos = 0; // skip < while ( info_ok && pos != string::npos && i < MaxFeatures ){ i++; if ( !excl ){ ++features._eff_feats; } string tmp = string_tok( perms, pos, ", !" ); size_t index = TiCC::stringTo( tmp ); features.permutation.push_back( --index ); if ( index >= MaxFeatures ){ Error ( "illegal value " + TiCC::toString(index) + " in permutation, not between 1 and " + TiCC::toString( MaxFeatures ) ); info_ok = false; break; } if ( excl ){ UserOptions[index+1] = Ignore; } if ( pos == string::npos ){ break; } while ( isspace(perms[pos]) ) ++pos; switch ( perms[pos] ){ case ',': ++pos; break; case '!': ++pos; excl = true; break; default: Error ( "missing `,` while reading permutation" ); info_ok = false; } } if ( info_ok ){ depth = i; } } } else if ( compare_nocase_n( "Numeric:", splits[1] ) ){ if ( splits[2][0] != '.' ){ string::size_type pos = 0; while ( pos != string::npos ){ string tmp = string_tok( splits[2], pos, ",. 
" ); if ( tmp != "" ){ int k = TiCC::stringTo( tmp ); UserOptions[k] = Numeric; } } getline( is, range_buf ); } } else if ( compare_nocase_n( "Bin_Size:", splits[1] ) ){ int siz = TiCC::stringTo( splits[2] ); if ( siz < 2 || siz > 1000000 ){ Error( "invalid Bin_Size found: " + splits[2] ); info_ok = false; } else { Bin_Size = siz; } } else if ( compare_nocase_n( "Version", splits[1] ) ){ version = TiCC::stringTo( splits[2] ); if ( version >= 3 && num > 3 ){ if ( compare_nocase_n( "(Hashed)", splits[3] ) ){ Hashed = true; } } } } more = ( look_ahead(is) == '#' && getline( is, buffer ) ); } if ( version < 0 ) { Error( "missing Version information in Instance-Base file" ); info_ok = false; } else if ( version < 4 ) { Error( "A Version " + TiCC::toString(version) + " type InstanceBase file is found:\n" " You should recreate it as it is no longer supported" "\n in this version of the timbl package" ); } Version = version; if ( info_ok ){ result = depth; return result; } else { Error( "Can't retrieve Instance-Base\n" ); return 0; } } bool MBLClass::get_ranges( const string& rangeline ){ if ( NumNumFeatures() == 0 ){ return true; } istringstream is( rangeline ); string buf; char kar; bool result = false; is >> kar; // skip # is >> ws >> buf; if ( !compare_nocase_n( "Ranges:", buf ) ){ Error( "missing Ranges line in Instance-Base file" ); } else { is >> ws; if ( look_ahead(is) == '.' ){ result = true; } else { do { int k; is >> k; if ( UserOptions[k] != Numeric ){ Error( "Found range info for feature " + TiCC::toString(k) + ", which is Not defined as Numeric!" ); result = false; } else { is >> ws >> buf; double min, max; // this is rather awfull // sscanf is not realy what we want. But notice: // the buffer might contain: [-4--0] !!!! ARGL // simply using split('-') is wrong. // we should have used a different separator! 
// But we didn't int scancount = sscanf( buf.c_str(), "[%lf-%lf]", &min, &max ); if ( scancount == 2 ){ features[k-1]->Min( min ); features[k-1]->Max( max ); if ( is ){ is >> ws >> buf; if ( !buf.empty() && (buf[0] == '.' || buf[0] == ',' ) ){ result = true; } else { result = false; } } else { buf = "."; result = true; } } else { result = false; } } } while ( result && buf[0] != '.' ); } } return result; } inline void MBLClass::writePermSpecial( ostream &os ) const{ // write out the permutation and mark the last feature which is // NOT to be ignored with an exclamation mark, for instance: // < 5, 2, 3! 1, 4 > bool excl = false; os << "< "; for ( size_t j=0; j < NumOfFeatures()-1; ++j ){ if ( !excl && features[features.permutation[j+1]]->Ignore() ){ excl = true; os << features.permutation[j]+1 << "! "; } else { os << features.permutation[j]+1 << ", "; } } os << features.permutation[NumOfFeatures()-1]+1 << " >" << endl; } bool MBLClass::PutInstanceBase( ostream& os ) const { bool result = true; if ( ExpInvalid() ){ result = false; } else if ( InstanceBase == 0 ){ Warning( "unable to write an Instance Base, nothing learned yet" ); } else { os << "# Status: " << (InstanceBase->IsPruned()?"pruned":"complete") << endl; os << "# Permutation: "; writePermSpecial( os ); os << "# Numeric: "; bool first = true; for ( size_t i=0; i < NumOfFeatures(); ++i ){ if ( !features[i]->Ignore() && features[i]->isNumerical() ){ if ( !first ){ os << ", "; } else { first = false; } os << i+1; } } os << '.' << endl; if ( NumNumFeatures() > 0 ){ os << "# Ranges: "; first = true; for ( size_t j=0; j < NumOfFeatures(); ++j ){ if ( !features[j]->Ignore() && features[j]->isNumerical() ){ if ( !first ){ os << " , "; } else { first = false; } os << j+1 << " [" << features[j]->Min() << "-" << features[j]->Max() << "]"; } } os << " ." 
<< endl; } os << "# Bin_Size: " << Bin_Size << endl; if ( hashed_trees ){ InstanceBase->Save( os, *targets.hash(), *features.hash(), keep_distributions ); } else { InstanceBase->Save( os, keep_distributions ); } } return result; } } LanguageMachines-timbl-642727d/src/IBtree.cxx000066400000000000000000001350571451477526200210100ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . 
For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #include #include #include #include #include "ticcutils/StringOps.h" #include "ticcutils/UniHash.h" #include "ticcutils/XMLtools.h" #include "timbl/Common.h" #include "timbl/MsgClass.h" #include "timbl/Types.h" #include "timbl/Instance.h" #include "timbl/IBtree.h" using namespace std; using namespace icu; namespace Timbl { using namespace Common; IBtree::IBtree(): FValue(0), TValue(0), TDistribution(0), link(0), next(0) { } IBtree::IBtree( FeatureValue *_fv ): FValue(_fv), TValue( 0 ), TDistribution( 0 ), link(0), next(0) { } IBtree::~IBtree(){ delete TDistribution; delete link; delete next; } #ifdef IBSTATS inline IBtree *IBtree::add_feat_val( FeatureValue *FV, unsigned int& mm, IBtree *& tree, unsigned long& cnt ){ #else inline IBtree *IBtree::add_feat_val( FeatureValue *FV, IBtree *& tree, unsigned long& cnt ){ #endif // Add a Featurevalue to the IB. IBtree **pnt = &tree; while ( *pnt ){ if ( (*pnt)->FValue == FV ){ // already there, so bail out. return *pnt; } else if ( (*pnt)->FValue->Index() < FV->Index() ){ #ifdef IBSTATS ++mm; #endif pnt = &((*pnt)->next); } else { // need to add a new node before the current one IBtree *tmp = *pnt; *pnt = new IBtree( FV ); ++cnt; (*pnt)->next = tmp; return *pnt; } } // add at the end. *pnt = new IBtree( FV ); ++cnt; return *pnt; } static int IBtree_Indent = 0; ostream &operator<<( ostream &os, const IBtree& grap ){ // output an IBtree somewhat orderly. 
const IBtree *pnt = &grap; while ( pnt ) { if ( pnt->link || pnt->FValue ){ os << pnt->FValue; if ( pnt->TValue ){ os << "(" << pnt->TValue << ")" ; } if ( pnt->TDistribution ){ os << pnt->TDistribution ; } if ( pnt->link ){ os << "\t"; IBtree_Indent++; os << pnt->link; IBtree_Indent--; } else { os << endl; } } else { if ( pnt->TValue ){ os << "(" << pnt->TValue << ")" ; if ( pnt->link ){ os << "\t"; IBtree_Indent++; os << pnt->link; IBtree_Indent--; } } if ( pnt->TDistribution ){ os << pnt->TDistribution ; } os << endl; } if (pnt->next){ for ( int j=0; jnext; } return os; } ostream &operator<<( ostream &os, const IBtree *grap ){ if ( grap ){ os << *grap; } else { os << "null"; } return os; } ostream& operator<<( ostream &os, const InstanceBase_base& ib ){ os << "INSTANCE BASE, tree:" << endl; os << ib.InstBase << endl; return os; } ostream& operator<<( ostream &os, const InstanceBase_base *ib ){ if ( ib ){ os << *ib; } else { os << "empty INSTANCE BASE"; } return os; } unsigned long int InstanceBase_base::GetSizeInfo( unsigned long int& CurSize, double &Compression ) const { unsigned long int MaxSize = (Depth+1) * NumOfTails; CurSize = ibCount; Compression = 100*(1-(double)CurSize/(double)MaxSize); return CurSize * sizeof(IBtree); } void InstanceBase_base::write_tree( ostream &os, const IBtree *pnt ) const { // part of saving a tree in a recoverable manner os << " (" << pnt->TValue << " "; if ( pnt->link ){ if ( PersistentDistributions && pnt->TDistribution ){ os << pnt->TDistribution->Save(); } pnt = pnt->link; if ( pnt->FValue ){ os << "["; while ( pnt ){ os << pnt->FValue << " "; write_tree( os, pnt ); pnt = pnt->next; if ( pnt ){ os << ","; } } os << "]\n"; } else if ( !PersistentDistributions && pnt->TDistribution ){ os << pnt->TDistribution->Save(); } } else if ( pnt->TDistribution ){ os << pnt->TDistribution->Save(); } os << ")\n"; } void InstanceBase_base::write_tree_hashed( ostream &os, const IBtree *pnt ) const { // part of saving a tree in a recoverable 
manner os << "(" << pnt->TValue->Index(); if ( pnt->link ){ if ( PersistentDistributions && pnt->TDistribution ){ os << pnt->TDistribution->SaveHashed(); } pnt = pnt->link; if ( pnt->FValue ){ os << "["; while ( pnt ){ os << pnt->FValue->Index(); write_tree_hashed( os, pnt ); pnt = pnt->next; if ( pnt ){ os << ","; } } os << "]\n"; } else if ( pnt->TDistribution && !PersistentDistributions ){ os << pnt->TDistribution->SaveHashed(); } } else if ( pnt->TDistribution ){ os << pnt->TDistribution->SaveHashed(); } os << ")\n"; } const TargetValue *InstanceBase_base::TopTarget( bool &tie ) { if ( !DefaultsValid || !DefAss ){ TopT = 0; } if ( TopT == 0 ){ if ( !TopDistribution ){ // added to satisfy scan-build throw logic_error( "TopDistribution=0, might not happen!" ); } TopT = TopDistribution->BestTarget( tiedTop, Random); } tie = tiedTop; return TopT; } void InstanceBase_base::Save( ostream &os, bool persist ) { // save an IBtree for later use. bool temp_persist = PersistentDistributions; PersistentDistributions = persist; AssignDefaults(); bool dummy; os << "# Version " << Version << "\n#\n(" << TopTarget( dummy ) << " " << TopDistribution->Save(); IBtree *pnt = InstBase; if ( pnt ){ os << "["; while ( pnt ){ os << pnt->FValue; write_tree( os, pnt ); pnt = pnt->next; if ( pnt ){ os << ","; } } os << "]\n"; } os << ")\n"; PersistentDistributions = temp_persist; } xmlNode *to_node( const FeatureValue *fv ){ xmlNode *result = TiCC::XmlNewNode( "feature" ); TiCC::XmlAddContent( result, fv->name_string() ); return result; } xmlNode *to_node( const TargetValue *tv ){ xmlNode *result = TiCC::XmlNewNode( "target" ); TiCC::XmlAddContent( result, tv->name_string() ); return result; } xmlNode *to_node( const ClassDistribution *d ){ xmlNode *result = TiCC::XmlNewNode( "distribution" ); TiCC::XmlAddContent( result, d->DistToString() ); return result; } int count_next( const IBtree *pnt ){ int cnt = 0; const IBtree *tmp = pnt; while ( tmp ){ cnt++; tmp = tmp->next; } return cnt; } 
xmlNode *to_xml( IBtree *pnt ) { xmlNode *nodes = TiCC::XmlNewNode( "nodes" ); int cnt = count_next( pnt ); TiCC::XmlSetAttribute( nodes, "nodecount", TiCC::toString( cnt ) ); while ( pnt ){ xmlNode *node = TiCC::XmlNewChild( nodes, "node" ); if ( pnt->FValue ){ xmlAddChild( node, to_node( pnt->FValue ) ); } if ( pnt->TValue ){ xmlAddChild( node, to_node( pnt->TValue ) ); } if ( pnt->link ){ if ( pnt->link->FValue ){ xmlAddChild( node, to_xml(pnt->link) ); } else if ( pnt->link->TDistribution ){ xmlAddChild( node, to_node( pnt->link->TDistribution ) ); } } else if ( pnt->TDistribution ){ xmlAddChild( node, to_node( pnt->TDistribution ) ); } pnt = pnt->next; } return nodes; } void InstanceBase_base::toXML( ostream &os ) { // save an IBtree for later use. TiCC::XmlDoc doc( "root" ); xmlNode *root = doc.getRoot(); xmlAddChild( root, TiCC::XmlNewComment( "Version " + TiCC::toString(Version) ) ); bool dummy; xmlAddChild( root, to_node( TopTarget( dummy ) ) ); if ( PersistentDistributions ){ xmlAddChild( root, to_node( TopDistribution ) ); } IBtree *pnt = InstBase; xmlNode *tree = to_xml( pnt ); xmlAddChild( root, tree ); os << doc << endl; } UnicodeString VectoString( const vector& vec ){ UnicodeString result; for ( auto const& fv : vec ){ result += " " + fv->name(); } return result; } void InstanceBase_base::printStatsTree( ostream &os, unsigned int startLevel ) { if ( !PersistentDistributions ){ os << "no statsTree written, use IG tree and +D while training" << endl; } else { os << "statistics from level " << startLevel << " upwards" << endl; unsigned int level = startLevel; while ( level < Depth ){ IBtree *branch = InstBase; while ( branch ){ unsigned int l = level; IBtree *pnt = branch; vector pad; while ( pnt && l-- > 0 ){ pad.push_back( pnt->FValue ); pnt = pnt->link; } if ( pnt ){ os << level << " [" << VectoString(pad) << " " << pnt->FValue << " ] " << pnt->TDistribution << " < "; pnt = pnt->link; while ( pnt ){ os << pnt->FValue; pnt = pnt->next; if ( pnt ){ os 
<< " "; } } os << " >" << endl; } branch = branch->next; } ++level; } } } void save_hash( ostream& os, const Hash::UnicodeHash& cats, const Hash::UnicodeHash& feats ){ int Size = cats.num_of_entries(); os << "Classes" << endl; for ( int i=1; i <= Size; ++i ){ os << i << "\t" << cats.reverse_lookup( i ) << endl; } Size = feats.num_of_entries(); os << "Features" << endl; for ( int i=1; i <= Size; ++i ){ os << i << "\t" << feats.reverse_lookup( i ) << endl; } os << endl; } void InstanceBase_base::Save( ostream& os, const Hash::UnicodeHash& cats, const Hash::UnicodeHash& feats, bool persist ) { // save an IBtree for later use. bool temp_persist = PersistentDistributions; PersistentDistributions = persist; AssignDefaults(); os << "# Version " << Version << " (Hashed)\n#" << endl; save_hash( os , cats, feats ); bool dummy; os << "(" << TopTarget( dummy )->Index() << TopDistribution->SaveHashed(); IBtree *pnt = InstBase; if ( pnt ){ os << "["; while ( pnt ){ os << pnt->FValue->Index(); write_tree_hashed( os, pnt ); pnt = pnt->next; if ( pnt ){ os << ","; } } os << "]\n"; } os << ")\n"; PersistentDistributions = temp_persist; } IBtree* InstanceBase_base::read_list( istream &is, Feature_List& feats, Targets& Targ, int level ){ IBtree *result = NULL; IBtree **pnt = &result; bool goon = true; char delim; while ( is && goon ) { is >> delim; // skip the opening `[` or separating ',' *pnt = read_local( is, feats, Targ, level ); if ( !(*pnt) ){ delete result; return NULL; } pnt = &((*pnt)->next); goon = ( look_ahead(is) == ',' ); } is >> delim; // skip closing `]` return result; } IBtree* InstanceBase_base::read_list_hashed( istream &is, Feature_List& feats, Targets& Targ, int level ){ IBtree *result = NULL; IBtree **pnt = &result; bool goon = true; char delim; while ( is && goon ) { is >> delim; // skip the opening `[` or separating ',' *pnt = read_local_hashed( is, feats, Targ, level ); if ( !(*pnt) ){ delete result; return NULL; } pnt = &((*pnt)->next); goon = ( 
(look_ahead(is) == ',') ); } is >> delim; // skip closing `]` return result; } IBtree *InstanceBase_base::read_local( istream &is, Feature_List& feats, Targets& Targ, int level ){ if ( !is ){ return NULL; } IBtree *result = new IBtree(); ++ibCount; UnicodeString buf; char delim; is >> ws >> buf; result->FValue = feats.perm_feats[level]->add_value( buf, NULL, 1 ); is >> delim; if ( !is || delim != '(' ){ Error( "missing `(` in Instance Base file" ); delete result; return NULL; } is >> ws >> buf; result->TValue = Targ.Lookup( buf ); int nxt = look_ahead(is); if ( nxt == '{' ){ try { result->TDistribution = ClassDistribution::read_distribution( is, Targ, false ); } catch ( const exception& e ){ Warning( e.what() ); Error( "problems reading a distribution from InstanceBase file" ); delete result; return 0; } // also we have to update the targetinformation of the featurevalue // so we can recalculate the statistics later on. if ( result->FValue->ValFreq() > 0 ){ result->FValue->ReconstructDistribution( *(result->TDistribution) ); } } if ( look_ahead(is) == '[' ){ result->link = read_list( is, feats, Targ, level+1 ); if ( !(result->link) ){ delete result; return 0; } } else if ( look_ahead(is) == ')' && result->TDistribution ){ result->link = new IBtree(); ++ibCount; result->link->TValue = result->TValue; if ( PersistentDistributions ){ result->link->TDistribution = result->TDistribution->to_VD_Copy(); } else { result->link->TDistribution = result->TDistribution; result->TDistribution = NULL; } NumOfTails++; } is >> delim; if ( delim != ')' ){ Error( "missing `)` in Instance Base file" ); delete result; return NULL; } return result; } IBtree *InstanceBase_base::read_local_hashed( istream &is, Feature_List& feats, Targets& Targ, int level ){ if ( !is ){ return NULL; } IBtree *result = new IBtree(); ++ibCount; char delim; int index; is >> index; result->FValue = feats.perm_feats[level]->add_value( index, NULL, 1 ); is >> delim; if ( !is || delim != '(' ){ Error( "missing 
`(` in Instance Base file" ); delete result; return NULL; } is >> index; result->TValue = Targ.ReverseLookup( index ); int nxt = look_ahead(is); if ( nxt == '{' ){ // // A distribution is found, must be the last featurevalue // (the dummy node is not stored) // OR we have Persistent Distributions try { result->TDistribution = ClassDistribution::read_distribution_hashed( is, Targ, false ); } catch ( const exception& e ){ Warning( e.what() ); Error( "problems reading a hashed distribution from InstanceBase file" ); delete result; return 0; } } if ( look_ahead(is) == '[' ){ result->link = read_list_hashed( is, feats, Targ, level+1 ); if ( !(result->link) ){ delete result; return NULL; } } else if ( look_ahead(is) == ')' && result->TDistribution ){ // // make a dummy node for the targetdistributions just read // result->link = new IBtree(); ++ibCount; result->link->TValue = result->TValue; if ( PersistentDistributions ){ result->link->TDistribution = result->TDistribution->to_VD_Copy(); } else { result->link->TDistribution = result->TDistribution; result->TDistribution = NULL; } NumOfTails++; } is >> delim; if ( delim != ')' ){ Error( "missing `)` in Instance Base file" ); delete result; return NULL; } return result; } bool InstanceBase_base::ReadIB( istream &is, Feature_List& feats, Targets& Targ, int expected_version ){ if ( read_IB( is, feats, Targ, expected_version ) ){ InstBase->redo_distributions(); ClassDistribution *Top = InstBase->sum_distributions( PersistentDistributions ); delete Top; // still a bit silly but the Top Distribution is known // but we need to cleanup behind us also return true; } else { return false; } } void InstanceBase_base::fill_index(){ IBtree *pnt = InstBase; while ( pnt ){ fast_index[pnt->FValue->Index()] = pnt; pnt = pnt->next; } } bool IG_InstanceBase::ReadIB( istream &is, Feature_List& feats, Targets& Targ, int expected_version ){ if ( read_IB( is, feats, Targ, expected_version ) ){ if ( PersistentDistributions ){ ClassDistribution 
*Top = InstBase->sum_distributions( PersistentDistributions ); delete Top; // still a bit silly but the Top Distribution is known // but we need to cleanup behind us also } return true; } else { return false; } } bool InstanceBase_base::read_IB( istream &is, Feature_List& feats, Targets& Targs, int expected_version ){ NumOfTails = 0; DefAss = true; // always for a restored tree DefaultsValid = true; // always for a restored tree Version = expected_version; char delim; is >> delim; if ( !is || delim != '(' ){ Error( "missing first `(` in Instance Base file" ); } else { // first we get the value of the TopTarget. It's in the file // for backward compability string buf; is >> ws >> buf; delete TopDistribution; TopDistribution = 0; if ( look_ahead(is) == '{' ){ // Now read the TopDistribution, to get the Targets // in the right order in Targ try { TopDistribution = ClassDistribution::read_distribution( is, Targs, true ); } catch ( const exception& e ){ Warning( e.what() ); } } if ( !TopDistribution ){ Error( "problems reading Top Distribution from Instance Base file" ); } else { if ( look_ahead( is ) == '[' ){ InstBase = read_list( is, feats, Targs, 0 ); } if ( InstBase ){ is >> ws >> buf; if ( buf.empty() || buf[0] != ')' ){ Error( "missing last `)` in Instance base file, found " + buf ); } } } } return (InstBase != NULL); } bool InstanceBase_base::read_hash( istream& is, Hash::UnicodeHash& cats, Hash::UnicodeHash& feats ) const { UnicodeString line; is >> ws; is >> line; if ( line.caseCompare( "Classes", 0 ) ){ Error( "missing 'Classes' keyword in Hashinfo" ); return false; } is >> ws; while ( TiCC::getline( is, line ) ){ vector vals = TiCC::split( line ); if ( vals.size() == 2 ){ // just ignore index! 
cats.hash( vals[1] ); } else { break; } is >> ws; } if ( line.caseCompare( "Features", 0 ) ){ Error( "missing 'Features' keyword in Hashinfo" ); return false; } while ( TiCC::getline( is, line ) ){ vector vals = TiCC::split( line ); if ( vals.size() == 2 ){ // just ignore index! feats.hash( vals[1] ); } else { break; } } return true; } bool InstanceBase_base::ReadIB_hashed( istream& is, Feature_List& feats, Targets& Targs, int expected_version ){ if ( read_IB_hashed( is, feats, Targs, expected_version ) ){ InstBase->redo_distributions(); ClassDistribution *Top = InstBase->sum_distributions( PersistentDistributions ); delete Top; // still a bit silly but the Top Distribution is known // but we need to cleanup behind us also return true; } else { return false; } } bool IG_InstanceBase::ReadIB_hashed( istream& is, Feature_List& feats, Targets& Targs, int expected_version ){ if ( read_IB_hashed( is, feats, Targs, expected_version ) ){ if ( PersistentDistributions ){ ClassDistribution *Top = InstBase->sum_distributions( PersistentDistributions ); delete Top; // still a bit silly but the Top Distribution is known // but we need to cleanup behind us also } return true; } else { return false; } } bool InstanceBase_base::read_IB_hashed( istream& is, Feature_List& feats, Targets& Targs, int expected_version ){ char delim; NumOfTails = 0; DefAss = true; // always for a restored tree DefaultsValid = true; // always for a restored tree Version = expected_version; read_hash( is, *Targs.hash(), *feats.hash() ); is >> delim; if ( !is || delim != '(' ){ Error( "missing first `(` in Instance Base file" ); } else { // first we get the value of the TopTarget. 
It's in the file // for backward compability int dum; is >> dum; delete TopDistribution; TopDistribution = 0; if ( look_ahead(is) == '{' ){ // Now read the TopDistribution, to get the Targets // in the right order in Targ try { TopDistribution = ClassDistribution::read_distribution_hashed( is, Targs, true ); } catch ( const string& what ){ Warning( what ); } if ( !TopDistribution ){ Error( "problems reading Top Distribution from Instance Base file" ); } } else { Error( "problems reading Top Distribution from Instance Base file" ); } if ( look_ahead( is ) == '[' ){ InstBase = read_list_hashed( is, feats, Targs, 0 ); } if ( InstBase ){ is >> delim; if ( delim != ')' ){ Error( "missing last `)` in Instance base file, found: " + string(1,delim) ); } } } return (InstBase != NULL); } bool InstanceBase_base::HasDistributions() const { if ( InstBase && InstBase->link ){ return InstBase->link->TDistribution != NULL; } else { return false; } } inline ClassDistribution *IBtree::sum_distributions( bool keep ){ // create a new distribution at this level by summing up the // distibutions of all branches. ClassDistribution *result; if ( !keep ){ if ( TDistribution ){ if ( FValue ){ result = TDistribution; TDistribution = NULL; } else { result = TDistribution->to_VD_Copy(); } } else { result = new ClassDistribution(); } IBtree *pnt = this->next; while ( pnt ){ if ( pnt->TDistribution ){ result->Merge( *(pnt->TDistribution) ); } if ( FValue ){ delete pnt->TDistribution; pnt->TDistribution = NULL; } pnt = pnt->next; } } else { if ( TDistribution ){ result = TDistribution->to_VD_Copy(); } else { result = new ClassDistribution(); } IBtree *pnt = this->next; while ( pnt ){ if ( pnt->TDistribution ){ result->Merge( *(pnt->TDistribution) ); } pnt = pnt->next; } } return result; } void IBtree::assign_defaults( bool Random, bool persist, size_t level ){ // recursively gather Distribution information up to the top. // at each Node we use that info to calculate the Default target. 
// when level > 1 the info might be persistent for IGTREE use IBtree *pnt = this; bool dummy; while ( pnt ){ if ( pnt->link ){ if ( !pnt->TDistribution ){ pnt->link->assign_defaults( Random, persist, level-1 ); pnt->TDistribution = pnt->link->sum_distributions( level > 1 && persist ); } } pnt->TValue = pnt->TDistribution->BestTarget( dummy, Random ); pnt = pnt->next; } } void IBtree::re_assign_defaults( bool Random, bool persist ){ // recursively gather Distribution information up to the top. // at each Node we use that info to calculate the Default target. IBtree *pnt = this; bool dummy; while ( pnt ){ if ( pnt->link ){ delete pnt->TDistribution; pnt->link->re_assign_defaults( Random, persist ); pnt->TDistribution = pnt->link->sum_distributions( persist ); } pnt->TValue = pnt->TDistribution->BestTarget( dummy, Random ); pnt = pnt->next; } } void IBtree::redo_distributions(){ // recursively gather Distribution information up to the top. // removing old info... // at each node we also Reconstruct Feature distributions // we keep the Target value that was given! IBtree *pnt = this; while ( pnt ){ if ( pnt->link ){ pnt->link->redo_distributions(); delete pnt->TDistribution; pnt->TDistribution = pnt->link->sum_distributions( false ); if ( pnt->FValue->ValFreq() > 0 ){ pnt->FValue->ReconstructDistribution( *(pnt->TDistribution) ); } } pnt = pnt->next; } } inline IBtree *IBtree::make_unique( const TargetValue *Top, unsigned long& cnt ){ // remove branches with the same target as the Top, except when they // still have a subbranch, which means that they are an exception. 
IBtree **tmp, *dead, *result; result = this; tmp = &result; while ( *tmp ){ if ( (*tmp)->TValue == Top && (*tmp)->link == NULL ){ dead = *tmp; *tmp = (*tmp)->next; dead->next=NULL; --cnt; delete dead; } else { tmp = &((*tmp)->next); } } return result; } inline IBtree *IBtree::Reduce( const TargetValue *Top, unsigned long& cnt, long depth ){ // recursively cut default nodes, (with make unique,) starting at the // leaves of the Tree and moving back to the top. IBtree *pnt = this; while ( pnt ){ if ( pnt->link != NULL ){ pnt->link = pnt->link->Reduce( pnt->TValue, cnt, depth-1 ); } pnt = pnt->next; } if ( depth <= 0 ){ return make_unique( Top, cnt ); } else { return this; } } const ClassDistribution *IBtree::exact_match( const Instance& Inst ) const { // Is there an exact match between the Instance and the IB // If so, return the best Distribution. const IBtree *pnt = this; int pos = 0; while ( pnt ){ if ( pnt->link == NULL ){ if ( pnt->TDistribution->ZeroDist() ){ return NULL; } else { return pnt->TDistribution; } } else if ( Inst.FV[pos]->isUnknown() ){ return NULL; } else if ( pnt->FValue == Inst.FV[pos] ){ if ( pnt->FValue->ValFreq() == 0 ){ return NULL; } else { pnt = pnt->link; pos++; } } else { pnt = pnt->next; } } return NULL; } InstanceBase_base::InstanceBase_base( size_t depth, unsigned long int&cnt, bool Rand, bool persist ): DefAss( false ), DefaultsValid( false ), Random( Rand ), PersistentDistributions( persist ), Version( 4 ), TopDistribution( new ClassDistribution ), WTop( 0 ), TopT( 0 ), tiedTop(false), InstBase( 0 ), LastInstBasePos( 0 ), ibCount( cnt ), Depth( depth ), NumOfTails( 0 ) { InstPath.resize(depth,0); RestartSearch.resize(depth,0); SkipSearch.resize(depth,0); } InstanceBase_base::~InstanceBase_base(){ // the Instance can become very large, with even millions of 'next' pointers // so recursive deletion will use a lot of stack // therefore we choose to iterate the first level(s). 
IBtree *pnt1 = InstBase; while ( pnt1 ){ IBtree *toDel1 = pnt1; pnt1 = pnt1->next; toDel1->next = 0; IBtree *pnt2 = toDel1->link; toDel1->link = 0; while ( pnt2 ){ IBtree *toDel2 = pnt2; pnt2 = pnt2->next; toDel2->next = 0; IBtree *pnt3 = toDel2->link; toDel2->link = 0; while ( pnt3 ){ IBtree *toDel3 = pnt3; pnt3 = pnt3->next; toDel3->next = 0; delete toDel3; } delete toDel2; } delete toDel1; } delete TopDistribution; delete WTop; } IB_InstanceBase *IB_InstanceBase::clone() const { return new IB_InstanceBase( Depth, ibCount, Random ); } IB_InstanceBase *IB_InstanceBase::Copy() const { IB_InstanceBase *result = clone(); result->DefAss = DefAss; result->DefaultsValid = DefaultsValid; result->NumOfTails = NumOfTails; // only usefull for Server??? result->InstBase = InstBase; result->LastInstBasePos = LastInstBasePos; delete result->TopDistribution; result->TopDistribution = TopDistribution; return result; } IG_InstanceBase *IG_InstanceBase::clone() const { return new IG_InstanceBase( Depth, ibCount, Random, Pruned, PersistentDistributions ); } IG_InstanceBase *IG_InstanceBase::Copy() const { IG_InstanceBase *result = clone(); result->Pruned = Pruned; result->DefAss = DefAss; result->DefaultsValid = DefaultsValid; result->NumOfTails = NumOfTails; // only usefull for Server??? 
result->InstBase = InstBase; result->LastInstBasePos = LastInstBasePos; delete result->TopDistribution; result->TopDistribution = TopDistribution; return result; } void IBtree::countBranches( unsigned int l, std::vector& terminals, std::vector& nonTerminals ){ if ( link && link->FValue != 0 ){ ++nonTerminals[l]; link->countBranches( l+1, terminals, nonTerminals ); } else { ++terminals[l]; } if ( next ){ next->countBranches( l, terminals, nonTerminals ); } } void InstanceBase_base::summarizeNodes( std::vector& terminals, std::vector& nonTerminals ){ terminals.clear(); nonTerminals.clear(); terminals.resize( Depth+1, 0 ); nonTerminals.resize( Depth+1, 0 ); if ( InstBase ){ InstBase->countBranches( 0, terminals, nonTerminals ); } } TRIBL_InstanceBase *TRIBL_InstanceBase::clone() const { return new TRIBL_InstanceBase( Depth, ibCount, Random, PersistentDistributions ); } TRIBL_InstanceBase *TRIBL_InstanceBase::Copy() const { TRIBL_InstanceBase *result = clone(); result->Threshold = Threshold; result->DefAss = DefAss; result->DefaultsValid = DefaultsValid; result->NumOfTails = NumOfTails; // only usefull for Server??? result->InstBase = InstBase; result->LastInstBasePos = LastInstBasePos; delete result->TopDistribution; result->TopDistribution = TopDistribution; return result; } TRIBL2_InstanceBase *TRIBL2_InstanceBase::clone() const { return new TRIBL2_InstanceBase( Depth, ibCount, Random, PersistentDistributions ); } TRIBL2_InstanceBase *TRIBL2_InstanceBase::Copy() const { TRIBL2_InstanceBase *result = clone(); result->DefAss = DefAss; result->DefaultsValid = DefaultsValid; result->NumOfTails = NumOfTails; // only usefull for Server??? 
result->InstBase = InstBase; result->LastInstBasePos = LastInstBasePos; delete result->TopDistribution; result->TopDistribution = TopDistribution; return result; } IB_InstanceBase* TRIBL_InstanceBase::IBPartition( IBtree *sub ) const { int i=0; IBtree *tmp = sub; while ( tmp && tmp->link ){ i++; tmp = tmp->link; } IB_InstanceBase *result = new IB_InstanceBase( i, ibCount, Random ); result->DefAss = DefAss; result->DefaultsValid = DefaultsValid; result->NumOfTails = NumOfTails; // only usefull for Server??? result->InstBase = sub; if ( sub ){ delete result->TopDistribution; result->TopDistribution = sub->sum_distributions( false ); } return result; } IB_InstanceBase* TRIBL2_InstanceBase::IBPartition( IBtree *sub ) const { int i=0; IBtree *tmp = sub; while ( tmp && tmp->link ){ i++; tmp = tmp->link; } IB_InstanceBase *result = new IB_InstanceBase( i, ibCount, Random ); result->DefAss = DefAss; result->DefaultsValid = DefaultsValid; result->NumOfTails = NumOfTails; // only usefull for Server??? result->InstBase = sub; if ( sub ){ delete result->TopDistribution; result->TopDistribution = sub->sum_distributions( false ); } return result; } void InstanceBase_base::CleanPartition( bool distToo ){ InstBase = 0; // prevent deletion of InstBase in next step! 
if ( !distToo ){ TopDistribution = 0; // save TopDistribution for deletion } delete this; } void InstanceBase_base::AssignDefaults(){ if ( !DefaultsValid ){ if ( !DefAss ){ InstBase->assign_defaults( Random, PersistentDistributions, Depth ); } else { InstBase->re_assign_defaults( Random, PersistentDistributions ); } ClassDistribution *Top = InstBase->sum_distributions( PersistentDistributions ); delete Top; // still a bit silly but the Top Distribution is known } DefAss = true; DefaultsValid = true; } void TRIBL_InstanceBase::AssignDefaults( size_t threshold ){ if ( Threshold != threshold ){ Threshold = threshold; DefaultsValid = false; } if ( !DefaultsValid ){ InstBase->assign_defaults( Random, PersistentDistributions, Threshold ); } DefAss = true; DefaultsValid = true; } void InstanceBase_base::Prune( const TargetValue *, long ){ FatalError( "You Cannot Prune this kind of tree! " ); } void IG_InstanceBase::Prune( const TargetValue *top, long depth ){ AssignDefaults( ); if ( !Pruned ) { InstBase = InstBase->Reduce( top, ibCount, depth ); Pruned = true; } } void IG_InstanceBase::specialPrune( const TargetValue *top ){ IBtree *pnt = InstBase->link; // we have to fix the toptarget here, because the node // is build incremental ClassDistribution dist; while ( pnt ){ if ( pnt->TDistribution ){ dist.Merge( *pnt->TDistribution ); } pnt = pnt->next; } bool dummy; InstBase->TValue = dist.BestTarget( dummy, Random ); InstBase = InstBase->Reduce( top, ibCount, 0 ); Pruned = true; } bool InstanceBase_base::AddInstance( const Instance& Inst ){ bool sw_conflict = false; // add one instance to the IB IBtree *hlp; IBtree **pnt = &InstBase; #ifdef IBSTATS if ( mismatch.size() == 0 ){ mismatch.resize(Depth+1, 0); } #endif if ( !InstBase ){ for ( unsigned int i = 0; i < Depth; ++i ){ *pnt = new IBtree( Inst.FV[i] ); ++ibCount; pnt = &((*pnt)->link); } LastInstBasePos = InstBase; } else { for ( unsigned int i = 0; i < Depth; ++i ){ #ifdef IBSTATS hlp = IBtree::add_feat_val( 
Inst.FV[i], mismatch[i], *pnt, ibCount ); #else hlp = IBtree::add_feat_val( Inst.FV[i], *pnt, ibCount ); #endif if ( i==0 && hlp->next == 0 ){ LastInstBasePos = hlp; } pnt = &(hlp->link); } } if ( *pnt == NULL ){ *pnt = new IBtree(); ++ibCount; if ( abs( Inst.ExemplarWeight() ) > Epsilon ){ (*pnt)->TDistribution = new WClassDistribution(); } else { (*pnt)->TDistribution = new ClassDistribution; } NumOfTails++; } int occ = Inst.Occurrences(); if ( abs( Inst.ExemplarWeight() ) > Epsilon ){ sw_conflict = (*pnt)->TDistribution->IncFreq( Inst.TV, occ, Inst.ExemplarWeight() ); } else { (*pnt)->TDistribution->IncFreq(Inst.TV, occ ); } TopDistribution->IncFreq(Inst.TV, occ ); DefaultsValid = false; return !sw_conflict; } bool InstanceBase_base::MergeSub( InstanceBase_base *ib ){ if ( ib->InstBase ){ // we place the InstanceBase of ib in front of the current InstanceBase // the assumption is that both are sorted on ascending index, and that // the indices in ib are all smaller then those in the current IB if ( !InstBase ){ InstBase = ib->InstBase; } else { IBtree *ibPnt = ib->InstBase; if ( ib->LastInstBasePos->FValue->Index() >= InstBase->FValue->Index() ){ Error( "MergeSub assumes sorted ans unique additions!" ); return false; } else { ib->LastInstBasePos->next = InstBase; InstBase = ibPnt; } } } else { Warning( "adding empty instancebase?" 
); } NumOfTails += ib->NumOfTails; TopDistribution->Merge( *ib->TopDistribution ); #ifdef IBSTATS if ( ib->mismatch.size() > 0 ){ if ( mismatch.size() == 0 ){ mismatch.resize( ib->mismatch.size(), 0 ); } for ( unsigned int i = 0; i < mismatch.size(); ++i ){ mismatch[i] += ib->mismatch[i]; } } #endif DefaultsValid = false; DefAss = false; ib->InstBase = 0; return true; } void IBtree::cleanDistributions() { IBtree *pnt = this; while ( pnt ){ delete pnt->TDistribution; pnt->TDistribution = 0; if ( pnt->link ){ pnt->link->cleanDistributions(); } pnt = pnt->next; } } bool IG_InstanceBase::MergeSub( InstanceBase_base *ib ){ if ( ib->InstBase ){ if ( !PersistentDistributions ){ ib->InstBase->cleanDistributions(); } if ( !InstBase ){ InstBase = ib->InstBase; } else { IBtree *ibPnt = ib->InstBase; while( ibPnt ){ IBtree *ibPntNext = ibPnt->next; ibPnt->next = 0; const FeatureValue *fv = ibPnt->FValue; IBtree **pnt = &InstBase; if ( (*pnt)->FValue->Index() < fv->Index() ){ Error( "MergeSub assumes sorted additions!" 
); return false; } if ( (*pnt)->FValue->Index() == fv->Index() ){ // this may happen // snip the link and insert at our link IBtree *snip = ibPnt->link; ibPnt->link = 0; delete ibPnt->TDistribution; ibPnt->TDistribution = 0; --ib->ibCount; delete ibPnt; while ( snip ){ if ( PersistentDistributions ){ (*pnt)->TDistribution->Merge( *snip->TDistribution ); } else { delete snip->TDistribution; } IBtree **tmp = &(*pnt)->link; while ( *tmp && (*tmp)->FValue->Index() < snip->FValue->Index() ){ tmp = &(*tmp)->next; } IBtree *nxt = snip->next; snip->next = 0; if ( *tmp ){ if ( (*tmp)->FValue->Index() == snip->FValue->Index() ){ return false; } snip->next = *tmp; } *tmp = snip; snip = nxt; } } else { ibPnt->next = *pnt; *pnt = ibPnt; } ibPnt = ibPntNext; } } } NumOfTails += ib->NumOfTails; TopDistribution->Merge( *ib->TopDistribution ); #ifdef IBSTATS if ( ib->mismatch.size() > 0 ){ if ( mismatch.size() == 0 ){ mismatch.resize( ib->mismatch.size(), 0 ); } for ( unsigned int i = 0; i < mismatch.size(); ++i ){ mismatch[i] += ib->mismatch[i]; } } #endif Pruned = true; DefaultsValid = true; DefAss = true; ib->InstBase = 0; return true; } void InstanceBase_base::RemoveInstance( const Instance& Inst ){ for ( int occ=0; occ < Inst.Occurrences(); ++occ ){ // remove an instance from the IB int pos = 0; IBtree *pnt = InstBase; while ( pnt ){ if ( pnt->link == NULL ){ pnt->TDistribution->DecFreq(Inst.TV); TopDistribution->DecFreq(Inst.TV); break; } else { if ( pnt->FValue == Inst.FV[pos] ){ pnt = pnt->link; pos++; } else { pnt = pnt->next; } } } } DefaultsValid = false; } const ClassDistribution *InstanceBase_base::InitGraphTest( vector&, const vector *, const size_t, const size_t ){ FatalError( "InitGraphTest" ); return 0; } const IBtree *IBtree::search_node( const FeatureValue *fv ) const { const IBtree *pnt = 0; if ( fv ){ if ( fv->isUnknown() ){ return 0; } pnt = this; while ( pnt ){ if ( pnt->FValue == fv ){ break; } pnt = pnt->next; } } return pnt; } const IBtree 
*InstanceBase_base::fast_search_node( const FeatureValue *fv ) { const IBtree *result = 0; if ( fast_index.empty() ){ fill_index(); } if ( fv ){ if ( fv->isUnknown() ){ return 0; } auto const& It = fast_index.find( fv->Index() ); if ( It != fast_index.end() ){ result = It->second; } } return result; } //#define DEBUGTESTS const ClassDistribution *IB_InstanceBase::InitGraphTest( vector& Path, const vector *inst, const size_t off, const size_t eff ){ const IBtree *pnt; const ClassDistribution *result = NULL; testInst = inst; offSet = off; effFeat = eff; #ifdef DEBUGTESTS cerr << "initTest for " << *inst << endl; #endif pnt = InstBase; for ( unsigned int i = 0; i < Depth; ++i ){ if ( !pnt ){ // added to satisfy scan-build throw logic_error( "pnt may never be 0!" ); } InstPath[i] = pnt; RestartSearch[i] = pnt; if ( i == 0 ){ pnt = fast_search_node( (*testInst)[offSet+i] ); } else { pnt = pnt->search_node( (*testInst)[offSet+i] ); } if ( pnt ){ // found an exact match, so mark restart position if ( RestartSearch[i] == pnt ){ RestartSearch[i] = pnt->next; } SkipSearch[i] = pnt; InstPath[i] = pnt; } else { // no exact match at this level. Just start with the first.... 
RestartSearch[i] = NULL; SkipSearch[i] = NULL; pnt = InstPath[i]; } Path[i] = pnt->FValue; #ifdef DEBUGTESTS cerr << "set Path[" << i << "] to " << Path[i] << endl; #endif pnt = pnt->link; if ( pnt && pnt->link == NULL ){ result = pnt->TDistribution; break; } } while ( result && result->ZeroDist() ){ // This might happen when doing LOO or CV tests size_t TmpPos = effFeat-1; result = NextGraphTest( Path, TmpPos ); } #ifdef DEBUGTESTS cerr << "Start test" << Path << endl; #endif return result; } const ClassDistribution *InstanceBase_base::NextGraphTest( vector&, size_t& ){ FatalError( "NextGraphTest" ); return 0; } const ClassDistribution *IB_InstanceBase::NextGraphTest( vector& Path, size_t& pos ){ const IBtree *pnt = NULL; const ClassDistribution *result = NULL; bool goon = true; while ( !pnt && goon ){ if ( RestartSearch[pos] == NULL ) { // No exact match here, so no real problems pnt = InstPath[pos]->next; // cerr << "NO MATCH increment "; // if ( pnt ) // cerr << pnt->FValue; // cerr << endl; } else { pnt = RestartSearch[pos]; // cerr << "restart met " << pnt->FValue << endl; RestartSearch[pos] = NULL; } if ( pnt && pnt == SkipSearch[pos] ){ pnt = pnt->next; } if ( !pnt ) { if ( pos == 0 ){ goon = false; } else { pos--; // cerr << "decremented pos to " << pos << endl; } } } if ( pnt && goon ) { InstPath[pos] = pnt; Path[pos] = pnt->FValue; #ifdef DEBUGTESTS cerr << "set Path[" << pos<< "] to " << Path[pos] << endl; #endif pnt = pnt->link; for ( size_t j=pos+1; j < Depth; ++j ){ const IBtree *tmp = pnt->search_node( (*testInst)[offSet+j] ); if ( tmp ){ // we found an exact match, so mark Restart position if ( pnt == tmp ){ RestartSearch[j] = pnt->next; } else { RestartSearch[j] = pnt; } SkipSearch[j] = tmp; InstPath[j] = tmp; Path[j] = tmp->FValue; pnt = tmp->link; } else { // no exact match at this level. Just start with the first.... 
RestartSearch[j] = NULL; SkipSearch[j] = NULL; InstPath[j] = pnt; Path[j] = pnt->FValue; pnt = pnt->link; } #ifdef DEBUGTESTS cerr << "set Path[" << j<< "] to " << Path[j] << endl; #endif } if ( pnt ){ result = pnt->TDistribution; } } if ( result && result->ZeroDist() ){ // This might happen when doing LOO or CV tests size_t TmpPos = effFeat-1; result = NextGraphTest( Path, TmpPos ); if ( TmpPos < pos ){ pos = TmpPos; } } #ifdef DEBUGTESTS cerr << "try next " << Path << " pos = " << pos << endl; #endif return result; } const ClassDistribution *InstanceBase_base::IG_test( const Instance& , size_t &, bool &, const TargetValue *& ){ FatalError( "IG_test " ); return NULL; } const ClassDistribution *IG_InstanceBase::IG_test( const Instance& Inst, size_t &end_level, bool &leaf, const TargetValue *&result ) { // The Test function for the IG algorithm, returns a pointer to the // distribution of the last matching position in the Tree, it's position // in the Instance Base and the default TargetValue result = NULL; ClassDistribution *Dist = NULL; int pos = 0; leaf = false; const IBtree *pnt = fast_search_node( Inst.FV[pos] ); while ( pnt ){ result = pnt->TValue; if ( PersistentDistributions ){ Dist = pnt->TDistribution; } pnt = pnt->link; if ( pnt && !pnt->FValue ){ pnt = NULL; } leaf = (pnt == NULL); ++pos; if ( pnt ){ pnt = pnt->search_node( Inst.FV[pos] ); } } end_level = pos; if ( end_level == 0 ){ if ( !WTop && TopDistribution ){ WTop = TopDistribution->to_WVD_Copy(); } Dist = WTop; } return Dist; } IB_InstanceBase *InstanceBase_base::TRIBL_test( const Instance& , size_t, const TargetValue *&, const ClassDistribution *&, size_t & ){ FatalError( "TRIBL_test " ); return NULL; } IB_InstanceBase *InstanceBase_base::TRIBL2_test( const Instance& , const ClassDistribution *&, size_t & ){ FatalError( "TRIBL2_test " ); return NULL; } IB_InstanceBase *TRIBL_InstanceBase::TRIBL_test( const Instance& Inst, size_t threshold, const TargetValue *&TV, const ClassDistribution *&dist, 
size_t &level ) { // The Test function for the TRIBL algorithm, returns a pointer to the // Target at the last matching position in the Tree, // or the subtree Instance Base necessary for IB1 IBtree *pnt = InstBase; #pragma omp critical AssignDefaults( threshold ); TV = NULL; dist = NULL; IB_InstanceBase *subt = NULL; size_t pos = 0; while ( pnt && pos < threshold ){ if ( pnt->FValue == Inst.FV[pos] ){ dist = pnt->TDistribution; TV = pnt->TValue; pnt = pnt->link; if ( pnt && !pnt->FValue ){ dist = pnt->TDistribution; pnt = NULL; } pos++; } else { pnt = pnt->next; } } if ( pos == threshold ){ if ( pnt ){ subt = IBPartition( pnt ); dist = NULL; } else { level = pos; } } else { if ( pos == 0 && dist == NULL ){ if ( !WTop && TopDistribution ){ WTop = TopDistribution->to_WVD_Copy(); } dist = WTop; bool dummy; TV = TopTarget( dummy ); } else { level = pos; } } return subt; } IB_InstanceBase *TRIBL2_InstanceBase::TRIBL2_test( const Instance& Inst, const ClassDistribution *& dist, size_t &level ){ // The Test function for the TRIBL2 algorithm, returns a pointer to the // the subtree Instance Base necessary for IB1 IBtree *pnt = InstBase; dist = NULL; #pragma omp critical AssignDefaults(); int pos = 0; IB_InstanceBase *subtree = NULL; IBtree *last_match = pnt; while ( pnt ){ if ( pnt->FValue == Inst.FV[pos] ){ // a match, go deeper pnt = pnt->link; last_match = pnt; pos++; if ( pnt && !pnt->FValue ){ // at the end, an exact match dist = pnt->TDistribution; last_match = NULL; break; } } else { pnt = pnt->next; } } if ( last_match ){ subtree = IBPartition( last_match ); level = pos; } return subtree; } } // namespace Timbl LanguageMachines-timbl-642727d/src/IGExperiment.cxx000066400000000000000000000361121451477526200221660ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU 
General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #include #include #include #include #include "timbl/Common.h" #include "timbl/Types.h" #include "timbl/IBtree.h" #include "timbl/Instance.h" #include "timbl/TimblExperiment.h" #include "ticcutils/Timer.h" #include "ticcutils/PrettyPrint.h" namespace Timbl { using namespace std; using namespace icu; using TiCC::operator<<; void IG_Experiment::InitInstanceBase(){ srand( RandomSeed() ); default_order(); set_order(); runningPhase = TrainWords; InstanceBase = new IG_InstanceBase( EffectiveFeatures(), ibCount, (RandomSeed()>=0), false, KeepDistributions() ); } void IG_Experiment::initExperiment( bool ){ if ( !ExpInvalid() ) { if ( !MBL_init ){ // do this only when necessary stats.clear(); delete confusionInfo; confusionInfo = 0; if ( Verbosity(ADVANCED_STATS) ){ confusionInfo = new ConfusionMatrix( targets.num_of_values() ); } if ( !is_copy ){ InitWeights(); if ( do_diversify ){ diverseWeights(); } srand( random_seed ); } MBL_init = true; } } } bool IG_Experiment::checkTestFile(){ if ( TimblExperiment::checkTestFile() ){ return sanityCheck(); } else { return false; } } ostream& operator<< ( ostream& os, const fileDoubleIndex& fmi ){ os << "["; for ( const auto& it : fmi ){ os << it.first << " " << it.second << endl; } os << "]"; return os; } bool IG_Experiment::ClassicLearn( const string& FileName, bool warnOnSingleTarget ){ bool result = true; if ( is_synced ) { 
CurrentDataFile = FileName; } if ( CurrentDataFile == "" ){ if ( FileName == "" ){ Warning( "unable to build an InstanceBase: No datafile defined yet" ); result = false; } else { if ( !Prepare( FileName, warnOnSingleTarget ) || ExpInvalid() ){ result = false; } } } else if ( FileName != "" && CurrentDataFile != FileName ){ Error( "Unable to Learn from file '" + FileName + "'\n" "while previously instantiated from file '" + CurrentDataFile + "'" ); result = false; } if ( result ) { TiCC::Timer learnT; learnT.start(); InitInstanceBase(); if ( ExpInvalid() ){ return false; } if ( EffectiveFeatures() < 2 ){ fileIndex fmIndex; result = build_file_index( CurrentDataFile, fmIndex ); if ( result ){ stats.clear(); if ( !Verbosity(SILENT) ) { Info( "\nPhase 3: Learning from Datafile: " + CurrentDataFile ); time_stamp( "Start: ", 0 ); } UnicodeString Buffer; IG_InstanceBase *outInstanceBase = 0; TargetValue *TopTarget = targets.MajorityClass(); // cerr << "MAJORITY CLASS = " << TopTarget << endl; // Open the file. // ifstream datafile( CurrentDataFile, ios::in); // for ( const auto& fit : fmIndex ){ for ( const auto& sit : fit.second ){ datafile.clear(); datafile.seekg( sit ); nextLine( datafile, Buffer ); chopLine( Buffer ); // Progress update. // if ( ( stats.dataLines() % Progress() ) == 0 ){ time_stamp( "Learning: ", stats.dataLines() ); } chopped_to_instance( TrainWords ); if ( !outInstanceBase ){ outInstanceBase = new IG_InstanceBase( EffectiveFeatures(), ibCount, (RandomSeed()>=0), false, true ); } outInstanceBase->AddInstance( CurrInst ); } } if ( outInstanceBase ){ // cerr << "Out Instance Base" << endl; // time_stamp( "Start Pruning: " ); // cerr << outInstanceBase << endl; outInstanceBase->Prune( TopTarget ); // time_stamp( "Finished Pruning: " ); // cerr << outInstanceBase << endl; // time_stamp( "Before Merge: " ); // cerr << InstanceBase << endl; if ( !InstanceBase->MergeSub( outInstanceBase ) ){ FatalError( "Merging InstanceBases failed. 
PANIC" ); return false; } delete outInstanceBase; } } } else { fileDoubleIndex fmIndex; result = build_file_multi_index( CurrentDataFile, fmIndex ); // cerr << "indexing took " << t << endl; if ( result ){ stats.clear(); if ( !Verbosity(SILENT) ) { Info( "\nPhase 3: Learning from Datafile: " + CurrentDataFile ); time_stamp( "Start: ", 0 ); } UnicodeString Buffer; IG_InstanceBase *PartInstanceBase = 0; IG_InstanceBase *outInstanceBase = 0; TargetValue *TopTarget = targets.MajorityClass(); // cerr << "MAJORITY CLASS = " << TopTarget << endl; // Open the file. // ifstream datafile( CurrentDataFile, ios::in); // for ( const auto& dit : fmIndex ){ // FeatureValue *the_fv = dit.first; // cerr << "handle feature '" << the_fv << "' met index " // << the_fv->Index() << endl; if ( dit.second.size() < 1 ){ FatalError( "panic" ); } if ( igOffset() > 0 && dit.second.size() > igOffset() ){ // cerr << "within offset!" << endl; IG_InstanceBase *TmpInstanceBase = 0; TmpInstanceBase = new IG_InstanceBase( EffectiveFeatures(), ibCount, (RandomSeed()>=0), false, true ); for ( const auto& fit : dit.second ) { for ( const auto& sit : fit.second ){ datafile.clear(); datafile.seekg( sit ); nextLine( datafile, Buffer ); chopLine( Buffer ); // Progress update. // if ( ( stats.dataLines() % Progress() ) == 0 ){ time_stamp( "Learning: ", stats.dataLines() ); } chopped_to_instance( TrainWords ); if ( !PartInstanceBase ){ PartInstanceBase = new IG_InstanceBase( EffectiveFeatures(), ibCount, (RandomSeed()>=0), false, true ); } // cerr << "add instance " << &CurrInst << endl; PartInstanceBase->AddInstance( CurrInst ); } if ( PartInstanceBase ){ // time_stamp( "Start Pruning: " ); // cerr << PartInstanceBase << endl; PartInstanceBase->Prune( TopTarget, 2 ); // time_stamp( "Finished Pruning: " ); // cerr << PartInstanceBase << endl; if ( !TmpInstanceBase->MergeSub( PartInstanceBase ) ){ FatalError( "Merging InstanceBases failed. 
PANIC" ); return false; } // cerr << "after Merge: intermediate result" << endl; // cerr << TmpInstanceBase << endl; delete PartInstanceBase; PartInstanceBase = 0; } else { // cerr << "Partial IB is empty" << endl; } } // time_stamp( "Start Final Pruning: " ); // cerr << TmpInstanceBase << endl; TmpInstanceBase->specialPrune( TopTarget ); // time_stamp( "Finished Final Pruning: " ); // cerr << TmpInstanceBase << endl; if ( !InstanceBase->MergeSub( TmpInstanceBase ) ){ FatalError( "Merging InstanceBases failed. PANIC" ); return false; } // cerr << "finale Merge gave" << endl; // cerr << InstanceBase << endl; delete TmpInstanceBase; } else { // cerr << "other case!" << endl; for ( const auto& fit : dit.second ){ for ( const auto& sit : fit.second ){ datafile.clear(); datafile.seekg( sit ); nextLine( datafile, Buffer ); chopLine( Buffer ); // Progress update. // if ( ( stats.dataLines() % Progress() ) == 0 ){ time_stamp( "Learning: ", stats.dataLines() ); } chopped_to_instance( TrainWords ); if ( !outInstanceBase ){ outInstanceBase = new IG_InstanceBase( EffectiveFeatures(), ibCount, (RandomSeed()>=0), false, true ); } // cerr << "add instance " << &CurrInst << endl; outInstanceBase->AddInstance( CurrInst ); } } if ( outInstanceBase ){ // cerr << "Out Instance Base" << endl; // time_stamp( "Start Pruning: " ); // cerr << outInstanceBase << endl; outInstanceBase->Prune( TopTarget ); // time_stamp( "Finished Pruning: " ); // cerr << outInstanceBase << endl; // time_stamp( "Before Merge: " ); // cerr << InstanceBase << endl; if ( !InstanceBase->MergeSub( outInstanceBase ) ){ FatalError( "Merging InstanceBases failed. 
PANIC" ); return false; } delete outInstanceBase; outInstanceBase = 0; } } } } } if ( !Verbosity(SILENT) ){ time_stamp( "Finished: ", stats.dataLines() ); } learnT.stop(); if ( !Verbosity(SILENT) ){ IBInfo( *mylog ); Info( "Learning took " + learnT.toString() ); } #ifdef IBSTATS cerr << "final mismatches: " << InstanceBase->mismatch << endl; #endif } return result; } bool IG_Experiment::checkLine( const UnicodeString& line ){ if ( TimblExperiment::checkLine( line ) ){ return sanityCheck(); } else { return false; } } bool IG_Experiment::sanityCheck() const { bool status = true; if ( IBStatus() != Pruned ){ Warning( "you tried to apply the IGTree algorithm on a complete," "(non-pruned) Instance Base" ); status = false; } if ( num_of_neighbors != 1 ){ Warning( "number of neighbors must be 1 for IGTree test!" ); status = false; } if ( decay_flag != Zero ){ Warning( "Decay impossible for IGTree test, (while k=1)" ); status = false; } if ( globalMetricOption != Overlap ){ Warning( "Metric must be Overlap for IGTree test." ); status = false; } return status; } const TargetValue *IG_Experiment::LocalClassify( const Instance& Inst, double& Distance, bool& exact ){ match_depth = -1; last_leaf = false; exact = false; bool Tie = false; initExperiment(); if ( !bestResult.reset( beamSize, normalisation, norm_factor, targets ) ){ Warning( "no normalisation possible because a BeamSize is specified\n" "output is NOT normalized!" 
); } const TargetValue *TV = NULL; const ClassDistribution *ResultDist; ResultDist = InstanceBase->IG_test( Inst, match_depth, last_leaf, TV ); if ( match_depth == 0 ){ // when level 0, ResultDist == TopDistribution TV = InstanceBase->TopTarget( Tie ); } Distance = sum_remaining_weights( match_depth ); if ( ResultDist && InstanceBase && InstanceBase->PersistentD() ){ if ( match_depth == 0 ) { bestResult.addTop( ResultDist, TV ); } else { bestResult.addConstant( ResultDist, TV ); } } if ( confusionInfo ){ confusionInfo->Increment( Inst.TV, TV ); } bool correct = Inst.TV && ( TV == Inst.TV ); if ( correct ){ stats.addCorrect(); if ( Tie ){ stats.addTieCorrect(); } } else if ( Tie ){ stats.addTieFailure(); } return TV; } void IG_Experiment::showTestingInfo( ostream& os ){ if ( !Verbosity(SILENT) ) { if ( Verbosity(OPTIONS) ){ ShowSettings( os ); } os << endl << "Starting to test, Testfile: " << testStreamName << endl << "Writing output in: " << outStreamName << endl << "Algorithm : IGTree" << endl; show_ignore_info( os ); show_weight_info( os ); os << endl; } } bool IG_Experiment::WriteInstanceBase( const string& FileName ){ bool result = false; if ( ConfirmOptions() ){ ofstream outfile( FileName, ios::out | ios::trunc ); if (!outfile) { Warning( "can't open outputfile: " + FileName ); } else { if ( !Verbosity(SILENT) ){ Info( "Writing Instance-Base in: " + FileName ); } if ( PutInstanceBase( outfile ) ){ string tmp = FileName; tmp += ".wgt"; ofstream wf( tmp ); if ( !wf ){ Error( "can't write default weightfile " + tmp ); } else if ( writeWeights( wf ) ){ if ( !Verbosity(SILENT) ){ Info( "Saving Weights in " + tmp ); } result = true; } } } } return result; } bool IG_Experiment::GetInstanceBase( istream& is ){ bool result = false; bool Pruned; bool Hashed; int Version; string range_buf; size_t numF = get_IB_Info( is, Pruned, Version, Hashed, range_buf ); if ( numF == 0 ){ return false; } else if ( !Pruned ){ Error( "Instance-base is NOT Pruned!, invalid for " + 
TiCC::toString(algorithm) + " Algorithm" ); } else { TreeOrder = DataFile; Initialize( numF ); if ( !get_ranges( range_buf ) ){ Warning( "couldn't retrieve ranges..." ); } else { srand( RandomSeed() ); InstanceBase = new IG_InstanceBase( EffectiveFeatures(), ibCount, (RandomSeed()>=0), Pruned, KeepDistributions() ); int pos=0; for ( size_t i=0; i < NumOfFeatures(); ++i ){ features[i]->SetWeight( 1.0 ); if ( features[features.permutation[i]]->Ignore() ){ features.perm_feats[i] = NULL; } else { features.perm_feats[pos++] = features[features.permutation[i]]; } } if ( Hashed ){ result = InstanceBase->ReadIB_hashed( is, features, targets, Version ); } else { result = InstanceBase->ReadIB( is, features, targets, Version ); } if ( result ){ if ( !InstanceBase->HasDistributions() ){ if ( KeepDistributions() ){ Error( "Instance base doesn't contain Distributions, " "+D option impossible" ); } else if ( Verbosity(DISTRIB) ){ Info( "Instance base doesn't contain Distributions, " "+vDB option disabled ...." 
); ResetVerbosityFlag(DISTRIB); } } } } } return result; } bool IG_Experiment::ReadInstanceBase( const string& FileName ){ bool result = false; if ( ConfirmOptions() ){ ifstream infile( FileName, ios::in ); if ( !infile ) { Error( "can't open: " + FileName ); } else { if ( !Verbosity(SILENT) ){ Info( "Reading Instance-Base from: " + FileName ); } if ( GetInstanceBase( infile ) ){ if ( !Verbosity(SILENT) ){ writePermutation( cout ); } string tmp = FileName; tmp += ".wgt"; ifstream wf( tmp ); if ( !wf ){ Error( "cant't find default weightsfile " + tmp ); } else if ( readWeights( wf, CurrentWeighting() ) ){ WFileName = tmp; if ( !Verbosity(SILENT) ){ Info( "Reading weights from " + tmp ); } } result = true; } } } return result; } } LanguageMachines-timbl-642727d/src/Instance.cxx000066400000000000000000000033571451477526200213770ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . 
For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #include #include "timbl/Types.h" #include "timbl/Instance.h" using namespace std; namespace Timbl { Instance::Instance(): TV(NULL), sample_weight(0.0), occ(1) { } Instance::~Instance(){ clear(); } void Instance::clear(){ for ( auto& it : FV ){ if ( it ){ if ( it->isUnknown() ){ delete it; } } it = 0; } TV = 0; sample_weight = 0.0; occ = 1; } void Instance::Init( size_t len ){ FV.resize( len, 0 ); } ostream& operator<<( ostream& os, const Instance *I ){ if ( I ){ os << *I; } else { os << " Empty Instance"; } return os; } ostream& operator<<( ostream& os, const Instance& I ){ for ( const auto& it : I.FV ){ os << it << ", "; } os << I.TV << " " << I.sample_weight; return os; } } LanguageMachines-timbl-642727d/src/LOOExperiment.cxx000066400000000000000000000116671451477526200223300ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . 
For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #include #include #include #include #include #include "timbl/MsgClass.h" #include "timbl/Common.h" #include "timbl/Types.h" #include "timbl/IBtree.h" #include "timbl/Instance.h" #include "timbl/MBLClass.h" #include "timbl/TimblExperiment.h" namespace Timbl { using namespace std; using namespace icu; void LOO_Experiment::initExperiment( bool all_vd ){ if ( !ExpInvalid() ){ if ( !MBL_init ){ // do this only when necessary initDecay(); if ( !is_copy ){ calculate_fv_entropy( true ); if ( initProbabilityArrays( all_vd ) ){ calculatePrestored(); } else { Error( "not enough memory for Probability Arrays in (" + string(__FILE__) + "," + TiCC::toString(__LINE__) + ")\n" + "ABORTING now" ); throw std::bad_alloc(); } InitWeights(); if ( do_diversify ){ diverseWeights(); } srand( random_seed ); } initTesters(); MBL_init = true; } } } bool LOO_Experiment::checkTestFile(){ // no need to test the Testfile // it is the same as the trainfile, so already checked if ( doSamples() ){ FatalError( "Cannot Leave One Out on a file with Examplar Weighting" ); return false; } return true; } void LOO_Experiment::showTestingInfo( ostream& os ){ if ( !Verbosity(SILENT) ){ if ( Verbosity(OPTIONS ) ){ ShowSettings( os ); } os << endl << "Starting to test using Leave One Out"; if ( Do_Sloppy_LOO() ) { os << " using SLOPPY metric calculations" << endl; } else { os << endl; } os << "Writing output in: " << outStreamName << endl << "Algorithm : LOO" << endl; show_metric_info( os ); show_weight_info( os ); os << decay << endl; } } bool LOO_Experiment::Test( const string& FileName, const string& OutFile ){ bool result = false; if ( initTestFiles( FileName, OutFile ) ){ if ( InstanceBase->nodeCount() == InstanceBase->depth() + 1 ){ // protect ourselves against 1-line trainfiles FatalError( "the file '" + FileName + "' contains only 1 usable line. LOO impossible!" 
); } initExperiment(); stats.clear(); delete confusionInfo; confusionInfo = 0; if ( Verbosity(ADVANCED_STATS) ){ confusionInfo = new ConfusionMatrix( targets.num_of_values() ); } showTestingInfo( *mylog ); // Start time. // time_t lStartTime; time(&lStartTime); timeval startTime; gettimeofday( &startTime, 0 ); if ( InputFormat() == ARFF ){ skipARFFHeader( testStream ); } UnicodeString Buffer; while ( nextLine( testStream, Buffer ) ){ if ( !chopLine( Buffer ) ){ Warning( "testfile, skipped line #" + TiCC::toString( stats.totalLines() ) + "\n" + TiCC::UnicodeToUTF8(Buffer) ); } else { chopped_to_instance( TestWords ); Decrement( CurrInst ); double final_distance = 0.0; bool exact = false; const TargetValue *ResultTarget = LocalClassify( CurrInst, final_distance, exact ); normalizeResult(); string dString = bestResult.getResult(); double confi = 0; if ( Verbosity(CONFIDENCE) ){ confi = confidence(); } // Write it to the output file for later analysis. show_results( outStream, confi, dString, ResultTarget, final_distance ); if ( exact ){ // remember that a perfect match may be incorrect! if ( Verbosity(EXACT) ){ *mylog << "Exacte match:\n" << get_org_input() << endl; } } if ( !Verbosity(SILENT) ){ // Display progress counter. show_progress( *mylog, lStartTime, stats.dataLines() ); } Increment( CurrInst ); } }// end while. 
if ( !Verbosity(SILENT) ){ time_stamp( "Ready: ", stats.dataLines() ); show_speed_summary( *mylog, startTime ); showStatistics( *mylog ); } result = true; } return result; } bool LOO_Experiment::ReadInstanceBase( const string& ){ Error( "cannot combine Leave One Out with retrieving an Instancebase " ); return false; } } LanguageMachines-timbl-642727d/src/MBLClass.cxx000066400000000000000000001622601451477526200212320ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . 
For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #include #include #include #include #include #include #include #include #include #include "ticcutils/StringOps.h" #include "ticcutils/PrettyPrint.h" #include "ticcutils/Timer.h" #include "ticcutils/UniHash.h" #include "timbl/MsgClass.h" #include "timbl/Common.h" #include "timbl/Types.h" #include "timbl/Options.h" #include "timbl/Instance.h" #include "timbl/IBtree.h" #include "timbl/BestArray.h" #include "timbl/Testers.h" #include "timbl/Metrics.h" #include "timbl/Choppers.h" #include "timbl/MBLClass.h" using namespace std; using namespace icu; using namespace nlohmann; namespace Timbl { using TiCC::operator<<; void MBLClass::init_options_table( size_t Size ){ if ( tableFilled ){ return; } else { tableFilled = true; } MaxFeatures = Size; UserOptions.resize(MaxFeatures+1); //cerr << "fill table() for " << (void*)this << endl; bool stat = Options.Add( new IntegerOption( "FLENGTH", &F_length, 0, 1, 32 ) ) && Options.Add( new SizeOption( "MAXBESTS", &MaxBests, 500, 10, 100000 ) ) && Options.Add( new SizeOption( "TRIBL_OFFSET", &tribl_offset, 0, 0, MaxFeatures ) ) && Options.Add( new UnsignedOption( "IG_THRESHOLD", &igThreshold, 1000, 0, std::numeric_limits::max() ) ) && Options.Add( new InputFormatOption( "INPUTFORMAT", &input_format, UnknownInputFormat ) ) && Options.Add( new OrdeningOption( "TREE_ORDER", &TreeOrder, UnknownOrdening ) ) && Options.Add( new BoolOption( "ALL_WEIGHTS", &need_all_weights, false ) ) && Options.Add( new WeightOption( "WEIGHTING", &Weighting, GR_w ) ) && Options.Add( new IntegerOption( "BIN_SIZE", &Bin_Size, 20, 2, 10000 ) ) && Options.Add( new UnsignedOption( "IB2_OFFSET", &ib2_offset, 0, 1, 10000000 ) ) && Options.Add( new BoolOption( "KEEP_DISTRIBUTIONS", &keep_distributions, false ) ) && Options.Add( new BoolOption( "DO_SLOPPY_LOO", &do_sloppy_loo, false ) ) && Options.Add( new SizeOption( "TARGET_POS", 
&target_pos, std::numeric_limits::max(), 0, MaxFeatures ) ); // cerr << "STAT 1=" << (stat?"true":"false") << endl; if ( stat ){ stat = Options.Add( new BoolOption( "DO_SILLY", &do_silly_testing, false ) ) && Options.Add( new BoolOption( "DO_DIVERSIFY", &do_diversify, false ) ) && Options.Add( new DecayOption( "DECAY", &decay_flag, Zero ) ) && Options.Add( new IntegerOption( "SEED", &random_seed, -1, -1, RAND_MAX ) ) && Options.Add( new IntegerOption( "BEAM_SIZE", &beamSize, 0, 1, INT_MAX ) ) && Options.Add( new RealOption( "DECAYPARAM_A", &decay_alfa, 1.0, 0.0, DBL_MAX ) ) && Options.Add( new RealOption( "DECAYPARAM_B", &decay_beta, 1.0, 0.0, DBL_MAX ) ) && Options.Add( new NormalisationOption( "NORMALISATION", &normalisation, noNorm ) ) && Options.Add( new RealOption( "NORM_FACTOR", &norm_factor, 1.0, Epsilon, DBL_MAX ) ) && Options.Add( new BoolOption( "EXEMPLAR_WEIGHTS", &do_sample_weighting, false ) ) && Options.Add( new BoolOption( "IGNORE_EXEMPLAR_WEIGHTS", &do_ignore_samples, true ) ) && Options.Add( new BoolOption( "NO_EXEMPLAR_WEIGHTS_TEST", &no_samples_test, true ) ) && Options.Add( new VerbosityOption( "VERBOSITY", &verbosity, NO_VERB ) ) && Options.Add( new BoolOption( "EXACT_MATCH", &do_exact_match, false ) ) && Options.Add( new BoolOption( "HASHED_TREE", &hashed_trees, true ) ) && Options.Add( new MetricOption( "GLOBAL_METRIC", &globalMetricOption, Overlap ) ) && Options.Add( new MetricArrayOption( "METRICS", UserOptions, globalMetricOption, MaxFeatures+1 ) ) && Options.Add( new IntegerOption( "MVD_LIMIT", &mvd_threshold, 1, 1, 100000 ) ) && Options.Add( new SizeOption( "NEIGHBORS", &num_of_neighbors, 1, 1, 100000 ) ) && Options.Add( new IntegerOption( "PROGRESS", &progress, 10000, 1, INT_MAX ) ) && Options.Add( new IntegerOption( "HANDLE_OCCURRENCES", &doOcc, 0, 0, 3 ) ) && Options.Add( new IntegerOption( "CLIP_FACTOR", &clip_factor, 10, 0, 1000000 ) ); } // cerr << "STAT 2=" << (stat?"true":"false") << endl; if ( !stat ){ FatalError( "Too many 
options for OptionTable" ); } } void MBLClass::InvalidMessage(void) const{ if ( err_cnt++ == 1 ){ Warning( "A preceding error prevents any operation on this " "Timbl Object\n" "other experiments might not be influenced" ); } else { Warning( "This Experiment is invalid due to errors" ); } } bool MBLClass::SetOption( const string& line ){ bool result = false; if ( !ExpInvalid(true) ){ // Info( "set Option:" + line ); enum SetOptRes opt_res = Options.SetOption( line ); switch ( opt_res ){ case Opt_OK: // OK MBL_init = false; // To assure redoing initializing stuff result = true; break; case Opt_Frozen: Warning( "SetOption '" + line + "' ignored.\nThis option may not " "be changed after an InstanceBase is already created" ); break; case Opt_Unknown: Warning( "SetOption '" + line + "' failed.\nOption unknown" ); break; case Opt_Ill_Val: Error( "SetOption '" + line + "' failed.\nIllegal value for this option" ); break; } } return result; } MBLClass::MBLClass( const string& name ): MsgClass(), sock_os(0), sock_is_json(false), targets(NULL), InstanceBase(NULL), mylog(&cout), myerr(&cerr), runningPhase(LearnWords), Weighting(GR_w), GlobalMetric(0), TreeOrder(UnknownOrdening), num_of_neighbors(1), dynamic_neighbors(false), decay_flag(Zero), exp_name( name ), MaxBests(500), decay(0), beamSize(0), normalisation(noNorm), norm_factor(1.0), is_copy(false), is_synced(false), ib2_offset(0), random_seed(-1), decay_alfa(1.0), decay_beta(1.0), MBL_init(false), tableFilled(false), globalMetricOption(Overlap), do_diversify(false), ChopInput(0), F_length(0), MaxFeatures(0), input_format(UnknownInputFormat), verbosity(NO_VERB), target_pos(std::numeric_limits::max()), clip_factor(10), Bin_Size(20), progress(10000), tribl_offset(0), igThreshold(1000), mvd_threshold(1), do_sloppy_loo(false), do_exact_match(false), do_silly_testing(false), hashed_trees(true), need_all_weights(false), do_sample_weighting(false), do_ignore_samples(true), no_samples_test(true), keep_distributions(false), 
DBEntropy(-1.0), tester(0), doOcc(0) { } MBLClass &MBLClass::operator=( const MBLClass& m ){ if ( this != &m ){ is_copy = true; is_synced = false; init_options_table( m.MaxFeatures ); F_length = m.F_length; MaxBests = m.MaxBests; TreeOrder = m.TreeOrder; decay_flag = m.decay_flag; input_format = m.input_format; random_seed = m.random_seed; beamSize = m.beamSize; decay_alfa = m.decay_alfa; decay_beta = m.decay_beta; normalisation = m.normalisation; norm_factor = m.norm_factor; do_sample_weighting = m.do_sample_weighting; do_ignore_samples = m.do_ignore_samples; no_samples_test = m.no_samples_test; keep_distributions = m.keep_distributions; verbosity = m.verbosity; do_exact_match = m.do_exact_match; sock_os = 0; sock_is_json = false; globalMetricOption = m.globalMetricOption; if ( m.GlobalMetric ){ GlobalMetric = getMetricClass( m.GlobalMetric->type() ); } UserOptions = m.UserOptions; mvd_threshold = m.mvd_threshold; num_of_neighbors = m.num_of_neighbors; dynamic_neighbors = m.dynamic_neighbors; target_pos = m.target_pos; progress = m.progress; Bin_Size = m.Bin_Size; tribl_offset = m.tribl_offset; ib2_offset = m.ib2_offset; clip_factor = m.clip_factor; runningPhase = m.runningPhase; Weighting = m.Weighting; do_sloppy_loo = m.do_sloppy_loo; do_silly_testing = m.do_silly_testing; do_diversify = m.do_diversify; tester = 0; decay = 0; targets = m.targets; features = m.features; MBL_init = false; need_all_weights = false; InstanceBase = m.InstanceBase->Copy(); DBEntropy = -1.0; ChopInput = 0; setInputFormat( m.input_format ); CurrInst.Init( NumOfFeatures() ); myerr = m.myerr; mylog = m.mylog; } return *this; } MBLClass::~MBLClass(){ // cerr << "MBLClass delete " << endl; CurrInst.clear(); if ( !is_copy ){ // cerr << "NO copy: also delete instancebase" << endl; delete InstanceBase; } else { if ( is_synced ){ // cerr << "is synced: also delete instancebase" << endl; delete InstanceBase; } else { // cerr << "only clean partition" << endl; InstanceBase->CleanPartition( false 
); } } delete GlobalMetric; delete tester; delete decay; delete ChopInput; } void MBLClass::Info( const string& out_line ) const { #pragma omp critical { // Info NEVER to socket ! if ( exp_name != "" ){ *mylog << "-" << exp_name << "-" << out_line << endl; } else { *mylog << out_line << endl; } } } void MBLClass::Warning( const string& out_line ) const { #pragma omp critical { if ( sock_os ){ if ( sock_is_json ){ json out_json; out_json["status"] = "error"; out_json["message"] = out_line; last_error = out_json; } else { *sock_os << "ERROR { " << out_line << " }" << endl; } } else { if ( exp_name != "" ){ *myerr << "Warning:-" << exp_name << "-" << out_line << endl; } else { *myerr << "Warning: " << out_line << endl; } } } } void MBLClass::Error( const string& out_line ) const { if ( sock_os ){ if ( sock_is_json ){ json out_json; out_json["status"] = "error"; out_json["message"] = out_line; last_error = out_json; } else { *sock_os << "ERROR { " << out_line << " }" << endl; } } else { if ( exp_name != "" ){ *myerr << "Error:-" << exp_name << "-" << out_line << endl; } else { *myerr << "Error: " << out_line << endl; } } ++err_cnt; } void MBLClass::FatalError( const string& out_line ) const { if ( sock_os ){ if ( sock_is_json ){ json out_json; out_json["status"] = "error"; out_json["message"] = out_line; last_error = out_json; } else { *sock_os << "ERROR { " << out_line << " }" << endl; } } else { if ( exp_name != "" ){ *myerr << "FatalError:-" << exp_name << "-" << out_line << endl; } else { *myerr << "FatalError: " << out_line << endl; } throw( runtime_error("Stopped") ); } } bool MBLClass::ShowOptions( ostream& os ) const { os << "Possible Experiment Settings (current value between []):" << endl; Options.Show_Options( os ); os << endl; return true; } bool MBLClass::ShowSettings( ostream& os ) const{ os << "Current Experiment Settings :" << endl; Options.Show_Settings( os ); os << endl; return true; } bool MBLClass::connectToSocket( ostream *ss, bool is_json ){ if ( 
sock_os ){ throw( logic_error( "connectToSocket:: already connected!" ) ); } else { sock_os = ss; if ( sock_os && sock_os->good() ){ sock_is_json = is_json; return true; } else { FatalError( "connecting streams to socket failed" ); } } return false; } xmlNode *MBLClass::settingsToXml() const{ ostringstream tmp; Options.Show_Settings( tmp ); vector lines = TiCC::split_at( tmp.str(), "\n" ); xmlNode *result = TiCC::XmlNewNode("settings"); for ( const auto& line : lines ){ vector parts = TiCC::split_at( line, ":" ); if ( parts.size() ==2 ){ string tag = TiCC::trim( parts[0] ); string val = TiCC::trim( parts[1] ); TiCC::XmlNewTextChild( result, tag, val ); } } return result; } json MBLClass::settings_to_JSON() const{ ostringstream tmp; Options.Show_Settings( tmp ); vector lines = TiCC::split_at( tmp.str(), "\n" ); json result; json arr = json::array(); for ( const auto& line : lines ){ vector parts = TiCC::split_at( line, ":" ); if ( parts.size() ==2 ){ string tag = TiCC::trim( parts[0] ); string val = TiCC::trim( parts[1] ); // a lot of values are integers, some float's // in fact they should be added as such, not as strings.... 
json element; element[tag] = val; arr.push_back( element ); } } result["settings"] = arr; return result; } bool MBLClass::ShowWeights( ostream &os ) const { if ( ExpInvalid() ){ return false; } else { int OldPrec = os.precision(DBL_DIG); size_t pos = 0; for ( auto const& feat : features.feats ){ os.precision(DBL_DIG); os << "Feature " << ++pos << "\t : " << feat->Weight() << endl; } os.precision(OldPrec); } return true; } string MBLClass::extract_limited_m( size_t lim ){ default_order(); set_order(); string result; MetricType gm = globalMetricOption; result += TiCC::toString( gm ); set ignore; map> metrics; for ( size_t k=0; k < NumOfFeatures(); ++k ){ if ( features[features.permutation[k]]->Ignore() ){ // cerr << "Add " << k+1 << " to ignore" << endl; ignore.insert(k+1); } else { MetricType m = features[features.permutation[k]]->getMetricType(); if ( m != gm ){ metrics[TiCC::toString( m )].insert(k+1); } } } for ( size_t i=lim+ignore.size(); i < NumOfFeatures(); ++i ){ ignore.insert( features.permutation[i]+1 ); } if ( !ignore.empty() ){ result += ":I"; for ( auto it = ignore.begin(); it != ignore.end(); ++it ){ size_t value = *it; size_t steps = 0; for ( ; value <= *ignore.rbegin(); ++value ){ if ( ignore.find(value) == ignore.end() ){ break; } ++steps; } if ( value == *it+1 ){ // so only one value, output it if ( *it != *ignore.begin() ){ result += ","; } result += TiCC::toString(*it) + ","; } else if ( value == *it+2 ){ // so only two values, output them , separated result += TiCC::toString(*it); ++it; result += "," + TiCC::toString(*it); } else { // a range. 
output with a hyphen result += TiCC::toString(*it) + "-" + TiCC::toString( value-1) + ","; for ( size_t j=0; j < steps-1;++j){ ++it; if ( it == ignore.end() ){ --it; break; } } } } result.pop_back(); } result += ":"; for ( const auto& it : metrics ){ bool first = true; for ( const auto& ig : it.second ){ if ( ignore.find( ig ) == ignore.end() ){ if ( first ){ result += it.first; first = false; } result += TiCC::toString( ig ) + ","; } } if ( result.back() == ',' ){ result.pop_back(); result.push_back(':'); } } while ( result.back() == ':' || result.back() == ',' ){ result.pop_back(); } return result; } void MBLClass::writePermutation( ostream& os ) const { os << "Feature Permutation based on " << ( Weighting==UserDefined_w?"weightfile":TiCC::toString(TreeOrder, true)) << " :" << endl; features.write_permutation( os ); os << endl; } void MBLClass::time_stamp( const char *line, int number ) const { if ( !Verbosity(SILENT) ){ ostringstream ostr; ostr << line; if ( number > -1 ){ ostr.width(6); ostr.setf(ios::right, ios::adjustfield); ostr << number << " @ "; } else { ostr << " "; } ostr << TiCC::Timer::now(); Info( ostr.str() ); } } void MBLClass::InitWeights(void){ for ( auto const& feat : features.feats ){ if ( feat->Ignore() ){ feat->SetWeight( 0.0 ); } else { switch ( Weighting ){ case IG_w: feat->SetWeight( feat->InfoGain() ); break; case GR_w: feat->SetWeight( feat->GainRatio() ); break; case X2_w: feat->SetWeight( feat->ChiSquare() ); break; case SV_w: feat->SetWeight( feat->SharedVariance() ); break; case SD_w: feat->SetWeight( feat->StandardDeviation() ); break; case UserDefined_w: break; case No_w: feat->SetWeight( 1.0 ); break; case Unknown_w: case Max_w: FatalError( "InitWeights: Invalid Weight in switch: " + TiCC::toString( Weighting ) ); break; } } } } void MBLClass::diverseWeights(void){ double minW = DBL_MAX; for ( auto const& feat : features.feats ){ if ( feat->Ignore() ){ continue; } if ( feat->Weight() < minW ){ minW = feat->Weight(); } } for ( auto 
const& feat : features.feats ){ if ( feat->Ignore() ){ continue; } feat->SetWeight( (feat->Weight() - minW ) + Epsilon ); } } void MBLClass::default_order(){ if ( TreeOrder == UnknownOrdening ){ switch ( Weighting ){ case GR_w: TreeOrder = GROrder; break; case IG_w: TreeOrder = IGOrder; break; case X2_w: TreeOrder = X2Order; break; case SV_w: TreeOrder = SVOrder; break; case SD_w: TreeOrder = SDOrder; break; case No_w: TreeOrder = NoOrder; break; case UserDefined_w: TreeOrder = GROrder; break; default: FatalError( "Illegal Weighting Value in Switch: " + TiCC::toString( Weighting ) ); break; } } } void MBLClass::set_order(){ calculate_fv_entropy(false); vector Order(NumOfFeatures()); size_t i = 0; for ( auto const& feat : features.feats ){ switch( TreeOrder ){ case DataFile: Order[i] = feat->Weight(); break; case NoOrder: Order[i] = (double)(NumOfFeatures()-i); break; case IGOrder: Order[i] = feat->InfoGain(); break; case GROrder: Order[i] = feat->GainRatio(); break; case IGEntropyOrder: Order[i] = feat->InfoGain() * feat->SplitInfo(); break; case GREntropyOrder: Order[i] = feat->GainRatio() * feat->SplitInfo(); break; case X2Order: Order[i] = feat->ChiSquare(); break; case SVOrder: Order[i] = feat->SharedVariance(); break; case SDOrder: Order[i] = feat->StandardDeviation(); break; case OneoverFeature: Order[i] = 1.0 / feat->values_array.size(); break; case GRoverFeature: Order[i] = feat->GainRatio() / feat->values_array.size(); break; case IGoverFeature: Order[i] = feat->InfoGain() / feat->values_array.size(); break; case X2overFeature: Order[i] = feat->ChiSquare() / feat->values_array.size(); break; case SVoverFeature: Order[i] = feat->SharedVariance() / feat->values_array.size(); break; case SDoverFeature: Order[i] = feat->StandardDeviation() / feat->values_array.size(); break; case OneoverSplitInfo: Order[i] = 1.0 / feat->SplitInfo(); break; case UnknownOrdening: case MaxOrdening: FatalError( "Setorder: Illegal Order Value in Switch: " + TiCC::toString( 
TreeOrder ) ); break; } ++i; } features.calculate_permutation( Order ); if ( !Verbosity(SILENT) ){ writePermutation( *mylog ); } } void MBLClass::MatrixInfo( ostream& os ) const { unsigned int TotalCount = 0; bool dummy; size_t m = 1; for ( const auto& feat : features.feats ){ if ( !feat->Ignore() && feat->isStorableMetric() && feat->matrixPresent( dummy ) ){ unsigned int Count = feat->matrix_byte_size(); os << "Size of value-matrix[" << m << "] = " << Count << " Bytes " << endl; TotalCount += Count; } ++m; } if ( TotalCount ){ os << "Total Size of value-matrices " << TotalCount << " Bytes " << endl << endl; } } bool MBLClass::readMatrices( istream& is ){ string line; bool skip = false; bool anything = false; while ( getline( is, line ) ){ line = TiCC::trim( line ); if ( line.empty() ){ continue; } if ( line.compare( 0, 7, "Feature" ) != 0 ){ if ( skip ){ continue; } else { return false; } } else { skip = false; line = line.substr( 8 ); string::size_type pos = line.find_first_not_of("0123456789"); string nums = line.substr( 0, pos ); size_t num; if ( !TiCC::stringTo( nums, num ) ){ FatalError( "no feature index found in the inputfile" ); } else { if ( pos == string::npos ){ line = ""; } else { line = TiCC::trim( line.substr( pos ) ); } if ( line.empty() ){ if ( !features[num-1]->isStorableMetric() ){ Warning( "Ignoring entry for feature " + nums + " which is NOT set to a storable metric type." 
+ " use -m commandline option to set metrics" ); skip = true; } else if ( !features[num-1]->fill_matrix( is ) ){ return false; } else { Info( "read ValueMatrix for feature " + nums ); anything = true; } } } } } if ( !anything ){ Error( "NO metric values found" ); return false; } return true; } bool MBLClass::writeMatrices( ostream& os ) const { size_t pos = 0; for ( const auto& feat : features.feats ){ os << "Feature " << ++pos; bool dummy; if ( !feat->matrixPresent( dummy ) ){ os << " not available.\n" << endl; } else { os << endl; feat->print_matrix( os ); } } return os.good(); } bool MBLClass::readArrays( istream& is ){ bool result = true; size_t num; size_t index = 1; string buf; char kar; do { is >> ws >> buf; if ( compare_nocase_n( "feature", buf ) ){ is >> ws >> kar; // skip # if ( kar != '#' ){ Error( "Input out-of-sync, a '#' was expected" ); result = false; } else { is >> num; if ( num != index ){ Error( "Wrong feature number " + TiCC::toString(num) + " in file, " + TiCC::toString(index) + " expected" ); result = false; } else if ( index > NumOfFeatures() ){ Error( "Too many features matrices in this file " ); result = false; } else { is >> ws >> buf; if ( compare_nocase_n( "Ignored", buf ) ){ if ( features[index-1]->Ignore() ){ ++index; continue; } else { Error( "Feature #" + TiCC::toString(index) + " may not be ignored..."); result = false; } } else if ( compare_nocase_n( "Numeric", buf ) ){ if ( features[index-1]->isNumerical() ){ ++index; continue; } else { Error( "Feature #" + TiCC::toString(index) + " is not Numeric..." 
); result = false; } } else if ( !compare_nocase_n( "Matrix", buf ) ){ Error( "Problem in Probability file, missing matrix info" ); result = false; } else if ( features[index-1]->Ignore() || features[index-1]->isNumerical() ){ Warning( "Matrix info found for feature #" + TiCC::toString(index) + " (skipped)" ); ++index; } else { is.ignore( std::numeric_limits::max(), '\n' ); result = features[index-1]->read_vc_pb_array( is ); ++index; } } } } } while ( result && !is.eof() && !is.bad() ); if ( index < NumOfFeatures()+1 ){ Error( "Not enough features matrices in this file " ); result = false; } return result; } bool MBLClass::writeArrays( ostream& os ) { if ( ExpInvalid() ){ return false; } else if ( !initProbabilityArrays( false ) ){ Warning( "couldn't Calculate probability Arrays's" ); return false; } else { // Print the possible classes. // os << "Targets : "; for ( const auto& it : targets.values_array ){ os << it; if ( &it != &targets.values_array.back() ){ os << ","; } } os << "." << endl << endl; size_t pos = 0; for ( const auto& feat : features.feats ){ os << "feature # " << ++pos ; if ( feat->Ignore() ){ os << " Ignored, (-s option)" << endl; } else if ( feat->isNumerical() ){ os << " Numeric, (-N option)" << endl; } else { os << " Matrix: " << endl; feat->print_vc_pb_array( os ); os << endl; } } return true; } } bool MBLClass::allocate_arrays(){ size_t Dim = targets.values_array.size(); for ( const auto& feat : features.feats ){ if ( !feat->Ignore() && !feat->isNumerical() ) { if ( !feat->AllocSparseArrays( Dim ) ){ return false; } } } return true; } bool MBLClass::initProbabilityArrays( bool force ){ bool result = true; if ( !is_copy ){ result = allocate_arrays(); if ( result ){ for ( const auto& feat : features.feats ){ if ( !feat->Ignore() && !feat->isNumerical() ){ feat->ClipFreq( (int)rint(clip_factor * log((double)feat->EffectiveValues()))); if ( !feat->ArrayRead() && ( force || feat->isStorableMetric() ) ){ feat->InitSparseArrays(); } } } // j } } 
return result; } /* For mvd metric. */ void MBLClass::calculatePrestored(){ if ( !is_copy ){ for ( size_t j = tribl_offset; j < EffectiveFeatures(); ++j ) { if ( !features.perm_feats[j]->Ignore() && features.perm_feats[j]->isStorableMetric() ){ features.perm_feats[j]->store_matrix( mvd_threshold ); } } if ( Verbosity(VD_MATRIX) ){ size_t pos = 0; for ( auto const& feat : features.feats ){ ++pos; if ( !feat->Ignore() ){ bool dummy; *mylog << "Value Difference matrix of feature # " << pos << endl; if ( feat->matrixPresent( dummy ) ){ feat->print_matrix( *mylog, true ); *mylog << endl; } else { *mylog << "Not available." << endl; } } } } } } const Instance *MBLClass::chopped_to_instance( PhaseValue phase ){ CurrInst.clear(); if ( NumOfFeatures() != target_pos ) { ChopInput->swapTarget( target_pos ); } int occ = ChopInput->getOcc(); if ( occ > 1 ){ CurrInst.Occurrences( occ ); } switch ( phase ){ case LearnWords: // Add the target. CurrInst.TV = targets.add_value( ChopInput->getField( NumOfFeatures() ), occ ); // Now add the Feature values. for ( size_t i = 0; i < NumOfFeatures(); ++i ){ // when learning, no need to bother about Permutation if ( features[i]->Ignore() ) { // but this might happen, take care! CurrInst.FV[i] = NULL; } else { // Add it to the Instance. 
// cerr << "Feature add: " << ChopInput->getField(i) << endl; CurrInst.FV[i] = features[i]->add_value( ChopInput->getField(i), CurrInst.TV, occ ); } } // i // cerr << "new instance: " << CurrInst << endl; break; case TrainWords: // Lookup for TreeBuilding // First the Features for ( size_t k = 0; k < EffectiveFeatures(); ++k ){ size_t j = features.permutation[k]; CurrInst.FV[k] = features[j]->Lookup( ChopInput->getField(j) ); } // k // and the Target CurrInst.TV = targets.Lookup( ChopInput->getField( NumOfFeatures() ) ); break; case TrainLearnWords: // Lookup for Incremental TreeBuilding // Assumes that somehow Permutation and effective_feats are known // First the Target CurrInst.TV = targets.add_value( (*ChopInput)[NumOfFeatures()], occ ); // Then the Features for ( size_t l = 0; l < EffectiveFeatures(); ++l ){ size_t j = features.permutation[l]; CurrInst.FV[l] = features[j]->add_value((*ChopInput)[j], CurrInst.TV, occ ); } // for l break; case TestWords: // Lookup for Testing // This might fail for unknown values, then we create a dummy value for ( size_t m = 0; m < EffectiveFeatures(); ++m ){ size_t j = features.permutation[m]; const UnicodeString& fld = ChopInput->getField(j); CurrInst.FV[m] = features[j]->Lookup( fld ); if ( !CurrInst.FV[m] ){ // for "unknown" values have to add a dummy value CurrInst.FV[m] = new FeatureValue( fld ); } } // i // the last string is the target CurrInst.TV = targets.Lookup( ChopInput->getField(NumOfFeatures()) ); break; default: FatalError( "Wrong value in Switch: " + TiCC::toString(phase) ); } if ( ( phase != TestWords ) && doSamples() ){ double exW = ChopInput->getExW(); if ( exW < 0 ){ exW = 1.0; } CurrInst.ExemplarWeight( exW ); } return &CurrInst; } bool empty_line( const UnicodeString& Line, const InputFormatType IF ){ // determine wether Line is empty or a commentline bool result = ( Line.isEmpty() || ( IF == ARFF && // ARFF "comment" ( Line[0] == '%' || Line[0] == '@' ) ) ); if ( result ){ for ( int i=0; i < 
Line.length();++i ){ if ( !u_isspace( Line[i] ) ){ return false; } } } return result; } UnicodeString MBLClass::get_org_input( ) const { return ChopInput->getString(); } void MBLClass::LearningInfo( ostream& os ) { if ( !ExpInvalid() && !Verbosity(SILENT) ){ calculate_fv_entropy( !MBL_init ); os.setf(ios::showpoint ); int OldPrec = os.precision(8); os << "DB Entropy : " << DBEntropy << endl; os << "Number of Classes : " << targets.EffectiveValues() << endl; os << endl; if ( Verbosity(FEAT_W) ){ if ( CurrentWeighting() == SD_w ){ os << "Feats\tVals\tStandard Deviation" << endl; size_t pos = 0; for ( const auto& feat : features.feats ){ os << setw(5) << ++pos; os.setf(ios::right, ios::adjustfield); if ( feat->Ignore() ){ os << " (ignored) " << endl; } else { os.setf(ios::right, ios::adjustfield); os << setw(7) << feat->EffectiveValues() << "\t" << feat->StandardDeviation(); if ( feat->isNumerical() ){ os << " NUMERIC"; } os << endl; } } os << endl; os.precision(OldPrec); } else if ( need_all_weights ){ os << "Feats\tVals\tX-square\tVariance\tInfoGain\tGainRatio" << endl; size_t pos = 0; for ( const auto& feat : features.feats ) { os << setw(5) << ++pos; os.setf(ios::right, ios::adjustfield); if ( feat->Ignore() ){ os << " (ignored) " << endl; } else { os.setf(ios::right, ios::adjustfield); os << setw(7) << feat->EffectiveValues() << "\t" << feat->ChiSquare() << "\t" << feat->SharedVariance() << "\t" << feat->InfoGain() << "\t" << feat->GainRatio(); if ( feat->isNumerical() ){ os << " NUMERIC"; } os << endl; } } os << endl; os.precision(OldPrec); } else { os << "Feats\tVals\tInfoGain\tGainRatio" << endl; size_t pos = 0; for ( const auto& feat : features.feats ) { os << setw(5) << ++pos; os.setf(ios::right, ios::adjustfield); if ( feat->Ignore() ){ os << " (ignored) " << endl; } else { os.setf(ios::right, ios::adjustfield); os << setw(7) << feat->EffectiveValues() << "\t" << feat->InfoGain() << "\t" << feat->GainRatio(); if ( feat->isNumerical() ){ os << " NUMERIC"; } 
os << endl; } } os << endl; os.precision(OldPrec); } } } } bool MBLClass::writeWeights( ostream& os ) const { bool result = false; if ( !ExpInvalid() ){ if ( features[0] == NULL ){ Warning( "unable to save Weights, nothing learned yet" ); } else { os << "# DB Entropy: " << DBEntropy << endl; os << "# Classes: " << targets.values_array.size() << endl; os << "# Lines of data: " << targets.TotalValues() << endl; int OldPrec = os.precision(DBL_DIG); if ( CurrentWeighting() == SD_w ){ os << "#" << endl; os << "# " << TiCC::toString( SD_w ) << endl; os << "# Fea." << "\t" << "Weight" << endl; size_t pos = 0; for ( const auto& feat : features.feats ){ os.precision(DBL_DIG); os << ++pos << "\t"; if ( feat->Ignore() ){ os << "Ignore" << endl; } else { os << feat->StandardDeviation() << endl; } } os << "#" << endl; } else { os << "# " << TiCC::toString( No_w ) << endl; os << "# Fea." << "\t" << "Weight" << endl; size_t pos = 0; for ( const auto& feat : features.feats ){ os.precision(DBL_DIG); os << ++pos << "\t"; if ( feat->Ignore() ){ os << "Ignore" << endl; } else { os << 1.0 << endl; } } os << "#" << endl; os << "# " << TiCC::toString( GR_w ) << endl; os << "# Fea." << "\t" << "Weight" << endl; pos = 0; for ( const auto& feat : features.feats ){ os.precision(DBL_DIG); os << ++pos << "\t"; if ( feat->Ignore() ){ os << "Ignore" << endl; } else { os << feat->GainRatio() << endl; } } os << "#" << endl; os << "# " << TiCC::toString( IG_w ) << endl; os << "# Fea." << "\t" << "Weight" << endl; pos = 0; for ( const auto& feat : features.feats ){ os.precision(DBL_DIG); os << ++pos << "\t"; if ( feat->Ignore() ){ os << "Ignore" << endl; } else { os << feat->InfoGain() << endl; } } if ( need_all_weights ){ os << "#" << endl; os << "# " << TiCC::toString( SV_w ) << endl; os << "# Fea." 
<< "\t" << "Weight" << endl; pos = 0; for ( const auto& feat : features.feats ){ os.precision(DBL_DIG); os << ++pos << "\t"; if ( feat->Ignore() ){ os << "Ignore" << endl; } else { os << feat->SharedVariance() << endl; } } os << "#" << endl; os << "# " << TiCC::toString( X2_w ) << endl; os << "# Fea." << "\t" << "Weight" << endl; pos = 0; for ( const auto& feat : features.feats ){ os.precision(DBL_DIG); os << ++pos << "\t"; if ( feat->Ignore() ){ os << "Ignore" << endl; } else { os << feat->ChiSquare() << endl; } } os << "#" << endl; } } os.precision(OldPrec); result = true; } } return result; } bool MBLClass::read_the_vals( istream& is ){ vector done( NumOfFeatures(), false );; string Buffer; while ( getline( is, Buffer) ){ if ( !Buffer.empty() ){ if ( Buffer[0] == '#'){ break; } // Line looks like: // 28 0.445481 // or: // 13 Ignore // vector vals = TiCC::split( Buffer ); if ( vals.size() == 2 ){ size_t i_f = TiCC::stringTo( vals[0] ); if ( i_f > NumOfFeatures() ){ Error( "in weightsfile, Feature index > Maximum, (" + TiCC::toString(NumOfFeatures()) + ")" ); } else if ( done[i_f-1] ){ Error( "in weightsfile, Feature index " + vals[0] + " is mentioned twice" ); } else { done[i_f-1] = true; if ( !compare_nocase( vals[1], "Ignore" ) ){ double w; if ( !TiCC::stringTo( vals[1], w ) ){ Error( "in weightsfile, Feature " + vals[0] + " has illegal value: " + vals[1] ); } else { features[i_f-1]->SetWeight( w ); if ( features[i_f-1]->Ignore() ){ Warning( "in weightsfile, " "Feature " + vals[0] + " has value: " + TiCC::toString( w ) + " assigned, but will be ignored" ); } } } else { features[i_f-1]->SetWeight( 0.0 ); if ( !features[i_f-1]->Ignore() ){ Warning( "in weightsfile, Feature " + vals[0] + " has value: 'Ignore', we will use: 0.0 " ); } } } } } } bool result = true; for ( size_t j=0; j < NumOfFeatures(); ++j ){ if ( !done[j] ) { Error( "in weightsfile, Feature index " + TiCC::toString(j+1) + " is not mentioned" ); result = false; } } return result; } bool 
MBLClass::readWeights( istream& is, WeightType wanted ){ if ( !ExpInvalid() ){ bool old_style = true; bool result = false; string Buffer; while( getline( is, Buffer ) ) { // A comment starts with '#' // if ( Buffer.empty() ){ continue; } else { if ( Buffer[0] == '#'){ vector vals = TiCC::split_at( Buffer, " " ); if ( vals.size() == 2 ){ WeightType tmp_w = Unknown_w; if ( !TiCC::stringTo( vals[1], tmp_w ) ){ continue; } else { old_style = false; if ( tmp_w == wanted ){ getline( is, Buffer ); result = read_the_vals( is ); break; } } } } } } if ( is.eof() ){ if ( old_style ){ // wanted weighting not found // Old style weightsfile? // Warning( "Old Style weightsfile. Please update" ); is.clear(); is.seekg(0); size_t pos = 0; while( getline( is, Buffer ) ) { // A comment starts with '#' // if ( Buffer.empty() ){ pos = is.tellg(); continue; } else { if ( Buffer[0] == '#'){ pos = is.tellg(); continue; } is.seekg(pos); result = read_the_vals( is ); break; } } } } if ( !result ){ Warning( "Unable to retrieve " + TiCC::toString( wanted ) + " Weights" ); Warning( "unable to continue" ); return false; } // make shure all weights are correct // Paranoid? 
for ( const auto& feat : features.feats ){ feat->InfoGain( feat->Weight() ); feat->GainRatio( feat->Weight() ); feat->ChiSquare( feat->Weight() ); feat->SharedVariance( feat->Weight() ); feat->StandardDeviation( 0.0 ); } Weighting = UserDefined_w; } return true; } bool MBLClass::recalculate_stats( Feature_List& feats, vector& feat_status, bool check_change ){ bool changed = false; for ( size_t g = 0; g < NumOfFeatures(); ++g ) { feat_status[g] = Unknown; if ( feats.feats[g]->Ignore() ){ continue; } bool metricChanged = false; MetricType TmpMetricType = UserOptions[g+1]; metricClass *tmpMetric = getMetricClass( TmpMetricType ); if ( tmpMetric->isNumerical() ){ feat_status[g] = feats[g]->prepare_numeric_stats(); if ( feat_status[g] == SingletonNumeric && input_format == SparseBin && GlobalMetric->isSimilarityMetric( ) ){ // ok } else if ( feat_status[g] != NumericValue ){ if ( GlobalMetric->isNumerical() ){ TmpMetricType = Overlap; } else { TmpMetricType = globalMetricOption; } } } else if ( feats[g]->values_array.size() == 1 ){ feat_status[g] = Singleton; } delete tmpMetric; if ( check_change ){ bool isRead; if ( feats.feats[g]->metric && feats.feats[g]->getMetricType() != TmpMetricType && feats.feats[g]->isStorableMetric() && feats.feats[g]->matrixPresent( isRead ) && isRead ){ Error( "The metric " + TiCC::toString(feats.feats[g]->getMetricType()) + " for feature " + TiCC::toString( g+1 ) + " is set from a file. It cannot be changed!" 
); abort(); } metricChanged = !feats.feats[g]->setMetricType(TmpMetricType); } if ( metricChanged ){ changed = true; } } // end g return changed; } void MBLClass::calculate_fv_entropy( bool always ){ bool realy_first = DBEntropy < 0.0; bool redo = always || realy_first; if ( redo ){ // if it's the first time (DBEntropy == 0 ) or // if always, we have to (re)calculate everything double Entropy = 0.0; // first get the Database Entropy size_t totval = targets.TotalValues(); for ( const auto& it : targets.values_array ){ double Ratio = it->ValFreq() / (double)totval; if ( Ratio > 0 ){ Entropy += Ratio * Log2(Ratio); } } DBEntropy = fabs(-Entropy); allocate_arrays(); // create ValueClassProb arrays.. } // Loop over the Features, see if the numerics are non-singular // and do the statistics for those features where the metric is changed. vector feat_status(NumOfFeatures()); bool changed = recalculate_stats( features, feat_status, redo ); if ( ( CurrentWeighting() == SD_w || GlobalMetric->isSimilarityMetric() ) && changed ){ // check to see if ALL features are still Numeric. // otherwise we can't do Standard Deviation weighting, // or Similarity Metrics! 
bool first = true; string str1; for ( size_t ff = 0; ff < NumOfFeatures(); ++ff ){ if ( feat_status[ff] == NotNumeric ){ if ( first ){ str1 += "The following feature(s) have non numeric value: "; first = false; } else { str1 += ", "; } size_t n = ff; while ( ff < NumOfFeatures()-1 && feat_status[ff+1] == NotNumeric ){ ++ff; } if ( n != ff ){ str1 += to_string(n+1) + "-" + to_string(ff+1); } else { str1 + to_string(ff+1); } } } if ( !first ){ Error( str1 ); if ( GlobalMetric->isSimilarityMetric() ){ Error( "Therefore InnerProduct/Cosine operations are impossible" ); } else { Error( "Therefore " + TiCC::toString(CurrentWeighting()) + " weighting is impossible" ); } return; } } // Give a warning for singular features, except when it's // a result of a forced recalculation if ( realy_first ){ bool first = true; string str1; for ( size_t ff = 0; ff < NumOfFeatures(); ++ff ) { if ( feat_status[ff] == Singleton || feat_status[ff] == SingletonNumeric ){ if ( first ){ str1 += "The following feature(s) have only 1 value: "; first = false; } else { str1 += ", "; } size_t n = ff; while ( ff < NumOfFeatures()-1 && ( feat_status[ff+1] == Singleton || feat_status[ff+1] == SingletonNumeric ) ){ ++ff; } if ( n != ff ){ str1 += to_string(n+1) + "-" + to_string(ff+1); } else { str1 += to_string(ff+1); } } } if ( !first && !is_copy ){ Warning( str1 ); } string str2; first = true; for ( size_t ff = 0; ff < NumOfFeatures(); ++ff ){ if ( feat_status[ff] == NotNumeric ){ if ( first ){ str2 += "The following feature(s) contained non-numeric values and" "\nwill be treated as NON-Numeric: "; first = false; } else { str2 += ", "; } size_t n = ff; while ( ff < NumOfFeatures()-1 && feat_status[ff+1] == NotNumeric ) ff++; if ( n != ff ){ str2 += to_string(n+1) + "-" + to_string(ff+1); } else { str2 += to_string(ff+1); } } } if ( !first ){ Warning( str2 ); } } if ( redo ){ for ( const auto& feat : features.feats ){ if ( Weighting != UserDefined_w ){ if ( CurrentWeighting() == SD_w ){ 
feat->StandardDeviationStatistics( ); } else if ( feat->isNumerical() ){ feat->NumStatistics( DBEntropy, targets, Bin_Size, need_all_weights ); } else { feat->Statistics( DBEntropy, targets, need_all_weights ); } } } } } bool MBLClass::writeNamesFile( ostream& os ) const { bool result = true; if ( ExpInvalid() ){ result = false; } else { // Print the possible classes. // for ( const auto& it : targets.values_array ){ os << it; if ( &it != &targets.values_array.back() ){ os << ","; } } os << "." << endl << endl; size_t pos = 0; for ( auto const& feat : features.feats ){ os << "a" << ++pos << ": "; if ( feat->Ignore() ){ os << "Ignore" << endl; } else if ( feat->isNumerical() ){ os << "Numeric" << endl; } else { // Loop over the values. // for ( const auto& val : feat->values_array ){ os << val; if ( &val != &feat->values_array.back() ){ os << ","; } } os << "." << endl; } } } return result; } bool MBLClass::Chop( const UnicodeString& line ) { try { return ChopInput->chop( line, NumOfFeatures() ); } catch ( const exception& e ){ Warning( e.what() ); return false; } } bool MBLClass::setInputFormat( const InputFormatType IF ){ if ( ChopInput ){ delete ChopInput; ChopInput = 0; } ChopInput = Chopper::create( IF, chopExamples(), F_length, chopOcc() ); if ( ChopInput ){ input_format = IF; return true; } return false; } const ClassDistribution *MBLClass::ExactMatch( const Instance& inst ) const { const ClassDistribution *result = NULL; if ( !GlobalMetric->isSimilarityMetric() && ( do_exact_match || ( num_of_neighbors == 1 && !( Verbosity( NEAR_N | ALL_K) ) ) ) ){ result = InstanceBase->ExactMatch( inst ); } return result; } double MBLClass::getBestDistance() const { return nSet.bestDistance(); } WClassDistribution *MBLClass::getBestDistribution( unsigned int k ){ return nSet.bestDistribution( decay, k ); } UnicodeString MBLClass::formatInstance( const vector& OrgFV, const vector& RedFV, size_t OffSet, size_t Size ) const { UnicodeString result; Instance inst( Size ); for ( 
size_t i=0; i< OffSet; ++i ){ inst.FV[i] = OrgFV[i]; } for ( size_t j=OffSet; j< Size; ++j ){ inst.FV[j] = RedFV[j-OffSet]; } vector InvPerm(NumOfFeatures(),0); for ( size_t i=0; i< NumOfFeatures(); ++i ){ InvPerm[features.permutation[i]] = i; } for ( size_t j=0; j< NumOfFeatures(); ++j ){ switch ( input_format ) { case C4_5: // fall through case ARFF: if ( features[j]->Ignore() ){ result += "-*-,"; } else { result += inst.FV[InvPerm[j]]->name() + ","; } break; case Sparse: if ( inst.FV[InvPerm[j]]->name() != DefaultSparseString ){ result += "(" + TiCC::toUnicodeString(j+1) + "," + CodeToStr( inst.FV[InvPerm[j]]->name() ) + ")"; } break; case SparseBin: if ( inst.FV[InvPerm[j]]->name()[0] == '1' ){ result += TiCC::toUnicodeString( j+1 ) + ","; } break; case Columns: if ( features[j]->Ignore() ){ result += "-*- "; } else { result += inst.FV[InvPerm[j]]->name() + " "; } break; case Tabbed: if ( features[j]->Ignore() ){ result += "-*- "; } else { result += inst.FV[InvPerm[j]]->name() + "\t"; } break; default: if ( features[j]->Ignore() ){ result += UnicodeString( F_length, '*', F_length ); } else { result += inst.FV[InvPerm[j]]->name(); } break; } } return result; } inline double WeightFun( double D, double W ){ return D / (W + Common::Epsilon); } void MBLClass::test_instance_ex( const Instance& Inst, InstanceBase_base *IB, size_t ib_offset ){ vector CurrentFV(NumOfFeatures()); const ClassDistribution *best_distrib = IB->InitGraphTest( CurrentFV, &Inst.FV, ib_offset, EffectiveFeatures() ); if ( !best_distrib ){ // no use to do more work then return; } tester->init( Inst, EffectiveFeatures(), ib_offset ); auto lastpos = best_distrib->begin(); Vfield *Bpnt = lastpos->second; size_t EffFeat = EffectiveFeatures() - ib_offset; size_t CurPos = 0; while ( Bpnt ) { // call test() with a maximum threshold, to prevent stepping out early size_t EndPos = tester->test( CurrentFV, CurPos, DBL_MAX ); if ( EndPos != EffFeat ){ throw( logic_error( "Exemplar testing: test should not 
stop before last feature" ) ); } ClassDistribution ResultDist; ResultDist.SetFreq( Bpnt->Value(), Bpnt->Freq() ); UnicodeString origI; if ( Verbosity(NEAR_N) ){ origI = formatInstance( Inst.FV, CurrentFV, ib_offset, NumOfFeatures() ); } double Distance = WeightFun( tester->getDistance(EndPos), Bpnt->Weight() ); bestArray.addResult( Distance, &ResultDist, origI ); CurPos = EndPos-1; ++lastpos; if ( lastpos != best_distrib->end() ){ Bpnt = lastpos->second; } else { best_distrib = IB->NextGraphTest( CurrentFV, CurPos ); Bpnt = NULL; if ( best_distrib ){ lastpos = best_distrib->begin(); if ( lastpos != best_distrib->end() ){ Bpnt = lastpos->second; } } } } } void MBLClass::initDecay(){ if ( decay ){ delete decay; decay = 0; } switch ( decay_flag ){ case InvDist: decay = new invDistDecay(); break; case InvLinear: decay = new invLinDecay(); break; case ExpDecay: decay = new expDecay( decay_alfa, decay_beta ); break; case Zero: // fall through default: break; } } void MBLClass::initTesters() { delete GlobalMetric; GlobalMetric = getMetricClass( globalMetricOption ); delete tester; tester = getTester( globalMetricOption, features, mvd_threshold ); } void MBLClass::test_instance( const Instance& Inst, InstanceBase_base *IB, size_t ib_offset ){ vector CurrentFV(NumOfFeatures()); double Threshold = DBL_MAX; size_t EffFeat = EffectiveFeatures() - ib_offset; const ClassDistribution *best_distrib = IB->InitGraphTest( CurrentFV, &Inst.FV, ib_offset, EffectiveFeatures() ); tester->init( Inst, EffectiveFeatures(), ib_offset ); size_t CurPos = 0; while ( best_distrib ){ size_t EndPos = tester->test( CurrentFV, CurPos, Threshold + Epsilon ); if ( EndPos == EffFeat ){ // we finished with a certain amount of succes double Distance = tester->getDistance(EndPos); if ( Distance >= 0.0 ){ UnicodeString origI; if ( Verbosity(NEAR_N) ){ origI = formatInstance( Inst.FV, CurrentFV, ib_offset, NumOfFeatures() ); } Threshold = bestArray.addResult( Distance, best_distrib, origI ); if ( 
do_silly_testing ){ Threshold = DBL_MAX; } } else { Error( "DISTANCE == " + TiCC::toString(Distance) ); FatalError( "we are dead" ); } } else { ++EndPos; // out of luck, compensate for roll-back } size_t pos=EndPos-1; while ( true ){ // rollback if ( tester->getDistance(pos) <= Threshold ){ CurPos = pos; best_distrib = IB->NextGraphTest( CurrentFV, CurPos ); break; } if ( pos == 0 ){ break; } --pos; } } } void MBLClass::test_instance_sim( const Instance& Inst, InstanceBase_base *IB, size_t ib_offset ){ vector CurrentFV(NumOfFeatures()); size_t EffFeat = EffectiveFeatures() - ib_offset; const ClassDistribution *best_distrib = IB->InitGraphTest( CurrentFV, &Inst.FV, ib_offset, EffectiveFeatures() ); tester->init( Inst, EffectiveFeatures(), ib_offset ); while ( best_distrib ){ double dummy_t = -1.0; size_t dummy_p = 0; // similarity::test() doesn't need CurPos, nor a Threshold // it recalculates the whole vector size_t EndPos = tester->test( CurrentFV, dummy_p, dummy_t ); if ( EndPos == EffFeat ){ // this should always be true! 
double Distance = tester->getDistance(EndPos); if ( Distance >= 0.0 ){ UnicodeString origI; if ( Verbosity(NEAR_N) ){ origI = formatInstance( Inst.FV, CurrentFV, ib_offset, NumOfFeatures() ); } bestArray.addResult( Distance, best_distrib, origI ); } else if ( GlobalMetric->type() == DotProduct ){ Error( "The Dot Product metric fails on your data: intermediate result too big to handle," ); Info( "you might consider using the Cosine metric '-mC' " ); FatalError( "timbl terminated" ); } else { Error( "negative similarity DISTANCE: " + TiCC::toString(Distance) ); FatalError( "we are dead" ); } } else { throw( logic_error( "Similarity testing: test should consider all features" ) ); } --EndPos; best_distrib = IB->NextGraphTest( CurrentFV, EndPos ); } } void MBLClass::TestInstance( const Instance& Inst, InstanceBase_base *SubTree, size_t level ){ // must be cleared for EVERY test if ( doSamples() ){ test_instance_ex( Inst, SubTree, level ); } else { if ( GlobalMetric->isSimilarityMetric( ) ){ test_instance_sim( Inst, SubTree, level ); } else { test_instance( Inst, SubTree, level ); } } } size_t MBLClass::countFeatures( const UnicodeString& inBuffer, const InputFormatType IF ) const { size_t result = 0; if ( IF == Sparse || IF == SparseBin ){ return NumOfFeatures(); } else { try { result = Chopper::countFeatures( inBuffer, IF, F_length, chopExamples() || chopOcc() ); } catch( const runtime_error& e ){ Error( e.what() ); } catch( const exception& e ){ FatalError( e.what() ); } } return result; } InputFormatType MBLClass::getInputFormat( const UnicodeString& inBuffer ) const { return Chopper::getInputFormat( inBuffer, chopExamples() || chopOcc() ); } size_t MBLClass::examineData( const string& FileName ){ // Looks at the data files, counts number of features. // and sets input_format variables. // size_t NumF = 0; InputFormatType IF = UnknownInputFormat; // Open the file. 
// if ( FileName == "" ) { Warning( "couldn't initialize: No FileName specified " ); return 0; } else { UnicodeString Buffer; ifstream datafile( FileName, ios::in); if (!datafile) { Warning( "can't open DataFile: " + FileName ); return 0; } else if ( input_format != UnknownInputFormat ){ // The format is somehow already known, so use that if ( input_format == SparseBin || input_format == Sparse ){ NumF = MaxFeatures; } else { if ( !TiCC::getline( datafile, Buffer ) ) { Warning( "empty data file" ); } else { bool more = true; if ( input_format == ARFF ){ while ( Buffer.caseCompare( "@DATA", 5 ) ){ if ( !TiCC::getline( datafile, Buffer ) ){ Warning( "empty data file" ); more = false; break; }; } if ( more && !TiCC::getline( datafile, Buffer ) ){ Warning( "empty data file" ); more = false; }; } while ( more && empty_line( Buffer, input_format ) ){ if ( !TiCC::getline( datafile, Buffer ) ){ Warning( "empty data file" ); more = false; }; } // now we have a usable line, //analyze it using the User defined input_format NumF = countFeatures( Buffer, input_format ); } } IF = input_format; } else if ( !TiCC::getline( datafile, Buffer ) ){ Warning( "empty data file: " + FileName ); } // We start by reading the first line so we can figure out the number // of Features, and see if the file is comma seperated or not, etc. // else{ if ( IF == ARFF ){ // Remember, we DON't want to auto-detect ARFF while ( Buffer.caseCompare( "@DATA", 5 ) ){ if ( !TiCC::getline( datafile, Buffer ) ) { Warning( "no ARRF data after comments: " + FileName ); return 0; } } do { if ( !TiCC::getline( datafile, Buffer ) ) { Warning( "no ARRF data after comments: " + FileName ); return 0; } } while ( empty_line( Buffer, input_format ) ); } else { while ( empty_line( Buffer, input_format ) ) { if ( !TiCC::getline( datafile, Buffer ) ) { Warning( "no data after comments: " + FileName ); return 0; } } // We found a useful line! 
// Now determine the input_format (if not already known, // and Count Features as well. } IF = getInputFormat( Buffer ); NumF = countFeatures( Buffer, IF ); } } if ( NumF > 0 ){ if ( input_format != UnknownInputFormat && input_format != IF ){ Warning( "assumed inputformat differs from specified!" ); return 0; } else { if ( NumF > MaxFeatures ){ Error( "Number of Features exceeds the maximum number. " "(currently " + TiCC::toString(MaxFeatures) + ")\nPlease increase.\n" ); return 0; } setInputFormat( IF ); } } return NumF; } void MBLClass::Initialize( size_t numF ){ // Allocate memory. Will be reused again and again .... // if ( target_pos == std::numeric_limits::max() ){ target_pos = numF; // the default } else if ( target_pos > numF ){ FatalError( "Initialize: TARGET_POS cannot exceed NUM_OF_FEATURES+1 " + TiCC::toString( numF+1 ) ); } targets.init(); features.init( numF, UserOptions ); CurrInst.Init( numF ); delete GlobalMetric; GlobalMetric = getMetricClass( globalMetricOption ); Options.FreezeTable(); if ( Weighting > IG_w || TreeOrder >= X2Order ){ need_all_weights = true; } } } // namespace LanguageMachines-timbl-642727d/src/Makefile.am000066400000000000000000000013641451477526200211370ustar00rootroot00000000000000AM_CPPFLAGS = -I@top_srcdir@/include AM_CXXFLAGS = -std=c++14 -W -Wall -O3 -g -pedantic bin_PROGRAMS = timbl check_PROGRAMS = simpletest TESTS = $(check_PROGRAMS) TESTS_ENVIRONMENT = topsrcdir=$(top_srcdir) simpletest_SOURCES = simpletest.cxx CLEANFILES = dimin.out LDADD = libtimbl.la timbl_SOURCES = Timbl.cxx lib_LTLIBRARIES = libtimbl.la libtimbl_la_LDFLAGS= -version-info 7:0:0 libtimbl_la_SOURCES = Common.cxx \ GetOptClass.cxx IBtree.cxx IBprocs.cxx \ Targets.cxx Features.cxx Instance.cxx \ MBLClass.cxx MsgClass.cxx \ StringOps.cxx TimblAPI.cxx Choppers.cxx\ TimblExperiment.cxx IGExperiment.cxx Metrics.cxx Testers.cxx \ TRIBLExperiments.cxx LOOExperiment.cxx CVExperiment.cxx \ Types.cxx neighborSet.cxx Statistics.cxx BestArray.cxx 
LanguageMachines-timbl-642727d/src/Metrics.cxx000066400000000000000000000274031451477526200212370ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #include #include #include #include #include #include "timbl/Common.h" #include "timbl/Types.h" #include "timbl/Instance.h" #include "timbl/Metrics.h" #include "unicode/schriter.h" using namespace std; using Common::Epsilon; using Common::Log2; //#define METRIC_DEBUG namespace Timbl{ double lv_distance( const icu::UnicodeString& source, const icu::UnicodeString& target ){ // code taken from: http://www.merriampark.com/ldcpp.htm // Levenshtein Distance Algorithm: C++ Implementation // by Anders Sewerin Johansen // Step 1 const size_t n = source.length(); const size_t m = target.length(); if (n == 0) { return (double)m; } if (m == 0) { return (double)n; } // Good form to declare a TYPEDEF typedef std::vector< std::vector > Tmatrix; Tmatrix matrix(n+1); // Size the vectors in the 2.nd dimension. 
Unfortunately C++ doesn't // allow for allocation on declaration of 2.nd dimension of vec of vec for ( size_t i = 0; i <= n; ++i ) { matrix[i].resize(m+1); } // Step 2 for ( size_t i = 0; i <= n; ++i ) { matrix[i][0]=i; } for ( size_t i = 0; i <= m; ++i ) { matrix[0][i]=i; } // Step 3 for ( size_t i = 1; i <= n; ++i ) { const char s_i = source[i-1]; // Step 4 for ( size_t j = 1; j <= m; ++j ) { const char t_j = target[j-1]; // Step 5 int cost; if (s_i == t_j) { cost = 0; } else { cost = 1; } // Step 6 const size_t above = matrix[i-1][j]; const size_t left = matrix[i][j-1]; const size_t diag = matrix[i-1][j-1]; size_t cell = min( above + 1, min(left + 1, diag + cost)); // Step 6A: Cover transposition, in addition to deletion, // insertion and substitution. This step is taken from: // Berghel, Hal ; Roach, David : "An Extension of Ukkonen's // Enhanced Dynamic Programming ASM Algorithm" // (http://www.acm.org/~hlb/publications/asm/asm.html) if (i>2 && j>2) { size_t trans=matrix[i-2][j-2]+1; if (source[i-2]!=t_j) { trans++; }; if (s_i!=target[j-2]) { trans++; }; if (cell>trans) { cell=trans; }; } matrix[i][j]=cell; } } return (double)matrix[n][m]; } double dc_distance( const icu::UnicodeString& string1, const icu::UnicodeString& string2 ){ // code taken from: // http://en.wikibooks.org/wiki/Algorithm_implementation/Strings/Dice's_coefficient unsigned int ls1 = string1.length(); unsigned int ls2 = string2.length(); double dice; int overlap = 0; int total = 0; if ( ls1 <= 1 || ls2 <= 1 ){ // back-off naar unigrammen set string1_unigrams; set string2_unigrams; icu::StringCharacterIterator it1(string1); while ( it1.hasNext() ){ string1_unigrams.insert(it1.current32()); it1.next32(); } icu::StringCharacterIterator it2(string2); while ( it2.hasNext() ){ string2_unigrams.insert(it2.current32()); it2.next32(); } for ( const auto& ug : string2_unigrams ){ if ( string1_unigrams.find( ug ) != string1_unigrams.end() ){ ++overlap; } } total = string1_unigrams.size() + 
string2_unigrams.size(); } else { set string1_bigrams; set string2_bigrams; for ( unsigned int i = 0; i < (ls1 - 1); ++i ) { // extract character bigrams from string1 string1_bigrams.insert( icu::UnicodeString( string1, i, 2 ) ); } for ( unsigned int i = 0; i < (ls2 - 1); ++i ) { // extract character bigrams from string2 string2_bigrams.insert( icu::UnicodeString( string2, i, 2 ) ); } for ( const auto& bg : string2_bigrams ){ if ( string1_bigrams.find( bg ) != string1_bigrams.end() ){ ++overlap; } } total = string1_bigrams.size() + string2_bigrams.size(); } dice = (double)(overlap * 2) / (double)total; // we will return 1 - dice coefficient as distance return 1.0 - dice; } double vd_distance( const SparseValueProbClass *r, const SparseValueProbClass *s ){ double result = 0.0; if ( ! ( r && s ) ){ return 1.0; } auto p1 = r->begin(); auto p2 = s->begin(); while( p1 != r->end() && p2 != s->end() ){ if ( p2->first < p1->first ){ result += p2->second; ++p2; } else if ( p2->first == p1->first ){ result += fabs( p1->second - p2->second ); ++p1; ++p2; } else { result += p1->second; ++p1; } } while ( p1 != r->end() ){ result += p1->second; ++p1; } while ( p2 != s->end() ){ result += p2->second; ++p2; } result = result / 2.0; return result; } double p_log_p_div_q( double p, double q ) { if ( abs(q) < Epsilon ){ return 0; } return p * Log2( p/q ); } double jd_distance( const SparseValueProbClass *r, const SparseValueProbClass *s ){ double part1 = 0.0; double part2 = 0.0; auto p1 = r->begin(); auto p2 = s->begin(); while( p1 != r->end() && p2 != s->end() ){ if ( p2->first < p1->first ){ part2 += p2->second; ++p2; } else if ( p2->first == p1->first ){ part1 += p_log_p_div_q( p1->second, p2->second ); part2 += p_log_p_div_q( p2->second, p1->second ); ++p1; ++p2; } else { part1 += p1->second; ++p1; } } while ( p1 != r->end() ){ part1 += p1->second; ++p1; } while ( p2 != s->end() ){ part2 += p2->second; ++p2; } double result = part1 + part2; result = result / 2.0; return result; } 
double k_log_k_div_m( double k, double l ) { if ( abs(k+l) < Epsilon ){ return 0; } return k * Log2( (2.0 * k)/( k + l ) ); } double js_distance( const SparseValueProbClass *r, const SparseValueProbClass *s ){ double part1 = 0.0; double part2 = 0.0; auto p1 = r->begin(); auto p2 = s->begin(); while( p1 != r->end() && p2 != s->end() ){ if ( p2->first < p1->first ){ part2 += p2->second; ++p2; } else if ( p2->first == p1->first ){ part1 += k_log_k_div_m( p1->second, p2->second ); part2 += k_log_k_div_m( p2->second, p1->second ); ++p1; ++p2; } else { part1 += p1->second; ++p1; } } while ( p1 != r->end() ){ part1 += p1->second; ++p1; } while ( p2 != s->end() ){ part2 += p2->second; ++p2; } double result = part1 + part2; result = result / 2.0; return result; } metricClass *getMetricClass( MetricType mt ){ switch ( mt ){ case Overlap: return new OverlapMetric(); break; case Numeric: return new NumericMetric(); break; case Euclidean: return new EuclideanMetric(); break; case Cosine: return new CosineMetric(); break; case DotProduct: return new DotProductMetric(); break; case ValueDiff: return new ValueDiffMetric(); break; case JeffreyDiv: return new JeffreyMetric(); break; case JSDiv: return new JSMetric(); break; case Levenshtein: return new LevenshteinMetric(); break; case Dice: return new DiceMetric(); break; case Ignore: return 0; break; default: throw logic_error("getMetricClass: unknown MetricType " + TiCC::toString(mt) ); } } double OverlapMetric::distance( const FeatureValue *F, const FeatureValue *G, size_t, double ) const { if ( F == G ){ return 0.0; } else { return 1.0; } } inline bool FV_to_real( const FeatureValue *FV, double &result ){ if ( FV ){ if ( TiCC::stringTo( FV->name(), result ) ){ return true; } } return false; } double JeffreyMetric::distance( const FeatureValue *F, const FeatureValue *G, size_t limit, double ) const { double result = 0.0; if ( G != F ){ if ( F->ValFreq() < limit || G->ValFreq() < limit ){ #ifdef METRIC_DEBUG cerr << "result = 1.0 
vanwege F.valFreq=" << F->ValFreq() << " en G.valFreq()=" << G ->ValFreq() << " met limiet= " << limit << endl; #endif result = 1.0; } else { result = jd_distance( F->valueClassProb(), G->valueClassProb() ); } } return result; } double JSMetric::distance( const FeatureValue *F, const FeatureValue *G, size_t limit, double ) const { double result = 0.0; if ( G != F ){ if ( F->ValFreq() < limit || G->ValFreq() < limit ){ #ifdef METRIC_DEBUG cerr << "result = 1.0 vanwege F.valFreq=" << F->ValFreq() << " en G.valFreq()=" << G ->ValFreq() << " met limiet= " << limit << endl; #endif result = 1.0; } else { result = js_distance( F->valueClassProb(), G->valueClassProb() ); } } return result; } double LevenshteinMetric::distance( const FeatureValue *F, const FeatureValue *G, size_t, double) const { double result = 0.0; if ( G != F ){ result = lv_distance( F->name(), G->name() ); } return result; } double DiceMetric::distance( const FeatureValue *F, const FeatureValue *G, size_t, double ) const { double result = 0.0; if ( G != F ){ result = dc_distance( F->name(), G->name() ); } return result; } double ValueDiffMetric::distance( const FeatureValue *F, const FeatureValue *G, size_t limit, double ) const { double result = 0.0; if ( G != F ){ if ( F->ValFreq() < limit || G->ValFreq() < limit ){ #ifdef METRIC_DEBUG cerr << "result = 1.0 vanwege F.valFreq=" << F->ValFreq() << " en G.valFreq()=" << G ->ValFreq() << " met limiet= " << limit << endl; #endif result = 1.0; } else { result = vd_distance( F->valueClassProb(), G->valueClassProb() ); } } return result; } double NumericMetric::distance( const FeatureValue *F, const FeatureValue *G, size_t, double scale ) const { double r1=0, r2=0, result; if ( FV_to_real( F, r1 ) && FV_to_real( G, r2 ) ){ result = fabs( (r1-r2)/ ( scale ) ); } else { result = 1.0; } return result; } double EuclideanMetric::distance( const FeatureValue *F, const FeatureValue *G, size_t, double scale ) const { double r1=0, r2=0, result; if ( FV_to_real( F, r1 
) && FV_to_real( G, r2 ) ){ result = sqrt(fabs(r1*r1-r2*r2))/ ( scale ); } else { result = 1.0; } return result; } double DotProductMetric::distance( const FeatureValue *, const FeatureValue *, size_t, double ) const { throw( logic_error( "unimplemented distance() for Dotproduct metric!" ) ); } double CosineMetric::distance( const FeatureValue *, const FeatureValue *, size_t, double ) const { throw( logic_error( "unimplemented distance() for Cosine metric!" ) ); } } LanguageMachines-timbl-642727d/src/MsgClass.cxx000066400000000000000000000035601451477526200213430ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . 
For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #include #include #include #include "timbl/MsgClass.h" using std::cerr; using std::endl; using std::string; namespace Timbl { void MsgClass::Info( const string& out_line ) const { cerr << out_line << endl; } void MsgClass::Warning( const string& out_line ) const { cerr << "Warning:" << out_line << endl; } void MsgClass::Error( const string& out_line ) const { ++err_cnt; cerr << "Error:" << out_line << endl; } void MsgClass::FatalError( const string& out_line ) const { cerr << "Fatal timbl Error:" << out_line << endl << "Please send a bugreport to timbl@uvt.nl" << endl << "include enough information, like:" << endl << "- Type of computer, type and version of OS, " << "and type and version of the compiler" << endl << "- Which Commands and switches were used" << endl << "- Which input was used, and which output was produced" << endl; throw std::runtime_error( "aborted" ); } } LanguageMachines-timbl-642727d/src/Statistics.cxx000066400000000000000000000143561451477526200217660ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . 
For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #include #include #include #include "timbl/Common.h" #include "timbl/MsgClass.h" #include "timbl/Types.h" #include "timbl/Instance.h" #include "timbl/Statistics.h" namespace Timbl { using std::bad_alloc; using std::ostream; using std::ios; using std::ios_base; using std::endl; using Common::Epsilon; ConfusionMatrix::ConfusionMatrix( size_t s ): size(s){ try { mat.resize(size+1); for ( size_t i=0; i <= size; ++i ){ mat[i].resize(size,0); } } catch( const bad_alloc& ){ Error ( "Not enough memory for ConfusionMatrix" ); throw; } } ConfusionMatrix::~ConfusionMatrix(){ for ( unsigned int i=0; i <= size; ++i ){ mat[i].clear(); } mat.clear(); } void ConfusionMatrix::Increment( const TargetValue *t1, const TargetValue *t2 ){ if ( t2 ){ if ( t1 ){ ++mat[t1->Index()-1][t2->Index()-1]; } else { ++mat[size][t2->Index()-1]; } } else { throw std::out_of_range( "ConfusionMatrix, index out of range" ); } } void ConfusionMatrix::Print( ostream& os, const Targets& targets ) const { os << "Confusion Matrix:" << endl; os << " "; for ( const auto& val : targets.values_array ){ // Print the class names. 
os.width(6); os.setf(ios::right, ios::adjustfield); os << val << " "; } os << endl; os << " "; for ( unsigned int i=0; i < size; ++i ){ os << "-------"; } os << endl; for ( unsigned int i=0; i < targets.values_array.size(); ++i ){ os.width(6); os.setf(ios::right, ios::adjustfield); os << targets.values_array[i] << " | "; for ( const auto& mv : mat[i] ){ os.width(6); os.setf(ios::right, ios::adjustfield); os << mv << " "; } os << endl; if ( i == targets.values_array.size() - 1 ){ os << " -*- | "; for ( const auto& mv : mat[size] ){ os.width(6); os.setf(ios::right, ios::adjustfield); os << mv << " "; } os << endl; } } os << endl; } void pf( ostream& os, size_t d ){ os.width(4); os << " \t" << d; } void pf( ostream& os, double d ){ if ( d < 0 ){ os << " \t (nan)\t"; } else { os.setf(ios::showpoint); os << " \t" << d; } } void ConfusionMatrix::FScore( ostream& os, const Targets& targets, bool cs_too ) const { double maf = 0.0; double mif = 0.0; double maa = 0.0; double mia = 0.0; ios_base::fmtflags flags = os.flags(ios::fixed); int oldPrec = os.precision(5); size_t effF = 0; size_t testF = 0; size_t effA = 0; if ( cs_too ){ os << "Scores per Value Class:" << endl; os << "class |\tTP\tFP\tTN\tFN\tprecision\trecall(TPR)\tFPR\t\tF-score\t\tAUC" << endl; } for ( unsigned int i=0; i < targets.values_array.size(); ++i ){ // so we loop over all known (trained) target values size_t TP = 0; size_t FP = 0; size_t FN = 0; size_t TN = 0; ValueClass *tv = targets.values_array[i]; size_t testCount = 0; for ( unsigned int j=0; j < size; ++j ){ testCount += mat[i][j]; if ( i == j ){ TP = mat[i][j]; } else { FN += mat[i][j]; } } testF += testCount; for ( unsigned int j=0; j <= size; ++j ){ if ( j != i ){ FP += mat[j][i]; } } for ( unsigned int j=0; j <= size; ++j ){ if ( j != i ){ for ( unsigned int k=0; k < size; ++k ){ if ( k != i ){ TN += mat[j][k]; } } } } double precision; if ( TP + FP == 0 ){ precision = -1; } else { precision = TP / double(TP + FP); } double TPR; if ( TP + FN == 
0 ){ TPR = -1; } else { TPR = TP / double(TP + FN); } double FPR; if ( FP + TN == 0 ){ FPR = -1; } else { FPR = FP / double(FP + TN); } double f_score; if ( precision < 0 || TPR < 0 || fabs(precision + TPR) < Epsilon ){ f_score = -1; } else { f_score = ( 2 * precision * TPR ) / (precision + TPR ); ++effF; maf += f_score; mif += (f_score * testCount); } double AUC; if ( TPR < 0 || FPR < 0 ){ AUC = -1; } else { AUC = ( 0.5 * TPR * FPR ) + ( TPR * ( 1.0 - FPR ) ) + ( 0.5 * ( ( 1.0 - TPR ) * ( 1.0 - FPR ) ) ); ++effA; maa += AUC; mia += (AUC * testCount); } if ( cs_too ){ os.width( 6 ); os << tv << " | "; os.width(0); pf(os,TP); pf(os,FP); pf(os,TN); pf(os,FN); pf(os,precision); pf(os,TPR); pf(os,FPR); pf(os,f_score); pf(os,AUC); os << endl; } } maf = maf / effF; mif = mif / testF; maa = maa / effA; mia = mia / testF; os.precision( oldPrec ); os.flags( flags ); os << "F-Score beta=1, microav: " << mif << endl; os << "F-Score beta=1, macroav: " << maf << endl; os << "AUC, microav: " << mia << endl; os << "AUC, macroav: " << maa << endl; } void ConfusionMatrix::merge( const ConfusionMatrix *cm ){ if ( cm ){ for ( size_t i=0; i <= size; ++i ){ for ( size_t j=0; j < size; ++j ){ mat[i][j] += cm->mat[i][j]; } } } } void StatisticsClass::merge( const StatisticsClass& in ){ _data += in._data; _skipped += in._skipped; _correct += in._correct; _tieOk += in._tieOk; _tieFalse += in._tieFalse; _exact += in._exact; } } LanguageMachines-timbl-642727d/src/StringOps.cxx000066400000000000000000000071451451477526200215620ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. 
timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #include #include #include #include #include #include "ticcutils/StringOps.h" #include "timbl/StringOps.h" #include "unicode/ustream.h" using namespace std; using namespace icu; namespace Timbl { UnicodeString StrToCode( const UnicodeString &par, bool trim ){ UnicodeString In = par; // cerr << "string to code IN: '" << In << "'" << endl; if ( trim ){ In.trim(); } else { In = par; } UnicodeString Out; for ( int i=0; i < In.length(); ++i ){ switch ( In[i] ){ case ' ': Out += '\\'; Out += '_'; break; case '\t': Out += '\\'; Out += 't'; break; case '\\': Out += '\\'; Out += '\\'; break; default: Out += In[i]; } } // cerr << "string to code Out: '" << Out << "'" << endl; return Out; } UnicodeString CodeToStr( const UnicodeString& in ){ UnicodeString out; for( int i=0; i < in.length(); ++i ){ if ( in[i] == '\\' ){ ++i; if ( i == in.length() ){ out += '\\'; break; } else { switch ( in[i] ){ case '_': out += ' '; break; case '\\': out += '\\'; break; case 't': out += '\t'; break; default: out += '\\'; out += in[i]; } } } else { out += in[i]; } } return out; } bool nocase_cmp( char c1, char c2 ){ return toupper(c1) == toupper(c2); } bool compare_nocase( const string& s1, const string& s2 ){ if ( s1.size() == s2.size() && equal( s1.begin(), s1.end(), s2.begin(), nocase_cmp ) ){ return true; } else { return false; } } bool compare_nocase_n( const string& s1, const string& s2 ){ if ( s1.size() <= s2.size() && equal( s1.begin(), s1.end(), s2.begin(), nocase_cmp ) ){ return true; } else { return false; } } string 
// Combine 'path' and 'filename':
// - an empty path, an empty filename, or a filename starting with '-'
//   (e.g. the stdin/stdout marker) is returned unchanged;
// - when the filename carries directory components, they are replaced
//   by 'path', unless keep_origpath is set, in which case 'path' is
//   simply prepended to the full filename;
// - a filename without directory components is appended to 'path'.
std::string correct_path( const std::string& filename,
			  const std::string& path,
			  bool keep_origpath ){
  if ( path.empty() || filename.empty() || filename[0] == '-' ){
    return filename;
  }
  std::string result = path;
  if ( result.back() != '/' ){
    result += "/";
  }
  std::string::size_type slash = filename.rfind( '/' );
  if ( slash == std::string::npos || keep_origpath ){
    result += filename;
  }
  else {
    result += filename.substr( slash+1 );
  }
  return result;
}
For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #include #include #include #include #include // for srand() #include #include "timbl/Common.h" #include "timbl/Types.h" #include "timbl/Options.h" #include "timbl/Instance.h" #include "timbl/IBtree.h" #include "timbl/MBLClass.h" #include "timbl/TimblExperiment.h" namespace Timbl { using namespace std; void TRIBL_Experiment::InitInstanceBase(){ srand( RandomSeed() ); default_order(); set_order(); runningPhase = TrainWords; InstanceBase = new TRIBL_InstanceBase( EffectiveFeatures(), ibCount, (RandomSeed()>=0), KeepDistributions() ); } void TRIBL2_Experiment::InitInstanceBase(){ srand( RandomSeed() ); default_order(); set_order(); runningPhase = TrainWords; InstanceBase = new TRIBL2_InstanceBase( EffectiveFeatures(), ibCount, (RandomSeed()>=0), KeepDistributions() ); } bool TRIBL_Experiment::checkTestFile(){ if ( !TimblExperiment::checkTestFile() ){ return false; } else if ( IBStatus() == Pruned ){ Warning( "you tried to apply the " + TiCC::toString( algorithm) + " algorithm on a pruned Instance Base" ); return false; } else if ( TRIBL_offset() == 0 ){ Error( "TRIBL algorithm impossible while threshold not set\n" ); return false; } return true; } bool TRIBL2_Experiment::checkTestFile(){ if ( !TimblExperiment::checkTestFile() ){ return false; } else if ( IBStatus() == Pruned ){ Warning( "you tried to apply the " + TiCC::toString( algorithm) + " algorithm on a pruned Instance Base" ); return false; } return true; } const TargetValue *TRIBL_Experiment::LocalClassify( const Instance& Inst, double& Distance, bool& exact ){ const TargetValue *Res = NULL; bool Tie = false; exact = false; if ( !bestResult.reset( beamSize, normalisation, norm_factor, targets ) ){ Warning( "no normalisation possible because a BeamSize is specified\n" "output is NOT normalized!" 
); } const ClassDistribution *ExResultDist = ExactMatch( Inst ); if ( ExResultDist ){ Distance = 0.0; Res = ExResultDist->BestTarget( Tie, (RandomSeed() >= 0) ); bestResult.addConstant( ExResultDist, Res ); exact = Do_Exact(); } else { size_t level = 0; const ClassDistribution *TrResultDist = 0; initExperiment(); IB_InstanceBase *SubTree = InstanceBase->TRIBL_test( Inst, TRIBL_offset(), Res, TrResultDist, level ); if ( !SubTree ){ match_depth = level; last_leaf = false; Distance = sum_remaining_weights(level); if ( TrResultDist ){ if ( level == 0 ){ bestResult.addTop( TrResultDist, Res ); } else { bestResult.addConstant( TrResultDist, Res ); } } } else { testInstance( Inst, SubTree, TRIBL_offset() ); bestArray.initNeighborSet( nSet ); WClassDistribution *ResultDist = getBestDistribution(); Res = ResultDist->BestTarget( Tie, (RandomSeed() >= 0) ); if ( Tie ){ ++num_of_neighbors; testInstance( Inst, SubTree, TRIBL_offset() ); bestArray.addToNeighborSet( nSet, num_of_neighbors ); WClassDistribution *ResultDist2 = getBestDistribution(); bool Tie2 = false; const TargetValue *Res2 = ResultDist2->BestTarget( Tie2, (RandomSeed() >= 0) ); --num_of_neighbors; if ( !Tie2 ){ delete ResultDist; bestResult.addDisposable( ResultDist2, Res2 ); Res = Res2; } else { delete ResultDist2; bestResult.addDisposable( ResultDist, Res ); } } else { bestResult.addDisposable( ResultDist, Res ); } SubTree->CleanPartition( true ); Distance = getBestDistance(); } } if ( confusionInfo ){ confusionInfo->Increment( Inst.TV, Res ); } bool correct = Inst.TV && ( Res == Inst.TV ); if ( correct ){ stats.addCorrect(); if ( Tie ){ stats.addTieCorrect(); } } else if ( Tie ){ stats.addTieFailure(); } exact = exact || (fabs(Distance) < Epsilon ); if ( exact ){ stats.addExact(); } return Res; } bool TRIBL_Experiment::checkLine( const icu::UnicodeString& line ){ if ( !TimblExperiment::checkLine( line ) ){ return false; } else if ( IBStatus() == Pruned ){ Warning( "you tried to apply the TRIBL algorithm on a 
pruned " " Instance Base" ); return false; } return true; } bool TRIBL2_Experiment::checkLine( const icu::UnicodeString& line ){ if ( !TimblExperiment::checkLine( line ) ){ return false; } else if ( IBStatus() == Pruned ){ Warning( "you tried to apply the TRIBL2 algorithm on a pruned " " Instance Base" ); return false; } return true; } const TargetValue *TRIBL2_Experiment::LocalClassify( const Instance& Inst, double& Distance, bool& exact ){ const TargetValue *Res = NULL; exact = false; if ( !bestResult.reset( beamSize, normalisation, norm_factor, targets ) ){ Warning( "no normalisation possible because a BeamSize is specified\n" "output is NOT normalized!" ); } bool Tie = false; const ClassDistribution *ExResultDist = ExactMatch( Inst ); if ( ExResultDist ){ Distance = 0.0; Res = ExResultDist->BestTarget( Tie, (RandomSeed() >= 0) ); bestResult.addConstant( ExResultDist, Res ); exact = Do_Exact(); } else { size_t level = 0; const ClassDistribution *TrResultDist = 0; IB_InstanceBase *SubTree = InstanceBase->TRIBL2_test( Inst, TrResultDist, level ); if ( SubTree ){ testInstance( Inst, SubTree, level ); bestArray.initNeighborSet( nSet ); WClassDistribution *ResultDist1 = getBestDistribution(); Res = ResultDist1->BestTarget( Tie, (RandomSeed() >= 0) ); if ( Tie ){ ++num_of_neighbors; testInstance( Inst, SubTree, level ); bestArray.addToNeighborSet( nSet, num_of_neighbors ); WClassDistribution *ResultDist2 = getBestDistribution(); bool Tie2 = false; const TargetValue *Res2 = ResultDist2->BestTarget( Tie2, (RandomSeed() >= 0) ); --num_of_neighbors; if ( !Tie2 ){ delete ResultDist1; bestResult.addDisposable( ResultDist2, Res2 ); Res = Res2; } else { delete ResultDist2; bestResult.addDisposable( ResultDist1, Res ); } } else { bestResult.addDisposable( ResultDist1, Res ); } SubTree->CleanPartition( true ); match_depth = level; Distance = getBestDistance(); } else { // an exact match Distance = 0.0; Res = TrResultDist->BestTarget( Tie, (RandomSeed() >= 0) ); 
bestResult.addConstant( TrResultDist, Res ); bestArray.init( num_of_neighbors, MaxBests, Verbosity(NEAR_N), Verbosity(DISTANCE), Verbosity(DISTRIB) ); bestArray.addResult( Distance, TrResultDist, get_org_input() ); bestArray.initNeighborSet( nSet ); } } if ( confusionInfo ){ confusionInfo->Increment( Inst.TV, Res ); } bool correct = Inst.TV && ( Res == Inst.TV ); if ( correct ){ stats.addCorrect(); if ( Tie ){ stats.addTieCorrect(); } } else if ( Tie ){ stats.addTieFailure(); } exact = exact || ( fabs(Distance) < Epsilon ); if ( exact ){ stats.addExact(); } return Res; } void TRIBL_Experiment::showTestingInfo( ostream& os ){ if ( !Verbosity(SILENT) ){ if ( Verbosity(OPTIONS) ){ ShowSettings( os ); } os << endl << "Starting to test, Testfile: " << testStreamName << endl << "Writing output in: " << outStreamName << endl << "Algorithm : TRIBL, q = " << TRIBL_offset() << endl; show_metric_info( os ); show_weight_info( os ); os << decay << endl; } } bool TRIBL_Experiment::GetInstanceBase( istream& is ){ bool result = false; bool Pruned; bool Hashed; int Version; string range_buf; size_t numF = get_IB_Info( is, Pruned, Version, Hashed, range_buf ); if ( numF == 0 ){ return false; } else if ( Pruned ){ Error( "Instance-base is Pruned!, NOT valid for " + TiCC::toString(algorithm) + " Algorithm" ); } else { TreeOrder = DataFile; Initialize( numF ); if ( !get_ranges( range_buf ) ){ Warning( "couldn't retrieve ranges..." 
); } else { srand( RandomSeed() ); InstanceBase = new TRIBL_InstanceBase( EffectiveFeatures(), ibCount, (RandomSeed()>=0), KeepDistributions() ); int pos=0; for ( size_t i=0; i < NumOfFeatures(); ++i ){ features[i]->SetWeight( 1.0 ); if ( features[features.permutation[i]]->Ignore() ){ features.perm_feats[i] = NULL; } else { features.perm_feats[pos++] = features[features.permutation[i]]; } } if ( Hashed ){ result = InstanceBase->ReadIB_hashed( is, features, targets, Version ); } else { result = InstanceBase->ReadIB( is, features, targets, Version ); } } } return result; } bool TRIBL2_Experiment::GetInstanceBase( istream& is ){ bool result = false; bool Pruned; bool Hashed; int Version; string range_buf; size_t numF = get_IB_Info( is, Pruned, Version, Hashed, range_buf ); if ( numF == 0 ){ return false; } else if ( Pruned ){ Error( "Instance-base is Pruned!, NOT valid for " + TiCC::toString(algorithm) + " Algorithm" ); } else { TreeOrder = DataFile; Initialize( numF ); if ( !get_ranges( range_buf ) ){ Warning( "couldn't retrieve ranges..." 
); } else { srand( RandomSeed() ); InstanceBase = new TRIBL2_InstanceBase( EffectiveFeatures(), ibCount, (RandomSeed()>=0), KeepDistributions() ); int pos=0; for ( size_t i=0; i < NumOfFeatures(); ++i ){ features[i]->SetWeight( 1.0 ); if ( features[features.permutation[i]]->Ignore() ){ features.perm_feats[i] = NULL; } else { features.perm_feats[pos++] = features[features.permutation[i]]; } } if ( Hashed ){ result = InstanceBase->ReadIB_hashed( is, features, targets, Version ); } else { result = InstanceBase->ReadIB( is, features, targets, Version ); } } } return result; } } LanguageMachines-timbl-642727d/src/Targets.cxx000066400000000000000000000531341451477526200212420ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . 
For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #include #include #include #include #include // for find_if() #include // for accumulate() #include #include #include "ticcutils/StringOps.h" #include "ticcutils/PrettyPrint.h" #include "ticcutils/UniHash.h" #include "timbl/Common.h" #include "timbl/Types.h" #include "timbl/Targets.h" using namespace std; using namespace icu; namespace Timbl { using namespace Common; using TiCC::operator<<; size_t Vfield::Index() { return value->Index(); } ostream& operator<<(ostream& os, const Vfield *vd ) { return vd->put( os ); } ostream& operator<<(ostream& os, const Vfield& vd ) { return vd.put( os ); } ostream& Vfield::put( ostream& os ) const { os << value << " " << weight; return os; } inline int random_number( int Min, int Max ){ // calculate a random integer within the interval [min,max] if ( Min == Max ){ return Min; } double randnum = (double)rand()/(double)RAND_MAX; randnum *= (Max-Min); randnum += Min; return (int)floor(randnum+0.5); } void ClassDistribution::clear(){ for ( const auto& d : distribution ){ delete d.second; } distribution.clear(); total_items = 0; } double ClassDistribution::Confidence( const TargetValue *tv ) const { auto it = find_if( distribution.begin(), distribution.end(), [tv]( const std::pair& v ){ return v.second->Value() == tv ; } ); if ( it != distribution.end() ){ return it->second->Weight(); } return 0.0; } void ClassDistribution::DistToString( string& DistStr, double minf ) const { ostringstream oss; oss.setf(ios::showpoint); bool first = true; oss << "{ "; for ( const auto& it : distribution ){ const Vfield *f = it.second; if ( f->frequency >= minf ){ if ( !first ){ oss << ", "; } oss << f->value << " " << double(f->frequency); first = false; } } oss << " }"; DistStr = oss.str(); } void WClassDistribution::DistToString( string& DistStr, double minw ) const { ostringstream oss; oss.setf(ios::showpoint); 
bool first = true; oss << "{ "; for( const auto& it : distribution ){ const Vfield *f = it.second; if ( abs(f->weight) < minw ){ continue; } if ( abs(f->weight) < Epsilon ){ continue; } if ( !first ){ oss << ", "; } oss << f->value << " " << f->weight; first = false; } oss << " }"; DistStr = oss.str(); } class dblCmp { public: bool operator() ( const double d1, const double d2 ) const { return d1 - d2 > Epsilon; } }; void ClassDistribution::DistToStringWW( string& DistStr, int beam ) const { double minw = 0.0; if ( beam > 0 ){ set freqs; for ( const auto& it : distribution ){ const Vfield *f = it.second; freqs.insert( f->frequency ); } int cnt=0; for ( const auto& rit : freqs ){ if ( ++cnt == beam ) { minw = rit; break; } } } DistToString( DistStr, minw ); } void WClassDistribution::DistToStringWW( string& DistStr, int beam ) const { double minw = 0.0; if ( beam > 0 ){ set wgths; for ( const auto& it : distribution ){ const Vfield *f = it.second; wgths.insert( f->weight ); } int cnt=0; for ( const auto& rit : wgths ){ if ( ++cnt == beam ) { minw = rit; break; } } } DistToString( DistStr, minw ); } const string ClassDistribution::DistToString() const { string result; DistToString( result ); return result; } const string ClassDistribution::DistToStringW( int beam ) const { string result; DistToStringWW( result, beam ); return result; } double ClassDistribution::Entropy() const { double entropy = 0.0; size_t TotalVals = total_items; if ( TotalVals > 0 ){ // Loop over the classes in the distribution for ( const auto& it : distribution ){ size_t Freq = it.second->Freq(); if ( Freq > 0 ){ double Prob = Freq / (double)TotalVals; entropy += Prob * Log2(Prob); } } } return fabs(entropy); } void WClassDistribution::Normalize() { double sum = accumulate( distribution.begin(), distribution.end(), 0.0, []( double r, const std::pair& v ){ return r + v.second->Weight(); } ); for ( auto& it : distribution ){ it.second->SetWeight( it.second->Weight() / sum ); } } void 
WClassDistribution::Normalize_1( double factor, const Targets& targ ) { for ( const auto& val : targ.values_array ){ // search for val, if not there: add entry with frequency factor; // otherwise increment the ExamplarWeight size_t id = val->Index(); auto const& it = distribution.find( id ); if ( it != distribution.end() ){ it->second->SetWeight( it->second->Weight() + factor ); } else { distribution[id] = new Vfield( val, 1, factor ); } } total_items += targ.num_of_values(); Normalize(); } void WClassDistribution::Normalize_2( ) { for ( const auto& d : distribution ){ d.second->SetWeight( log1p( d.second->Weight() ) ); } Normalize(); } ClassDistribution *ClassDistribution::to_VD_Copy( ) const { ClassDistribution *res = new ClassDistribution(); for ( const auto& d : distribution ){ size_t key = d.first; const Vfield *vdf = d.second; res->distribution[key] = new Vfield( vdf->Value(), vdf->Freq(), vdf->Freq() ); } res->total_items = total_items; return res; } WClassDistribution *ClassDistribution::to_WVD_Copy() const { WClassDistribution *res = new WClassDistribution(); for ( const auto& d : distribution ){ size_t key = d.first; const Vfield *vdf = d.second; res->distribution[key] = new Vfield( vdf->Value(), vdf->Freq(), vdf->Freq() ); } res->total_items = total_items; return res; } WClassDistribution *WClassDistribution::to_WVD_Copy( ) const { WClassDistribution *result = new WClassDistribution(); for ( const auto& d : distribution ){ size_t key = d.first; const Vfield *vdf = d.second; result->distribution[key] = new Vfield( vdf->Value(), vdf->Freq(), vdf->Weight() ); } result->total_items = total_items; return result; } // // special functions to serialize distibutions including both frequency // AND weight information. 
Needed for store/retrieve InstanceBases // // First hashed variant: // const string ClassDistribution::SaveHashed() const{ ostringstream oss; oss << "{ "; bool first = true; for ( const auto& it : distribution ){ const Vfield *f = it.second; if ( f->frequency > 0 ){ if ( !first ){ oss << ", "; } oss << f->value->Index() << " " << f->frequency; first = false; } } oss << " }"; return oss.str(); } const string WClassDistribution::SaveHashed() const{ ostringstream oss; bool first = true; oss << "{ "; for ( const auto& it : distribution ){ const Vfield *f = it.second; if ( f->frequency > 0 ){ if ( !first ){ oss << ", "; } oss << f->Value()->Index() << " " << f->frequency << " " << f->weight; first = false; } } oss << " }"; return oss.str(); } // // non-hashed variant: // const string ClassDistribution::Save() const{ ostringstream oss; oss << "{ "; bool first = true; for ( const auto& it : distribution ){ const Vfield *f = it.second; if ( f->frequency > 0 ){ if ( !first ){ oss << ", "; } oss << f->value << " " << f->frequency; first = false; } } oss << " }"; return oss.str(); } const string WClassDistribution::Save() const{ ostringstream oss; oss << "{ "; bool first = true; for ( const auto& it : distribution ){ const Vfield *f = it.second; if ( f->frequency > 0 ){ if ( !first ){ oss << ", "; } oss.setf(ios::showpoint); oss << f->value << " " << f->frequency << " " << f->weight; first = false; } } oss << " }"; return oss.str(); } void ClassDistribution::SetFreq( const TargetValue *val, const int freq, double ){ // add entry with frequency freq; Vfield *temp = new Vfield( val, freq, freq ); distribution[val->Index()] = temp; total_items += freq; } void WClassDistribution::SetFreq( const TargetValue *val, const int freq, double sw ){ // add entry with frequency freq; // also sets the sample_weight Vfield *temp = new Vfield( val, freq, sw ); distribution[val->Index()] = temp; total_items += freq; } bool ClassDistribution::IncFreq( const TargetValue *val, size_t occ, double 
){ // search for val, if not there: add entry with frequency 'occ'; // otherwise increment the freqency size_t id = val->Index(); auto const& it = distribution.find( id ); if ( it != distribution.end() ){ it->second->IncFreq( occ ); } else { distribution[id] = new Vfield( val, occ, 1.0 ); } total_items += occ; return true; } bool WClassDistribution::IncFreq( const TargetValue *val, size_t occ, double sw ){ // search for val, if not there: add entry with frequency 'occ'; // otherwise increment the freqency // also set sample weight size_t id = val->Index(); auto const& it = distribution.find( id ); if ( it != distribution.end() ){ it->second->IncFreq( occ ); } else { distribution[id] = new Vfield( val, occ, sw ); } total_items += occ; return fabs( distribution[id]->Weight() - sw ) > Epsilon; } void ClassDistribution::DecFreq( const TargetValue *val ){ // search for val, if not there, just forget // otherwise decrement the freqency auto const& it = distribution.find( val->Index() ); if ( it != distribution.end() ){ it->second->DecFreq(); total_items -= 1; } } void ClassDistribution::Merge( const ClassDistribution& VD ){ for ( const auto& it : VD.distribution ){ size_t key = it.first; const Vfield *vd = it.second; if ( distribution.find(key) != distribution.end() ){ // the key is already present, increment the frequency distribution[key]->AddFreq( vd->Freq() ); } else { // add a key // VD might be weighted. 
But we don't need/want that info here // Weight == Freq is more convenient distribution[key] = new Vfield( vd->Value(), vd->Freq(), vd->Freq() ); } } total_items += VD.total_items; } void WClassDistribution::MergeW( const ClassDistribution& VD, double Weight ){ for ( const auto& it : VD.distribution ){ size_t key = it.first; const Vfield *vd = it.second; if ( distribution.find(key) != distribution.end() ){ distribution[key]->SetWeight( distribution[key]->Weight() + vd->Weight() *Weight ); } else { distribution[key] = new Vfield( vd->Value(), 1, vd->Weight() * Weight); } } total_items += VD.total_items; } const TargetValue *ClassDistribution::BestTarget( bool& tie, bool do_rand ) const { // get the most frequent target from the distribution. // In case of a tie take the one which is GLOBALLY the most frequent, // OR (if do_rand) take random one of the most frequents // and signal if this ties also! const TargetValue *best = NULL; tie = false; auto It = distribution.begin(); if ( It != distribution.end() ){ Vfield *pnt = It->second; size_t Max = pnt->Freq(); if ( do_rand ){ int nof_best=1, pick=1; ++It; while ( It != distribution.end() ){ pnt = It->second; if ( pnt->Freq() > Max ){ Max = pnt->Freq(); nof_best = 1; } else { if ( pnt->Freq() == Max ){ nof_best++; } } ++It; } tie = ( nof_best > 1 ); pick = random_number( 1, nof_best ); It = distribution.begin(); nof_best = 0; while ( It != distribution.end() ){ pnt = It->second; if ( pnt->Freq() == Max ){ if ( ++nof_best == pick ){ return pnt->Value(); } } ++It; } return NULL; } else { best = pnt->Value(); ++It; while ( It != distribution.end() ){ pnt = It->second; if ( pnt->Freq() > Max ){ tie = false; best = pnt->Value(); Max = pnt->Freq(); } else { if ( pnt->Freq() == Max ) { tie = true; if ( pnt->Value()->ValFreq() > best->ValFreq() ){ best = pnt->Value(); } } } ++It; } return best; } } return best; } const TargetValue *WClassDistribution::BestTarget( bool& tie, bool do_rand ) const { // get the most frequent target 
from the distribution. // In case of a tie take the one which is GLOBALLY the most frequent, // OR (if do_rand) take random one of the most frequents // and signal if this ties also! const TargetValue *best = NULL; auto It = distribution.begin(); tie = false; if ( It != distribution.end() ){ double Max = It->second->Weight(); if ( do_rand ){ int nof_best=1, pick=1; ++It; while ( It != distribution.end() ){ if ( It->second->Weight() > Max ){ Max = It->second->Weight(); nof_best = 1; } else { if ( abs(It->second->Weight()- Max) < Epsilon ){ nof_best++; } } ++It; } tie = ( nof_best > 1 ); pick = random_number( 1, nof_best ); It = distribution.begin(); nof_best = 0; while ( It != distribution.end() ){ if ( abs(It->second->Weight() - Max) < Epsilon ){ if ( ++nof_best == pick ){ return It->second->Value(); } } ++It; } return NULL; } else { best = It->second->Value(); ++It; while ( It != distribution.end() ){ if ( It->second->Weight() > Max ){ tie = false; best = It->second->Value(); Max = It->second->Weight(); } else { if ( abs(It->second->Weight() - Max) < Epsilon ) { tie = true; if ( It->second->Value()->ValFreq() > best->ValFreq() ){ best = It->second->Value(); } } } ++It; } return best; } } return best; } ostream& operator<<(ostream& os, const ClassDistribution& vd ) { string tmp; vd.DistToString( tmp ); os << tmp; return os; } ostream& operator<<(ostream& os, const ClassDistribution *vd ) { string tmp = "{null}"; if ( vd ){ vd->DistToString( tmp ); } os << tmp; return os; } ClassDistribution *ClassDistribution::read_distribution( istream &is, Targets& Targ, bool do_fr ){ // read a distribution from stream is into Target // if do_f we also adjust the value of Frequency of the Target, which is // otherwise 1. Special case when reading the TopDistribution. // ClassDistribution *result = 0; char nextCh; is >> nextCh; // skip { if ( nextCh != '{' ){ throw runtime_error( "missing '{' in distribution string." 
); } else { int next; do { size_t freq; UnicodeString buf; is >> ws >> buf; is >> freq; TargetValue *target; if ( do_fr ){ target = Targ.add_value( buf, freq ); } else { target = Targ.Lookup( buf ); } if ( !target ){ delete result; result = 0; break; } next = look_ahead(is); if ( next == ',' ){ if ( !result ) { result = new ClassDistribution(); } result->SetFreq( target, freq ); is >> nextCh; next = look_ahead(is); } else if ( next == '}' ){ if ( !result ){ result = new ClassDistribution(); } result->SetFreq( target, freq ); } else if ( isdigit(next) ){ if ( !result ){ result = new WClassDistribution(); } double sw; is >> sw; result->SetFreq( target, freq, sw ); next = look_ahead(is); if ( next == ',' ){ is >> nextCh; next = look_ahead(is); } } } while ( is && next != '}' ); if ( is ){ is >> nextCh; // skip } } else { delete result; throw runtime_error( "missing '}' in distribution string." ); } } return result; } ClassDistribution *ClassDistribution::read_distribution_hashed( istream &is, Targets& Targ, bool do_fr ){ ClassDistribution *result = 0; // read a distribution from stream is into Target // if do_f we also adjust the value of Frequency of the Target, which is // otherwise 1. Special case when reading the TopDistribution. // char nextCh; is >> nextCh; // skip { if ( nextCh != '{' ){ throw runtime_error( "missing '{' in distribution string." 
); } else { int next; do { unsigned int index; size_t freq; is >> index; is >> freq; TargetValue *target; if ( do_fr ){ target = Targ.add_value( index, freq ); } else { target = Targ.ReverseLookup( index ); } if ( !target ){ delete result; result = 0; break; } next = look_ahead(is); if ( next == ',' ){ if ( !result ){ result = new ClassDistribution(); } result->SetFreq( target, freq ); is >> nextCh; next = look_ahead(is); } else if ( next == '}' ){ if ( !result ){ result = new ClassDistribution(); } result->SetFreq( target, freq ); } else if ( isdigit(next) ){ double sw; is >> sw; if ( !result ){ result = new WClassDistribution(); } result->SetFreq( target, freq, sw ); next = look_ahead(is); if ( next == ',' ){ is >> nextCh; next = look_ahead(is); } } } while ( is && next != '}' ); if ( is ){ is >> nextCh; // skip thr '}' } else { delete result; throw runtime_error( "missing '}' in distribution string" ); } } return result; } ostream& operator<<( std::ostream& os, ValueClass const *vc ){ if ( vc ){ os << vc->name(); } else { os << "*FV-NF*"; } return os; } TargetValue::TargetValue( const UnicodeString& value, size_t value_hash ): ValueClass( value, value_hash ){} size_t Targets::EffectiveValues() const { return count_if( values_array.begin(), values_array.end(), [&]( const TargetValue* v ){ return (v->ValFreq() > 0); } ); } size_t Targets::TotalValues() const { return accumulate( values_array.begin(), values_array.end(), 0, [&]( size_t r, const TargetValue *v ){ return r + v->ValFreq(); } ); } Targets &Targets::operator=( const Targets& t ){ if ( this != &t ){ values_array = t.values_array; reverse_values = t.reverse_values; target_hash = t.target_hash; // shared ?? 
is_reference =true; } return *this; } Targets::~Targets() { if ( !is_reference ){ for ( const auto& it : values_array ){ delete it; } delete target_hash; } reverse_values.clear(); } void Targets::init(){ assert( target_hash == 0 ); // Safeguard init() may only called once target_hash = new Hash::UnicodeHash(); } TargetValue *Targets::Lookup( const UnicodeString& str ) const { TargetValue *result = 0; size_t index = target_hash->lookup( str ); if ( index ) { auto const& it = reverse_values.find( index ); result = it->second; } return result; } TargetValue *Targets::ReverseLookup( size_t index ) const { auto const& it = reverse_values.find( index ); return it->second; } TargetValue *Targets::add_value( const UnicodeString& valstr, int freq ){ unsigned int hash_val = target_hash->hash( valstr ); // cerr << "target hash(" << valstr << ") geeft: " << hash_val << endl; return add_value( hash_val, freq ); } TargetValue *Targets::add_value( size_t index, int freq ){ auto const& it = reverse_values.find( index ); if ( it == reverse_values.end() ){ const UnicodeString& name = target_hash->reverse_lookup( index ); // cerr << "target lookup(" << index << ") geeft: " << name << endl; // we want to store the singleton value for this index // so we MUST reverse lookup the index TargetValue *tv = new TargetValue( name, index ); tv->ValFreq( freq ); reverse_values[index] = tv; values_array.push_back( tv ); } else { it->second->IncValFreq( freq ); } return reverse_values[index]; } TargetValue *Targets::MajorityClass() const { TargetValue *result = 0; size_t freq = 0; for ( const auto& it : values_array ){ if ( it->ValFreq() > freq ){ result = it; freq = result->ValFreq(); } } return result; } bool Targets::increment_value( TargetValue *TV ){ bool result = false; if ( TV ){ TV->incr_val_freq(); result = true; } return result; } bool Targets::decrement_value( TargetValue *TV ){ bool result = false; if ( TV ){ TV->decr_val_freq(); result = true; } return result; } } 
LanguageMachines-timbl-642727d/src/Testers.cxx000066400000000000000000000167461451477526200212720ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #include #include #include #include "timbl/Common.h" #include "timbl/Types.h" #include "timbl/Instance.h" #include "timbl/Metrics.h" #include "timbl/Testers.h" using namespace std; using Common::Epsilon; using Common::Log2; namespace Timbl{ //#define DBGTEST //#define DBGTEST_DOT double overlapTestFunction::test( const FeatureValue *F, const FeatureValue *G, const Feature *Feat ) const { #ifdef DBGTEST cerr << "overlap_distance(" << F << "," << G << ") = "; #endif double result = Feat->fvDistance( F, G ); #ifdef DBGTEST cerr << result; #endif result *= Feat->Weight(); #ifdef DBGTEST cerr << " gewogen " << result << endl; #endif return result; } double valueDiffTestFunction::test( const FeatureValue *F, const FeatureValue *G, const Feature *Feat ) const { #ifdef DBGTEST cerr << TiCC::toString(Feat->getMetricType()) << "_distance(" << F << "," << G << ") = "; #endif double result = Feat->fvDistance( F, G, threshold ); #ifdef DBGTEST cerr << result; #endif result *= Feat->Weight(); #ifdef DBGTEST cerr << " gewogen " << 
result << endl; #endif return result; } TesterClass* getTester( MetricType m, const Feature_List& features, int mvdThreshold ){ if ( m == Cosine ){ return new CosineTester( features ); } else if ( m == DotProduct ){ return new DotProductTester( features ); } else { return new DistanceTester( features, mvdThreshold ); } } TesterClass::TesterClass( const Feature_List& features ): _size(features.feats.size()), effSize(_size), offSet(0), FV(0), features(features.feats), permutation(features.permutation) { permFeatures.resize(_size,0); #ifdef DBGTEST cerr << "created TesterClass(" << _size << ")" << endl; #endif for ( size_t j=0; j < _size; ++j ){ permFeatures[j] = features.feats[features.permutation[j]]; } distances.resize(_size+1, 0.0); } void TesterClass::init( const Instance& inst, size_t effective, size_t oset ){ #ifdef DBGTEST cerr << "tester Initialized!" << endl; #endif effSize = effective-oset; offSet = oset; FV = &inst.FV; } DistanceTester::~DistanceTester(){ for ( const auto& it : metricTest ){ delete it; } } DistanceTester::DistanceTester( const Feature_List& features, int mvdmThreshold ): TesterClass( features ){ #ifdef DBGTEST cerr << "create a tester with threshold = " << mvdmThreshold << endl; #endif metricTest.resize(_size,0); for ( size_t i=0; i < _size; ++i ){ #ifdef DBGTEST cerr << "set metric[" << i+1 << "]=" << TiCC::toString(features.feats[i]->getMetricType()) << endl; #endif if ( features[i]->Ignore() ) continue; if ( features[i]->isStorableMetric() ){ #ifdef DBGTEST cerr << "created valueDiffTestFunction " << endl; #endif metricTest[i] = new valueDiffTestFunction( mvdmThreshold ); } else { #ifdef DBGTEST cerr << "created overlapFunction " << endl; #endif metricTest[i] = new overlapTestFunction(); } } } size_t DistanceTester::test( const vector& G, size_t CurPos, double Threshold ) { size_t i; size_t TrueF; for ( i=CurPos, TrueF = i + offSet; i < effSize; ++i,++TrueF ){ #ifdef DBGTEST cerr << "feature " << TrueF << " (perm=" << permutation[TrueF] 
<< ")" << endl; #endif double result = metricTest[permutation[TrueF]]->test( (*FV)[TrueF], G[i], permFeatures[TrueF] ); distances[i+1] = distances[i] + result; if ( distances[i+1] > Threshold ){ #ifdef DBGTEST cerr << "threshold reached at " << i << " distance=" << distances[i+1] << endl; #endif return i; } } #ifdef DBGTEST cerr << "threshold reached at end, distance=" << distances[effSize] << endl; #endif return effSize; } double DistanceTester::getDistance( size_t pos ) const{ return distances[pos]; } inline bool FV_to_real( const FeatureValue *FV, double &result ){ if ( FV ){ if ( TiCC::stringTo( FV->name(), result ) ){ return true; } } return false; } double innerProduct( const FeatureValue *FV, const FeatureValue *G ) { double r1=0, r2=0, result; #ifdef DBGTEST_DOT cerr << "innerproduct " << FV << " x " << G << endl; #endif if ( FV_to_real( FV, r1 ) && FV_to_real( G, r2 ) ){ #ifdef DBGTEST_DOT cerr << "innerproduct " << r1 << " x " << r2 << endl; #endif result = r1 * r2; } else { result = 0.0; } #ifdef DBGTEST_DOT cerr << " resultaat == " << result << endl; #endif return result; } size_t CosineTester::test( const vector& G, size_t, double ){ double denom1 = 0.0; double denom2 = 0.0; double result = 0.0; size_t TrueF; size_t i; for ( i=0, TrueF = i + offSet; i < effSize; ++i,++TrueF ){ double W = permFeatures[TrueF]->Weight(); denom1 += innerProduct( (*FV)[TrueF], (*FV)[TrueF] ) * W; denom2 += innerProduct( G[i], G[i] ) * W; result += innerProduct( (*FV)[TrueF], G[i] ) * W; } double denom = sqrt( denom1 * denom2 ); distances[effSize] = result/ (denom + Common::Epsilon); #ifdef DBGTEST cerr << "denom1 " << denom1 << endl; cerr << "denom2 " << denom2 << endl; cerr << "denom " << denom << endl; cerr << "result " << result << endl; cerr << "cosine::test() distance " << distances[effSize] << endl; #endif return effSize; } size_t DotProductTester::test( const vector& G, size_t, double ) { size_t TrueF; size_t i; for ( i=0, TrueF = i + offSet; i < effSize; ++i,++TrueF 
){ double result = innerProduct( (*FV)[TrueF], G[i] ); result *= permFeatures[TrueF]->Weight(); distances[i+1] = distances[i] + result; #ifdef DBGTEST cerr << "gewogen result " << result << endl; cerr << "dot::test() distance[" << i+1 << "]=" << distances[i+1] << endl; #endif } return effSize; } double CosineTester::getDistance( size_t pos ) const{ #ifdef DBGTEST cerr << "getDistance, maxSim = " << 1.0 << endl; cerr << " distances[" << pos << "]= " << distances[pos] << endl; #endif return 1.0 - distances[pos]; } double DotProductTester::getDistance( size_t pos ) const{ #ifdef DBGTEST_DOT cerr << "getDistance, maxSim = " << std::numeric_limits::max() << endl; cerr << " distances[" << pos << "]= " << distances[pos] << endl; #endif return (std::numeric_limits::max() - distances[pos])/std::numeric_limits::max();; } } LanguageMachines-timbl-642727d/src/Timbl.cxx000066400000000000000000000664231451477526200207050ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . 
For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #include #include #include #include #include #include #include "config.h" #include "ticcutils/CommandLine.h" #include "ticcutils/Timer.h" #include "timbl/TimblAPI.h" using namespace std; using namespace Timbl; static list ind_lines; Algorithm algorithm; bool Do_CV = false; bool Do_LOO = false; bool Do_NS = false; bool Do_Indirect = false; bool Do_Save_Perc = false; bool Do_Limit = false; size_t limit_val = 0; string I_Path = ""; string O_Path = ""; string Q_value = ""; string dataFile = ""; string TestFile = ""; string OutputFile = ""; string PercFile = ""; string MatrixInFile = ""; string MatrixOutFile = ""; string TreeInFile = ""; string TreeOutFile = ""; string levelTreeOutFile = ""; int levelTreeLevel = 0; string XOutFile = ""; string WgtInFile = ""; Weighting WgtType = UNKNOWN_W; string WgtOutFile = ""; string ProbInFile = ""; string ProbOutFile = ""; string NamesFile = ""; inline void usage_full(void){ cerr << "usage: timbl -f data-file {-t test-file} [options]" << endl; cerr << "Algorithm and Metric options:" << endl; cerr << "-a n : algorithm" << endl; cerr << " 0 or IB1 : IB1 (default)" << endl; cerr << " 1 or IG : IGTree" << endl; cerr << " 2 or TRIBL : TRIBL" << endl; cerr << " 3 or IB2 : IB2" << endl; cerr << " 4 or TRIBL2 : TRIBL2" << endl; cerr << "-m s : use feature metrics as specified in string s:" << endl << " format: GlobalMetric:MetricRange:MetricRange" << endl << " e.g.: mO:N3:I2,5-7" << endl; cerr << " C: Cosine distance. (Global only. numeric features implied)" << endl; cerr << " D: Dot product. (Global only. 
numeric features implied)" << endl; cerr << " DC: Dice Coefficient" << endl; cerr << " O: weighted Overlap (default)" << endl; cerr << " L: Levenshtein distance" << endl; cerr << " E: Euclidean Distance" << endl; cerr << " M: Modified value difference" << endl; cerr << " J: Jeffrey Divergence" << endl; cerr << " S: Jensen-Shannon Divergence" << endl; cerr << " N: numeric values" << endl; cerr << " I: Ignore named values" << endl; cerr << "-w n : Weighting" << endl; cerr << " 0 or nw: No Weighting" << endl; cerr << " 1 or gr: Weight using GainRatio (default)" << endl; cerr << " 2 or ig: Weight using InfoGain" << endl; cerr << " 3 or x2: Weight using Chi-square" << endl; cerr << " 4 or sv: Weight using Shared Variance" << endl; cerr << " 5 or sd: Weight using Standard Deviation. (all features must be numeric)" << endl; cerr << "-w f : read Weights from file 'f'" << endl; cerr << "-w f:n : read Weight n from file 'f'" << endl; cerr << "-b n : number of lines used for bootstrapping (IB2 only)" << endl; #ifdef HAVE_OPENMP cerr << "--clones= : use 'n' threads for parallel testing" << endl; #endif cerr << "--Diversify: rescale weight (see docs)" << endl; cerr << "-d val : weight neighbors as function of their distance:" << endl; cerr << " Z : equal weights to all (default)" << endl; cerr << " ID : Inverse Distance" << endl; cerr << " IL : Inverse Linear" << endl; cerr << " ED:a : Exponential Decay with factor a (no whitespace!)" << endl; cerr << " ED:a:b : Exponential Decay with factor a and b (no whitespace!)" << endl; cerr << "-k n : k nearest neighbors (default n = 1)" << endl; cerr << "-q n : TRIBL threshold at level n" << endl; cerr << "-L n : MVDM threshold at level n" << endl; cerr << "-R n : solve ties at random with seed n" << endl; cerr << "-t f : test using file 'f'" << endl; cerr << "-t leave_one_out:" << " test with Leave One Out,using IB1" << endl; cerr << " you may add -sloppy to speed up Leave One Out testing (see docs)" << endl; cerr << "-t 
cross_validate:" << " Cross Validate Test,using IB1" << endl; cerr << " @f : test using files and options described in file 'f'" << endl; cerr << " Supported options: d e F k m o p q R t u v w x % -" << endl; cerr << " -t is mandatory" << endl; cerr << "Input options:" << endl; cerr << "-f f : read from Datafile 'f'" << endl; cerr << "-f f : OR: use filenames from 'f' for CV test" << endl; cerr << "-F format : Assume the specified inputformat" << endl; cerr << " (Compact, C4.5, ARFF, Columns, Tabbed, Binary, Sparse )" << endl; cerr << "-l n : length of Features (Compact format only)" << endl; cerr << "-i f : read the InstanceBase from file 'f' " << "(skips phase 1 & 2 )" << endl; cerr << "--matrixin= read ValueDifference Matrices from file 'f'" << endl; cerr << "-u f : read value_class probabilities from file 'f'" << endl; cerr << "--occurrences=train|test|both assume occurrence info in the files." << endl; cerr << " (train: in the train file, test: in the test file, both: in both)" << endl; cerr << "-P d : read data using path 'd'" << endl; cerr << "-s : use exemplar weights from the input file" << endl; cerr << "-s0 : silently ignore the exemplar weights from the input file" << endl; cerr << "-T n : use input field 'n' as the target. 
(default is: the last field)" << endl; cerr << "Output options:" << endl; cerr << "-e n : estimate time until n patterns tested" << endl; cerr << "--Beam= : limit +v db output to n highest-vote classes" << endl; cerr << "-I f : dump the InstanceBase in file 'f'" << endl; cerr << "--matrixout= store ValueDifference Matrices in file 'f'" << endl; cerr << "-X f : dump the InstanceBase as XML in file 'f'" << endl; cerr << "-n f : create names file 'f'" << endl; cerr << "-p n : show progress every n lines (default p = 100,000)" << endl; cerr << "-U f : save value_class probabilities in file 'f'" << endl; cerr << "-V or --version : Show VERSION" << endl; cerr << "+v or -v level : set or unset verbosity level, where level is" << endl; cerr << " s: work silently" << endl; cerr << " o: show all options set" << endl; cerr << " b: show node/branch count and branching factor" << endl; cerr << " f: show Calculated Feature Weights (default)" << endl; cerr << " p: show Value Difference matrices" << endl; cerr << " e: show exact matches" << endl; cerr << " as: show advanced statistics (memory consuming)" << endl; cerr << " cm: show Confusion Matrix (implies +vas)" << endl; cerr << " cs: show per Class Statistics (implies +vas)" << endl; cerr << " cf: add confidence to the output file. (needs -G)" << endl; cerr << " di: add distance to output file" << endl; cerr << " db: add distribution of best matched to output file" << endl; cerr << " md: add matching depth to output file." << endl; cerr << " k: add a summary for all k neighbors to output file" << " (sets -x)" << endl; cerr << " n: add nearest neighbors to output file (sets -x)" << endl; cerr << " You may combine levels using '+' e.g. 
+v p+db or -v o+di" << endl; cerr << "-G : normalize distibutions (+vdb option only)" << endl << " Probability : normalize between 0 and 1" << endl << " 0 : does the same " << endl << " addFactor: : add f to all possible targets" << endl << " then normalize between 0 and 1 (default f=1.0)" << endl << " 1: : does the same" << endl << " logProbability : Add 1 to the target Weight, take the 10Log and" << endl << " then normalize between 0 and 1." << endl << " 2 : does the same" << endl; cerr << "-W f : calculate and save all Weights in file 'f'" << endl; cerr << "+% or -% : do or don't save test result (%) to file" << endl; cerr << "-o s : use s as output filename" << endl; cerr << "-O d : save output using path 'd'" << endl; cerr << "Internal representation options:" << endl; cerr << "-B n : number of bins used for discretization of numeric " << "feature values (default B=20)" << endl; cerr << "-c n : clipping frequency for prestoring MVDM matrices" << endl; cerr << "+D : store distributions on all nodes" << endl << " (necessary for using +v db with IGTree, but wastes memory otherwise)" << endl; cerr << "+H or -H : write hashed trees (default +H)" << endl; cerr << "-M n : size of MaxBests Array" << endl; cerr << "-N n : Number of features (default " << TimblAPI::Default_Max_Feats() << ")" << endl; cerr << "--limit l : limit the number of features used to the 'l' with the highest weights." 
<< endl; cerr << " (will restart Timbl with an adapted -m option)" << endl; cerr << "--Treeorder= : ordering of the Tree :" << endl; cerr << " DO: none" << endl; cerr << " GRO: using GainRatio" << endl; cerr << " IGO: using InformationGain" << endl; cerr << " 1/V: using 1/# of Vals" << endl; cerr << " G/V: using GainRatio/# of Vals" << endl; cerr << " I/V: using InfoGain/# of Vals" << endl; cerr << " X2O: using X-square" << endl; cerr << " X/V: using X-square/# of Vals" << endl; cerr << " SVO: using Shared Variance" << endl; cerr << " S/V: using Shared Variance/# of Vals" << endl; cerr << " SDO: using Standard Deviation" << endl; cerr << " SD/V: using Standard Deviation/# of Vals" << endl; cerr << " GxE: using GainRatio * SplitInfo" << endl; cerr << " IxE: using InformationGain * SplitInfo" << endl; cerr << " 1/S: using 1/SplitInfo" << endl; cerr << "+x or -x : Do or don't use the exact match shortcut " << endl << " (IB1 and IB2 only, default is -x)" << endl; } inline void usage(void){ cerr << "usage: timbl -f data-file {-t test-file}" << endl; cerr << "or see: timbl -h" << endl; cerr << " for all possible options" << endl; cerr << endl; } void get_command_lines( const string& value, list& result ){ result.clear(); ifstream ind( value.substr(1) ); // skip @ if ( ind.bad() ){ cerr << "Problem reading command-lines from file '" << value << "'" << endl; throw( "command line failure" ); } string Buf; while ( getline( ind, Buf ) ){ if ( Buf.empty() ){ continue; } result.push_back( Buf ); } } class softExit : public exception {}; class hardExit : public exception {}; void Preset_Values( TiCC::CL_Options& opts ){ string value; if ( opts.is_present( 'h' ) || opts.is_present( "help" ) ){ usage_full(); throw( softExit() ); } if ( opts.is_present( 'V' ) || opts.is_present( "version" ) ){ cerr << "TiMBL " << Timbl::BuildInfo() << endl; throw( softExit() ); } if ( opts.is_present( 'S' ) ){ cerr << "Server mode is no longer available in timbl" << endl; cerr << "Please use the 
'timblserver' command instead." << endl; throw( hardExit() ); } if ( opts.extract( 'a', value ) ){ // the user gave an algorithm if ( !string_to( value, algorithm ) ){ cerr << "illegal -a value: " << value << endl; throw( hardExit() ); // no chance to proceed } } else { algorithm = IB1; // general default } opts.insert( 'a', to_string( algorithm ), false ); if ( opts.extract( 'Z', value ) ){ // Special case // spitting neighborSets only Do_NS = true; } if ( opts.is_present( 't', value ) ){ if ( value == "cross_validate" ){ // Special case // running Cross Validation Do_CV = true; } else if ( value == "leave_one_out" ){ // Special case // running Leave_one_out Do_LOO = true; } else if ( value != "" && value[0] == '@' ){ Do_Indirect = true; opts.remove( 't' ); get_command_lines( value, ind_lines ); } if ( Do_LOO || Do_CV ){ if ( algorithm != IB1 ){ cerr << "Invalid Algorithm: Only IB1 possible for LOO and CV " << endl; throw( hardExit() ); // no chance to proceed } } } if ( opts.extract( "limit", value ) ){ Do_Limit = true; if ( !TiCC::stringTo( value, limit_val ) ){ cerr << "illegal --limit value: " << value << endl; throw( hardExit() ); // no chance to proceed } if ( Do_CV ){ cerr << "--limit is not implemented for --cross-validation" << endl; throw( hardExit() ); // no chance to proceed } } if ( opts.extract( 'P', value ) ){ I_Path = value; } if ( opts.is_present( 'O', value ) ){ // output path is needed for CV testing O_Path = value; } if ( opts.extract( 'f', value ) ){ dataFile = correct_path( value, I_Path ); } opts.is_present( 'q', Q_value ); opts.insert( 'v', "F", true ); opts.insert( 'v', "S", false ); Weighting W = GR; // default Weighting = GainRatio if ( opts.is_present( 'w', value ) ){ // user specified weighting if ( !string_to( value, W ) ){ // no valid weight, hopefully a filename return; } else { // valid Weight, but maybe a number, so replace opts.remove( 'w' ); } } opts.insert( 'w', to_string(W), false ); } void Adjust_Default_Values( 
TiCC::CL_Options& opts ){ if ( !opts.is_present( 'm' ) ){ opts.insert( 'm', "O", false ); // Default Metric = Overlap } if ( opts.extract( '%' ) ){ Do_Save_Perc = true; } } bool next_test( string& line ){ bool result = false; line = ""; if ( !ind_lines.empty() ){ line = ind_lines.front(); ind_lines.pop_front(); result = true; } return result; } bool get_file_names( TiCC::CL_Options& opts ){ TestFile = ""; OutputFile = ""; PercFile = ""; MatrixInFile = ""; MatrixOutFile = ""; TreeInFile = ""; TreeOutFile = ""; levelTreeOutFile = ""; levelTreeLevel = 0; XOutFile = ""; WgtInFile = ""; WgtType = UNKNOWN_W; WgtOutFile = ""; ProbInFile = ""; ProbOutFile = ""; NamesFile = ""; string value; if ( opts.extract( 'P', value ) || opts.extract( 'f', value ) ){ cerr << "illegal option, value = " << value << endl; return false; } if ( Do_LOO ){ if ( dataFile == "" ){ cerr << "Missing datafile name for Leave One Out test" << endl; return false; } TestFile = dataFile; } else if ( Do_CV ){ if ( dataFile == "" ){ cerr << "Missing datafile name for Cross Validation test" << endl; return false; } TestFile = dataFile; } else if ( opts.extract( 't', value ) ){ TestFile = correct_path( value, I_Path ); } if ( opts.extract( 'n', value ) ){ NamesFile = correct_path( value, O_Path ); } if ( opts.extract( "matrixout", value ) ){ MatrixOutFile = correct_path( value, O_Path ); } if ( opts.extract( "matrixin", value ) ){ MatrixInFile = correct_path( value, I_Path ); } if ( opts.extract( 'o', value ) ){ if ( Do_CV ){ cerr << "-o option not possible for Cross Validation testing" << endl; return false; } OutputFile = correct_path( value, O_Path ); } if ( opts.extract( "IL", value ) ){ vector vec = TiCC::split_at( value, ":" ); if ( vec.size() > 1 ){ levelTreeOutFile = correct_path( vec[0], O_Path ); levelTreeLevel = TiCC::stringTo( vec[1] ); } else { levelTreeOutFile = correct_path( value, O_Path ); } } if ( opts.extract( 'I', value ) ){ TreeOutFile = correct_path( value, O_Path ); } if ( 
opts.extract( 'X', value ) ){ XOutFile = correct_path( value, O_Path ); } if ( opts.extract( 'i', value ) ){ TreeInFile = correct_path( value, I_Path ); } if ( opts.extract( 'U', value ) ){ ProbOutFile = correct_path( value, O_Path ); } if ( opts.extract( 'u', value ) ){ if ( algorithm == IGTREE ){ cerr << "-u option is useless for IGtree" << endl; return false; } ProbInFile = correct_path( value, I_Path ); } if ( opts.is_present( 'W', value ) ){ WgtOutFile = correct_path( value, O_Path ); // leave the option, to signal that we need ALL feature weights } if ( opts.is_present( 'w', value ) ){ Weighting W; if ( !string_to( value, W ) ){ // No valid weighting, so assume it also has a filename vector parts = TiCC::split_at( value, ":" ); size_t num = parts.size(); if ( num == 2 ){ if ( !string_to( parts[1], W ) ){ cerr << "invalid weighting option: " << value << endl; return false; } WgtInFile = correct_path( parts[0], I_Path ); WgtType = W; opts.remove( 'w' ); } else if ( num == 1 ){ WgtInFile = correct_path( value, I_Path ); opts.remove( 'w' ); } else { cerr << "invalid weighting option: " << value << endl; return false; } } } return true; } bool Default_Output_Names( TiCC::CL_Options& opts ){ if ( OutputFile == "" && TestFile != "" ){ string value; string temp = correct_path( TestFile, O_Path, false ); temp += "."; switch ( algorithm ){ case IB1: if ( Do_LOO ){ temp += "LOO"; } else if ( Do_CV ){ temp += "CV"; } else { temp += "IB1"; } break; case IB2: temp +="IB2"; break; case IGTREE: temp += "IGTree"; break; case TRIBL: temp += "TRIBL"; if ( Q_value != "" ){ temp += "-"; temp += Q_value; } else { temp += "-0"; } break; case TRIBL2: temp += "TRIBL2"; break; case LOO: temp += "LOO"; break; case CV: temp += "CV"; break; default: temp += "ERROR"; } if ( algorithm != IGTREE ){ temp += "."; if ( opts.is_present( 'm', value ) ){ temp += value; } else { temp += "ErRoR"; } if ( opts.is_present( 'L', value ) ){ temp += ".L"; temp += value; } } temp += "."; if ( 
opts.is_present( 'w', value ) ){ temp += value; } else if ( !WgtInFile.empty() ){ temp += "ud"; } else { temp += "gr"; } if ( algorithm != IGTREE ){ if ( opts.is_present( 'k', value ) ){ temp += ".k"; temp += value; } else { temp += ".k1"; } if ( opts.is_present( 'd', value ) ){ temp += "."; temp += value; } } bool mood; if ( opts.is_present( 'x', value, mood ) ){ if ( mood ){ temp += ".X"; } } OutputFile = temp + ".out"; if ( Do_Save_Perc ){ PercFile = temp + ".%"; } } else if ( OutputFile != "" ){ if ( Do_Save_Perc ){ PercFile = OutputFile; string::size_type pos = PercFile.rfind( '.' ); if ( pos != string::npos ){ PercFile.resize( pos ); } PercFile += ".%"; } } return true; } void Do_Test( TimblAPI *Run ){ if ( WgtInFile != "" ) { Run->GetWeights( WgtInFile, WgtType ); } if ( ind_lines.empty() ){ // just one test... if ( ProbInFile != "" ){ Run->GetArrays( ProbInFile ); } if ( MatrixInFile != "" ) { Run->GetMatrices( MatrixInFile ); } if ( Do_NS ) { Run->NS_Test( TestFile, OutputFile ); } else { Run->Test( TestFile, OutputFile, PercFile ); } } else { // multiple tests from indirect file string tmp_line; while ( next_test( tmp_line) ){ TiCC::CL_Options opts( timbl_indirect_opts, "" ); try { opts.init( tmp_line ); Adjust_Default_Values( opts ); } catch ( TiCC::OptionError& e ){ cerr << e.what() << endl; cerr << "Warning: Skipped a line from indirect testfile:\n'" << tmp_line << "'" << endl; continue; } if ( !get_file_names( opts ) || TestFile == "" ){ cerr << "Warning: Skipped a line from indirect testfile:\n'" << tmp_line << "'" << endl; if ( TestFile == "" ){ cerr << "missing a Testfile name (-t option)" << endl; } } else if ( Run->SetIndirectOptions( opts ) ){ Default_Output_Names( opts ); if ( WgtInFile != "" ) { if ( !Run->GetWeights( WgtInFile, WgtType ) ){ continue; } } if ( ProbInFile != "" ){ Run->GetArrays( ProbInFile ); } if ( MatrixInFile != "" ) { Run->GetMatrices( MatrixInFile ); } if ( Do_NS ){ Run->NS_Test( TestFile, OutputFile ); } else { 
Run->Test( TestFile, OutputFile, PercFile ); } } else { cerr << "Warning: Skipped a line from indirect testfile:\n'" << tmp_line << "'" << endl; } } } } bool checkInputFile( const string& name ){ if ( !name.empty() ){ ifstream is( name ); if ( !is.good() ){ cerr << "unable to find or use input file '" << name << "'" << endl; return false; } } return true; } bool checkOutputFile( const string& name ){ if ( !name.empty() ){ ofstream os( name ); if ( !os.good() ) { cerr << "unable to find or use output file" << name << "'" << endl; return false; } } return true; } int main(int argc, char *argv[]){ try { // Start. // cerr << "TiMBL " << TimblAPI::VersionInfo() << " (c) CLST/ILK/CLIPS 1998 - 2023.\n" << "Tilburg Memory Based Learner\n" << "Centre for Language and Speech Technology, Radboud University\n" << "Induction of Linguistic Knowledge Research Group, Tilburg University\n" << "CLiPS Computational Linguistics Group, University of Antwerp" << endl; cerr << TiCC::Timer::now() << endl << endl; if ( argc <= 1 ){ usage(); return 1; } TiCC::CL_Options opts( timbl_short_opts, timbl_long_opts ); try { opts.init( argc, argv ); } catch ( TiCC::OptionError& e ){ cerr << e.what() << endl; usage(); return 666; } Preset_Values( opts ); Adjust_Default_Values( opts ); if ( !get_file_names( opts ) ){ return 2; } TimblAPI *Run = new TimblAPI( opts ); if ( !Run->isValid() ){ delete Run; usage(); return 3; } Default_Output_Names( opts ); vector mas = opts.getMassOpts(); if ( !mas.empty() ){ cerr << "unknown value in option string: " << mas[0] << endl; usage(); return 33; } if ( Do_CV ){ if ( checkInputFile( TestFile ) ){ Run->CVprepare( WgtInFile, WgtType, ProbInFile ); Run->Test( TestFile, "" ); } delete Run; } else { bool do_test = false; if ( !checkInputFile( TreeInFile ) || !checkInputFile( dataFile ) || !checkInputFile( TestFile ) || !checkInputFile( WgtInFile ) || !checkInputFile( MatrixInFile ) || !checkInputFile( ProbInFile ) || !checkOutputFile( TreeOutFile ) || 
!checkOutputFile( levelTreeOutFile ) || !checkOutputFile( XOutFile ) || !checkOutputFile( NamesFile ) || !checkOutputFile( WgtOutFile ) || !checkOutputFile( MatrixOutFile ) || !checkOutputFile( ProbOutFile ) ){ delete Run; return 3; } // normal cases.... if ( TreeInFile == "" ){ // normal case // learning and maybe a testing phase if ( WgtOutFile != "" ) { Run->SetOptions( "ALL_WEIGHTS: true" ); } if ( Run->Prepare( dataFile ) ){ if ( Do_Limit ){ if ( Run->NumOfFeatures() < limit_val ){ cerr << "value of --limit is larger then the number of features!" << endl; return 32; } string m_val = Run->extract_limited_m( limit_val ); // cerr << endl << endl << "NEW M: " << m_val << endl << endl; opts.extract( 'm' ); opts.insert( 'm', m_val, true ); cerr << "\t--limit=" << limit_val << " is specified, so we retrain " << "the data with option: -m" << m_val << endl; delete Run; Run = new TimblAPI( opts ); } if ( WgtOutFile != "" ) { Run->SaveWeights( WgtOutFile ); } // If we want to create a namesfile, do it here. // if ( NamesFile != "" ) { Run->WriteNamesFile( NamesFile ); } if ( ProbOutFile != "" ){ Run->WriteArrays( ProbOutFile ); } do_test = TestFile != "" || Do_Indirect; if ( do_test || // something to test ? 
MatrixOutFile != "" || // or at least to produce TreeOutFile != "" || // or at least to produce levelTreeOutFile != "" || // or at least to produce XOutFile != "" ){ // or at least to produce bool ok = true; if ( WgtInFile != "" ){ if ( Run->GetWeights( WgtInFile, WgtType ) ){ cerr << "Calculated weights replaced by:" << endl; Run->ShowWeights( cerr ); } else { cerr << "problems reading weights" << endl; ok = do_test = false; } } if ( ok && Run->Learn( dataFile ) ){ if ( TreeOutFile != "" ){ Run->WriteInstanceBase( TreeOutFile ); } if ( levelTreeOutFile != "" ){ Run->WriteInstanceBaseLevels( levelTreeOutFile, levelTreeLevel ); } } else { do_test = false; // no testing because of problems } } } } else if ( !dataFile.empty() && !( TestFile.empty() && TreeOutFile.empty() && levelTreeOutFile.empty() ) ){ // it seems we want to expand our tree do_test = false; if ( Run->GetInstanceBase( TreeInFile ) ) { if ( Run->Expand( dataFile ) ){ if ( !TreeOutFile.empty() ){ Run->WriteInstanceBase( TreeOutFile ); } if ( levelTreeOutFile != "" ){ Run->WriteInstanceBaseLevels( levelTreeOutFile, levelTreeLevel ); } do_test = !TestFile.empty(); } } } else { // normal case // running a testing phase from recovered tree if ( TestFile.empty() && XOutFile == "" && !Do_Indirect ){ cerr << "reading an instancebase(-i option) without a testfile (-t option) is useless" << endl; do_test = false; } else { do_test = true; } if ( do_test ){ do_test = Run->GetInstanceBase( TreeInFile ); } } if ( do_test ){ Do_Test( Run ); } if ( Run->isValid() ) { if ( XOutFile != "" ){ Run->WriteInstanceBaseXml( XOutFile ); } if ( MatrixOutFile != "" ) { Run->WriteMatrices( MatrixOutFile ); } } if ( !do_test || !Run->isValid() ){ delete Run; return EXIT_FAILURE; } delete Run; } return EXIT_SUCCESS; } catch( const softExit& e ){ return EXIT_SUCCESS; } catch( const std::string& what ){ cerr << what << ", sorry" << endl; } catch( const std::bad_alloc&){ cerr << "ran out of memory somewhere" << endl; cerr << "timbl 
terminated, Sorry for that" << endl; } catch( const std::exception& e ){ cerr << e.what() << ", sorry" << endl; } return EXIT_FAILURE; } LanguageMachines-timbl-642727d/src/TimblAPI.cxx000066400000000000000000000405031451477526200212260ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #include #include #include "timbl/Common.h" #include "timbl/MsgClass.h" #include "timbl/Types.h" #include "timbl/Options.h" #include "timbl/Instance.h" #include "timbl/neighborSet.h" #include "timbl/BestArray.h" #include "timbl/Statistics.h" #include "timbl/MBLClass.h" #include "ticcutils/CommandLine.h" #include "timbl/GetOptClass.h" using namespace std; using namespace icu; #include "timbl/TimblAPI.h" #include "timbl/TimblExperiment.h" namespace Timbl { TimblExperiment *Create_Pimpl( AlgorithmType algo, const string& ex_name, GetOptClass *opt ){ TimblExperiment *result = NULL; switch ( algo ){ case IB1_a: result = new IB1_Experiment( opt->MaxFeatures(), ex_name ); break; case IB2_a: result = new IB2_Experiment( opt->MaxFeatures(), ex_name ); break; case IGTREE_a: result = new IG_Experiment( opt->MaxFeatures(), ex_name ); break; case TRIBL_a: result = new TRIBL_Experiment( 
opt->MaxFeatures(), ex_name ); break; case TRIBL2_a: result = new TRIBL2_Experiment( opt->MaxFeatures(), ex_name ); break; case LOO_a: result = new LOO_Experiment( opt->MaxFeatures(), ex_name ); break; case CV_a: result = new CV_Experiment( opt->MaxFeatures(), ex_name ); break; default: cerr << "wrong algorithm to create TimblAPI" << endl; return NULL; } result->setOptParams( opt ); return result; } TimblAPI::TimblAPI( const TimblAPI& exp ): pimpl( exp.pimpl->splitChild() ), i_am_fine(true) { } TimblAPI::TimblAPI( ): pimpl( 0 ), i_am_fine(false) { } TimblAPI::TimblAPI( const TiCC::CL_Options& opts, const string& name ): pimpl(), i_am_fine(false) { GetOptClass *OptPars = new GetOptClass( opts ); if ( !OptPars->parse_options( opts ) ){ delete OptPars; } else if ( OptPars->Algo() != Unknown_a ){ pimpl = Create_Pimpl( OptPars->Algo(), name, OptPars ); } else { pimpl = Create_Pimpl( IB1_a, name, OptPars ); } i_am_fine = (pimpl != NULL); } TimblAPI::TimblAPI( const string& pars, const string& name ): pimpl(), i_am_fine(false){ TiCC::CL_Options Opts; Opts.init( pars ); GetOptClass *OptPars = new GetOptClass( Opts ); if ( OptPars->parse_options( Opts ) ){ if ( OptPars->Algo() != Unknown_a ){ pimpl = Create_Pimpl( OptPars->Algo(), name, OptPars ); } else { pimpl = Create_Pimpl( IB1_a, name, OptPars ); } } i_am_fine = (pimpl != NULL); } TimblAPI::~TimblAPI(){ delete pimpl; } bool TimblAPI::Valid() const { return i_am_fine && pimpl && !pimpl->ExpInvalid(); } bool TimblAPI::isValid() const { return i_am_fine && pimpl && !pimpl->ExpInvalid(false); } const string to_string( const Algorithm A ) { string result; switch ( A ){ case IB1: result = "IB1"; break; case IB2: result = "IB2"; break; case IGTREE: result = "IGTREE"; break; case TRIBL: result = "TRIBL"; break; case TRIBL2: result = "TRIBL2"; break; case LOO: result = "LOO"; break; case CV: result = "CV"; break; default: cerr << "invalid algorithm in switch " << endl; result = "Unknown Algorithm"; } return result; } bool 
string_to( const string& s, Algorithm& A ){ A = UNKNOWN_ALG; AlgorithmType tmp; if ( TiCC::stringTo( s, tmp ) ){ switch ( tmp ){ case IB1_a: A = IB1; break; case IB2_a: A = IB2; break; case IGTREE_a: A = IGTREE; break; case TRIBL_a: A = TRIBL; break; case TRIBL2_a: A = TRIBL2; break; case LOO_a: A = LOO; break; case CV_a: A = CV; break; default: return false; } return true; } return false; } const string to_string( const Weighting W ) { string result; switch ( W ){ case UD: result = "ud"; break; case NW: result = "nw"; break; case GR: result = "gr"; break; case IG: result = "ig"; break; case X2: result = "x2"; break; case SV: result = "sv"; break; case SD: result = "sd"; break; default: cerr << "invalid Weighting in switch " << endl; result = "Unknown Weight"; } return result; } inline Weighting WT_to_W( WeightType wt ){ Weighting w; switch ( wt ){ case UserDefined_w: w = UD; break; case No_w: w = NW; break; case GR_w: w = GR; break; case IG_w: w = IG; break; case X2_w: w = X2; break; case SV_w: w = SV; break; case SD_w: w = SD; break; default: w = UNKNOWN_W; } return w; } bool string_to( const string& s, Weighting& w ){ w = UNKNOWN_W; WeightType tmp; if ( TiCC::stringTo( s, tmp ) ){ w = WT_to_W( tmp ); if ( w == UNKNOWN_W ){ return false; } return true; } return false; } Algorithm TimblAPI::Algo() const { Algorithm result = UNKNOWN_ALG; if ( pimpl ){ switch ( pimpl->Algorithm() ){ case IB1_a: result = IB1; break; case IB2_a: result = IB2; break; case IGTREE_a: result = IGTREE; break; case TRIBL_a: result = TRIBL; break; case TRIBL2_a: result = TRIBL2; break; case LOO_a: result = LOO; break; case CV_a: result = CV; break; default: cerr << "invalid algorithm in switch " << endl; break; } } return result; } bool TimblAPI::Learn( const string& s ){ if ( Valid() ){ return pimpl->Learn( s ); } else { return false; } } bool TimblAPI::Prepare( const string& s ){ if ( Valid() ){ return pimpl->Prepare( s ); } else { return false; } } bool TimblAPI::CVprepare( const string& 
wf, Weighting w, const string& pf ){ if ( Valid() ){ WeightType tmp; switch ( w ){ case UNKNOWN_W: tmp = Unknown_w; break; case NW: tmp = No_w; break; case GR: tmp = GR_w; break; case IG: tmp = IG_w; break; case X2: tmp = X2_w; break; case SV: tmp = SV_w; break; case SD: tmp = SD_w; break; default: return false; } return pimpl->CVprepare( wf, tmp, pf ); } else { return false; } } bool TimblAPI::Increment_u( const UnicodeString& us ){ return Valid() && pimpl->Increment( us ); } bool TimblAPI::Increment( const string& s ){ return Valid() && pimpl->Increment( TiCC::UnicodeFromUTF8(s) ); } bool TimblAPI::Decrement_u( const UnicodeString& us ){ return Valid() && pimpl->Decrement( us ); } bool TimblAPI::Decrement( const string& s ){ return Valid() && pimpl->Decrement( TiCC::UnicodeFromUTF8(s) ); } bool TimblAPI::Expand( const string& s ){ return Valid() && pimpl->Expand( s ); } bool TimblAPI::Remove( const string& s ){ return Valid() && pimpl->Remove( s ); } bool TimblAPI::Test( const string& in, const string& out, const string& p ){ if ( !Valid() ){ return false; } else { if ( in.empty() ){ return false; } if ( out.empty() && Algo() != CV ){ return false; } if ( !pimpl->Test( in, out ) ){ return false; } return pimpl->createPercFile( p ); } } bool TimblAPI::NS_Test( const string& in, const string& out ){ if ( !Valid() ){ return false; } else { if ( in.empty() ){ return false; } if ( out.empty() && Algo() != CV ){ return false; } return pimpl->NS_Test( in, out ); } } const TargetValue *TimblAPI::Classify( const string& s, const ClassDistribution *& db, double& di ){ if ( Valid() ){ return pimpl->Classify( TiCC::UnicodeFromUTF8(s), db, di ); } else { db = NULL; di = DBL_MAX; } return NULL; } const TargetValue *TimblAPI::Classify( const icu::UnicodeString& s, const ClassDistribution *& db, double& di ){ if ( Valid() ){ return pimpl->Classify( s, db, di ); } else { db = NULL; di = DBL_MAX; } return NULL; } const TargetValue *TimblAPI::Classify( const string& s ){ if ( 
Valid() ){ return pimpl->Classify( TiCC::UnicodeFromUTF8(s) ); } return NULL; } const TargetValue *TimblAPI::Classify( const icu::UnicodeString& s ){ if ( Valid() ){ return pimpl->Classify( s ); } return NULL; } const TargetValue *TimblAPI::Classify( const string& s, const ClassDistribution *& db ){ if ( Valid() ){ return pimpl->Classify( TiCC::UnicodeFromUTF8(s), db ); } else { db = NULL; } return NULL; } const TargetValue *TimblAPI::Classify( const icu::UnicodeString& s, const ClassDistribution *& db ){ if ( Valid() ){ return pimpl->Classify( s, db ); } else { db = NULL; } return NULL; } const TargetValue *TimblAPI::Classify( const string& s, double& di ){ if ( Valid() ){ return pimpl->Classify( TiCC::UnicodeFromUTF8(s), di ); } else { di = DBL_MAX; } return NULL; } const TargetValue *TimblAPI::Classify( const icu::UnicodeString& s, double& di ){ if ( Valid() ){ return pimpl->Classify( s, di ); } else { di = DBL_MAX; } return NULL; } const neighborSet *TimblAPI::classifyNS( const icu::UnicodeString& s ){ const neighborSet *ns = 0; if ( Valid() ){ ns = pimpl->NB_Classify( s ); } return ns; } bool TimblAPI::classifyNS( const icu::UnicodeString& s, neighborSet& ns ){ const neighborSet *b = classifyNS( s ); if ( b != 0 ){ ns = *b; return true; } return false; } const Instance *TimblAPI::lastHandledInstance() const { if ( Valid() ){ return &pimpl->CurrInst; } return 0; } const Targets& TimblAPI::myTargets() const{ if ( Valid() ){ return pimpl->targets; } abort(); } bool TimblAPI::Classify( const string& s, string& cls ){ string dummy; double f; return Valid() && pimpl->Classify( s, cls, dummy, f ); } bool TimblAPI::Classify( const icu::UnicodeString& s, icu::UnicodeString& cls ){ return Valid() && pimpl->Classify( s, cls ); } bool TimblAPI::Classify( const string& s, string& cls, double &f ) { string dummy; return Valid() && pimpl->Classify( s, cls, dummy, f ); } bool TimblAPI::Classify( const string& s, string& cls, string& dist, double &f ){ return Valid() && 
pimpl->Classify( s, cls, dist, f ); } size_t TimblAPI::matchDepth() const { if ( Valid() ){ return pimpl->matchDepth(); } else { return -1; } } double TimblAPI::confidence() const { if ( Valid() ){ return pimpl->confidence(); } else { return -1; } } bool TimblAPI::matchedAtLeaf() const { return Valid() && pimpl->matchedAtLeaf(); } bool TimblAPI::initExperiment( ){ if ( Valid() ){ pimpl->initExperiment( true ); return true; } else { return false; } } InputFormatType TimblAPI::getInputFormat() const { if ( Valid() ){ return pimpl->InputFormat(); } else { return UnknownInputFormat; } } bool TimblAPI::SaveWeights( const string& f ){ if ( Valid() ){ return pimpl->SaveWeights( f ); } else { return false; } } bool TimblAPI::GetWeights( const string& f, Weighting w ){ if ( Valid() ){ WeightType tmp; switch ( w ){ case UNKNOWN_W: tmp = Unknown_w; break; case NW: tmp = No_w; break; case GR: tmp = GR_w; break; case IG: tmp = IG_w; break; case X2: tmp = X2_w; break; case SV: tmp = SV_w; break; case SD: tmp = SD_w; break; default: return false; } return pimpl->GetWeights( f, tmp ); } else { return false; } } double TimblAPI::GetAccuracy() { if (Valid()) { return pimpl->stats.testedCorrect()/(double) pimpl->stats.dataLines(); } else { return -1; } } Weighting TimblAPI::CurrentWeighting() const{ if ( Valid() ){ return WT_to_W( pimpl->CurrentWeighting() ); } else { return UNKNOWN_W; } } Weighting TimblAPI::GetCurrentWeights( std::vector& res ) const { res.clear(); if ( Valid() && pimpl->GetCurrentWeights( res ) ){ return CurrentWeighting(); } return UNKNOWN_W; } bool TimblAPI::SetOptions( const string& argv ){ return Valid() && pimpl->SetOptions( argv ); } bool TimblAPI::SetIndirectOptions( const TiCC::CL_Options& opts ){ return Valid() && pimpl->IndirectOptions( opts ); } string TimblAPI::ExpName() const { if ( pimpl ) { // return the name, even when !Valid() return pimpl->ExpName(); } else { return "ERROR"; } } bool TimblAPI::WriteNamesFile( const string& f ){ if ( Valid() ) { 
return pimpl->WriteNamesFile( f ); } else { return false; } } bool TimblAPI::WriteInstanceBase( const string& f ){ if ( Valid() ){ return pimpl->WriteInstanceBase( f ); } else { return false; } } bool TimblAPI::WriteInstanceBaseXml( const string& f ){ if ( Valid() ){ return pimpl->WriteInstanceBaseXml( f ); } else { return false; } } bool TimblAPI::WriteInstanceBaseLevels( const string& f, unsigned int l ){ if ( Valid() ){ return pimpl->WriteInstanceBaseLevels( f, l ); } else { return false; } } bool TimblAPI::GetInstanceBase( const string& f ){ if ( Valid() ){ if ( !pimpl->ReadInstanceBase( f ) ){ i_am_fine = false; } return Valid(); } else { return false; } } bool TimblAPI::WriteArrays( const string& f ){ if ( Valid() ){ return pimpl->WriteArrays( f ); } else { return false; } } bool TimblAPI::GetArrays( const string& f ){ if ( Valid() ){ return pimpl->GetArrays( f ); } else { return false; } } bool TimblAPI::WriteMatrices( const string& f ){ return Valid() && pimpl->WriteMatrices( f ); } bool TimblAPI::GetMatrices( const string& f ){ return Valid() && pimpl->GetMatrices( f ); } bool TimblAPI::ShowBestNeighbors( ostream& os ) const{ return Valid() && pimpl->showBestNeighbors( os ); } bool TimblAPI::ShowWeights( ostream& os ) const{ return Valid() && pimpl->ShowWeights( os ); } bool TimblAPI::ShowOptions( ostream& os ) const{ return Valid() && pimpl->ShowOptions( os ); } bool TimblAPI::ShowSettings( ostream& os ) const{ return Valid() && pimpl->ShowSettings( os ); } bool TimblAPI::ShowIBInfo( ostream& os ) const{ if ( Valid() ){ pimpl->IBInfo( os ); return true; } else { return false; } } bool TimblAPI::ShowStatistics( ostream& os ) const{ return Valid() && pimpl->showStatistics( os ); } string TimblAPI::extract_limited_m( int lim ) const { if ( Valid() ){ return pimpl->extract_limited_m( lim ); } else { return "error"; } } string TimblAPI::VersionInfo( bool full ){ return Common::VersionInfo( full ); } size_t TimblAPI::Default_Max_Feats(){ return 
Common::DEFAULT_MAX_FEATS; } size_t TimblAPI::NumOfFeatures() const { if ( Valid() ){ return pimpl->NumOfFeatures(); } else { return -1; } } } LanguageMachines-timbl-642727d/src/TimblExperiment.cxx000066400000000000000000002127021451477526200227370ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #include #include #include #include #include #include #include #include "config.h" #include "timbl/MsgClass.h" #include "timbl/Common.h" #include "timbl/Types.h" #include "timbl/Options.h" #include "timbl/Instance.h" #include "timbl/Choppers.h" #include "timbl/Metrics.h" #include "timbl/Statistics.h" #include "timbl/neighborSet.h" #include "timbl/BestArray.h" #include "timbl/IBtree.h" #include "timbl/MBLClass.h" #include "timbl/GetOptClass.h" #include "timbl/TimblExperiment.h" #include "ticcutils/XMLtools.h" #include "ticcutils/Timer.h" #include "ticcutils/PrettyPrint.h" #include "ticcutils/CommandLine.h" #ifdef HAVE_OPENMP #include #endif using namespace std; using namespace icu; using namespace nlohmann; namespace Timbl { using TiCC::operator<<; const string timbl_short_opts = "a:b:B:c:C:d:De:f:F:G::hHi:I:k:l:L:m:M:n:N:o:O:p:P:q:QR:s::t:T:u:U:v:Vw:W:xX:Z%"; const 
string timbl_long_opts = ",Beam:,clones:,Diversify,occurrences:,sloppy::,silly::,Threshold:,Treeorder:,matrixin:,matrixout:,version,help,limit:"; const string timbl_serv_short_opts = "C:d:G::k:l:L:p:Qv:x"; const string timbl_indirect_opts = "d:e:G:k:L:m:o:p:QR:t:v:w:x%"; resultStore::~resultStore( ) { clear(); } bool resultStore::reset( int _beam, normType _norm, double _factor, const Targets& _targets ) { clear(); beam = _beam; norm = _norm; factor = _factor; targets = &_targets; bool result = true; if ( norm != noNorm && beam != 0 ){ norm = noNorm; result = false; } return result; } void resultStore::clear( ) { delete dist; dist = 0; if ( disposable ){ delete rawDist; } best_target = 0; rawDist = 0; beam = 0; isTop = false; resultCache.clear(); } const WClassDistribution *resultStore::getResultDist() { if ( rawDist && !dist ){ prepare(); } return dist; } string resultStore::getResult() { if ( isTop ){ if ( topCache.empty() ){ if ( dist ) { topCache = dist->DistToStringW( beam ); } else { topCache = "{}"; } } resultCache = topCache; } else if ( resultCache.empty() ){ if ( dist ) { resultCache = dist->DistToStringW( beam ); } else { resultCache = "{}"; } } return resultCache; } void resultStore::addConstant( const ClassDistribution *vd, const TargetValue *best_result ) { rawDist = vd; best_target = best_result; disposable = false; } void resultStore::addTop( const ClassDistribution *vd, const TargetValue *best_result ) { rawDist = vd; best_target = best_result; disposable = false; isTop = true; } void resultStore::addDisposable( ClassDistribution *vd, const TargetValue *best_result ) { rawDist = vd; best_target = best_result; disposable = true; } void resultStore::prepare() { if ( isTop && !topCache.empty() ){ return; } if ( !dist && rawDist ){ if ( !disposable ){ dist = rawDist->to_WVD_Copy(); } else { dist = dynamic_cast( const_cast(rawDist) ); rawDist = 0; } } } void resultStore::normalize() { if ( dist ){ switch ( norm ){ case noNorm: break; case 
probabilityNorm: dist->Normalize(); break; case addFactorNorm: dist->Normalize_1( factor, *targets ); break; case logProbNorm: dist->Normalize_2(); break; default: throw runtime_error( "unimplemented case " + TiCC::toString(norm) ); break; } } // silently do nothing when dist == 0; } void TimblExperiment::normalizeResult(){ bestResult.prepare(); bestResult.normalize(); } TimblExperiment::TimblExperiment( const AlgorithmType Alg, const string& s ): MBLClass( s ), Initialized( false ), OptParams( NULL ), algorithm( Alg ), ibCount( 0 ), confusionInfo( 0 ), match_depth(-1), last_leaf(true), estimate( 0 ), numOfThreads( 1 ) { Weighting = GR_w; } TimblExperiment::~TimblExperiment() { delete OptParams; delete confusionInfo; } TimblExperiment& TimblExperiment::operator=( const TimblExperiment&in ){ if ( this != &in ){ MBLClass::operator=(in); Initialized = false; OptParams = NULL; algorithm = in.algorithm; CurrentDataFile = in.CurrentDataFile; WFileName = in.WFileName; estimate = in.estimate; Weighting = in.Weighting; confusionInfo = 0; numOfThreads = in.numOfThreads; } return *this; } TimblExperiment *TimblExperiment::splitChild( ) const { TimblExperiment *result = 0; switch ( Algorithm() ){ case IB1_a: case TRIBL_a: case TRIBL2_a: case IGTREE_a: result = clone(); break; default: FatalError( "You may not split experiments for Special cases like " + TiCC::toString(algorithm) ); exit(EXIT_FAILURE); // never reached, (FatalError throws) but satisfies // scan-build } *result = *this; if ( OptParams ){ result->OptParams = OptParams->Clone( 0 ); } result->WFileName = WFileName; result->CurrentDataFile.clear(); result->InstanceBase->CleanPartition( false ); result->InstanceBase = 0; result->is_synced = true; return result; } void TimblExperiment::initExperiment( bool all_vd ){ if ( !ExpInvalid() ){ match_depth = NumOfFeatures(); if ( !MBL_init ){ // do this only when necessary stats.clear(); delete confusionInfo; confusionInfo = 0; if ( Verbosity(ADVANCED_STATS) ){ confusionInfo 
= new ConfusionMatrix( targets.num_of_values() ); } initDecay(); calculate_fv_entropy( true ); if (!is_copy ){ if ( ib2_offset != 0 ){ // // isn't this obsolete for the new IB2 implementation? // You should think so, But I see small differences :{ // So leave this code for now // // invalidate MVDM matrices, they might be changing in size for ( size_t j=0; j < NumOfFeatures(); ++j ){ if ( !features[j]->Ignore() ){ features[j]->clear_matrix(); } } } if ( initProbabilityArrays( all_vd ) ){ calculatePrestored(); } else { Error( "not enough memory for Probability Arrays in (" + string(__FILE__) + "," + TiCC::toString(__LINE__) + ")\n" + "ABORTING now" ); throw std::bad_alloc(); } InitWeights(); if ( do_diversify ){ diverseWeights(); } } srand( random_seed ); initTesters(); MBL_init = true; } } } bool TimblExperiment::skipARFFHeader( istream& is ){ string Buffer; while ( getline( is, Buffer ) && !compare_nocase_n( "@DATA", Buffer) ) stats.addSkipped(); return true; } bool TimblExperiment::nextLine( istream& datafile, UnicodeString& Line ){ int dummy; return nextLine( datafile, Line, dummy ); } bool TimblExperiment::nextLine( istream& datafile, UnicodeString& Line, int& cnt ){ // Function that takes a line from a file, skipping comment // returns true if some line is found // bool found = false; cnt = 0; while ( !found && TiCC::getline( datafile, Line ) ){ ++cnt; if ( empty_line( Line, InputFormat() ) ){ stats.addSkipped(); continue; } else { found = true; } } return found; } bool TimblExperiment::chopLine( const UnicodeString& line ){ if ( !Chop( line ) ){ stats.addSkipped(); return false; } else { stats.addLine(); return true; } } /* First learning Phase: Learning of the names of the FeatureValues and TargetValues also their distribution etc. 
*/ bool TimblExperiment::Prepare( const string& FileName, bool warnOnSingleTarget, bool expand ){ assert( runningPhase == LearnWords ); bool result = false; if ( !FileName.empty() && ConfirmOptions() ){ if ( !ExpInvalid() ){ if ( !expand && ( Options.TableFrozen() || NumOfFeatures() != 0 ) ){ Error( "couldn't learn from file '" + FileName + "'\nInstanceBase already filled" ); } else { size_t Num = examineData( FileName ); if ( Num == 0 ){ Error( "Unable to initialize from file :'" + FileName + "'\n" ); } else { if ( !Verbosity(SILENT) ){ *mylog << "Examine datafile '" << FileName << "' gave the following results:" << endl << "Number of Features: " << Num << endl; showInputFormat( *mylog ); } if ( NumOfFeatures() == 0 ){ Initialize( Num ); } CurrentDataFile = FileName; if ( Verbosity(OPTIONS) ){ ShowSettings( *mylog ); } // Open the file. // ifstream datafile( FileName, ios::in); stats.clear(); UnicodeString Buffer; if ( InputFormat() == ARFF ){ skipARFFHeader( datafile ); } if ( !nextLine( datafile, Buffer ) ){ Error( "no useful data in: " + FileName ); result = false; } else if ( !chopLine( Buffer ) ){ Error( "no useful data in: " + FileName ); result = false; } else { TiCC::Timer prepT; prepT.start(); bool go_on = true; if ( !Verbosity(SILENT) ){ Info( "Phase 1: Reading Datafile: " + FileName ); time_stamp( "Start: ", 0 ); } while( go_on ){ chopped_to_instance( LearnWords ); // Progress update. 
// if ( !Verbosity(SILENT) ){ if ( ( stats.dataLines() % Progress() ) == 0 ){ time_stamp( "Examining: ", stats.dataLines() ); } } bool found = false; while ( !found && nextLine( datafile, Buffer ) ){ found = chopLine( Buffer ); if ( !found ){ Warning( "datafile, skipped line #" + TiCC::toString( stats.totalLines() ) + "\n" + TiCC::UnicodeToUTF8(Buffer) ); } } go_on = found; } if ( stats.dataLines() < 1 ){ Error( "no useful data in: " + FileName ); } else { if ( !Verbosity(SILENT) ){ time_stamp( "Finished: ", stats.totalLines() ); time_stamp( "Calculating Entropy " ); if ( Verbosity(FEAT_W) ){ *mylog << "Lines of data : " << stats.dataLines() << endl; if ( stats.skippedLines() != 0 ) { *mylog << "SkippedLines : " << stats.skippedLines() << endl; } LearningInfo( *mylog ); } } else { calculate_fv_entropy( false ); } prepT.stop(); if ( !Verbosity(SILENT) ){ Info( "Preparation took " + prepT.toString() ); } if ( warnOnSingleTarget && targets.EffectiveValues() <=1 ){ Warning( "Training file contains only 1 class." ); } result = true; } } } } } } return result; } bool TimblExperiment::CVprepare( const string&, WeightType, const string& ){ Error( "CVprepare called for NON CV experiment" ); return false; } ostream& operator<< ( ostream& os, const fileIndex& fi ){ for ( const auto& it : fi ){ os << "<"; os << it.first << "," << it.second; os << ">"; } return os; } bool TimblExperiment::learnFromFileIndex( const fileIndex& fi, istream& datafile ){ InstanceBase_base *outInstanceBase = 0; for ( const auto& fit : fi ){ for ( const auto& sit : fit.second ){ datafile.clear(); datafile.seekg( sit ); UnicodeString Buffer; nextLine( datafile, Buffer ); chopLine( Buffer ); // Progress update. 
// if ( ( stats.dataLines() % Progress() ) == 0 ){ time_stamp( "Learning: ", stats.dataLines() ); } chopped_to_instance( TrainWords ); if ( !outInstanceBase ){ outInstanceBase = InstanceBase->clone(); } // cerr << "add instance " << &CurrInst << endl; if ( !outInstanceBase->AddInstance( CurrInst ) ){ Warning( "deviating exemplar weight in:\n" + TiCC::UnicodeToUTF8(Buffer) + "\nIgnoring the new weight" ); } } } if ( outInstanceBase ){ if ( !InstanceBase->MergeSub( outInstanceBase ) ){ FatalError( "Merging InstanceBases failed. PANIC" ); return false; } delete outInstanceBase; } return true; } bool TimblExperiment::ClassicLearn( const string& FileName, bool warnOnSingleTarget ){ bool result = true; if ( is_synced ){ CurrentDataFile = FileName; // assume magic! } if ( CurrentDataFile.empty() ) { if ( FileName.empty() ){ Warning( "unable to build an InstanceBase: No datafile defined yet" ); result = false; } else if ( !Prepare( FileName, warnOnSingleTarget ) || ExpInvalid() ){ result = false; } } else if ( !FileName.empty() && CurrentDataFile != FileName ){ Error( "Unable to Learn from file '" + FileName + "'\n" "while previously instantiated from file '" + CurrentDataFile + "'" ); result = false; } if ( result ) { TiCC::Timer learnT; learnT.start(); InitInstanceBase(); if ( ExpInvalid() ){ return false; } if ( EffectiveFeatures() < 2 ) { fileIndex fmIndex; // TiCC::Timer t; // t.start(); result = build_file_index( CurrentDataFile, fmIndex ); // t.stop(); // cerr << "indexing took " << t << endl; // totalT.start(); if ( result ){ // cerr << "index = " << fmIndex << endl; stats.clear(); if ( !Verbosity(SILENT) ) { Info( "\nPhase 3: Learning from Datafile: " + CurrentDataFile ); time_stamp( "Start: ", 0 ); } // Open the file. 
// ifstream datafile( CurrentDataFile, ios::in); // learnFromFileIndex( fmIndex, datafile ); } } else { fileDoubleIndex fIndex; // TiCC::Timer t; // t.start(); result = build_file_multi_index( CurrentDataFile, fIndex ); // cerr << "index: " << fIndex << endl; // t.stop(); // cerr << "indexing took " << t << endl; // totalT.start(); if ( result ){ stats.clear(); if ( !Verbosity(SILENT) ) { Info( "\nPhase 3: Learning from Datafile: " + CurrentDataFile ); time_stamp( "Start: ", 0 ); } // Open the file. // ifstream datafile( CurrentDataFile, ios::in); // for ( const auto& mit : fIndex ){ learnFromFileIndex( mit.second, datafile ); } } } if ( !Verbosity(SILENT) ){ time_stamp( "Finished: ", stats.dataLines() ); } learnT.stop(); // cerr << "Endresult " << endl; // cerr << InstanceBase << endl; if ( !Verbosity(SILENT) ){ IBInfo( *mylog ); Info( "Learning took " + learnT.toString() ); } #ifdef IBSTATS cerr << "final mismatches: " << InstanceBase->mismatch << endl; #endif } return result; } bool TimblExperiment::Learn( const std::string& s, bool warnOnSingleTarget ){ if ( ExpInvalid() || !ConfirmOptions() ){ return false; } return ClassicLearn( s, warnOnSingleTarget ); } IB1_Experiment::IB1_Experiment( const size_t N, const string& s, const bool init ): TimblExperiment( IB1_a, s ){ if ( init ) { init_options_table(N); } TreeOrder = GRoverFeature; } /* Increment the Instancebase with one instance (IB1 Class only) */ bool IB1_Experiment::Increment( const icu::UnicodeString& InstanceString ){ bool result = true; if ( ExpInvalid() ){ result = false; } else if ( IBStatus() == Invalid ){ Warning( "unable to Increment, No InstanceBase available" ); result = false; } else if ( !Chop( InstanceString ) ){ Error( "Couldn't convert to Instance: " + TiCC::UnicodeToUTF8(InstanceString) ); result = false; // No more input } else { chopped_to_instance( TrainLearnWords ); MBL_init = false; bool happy = InstanceBase->AddInstance( CurrInst ); if ( !happy ){ Warning( "deviating exemplar weight 
in:\n" + TiCC::UnicodeToUTF8(InstanceString) + "\nIgnoring the new weight" ); } } return result; } /* Decrement the Instancebase with one instance (IB1 Class only) */ bool IB1_Experiment::Decrement( const UnicodeString& InstanceString ){ bool result = true; if ( ExpInvalid() ){ result = false; } else if ( IBStatus() == Invalid ){ Warning( "unable to Decrement, No InstanceBase available" ); result = false; } else { if ( !Chop( InstanceString ) ){ Error( "Couldn't convert to Instance: " + TiCC::UnicodeToUTF8(InstanceString) ); result = false; // No more input } else { chopped_to_instance( TestWords ); HideInstance( CurrInst ); } } return result; } /* Expand an Instance Base */ bool TimblExperiment::Expand( const string& FileName ){ bool result = true; if ( ExpInvalid() ){ result = false; } else if ( IBStatus() == Invalid ){ Warning( "unable to expand the InstanceBase: Not there" ); result = false; } else if ( FileName.empty() ){ Warning( "unable to expand the InstanceBase: No inputfile specified" ); result = false; } else { if ( InputFormat() == UnknownInputFormat ){ // we may expand from 'nothing' if ( !Prepare( FileName, false, true ) ){ Error( "Unable to expand from file :'" + FileName + "'\n" ); return false; } } UnicodeString Buffer; stats.clear(); // Open the file. // ifstream datafile( FileName, ios::in); if ( InputFormat() == ARFF ){ skipARFFHeader( datafile ); } if ( !nextLine( datafile, Buffer ) ){ Error( "no useful data in: " + FileName ); result = false; // No more input } else if ( !chopLine( Buffer ) ){ Error( "no useful data in: " + FileName ); result = false; // No more input } else { MBL_init = false; if ( !Verbosity(SILENT) ) { Info( "Phase 2: Expanding from Datafile: " + FileName ); time_stamp( "Start: ", 0 ); } bool found; do { // The next Instance to store. 
chopped_to_instance( TrainLearnWords ); bool happy = InstanceBase->AddInstance( CurrInst ); if ( !happy ){ Warning( "deviating exemplar weight in line #" + TiCC::toString(stats.totalLines() ) + ":\n" + TiCC::UnicodeToUTF8(Buffer) + "\nIgnoring the new weight" ); } // Progress update. // if ( (stats.dataLines() % Progress() ) == 0 ){ time_stamp( "Learning: ", stats.dataLines() ); } found = false; while ( !found && nextLine( datafile, Buffer ) ){ found = chopLine( Buffer ); if ( !found ){ Warning( "datafile, skipped line #" + TiCC::toString( stats.totalLines() ) + "\n" + TiCC::UnicodeToUTF8(Buffer) ); } } } while( found ); time_stamp( "Finished: ", stats.dataLines() ); if ( !Verbosity(SILENT) ){ IBInfo( *mylog ); } } } return result; } /* Remove Instances from an Instance Base (IB1 only ) */ bool IB1_Experiment::Remove( const string& FileName ){ bool result = true; if ( ExpInvalid() ){ result = false; } else if ( IBStatus() == Invalid ){ Warning( "unable to remove from InstanceBase: Not there" ); result = false; } else if ( FileName.empty() ){ Warning( "unable to remove from InstanceBase: No input specified" ); result = false; } else { UnicodeString Buffer; stats.clear(); // Open the file. // ifstream datafile( FileName, ios::in); if ( InputFormat() == ARFF ){ skipARFFHeader( datafile ); } if ( !nextLine( datafile, Buffer ) ){ Error( "no useful data in: " + FileName ); result = false; // No more input } else if ( !chopLine( Buffer ) ){ Error( "no useful data in: " + FileName ); result = false; // No more input } else { if ( !Verbosity(SILENT) ) { Info( "Phase 2: Removing using Datafile: " + FileName ); time_stamp( "Start: ", 0 ); } bool found; do { // The next Instance to remove. chopped_to_instance( TestWords ); HideInstance( CurrInst ); // Progress update. 
// if ( (stats.dataLines() % Progress() ) == 0 ){ time_stamp( "Removing: ", stats.dataLines() ); } found = false; while ( !found && nextLine( datafile, Buffer ) ){ found = chopLine( Buffer ); if ( !found ){ Warning( "datafile, skipped line #" + TiCC::toString( stats.totalLines() ) + "\n" + TiCC::UnicodeToUTF8(Buffer) ); } } } while( found ); time_stamp( "Finished: ", stats.dataLines() ); if ( !Verbosity(SILENT) ){ IBInfo( *mylog ); } } } return result; } void TimblExperiment::showInputFormat( ostream& os ) const { switch ( InputFormat() ){ case C4_5: os << "InputFormat : C4.5"; break; case SparseBin: os << "InputFormat : Sparse Binary"; break; case Sparse: os << "InputFormat : Sparse"; break; case ARFF: os << "InputFormat : ARFF"; break; case Columns: os << "InputFormat : Columns"; break; case Tabbed: os << "InputFormat : Tabbed"; break; case Compact: os << "InputFormat : Compact, (Feature Length = " << F_length << ")"; break; default: os << "InputFormat unknown\n"; } os << endl << endl; } void TimblExperiment::show_progress( ostream& os, time_t start, unsigned int line ){ int local_progress = Progress(); if ( ( (line % local_progress ) == 0) || ( line <= 10 ) || ( line == 100 || line == 1000 || line == 10000 ) ){ time_t Time; time(&Time); if ( line == 1000 ){ // check if we are slow, if so, change progress value if ( Time - start > 120 ) { // more then two minutes for 1,000 // very slow ! Progress( 1000 ); } } else if ( line == 10000 ){ if ( Time - start > 600 ) { // more then ten minutes for 10,000 // quite slow ! Progress( 10000 ); } } if ( !exp_name.empty() ){ os << "-" << exp_name << "-"; } os << "Tested: "; os.width(6); os.setf(ios::right, ios::adjustfield); os << line << " @ " << TiCC::Timer::now(); // Estimate time until Estimate. 
// if ( Estimate() > 0 && (unsigned int)Estimate() > line ) { time_t SecsUsed = Time - start; if ( SecsUsed > 0 ) { double Estimated = (SecsUsed / (float)line) * (float)Estimate(); time_t EstimatedTime = (long)Estimated + start; string time_string = ctime(&EstimatedTime); time_string.pop_back(); // the newline from ctime os << ", " << Estimate() << ": " << time_string; } } os << endl; } } bool IB2_Experiment::show_learn_progress( ostream& os, time_t start, size_t added ){ int local_progress = Progress(); unsigned int lines = stats.dataLines(); unsigned int line = lines - IB2_offset() ; if ( ( (line % local_progress ) == 0) || ( line <= 10 ) || ( line == 100 || line == 1000 || line == 10000 ) ){ time_t Time; time(&Time); if ( line == 100 ){ // check if we are slow, if so, change progress value if ( Time - start > 120 && local_progress > 100 ){ // very slow ! Progress( 100 ); } } else if ( line == 1000 ){ // check if we are slow, if so, change progress value if ( Time - start > 120 && local_progress > 1000 ){ // very slow ! Progress( 1000 ); } } else if ( line == 10000 ){ if ( Time - start > 120 && local_progress > 10000 ){ // quite slow ! Progress( 10000 ); } } if ( !exp_name.empty() ){ os << "-" << exp_name << "-"; } os << "Learning: "; os.width(6); os.setf(ios::right, ios::adjustfield); os << lines << " @ " << TiCC::Timer::now(); os << "\t added:" << added; // Estime time until Estimate. 
// if ( Estimate() > 0 && (unsigned int)Estimate() > lines ) { time_t SecsUsed = Time - start; if ( SecsUsed > 0 ) { double Estimated = (SecsUsed / (float)line) * ( (float)Estimate() - IB2_offset() ); time_t EstimatedTime = (long)Estimated + start; string time_string = ctime(&EstimatedTime); time_string.pop_back(); // the newline from ctime os << "\t, " << Estimate() << ": " << time_string; } } os << endl; return true; } else { return false; } } void TimblExperiment::show_speed_summary( ostream& os, const timeval& Start ) const { timeval Time; gettimeofday( &Time, 0 ); long int uSecsUsed = (Time.tv_sec - Start.tv_sec) * 1000000 + (Time.tv_usec - Start.tv_usec); double secsUsed = (double)uSecsUsed / 1000000 + Epsilon; int oldPrec = os.precision(4); os << setprecision(4); os.setf( ios::fixed, ios::floatfield ); os << "Seconds taken: " << secsUsed << " ("; os << setprecision(2); os << stats.dataLines() / secsUsed << " p/s)" << endl; os << setprecision(oldPrec); } bool TimblExperiment::showStatistics( ostream& os ) const { os << endl; if ( confusionInfo ){ confusionInfo->FScore( os, targets, Verbosity(CLASS_STATS) ); } os << "overall accuracy: " << stats.testedCorrect()/(double) stats.dataLines() << " (" << stats.testedCorrect() << "/" << stats.dataLines() << ")" ; if ( stats.exactMatches() != 0 ){ os << ", of which " << stats.exactMatches() << " exact matches " ; } os << endl; int totalTies = stats.tiedCorrect() + stats.tiedFailure(); if ( totalTies > 0 ){ if ( totalTies == 1 ) { os << "There was 1 tie"; } else { os << "There were " << totalTies << " ties"; } double tie_perc = 100 * ( stats.tiedCorrect() / (double)totalTies); int oldPrec = os.precision(2); os << " of which " << stats.tiedCorrect() << " (" << setprecision(2) << tie_perc << setprecision(6) << "%)"; if ( totalTies == 1 ){ os << " was correctly resolved" << endl; } else { os << " were correctly resolved" << endl; } os.precision(oldPrec); } if ( confusionInfo && Verbosity(CONF_MATRIX) ){ os << endl; 
confusionInfo->Print( os, targets ); } return true; } bool TimblExperiment::createPercFile( const string& fileName ) const { if ( !fileName.empty() ) { ofstream outfile( fileName, ios::out | ios::trunc); if (!outfile) { Warning( "can't open: " + fileName ); return false; } else { outfile << (stats.testedCorrect() / (float)stats.dataLines()) * 100.0 << endl << "tested " << stats.dataLines() << " lines " << endl << "correct " << stats.testedCorrect() << " lines " << endl; outfile.close(); } } return true; } bool TimblExperiment::showBestNeighbors( ostream& outfile ) const { if ( Verbosity( NEAR_N | ALL_K) ){ outfile << bestArray; return true; } else { return false; } } xmlNode *TimblExperiment::bestNeighborsToXML() const { if ( Verbosity( NEAR_N | ALL_K) ){ return bestArray.toXML(); } else { return 0; } } json TimblExperiment::best_neighbors_to_JSON() const { if ( Verbosity( NEAR_N | ALL_K) ){ return bestArray.to_JSON(); } else { return json(); } } void TimblExperiment::show_results( ostream& outfile, const double confidence, const string& dString, const TargetValue *Best, const double Distance ) { outfile << get_org_input() << CodeToStr(Best->name()); if ( Verbosity(CONFIDENCE) ){ outfile << " [" << confidence << "]"; } if ( Verbosity(DISTRIB) ){ outfile << " " << dString; } if ( Verbosity(DISTANCE) ) { int OldPrec = outfile.precision(DBL_DIG-1); outfile.setf(ios::showpoint); outfile.width(8); outfile << " " << Distance; outfile.precision(OldPrec); } if ( Verbosity(MATCH_DEPTH) ){ outfile << " " << matchDepth() << ":" << (matchedAtLeaf()?"L":"N"); } outfile << endl; showBestNeighbors( outfile ); } bool IB2_Experiment::Prepare( const string& FileName, bool, bool expand ){ if ( !ConfirmOptions() || ( IB2_offset() == 0 && InstanceBase == 0 ) ){ Error( "IB2 learning failed, invalid bootstrap option?" 
); return false; } else { return TimblExperiment::Prepare( FileName, false, expand ); } } bool IB2_Experiment::Learn( const string& FileName, bool ){ if ( IB2_offset() == 0 ){ Error( "IB2 learning failed, invalid bootstrap option?" ); return false; } else { bool result = true; TiCC::Timer learnT; if ( ExpInvalid() || !ConfirmOptions() ){ result = false; } else { if ( is_synced ){ CurrentDataFile = FileName; // assume magic! } if ( CurrentDataFile == "" ){ if ( FileName == "" ){ Warning( "unable to build an InstanceBase: No datafile defined yet" ); result = false; } else if ( !Prepare( FileName, false ) || ExpInvalid() ){ result = false; } } else if ( FileName != "" && CurrentDataFile != FileName ){ Error( "Unable to Learn from file '" + FileName + "'\n" "while previously instantiated from file '" + CurrentDataFile + "'" ); result = false; } } if ( result ) { UnicodeString Buffer; stats.clear(); // Open the file. // ifstream datafile( CurrentDataFile, ios::in); if ( InputFormat() == ARFF ){ skipARFFHeader( datafile ); } if ( !nextLine( datafile, Buffer ) ){ Error( "cannot start learning from in: " + CurrentDataFile ); result = false; // No more input } else if ( !chopLine( Buffer ) ){ Error( "no useful data in: " + CurrentDataFile ); result = false; // No more input } else { learnT.start(); InitInstanceBase( ); if ( ExpInvalid() ){ return false; } MBL_init = false; if ( !Verbosity(SILENT) ) { Info( "Phase 2: Learning from Datafile: " + CurrentDataFile ); time_stamp( "Start: ", 0 ); } bool found; bool go_on = ( stats.dataLines() <= IB2_offset() ); while( go_on ){ // The next Instance to store. chopped_to_instance( TrainWords ); bool happy = InstanceBase->AddInstance( CurrInst ); if ( !happy ){ Warning( "deviating exemplar weight in line #" + TiCC::toString(stats.totalLines()) + ":\n" + TiCC::UnicodeToUTF8(Buffer) + "\nIgnoring the new weight" ); } // Progress update. 
// if ( (stats.dataLines() % Progress() ) == 0 ){ time_stamp( "Learning: ", stats.dataLines() ); } if ( stats.dataLines() >= IB2_offset() ){ go_on = false; } else { found = false; while ( !found && nextLine( datafile, Buffer ) ){ found = chopLine( Buffer ); if ( !found ){ Warning( "datafile, skipped line #" + TiCC::toString( stats.totalLines() ) + "\n" + TiCC::UnicodeToUTF8(Buffer) ); } } go_on = found; } } if ( !Verbosity(SILENT) ){ time_stamp( "Finished: ", stats.dataLines() ); } learnT.stop(); if ( !Verbosity(SILENT) ){ IBInfo( *mylog ); Info( "Learning took " + learnT.toString() ); } #ifdef IBSTATS cerr << "IB2 mismatches: " << InstanceBase->mismatch << endl; #endif } if ( result ){ result = Expand_N( FileName ); } } return result; } } bool IB2_Experiment::Expand( const string& FileName ){ bool result = false; if ( CurrentDataFile == "" && InstanceBase == 0 ){ Warning( "IB2, cannot Append data: No datafile bootstrapped yet" ); } else { IB2_offset( 0 ); if ( InputFormat() == UnknownInputFormat ){ // we may expand from 'nothing' if ( !Prepare( FileName, false, true ) ){ Error( "Unable to expand from file :'" + FileName + "'\n" ); return false; } } result = Expand_N( FileName ); } return result; } bool IB2_Experiment::Remove( const string& ){ Warning( "IB2, remove impossible, (ignored) " ); return false; } bool IB2_Experiment::Expand_N( const string& FileName ){ bool result = true; if ( ExpInvalid() ){ result = false; } else if ( CurrentDataFile == "" && InstanceBase == 0 ){ Warning( "IB2, cannot Append data: No datafile bootstrapped yet" ); result = false; } else if ( IBStatus() == Invalid ){ Warning( "unable to expand the InstanceBase: Not there" ); result = false; } else { string file_name; if ( FileName == "" ){ file_name = CurrentDataFile; } else { file_name = FileName; } UnicodeString Buffer; stats.clear(); // Open the file. 
// ifstream datafile( file_name, ios::in); if ( InputFormat() == ARFF ){ skipARFFHeader( datafile ); } if ( !nextLine( datafile, Buffer ) ){ Error( "no useful data in: " + file_name ); result = false; // No more input } else if ( !chopLine( Buffer ) ){ Error( "no useful data in: " + file_name ); result = false; // No more input } else { while ( stats.dataLines() <= IB2_offset() ){ if ( !nextLine( datafile, Buffer ) ){ Error( "not enough lines to skip in " + FileName ); result = false; break; } else if ( !chopLine( Buffer ) ){ Warning( "datafile, skipped line #" + TiCC::toString( stats.totalLines() ) + "\n" + TiCC::UnicodeToUTF8(Buffer) ); } } if ( result ){ time_t lStartTime; time(&lStartTime); if ( !Verbosity(SILENT) ) { Info( "Phase 2: Appending from Datafile: " + FileName + " (starting at line " + TiCC::toString( stats.dataLines() ) + ")" ); time_stamp( "Start: ", stats.dataLines() ); } bool found; size_t TotalAdded = 0; size_t Added = 0; initExperiment(); do { // The next Instance to store. chopped_to_instance( TestWords ); double final_distance; bool dummy = false; StatisticsClass stats_keep = stats; const TargetValue *ResultTarget = LocalClassify( CurrInst, final_distance, dummy ); stats = stats_keep; if ( ResultTarget != CurrInst.TV ) { chopped_to_instance( TrainLearnWords ); bool happy = InstanceBase->AddInstance( CurrInst ); if ( !happy ){ Warning( "deviating exemplar weight in line #" + TiCC::toString(stats.totalLines() ) + ":\n" + TiCC::UnicodeToUTF8(Buffer) + "\nIgnoring the new weight" ); } ++Added; ++TotalAdded; MBL_init = true; // avoid recalculations in LocalClassify } // Progress update. 
// if ( show_learn_progress( *mylog, lStartTime, Added ) ){ Added = 0; } found = false; while ( !found && nextLine( datafile, Buffer ) ){ found = chopLine( Buffer ); if ( !found ){ Warning( "datafile, skipped line #" + TiCC::toString( stats.totalLines() ) + "\n" + TiCC::UnicodeToUTF8(Buffer) ); } } } while( found ); time_stamp( "Finished: ", stats.dataLines() ); *mylog << "in total added " << TotalAdded << " new entries" << endl; if ( !Verbosity(SILENT) ){ IBInfo( *mylog ); LearningInfo( *mylog ); } MBL_init = false; // force recalculations when testing } } } return result; } bool TimblExperiment::initTestFiles( const string& InFileName, const string& OutFileName ){ if ( !ExpInvalid() && ConfirmOptions() ){ testStream.close(); testStream.clear(); // just to be shure. old G++ libraries are in error here testStream.open( InFileName, ios::in); if ( !testStream ) { Error( "can't open: " + InFileName ); } else { outStream.close(); outStream.clear(); // just to be shure. old G++ libraries are in error here // first we check if the outFile is writable. // We don't write it though, because we don't want to have // it mangled when checkTestFile fails outStream.open( OutFileName, ios::app ); if ( !outStream ) { Error( "can't open: " + OutFileName ); } else { testStreamName = InFileName; outStreamName = OutFileName; if ( checkTestFile() ){ outStream.close(); outStream.clear(); // just to be shure. old G++ libraries are in error here outStream.open( OutFileName, ios::out | ios::trunc ); return true; } } } } return false; } bool TimblExperiment::checkTestFile(){ if ( IBStatus() == Invalid ){ Warning( "you tried to apply the " + TiCC::toString( algorithm ) + " algorithm, but no Instance Base is available yet" ); } else { runningPhase = TestWords; size_t numF =0; if ( (numF = examineData( testStreamName )) != NumOfFeatures() ){ if ( numF == 0 ){ Error( "unable to use the data from '" + testStreamName + "', wrong Format?" 
); } else { Error( "mismatch between number of features in Testfile " + testStreamName + " and the Instancebase (" + TiCC::toString(numF) + " vs. " + TiCC::toString(NumOfFeatures()) + ")" ); } return false; } if ( !Verbosity(SILENT) ){ *mylog << "Examine datafile '" << testStreamName << "' gave the following results:" << endl << "Number of Features: " << numF << endl; showInputFormat( *mylog ); } } return true; } bool IB1_Experiment::checkTestFile(){ if ( !TimblExperiment::checkTestFile() ){ return false; } else if ( IBStatus() == Pruned ){ Warning( "you tried to apply the " + TiCC::toString( algorithm) + " algorithm on a pruned Instance Base" ); return false; } return true; } bool IB2_Experiment::checkTestFile(){ if ( !IB1_Experiment::checkTestFile() ){ return false; } else if ( IB2_offset() == 0 && InstanceBase == 0 ){ Error( "missing bootstrap information for IB2 algorithm." ); return false; } return true; } bool TimblExperiment::checkLine( const UnicodeString& line ){ bool result = false; if ( !ExpInvalid() && ConfirmOptions() ) { runningPhase = TestWords; InputFormatType IF = InputFormat(); if ( IF == UnknownInputFormat ){ IF = getInputFormat( line ); } size_t i = countFeatures( line, IF ); if ( i != NumOfFeatures() ){ if ( i > 0 ){ Warning( "mismatch between number of features in testline '" + TiCC::UnicodeToUTF8(line) + "' and the Instancebase (" + TiCC::toString(i) + " vs. 
" + TiCC::toString(NumOfFeatures()) + ")" ); } } else if ( Initialized ){ result = true; } else if ( IBStatus() == Invalid ){ Warning( "no Instance Base is available yet" ); } else if ( !setInputFormat( IF ) ){ Error( "Couldn't set input format to " + TiCC::toString( IF ) ); } else { if ( Verbosity(NEAR_N) ){ Do_Exact( false ); } initExperiment(); Initialized = true; result = true; } } return result; } bool IB1_Experiment::checkLine( const UnicodeString& line ){ if ( !TimblExperiment::checkLine( line ) ){ return false; } else if ( IBStatus() == Pruned ){ Warning( "you tried to apply the IB1 algorithm on a pruned" " Instance Base" ); return false; } else if ( TRIBL_offset() != 0 ){ Error( "IB1 algorithm impossible while threshold > 0\n" "Please use TRIBL" ); return false; } return true; } json TimblExperiment::classify_to_JSON( const string& inst ) { json result; double distance = 0.0; const TargetValue *targ = classifyString( TiCC::UnicodeFromUTF8(inst), distance ); if ( targ ){ string cat = targ->name_string(); normalizeResult(); result["category"] = cat; if ( Verbosity(NEAR_N) ){ json tmp = best_neighbors_to_JSON(); if ( !tmp.empty() ){ result["neighbors"] = tmp; } } if ( Verbosity(DISTANCE) ){ result["distance"] = distance; } string distribution = bestResult.getResult(); if ( Verbosity(DISTRIB) ){ result["distribution"] = distribution; } if ( Verbosity(MATCH_DEPTH) ){ result["match_depth"] = matchDepth(); } if ( Verbosity(NEAR_N) ){ json tmp = best_neighbors_to_JSON(); if ( !tmp.empty() ){ result["neighbors"] = tmp; } } if (Verbosity(CONFIDENCE) ){ result["confidence"] = confidence(); } } else { result = last_error; } return result; } json TimblExperiment::classify_to_JSON( const vector& instances ) { json result = json::array(); for ( const auto& i : instances ){ json tmp = classify_to_JSON( i ); result.push_back( tmp ); } if ( result.size() != instances.size() ){ json error; error["status"] = "error"; error["message"] = "total confusion in Timbl"; result = 
error; } return result; } bool TimblExperiment::Classify( const string& Line, string& Result, string& Dist, double& Distance ){ Result.clear(); Dist.clear(); const TargetValue *targ = classifyString( TiCC::UnicodeFromUTF8(Line), Distance ); if ( targ ){ Result = targ->name_string(); normalizeResult(); Dist = bestResult.getResult(); return true; } return false; } bool TimblExperiment::Classify( const UnicodeString& Line, UnicodeString& Result, UnicodeString& Dist, double& Distance ){ Result.remove(); Dist.remove(); const TargetValue *targ = classifyString( Line, Distance ); if ( targ ){ Result = targ->name(); normalizeResult(); Dist = TiCC::UnicodeFromUTF8(bestResult.getResult()); return true; } return false; } bool TimblExperiment::Classify( const UnicodeString& Line, UnicodeString& Result ) { UnicodeString dist; double dummy; return Classify( Line, Result, dist, dummy ); } void TimblExperiment::testInstance( const Instance& Inst, InstanceBase_base *base, size_t offset ) { initExperiment(); bestArray.init( num_of_neighbors, MaxBests, Verbosity(NEAR_N), Verbosity(DISTANCE), Verbosity(DISTRIB) ); TestInstance( Inst, base, offset ); } const TargetValue *TimblExperiment::LocalClassify( const Instance& Inst, double& Distance, bool& exact ){ bool recurse = true; bool Tie = false; exact = false; if ( !bestResult.reset( beamSize, normalisation, norm_factor, targets ) ){ Warning( "no normalisation possible because a BeamSize is specified\n" "output is NOT normalized!" ); } const ClassDistribution *ExResultDist = ExactMatch( Inst ); WClassDistribution *ResultDist = 0; nSet.clear(); const TargetValue *Res; if ( ExResultDist ){ Distance = 0.0; recurse = !Do_Exact(); // no retesting when exact match and the user ASKED for them.. Res = ExResultDist->BestTarget( Tie, (RandomSeed() >= 0) ); // // add the exact match to bestArray. It should be taken into account // for Tie resolution. 
this fixes bug 44 // bestArray.init( num_of_neighbors, MaxBests, Verbosity(NEAR_N), Verbosity(DISTANCE), Verbosity(DISTRIB) ); bestArray.addResult( Distance, ExResultDist, get_org_input() ); bestArray.initNeighborSet( nSet ); } else { testInstance( Inst, InstanceBase ); bestArray.initNeighborSet( nSet ); ResultDist = getBestDistribution( ); Res = ResultDist->BestTarget( Tie, (RandomSeed() >= 0) ); Distance = getBestDistance(); } if ( Tie && recurse ){ bool Tie2 = true; ++num_of_neighbors; testInstance( Inst, InstanceBase ); bestArray.addToNeighborSet( nSet, num_of_neighbors ); WClassDistribution *ResultDist2 = getBestDistribution(); const TargetValue *Res2 = ResultDist2->BestTarget( Tie2, (RandomSeed() >= 0) ); --num_of_neighbors; if ( !Tie2 ){ Res = Res2; delete ResultDist; ResultDist = ResultDist2; } else { delete ResultDist2; } } exact = fabs(Distance) < Epsilon ; if ( ResultDist ){ bestResult.addDisposable( ResultDist, Res ); } else { bestResult.addConstant( ExResultDist, Res ); exact = exact || Do_Exact(); } if ( exact ){ stats.addExact(); } if ( confusionInfo ){ confusionInfo->Increment( Inst.TV, Res ); } bool correct = Inst.TV && ( Res == Inst.TV ); if ( correct ){ stats.addCorrect(); if ( Tie ){ stats.addTieCorrect(); } } else if ( Tie ){ stats.addTieFailure(); } return Res; } const TargetValue *TimblExperiment::classifyString( const UnicodeString& Line, double& Distance ){ Distance = -1.0; const TargetValue *BestT = NULL; if ( checkLine( Line ) && chopLine( Line ) ){ chopped_to_instance( TestWords ); bool exact = false; BestT = LocalClassify( CurrInst, Distance, exact ); } return BestT; } const neighborSet *TimblExperiment::NB_Classify( const UnicodeString& line ){ initExperiment(); if ( checkLine( line ) && chopLine( line ) ){ chopped_to_instance( TestWords ); return LocalClassify( CurrInst ); } return 0; } const neighborSet *TimblExperiment::LocalClassify( const Instance& Inst ){ testInstance( Inst, InstanceBase ); bestArray.initNeighborSet( nSet ); 
nSet.setShowDistance( Verbosity(DISTANCE) );
nSet.setShowDistribution( Verbosity(DISTRIB) );
return &nSet;
}

// Sum the weights of the (permuted) features from 'level' up to the
// last effective feature; used as an upper bound on the remaining
// distance contribution.
double TimblExperiment::sum_remaining_weights( size_t level ) const {
  double result = 0.0;
  for ( size_t i = level; i < EffectiveFeatures(); ++i ){
    result += features.perm_feats[i]->Weight();
  }
  return result;
}

// Report the global metric plus every feature whose metric deviates
// from it, including whether its value-difference matrix is prestored
// or user defined.
void TimblExperiment::show_metric_info( ostream& os ) const {
  os << "Global metric : "
     << TiCC::toString( globalMetricOption, true);
  if ( GlobalMetric->isStorable() ){
    os << ", Prestored matrix";
  }
  if ( Do_Exact() ){
    os << ", prefering exact matches";
  }
  os << endl;
  os << "Deviant Feature Metrics:";
  int cnt = 0;
  // inverse of the feature permutation: original position -> rank
  // NOTE(review): the element type of this vector (likely <size_t>)
  // appears lost in extraction of this file — verify against the
  // original source.
  vector InvPerm( NumOfFeatures() );
  for ( size_t i = 0; i < NumOfFeatures(); ++i ){
    InvPerm[features.permutation[i]] = i;
  }
  for ( size_t i = 0; i < NumOfFeatures(); ++i ){
    // only features that are used and fall outside the TRIBL prefix
    if ( !features[i]->Ignore() &&
	 InvPerm[i]+1 > TRIBL_offset() ){
      MetricType mt = features[i]->getMetricType();
      if ( mt != globalMetricOption ){
	++cnt;
	os << endl << " Feature[" << i+1
	   << "] : " << TiCC::toString( mt, true );
	if ( features[i]->isStorableMetric() ){
	  bool readM = false;
	  if ( features[i]->matrixPresent( readM ) ){
	    if ( readM ){
	      os << " (User Defined)";
	    }
	    else {
	      os << " (Prestored)";
	    }
	  }
	  else {
	    os << " (Not Prestored)";
	  }
	}
      }
    }
  }
  if ( cnt ){
    os << endl;
  }
  else {
    os << "(none)" << endl;
  }
  MatrixInfo( os );
  show_ignore_info( os );
}

// Report the current weighting scheme; for user-defined weighting
// also the weights file (or the fallback when none was loaded).
void TimblExperiment::show_weight_info( ostream& os ) const {
  os << "Weighting : "
     << TiCC::toString(CurrentWeighting(), true);
  if ( CurrentWeighting() == UserDefined_w ){
    if ( WFileName != "" ){
      os << " (" << WFileName << ")";
    }
    else {
      os << " (no weights loaded, using No Weighting)" ;
    }
  }
  os << endl;
  if ( Verbosity( FEAT_W ) && CurrentWeighting() != No_w ){
    ShowWeights( os );
  }
}

// List the 1-based indices of all ignored features, if any.
void TimblExperiment::show_ignore_info( ostream& os ) const{
  bool first = true;
  for ( size_t i=0; i< NumOfFeatures(); ++i ){
    if ( features[i]->Ignore() ){
      if ( first ){
	first = false;
	os << "Ignored features : { ";
      }
      else {
	os << ", ";
      }
      os << i+1;
    }
  }
  if (
!first ){ os << " } " << endl; } } void TimblExperiment::showTestingInfo( ostream& os ) { if ( !Verbosity(SILENT) ){ if ( Verbosity(OPTIONS ) ){ ShowSettings( os ); } os << endl << "Starting to test, Testfile: " << testStreamName << endl << "Writing output in: " << outStreamName << endl << "Algorithm : " << TiCC::toString( Algorithm() ) << endl; show_metric_info( os ); show_weight_info( os ); os << decay << endl; } } class threadData { public: threadData():exp(0), lineNo(0), resultTarget(0), exact(false), distance(-1), confidence(0) {}; bool exec(); void show( ostream& ) const; TimblExperiment *exp; UnicodeString Buffer; unsigned int lineNo; const TargetValue *resultTarget; bool exact; string distrib; double distance; double confidence; }; bool threadData::exec(){ resultTarget = 0; // #pragma omp critical // cerr << "exec " << lineNo << " '" << Buffer << "'" << endl; if ( Buffer.isEmpty() ){ return false; } if ( !exp->chopLine( Buffer ) ){ exp->Warning( "testfile, skipped line #" + TiCC::toString( lineNo ) + "\n" + TiCC::UnicodeToUTF8(Buffer) ); return false; } else { exp->chopped_to_instance( TimblExperiment::TestWords ); exact = false; resultTarget = exp->LocalClassify( exp->CurrInst, distance, exact ); exp->normalizeResult(); distrib = exp->bestResult.getResult(); if ( exp->Verbosity(CONFIDENCE) ){ confidence = exp->confidence(); } else { confidence = 0; } return true; } } void threadData::show( ostream& os ) const { if ( resultTarget != 0 ){ exp->show_results( os, confidence, distrib, resultTarget, distance ); if ( exact ){ // remember that a perfect match may be incorrect! 
if ( exp->Verbosity(EXACT) ) { *exp->mylog << "Exacte match:\n" << exp->get_org_input() << endl; } } } } class threadBlock { public: explicit threadBlock( TimblExperiment *, int = 1 ); bool readLines( istream& ); void finalize(); vector exps; private: size_t size; }; threadBlock::threadBlock( TimblExperiment *parent, int num ){ if ( num <= 0 ){ throw range_error( "threadBlock size cannot be <=0" ); } size = num; exps.resize( size ); exps[0].exp = parent; for ( size_t i = 1; i < size; ++i ){ exps[i].exp = parent->clone(); *exps[i].exp = *parent; exps[i].exp->initExperiment(); }; } bool threadBlock::readLines( istream& is ){ bool result = true; for ( size_t i=0; i < size; ++i ){ exps[i].Buffer = ""; int cnt; bool goon = exps[0].exp->nextLine( is, exps[i].Buffer, cnt ); exps[i].lineNo += cnt; if ( !goon && i == 0 ){ result = false; } } return result; } void threadBlock::finalize(){ for ( size_t i=1; i < size; ++i ){ exps[0].exp->stats.merge( exps[i].exp->stats ); if ( exps[0].exp->confusionInfo ){ exps[0].exp->confusionInfo->merge( exps[i].exp->confusionInfo ); } delete exps[i].exp; } } #ifdef HAVE_OPENMP bool TimblExperiment::Test( const string& FileName, const string& OutFile ){ bool result = false; if ( initTestFiles( FileName, OutFile ) ){ initExperiment(); stats.clear(); showTestingInfo( *mylog ); if ( numOfThreads > 1 ){ omp_set_num_threads( numOfThreads ); } threadBlock experiments( this, numOfThreads ); // Start time. // time_t lStartTime; time(&lStartTime); timeval startTime; gettimeofday( &startTime, 0 ); if ( InputFormat() == ARFF ){ skipARFFHeader( testStream ); } unsigned int dataCount = stats.dataLines(); while ( experiments.readLines( testStream ) ){ if ( numOfThreads > 1 ){ #pragma omp parallel for shared( experiments, dataCount ) for ( int i=0; i < numOfThreads; ++i ){ if ( experiments.exps[i].exec() && !Verbosity(SILENT) ) // Display progress counter. 
#pragma omp critical show_progress( *mylog, lStartTime, ++dataCount ); } for ( int i=0; i < numOfThreads; ++i ){ // Write it to the output file for later analysis. experiments.exps[i].show( outStream ); } } else { if ( experiments.exps[0].exec() && !Verbosity(SILENT) ){ // Display progress counter. show_progress( *mylog, lStartTime, ++dataCount ); } // Write it to the output file for later analysis. experiments.exps[0].show( outStream ); } } experiments.finalize(); if ( !Verbosity(SILENT) ){ time_stamp( "Ready: ", stats.dataLines() ); show_speed_summary( *mylog, startTime ); showStatistics( *mylog ); } result = true; } return result; } #else bool TimblExperiment::Test( const string& FileName, const string& OutFile ){ bool result = false; if ( initTestFiles( FileName, OutFile ) ){ initExperiment(); stats.clear(); showTestingInfo( *mylog ); // Start time. // time_t lStartTime; time(&lStartTime); timeval startTime; gettimeofday( &startTime, 0 ); if ( InputFormat() == ARFF ){ skipARFFHeader( testStream ); } UnicodeString Buffer; while ( nextLine( testStream, Buffer ) ){ if ( !chopLine( Buffer ) ) { Warning( "testfile, skipped line #" + TiCC::toString( stats.totalLines() ) + "\n" + TiCC::UnicodeToUTF8(Buffer) ); } else { chopped_to_instance( TestWords ); bool exact = false; string distrib; double distance; double confi = 0; const TargetValue *resultTarget = LocalClassify( CurrInst, distance, exact ); normalizeResult(); distrib = bestResult.getResult(); if ( Verbosity(CONFIDENCE) ){ confi = confidence(); } show_results( outStream, confi, distrib, resultTarget, distance ); if ( exact ){ // remember that a perfect match may be incorrect! if ( Verbosity(EXACT) ) { *mylog << "Exacte match:\n" << get_org_input() << endl; } } if ( !Verbosity(SILENT) ){ // Display progress counter. 
show_progress( *mylog, lStartTime, stats.dataLines() ); } } } if ( !Verbosity(SILENT) ){ time_stamp( "Ready: ", stats.dataLines() ); show_speed_summary( *mylog, startTime ); showStatistics( *mylog ); } result = true; } return result; } #endif bool TimblExperiment::NS_Test( const string& , const string& ){ FatalError( "wrong algorithm" ); return false; } bool IB1_Experiment::NS_Test( const string& FileName, const string& OutFile ){ bool result = false; if ( initTestFiles( FileName, OutFile ) ){ initExperiment(); stats.clear(); showTestingInfo( *mylog ); // Start time. // time_t lStartTime; time(&lStartTime); timeval startTime; gettimeofday( &startTime, 0 ); if ( InputFormat() == ARFF ){ skipARFFHeader( testStream ); } UnicodeString Buffer; while ( nextLine( testStream, Buffer ) ){ if ( !chopLine( Buffer ) ) { Warning( "testfile, skipped line #" + TiCC::toString( stats.totalLines() ) + "\n" + TiCC::UnicodeToUTF8(Buffer) ); } else { chopped_to_instance( TestWords ); const neighborSet *res = LocalClassify( CurrInst ); outStream << get_org_input() << endl << *res; if ( !Verbosity(SILENT) ){ // Display progress counter. show_progress( *mylog, lStartTime, stats.dataLines() ); } } }// end while. 
if ( !Verbosity(SILENT) ){ time_stamp( "Ready: ", stats.dataLines() ); show_speed_summary( *mylog, startTime ); } result = true; } return result; } bool TimblExperiment::SetOptions( int argc, const char *argv[] ){ if ( IsClone() ){ TiCC::CL_Options Opts( timbl_serv_short_opts, "" ); try { Opts.init( argc, argv ); } catch( exception& e ){ Error( string(e.what()) + ": valid options: " + timbl_serv_short_opts ); } return SetOptions( Opts ); } else { TiCC::CL_Options Opts( timbl_short_opts, timbl_long_opts); try { Opts.init( argc, argv ); } catch( exception& e ){ Error( string(e.what()) + ": valid options: " + timbl_short_opts + " " + timbl_long_opts ); } return SetOptions( Opts ); } } bool TimblExperiment::SetOptions( const string& arg ){ if ( IsClone() ){ TiCC::CL_Options Opts( timbl_serv_short_opts, "" ); try { Opts.init( arg ); } catch( exception& e ){ Error( string(e.what()) + ": valid options: " + timbl_serv_short_opts ); } return SetOptions( Opts ); } else { TiCC::CL_Options Opts( timbl_short_opts, timbl_long_opts); try { Opts.init( arg ); } catch( exception& e ){ Error( string(e.what()) + ": valid options: " + timbl_short_opts + " " + timbl_long_opts ); } return SetOptions( Opts ); } } bool TimblExperiment::SetOptions( const TiCC::CL_Options& Opts ){ bool result; if ( IsClone() ){ result = OptParams->parse_options( Opts, 2 ); } else { result = OptParams->parse_options( Opts, 0 ); } return result; } bool TimblExperiment::IndirectOptions( const TiCC::CL_Options& Opts ){ OptParams->set_default_options(); return OptParams->parse_options( Opts, 1 ); } bool TimblExperiment::ConfirmOptions(){ return OptParams->definitive_options( this ); } bool TimblExperiment::ShowOptions( ostream& os ){ return ( ConfirmOptions() && MBLClass::ShowOptions( os ) ); } bool TimblExperiment::ShowSettings( ostream& os ){ return ( ConfirmOptions() && MBLClass::ShowSettings( os ) ); } xmlNode *TimblExperiment::settingsToXML(){ if ( ConfirmOptions() ){ return MBLClass::settingsToXml( ); } 
else { return 0; } } json TimblExperiment::settings_to_JSON(){ if ( ConfirmOptions() ){ return MBLClass::settings_to_JSON( ); } else { return 0; } } xmlNode *TimblExperiment::weightsToXML(){ xmlNode *result = TiCC::XmlNewNode( "currentWeights" ); TiCC::XmlSetAttribute( result, "weighting", TiCC::toString( CurrentWeighting() ) ); vector wghts; GetCurrentWeights( wghts ); for ( unsigned int i=0; i < wghts.size(); ++i ){ xmlNode *n = TiCC::XmlNewTextChild( result, "feature", TiCC::toString(wghts[i]) ); TiCC::XmlSetAttribute( n, "index", TiCC::toString(i+1) ); } return result; } json TimblExperiment::weights_to_JSON(){ json result; result["weighting"] = TiCC::toString( CurrentWeighting() ); json arr = json::array(); vector wghts; GetCurrentWeights( wghts ); copy( wghts.begin(), wghts.end(), back_inserter(arr) ); result["weights"] = arr; return result; } bool TimblExperiment::WriteArrays( const std::string& FileName ){ ofstream out( FileName, ios::out | ios::trunc ); if ( !out ) { Warning( "Problem opening Probability file '" + FileName + "' (not written)" ); return false; } else { if ( !Verbosity(SILENT) ){ Info( "Saving Probability Arrays in " + FileName ); } return MBLClass::writeArrays( out ); } } bool TimblExperiment::GetArrays( const std::string& FileName ){ ifstream inf( FileName, ios::in ); if ( !inf ){ Error( "Problem opening Probability file " + FileName ); return false; } else { if ( !Verbosity(SILENT) ){ Info( "Reading Probability Arrays from " + FileName ); } if ( !readArrays( inf ) ){ Error( "Errors found in file " + FileName ); return false; } else { return true; } } } bool TimblExperiment::WriteMatrices( const std::string& FileName ){ ofstream out( FileName, ios::out | ios::trunc ); if ( !out ) { Warning( "Problem opening matrices file '" + FileName + "' (not written)" ); return false; } else { if ( !Verbosity(SILENT) ){ Info( "Saving Matrices in " + FileName ); } initExperiment( ); return writeMatrices( out ); } } bool TimblExperiment::GetMatrices( 
const std::string& FileName ){ ifstream inf( FileName, ios::in ); if ( !inf ){ Error( "Problem opening matrices file " + FileName ); return false; } else { if ( !Verbosity(SILENT) ){ Info( "Reading matrices from " + FileName ); } if ( !readMatrices( inf ) ){ Error( "Errors found in file " + FileName ); return false; } else { return true; } } } bool TimblExperiment::SaveWeights( const std::string& FileName ){ if ( ConfirmOptions() ){ // Open the output file. // ofstream outfile( FileName, ios::out | ios::trunc); if (!outfile) { Warning( "can't open Weightsfile: " + FileName ); return false; } else { if ( !Verbosity(SILENT) ){ Info( "Saving Weights in " + FileName ); } if ( writeWeights( outfile ) ){ return true; } else { Error( "failed to store weights in file " + FileName ); return false; } } } else { return false; } } bool TimblExperiment::GetWeights( const std::string& FileName, WeightType w ){ if ( ConfirmOptions() ){ // Open the file. // ifstream weightsfile( FileName, ios::in); if ( !weightsfile) { Error( "can't open WeightsFile " + FileName ); return false; } else { if ( w == Unknown_w ){ w = GR_w; } if ( !Verbosity(SILENT) ){ Info( "Reading weights from " + FileName ); } if ( readWeights( weightsfile, w ) ){ WFileName = FileName; return true; } else { Warning( "Errors in Weightsfile " + FileName ); return false; } } } else { return false; } } bool TimblExperiment::WriteInstanceBase( const std::string& FileName ){ bool result = false; if ( ConfirmOptions() ){ ofstream outfile( FileName, ios::out | ios::trunc ); if (!outfile) { Warning( "can't open outputfile: " + FileName ); } else { if ( !Verbosity(SILENT) ){ Info( "Writing Instance-Base in: " + FileName ); } result = PutInstanceBase( outfile ); } } return result; } bool TimblExperiment::WriteInstanceBaseXml( const std::string& FileName ) { bool result = false; if ( ConfirmOptions() ){ ofstream os( FileName, ios::out | ios::trunc ); if (!os) { Warning( "can't open outputfile: " + FileName ); } else { if ( 
!Verbosity(SILENT) ){ Info( "Writing Instance-Base in: " + FileName ); } if ( ExpInvalid() ){ result = false; } else if ( InstanceBase == NULL ){ Warning( "unable to write an Instance Base, nothing learned yet" ); } else { InstanceBase->toXML( os ); } } } return result; } bool TimblExperiment::WriteInstanceBaseLevels( const std::string& FileName, unsigned int levels ) { bool result = false; if ( ConfirmOptions() ){ ofstream os( FileName, ios::out | ios::trunc ); if (!os) { Warning( "can't open outputfile: " + FileName ); } else { if ( !Verbosity(SILENT) ){ Info( "Writing Instance-Base in: " + FileName ); } if ( ExpInvalid() ){ result = false; } else if ( InstanceBase == NULL ){ Warning( "unable to write an Instance Base, nothing learned yet" ); } else { InstanceBase->printStatsTree( os, levels ); } } } return result; } bool IB1_Experiment::GetInstanceBase( istream& is ){ bool result = false; bool Pruned; bool Hashed; int Version; string range_buf; size_t numF = get_IB_Info( is, Pruned, Version, Hashed, range_buf ); if ( numF == 0 ){ return false; } else if ( Pruned ){ Error( "Instance-base is Pruned!, NOT valid for " + TiCC::toString(algorithm) + " Algorithm" ); } else { TreeOrder = DataFile; Initialize( numF ); if ( !get_ranges( range_buf ) ){ Warning( "couldn't retrieve ranges..." 
); } else { srand( RandomSeed() ); int pos=0; for ( size_t i=0; i < NumOfFeatures(); ++i ){ features[i]->SetWeight( 1.0 ); if ( features[features.permutation[i]]->Ignore() ){ features.perm_feats[i] = NULL; } else { features.perm_feats[pos++] = features[features.permutation[i]]; } } InstanceBase = new IB_InstanceBase( EffectiveFeatures(), ibCount, (RandomSeed()>=0) ); if ( Hashed ){ result = InstanceBase->ReadIB_hashed( is, features, targets, Version ); } else { result = InstanceBase->ReadIB( is, features, targets, Version ); } } } return result; } bool TimblExperiment::ReadInstanceBase( const string& FileName ){ bool result = false; if ( ConfirmOptions() ){ ifstream infile( FileName, ios::in ); if ( !infile ) { Error( "can't open: " + FileName ); } else { if ( !Verbosity(SILENT) ){ Info( "Reading Instance-Base from: " + FileName ); } if ( GetInstanceBase( infile ) ){ if ( !Verbosity(SILENT) ){ IBInfo( cout ); writePermutation( cout ); } result = true; } } } return result; } bool TimblExperiment::WriteNamesFile( const string& FileName ) const { // Open the file. // ofstream namesfile( FileName, ios::out | ios::trunc); if (!namesfile) { Warning( "can't open NamesFile: '" + FileName + "' (not written)" ); return false; } else { if ( !Verbosity(SILENT) ){ Info( "Saving names in " + FileName ); } MBLClass::writeNamesFile( namesfile ); return true; } } void IB1_Experiment::InitInstanceBase(){ srand( RandomSeed() ); set_order(); runningPhase = TrainWords; InstanceBase = new IB_InstanceBase( EffectiveFeatures(), ibCount, (RandomSeed()>=0) ); } bool TimblExperiment::GetCurrentWeights( vector& res ) { res.clear(); if ( ExpInvalid() ){ return false; } else { initExperiment(); for ( size_t i=0; i< NumOfFeatures(); ++i ){ res.push_back( features[i]->Weight() ); } } return true; } bool TimblExperiment::build_file_index( const string& file_name, fileIndex& fmIndex ){ bool result = true; UnicodeString Buffer; stats.clear(); size_t cur_pos = 0; // Open the file. 
// ifstream datafile( file_name, ios::in); if ( InputFormat() == ARFF ){ skipARFFHeader( datafile ); } cur_pos = datafile.tellg(); if ( !nextLine( datafile, Buffer ) ){ Error( "cannot start learning from in: " + file_name ); result = false; // No more input } else if ( !chopLine( Buffer ) ){ Error( "no useful data in: " + file_name ); result = false; // No more input } else { if ( !Verbosity(SILENT) ) { Info( "Phase 2: Building index on Datafile: " + file_name ); time_stamp( "Start: ", 0 ); } bool go_on = true; while ( go_on ){ // The next Instance to store. // cerr << "line at pos " << cur_pos << " : " << Buffer << endl; chopped_to_instance( TrainWords ); // cerr << "gives Instance " << &CurrInst << endl; FeatureValue *fv0 = CurrInst.FV[0]; auto const it = fmIndex.find( fv0 ); if ( it == fmIndex.end() ){ set st; st.insert(cur_pos); fmIndex[fv0] = st; } else { it->second.insert( cur_pos ); } if ( (stats.dataLines() % Progress() ) == 0 ){ time_stamp( "Indexing: ", stats.dataLines() ); } bool found = false; while ( !found && ( cur_pos = datafile.tellg(), nextLine( datafile, Buffer ) ) ){ found = chopLine( Buffer ); if ( !found ){ Warning( "datafile, skipped line #" + TiCC::toString( stats.totalLines() ) + "\n" + TiCC::UnicodeToUTF8(Buffer) ); } } go_on = found; } time_stamp( "Finished: ", stats.dataLines() ); } return result; } bool TimblExperiment::build_file_multi_index( const string& file_name, fileDoubleIndex& fmIndex ){ bool result = true; UnicodeString Buffer; stats.clear(); size_t cur_pos = 0; // Open the file. 
// ifstream datafile( file_name, ios::in); if ( InputFormat() == ARFF ){ skipARFFHeader( datafile ); } cur_pos = datafile.tellg(); if ( !nextLine( datafile, Buffer ) ){ Error( "cannot start learning from in: " + file_name ); result = false; // No more input } else if ( !chopLine( Buffer ) ){ Error( "no useful data in: " + file_name ); result = false; // No more input } else { if ( !Verbosity(SILENT) ){ Info( "Phase 2: Building multi index on Datafile: " + file_name ); time_stamp( "Start: ", 0 ); } bool go_on = true; while( go_on ){ // The next Instance to store. // cerr << "line at pos " << cur_pos << " : " << Buffer << endl; chopped_to_instance( TrainWords ); // cerr << "gives Instance " << &CurrInst << endl; FeatureValue *fv0 = CurrInst.FV[0]; FeatureValue *fv1 = CurrInst.FV[1]; auto const it = fmIndex.find( fv0 ); if ( it != fmIndex.end() ){ it->second[fv1].insert( cur_pos ); } else { fileIndex mi; mi[fv1].insert( cur_pos ); fmIndex[fv0] = mi; } if ( (stats.dataLines() % Progress() ) == 0 ){ time_stamp( "Indexing: ", stats.dataLines() ); } bool found = false; while ( !found && ( cur_pos = datafile.tellg(), nextLine( datafile, Buffer ) ) ){ found = chopLine( Buffer ); if ( !found ){ Warning( "datafile, skipped line #" + TiCC::toString( stats.totalLines() ) + "\n" + TiCC::UnicodeToUTF8(Buffer) ); } } go_on = found; } time_stamp( "Finished: ", stats.dataLines() ); } return result; } } LanguageMachines-timbl-642727d/src/Types.cxx000066400000000000000000000126071451477526200207350ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. 
timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #include #include #include #include #include #include #include "timbl/Types.h" namespace Timbl { using std::string; using std::vector; // initializers const string AlgorithmName[][2] = { { "Unknown", "Unknown Algorithm" }, { "IB1", "Memory Based Learning" }, { "IB2", "Adapted Memory Based Learning"}, { "IGTree", "Information Gain Tree" }, { "TRIBL", "Tree IB1" }, { "TRIBL2", "Tribl 2" }, { "LOO", "Leave One Out" }, { "CV", "Cross Validate" } }; const string MetricName[][2] = { { "U", "Unknown Metric" }, { "I", "Ignore" }, { "N", "Numeric" }, { "D", "Dot product" }, { "C", "Cosine metric" }, { "O", "Overlap" }, { "L", "Levenshtein" }, { "DC", "Dice coefficient" }, { "M", "Value Difference" }, { "J", "Jeffrey Divergence" }, { "S", "Jensen-Shannon Divergence" }, { "E", "Euclidean Distance" } }; const string WeightName[][2] = { { "un", "Unknown Weighting" }, { "nw", "No Weighting" }, { "gr", "GainRatio" }, { "ig", "InfoGain" }, { "x2", "Chi-square" }, { "sv", "Shared Variance" }, { "sd", "Standard Deviation" }, { "ud", "User Defined"} }; const string DecayName[][2] = { { "Unknown", "Unknown Decay" }, { "Z", "Zero Decay" }, { "ID", "Inverse Distance" }, { "IL", "Inverse Linear Distance" }, { "ED", "Exponential Decay" } }; const string SmoothingName[][2] = { { "Unknown", "Unknown Smoothing" }, { "Default", "Default Smoothing" }, { "L", "Lidstone Smoothing" } }; const string OrdeningName[][2] = { { "Unknown", "Unknown Ordering" }, { "UDO", "Data File Ordering" }, { "DO", "Default Ordering" }, { "GRO", 
"GainRatio" }, { "IGO", "InformationGain" }, { "1/V", "Inverse Values" }, { "1/S", "Inverse SplitInfo" }, { "G/V", "GainRatio/Values" }, { "I/V", "InformationGain/Values" }, { "GxE", "GainRatio*Entropy" }, { "IxE", "InformationGain*Entropy" }, { "X2O", "Chi-Squared" }, { "SVO", "Shared Variance" }, { "SDO", "Standard Deviation" }, { "X/V", "Chi-Squared/Values" }, { "S/V", "Shared Variance/Values" }, { "SD/V", "Standard Deviation/Values" } }; const string InputFormatName[][2] = { { "Unknown", "Unknown Input Format" }, { "Compact", "Compact" }, { "C45", "C4.5" }, { "Column", "Columns" }, { "Tabbed", "Tabbed" }, { "ARFF", "ARFF" }, { "BINARY", "Sparse Binary" }, { "SPARSE", "Sparse" } }; const string VerbosityName[][2] = { { "Unknown", "erroneous" }, { "S", "Silent" }, { "O", "Options" }, { "F", "Feature_Statistics" }, { "P", "Probability_arrays" }, { "E", "Exact_match" }, { "DI", "Distances" }, { "DB", "Distribution" }, { "N", "Nearest_Neighbours" }, { "AS", "Advanced_Statistics" }, { "CM", "Confusion_Matrix" }, { "CS", "Class_Statistics" }, { "CD", "Client_Debug" }, { "K", "All_K_values" }, { "MD", "MatchingDepth" }, { "B", "BranchingFactor" }, { "CF", "Confidence" }, // Verbosity is special! // should end with "" strings! 
{ "", "" } }; const string NormalisationName[][2] = { { "Unknown", "Unknown normalisation" }, { "None", "No Normalisation" }, { "Probability", "Normalise to 100%" }, { "AddFactor", "Add a factor to all targets, then normalise to 100%" }, { "LogProbability", "Take 10log, then Normalise to 100%" } }; WeightType charToWeig( char w ){ switch ( w ){ case '0': return No_w; case '1': return GR_w; case '2': return IG_w; case '3': return X2_w; case '4': return SV_w; case '5': return SD_w; default: return Unknown_w; } } AlgorithmType charToAlg( char a ){ switch ( a ){ case '0': return IB1_a; case '1': return IGTREE_a; case '2': return TRIBL_a; case '3': return IB2_a; case '4': return TRIBL2_a; default: return Unknown_a; } } normType charToNorm( char a ){ switch ( a ){ case '0': return probabilityNorm; case '1': return addFactorNorm; case '2': return logProbNorm; default: return unknownNorm; } } } LanguageMachines-timbl-642727d/src/neighborSet.cxx000066400000000000000000000145511451477526200221020ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . 
For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #include #include "timbl/Common.h" #include "timbl/Types.h" #include "timbl/Instance.h" #include "timbl/neighborSet.h" namespace Timbl { using namespace std; using namespace Common; neighborSet::neighborSet(): showDistance(false),showDistribution(false){} neighborSet::~neighborSet(){ clear(); } neighborSet::neighborSet( const neighborSet& in ){ showDistance = in.showDistance; showDistribution = in.showDistribution; merge( in ); } neighborSet& neighborSet::operator=( const neighborSet& in ){ if ( this != &in ){ clear(); showDistance = in.showDistance; showDistribution = in.showDistribution; merge( in ); } return *this; } size_t neighborSet::size() const{ return distances.size(); } void neighborSet::clear(){ distances.clear(); for ( auto const& db : distributions ){ delete db; } distributions.clear(); } void neighborSet::reserve( size_t s ){ distances.reserve( s ); distributions.reserve( s ); } void neighborSet::truncate( size_t len ){ if ( len < distributions.size() ){ for ( size_t i=len; i < distributions.size(); ++i ){ delete distributions[i]; } distributions.resize( len ); distances.resize( len); } } void neighborSet::push_back( double d, const ClassDistribution &dist ){ distances.push_back( d ); distributions.push_back( dist.to_VD_Copy() ); } void neighborSet::merge( const neighborSet& s ){ // reserve enough space to avoid reallocations // reallocation invalidates pointers! 
reserve( size() + s.size() ); auto dit1 = distances.begin(); auto dit2 = s.distances.begin(); auto dis1 = distributions.begin(); auto dis2 = s.distributions.begin(); while ( dit1 != distances.end() ){ if ( dit2 != s.distances.end() ){ if (fabs(*dit1 - *dit2) < Epsilon) { // equal (*dis1)->Merge( **dis2 ); ++dit1; ++dis1; ++dit2; ++dis2; } else if ( *dit1 < *dit2 ){ ++dit1; ++dis1; } else { dit1 = distances.insert( dit1, *dit2 ); ++dit1; ++dit2; dis1 = distributions.insert( dis1, (*dis2)->to_VD_Copy() ); ++dis1; ++dis2; } } else { break; } } while ( dit2 != s.distances.end() ){ distances.push_back( *dit2 ); ++dit2; distributions.push_back( (*dis2)->to_VD_Copy() ); ++dis2; } } double neighborSet::relativeWeight( const decayStruct *d, size_t k ) const{ double result = 1.0; if ( !d ){ return result; } switch ( d->type() ){ case Zero: break; case InvDist: result = 1.0/(distances[k] + Epsilon); break; case InvLinear: if ( k > 0 && size() != 1 ){ double nearest_dist, furthest_dist; nearest_dist = distances[0]; furthest_dist = distances[size()-1]; result = (furthest_dist - distances[k]) / (furthest_dist-nearest_dist); } break; case ExpDecay: result = exp(-d->alpha*pow(distances[k], d->beta)); if ( result == 0 ){ // A result of zero is undesirable. (bug 89) // We optimisticly replace it with Epsilon result = Epsilon; } break; default: throw std::logic_error( "wrong value in switch" ); } return result; } double neighborSet::getDistance( size_t n ) const { if ( size() <= n ){ throw std::range_error( "getDistance() parameter exceeds size of neighborSet" ); } return distances[n]; } const ClassDistribution *neighborSet::getDistribution( size_t n ) const { if ( size() <= n ){ throw std::range_error( "getDistribution() parameter exceeds size of neighborSet" ); } return distributions[n]; } WClassDistribution *neighborSet::bestDistribution( const decayStruct *d, size_t max ) const { // Analyse the set to find THE best ClassDistribution. 
// For each neighbor, we loop over the number of bests in that // bin, and merge that distribution into the result // WClassDistribution *result = new WClassDistribution(); size_t stop = distributions.size(); stop = ( max > 0 && max < stop ? max : stop ); for ( size_t k = 0; k < stop; ++k ) { result->MergeW( *distributions[k], relativeWeight( d, k ) ); } return result; } ostream& operator<<( ostream& os, const neighborSet& set ){ for ( unsigned int i=0; i < set.size(); ++i ){ os << "# k=" << i+1; if ( set.showDistribution ){ os << "\t" << set.distributions[i]->DistToStringW(0); } if ( set.showDistance ){ int OldPrec = os.precision(DBL_DIG-1); os.setf(ios::showpoint); os << "\t" << set.distances[i]; os.precision(OldPrec); } os << endl; } return os; } ostream& operator<<( ostream& os, const neighborSet *Set ){ os << *Set; return os; } ostream& operator<<( ostream& os, const decayStruct& dc ){ return dc.put( os ); } ostream& zeroDecay::put( ostream& os ) const { return os; } ostream& invLinDecay::put( ostream& os ) const { os << "Decay : " << TiCC::toString( type(), true); return os; } ostream& invDistDecay::put( ostream& os ) const { os << "Decay : " << TiCC::toString( type(), true); return os; } ostream& expDecay::put( ostream& os ) const { os << "Decay : " << TiCC::toString( type(), true); os << " a=" << alpha << " b= " << beta; return os; } ostream& operator<<( ostream& os, const decayStruct *dc ){ if ( dc ){ os << *dc; } return os; } } LanguageMachines-timbl-642727d/src/simpletest.cxx000066400000000000000000000032311451477526200220130ustar00rootroot00000000000000/* Copyright (c) 1998 - 2023 ILK - Tilburg University CLST - Radboud University CLiPS - University of Antwerp This file is part of timbl timbl is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. 
timbl is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see . For questions and suggestions, see: https://github.com/LanguageMachines/timbl/issues or send mail to: lamasoftware (at ) science.ru.nl */ #include "timbl/TimblAPI.h" #include int main(){ std::string path = std::getenv( "topsrcdir" ); std::cerr << path << std::endl; std::cerr << "version: " << Timbl::VersionName() << std::endl; Timbl::AlgorithmType alg = Timbl::charToAlg( '0' ); assert( alg == Timbl::IB1_a ); Timbl::normType nor = Timbl::charToNorm( '0' ); assert( nor == Timbl::probabilityNorm ); Timbl::WeightType w = Timbl::charToWeig( '0' ); assert( w == Timbl::No_w ); Timbl::TimblAPI exp( "+vdi+db", "test1" ); if ( exp.isValid() ){ exp.Learn( path + "/demos/dimin.train" ); if ( exp.isValid() ){ exp.Test( path + "/demos/dimin.test", "dimin.out" ); if ( exp.isValid() ){ return EXIT_SUCCESS; } } } return EXIT_FAILURE; } LanguageMachines-timbl-642727d/timbl.pc.in000066400000000000000000000004401451477526200203460ustar00rootroot00000000000000prefix=@prefix@ exec_prefix=@exec_prefix@ libdir=@libdir@ includedir=@includedir@ openmpflags=@OPENMP_CXXFLAGS@ Name: timbl Version: @VERSION@ Description: timbl library. Requires.private: libxml-2.0 Libs: -L${libdir} -ltimbl Libs.private: @LIBS@ ${openmpflags} Cflags: -I${includedir}