pax_global_header00006660000000000000000000000064134210272130014505gustar00rootroot0000000000000052 comment=baccb793fc68701cce499f255e4f12eca8977c00 dtfabric-20190120/000077500000000000000000000000001342102721300135235ustar00rootroot00000000000000dtfabric-20190120/.gitignore000066400000000000000000000004221342102721300155110ustar00rootroot00000000000000# Files to ignore by git. # Back-up files *~ *.swp # Generic auto-generated build files *.pyc *.pyo # Specific auto-generated build files /.tox /__pycache__ /build /dtfabric.egg-info /dist # Code review files /.review # Test coverage files .coverage tests-coverage.txt dtfabric-20190120/.pylintrc000066400000000000000000000356121342102721300153770ustar00rootroot00000000000000# Pylint 1.7.x - 1.9.x configuration file # # This file is generated by l2tdevtools update-dependencies.py, any dependency # related changes should be made in dependencies.ini. [MASTER] # A comma-separated list of package or module names from where C extensions may # be loaded. Extensions are loading into the active Python interpreter and may # run arbitrary code extension-pkg-whitelist= # Add files or directories to the blacklist. They should be base names, not # paths. ignore=CVS # Add files or directories matching the regex patterns to the blacklist. The # regex matches against base names, not paths. ignore-patterns= # Python code to execute, usually for sys.path manipulation such as # pygtk.require(). #init-hook= # Use multiple processes to speed up Pylint. jobs=1 # List of plugins (as comma separated values of python modules names) to load, # usually to register additional checkers. load-plugins=pylint.extensions.docparams # Pickle collected data for later comparisons. persistent=yes # Specify a configuration file. #rcfile= # Allow loading of arbitrary C extensions. Extensions are imported into the # active Python interpreter and may run arbitrary code. 
unsafe-load-any-extension=no [MESSAGES CONTROL] # Only show warnings with the listed confidence levels. Leave empty to show # all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED confidence= # Disable the message, report, category or checker with the given id(s). You # can either give multiple identifiers separated by comma (,) or put this # option multiple times (only on the command line, not in the configuration # file where it should appear only once).You can also use "--disable=all" to # disable everything first and then reenable specific checks. For example, if # you want to run only the similarities checker, you can use "--disable=all # --enable=similarities". If you want to run only the classes checker, but have # no Warning level messages displayed, use"--disable=all --enable=classes # --disable=W" # disable= duplicate-code, parameter-unpacking, raw-checker-failed, bad-inline-option, locally-disabled, locally-enabled, file-ignored, suppressed-message, useless-suppression, deprecated-pragma, no-absolute-import, missing-param-doc, metaclass-assignment, eq-without-hash, fixme, logging-format-interpolation, no-self-use, too-few-public-methods, too-many-ancestors, too-many-boolean-expressions, too-many-branches, too-many-instance-attributes, too-many-lines, too-many-locals, too-many-nested-blocks, too-many-public-methods, too-many-return-statements, too-many-statements, unsubscriptable-object # Enable the message, report, category or checker with the given id(s). You can # either give multiple identifier separated by comma (,) or put this option # multiple time (only on the command line, not in the configuration file where # it should appear only once). See also the "--disable" option for examples. enable= [REPORTS] # Python expression which should return a note less than 10 (10 is the highest # note). 
You have access to the variables errors warning, statement which # respectively contain the number of errors / warnings messages and the total # number of statements analyzed. This is used by the global evaluation report # (RP0004). evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) # Template used to display messages. This is a python new-style format string # used to format the message information. See doc for all details #msg-template= # Set the output format. Available formats are text, parseable, colorized, json # and msvs (visual studio).You can also give a reporter class, eg # mypackage.mymodule.MyReporterClass. output-format=text # Tells whether to display a full report or only the messages reports=no # Activate the evaluation score. # score=yes score=no [REFACTORING] # Maximum number of nested blocks for function / method body max-nested-blocks=5 [VARIABLES] # List of additional names supposed to be defined in builtins. Remember that # you should avoid to define new builtins when possible. additional-builtins= # Tells whether unused global variables should be treated as a violation. allow-global-unused-variables=yes # List of strings which can identify a callback function by name. A callback # name must start or end with one of those strings. callbacks=cb_,_cb # A regular expression matching the name of dummy variables (i.e. expectedly # not used). dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ # Argument names that match this expression will be ignored. Default to name # with leading underscore ignored-argument-names=_.*|^ignored_|^unused_ # Tells whether we should check for unused import in __init__ files. init-import=no # List of qualified module names which can have objects that can redefine # builtins. redefining-builtins-modules=six.moves,future.builtins [TYPECHECK] # List of decorators that produce context managers, such as # contextlib.contextmanager. 
Add to this list to register other decorators that # produce valid context managers. contextmanager-decorators=contextlib.contextmanager # List of members which are set dynamically and missed by pylint inference # system, and so shouldn't trigger E1101 when accessed. Python regular # expressions are accepted. generated-members= # Tells whether missing members accessed in mixin class should be ignored. A # mixin class is detected if its name ends with "mixin" (case insensitive). ignore-mixin-members=yes # This flag controls whether pylint should warn about no-member and similar # checks whenever an opaque object is returned when inferring. The inference # can return multiple potential results while evaluating a Python object, but # some branches might not be evaluated, which results in partial inference. In # that case, it might be useful to still emit no-member and other checks for # the rest of the inferred objects. ignore-on-opaque-inference=yes # List of class names for which member attributes should not be checked (useful # for classes with dynamically set attributes). This supports the use of # qualified names. ignored-classes=optparse.Values,thread._local,_thread._local # List of module names for which member attributes should not be checked # (useful for modules/projects where namespaces are manipulated during runtime # and thus existing member attributes cannot be deduced by static analysis. It # supports qualified module names, as well as Unix pattern matching. ignored-modules= # Show a hint with possible names when a member name was not found. The aspect # of finding the hint is based on edit distance. missing-member-hint=yes # The minimum edit distance a name should have in order to be considered a # similar match for a missing member name. missing-member-hint-distance=1 # The total number of similar names that should be taken in consideration when # showing a hint for a missing member. 
missing-member-max-choices=1 [LOGGING] # Logging modules to check that the string format arguments are in logging # function parameter format logging-modules=logging [BASIC] # Naming hint for argument names # argument-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ argument-name-hint=(([a-z][a-z0-9_]*)|(_[a-z0-9_]*))$ # Regular expression matching correct argument names # argument-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ argument-rgx=(([a-z][a-z0-9_]*)|(_[a-z0-9_]*))$ # Naming hint for attribute names # attr-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ attr-name-hint=(([a-z][a-z0-9_]*)|(_[a-z0-9_]*))$ # Regular expression matching correct attribute names # attr-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ attr-rgx=(([a-z][a-z0-9_]*)|(_[a-z0-9_]*))$ # Bad variable names which should always be refused, separated by a comma bad-names=foo,bar,baz,toto,tutu,tata # Naming hint for class attribute names # class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]*|(__.*__))$ # Regular expression matching correct class attribute names # class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]*|(__.*__))$ # Naming hint for class names class-name-hint=[A-Z_][a-zA-Z0-9]+$ # Regular expression matching correct class names class-rgx=[A-Z_][a-zA-Z0-9]+$ # Naming hint for constant names # const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$ const-name-hint=(([a-zA-Z_][a-zA-Z0-9_]*)|(__.*__))$ # Regular expression matching correct constant names # const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ const-rgx=(([a-zA-Z_][a-zA-Z0-9_]*)|(__.*__))$ # Minimum line length for functions/classes that require docstrings, shorter # ones are exempt. 
docstring-min-length=-1 # Naming hint for function names # function-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ function-name-hint=[A-Z_][a-zA-Z0-9_]*$ # Regular expression matching correct function names # function-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ function-rgx=[A-Z_][a-zA-Z0-9_]*$ # Good variable names which should always be accepted, separated by a comma good-names=i,j,k,ex,Run,_ # Include a hint for the correct naming format with invalid-name include-naming-hint=no # Naming hint for inline iteration names inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$ # Regular expression matching correct inline iteration names inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ # Naming hint for method names # method-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ method-name-hint=(test|[A-Z_])[a-zA-Z0-9_]*$ # Regular expression matching correct method names # method-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ method-rgx=(test|[A-Z_])[a-zA-Z0-9_]*$ # Naming hint for module names module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ # Regular expression matching correct module names module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ # Colon-delimited sets of names that determine each other's naming style when # the name regexes allow several styles. name-group= # Regular expression which should only match function or class names that do # not require a docstring. no-docstring-rgx=^_ # List of decorators that produce properties, such as abc.abstractproperty. Add # to this list to register other decorators that produce valid properties. property-classes=abc.abstractproperty # Naming hint for variable names # variable-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ variable-name-hint=(([a-z][a-z0-9_]*)|(_[a-z0-9_]*))$ # Regular expression matching correct variable names # variable-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ variable-rgx=(([a-z][a-z0-9_]*)|(_[a-z0-9_]*))$ [MISCELLANEOUS] # List of note tags to take in consideration, separated by a comma. 
notes=FIXME,XXX,TODO [FORMAT] # Expected format of line ending, e.g. empty (any line ending), LF or CRLF. expected-line-ending-format= # Regexp for a line that is allowed to be longer than the limit. ignore-long-lines=^\s*(# )??$ # Number of spaces of indent required inside a hanging or continued line. indent-after-paren=4 # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 # tab). # indent-string=' ' indent-string=' ' # Maximum number of characters on a single line. # max-line-length=100 max-line-length=80 # Maximum number of lines in a module max-module-lines=1000 # List of optional constructs for which whitespace checking is disabled. `dict- # separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. # `trailing-comma` allows a space between comma and closing bracket: (a, ). # `empty-line` allows space-only lines. no-space-check=trailing-comma,dict-separator # Allow the body of a class to be on the same line as the declaration if body # contains single statement. single-line-class-stmt=no # Allow the body of an if to be on the same line as the test if there is no # else. single-line-if-stmt=no [SPELLING] # Spelling dictionary name. Available dictionaries: en_US (myspell). spelling-dict= # List of comma separated words that should not be checked. spelling-ignore-words= # A path to a file that contains private dictionary; one word per line. spelling-private-dict-file= # Tells whether to store unknown words to indicated private dictionary in # --spelling-private-dict-file option instead of raising a message. spelling-store-unknown-words=no [SIMILARITIES] # Ignore comments when computing similarities. ignore-comments=yes # Ignore docstrings when computing similarities. ignore-docstrings=yes # Ignore imports when computing similarities. ignore-imports=no # Minimum lines number of a similarity. 
min-similarity-lines=4 [DESIGN] # Maximum number of arguments for function / method # max-args=5 max-args=10 # Maximum number of attributes for a class (see R0902). max-attributes=7 # Maximum number of boolean expressions in a if statement max-bool-expr=5 # Maximum number of branch for function / method body max-branches=12 # Maximum number of locals for function / method body max-locals=15 # Maximum number of parents for a class (see R0901). max-parents=7 # Maximum number of public methods for a class (see R0904). max-public-methods=20 # Maximum number of return / yield for function / method body max-returns=6 # Maximum number of statements in function / method body max-statements=50 # Minimum number of public methods for a class (see R0903). min-public-methods=2 [CLASSES] # List of method names used to declare (i.e. assign) instance attributes. defining-attr-methods=__init__,__new__,setUp # List of member names, which should be excluded from the protected access # warning. exclude-protected=_asdict,_fields,_replace,_source,_make # List of valid names for the first argument in a class method. valid-classmethod-first-arg=cls # List of valid names for the first argument in a metaclass class method. valid-metaclass-classmethod-first-arg=mcs [IMPORTS] # Allow wildcard imports from modules that define __all__. allow-wildcard-with-all=no # Analyse import fallback blocks. This can be used to support both Python 2 and # 3 compatible code, which means that the block might have code that exists # only in one or another interpreter, leading to false positives when analysed. analyse-fallback-blocks=no # Deprecated modules which should not be used, separated by a comma deprecated-modules=optparse,tkinter.tix # Create a graph of external dependencies in the given file (report RP0402 must # not be disabled) ext-import-graph= # Create a graph of every (i.e. 
internal and external) dependencies in the # given file (report RP0402 must not be disabled) import-graph= # Create a graph of internal dependencies in the given file (report RP0402 must # not be disabled) int-import-graph= # Force import order to recognize a module as part of the standard # compatibility libraries. known-standard-library= # Force import order to recognize a module as part of a third party library. known-third-party=enchant [EXCEPTIONS] # Exceptions that will emit a warning when being caught. Defaults to # "Exception" overgeneral-exceptions=Exception dtfabric-20190120/.travis.yml000066400000000000000000000046371342102721300156460ustar00rootroot00000000000000matrix: include: - env: TARGET="pylint" os: linux dist: trusty sudo: required group: edge language: python python: 2.7 virtualenv: system_site_packages: true - env: TARGET="linux-python27" os: linux dist: xenial sudo: required group: edge language: python python: 2.7 virtualenv: system_site_packages: true - env: TARGET="linux-python35" os: linux dist: xenial sudo: required group: edge language: python python: 3.5 virtualenv: system_site_packages: true - env: [TARGET="linux-python27-tox", TOXENV="py27"] os: linux dist: xenial sudo: required group: edge language: python python: 2.7 virtualenv: system_site_packages: false - env: [TARGET="linux-python34-tox", TOXENV="py34"] os: linux dist: xenial sudo: required group: edge language: python python: 3.4 virtualenv: system_site_packages: false - env: [TARGET="linux-python35-tox", TOXENV="py35"] os: linux dist: xenial sudo: required group: edge language: python python: 3.5 virtualenv: system_site_packages: false - env: [TARGET="linux-python36-tox", TOXENV="py36"] os: linux dist: xenial sudo: required group: edge language: python python: 3.6 virtualenv: system_site_packages: false - env: [TARGET="linux-python37-tox", TOXENV="py37"] os: linux dist: xenial sudo: required group: edge language: python python: 3.7 virtualenv: system_site_packages: false - 
env: [TARGET="macos-python27", PYTHONPATH="/Library/Python/2.7/site-packages/"] os: osx osx_image: xcode9.2 language: generic - env: TARGET="trusty-python27" os: linux dist: trusty sudo: required group: edge language: python python: 2.7 virtualenv: system_site_packages: true - env: TARGET="trusty-python34" os: linux dist: trusty sudo: required group: edge language: python python: 3.4 virtualenv: system_site_packages: true install: - ./config/travis/install.sh script: - ./config/travis/run_with_timeout.sh 30 ./config/travis/runtests.sh after_success: - if ! test -f /usr/bin/coverage; then sudo ln -s /usr/bin/python-coverage /usr/bin/coverage; fi - if test ${TARGET} = "linux-python27"; then curl -o codecov.sh -s https://codecov.io/bash && /bin/bash ./codecov.sh; fi dtfabric-20190120/ACKNOWLEDGEMENTS000066400000000000000000000001341342102721300157760ustar00rootroot00000000000000Acknowledgements: dtfabric Copyright (c) 2016-2018, Joachim Metz dtfabric-20190120/AUTHORS000066400000000000000000000004111342102721300145670ustar00rootroot00000000000000# Names should be added to this file with this pattern: # # For individuals: # Name (email address) # # For organizations: # Organization (fnmatch pattern) # # See python fnmatch module documentation for more information. Joachim Metz (joachim.metz@gmail.com) dtfabric-20190120/LICENSE000066400000000000000000000261361342102721300145400ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. 
For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. 
For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. 
If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. 
You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. 
Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. dtfabric-20190120/MANIFEST.in000066400000000000000000000006571342102721300152710ustar00rootroot00000000000000include AUTHORS LICENSE README include run_tests.py exclude .gitignore exclude *.pyc recursive-include config * recursive-exclude dtfabric *.pyc recursive-include scripts *.py recursive-exclude scripts *.pyc recursive-include test_data * # The test scripts are not required in a binary distribution package they # are considered source distribution files and excluded in find_package() # in setup.py. recursive-include tests *.py dtfabric-20190120/README000066400000000000000000000002761342102721300144100ustar00rootroot00000000000000dtfabric is a project to manage data types and structures, as used in the libyal projects. 
For more information see: * Project documentation: https://github.com/libyal/dtfabric/wiki/Home dtfabric-20190120/appveyor.yml000066400000000000000000000025471342102721300161230ustar00rootroot00000000000000environment: matrix: - TARGET: windows_python27 MACHINE_TYPE: "x86" PYTHON: "C:\\Python27" - TARGET: windows_python27 MACHINE_TYPE: "amd64" PYTHON: "C:\\Python27-x64" - TARGET: windows_python36 MACHINE_TYPE: "x86" PYTHON: "C:\\Python36" - TARGET: windows_python36 MACHINE_TYPE: "amd64" PYTHON: "C:\\Python36-x64" install: - cmd: '"C:\Program Files\Microsoft SDKs\Windows\v7.1\Bin\SetEnv.cmd" /x86 /release' - cmd: "%PYTHON%\\python.exe -m pip install --upgrade pip" - cmd: "%PYTHON%\\python.exe -m pip install pywin32 WMI" - cmd: "%PYTHON%\\python.exe %PYTHON%\\Scripts\\pywin32_postinstall.py -install" - cmd: git clone https://github.com/log2timeline/l2tdevtools.git ..\l2tdevtools - cmd: if [%TARGET%]==[windows_python27] ( mkdir dependencies && set PYTHONPATH=..\l2tdevtools && "%PYTHON%\\python.exe" ..\l2tdevtools\tools\update.py --download-directory dependencies --machine-type %MACHINE_TYPE% --msi-targetdir "%PYTHON%" --track dev PyYAML funcsigs mock pbr six ) - cmd: if [%TARGET%]==[windows_python36] ( mkdir dependencies && set PYTHONPATH=..\l2tdevtools && "%PYTHON%\\python.exe" ..\l2tdevtools\tools\update.py --download-directory dependencies --machine-type %MACHINE_TYPE% --msi-targetdir "%PYTHON%" --track dev PyYAML mock pbr six ) build: off test_script: - cmd: "%PYTHON%\\python.exe run_tests.py" dtfabric-20190120/config/000077500000000000000000000000001342102721300147705ustar00rootroot00000000000000dtfabric-20190120/config/dpkg/000077500000000000000000000000001342102721300157155ustar00rootroot00000000000000dtfabric-20190120/config/dpkg/changelog000066400000000000000000000002131342102721300175630ustar00rootroot00000000000000dtfabric (20190120-1) unstable; urgency=low * Auto-generated -- Joachim Metz Sun, 20 Jan 2019 09:06:15 
+0100dtfabric-20190120/config/dpkg/clean000066400000000000000000000000461342102721300167220ustar00rootroot00000000000000dtfabric/*.pyc dtfabric/*/*.pyc *.pyc dtfabric-20190120/config/dpkg/compat000066400000000000000000000000021342102721300171130ustar00rootroot000000000000009 dtfabric-20190120/config/dpkg/control000066400000000000000000000021651342102721300173240ustar00rootroot00000000000000Source: dtfabric Section: python Priority: extra Maintainer: Joachim Metz Build-Depends: debhelper (>= 9), python-all (>= 2.7~), python-setuptools, python3-all (>= 3.4~), python3-setuptools Standards-Version: 3.9.5 X-Python-Version: >= 2.7 X-Python3-Version: >= 3.4 Homepage: https://github.com/libyal/dtfabric Package: python-dtfabric Architecture: all Depends: python-yaml (>= 3.10), ${python:Depends}, ${misc:Depends} Description: Python 2 module of dtFabric dtFabric, or data type fabric, is a project to manage data types and structures, as used in the libyal projects. Package: python3-dtfabric Architecture: all Depends: python3-yaml (>= 3.10), ${python3:Depends}, ${misc:Depends} Description: Python 3 module of dtFabric dtFabric, or data type fabric, is a project to manage data types and structures, as used in the libyal projects. Package: dtfabric-tools Architecture: all Depends: python-dtfabric, python (>= 2.7~), ${python:Depends}, ${misc:Depends} Description: Tools of dtFabric dtFabric, or data type fabric, is a project to manage data types and structures, as used in the libyal projects. dtfabric-20190120/config/dpkg/copyright000066400000000000000000000015641342102721300176560ustar00rootroot00000000000000Format: http://dep.debian.net/deps/dep5 Upstream-Name: dtfabric Source: https://github.com/libyal/dtfabric Files: * Copyright: 2016-2017, Joachim Metz License: Apache-2.0 License: Apache-2.0 Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at . 
http://www.apache.org/licenses/LICENSE-2.0 . Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. . On Debian systems, the complete text of the Apache version 2.0 license can be found in "/usr/share/common-licenses/Apache-2.0". dtfabric-20190120/config/dpkg/dtfabric-data.dirs000066400000000000000000000000241342102721300212610ustar00rootroot00000000000000/usr/share/dtfabric dtfabric-20190120/config/dpkg/dtfabric-data.install000066400000000000000000000000321342102721300217650ustar00rootroot00000000000000data/* usr/share/dtfabric dtfabric-20190120/config/dpkg/python-dtfabric.install000066400000000000000000000002201342102721300223740ustar00rootroot00000000000000usr/lib/python2*/dist-packages/dtfabric/*.py usr/lib/python2*/dist-packages/dtfabric/*/*.py usr/lib/python2*/dist-packages/dtfabric*.egg-info/* dtfabric-20190120/config/dpkg/python3-dtfabric.install000066400000000000000000000002201342102721300224570ustar00rootroot00000000000000usr/lib/python3*/dist-packages/dtfabric/*.py usr/lib/python3*/dist-packages/dtfabric/*/*.py usr/lib/python3*/dist-packages/dtfabric*.egg-info/* dtfabric-20190120/config/dpkg/rules000077500000000000000000000011471342102721300170000ustar00rootroot00000000000000#!/usr/bin/make -f %: dh $@ --buildsystem=python_distutils --with=python2,python3 .PHONY: override_dh_auto_clean override_dh_auto_clean: dh_auto_clean rm -rf build dtfabric.egg-info/SOURCES.txt dtfabric.egg-info/PKG-INFO .PHONY: override_dh_auto_build override_dh_auto_build: dh_auto_build set -ex; for python in $(shell py3versions -r); do \ $$python setup.py build; \ done; .PHONY: override_dh_auto_install override_dh_auto_install: dh_auto_install --destdir $(CURDIR) set -ex; for python in $(shell py3versions -r); do \ $$python setup.py 
install --root=$(CURDIR) --install-layout=deb; \ done; dtfabric-20190120/config/dpkg/source/000077500000000000000000000000001342102721300172155ustar00rootroot00000000000000dtfabric-20190120/config/dpkg/source/format000066400000000000000000000000141342102721300204230ustar00rootroot000000000000003.0 (quilt) dtfabric-20190120/config/pylint/000077500000000000000000000000001342102721300163075ustar00rootroot00000000000000dtfabric-20190120/config/pylint/spelling-private-dict000066400000000000000000000004261342102721300224420ustar00rootroot00000000000000argparse args bool boolean config datatypedefinition datatypemap datatypemapcontext deregisters dev dpkg dtfabric enumerationvalue filenames macos membersectiondefinition metadata msi os pre py rc readfp runtime sdist stdin str struct sys tuple unresolvable urls uuid validator dtfabric-20190120/config/travis/000077500000000000000000000000001342102721300163005ustar00rootroot00000000000000dtfabric-20190120/config/travis/install.sh000077500000000000000000000036721342102721300203150ustar00rootroot00000000000000#!/bin/bash # # Script to set up Travis-CI test VM. # # This file is generated by l2tdevtools update-dependencies.py any dependency # related changes should be made in dependencies.ini. L2TBINARIES_DEPENDENCIES="PyYAML"; L2TBINARIES_TEST_DEPENDENCIES="funcsigs mock pbr six"; PYTHON2_DEPENDENCIES="python-yaml"; PYTHON2_TEST_DEPENDENCIES="python-coverage python-funcsigs python-mock python-pbr python-six python-tox"; PYTHON3_DEPENDENCIES="python3-yaml"; PYTHON3_TEST_DEPENDENCIES="python3-mock python3-pbr python3-setuptools python3-six python3-tox"; # Exit on error. 
set -e; if test ${TRAVIS_OS_NAME} = "osx"; then git clone https://github.com/log2timeline/l2tbinaries.git -b dev; mv l2tbinaries ../; for PACKAGE in ${L2TBINARIES_DEPENDENCIES}; do echo "Installing: ${PACKAGE}"; sudo /usr/bin/hdiutil attach ../l2tbinaries/macos/${PACKAGE}-*.dmg; sudo /usr/sbin/installer -target / -pkg /Volumes/${PACKAGE}-*.pkg/${PACKAGE}-*.pkg; sudo /usr/bin/hdiutil detach /Volumes/${PACKAGE}-*.pkg done for PACKAGE in ${L2TBINARIES_TEST_DEPENDENCIES}; do echo "Installing: ${PACKAGE}"; sudo /usr/bin/hdiutil attach ../l2tbinaries/macos/${PACKAGE}-*.dmg; sudo /usr/sbin/installer -target / -pkg /Volumes/${PACKAGE}-*.pkg/${PACKAGE}-*.pkg; sudo /usr/bin/hdiutil detach /Volumes/${PACKAGE}-*.pkg done elif test ${TRAVIS_OS_NAME} = "linux" && test ${TARGET} != "jenkins"; then sudo rm -f /etc/apt/sources.list.d/travis_ci_zeromq3-source.list; if test ${TARGET} = "pylint"; then if test ${TRAVIS_PYTHON_VERSION} = "2.7"; then sudo add-apt-repository ppa:gift/pylint2 -y; fi fi sudo add-apt-repository ppa:gift/dev -y; sudo apt-get update -q; if test ${TRAVIS_PYTHON_VERSION} = "2.7"; then sudo apt-get install -y ${PYTHON2_DEPENDENCIES} ${PYTHON2_TEST_DEPENDENCIES}; else sudo apt-get install -y ${PYTHON3_DEPENDENCIES} ${PYTHON3_TEST_DEPENDENCIES}; fi if test ${TARGET} = "pylint"; then sudo apt-get install -y pylint; fi fi dtfabric-20190120/config/travis/run_with_timeout.sh000077500000000000000000000017111342102721300222440ustar00rootroot00000000000000#!/bin/bash # # Script to run commands on a Travis-CI test VM that otherwise would time out # after 10 minutes. This replaces travis_wait and outputs stdout of the command # running. # # This file is generated by l2tdevtools update-dependencies.py, any dependency # related changes should be made in dependencies.ini. # Exit on error. set -e # Usage: ./run_with_timeout.sh [TIMEOUT] [COMMAND] [OPTION] [...] TIMEOUT=$1; shift # Launch a command in the background. $* & PID_COMMAND=$!; # Probe the command every minute. 
MINUTES=0; while kill -0 ${PID_COMMAND} >/dev/null 2>&1; do # Print to stdout, seeing this prints a space and a backspace # there is no visible trace. echo -n -e " \b"; if test ${MINUTES} -ge ${TIMEOUT}; then kill -9 ${PID_COMMAND} >/dev/null 2>&1; echo -e "\033[0;31m[ERROR] command: $* timed out after: ${MINUTES} minute(s).\033[0m"; exit 1; fi MINUTES=$(( ${MINUTES} + 1 )); sleep 60; done wait ${PID_COMMAND}; exit $?; dtfabric-20190120/config/travis/runtests.sh000077500000000000000000000032641342102721300205330ustar00rootroot00000000000000#!/bin/bash # # Script to run tests on Travis-CI. # # This file is generated by l2tdevtools update-dependencies.py, any dependency # related changes should be made in dependencies.ini. # Exit on error. set -e; if test "${TARGET}" = "jenkins"; then ./config/jenkins/linux/run_end_to_end_tests.sh "travis"; elif test "${TARGET}" = "pylint"; then pylint --version for FILE in `find setup.py config dtfabric scripts tests -name \*.py`; do echo "Checking: ${FILE}"; pylint --rcfile=.pylintrc ${FILE}; done elif test "${TRAVIS_OS_NAME}" = "osx"; then PYTHONPATH=/Library/Python/2.7/site-packages/ /usr/bin/python ./run_tests.py; python ./setup.py build python ./setup.py sdist python ./setup.py bdist if test -f tests/end-to-end.py; then PYTHONPATH=. python ./tests/end-to-end.py --debug -c config/end-to-end.ini; fi elif test "${TRAVIS_OS_NAME}" = "linux"; then COVERAGE="/usr/bin/coverage"; if ! test -x "${COVERAGE}"; then # Ubuntu has renamed coverage. 
COVERAGE="/usr/bin/python-coverage"; fi if test -n "${TOXENV}"; then tox --sitepackages ${TOXENV}; elif test "${TRAVIS_PYTHON_VERSION}" = "2.7"; then ${COVERAGE} erase ${COVERAGE} run --source=dtfabric --omit="*_test*,*__init__*,*test_lib*" ./run_tests.py else python ./run_tests.py python ./setup.py build python ./setup.py sdist python ./setup.py bdist TMPDIR="${PWD}/tmp"; TMPSITEPACKAGES="${TMPDIR}/lib/python${TRAVIS_PYTHON_VERSION}/site-packages"; mkdir -p ${TMPSITEPACKAGES}; PYTHONPATH=${TMPSITEPACKAGES} python ./setup.py install --prefix=${TMPDIR}; if test -f tests/end-to-end.py; then PYTHONPATH=. python ./tests/end-to-end.py --debug -c config/end-to-end.ini; fi fi fi dtfabric-20190120/dependencies.ini000066400000000000000000000002261342102721300166520ustar00rootroot00000000000000[yaml] dpkg_name: python-yaml l2tbinaries_name: PyYAML minimum_version: 3.10 pypi_name: PyYAML rpm_name: python2-pyyaml version_property: __version__ dtfabric-20190120/documentation/000077500000000000000000000000001342102721300163745ustar00rootroot00000000000000dtfabric-20190120/documentation/Data types fabric (dtFabric) format.asciidoc000066400000000000000000001037411342102721300264000ustar00rootroot00000000000000= Data types fabric (dtFabric) format specification :toc: :toclevels: 4 :numbered!: [abstract] == Summary Data types fabric (dtFabric) is a proof-of-concept YAML-based definition language to specify format and data types. [preface] == Document information [cols="1,5"] |=== | Author(s): | Joachim Metz | Abstract: | Data types fabric (dtFabric) format specification | Classification: | Public | Keywords: | dtFabric |=== [preface] == License .... Copyright (C) 2016-2017, Joachim Metz . Permission is granted to copy, distribute and/or modify this document under the terms of the GNU Free Documentation License, Version 1.3 or any later version published by the Free Software Foundation; with no Invariant Sections, no Front-Cover Texts, and no Back-Cover Texts. 
A copy of the license is included in the section entitled "GNU Free Documentation License". .... [preface] == Revision history [cols="1,1,1,5",options="header"] |=== | Version | Author | Date | Comments | 0.0.1 | J.B. Metz | November 2016 | Initial version. | 0.0.2 | J.B. Metz | March 2017 | Moved documentation to asciidoc. | 0.0.3 | J.B. Metz | March 2017 | Worked on documentation. | 0.0.4 | J.B. Metz | April 2017 | Worked on documentation. | 0.0.5 | J.B. Metz | April 2017 | Worked on documentation. | 0.0.6 | J.B. Metz | May 2017 | Worked on documentation. | 0.0.7 | J.B. Metz | October 2017 | Worked on documentation. |=== :numbered: == Overview Data types fabric (dtFabric) is a proof-of-concept YAML-based definition language to specify format and data types. * storage data types, such as integers, characters, structures * semantic data types, such as constants, enumerations * layout data types, such as format, vectors, trees == [[data_type_definition]]Data type definition [cols="1,1,5",options="header"] |=== | Attribute name | Attribute type | Description | name | string | Name of the format | type | string | Definition type + See section: <> | description | string | Description of the format | urls | List of strings | List of URLS that contain more information about the format |=== === [[data_type_definition_types]]Data type definition types [cols="1,5",options="header"] |=== | Identifier | Description | boolean | Boolean | character | Character | enumeration | Enumeration | floating-point | Floating-point | format | Data format metadata + See section: <> | integer | Integer | stream | Stream | string | String | structure | Structure | uuid | UUID (or GUID) |=== [yellow-background]*TODO: consider adding the following types* [cols="1,5",options="header"] |=== | Identifier | Description | bit-field | Bit field (or group of bits) | constant | Constant | fixed-point | Fixed-point data type | reference | [yellow-background]*TODO* | union | Union data type |=== == 
Storage data types Storage data types are data types that represent stored (or serialized) values. In addition to the <> storage data types also define: [cols="1,1,5",options="header"] |=== | Attribute name | Attribute type | Description | attributes | | Data type attributes + See section: <> |=== === [[storage_data_type_definition_attributes]]Storage data type definition attributes [cols="1,1,5",options="header"] |=== | Attribute name | Attribute type | Description | byte_order | string | Optional byte-order of the data type + Valid options are: "big-endian", "little-endian", "native" + The default is native |=== [NOTE] middle-endian is a valid byte-ordering but currently not supported. === Fixed-size data types In addition to the <> fixed-size data types also define the following attributes: [cols="1,1,5",options="header"] |=== | Attribute name | Attribute type | Description | size | integer or string | size of data type in number of units or "native" if architecture dependent | units | string | units of the size of the data type + The default is bytes |=== ==== Boolean A boolean is a data type to represent true-or-false values. [source,yaml] ---- name: bool32 aliases: [BOOL] type: boolean description: 32-bit boolean type attributes: - size: 4 units: byte false_value: 0 true_value: 1 ---- Boolean data type specfic attributes: [cols="1,1,5",options="header"] |=== | Attribute name | Attribute type | Description | false_value | integer | Integer value that represents False + The default is 0 | true_value | integer | Integer value that represents True + The default is not-set, which represent any value except for the false_value |=== === Character A character is a data type to represent elements of textual strings. [source,yaml] ---- name: wchar16 aliases: [WCHAR] type: character description: 16-bit wide character type attributes: - size: 2 units: byte ---- === Fixed-point A fixed-point is a data type to represent elements of fixed-point values. 
[yellow-background]*TODO: add example* === Floating-point A floating-point is a data type to represent elements of floating-point values. [source,yaml] ---- name: float64 aliases: [double, DOUBLE] type: floating-point description: 64-bit double precision floating-point type attributes: size: 8 units: bytes ---- === Integer A integer is a data type to represent elements of integer values. [source,yaml] ---- name: int32le aliases: [LONG, LONG32] type: integer description: 32-bit little-endian signed integer type attributes: - byte_order: little-endian format: signed size: 4 units: byte ---- Integer data type specfic attributes: [cols="1,1,5",options="header"] |=== | Attribute name | Attribute type | Description | format | string | Signed or unsiged + The default is signed |=== === UUID (or GUID) An UUID (or GUID) is a data type to represent a Globally or Universal unique identifier (GUID or UUID) data types. [source,yaml] ---- name: known_folder_identifier type: uuid description: Known folder identifier. attributes: byte_order: little-endian ---- === Variable-sized data types === Sequence A sequence is a data type to represent a sequence of individual elements such as an array of integers. [source,yaml] ---- name: page_numbers type: sequence description: Array of 32-bit page numbers. 
element_data_type: int32 number_of_elements: 32 ---- Sequence data type specfic attributes: [cols="1,1,5",options="header"] |=== | Attribute name | Attribute type | Description | element_data_type | string | Data type of sequence element | elements_data_size | integer or string | Integer value or expression to determine the data size of the elements in the sequence | number_of_elements | integer or string | Integer value or expression to determine the number of elements in the sequence | elements_terminator | integer | element value that indicates the end-of-string |=== [yellow-background]*TODO: describe expressions and the map context* === Stream A stream is a data type to represent a continous sequence of elements such as a byte stream. [source,yaml] ---- name: data type: stream element_data_type: byte number_of_elements: data_size ---- Stream data type specfic attributes: [cols="1,1,5",options="header"] |=== | Attribute name | Attribute type | Description | element_data_type | string | Data type of stream element | elements_data_size | integer or string | Integer value or expression to determine the data size of the elements in the stream | number_of_elements | integer or string | Integer value or expression to determine the number of elements in the stream | elements_terminator | integer | element value that indicates the end-of-string |=== [yellow-background]*TODO: describe expressions and the map context* === String A string is a data type to represent a continous sequence of elements with a known encoding such as an UTF-16 formatted string. 
[source,yaml] ---- name: utf16le_string type: string ecoding: utf-16-le element_data_type: wchar16 elements_data_size: string_data_size ---- Stream data type specfic attributes: [cols="1,1,5",options="header"] |=== | Attribute name | Attribute type | Description | encoding | string | Encoding of the string | element_data_type | string | Data type of string element | elements_data_size | integer or string | Integer value or expression to determine the data size of the elements in the string | number_of_elements | integer or string | Integer value or expression to determine the number of elements in the string | elements_terminator | integer | element value that indicates the end-of-string |=== [yellow-background]*TODO: describe expressions and the map context* [yellow-background]*TODO: add definition of attributes* [yellow-background]*TODO: what about NUL-terminated strings?* === Structure A structure is a data type to represent a composition of members of other data types. [yellow-background]*TODO: add structure size hint?* [source,yaml] ---- name: point3d aliases: [POINT] type: structure description: Point in 3 dimensional space. attributes: byte_order: little-endian members: - name: x aliases: [XCOORD] data_type: int32 - name: y data_type: int32 - name: z data_type: int32 ---- [source,yaml] ---- name: sphere3d type: structure description: Sphere in 3 dimensional space. members: - name: number_of_triangles data_type: int32 - name: triangles type: sequence element_data_type: triangle3d number_of_elements: sphere3d.number_of_triangles ---- Structure data type specfic attributes: [cols="1,1,5",options="header"] |=== | Attribute name | Attribute type | Description | members | list | List of member definitions + A member definition needs to specify either 'type' or 'data_type' |=== [yellow-background]*TODO: describe short-hand and long form* == Semantic types === Constant A constant is a data type to provide meaning (semantic value) to a single predefined value. 
The value of a constant is typically not stored in a byte stream but used at compile time. [source,yaml] ---- name: maximum_number_of_back_traces aliases: [AVRF_MAX_TRACES] type: constant description: Application verifier resource enumeration maximum number of back traces urls: ['https://msdn.microsoft.com/en-us/library/bb432193(v=vs.85).aspx'] value: 13 ---- Constant data type specfic attributes: [cols="1,1,5",options="header"] |=== | Attribute name | Attribute type | Description | value | integer or string | Integer or string value that the constant represents |=== === Enumeration An enumeration is a data type to provide meaning (semantic value) to one or more predefined values. [source,yaml] ---- name: handle_trace_operation_types aliases: [eHANDLE_TRACE_OPERATIONS] type: enumeration description: Application verifier resource enumeration handle trace operation types urls: ['https://msdn.microsoft.com/en-us/library/bb432251(v=vs.85).aspx'] values: - name: OperationDbUnused number: 0 description: Unused - name: OperationDbOPEN number: 1 description: Open (create) handle operation - name: OperationDbCLOSE number: 2 description: Close handle operation - name: OperationDbBADREF number: 3 description: Invalid handle operation ---- Enumeration value attributes: [cols="1,1,5",options="header"] |=== | Attribute name | Attribute type | Description | aliases | list of strings | Optional aliases the enumeration value maps to | description | string | Optional description of the enumeration value | name | string | Name the enumeration value maps to | number | integer | Number the enumeration value maps to |=== [yellow-background]*TODO: add description* == Layout types === [[data_format]]Data format [cols="1,1,5",options="header"] |=== | Attribute name | Attribute type | Description | name | string | Name of the format | type | string | Definition type + See section: <> | description | string | Description of the format | urls | List of strings | List of URLS that contain more 
information about the format | layout | [yellow-background]*TODO* | Format layout definition |=== Example: [source,yaml] ---- name: mdmp type: format description: Minidump file format urls: ['https://msdn.microsoft.com/en-us/library/windows/desktop/ms680369(v=vs.85).aspx'] layout: ---- === Structure family A structure family is a layout type to represent multiple generations (versions) of the same structure. [source,yaml] ---- name: group_descriptor type: type-family description: Group descriptor of Extended File System version 2, 3 and 4 runtime: group_descriptor_runtime members: - group_descriptor_ext2 - group_descriptor_ext4 ---- :numbered!: [appendix] == References `[YAML]` [cols="1,5",options="header"] |=== | Title: | YAML Ain’t Markup Language (YAMLâ„¢) | Version: | 1.2 | Data: | November 1, 2009 | URL: | http://yaml.org/spec/1.2/spec.html |=== [appendix] == GNU Free Documentation License Version 1.3, 3 November 2008 Copyright © 2000, 2001, 2002, 2007, 2008 Free Software Foundation, Inc. Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. === 0. PREAMBLE The purpose of this License is to make a manual, textbook, or other functional and useful document "free" in the sense of freedom: to assure everyone the effective freedom to copy and redistribute it, with or without modifying it, either commercially or noncommercially. Secondarily, this License preserves for the author and publisher a way to get credit for their work, while not being considered responsible for modifications made by others. This License is a kind of "copyleft", which means that derivative works of the document must themselves be free in the same sense. It complements the GNU General Public License, which is a copyleft license designed for free software. 
We have designed this License in order to use it for manuals for free software, because free software needs free documentation: a free program should come with manuals providing the same freedoms that the software does. But this License is not limited to software manuals; it can be used for any textual work, regardless of subject matter or whether it is published as a printed book. We recommend this License principally for works whose purpose is instruction or reference. === 1. APPLICABILITY AND DEFINITIONS This License applies to any manual or other work, in any medium, that contains a notice placed by the copyright holder saying it can be distributed under the terms of this License. Such a notice grants a world-wide, royalty-free license, unlimited in duration, to use that work under the conditions stated herein. The "Document", below, refers to any such manual or work. Any member of the public is a licensee, and is addressed as "you". You accept the license if you copy, modify or distribute the work in a way requiring permission under copyright law. A "Modified Version" of the Document means any work containing the Document or a portion of it, either copied verbatim, or with modifications and/or translated into another language. A "Secondary Section" is a named appendix or a front-matter section of the Document that deals exclusively with the relationship of the publishers or authors of the Document to the Document's overall subject (or to related matters) and contains nothing that could fall directly within that overall subject. (Thus, if the Document is in part a textbook of mathematics, a Secondary Section may not explain any mathematics.) The relationship could be a matter of historical connection with the subject or with related matters, or of legal, commercial, philosophical, ethical or political position regarding them. 
The "Invariant Sections" are certain Secondary Sections whose titles are designated, as being those of Invariant Sections, in the notice that says that the Document is released under this License. If a section does not fit the above definition of Secondary then it is not allowed to be designated as Invariant. The Document may contain zero Invariant Sections. If the Document does not identify any Invariant Sections then there are none. The "Cover Texts" are certain short passages of text that are listed, as Front-Cover Texts or Back-Cover Texts, in the notice that says that the Document is released under this License. A Front-Cover Text may be at most 5 words, and a Back-Cover Text may be at most 25 words. A "Transparent" copy of the Document means a machine-readable copy, represented in a format whose specification is available to the general public, that is suitable for revising the document straightforwardly with generic text editors or (for images composed of pixels) generic paint programs or (for drawings) some widely available drawing editor, and that is suitable for input to text formatters or for automatic translation to a variety of formats suitable for input to text formatters. A copy made in an otherwise Transparent file format whose markup, or absence of markup, has been arranged to thwart or discourage subsequent modification by readers is not Transparent. An image format is not Transparent if used for any substantial amount of text. A copy that is not "Transparent" is called "Opaque". Examples of suitable formats for Transparent copies include plain ASCII without markup, Texinfo input format, LaTeX input format, SGML or XML using a publicly available DTD, and standard-conforming simple HTML, PostScript or PDF designed for human modification. Examples of transparent image formats include PNG, XCF and JPG. 
Opaque formats include proprietary formats that can be read and edited only by proprietary word processors, SGML or XML for which the DTD and/or processing tools are not generally available, and the machine-generated HTML, PostScript or PDF produced by some word processors for output purposes only. The "Title Page" means, for a printed book, the title page itself, plus such following pages as are needed to hold, legibly, the material this License requires to appear in the title page. For works in formats which do not have any title page as such, "Title Page" means the text near the most prominent appearance of the work's title, preceding the beginning of the body of the text. The "publisher" means any person or entity that distributes copies of the Document to the public. A section "Entitled XYZ" means a named subunit of the Document whose title either is precisely XYZ or contains XYZ in parentheses following text that translates XYZ in another language. (Here XYZ stands for a specific section name mentioned below, such as "Acknowledgements", "Dedications", "Endorsements", or "History".) To "Preserve the Title" of such a section when you modify the Document means that it remains a section "Entitled XYZ" according to this definition. The Document may include Warranty Disclaimers next to the notice which states that this License applies to the Document. These Warranty Disclaimers are considered to be included by reference in this License, but only as regards disclaiming warranties: any other implication that these Warranty Disclaimers may have is void and has no effect on the meaning of this License. === 2. VERBATIM COPYING You may copy and distribute the Document in any medium, either commercially or noncommercially, provided that this License, the copyright notices, and the license notice saying this License applies to the Document are reproduced in all copies, and that you add no other conditions whatsoever to those of this License. 
You may not use technical measures to obstruct or control the reading or further copying of the copies you make or distribute. However, you may accept compensation in exchange for copies. If you distribute a large enough number of copies you must also follow the conditions in section 3. You may also lend copies, under the same conditions stated above, and you may publicly display copies. === 3. COPYING IN QUANTITY If you publish printed copies (or copies in media that commonly have printed covers) of the Document, numbering more than 100, and the Document's license notice requires Cover Texts, you must enclose the copies in covers that carry, clearly and legibly, all these Cover Texts: Front-Cover Texts on the front cover, and Back-Cover Texts on the back cover. Both covers must also clearly and legibly identify you as the publisher of these copies. The front cover must present the full title with all words of the title equally prominent and visible. You may add other material on the covers in addition. Copying with changes limited to the covers, as long as they preserve the title of the Document and satisfy these conditions, can be treated as verbatim copying in other respects. If the required texts for either cover are too voluminous to fit legibly, you should put the first ones listed (as many as fit reasonably) on the actual cover, and continue the rest onto adjacent pages. If you publish or distribute Opaque copies of the Document numbering more than 100, you must either include a machine-readable Transparent copy along with each Opaque copy, or state in or with each Opaque copy a computer-network location from which the general network-using public has access to download using public-standard network protocols a complete Transparent copy of the Document, free of added material. 
If you use the latter option, you must take reasonably prudent steps, when you begin distribution of Opaque copies in quantity, to ensure that this Transparent copy will remain thus accessible at the stated location until at least one year after the last time you distribute an Opaque copy (directly or through your agents or retailers) of that edition to the public. It is requested, but not required, that you contact the authors of the Document well before redistributing any large number of copies, to give them a chance to provide you with an updated version of the Document. === 4. MODIFICATIONS You may copy and distribute a Modified Version of the Document under the conditions of sections 2 and 3 above, provided that you release the Modified Version under precisely this License, with the Modified Version filling the role of the Document, thus licensing distribution and modification of the Modified Version to whoever possesses a copy of it. In addition, you must do these things in the Modified Version: A. Use in the Title Page (and on the covers, if any) a title distinct from that of the Document, and from those of previous versions (which should, if there were any, be listed in the History section of the Document). You may use the same title as a previous version if the original publisher of that version gives permission. B. List on the Title Page, as authors, one or more persons or entities responsible for authorship of the modifications in the Modified Version, together with at least five of the principal authors of the Document (all of its principal authors, if it has fewer than five), unless they release you from this requirement. C. State on the Title page the name of the publisher of the Modified Version, as the publisher. D. Preserve all the copyright notices of the Document. E. Add an appropriate copyright notice for your modifications adjacent to the other copyright notices. F. 
Include, immediately after the copyright notices, a license notice giving the public permission to use the Modified Version under the terms of this License, in the form shown in the Addendum below. G. Preserve in that license notice the full lists of Invariant Sections and required Cover Texts given in the Document's license notice. H. Include an unaltered copy of this License. I. Preserve the section Entitled "History", Preserve its Title, and add to it an item stating at least the title, year, new authors, and publisher of the Modified Version as given on the Title Page. If there is no section Entitled "History" in the Document, create one stating the title, year, authors, and publisher of the Document as given on its Title Page, then add an item describing the Modified Version as stated in the previous sentence. J. Preserve the network location, if any, given in the Document for public access to a Transparent copy of the Document, and likewise the network locations given in the Document for previous versions it was based on. These may be placed in the "History" section. You may omit a network location for a work that was published at least four years before the Document itself, or if the original publisher of the version it refers to gives permission. K. For any section Entitled "Acknowledgements" or "Dedications", Preserve the Title of the section, and preserve in the section all the substance and tone of each of the contributor acknowledgements and/or dedications given therein. L. Preserve all the Invariant Sections of the Document, unaltered in their text and in their titles. Section numbers or the equivalent are not considered part of the section titles. M. Delete any section Entitled "Endorsements". Such a section may not be included in the Modified Version. N. Do not retitle any existing section to be Entitled "Endorsements" or to conflict in title with any Invariant Section. O. Preserve any Warranty Disclaimers. 
If the Modified Version includes new front-matter sections or appendices that qualify as Secondary Sections and contain no material copied from the Document, you may at your option designate some or all of these sections as invariant. To do this, add their titles to the list of Invariant Sections in the Modified Version's license notice. These titles must be distinct from any other section titles. You may add a section Entitled "Endorsements", provided it contains nothing but endorsements of your Modified Version by various parties—for example, statements of peer review or that the text has been approved by an organization as the authoritative definition of a standard. You may add a passage of up to five words as a Front-Cover Text, and a passage of up to 25 words as a Back-Cover Text, to the end of the list of Cover Texts in the Modified Version. Only one passage of Front-Cover Text and one of Back-Cover Text may be added by (or through arrangements made by) any one entity. If the Document already includes a cover text for the same cover, previously added by you or by arrangement made by the same entity you are acting on behalf of, you may not add another; but you may replace the old one, on explicit permission from the previous publisher that added the old one. The author(s) and publisher(s) of the Document do not by this License give permission to use their names for publicity for or to assert or imply endorsement of any Modified Version. === 5. COMBINING DOCUMENTS You may combine the Document with other documents released under this License, under the terms defined in section 4 above for modified versions, provided that you include in the combination all of the Invariant Sections of all of the original documents, unmodified, and list them all as Invariant Sections of your combined work in its license notice, and that you preserve all their Warranty Disclaimers. 
The combined work need only contain one copy of this License, and multiple identical Invariant Sections may be replaced with a single copy. If there are multiple Invariant Sections with the same name but different contents, make the title of each such section unique by adding at the end of it, in parentheses, the name of the original author or publisher of that section if known, or else a unique number. Make the same adjustment to the section titles in the list of Invariant Sections in the license notice of the combined work. In the combination, you must combine any sections Entitled "History" in the various original documents, forming one section Entitled "History"; likewise combine any sections Entitled "Acknowledgements", and any sections Entitled "Dedications". You must delete all sections Entitled "Endorsements". === 6. COLLECTIONS OF DOCUMENTS You may make a collection consisting of the Document and other documents released under this License, and replace the individual copies of this License in the various documents with a single copy that is included in the collection, provided that you follow the rules of this License for verbatim copying of each of the documents in all other respects. You may extract a single document from such a collection, and distribute it individually under this License, provided you insert a copy of this License into the extracted document, and follow this License in all other respects regarding verbatim copying of that document. === 7. AGGREGATION WITH INDEPENDENT WORKS A compilation of the Document or its derivatives with other separate and independent documents or works, in or on a volume of a storage or distribution medium, is called an "aggregate" if the copyright resulting from the compilation is not used to limit the legal rights of the compilation's users beyond what the individual works permit. 
When the Document is included in an aggregate, this License does not apply to the other works in the aggregate which are not themselves derivative works of the Document. If the Cover Text requirement of section 3 is applicable to these copies of the Document, then if the Document is less than one half of the entire aggregate, the Document's Cover Texts may be placed on covers that bracket the Document within the aggregate, or the electronic equivalent of covers if the Document is in electronic form. Otherwise they must appear on printed covers that bracket the whole aggregate. === 8. TRANSLATION Translation is considered a kind of modification, so you may distribute translations of the Document under the terms of section 4. Replacing Invariant Sections with translations requires special permission from their copyright holders, but you may include translations of some or all Invariant Sections in addition to the original versions of these Invariant Sections. You may include a translation of this License, and all the license notices in the Document, and any Warranty Disclaimers, provided that you also include the original English version of this License and the original versions of those notices and disclaimers. In case of a disagreement between the translation and the original version of this License or a notice or disclaimer, the original version will prevail. If a section in the Document is Entitled "Acknowledgements", "Dedications", or "History", the requirement (section 4) to Preserve its Title (section 1) will typically require changing the actual title. === 9. TERMINATION You may not copy, modify, sublicense, or distribute the Document except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense, or distribute it is void, and will automatically terminate your rights under this License. 
However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, receipt of a copy of some or all of the same material does not give you any rights to use it. === 10. FUTURE REVISIONS OF THIS LICENSE The Free Software Foundation may publish new, revised versions of the GNU Free Documentation License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. See http://www.gnu.org/copyleft/. Each version of the License is given a distinguishing version number. If the Document specifies that a particular numbered version of this License "or any later version" applies to it, you have the option of following the terms and conditions either of that specified version or of any later version that has been published (not as a draft) by the Free Software Foundation. If the Document does not specify a version number of this License, you may choose any version ever published (not as a draft) by the Free Software Foundation. 
If the Document specifies that a proxy can decide which future versions of this License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Document. === 11. RELICENSING "Massive Multiauthor Collaboration Site" (or "MMC Site") means any World Wide Web server that publishes copyrightable works and also provides prominent facilities for anybody to edit those works. A public wiki that anybody can edit is an example of such a server. A "Massive Multiauthor Collaboration" (or "MMC") contained in the site means any set of copyrightable works thus published on the MMC site. "CC-BY-SA" means the Creative Commons Attribution-Share Alike 3.0 license published by Creative Commons Corporation, a not-for-profit corporation with a principal place of business in San Francisco, California, as well as future copyleft versions of that license published by that same organization. "Incorporate" means to publish or republish a Document, in whole or in part, as part of another Document. An MMC is "eligible for relicensing" if it is licensed under this License, and if all works that were first published under this License somewhere other than this MMC, and subsequently incorporated in whole or in part into the MMC, (1) had no cover texts or invariant sections, and (2) were thus incorporated prior to November 1, 2008. The operator of an MMC Site may republish an MMC contained in the site under CC-BY-SA on the same site at any time before August 1, 2009, provided the MMC is eligible for relicensing. 
dtfabric-20190120/dtfabric.ini000066400000000000000000000005331342102721300160030ustar00rootroot00000000000000[project] name: dtfabric status: alpha name_description: dtFabric maintainer: Joachim Metz homepage_url: https://github.com/libyal/dtfabric description_short: Data type fabric (dtfabric) description_long: dtFabric, or data type fabric, is a project to manage data types and structures, as used in the libyal projects. dtfabric-20190120/dtfabric/000077500000000000000000000000001342102721300153015ustar00rootroot00000000000000dtfabric-20190120/dtfabric/__init__.py000066400000000000000000000001121342102721300174040ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Data type fabric.""" __version__ = '20190120' dtfabric-20190120/dtfabric/data_types.py000066400000000000000000000525451342102721300200230ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Data type definitions.""" from __future__ import unicode_literals import abc from dtfabric import definitions class DataTypeDefinition(object): """Data type definition interface. Attributes: aliases (list[str]): aliases. byte_order (str): byte-order the data type. description (str): description. name (str): name. urls (list[str]): URLs. """ # Note that redundant-returns-doc is broken for pylint 1.7.x for abstract # methods. # pylint: disable=redundant-returns-doc TYPE_INDICATOR = None _IS_COMPOSITE = False def __init__(self, name, aliases=None, description=None, urls=None): """Initializes a data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. urls (Optional[list[str]]): URLs. """ super(DataTypeDefinition, self).__init__() self.aliases = aliases or [] self.description = description self.name = name self.urls = urls @abc.abstractmethod def GetByteSize(self): """Retrieves the byte size of the data type definition. Returns: int: data type size in bytes or None if size cannot be determined. 
""" def IsComposite(self): """Determines if the data type is composite. A composite data type consists of other data types. Returns: bool: True if the data type is composite, False otherwise. """ return self._IS_COMPOSITE class StorageDataTypeDefinition(DataTypeDefinition): """Storage data type definition interface. Attributes: byte_order (str): byte-order the data type. """ # Note that redundant-returns-doc is broken for pylint 1.7.x for abstract # methods. # pylint: disable=redundant-returns-doc def __init__(self, name, aliases=None, description=None, urls=None): """Initializes a storage data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. urls (Optional[list[str]]): URLs. """ super(StorageDataTypeDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.byte_order = definitions.BYTE_ORDER_NATIVE @abc.abstractmethod def GetByteSize(self): """Retrieves the byte size of the data type definition. Returns: int: data type size in bytes or None if size cannot be determined. """ class FixedSizeDataTypeDefinition(StorageDataTypeDefinition): """Fixed-size data type definition. Attributes: size (int|str): size of the data type or SIZE_NATIVE. units (str): units of the size of the data type. """ def __init__(self, name, aliases=None, description=None, urls=None): """Initializes a fixed-size data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. urls (Optional[list[str]]): URLs. """ super(FixedSizeDataTypeDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.size = definitions.SIZE_NATIVE self.units = 'bytes' def GetByteSize(self): """Retrieves the byte size of the data type definition. Returns: int: data type size in bytes or None if size cannot be determined. 
""" if self.size == definitions.SIZE_NATIVE or self.units != 'bytes': return None return self.size class BooleanDefinition(FixedSizeDataTypeDefinition): """Boolean data type definition. Attributes: false_value (int): value of False, None represents any value except that defined by true_value. true_value (int): value of True, None represents any value except that defined by false_value. """ TYPE_INDICATOR = definitions.TYPE_INDICATOR_BOOLEAN def __init__( self, name, aliases=None, description=None, false_value=0, urls=None): """Initializes a boolean data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. false_value (Optional[int]): value that represents false. urls (Optional[list[str]]): URLs. """ super(BooleanDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.false_value = false_value self.true_value = None class CharacterDefinition(FixedSizeDataTypeDefinition): """Character data type definition.""" TYPE_INDICATOR = definitions.TYPE_INDICATOR_CHARACTER class FloatingPointDefinition(FixedSizeDataTypeDefinition): """Floating point data type definition.""" TYPE_INDICATOR = definitions.TYPE_INDICATOR_FLOATING_POINT class IntegerDefinition(FixedSizeDataTypeDefinition): """Integer data type definition. Attributes: format (str): format of the data type. maximum_value (int): maximum allowed value of the integer data type. minimum_value (int): minimum allowed value of the integer data type. """ TYPE_INDICATOR = definitions.TYPE_INDICATOR_INTEGER def __init__( self, name, aliases=None, description=None, maximum_value=None, minimum_value=None, urls=None): """Initializes an integer data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. maximum_value (Optional[int]): maximum allowed value of the integer data type. minimum_value (Optional[int]): minimum allowed value of the integer data type. 
urls (Optional[list[str]]): URLs. """ super(IntegerDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.format = definitions.FORMAT_SIGNED self.maximum_value = maximum_value self.minimum_value = minimum_value class UUIDDefinition(FixedSizeDataTypeDefinition): """UUID (or GUID) data type definition.""" TYPE_INDICATOR = definitions.TYPE_INDICATOR_UUID _IS_COMPOSITE = True def __init__(self, name, aliases=None, description=None, urls=None): """Initializes an UUID data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. urls (Optional[list[str]]): URLs. """ super(UUIDDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.size = 16 class ElementSequenceDataTypeDefinition(StorageDataTypeDefinition): """Element sequence data type definition. Attributes: elements_data_size (int): data size of the sequence elements. elements_data_size_expression (str): expression to determine the data size of the sequence elements. element_data_type (str): name of the sequence element data type. element_data_type_definition (DataTypeDefinition): sequence element data type definition. elements_terminator (bytes|int): element value that indicates the end-of-sequence. number_of_elements (int): number of sequence elements. number_of_elements_expression (str): expression to determine the number of sequence elements. """ _IS_COMPOSITE = True def __init__( self, name, data_type_definition, aliases=None, data_type=None, description=None, urls=None): """Initializes a sequence data type definition. Args: name (str): name. data_type_definition (DataTypeDefinition): sequence element data type definition. aliases (Optional[list[str]]): aliases. data_type (Optional[str]): name of the sequence element data type. description (Optional[str]): description. urls (Optional[list[str]]): URLs. 
""" super(ElementSequenceDataTypeDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.byte_order = getattr( data_type_definition, 'byte_order', definitions.BYTE_ORDER_NATIVE) self.elements_data_size = None self.elements_data_size_expression = None self.element_data_type = data_type self.element_data_type_definition = data_type_definition self.elements_terminator = None self.number_of_elements = None self.number_of_elements_expression = None def GetByteSize(self): """Retrieves the byte size of the data type definition. Returns: int: data type size in bytes or None if size cannot be determined. """ if not self.element_data_type_definition: return None if self.elements_data_size: return self.elements_data_size if not self.number_of_elements: return None element_byte_size = self.element_data_type_definition.GetByteSize() if not element_byte_size: return None return element_byte_size * self.number_of_elements class SequenceDefinition(ElementSequenceDataTypeDefinition): """Sequence data type definition.""" TYPE_INDICATOR = definitions.TYPE_INDICATOR_SEQUENCE class StreamDefinition(ElementSequenceDataTypeDefinition): """Stream data type definition.""" TYPE_INDICATOR = definitions.TYPE_INDICATOR_STREAM class StringDefinition(ElementSequenceDataTypeDefinition): """String data type definition. Attributes: encoding (str): string encoding. """ TYPE_INDICATOR = definitions.TYPE_INDICATOR_STRING def __init__( self, name, data_type_definition, aliases=None, data_type=None, description=None, urls=None): """Initializes a string data type definition. Args: name (str): name. data_type_definition (DataTypeDefinition): string element data type definition. aliases (Optional[list[str]]): aliases. data_type (Optional[str]): name of the string element data type. description (Optional[str]): description. urls (Optional[list[str]]): URLs. 
""" super(StringDefinition, self).__init__( name, data_type_definition, aliases=aliases, data_type=data_type, description=description, urls=urls) self.encoding = 'ascii' class DataTypeDefinitionWithMembers(StorageDataTypeDefinition): """Data type definition with members. Attributes: members (list[DataTypeDefinition]): member data type definitions. sections (list[MemberSectionDefinition]): member section definitions. """ # Note that redundant-returns-doc is broken for pylint 1.7.x for abstract # methods. # pylint: disable=redundant-returns-doc _IS_COMPOSITE = True def __init__(self, name, aliases=None, description=None, urls=None): """Initializes a data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. urls (Optional[list[str]]): URLs. """ super(DataTypeDefinitionWithMembers, self).__init__( name, aliases=aliases, description=description, urls=urls) self._byte_size = None self.members = [] self.sections = [] def AddMemberDefinition(self, member_definition): """Adds a member definition. Args: member_definition (DataTypeDefinition): member data type definition. """ self._byte_size = None self.members.append(member_definition) if self.sections: section_definition = self.sections[-1] section_definition.members.append(member_definition) def AddSectionDefinition(self, section_definition): """Adds a section definition. Args: section_definition (MemberSectionDefinition): member section definition. """ self.sections.append(section_definition) @abc.abstractmethod def GetByteSize(self): """Retrieves the byte size of the data type definition. Returns: int: data type size in bytes or None if size cannot be determined. """ class MemberDataTypeDefinition(StorageDataTypeDefinition): """Member data type definition. Attributes: member_data_type (str): member data type. member_data_type_definition (DataTypeDefinition): member data type definition. 
""" def __init__( self, name, data_type_definition, aliases=None, data_type=None, description=None, urls=None): """Initializes a member data type definition. Args: name (str): name. data_type_definition (DataTypeDefinition): member data type definition. aliases (Optional[list[str]]): aliases. data_type (Optional[str]): member data type. description (Optional[str]): description. urls (Optional[list[str]]): URLs. """ super(MemberDataTypeDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.byte_order = getattr( data_type_definition, 'byte_order', definitions.BYTE_ORDER_NATIVE) self.member_data_type = data_type self.member_data_type_definition = data_type_definition def GetByteSize(self): """Retrieves the byte size of the data type definition. Returns: int: data type size in bytes or None if size cannot be determined. """ if not self.member_data_type_definition: return None return self.member_data_type_definition.GetByteSize() def IsComposite(self): """Determines if the data type is composite. A composite data type consists of other data types. Returns: bool: True if the data type is composite, False otherwise. """ return (self.member_data_type_definition and self.member_data_type_definition.IsComposite()) class MemberSectionDefinition(object): """Member section definition. Attributes: members (list[DataTypeDefinition]): member data type definitions of the section. """ def __init__(self, name): """Initializes a member section definition. Args: name (str): name. """ super(MemberSectionDefinition, self).__init__() self.name = name self.members = [] class StructureDefinition(DataTypeDefinitionWithMembers): """Structure data type definition. Attributes: family_definition (DataTypeDefinition): structure family data type definition. """ TYPE_INDICATOR = definitions.TYPE_INDICATOR_STRUCTURE def __init__(self, name, aliases=None, description=None, urls=None): """Initializes a data type definition. Args: name (str): name. 
aliases (Optional[list[str]]): aliases. description (Optional[str]): description. urls (Optional[list[str]]): URLs. """ super(StructureDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.family_definition = None def GetByteSize(self): """Retrieves the byte size of the data type definition. Returns: int: data type size in bytes or None if size cannot be determined. """ if self._byte_size is None and self.members: self._byte_size = 0 for member_definition in self.members: byte_size = member_definition.GetByteSize() if byte_size is None: self._byte_size = None break self._byte_size += byte_size return self._byte_size class UnionDefinition(DataTypeDefinitionWithMembers): """Union data type definition.""" TYPE_INDICATOR = definitions.TYPE_INDICATOR_UNION def GetByteSize(self): """Retrieves the byte size of the data type definition. Returns: int: data type size in bytes or None if size cannot be determined. """ if self._byte_size is None and self.members: self._byte_size = 0 for member_definition in self.members: byte_size = member_definition.GetByteSize() if byte_size is None: self._byte_size = None break self._byte_size = max(self._byte_size, byte_size) return self._byte_size class SemanticDataTypeDefinition(DataTypeDefinition): """Semantic data type definition interface. Attributes: byte_order (str): byte-order the data type. """ def GetByteSize(self): # pylint: disable=redundant-returns-doc """Retrieves the byte size of the data type definition. Returns: int: data type size in bytes or None if size cannot be determined. """ return None class ConstantDefinition(SemanticDataTypeDefinition): """Constant data type definition. Attributes: value (int): constant value. """ TYPE_INDICATOR = definitions.TYPE_INDICATOR_CONSTANT def __init__(self, name, aliases=None, description=None, urls=None): """Initializes an enumeration data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. 
description (Optional[str]): description. urls (Optional[list[str]]): URLs. """ super(ConstantDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.value = None class EnumerationValue(object): """Enumeration value. Attributes: aliases (list[str]): aliases. description (str): description. name (str): name. number (int): number. """ def __init__(self, name, number, aliases=None, description=None): """Initializes an enumeration value. Args: name (str): name. number (int): number. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. """ super(EnumerationValue, self).__init__() self.aliases = aliases or [] self.description = description self.name = name self.number = number class EnumerationDefinition(SemanticDataTypeDefinition): """Enumeration data type definition. Attributes: values_per_alias (dict[str, EnumerationValue]): enumeration values per alias. values_per_name (dict[str, EnumerationValue]): enumeration values per name. values_per_number (dict[str, EnumerationValue]): enumeration values per number. values(list[EnumerationValue]): enumeration values. """ TYPE_INDICATOR = definitions.TYPE_INDICATOR_ENUMERATION def __init__(self, name, aliases=None, description=None, urls=None): """Initializes an enumeration data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. urls (Optional[list[str]]): URLs. """ super(EnumerationDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.values = [] self.values_per_alias = {} self.values_per_name = {} self.values_per_number = {} def AddValue(self, name, number, aliases=None, description=None): """Adds an enumeration value. Args: name (str): name. number (int): number. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. Raises: KeyError: if the enumeration value already exists. 
""" if name in self.values_per_name: raise KeyError('Value with name: {0:s} already exists.'.format(name)) if number in self.values_per_number: raise KeyError('Value with number: {0!s} already exists.'.format(number)) for alias in aliases or []: if alias in self.values_per_alias: raise KeyError('Value with alias: {0:s} already exists.'.format(alias)) enumeration_value = EnumerationValue( name, number, aliases=aliases, description=description) self.values.append(enumeration_value) self.values_per_name[name] = enumeration_value self.values_per_number[number] = enumeration_value for alias in aliases or []: self.values_per_alias[alias] = enumeration_value class LayoutDataTypeDefinition(DataTypeDefinition): """Layout data type definition interface.""" _IS_COMPOSITE = True def GetByteSize(self): # pylint: disable=redundant-returns-doc """Retrieves the byte size of the data type definition. Returns: int: data type size in bytes or None if size cannot be determined. """ return None class FormatDefinition(LayoutDataTypeDefinition): """Data format definition. Attributes: metadata (dict[str, object]): metadata. """ TYPE_INDICATOR = definitions.TYPE_INDICATOR_FORMAT def __init__(self, name, aliases=None, description=None, urls=None): """Initializes a format data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. urls (Optional[list[str]]): URLs. """ super(FormatDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.metadata = {} class StructureFamilyDefinition(LayoutDataTypeDefinition): """Structure family definition. Attributes: members (list[DataTypeDefinition]): member data type definitions. runtime (DataTypeDefinition): runtime data type definition. """ TYPE_INDICATOR = definitions.TYPE_INDICATOR_STRUCTURE_FAMILY def __init__(self, name, aliases=None, description=None, urls=None): """Initializes a structure family data type definition. Args: name (str): name. 
aliases (Optional[list[str]]): aliases. description (Optional[str]): description. urls (Optional[list[str]]): URLs. """ super(StructureFamilyDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.members = [] self.runtime = None def AddMemberDefinition(self, member_definition): """Adds a member definition. Args: member_definition (DataTypeDefinition): member data type definition. """ self.members.append(member_definition) member_definition.family_definition = self def AddRuntimeDefinition(self, runtime_definition): """Adds a runtime definition. Args: runtime_definition (DataTypeDefinition): runtime data type definition. """ self.runtime = runtime_definition runtime_definition.family_definition = self dtfabric-20190120/dtfabric/definitions.py000066400000000000000000000025621342102721300201730ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Definitions.""" from __future__ import unicode_literals BYTE_ORDER_BIG_ENDIAN = 'big-endian' BYTE_ORDER_LITTLE_ENDIAN = 'little-endian' BYTE_ORDER_MIDDLE_ENDIAN = 'middle-endian' BYTE_ORDER_NATIVE = 'native' BYTE_ORDERS = frozenset([ BYTE_ORDER_BIG_ENDIAN, BYTE_ORDER_LITTLE_ENDIAN, BYTE_ORDER_NATIVE]) FORMAT_SIGNED = 'signed' FORMAT_UNSIGNED = 'unsigned' SIZE_NATIVE = 'native' TYPE_INDICATOR_BOOLEAN = 'boolean' TYPE_INDICATOR_CHARACTER = 'character' TYPE_INDICATOR_CONSTANT = 'constant' TYPE_INDICATOR_ENUMERATION = 'enumeration' TYPE_INDICATOR_FLOATING_POINT = 'floating-point' TYPE_INDICATOR_FORMAT = 'format' TYPE_INDICATOR_INTEGER = 'integer' TYPE_INDICATOR_SEQUENCE = 'sequence' TYPE_INDICATOR_STREAM = 'stream' TYPE_INDICATOR_STRING = 'string' TYPE_INDICATOR_STRUCTURE = 'structure' TYPE_INDICATOR_STRUCTURE_FAMILY = 'structure-family' TYPE_INDICATOR_UNION = 'union' TYPE_INDICATOR_UUID = 'uuid' TYPE_INDICATORS = frozenset([ TYPE_INDICATOR_BOOLEAN, TYPE_INDICATOR_CHARACTER, TYPE_INDICATOR_CONSTANT, TYPE_INDICATOR_ENUMERATION, TYPE_INDICATOR_FLOATING_POINT, TYPE_INDICATOR_FORMAT, 
TYPE_INDICATOR_INTEGER, TYPE_INDICATOR_SEQUENCE, TYPE_INDICATOR_STREAM, TYPE_INDICATOR_STRING, TYPE_INDICATOR_STRUCTURE, TYPE_INDICATOR_STRUCTURE_FAMILY, TYPE_INDICATOR_UNION, TYPE_INDICATOR_UUID]) dtfabric-20190120/dtfabric/errors.py000066400000000000000000000017331342102721300171730ustar00rootroot00000000000000# -*- coding: utf-8 -*- """The error objects.""" class Error(Exception): """The error interface.""" class ByteStreamTooSmallError(Error): """Error that is raised when the byte stream is too small.""" class DefinitionReaderError(Error): """Error that is raised by the definition reader. Attributes: name (str): name of the definition. message (str): error message. """ def __init__(self, name, message): """Initializes an error. Args: name (str): name of the definition. message (str): error message. """ # pylint: disable=super-init-not-called # Do not call initialize of the super class. self.name = name self.message = message class FoldingError(Error): """Error that is raised when the definition cannot be folded.""" class FormatError(Error): """Error that is raised when the definition format is incorrect.""" class MappingError(Error): """Error that is raised when the definition cannot be mapped.""" dtfabric-20190120/dtfabric/py2to3.py000066400000000000000000000006241342102721300170150ustar00rootroot00000000000000# -*- coding: utf-8 -*- """The Python 2 and 3 compatible definitions.""" import sys # pylint: disable=invalid-name,undefined-variable if sys.version_info[0] < 3: BYTES_TYPE = str INTEGER_TYPES = (int, long) STRING_TYPES = (basestring, ) UNICHR = unichr UNICODE_TYPE = unicode else: BYTES_TYPE = bytes INTEGER_TYPES = (int, ) STRING_TYPES = (str, ) UNICHR = chr UNICODE_TYPE = str dtfabric-20190120/dtfabric/reader.py000066400000000000000000001061411342102721300171200ustar00rootroot00000000000000# -*- coding: utf-8 -*- """The data type definition reader objects.""" from __future__ import unicode_literals import abc import yaml from dtfabric import data_types 
from dtfabric import definitions from dtfabric import errors from dtfabric import py2to3 class DataTypeDefinitionsReader(object): """Data type definitions reader.""" _DATA_TYPE_CALLBACKS = { definitions.TYPE_INDICATOR_BOOLEAN: '_ReadBooleanDataTypeDefinition', definitions.TYPE_INDICATOR_CHARACTER: '_ReadCharacterDataTypeDefinition', definitions.TYPE_INDICATOR_CONSTANT: '_ReadConstantDataTypeDefinition', definitions.TYPE_INDICATOR_ENUMERATION: ( '_ReadEnumerationDataTypeDefinition'), definitions.TYPE_INDICATOR_FLOATING_POINT: ( '_ReadFloatingPointDataTypeDefinition'), definitions.TYPE_INDICATOR_INTEGER: '_ReadIntegerDataTypeDefinition', definitions.TYPE_INDICATOR_FORMAT: '_ReadFormatDataTypeDefinition', definitions.TYPE_INDICATOR_SEQUENCE: '_ReadSequenceDataTypeDefinition', definitions.TYPE_INDICATOR_STREAM: '_ReadStreamDataTypeDefinition', definitions.TYPE_INDICATOR_STRING: '_ReadStringDataTypeDefinition', definitions.TYPE_INDICATOR_STRUCTURE: '_ReadStructureDataTypeDefinition', definitions.TYPE_INDICATOR_STRUCTURE_FAMILY: ( '_ReadStructureFamilyDataTypeDefinition'), definitions.TYPE_INDICATOR_UNION: '_ReadUnionDataTypeDefinition', definitions.TYPE_INDICATOR_UUID: '_ReadUUIDDataTypeDefinition', } _INTEGER_FORMAT_ATTRIBUTES = frozenset([ definitions.FORMAT_SIGNED, definitions.FORMAT_UNSIGNED]) def _ReadBooleanDataTypeDefinition( self, definitions_registry, definition_values, definition_name): """Reads a boolean data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. Returns: BooleanDataTypeDefinition: boolean data type definition. 
""" return self._ReadFixedSizeDataTypeDefinition( definitions_registry, definition_values, data_types.BooleanDefinition, definition_name) def _ReadCharacterDataTypeDefinition( self, definitions_registry, definition_values, definition_name): """Reads a character data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. Returns: CharacterDataTypeDefinition: character data type definition. """ return self._ReadFixedSizeDataTypeDefinition( definitions_registry, definition_values, data_types.CharacterDefinition, definition_name) def _ReadConstantDataTypeDefinition( self, definitions_registry, definition_values, definition_name): """Reads a constant data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. Returns: ConstantDataTypeDefinition: constant data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. """ value = definition_values.get('value', None) if value is None: error_message = 'missing value' raise errors.DefinitionReaderError(definition_name, error_message) definition_object = self._ReadSemanticDataTypeDefinition( definitions_registry, definition_values, data_types.ConstantDefinition, definition_name) definition_object.value = value return definition_object # pylint: disable=unused-argument def _ReadDataTypeDefinition( self, definitions_registry, definition_values, data_type_definition_class, definition_name): """Reads a data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. data_type_definition_class (str): data type definition class. 
definition_name (str): name of the definition. Returns: DataTypeDefinition: data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. """ aliases = definition_values.get('aliases', None) description = definition_values.get('description', None) urls = definition_values.get('urls', None) return data_type_definition_class( definition_name, aliases=aliases, description=description, urls=urls) def _ReadDataTypeDefinitionWithMembers( self, definitions_registry, definition_values, data_type_definition_class, definition_name): """Reads a data type definition with members. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. data_type_definition_class (str): data type definition class. definition_name (str): name of the definition. Returns: StringDefinition: string data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. 
""" members = definition_values.get('members', None) if not members: error_message = 'missing members' raise errors.DefinitionReaderError(definition_name, error_message) definition_object = self._ReadDataTypeDefinition( definitions_registry, definition_values, data_type_definition_class, definition_name) attributes = definition_values.get('attributes', None) if attributes: byte_order = attributes.get('byte_order', definitions.BYTE_ORDER_NATIVE) if byte_order not in definitions.BYTE_ORDERS: error_message = 'unsupported byte-order attribute: {0!s}'.format( byte_order) raise errors.DefinitionReaderError(definition_name, error_message) definition_object.byte_order = byte_order for member in members: section = member.get('section', None) if section: member_section_definition = data_types.MemberSectionDefinition(section) definition_object.AddSectionDefinition(member_section_definition) else: member_data_type_definition = self._ReadMemberDataTypeDefinitionMember( definitions_registry, member, definition_object.name) definition_object.AddMemberDefinition(member_data_type_definition) return definition_object def _ReadEnumerationDataTypeDefinition( self, definitions_registry, definition_values, definition_name): """Reads an enumeration data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. Returns: EnumerationDataTypeDefinition: enumeration data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. 
""" values = definition_values.get('values') if not values: error_message = 'missing values' raise errors.DefinitionReaderError(definition_name, error_message) definition_object = self._ReadSemanticDataTypeDefinition( definitions_registry, definition_values, data_types.EnumerationDefinition, definition_name) last_name = None for enumeration_value in values: aliases = enumeration_value.get('aliases', None) description = enumeration_value.get('description', None) name = enumeration_value.get('name', None) number = enumeration_value.get('number', None) if not name or number is None: if last_name: error_location = 'after: {0:s}'.format(last_name) else: error_location = 'at start' error_message = '{0:s} missing name or number'.format(error_location) raise errors.DefinitionReaderError(definition_name, error_message) else: try: definition_object.AddValue( name, number, aliases=aliases, description=description) except KeyError as exception: error_message = '{0!s}'.format(exception) raise errors.DefinitionReaderError(definition_name, error_message) last_name = name return definition_object def _ReadElementSequenceDataTypeDefinition( self, definitions_registry, definition_values, data_type_definition_class, definition_name): """Reads an element sequence data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. data_type_definition_class (str): data type definition class. definition_name (str): name of the definition. Returns: SequenceDefinition: sequence data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. 
""" attributes = definition_values.get('attributes', None) if attributes is not None: error_message = 'attributes not supported by element sequence data type' raise errors.DefinitionReaderError(definition_name, error_message) element_data_type = definition_values.get('element_data_type', None) if not element_data_type: error_message = 'missing element data type' raise errors.DefinitionReaderError(definition_name, error_message) elements_data_size = definition_values.get('elements_data_size', None) elements_terminator = definition_values.get('elements_terminator', None) number_of_elements = definition_values.get('number_of_elements', None) size_values = (elements_data_size, elements_terminator, number_of_elements) size_values = [value for value in size_values if value is not None] if not size_values: error_message = ( 'missing element data size, elements terminator and number of ' 'elements') raise errors.DefinitionReaderError(definition_name, error_message) if len(size_values) > 1: error_message = ( 'element data size, elements terminator and number of elements ' 'not allowed to be set at the same time') raise errors.DefinitionReaderError(definition_name, error_message) element_data_type_definition = definitions_registry.GetDefinitionByName( element_data_type) if not element_data_type_definition: error_message = 'undefined element data type: {0:s}.'.format( element_data_type) raise errors.DefinitionReaderError(definition_name, error_message) element_byte_size = element_data_type_definition.GetByteSize() element_type_indicator = element_data_type_definition.TYPE_INDICATOR if not element_byte_size and element_type_indicator != ( definitions.TYPE_INDICATOR_STRING): error_message = ( 'unsupported variable size element data type: {0:s}'.format( element_data_type)) raise errors.DefinitionReaderError(definition_name, error_message) aliases = definition_values.get('aliases', None) description = definition_values.get('description', None) urls = definition_values.get('urls', 
None) definition_object = data_type_definition_class( definition_name, element_data_type_definition, aliases=aliases, data_type=element_data_type, description=description, urls=urls) if elements_data_size is not None: try: definition_object.elements_data_size = int(elements_data_size) except ValueError: definition_object.elements_data_size_expression = elements_data_size elif elements_terminator is not None: if isinstance(elements_terminator, py2to3.UNICODE_TYPE): elements_terminator = elements_terminator.encode('ascii') definition_object.elements_terminator = elements_terminator elif number_of_elements is not None: try: definition_object.number_of_elements = int(number_of_elements) except ValueError: definition_object.number_of_elements_expression = number_of_elements return definition_object def _ReadFixedSizeDataTypeDefinition( self, definitions_registry, definition_values, data_type_definition_class, definition_name, default_size=definitions.SIZE_NATIVE, default_units='bytes'): """Reads a fixed-size data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. data_type_definition_class (str): data type definition class. definition_name (str): name of the definition. default_size (Optional[int]): default size. default_units (Optional[str]): default units. Returns: FixedSizeDataTypeDefinition: fixed-size data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. 
""" definition_object = self._ReadStorageDataTypeDefinition( definitions_registry, definition_values, data_type_definition_class, definition_name) attributes = definition_values.get('attributes', None) if attributes: size = attributes.get('size', default_size) if size != definitions.SIZE_NATIVE: try: int(size) except ValueError: error_message = 'unuspported size attribute: {0!s}'.format(size) raise errors.DefinitionReaderError(definition_name, error_message) definition_object.size = size definition_object.units = attributes.get('units', default_units) return definition_object def _ReadFloatingPointDataTypeDefinition( self, definitions_registry, definition_values, definition_name): """Reads a floating-point data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. Returns: FloatingPointDefinition: floating-point data type definition. """ return self._ReadFixedSizeDataTypeDefinition( definitions_registry, definition_values, data_types.FloatingPointDefinition, definition_name) def _ReadFormatDataTypeDefinition( self, definitions_registry, definition_values, definition_name): """Reads a format data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. Returns: FormatDefinition: format definition. """ definition_object = self._ReadLayoutDataTypeDefinition( definitions_registry, definition_values, data_types.FormatDefinition, definition_name) definition_object.metadata = definition_values.get('metadata', {}) return definition_object def _ReadIntegerDataTypeDefinition( self, definitions_registry, definition_values, definition_name): """Reads an integer data type definition. 
Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. Returns: IntegerDataTypeDefinition: integer data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. """ definition_object = self._ReadFixedSizeDataTypeDefinition( definitions_registry, definition_values, data_types.IntegerDefinition, definition_name) attributes = definition_values.get('attributes', None) if attributes: format_attribute = attributes.get('format', definitions.FORMAT_SIGNED) if format_attribute not in self._INTEGER_FORMAT_ATTRIBUTES: error_message = 'unsupported format attribute: {0!s}'.format( format_attribute) raise errors.DefinitionReaderError(definition_name, error_message) definition_object.format = format_attribute return definition_object def _ReadLayoutDataTypeDefinition( self, definitions_registry, definition_values, data_type_definition_class, definition_name): """Reads a layout data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. data_type_definition_class (str): data type definition class. definition_name (str): name of the definition. Returns: LayoutDataTypeDefinition: layout data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. 
""" definition_object = self._ReadDataTypeDefinition( definitions_registry, definition_values, data_type_definition_class, definition_name) attributes = definition_values.get('attributes', None) if attributes is not None: byte_order = attributes.get('byte_order', definitions.BYTE_ORDER_NATIVE) if byte_order not in definitions.BYTE_ORDERS: error_message = 'unsupported byte-order attribute: {0!s}'.format( byte_order) raise errors.DefinitionReaderError(definition_name, error_message) definition_object.byte_order = byte_order return definition_object def _ReadMemberDataTypeDefinitionMember( self, definitions_registry, definition_values, definition_name): """Reads a member data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. Returns: DataTypeDefinition: structure member data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. """ if not definition_values: error_message = 'invalid structure member missing definition values' raise errors.DefinitionReaderError(definition_name, error_message) name = definition_values.get('name', None) type_indicator = definition_values.get('type', None) if not name and type_indicator != definitions.TYPE_INDICATOR_UNION: error_message = 'invalid structure member missing name' raise errors.DefinitionReaderError(definition_name, error_message) # TODO: detect duplicate names. 
data_type = definition_values.get('data_type', None) type_values = (data_type, type_indicator) type_values = [value for value in type_values if value is not None] if not type_values: error_message = ( 'invalid structure member: {0:s} both data type and type are ' 'missing').format(name or '') raise errors.DefinitionReaderError(definition_name, error_message) if len(type_values) > 1: error_message = ( 'invalid structure member: {0:s} data type and type not allowed to ' 'be set at the same time').format(name or '') raise errors.DefinitionReaderError(definition_name, error_message) if type_indicator is not None: data_type_callback = self._DATA_TYPE_CALLBACKS.get(type_indicator, None) if data_type_callback: data_type_callback = getattr(self, data_type_callback, None) if not data_type_callback: error_message = 'unuspported data type definition: {0:s}.'.format( type_indicator) raise errors.DefinitionReaderError(name, error_message) try: definition_object = data_type_callback( definitions_registry, definition_values, name) except errors.DefinitionReaderError as exception: error_message = 'in: {0:s} {1:s}'.format( exception.name or '', exception.message) raise errors.DefinitionReaderError(definition_name, error_message) if data_type is not None: data_type_definition = definitions_registry.GetDefinitionByName( data_type) if not data_type_definition: error_message = ( 'invalid structure member: {0:s} undefined data type: ' '{1:s}').format(name or '', data_type) raise errors.DefinitionReaderError(definition_name, error_message) aliases = definition_values.get('aliases', None) description = definition_values.get('description', None) definition_object = data_types.MemberDataTypeDefinition( name, data_type_definition, aliases=aliases, data_type=data_type, description=description) return definition_object def _ReadSemanticDataTypeDefinition( self, definitions_registry, definition_values, data_type_definition_class, definition_name): """Reads a semantic data type definition. 
Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. data_type_definition_class (str): data type definition class. definition_name (str): name of the definition. Returns: SemanticDataTypeDefinition: semantic data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. """ attributes = definition_values.get('attributes', None) if attributes is not None: error_message = 'attributes not supported by semantic data type' raise errors.DefinitionReaderError(definition_name, error_message) return self._ReadDataTypeDefinition( definitions_registry, definition_values, data_type_definition_class, definition_name) def _ReadSequenceDataTypeDefinition( self, definitions_registry, definition_values, definition_name): """Reads a sequence data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. Returns: SequenceDefinition: sequence data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. """ return self._ReadElementSequenceDataTypeDefinition( definitions_registry, definition_values, data_types.SequenceDefinition, definition_name) def _ReadStorageDataTypeDefinition( self, definitions_registry, definition_values, data_type_definition_class, definition_name): """Reads a storage data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. data_type_definition_class (str): data type definition class. definition_name (str): name of the definition. Returns: StorageDataTypeDefinition: storage data type definition. 
Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. """ definition_object = self._ReadDataTypeDefinition( definitions_registry, definition_values, data_type_definition_class, definition_name) attributes = definition_values.get('attributes', None) if attributes: byte_order = attributes.get('byte_order', definitions.BYTE_ORDER_NATIVE) if byte_order not in definitions.BYTE_ORDERS: error_message = 'unsupported byte-order attribute: {0!s}'.format( byte_order) raise errors.DefinitionReaderError(definition_name, error_message) definition_object.byte_order = byte_order return definition_object def _ReadStreamDataTypeDefinition( self, definitions_registry, definition_values, definition_name): """Reads a stream data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. Returns: StreamDefinition: stream data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. """ return self._ReadElementSequenceDataTypeDefinition( definitions_registry, definition_values, data_types.StreamDefinition, definition_name) def _ReadStringDataTypeDefinition( self, definitions_registry, definition_values, definition_name): """Reads a string data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. Returns: StringDefinition: string data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. 
""" encoding = definition_values.get('encoding', None) if not encoding: error_message = 'missing encoding' raise errors.DefinitionReaderError(definition_name, error_message) definition_object = self._ReadElementSequenceDataTypeDefinition( definitions_registry, definition_values, data_types.StringDefinition, definition_name) definition_object.encoding = encoding return definition_object def _ReadStructureDataTypeDefinition( self, definitions_registry, definition_values, definition_name): """Reads a structure data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. Returns: StructureDefinition: structure data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. """ return self._ReadDataTypeDefinitionWithMembers( definitions_registry, definition_values, data_types.StructureDefinition, definition_name) def _ReadStructureFamilyDataTypeDefinition( self, definitions_registry, definition_values, definition_name): """Reads a structure family data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. Returns: StructureDefinition: structure data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. 
""" definition_object = self._ReadLayoutDataTypeDefinition( definitions_registry, definition_values, data_types.StructureFamilyDefinition, definition_name) runtime = definition_values.get('runtime', None) if not runtime: error_message = 'missing runtime' raise errors.DefinitionReaderError(definition_name, error_message) runtime_data_type_definition = definitions_registry.GetDefinitionByName( runtime) if not runtime_data_type_definition: error_message = 'undefined runtime: {0:s}.'.format(runtime) raise errors.DefinitionReaderError(definition_name, error_message) if runtime_data_type_definition.family_definition: error_message = 'runtime: {0:s} already part of a family.'.format(runtime) raise errors.DefinitionReaderError(definition_name, error_message) definition_object.AddRuntimeDefinition(runtime_data_type_definition) members = definition_values.get('members', None) if not members: error_message = 'missing members' raise errors.DefinitionReaderError(definition_name, error_message) for member in members: member_data_type_definition = definitions_registry.GetDefinitionByName( member) if not member_data_type_definition: error_message = 'undefined member: {0:s}.'.format(member) raise errors.DefinitionReaderError(definition_name, error_message) if member_data_type_definition.family_definition: error_message = 'member: {0:s} already part of a family.'.format(member) raise errors.DefinitionReaderError(definition_name, error_message) definition_object.AddMemberDefinition(member_data_type_definition) return definition_object def _ReadUnionDataTypeDefinition( self, definitions_registry, definition_values, definition_name): """Reads an union data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. Returns: UnionDefinition: union data type definition. 
Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. """ return self._ReadDataTypeDefinitionWithMembers( definitions_registry, definition_values, data_types.UnionDefinition, definition_name) def _ReadUUIDDataTypeDefinition( self, definitions_registry, definition_values, definition_name): """Reads an UUID data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. Returns: UUIDDataTypeDefinition: UUID data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. """ definition_object = self._ReadFixedSizeDataTypeDefinition( definitions_registry, definition_values, data_types.UUIDDefinition, definition_name, default_size=16) if definition_object.size != 16: error_message = 'unsupported size: {0:d}.'.format(definition_object.size) raise errors.DefinitionReaderError(definition_name, error_message) return definition_object class DataTypeDefinitionsFileReader(DataTypeDefinitionsReader): """Data type definitions file reader.""" def _ReadDefinition(self, definitions_registry, definition_values): """Reads a data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. Returns: DataTypeDefinition: data type definition or None. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. 
""" if not definition_values: error_message = 'missing definition values' raise errors.DefinitionReaderError(None, error_message) name = definition_values.get('name', None) if not name: error_message = 'missing name' raise errors.DefinitionReaderError(None, error_message) type_indicator = definition_values.get('type', None) if not type_indicator: error_message = 'invalid definition missing type' raise errors.DefinitionReaderError(name, error_message) data_type_callback = self._DATA_TYPE_CALLBACKS.get(type_indicator, None) if data_type_callback: data_type_callback = getattr(self, data_type_callback, None) if not data_type_callback: error_message = 'unuspported data type definition: {0:s}.'.format( type_indicator) raise errors.DefinitionReaderError(name, error_message) return data_type_callback(definitions_registry, definition_values, name) def ReadFile(self, definitions_registry, path): """Reads data type definitions from a file into the registry. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. path (str): path of the file to read from. """ with open(path, 'r') as file_object: self.ReadFileObject(definitions_registry, file_object) @abc.abstractmethod def ReadFileObject(self, definitions_registry, file_object): """Reads data type definitions from a file-like object into the registry. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. file_object (file): file-like object to read from. """ class YAMLDataTypeDefinitionsFileReader(DataTypeDefinitionsFileReader): """YAML data type definitions file reader. Attributes: dict[str, object]: metadata. """ def __init__(self): """Initializes a YAML data type definitions file reader.""" super(YAMLDataTypeDefinitionsFileReader, self).__init__() self.metadata = {} def _GetFormatErrorLocation( self, yaml_definition, last_definition_object): """Retrieves a format error location. Args: yaml_definition (dict[str, object]): current YAML definition. 
last_definition_object (DataTypeDefinition): previous data type definition. Returns: str: format error location. """ name = yaml_definition.get('name', None) if name: error_location = 'in: {0:s}'.format(name or '') elif last_definition_object: error_location = 'after: {0:s}'.format(last_definition_object.name) else: error_location = 'at start' return error_location def ReadFileObject(self, definitions_registry, file_object): """Reads data type definitions from a file-like object into the registry. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. file_object (file): file-like object to read from. Raises: FormatError: if the definitions values are missing or if the format is incorrect. """ last_definition_object = None error_location = None error_message = None try: yaml_generator = yaml.safe_load_all(file_object) for yaml_definition in yaml_generator: definition_object = self._ReadDefinition( definitions_registry, yaml_definition) if not definition_object: error_location = self._GetFormatErrorLocation( yaml_definition, last_definition_object) error_message = '{0:s} Missing definition object.'.format( error_location) raise errors.FormatError(error_message) definitions_registry.RegisterDefinition(definition_object) last_definition_object = definition_object except errors.DefinitionReaderError as exception: error_message = 'in: {0:s} {1:s}'.format( exception.name or '', exception.message) raise errors.FormatError(error_message) except (yaml.reader.ReaderError, yaml.scanner.ScannerError) as exception: error_location = self._GetFormatErrorLocation({}, last_definition_object) error_message = '{0:s} {1!s}'.format(error_location, exception) raise errors.FormatError(error_message) dtfabric-20190120/dtfabric/registry.py000066400000000000000000000054061342102721300175300ustar00rootroot00000000000000# -*- coding: utf-8 -*- """The data type definitions registry.""" from __future__ import unicode_literals from dtfabric import definitions 
class DataTypeDefinitionsRegistry(object): """Data type definitions registry.""" def __init__(self): """Initializes a data type definitions registry.""" super(DataTypeDefinitionsRegistry, self).__init__() self._aliases = {} self._definitions = {} self._format_definitions = [] def DeregisterDefinition(self, data_type_definition): """Deregisters a data type definition. The data type definitions are identified based on their lower case name. Args: data_type_definition (DataTypeDefinition): data type definition. Raises: KeyError: if a data type definition is not set for the corresponding name. """ name = data_type_definition.name.lower() if name not in self._definitions: raise KeyError('Definition not set for name: {0:s}.'.format( data_type_definition.name)) del self._definitions[name] def GetDefinitionByName(self, name): """Retrieves a specific data type definition by name. Args: name (str): name of the data type definition. Returns: DataTypeDefinition: data type definition or None if not available. """ lookup_name = name.lower() if lookup_name not in self._definitions: lookup_name = self._aliases.get(name, None) return self._definitions.get(lookup_name, None) def GetDefinitions(self): """Retrieves the data type definitions. Returns: list[DataTypeDefinition]: data type definitions. """ return self._definitions.values() def RegisterDefinition(self, data_type_definition): """Registers a data type definition. The data type definitions are identified based on their lower case name. Args: data_type_definition (DataTypeDefinition): data type definitions. Raises: KeyError: if data type definition is already set for the corresponding name. 
""" name_lower = data_type_definition.name.lower() if name_lower in self._definitions: raise KeyError('Definition already set for name: {0:s}.'.format( data_type_definition.name)) if data_type_definition.name in self._aliases: raise KeyError('Alias already set for name: {0:s}.'.format( data_type_definition.name)) for alias in data_type_definition.aliases: if alias in self._aliases: raise KeyError('Alias already set for name: {0:s}.'.format(alias)) self._definitions[name_lower] = data_type_definition for alias in data_type_definition.aliases: self._aliases[alias] = name_lower if data_type_definition.TYPE_INDICATOR == definitions.TYPE_INDICATOR_FORMAT: self._format_definitions.append(name_lower) dtfabric-20190120/dtfabric/runtime/000077500000000000000000000000001342102721300167645ustar00rootroot00000000000000dtfabric-20190120/dtfabric/runtime/__init__.py000066400000000000000000000000301342102721300210660ustar00rootroot00000000000000# -*- coding: utf-8 -*- dtfabric-20190120/dtfabric/runtime/byte_operations.py000066400000000000000000000050651342102721300225520ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Byte stream operations.""" from __future__ import unicode_literals import abc import struct from dtfabric import errors class ByteStreamOperation(object): """Byte stream operation.""" # Note that redundant-returns-doc is broken for pylint 1.7.x for abstract # methods. # pylint: disable=redundant-returns-doc @abc.abstractmethod def ReadFrom(self, byte_stream): """Read values from a byte stream. Args: byte_stream (bytes): byte stream. Returns: tuple[object, ...]: values copies from the byte stream. """ @abc.abstractmethod def WriteTo(self, values): """Writes values to a byte stream. Args: values (tuple[object, ...]): values to copy to the byte stream. Returns: bytes: byte stream. 
""" class StructOperation(ByteStreamOperation): """Python struct-base byte stream operation.""" def __init__(self, format_string): """Initializes a Python struct-base byte stream operation. Args: format_string (str): format string as used by Python struct. Raises: FormatError: if the struct operation cannot be determined from the data type definition. """ try: struct_object = struct.Struct(format_string) except (TypeError, struct.error) as exception: raise errors.FormatError(( 'Unable to create struct object from data type definition ' 'with error: {0!s}').format(exception)) super(StructOperation, self).__init__() self._struct = struct_object self._struct_format_string = format_string def ReadFrom(self, byte_stream): """Read values from a byte stream. Args: byte_stream (bytes): byte stream. Returns: tuple[object, ...]: values copies from the byte stream. Raises: IOError: if byte stream cannot be read. OSError: if byte stream cannot be read. """ try: return self._struct.unpack_from(byte_stream) except (TypeError, struct.error) as exception: raise IOError('Unable to read byte stream with error: {0!s}'.format( exception)) def WriteTo(self, values): """Writes values to a byte stream. Args: values (tuple[object, ...]): values to copy to the byte stream. Returns: bytes: byte stream. Raises: IOError: if byte stream cannot be written. OSError: if byte stream cannot be read. 
""" try: return self._struct.pack(*values) except (TypeError, struct.error) as exception: raise IOError('Unable to write stream with error: {0!s}'.format( exception)) dtfabric-20190120/dtfabric/runtime/data_maps.py000066400000000000000000001616631342102721300213040ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Data type maps.""" from __future__ import unicode_literals import abc import copy import uuid from dtfabric import data_types from dtfabric import definitions from dtfabric import errors from dtfabric import py2to3 from dtfabric.runtime import byte_operations from dtfabric.runtime import runtime # TODO: add FormatMap. class DataTypeMapContext(object): """Data type map context. Attributes: byte_size (int): byte size. state (dict[str, object]): state values per name. values (dict[str, object]): values per name. """ def __init__(self, values=None): """Initializes a data type map context. Args: values (dict[str, object]): values per name. """ super(DataTypeMapContext, self).__init__() self.byte_size = None self.state = {} self.values = values or {} class DataTypeMapSizeHint(object): """Data type map size hint. Attributes: byte_size (int): byte size. is_complete (bool): True if the size is the complete size of the data type. """ def __init__(self, byte_size, is_complete=False): """Initializes a data type map size hint. Args: byte_size (int): byte size. is_complete (optional[bool]): True if the size is the complete size of the data type. """ super(DataTypeMapSizeHint, self).__init__() self.byte_size = byte_size self.is_complete = is_complete class DataTypeMap(object): """Data type map.""" # Note that redundant-returns-doc is broken for pylint 1.7.x for abstract # methods. # pylint: disable=redundant-returns-doc def __init__(self, data_type_definition): """Initializes a data type map. Args: data_type_definition (DataTypeDefinition): data type definition. Raises: FormatError: if the data type map cannot be determined from the data type definition. 
""" super(DataTypeMap, self).__init__() self._data_type_definition = data_type_definition @property def name(self): """str: name of the data type definition or None if not available.""" if not self._data_type_definition: return None return self._data_type_definition.name def GetByteSize(self): """Retrieves the byte size of the data type map. Returns: int: data type size in bytes or None if size cannot be determined. """ if not self._data_type_definition: return None return self._data_type_definition.GetByteSize() def GetSizeHint(self, **unused_kwargs): """Retrieves a hint about the size. Returns: int: hint of the number of bytes needed from the byte stream or None. """ return self.GetByteSize() @abc.abstractmethod def FoldByteStream(self, mapped_value, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ @abc.abstractmethod def MapByteStream(self, byte_stream, **unused_kwargs): """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. Returns: object: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ class StorageDataTypeMap(DataTypeMap): """Storage data type map.""" # Note that redundant-returns-doc is broken for pylint 1.7.x for abstract # methods. # pylint: disable=redundant-returns-doc _BYTE_ORDER_STRINGS = { definitions.BYTE_ORDER_BIG_ENDIAN: '>', definitions.BYTE_ORDER_LITTLE_ENDIAN: '<', definitions.BYTE_ORDER_NATIVE: '='} def _CheckByteStreamSize(self, byte_stream, byte_offset, data_type_size): """Checks if the byte stream is large enough for the data type. Args: byte_stream (bytes): byte stream. byte_offset (int): offset into the byte stream where to start. data_type_size (int): data type size. Raises: ByteStreamTooSmallError: if the byte stream is too small. 
MappingError: if the size of the byte stream cannot be determined. """ try: byte_stream_size = len(byte_stream) except Exception as exception: raise errors.MappingError(exception) if byte_stream_size - byte_offset < data_type_size: raise errors.ByteStreamTooSmallError( 'Byte stream too small requested: {0:d} available: {1:d}'.format( data_type_size, byte_stream_size)) def _GetByteStreamOperation(self): """Retrieves the byte stream operation. Returns: ByteStreamOperation: byte stream operation or None if unable to determine. """ byte_order_string = self.GetStructByteOrderString() format_string = self.GetStructFormatString() # pylint: disable=assignment-from-none if not format_string: return None format_string = ''.join([byte_order_string, format_string]) return byte_operations.StructOperation(format_string) def GetStructByteOrderString(self): """Retrieves the Python struct format string. Returns: str: format string as used by Python struct or None if format string cannot be determined. """ if not self._data_type_definition: return None return self._BYTE_ORDER_STRINGS.get( self._data_type_definition.byte_order, None) def GetStructFormatString(self): # pylint: disable=redundant-returns-doc """Retrieves the Python struct format string. Returns: str: format string as used by Python struct or None if format string cannot be determined. """ return None @abc.abstractmethod def FoldByteStream(self, mapped_value, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ @abc.abstractmethod def MapByteStream(self, byte_stream, **unused_kwargs): """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. Returns: object: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. 
""" class PrimitiveDataTypeMap(StorageDataTypeMap): """Primitive data type map.""" # pylint: disable=arguments-differ def __init__(self, data_type_definition): """Initializes a primitive data type map. Args: data_type_definition (DataTypeDefinition): data type definition. """ super(PrimitiveDataTypeMap, self).__init__(data_type_definition) self._operation = self._GetByteStreamOperation() def FoldByteStream(self, mapped_value, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ try: value = self.FoldValue(mapped_value) return self._operation.WriteTo(tuple([value])) except Exception as exception: error_string = ( 'Unable to write: {0:s} to byte stream with error: {1!s}').format( self._data_type_definition.name, exception) raise errors.FoldingError(error_string) def FoldValue(self, value): """Folds the data type into a value. Args: value (object): value. Returns: object: folded value. Raises: ValueError: if the data type definition cannot be folded into the value. """ return value def MapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: object: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. 
""" data_type_size = self._data_type_definition.GetByteSize() self._CheckByteStreamSize(byte_stream, byte_offset, data_type_size) try: struct_tuple = self._operation.ReadFrom(byte_stream[byte_offset:]) mapped_value = self.MapValue(*struct_tuple) except Exception as exception: error_string = ( 'Unable to read: {0:s} from byte stream at offset: {1:d} ' 'with error: {2!s}').format( self._data_type_definition.name, byte_offset, exception) raise errors.MappingError(error_string) if context: context.byte_size = data_type_size return mapped_value def MapValue(self, value): """Maps the data type on a value. Args: value (object): value. Returns: object: mapped value. Raises: ValueError: if the data type definition cannot be mapped on the value. """ return value class BooleanMap(PrimitiveDataTypeMap): """Boolean data type map.""" # We use 'I' here instead of 'L' because 'L' behaves architecture dependent. _FORMAT_STRINGS_UNSIGNED = { 1: 'B', 2: 'H', 4: 'I', } def __init__(self, data_type_definition): """Initializes a boolean data type map. Args: data_type_definition (DataTypeDefinition): data type definition. Raises: FormatError: if the data type map cannot be determined from the data type definition. """ if (data_type_definition.false_value is None and data_type_definition.true_value is None): raise errors.FormatError( 'Boolean data type has no True or False values.') super(BooleanMap, self).__init__(data_type_definition) def GetStructFormatString(self): """Retrieves the Python struct format string. Returns: str: format string as used by Python struct or None if format string cannot be determined. """ return self._FORMAT_STRINGS_UNSIGNED.get( self._data_type_definition.size, None) def FoldValue(self, value): """Folds the data type into a value. Args: value (object): value. Returns: object: folded value. Raises: ValueError: if the data type definition cannot be folded into the value. 
""" if value is False and self._data_type_definition.false_value is not None: return self._data_type_definition.false_value if value is True and self._data_type_definition.true_value is not None: return self._data_type_definition.true_value raise ValueError('No matching True and False values') def MapValue(self, value): """Maps the data type on a value. Args: value (object): value. Returns: bool: mapped value. Raises: ValueError: if the data type definition cannot be mapped on the value. """ if self._data_type_definition.false_value == value: return False if self._data_type_definition.true_value == value: return True if self._data_type_definition.false_value is None: return False if self._data_type_definition.true_value is None: return True raise ValueError('No matching True and False values') class CharacterMap(PrimitiveDataTypeMap): """Character data type map.""" # We use 'i' here instead of 'l' because 'l' behaves architecture dependent. _FORMAT_STRINGS = { 1: 'b', 2: 'h', 4: 'i', } def GetStructFormatString(self): """Retrieves the Python struct format string. Returns: str: format string as used by Python struct or None if format string cannot be determined. """ return self._FORMAT_STRINGS.get( self._data_type_definition.size, None) def FoldValue(self, value): """Folds the data type into a value. Args: value (object): value. Returns: object: folded value. Raises: ValueError: if the data type definition cannot be folded into the value. """ return ord(value) def MapValue(self, value): """Maps the data type on a value. Args: value (object): value. Returns: str: mapped value. Raises: ValueError: if the data type definition cannot be mapped on the value. """ return py2to3.UNICHR(value) class FloatingPointMap(PrimitiveDataTypeMap): """Floating-point data type map.""" _FORMAT_STRINGS = { 4: 'f', 8: 'd', } def GetStructFormatString(self): """Retrieves the Python struct format string. 
Returns: str: format string as used by Python struct or None if format string cannot be determined. """ return self._FORMAT_STRINGS.get( self._data_type_definition.size, None) class IntegerMap(PrimitiveDataTypeMap): """Integer data type map.""" # We use 'i' here instead of 'l' because 'l' behaves architecture dependent. _FORMAT_STRINGS_SIGNED = { 1: 'b', 2: 'h', 4: 'i', 8: 'q', } # We use 'I' here instead of 'L' because 'L' behaves architecture dependent. _FORMAT_STRINGS_UNSIGNED = { 1: 'B', 2: 'H', 4: 'I', 8: 'Q', } def GetStructFormatString(self): """Retrieves the Python struct format string. Returns: str: format string as used by Python struct or None if format string cannot be determined. """ if self._data_type_definition.format == definitions.FORMAT_UNSIGNED: return self._FORMAT_STRINGS_UNSIGNED.get( self._data_type_definition.size, None) return self._FORMAT_STRINGS_SIGNED.get( self._data_type_definition.size, None) class UUIDMap(StorageDataTypeMap): """UUID (or GUID) data type map.""" # pylint: disable=arguments-differ def __init__(self, data_type_definition): """Initializes an UUID (or GUID) data type map. Args: data_type_definition (DataTypeDefinition): data type definition. """ super(UUIDMap, self).__init__(data_type_definition) self._byte_order = data_type_definition.byte_order def FoldByteStream(self, mapped_value, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. 
""" value = None try: if self._byte_order == definitions.BYTE_ORDER_BIG_ENDIAN: value = mapped_value.bytes elif self._byte_order == definitions.BYTE_ORDER_LITTLE_ENDIAN: value = mapped_value.bytes_le except Exception as exception: error_string = ( 'Unable to write: {0:s} to byte stream with error: {1!s}').format( self._data_type_definition.name, exception) raise errors.FoldingError(error_string) return value def MapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: uuid.UUID: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ data_type_size = self._data_type_definition.GetByteSize() self._CheckByteStreamSize(byte_stream, byte_offset, data_type_size) try: if self._byte_order == definitions.BYTE_ORDER_BIG_ENDIAN: mapped_value = uuid.UUID( bytes=byte_stream[byte_offset:byte_offset + 16]) elif self._byte_order == definitions.BYTE_ORDER_LITTLE_ENDIAN: mapped_value = uuid.UUID( bytes_le=byte_stream[byte_offset:byte_offset + 16]) except Exception as exception: error_string = ( 'Unable to read: {0:s} from byte stream at offset: {1:d} ' 'with error: {2!s}').format( self._data_type_definition.name, byte_offset, exception) raise errors.MappingError(error_string) if context: context.byte_size = data_type_size return mapped_value class ElementSequenceDataTypeMap(StorageDataTypeMap): """Element sequence data type map.""" # pylint: disable=arguments-differ # Note that redundant-returns-doc is broken for pylint 1.7.x for abstract # methods. # pylint: disable=redundant-returns-doc def __init__(self, data_type_definition): """Initializes a sequence data type map. Args: data_type_definition (DataTypeDefinition): data type definition. 
""" element_data_type_definition = self._GetElementDataTypeDefinition( data_type_definition) super(ElementSequenceDataTypeMap, self).__init__(data_type_definition) self._element_data_type_map = DataTypeMapFactory.CreateDataTypeMapByType( element_data_type_definition) self._element_data_type_definition = element_data_type_definition def _CalculateElementsDataSize(self, context): """Calculates the elements data size. Args: context (Optional[DataTypeMapContext]): data type map context, used to determine the size hint. Returns: int: the elements data size or None if not available. """ elements_data_size = None if self._HasElementsDataSize(): elements_data_size = self._EvaluateElementsDataSize(context) elif self._HasNumberOfElements(): element_byte_size = self._element_data_type_definition.GetByteSize() if element_byte_size is not None: number_of_elements = self._EvaluateNumberOfElements(context) elements_data_size = number_of_elements * element_byte_size return elements_data_size def _EvaluateElementsDataSize(self, context): """Evaluates elements data size. Args: context (DataTypeMapContext): data type map context. Returns: int: elements data size. Raises: MappingError: if the elements data size cannot be determined. """ elements_data_size = None if self._data_type_definition.elements_data_size: elements_data_size = self._data_type_definition.elements_data_size elif self._data_type_definition.elements_data_size_expression: expression = self._data_type_definition.elements_data_size_expression namespace = {} if context and context.values: namespace.update(context.values) # Make sure __builtins__ contains an empty dictionary. 
namespace['__builtins__'] = {} try: elements_data_size = eval(expression, namespace) # pylint: disable=eval-used except Exception as exception: raise errors.MappingError( 'Unable to determine elements data size with error: {0!s}'.format( exception)) if elements_data_size is None or elements_data_size < 0: raise errors.MappingError( 'Invalid elements data size: {0!s}'.format(elements_data_size)) return elements_data_size def _EvaluateNumberOfElements(self, context): """Evaluates number of elements. Args: context (DataTypeMapContext): data type map context. Returns: int: number of elements. Raises: MappingError: if the number of elements cannot be determined. """ number_of_elements = None if self._data_type_definition.number_of_elements: number_of_elements = self._data_type_definition.number_of_elements elif self._data_type_definition.number_of_elements_expression: expression = self._data_type_definition.number_of_elements_expression namespace = {} if context and context.values: namespace.update(context.values) # Make sure __builtins__ contains an empty dictionary. namespace['__builtins__'] = {} try: number_of_elements = eval(expression, namespace) # pylint: disable=eval-used except Exception as exception: raise errors.MappingError( 'Unable to determine number of elements with error: {0!s}'.format( exception)) if number_of_elements is None or number_of_elements < 0: raise errors.MappingError( 'Invalid number of elements: {0!s}'.format(number_of_elements)) return number_of_elements def _GetElementDataTypeDefinition(self, data_type_definition): """Retrieves the element data type definition. Args: data_type_definition (DataTypeDefinition): data type definition. Returns: DataTypeDefinition: element data type definition. Raises: FormatError: if the element data type cannot be determined from the data type definition. 
""" if not data_type_definition: raise errors.FormatError('Missing data type definition') element_data_type_definition = getattr( data_type_definition, 'element_data_type_definition', None) if not element_data_type_definition: raise errors.FormatError( 'Invalid data type definition missing element') return element_data_type_definition def _HasElementsDataSize(self): """Checks if the data type defines an elements data size. Returns: bool: True if the data types defines an elements data size. """ return ( self._data_type_definition.elements_data_size is not None or self._data_type_definition.elements_data_size_expression is not None) def _HasElementsTerminator(self): """Checks if the data type defines an elements terminator. Returns: bool: True if the data types defines an elements terminator. """ return self._data_type_definition.elements_terminator is not None def _HasNumberOfElements(self): """Checks if the data type defines a number of elements. Returns: bool: True if the data types defines a number of elements. """ return( self._data_type_definition.number_of_elements is not None or self._data_type_definition.number_of_elements_expression is not None) @abc.abstractmethod def FoldByteStream(self, mapped_value, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ def GetSizeHint(self, context=None, **unused_kwargs): """Retrieves a hint about the size. Args: context (Optional[DataTypeMapContext]): data type map context, used to determine the size hint. Returns: int: hint of the number of bytes needed from the byte stream or None. 
""" context_state = getattr(context, 'state', {}) elements_data_size = self.GetByteSize() if elements_data_size: return elements_data_size try: elements_data_size = self._CalculateElementsDataSize(context) except errors.MappingError: pass if elements_data_size is None and self._HasElementsTerminator(): size_hints = context_state.get('size_hints', {}) size_hint = size_hints.get(self._data_type_definition.name, None) elements_data_size = 0 if size_hint: elements_data_size = size_hint.byte_size if not size_hint or not size_hint.is_complete: elements_data_size += self._element_data_type_definition.GetByteSize() return elements_data_size def GetStructByteOrderString(self): """Retrieves the Python struct format string. Returns: str: format string as used by Python struct or None if format string cannot be determined. """ if not self._element_data_type_map: return None return self._element_data_type_map.GetStructByteOrderString() @abc.abstractmethod def MapByteStream(self, byte_stream, **unused_kwargs): """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. Returns: object: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ class SequenceMap(ElementSequenceDataTypeMap): """Sequence data type map.""" # Note that redundant-returns-doc is broken for pylint 1.7.x for abstract # methods. # pylint: disable=redundant-returns-doc def __init__(self, data_type_definition): """Initializes a sequence data type map. Args: data_type_definition (DataTypeDefinition): data type definition. 
""" super(SequenceMap, self).__init__(data_type_definition) self._fold_byte_stream = None self._map_byte_stream = None self._operation = None if (self._element_data_type_definition.IsComposite() or data_type_definition.elements_data_size_expression is not None or data_type_definition.elements_terminator is not None or data_type_definition.number_of_elements_expression is not None): self._fold_byte_stream = self._CompositeFoldByteStream self._map_byte_stream = self._CompositeMapByteStream else: self._fold_byte_stream = self._LinearFoldByteStream self._map_byte_stream = self._LinearMapByteStream self._operation = self._GetByteStreamOperation() def _CompositeFoldByteStream( self, mapped_value, byte_offset=0, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. byte_offset (Optional[int]): offset into the byte stream where to start. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ # TODO: implement. def _CompositeMapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): """Maps a sequence of composite data types on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: tuple[object, ...]: mapped values. Raises: ByteStreamTooSmallError: if the byte stream is too small. MappingError: if the data type definition cannot be mapped on the byte stream. 
""" elements_data_size = None elements_terminator = None number_of_elements = None if self._HasElementsDataSize(): elements_data_size = self._EvaluateElementsDataSize(context) element_byte_size = self._element_data_type_definition.GetByteSize() if element_byte_size is not None: number_of_elements, _ = divmod(elements_data_size, element_byte_size) else: elements_terminator = ( self._element_data_type_definition.elements_terminator) elif self._HasElementsTerminator(): elements_terminator = self._data_type_definition.elements_terminator elif self._HasNumberOfElements(): number_of_elements = self._EvaluateNumberOfElements(context) if elements_terminator is None and number_of_elements is None: raise errors.MappingError( 'Unable to determine element terminator or number of elements') context_state = getattr(context, 'state', {}) elements_data_offset = context_state.get('elements_data_offset', 0) element_index = context_state.get('element_index', 0) element_value = None mapped_values = context_state.get('mapped_values', []) size_hints = context_state.get('size_hints', {}) subcontext = context_state.get('context', None) if not subcontext: subcontext = DataTypeMapContext() try: while byte_stream[byte_offset:]: if (number_of_elements is not None and element_index == number_of_elements): break if (elements_data_size is not None and elements_data_offset >= elements_data_size): break element_value = self._element_data_type_map.MapByteStream( byte_stream, byte_offset=byte_offset, context=subcontext) byte_offset += subcontext.byte_size elements_data_offset += subcontext.byte_size element_index += 1 mapped_values.append(element_value) if (elements_terminator is not None and element_value == elements_terminator): break except errors.ByteStreamTooSmallError as exception: context_state['context'] = subcontext context_state['elements_data_offset'] = elements_data_offset context_state['element_index'] = element_index context_state['mapped_values'] = mapped_values raise 
errors.ByteStreamTooSmallError(exception) except Exception as exception: raise errors.MappingError(exception) if number_of_elements is not None and element_index != number_of_elements: context_state['context'] = subcontext context_state['elements_data_offset'] = elements_data_offset context_state['element_index'] = element_index context_state['mapped_values'] = mapped_values error_string = ( 'Unable to read: {0:s} from byte stream at offset: {1:d} ' 'with error: missing element: {2:d}').format( self._data_type_definition.name, byte_offset, element_index - 1) raise errors.ByteStreamTooSmallError(error_string) if (elements_terminator is not None and element_value != elements_terminator and ( elements_data_size is None or elements_data_offset < elements_data_size)): byte_stream_size = len(byte_stream) size_hints[self._data_type_definition.name] = DataTypeMapSizeHint( byte_stream_size - byte_offset) context_state['context'] = subcontext context_state['elements_data_offset'] = elements_data_offset context_state['element_index'] = element_index context_state['mapped_values'] = mapped_values context_state['size_hints'] = size_hints error_string = ( 'Unable to read: {0:s} from byte stream at offset: {1:d} ' 'with error: unable to find elements terminator').format( self._data_type_definition.name, byte_offset) raise errors.ByteStreamTooSmallError(error_string) if context: context.byte_size = elements_data_offset context.state = {} return tuple(mapped_values) def _LinearFoldByteStream(self, mapped_value, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. 
""" try: return self._operation.WriteTo(mapped_value) except Exception as exception: error_string = ( 'Unable to write: {0:s} to byte stream with error: {1!s}').format( self._data_type_definition.name, exception) raise errors.FoldingError(error_string) def _LinearMapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): """Maps a data type sequence on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: tuple[object, ...]: mapped values. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ elements_data_size = self._data_type_definition.GetByteSize() self._CheckByteStreamSize(byte_stream, byte_offset, elements_data_size) try: struct_tuple = self._operation.ReadFrom(byte_stream[byte_offset:]) mapped_values = map(self._element_data_type_map.MapValue, struct_tuple) except Exception as exception: error_string = ( 'Unable to read: {0:s} from byte stream at offset: {1:d} ' 'with error: {2!s}').format( self._data_type_definition.name, byte_offset, exception) raise errors.MappingError(error_string) if context: context.byte_size = elements_data_size return tuple(mapped_values) def FoldByteStream(self, mapped_value, **kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ return self._fold_byte_stream(mapped_value, **kwargs) def GetStructFormatString(self): """Retrieves the Python struct format string. Returns: str: format string as used by Python struct or None if format string cannot be determined. 
""" if not self._element_data_type_map: return None number_of_elements = None if self._data_type_definition.elements_data_size: element_byte_size = self._element_data_type_definition.GetByteSize() if element_byte_size is None: return None number_of_elements, _ = divmod( self._data_type_definition.elements_data_size, element_byte_size) elif self._data_type_definition.number_of_elements: number_of_elements = self._data_type_definition.number_of_elements format_string = self._element_data_type_map.GetStructFormatString() if not number_of_elements or not format_string: return None return '{0:d}{1:s}'.format(number_of_elements, format_string) def MapByteStream(self, byte_stream, **kwargs): """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. Returns: tuple[object, ...]: mapped values. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ return self._map_byte_stream(byte_stream, **kwargs) class StreamMap(ElementSequenceDataTypeMap): """Stream data type map.""" # pylint: disable=arguments-differ # Note that redundant-returns-doc is broken for pylint 1.7.x for abstract # methods. # pylint: disable=redundant-returns-doc def __init__(self, data_type_definition): """Initializes a stream data type map. Args: data_type_definition (DataTypeDefinition): data type definition. Raises: FormatError: if the data type map cannot be determined from the data type definition. """ super(StreamMap, self).__init__(data_type_definition) self._fold_byte_stream = None self._map_byte_stream = None if self._element_data_type_definition.IsComposite(): raise errors.FormatError('Unsupported composite element data type') def FoldByteStream(self, mapped_value, context=None, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. context (Optional[DataTypeMapContext]): data type map context. Returns: bytes: byte stream. 
Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ elements_data_size = self._CalculateElementsDataSize(context) if elements_data_size is not None: if elements_data_size != len(mapped_value): raise errors.FoldingError( 'Mismatch between elements data size and mapped value size') elif not self._HasElementsTerminator(): raise errors.FoldingError('Unable to determine elements data size') else: elements_terminator = self._data_type_definition.elements_terminator elements_terminator_size = len(elements_terminator) if mapped_value[-elements_terminator_size:] != elements_terminator: mapped_value = b''.join([mapped_value, elements_terminator]) return mapped_value def GetStructFormatString(self): """Retrieves the Python struct format string. Returns: str: format string as used by Python struct or None if format string cannot be determined. """ byte_size = self.GetByteSize() if not byte_size: return None return '{0:d}B'.format(byte_size) def MapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: tuple[object, ...]: mapped values. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. 
""" context_state = getattr(context, 'state', {}) size_hints = context_state.get('size_hints', {}) elements_data_size = self._CalculateElementsDataSize(context) if elements_data_size is not None: self._CheckByteStreamSize(byte_stream, byte_offset, elements_data_size) elif not self._HasElementsTerminator(): raise errors.MappingError( 'Unable to determine elements data size and missing elements ' 'terminator') else: byte_stream_size = len(byte_stream) element_byte_size = self._element_data_type_definition.GetByteSize() elements_data_offset = byte_offset next_elements_data_offset = elements_data_offset + element_byte_size elements_terminator = self._data_type_definition.elements_terminator element_value = byte_stream[ elements_data_offset:next_elements_data_offset] while byte_stream[elements_data_offset:]: elements_data_offset = next_elements_data_offset if element_value == elements_terminator: elements_data_size = elements_data_offset - byte_offset break next_elements_data_offset += element_byte_size element_value = byte_stream[ elements_data_offset:next_elements_data_offset] if element_value != elements_terminator: size_hints[self._data_type_definition.name] = DataTypeMapSizeHint( byte_stream_size - byte_offset) context_state['size_hints'] = size_hints error_string = ( 'Unable to read: {0:s} from byte stream at offset: {1:d} ' 'with error: unable to find elements terminator').format( self._data_type_definition.name, byte_offset) raise errors.ByteStreamTooSmallError(error_string) if context: context.byte_size = elements_data_size size_hints[self._data_type_definition.name] = DataTypeMapSizeHint( elements_data_size, is_complete=True) context_state['size_hints'] = size_hints return byte_stream[byte_offset:byte_offset + elements_data_size] class StringMap(StreamMap): """String data type map.""" # pylint: disable=arguments-differ def FoldByteStream(self, mapped_value, **kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. 
Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ try: byte_stream = mapped_value.encode(self._data_type_definition.encoding) except Exception as exception: error_string = ( 'Unable to write: {0:s} to byte stream with error: {1!s}').format( self._data_type_definition.name, exception) raise errors.MappingError(error_string) return super(StringMap, self).FoldByteStream(byte_stream, **kwargs) def MapByteStream(self, byte_stream, byte_offset=0, **kwargs): """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. Returns: str: mapped values. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ byte_stream = super(StringMap, self).MapByteStream( byte_stream, byte_offset=byte_offset, **kwargs) if self._HasElementsTerminator(): # Remove the elements terminator and any trailing data from # the byte stream. elements_terminator = self._data_type_definition.elements_terminator elements_terminator_size = len(elements_terminator) byte_offset = 0 byte_stream_size = len(byte_stream) while byte_offset < byte_stream_size: end_offset = byte_offset + elements_terminator_size if byte_stream[byte_offset:end_offset] == elements_terminator: break byte_offset += elements_terminator_size byte_stream = byte_stream[:byte_offset] try: return byte_stream.decode(self._data_type_definition.encoding) except Exception as exception: error_string = ( 'Unable to read: {0:s} from byte stream at offset: {1:d} ' 'with error: {2!s}').format( self._data_type_definition.name, byte_offset, exception) raise errors.MappingError(error_string) class StructureMap(StorageDataTypeMap): """Structure data type map.""" # pylint: disable=arguments-differ # Note that redundant-returns-doc is broken for pylint 1.7.x for abstract # methods. 
# pylint: disable=redundant-returns-doc def __init__(self, data_type_definition): """Initializes a structure data type map. Args: data_type_definition (DataTypeDefinition): data type definition. """ super(StructureMap, self).__init__(data_type_definition) self._attribute_names = self._GetAttributeNames(data_type_definition) self._data_type_map_cache = {} self._data_type_maps = self._GetMemberDataTypeMaps( data_type_definition, self._data_type_map_cache) self._fold_byte_stream = None self._format_string = None self._map_byte_stream = None self._number_of_attributes = len(self._attribute_names) self._operation = None self._structure_values_class = ( runtime.StructureValuesClassFactory.CreateClass( data_type_definition)) if self._CheckCompositeMap(data_type_definition): self._fold_byte_stream = self._CompositeFoldByteStream self._map_byte_stream = self._CompositeMapByteStream else: self._fold_byte_stream = self._LinearFoldByteStream self._map_byte_stream = self._LinearMapByteStream self._operation = self._GetByteStreamOperation() def _CheckCompositeMap(self, data_type_definition): """Determines if the data type definition needs a composite map. Args: data_type_definition (DataTypeDefinition): structure data type definition. Returns: bool: True if a composite map is needed, False otherwise. Raises: FormatError: if a composite map is needed cannot be determined from the data type definition. 
""" if not data_type_definition: raise errors.FormatError('Missing data type definition') members = getattr(data_type_definition, 'members', None) if not members: raise errors.FormatError('Invalid data type definition missing members') is_composite_map = False last_member_byte_order = data_type_definition.byte_order for member_definition in members: if member_definition.IsComposite(): is_composite_map = True break if (last_member_byte_order != definitions.BYTE_ORDER_NATIVE and member_definition.byte_order != definitions.BYTE_ORDER_NATIVE and last_member_byte_order != member_definition.byte_order): is_composite_map = True break last_member_byte_order = member_definition.byte_order return is_composite_map def _CompositeFoldByteStream( self, mapped_value, context=None, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. context (Optional[DataTypeMapContext]): data type map context. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. 
""" context_state = getattr(context, 'state', {}) attribute_index = context_state.get('attribute_index', 0) subcontext = context_state.get('context', None) if not subcontext: subcontext = DataTypeMapContext(values={ type(mapped_value).__name__: mapped_value}) data_attributes = [] for attribute_index in range(attribute_index, self._number_of_attributes): attribute_name = self._attribute_names[attribute_index] data_type_map = self._data_type_maps[attribute_index] member_value = getattr(mapped_value, attribute_name, None) if data_type_map is None or member_value is None: continue member_data = data_type_map.FoldByteStream( member_value, context=subcontext) if member_data is None: return None data_attributes.append(member_data) if context: context.state = {} return b''.join(data_attributes) def _CompositeMapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): """Maps a sequence of composite data types on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: object: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. 
""" context_state = getattr(context, 'state', {}) attribute_index = context_state.get('attribute_index', 0) mapped_values = context_state.get('mapped_values', None) subcontext = context_state.get('context', None) if not mapped_values: mapped_values = self._structure_values_class() if not subcontext: subcontext = DataTypeMapContext(values={ type(mapped_values).__name__: mapped_values}) members_data_size = 0 for attribute_index in range(attribute_index, self._number_of_attributes): attribute_name = self._attribute_names[attribute_index] data_type_map = self._data_type_maps[attribute_index] try: value = data_type_map.MapByteStream( byte_stream, byte_offset=byte_offset, context=subcontext) setattr(mapped_values, attribute_name, value) except errors.ByteStreamTooSmallError as exception: context_state['attribute_index'] = attribute_index context_state['context'] = subcontext context_state['mapped_values'] = mapped_values raise errors.ByteStreamTooSmallError(exception) except Exception as exception: raise errors.MappingError(exception) byte_offset += subcontext.byte_size members_data_size += subcontext.byte_size if attribute_index != (self._number_of_attributes - 1): context_state['attribute_index'] = attribute_index context_state['context'] = subcontext context_state['mapped_values'] = mapped_values error_string = ( 'Unable to read: {0:s} from byte stream at offset: {1:d} ' 'with error: missing attribute: {2:d}').format( self._data_type_definition.name, byte_offset, attribute_index) raise errors.ByteStreamTooSmallError(error_string) if context: context.byte_size = members_data_size context.state = {} return mapped_values def _GetAttributeNames(self, data_type_definition): """Determines the attribute (or field) names of the members. Args: data_type_definition (DataTypeDefinition): data type definition. Returns: list[str]: attribute names. Raises: FormatError: if the attribute names cannot be determined from the data type definition. 
""" if not data_type_definition: raise errors.FormatError('Missing data type definition') attribute_names = [] for member_definition in data_type_definition.members: attribute_names.append(member_definition.name) return attribute_names def _GetMemberDataTypeMaps(self, data_type_definition, data_type_map_cache): """Retrieves the member data type maps. Args: data_type_definition (DataTypeDefinition): data type definition. data_type_map_cache (dict[str, DataTypeMap]): cached data type maps. Returns: list[DataTypeMap]: member data type maps. Raises: FormatError: if the data type maps cannot be determined from the data type definition. """ if not data_type_definition: raise errors.FormatError('Missing data type definition') members = getattr(data_type_definition, 'members', None) if not members: raise errors.FormatError('Invalid data type definition missing members') data_type_maps = [] for member_definition in members: if isinstance(member_definition, data_types.MemberDataTypeDefinition): member_definition = member_definition.member_data_type_definition if (data_type_definition.byte_order != definitions.BYTE_ORDER_NATIVE and member_definition.byte_order == definitions.BYTE_ORDER_NATIVE): # Make a copy of the data type definition where byte-order can be # safely changed. member_definition = copy.copy(member_definition) member_definition.name = '_{0:s}_{1:s}'.format( data_type_definition.name, member_definition.name) member_definition.byte_order = data_type_definition.byte_order if member_definition.name not in data_type_map_cache: data_type_map = DataTypeMapFactory.CreateDataTypeMapByType( member_definition) data_type_map_cache[member_definition.name] = data_type_map data_type_maps.append(data_type_map_cache[member_definition.name]) return data_type_maps def _LinearFoldByteStream(self, mapped_value, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. 
Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ try: attribute_values = [ getattr(mapped_value, attribute_name, None) for attribute_name in self._attribute_names] attribute_values = [ value for value in attribute_values if value is not None] return self._operation.WriteTo(tuple(attribute_values)) except Exception as exception: error_string = ( 'Unable to write: {0:s} to byte stream with error: {1!s}').format( self._data_type_definition.name, exception) raise errors.FoldingError(error_string) def _LinearMapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): """Maps a data type sequence on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: object: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ members_data_size = self._data_type_definition.GetByteSize() self._CheckByteStreamSize(byte_stream, byte_offset, members_data_size) try: struct_tuple = self._operation.ReadFrom(byte_stream[byte_offset:]) values = [ self._data_type_maps[index].MapValue(value) for index, value in enumerate(struct_tuple)] mapped_value = self._structure_values_class(*values) except Exception as exception: error_string = ( 'Unable to read: {0:s} from byte stream at offset: {1:d} ' 'with error: {2!s}').format( self._data_type_definition.name, byte_offset, exception) raise errors.MappingError(error_string) if context: context.byte_size = members_data_size return mapped_value def CreateStructureValues(self, *args, **kwargs): """Creates a structure values object. Returns: object: structure values. """ return self._structure_values_class(*args, **kwargs) def FoldByteStream(self, mapped_value, **kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. 
Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ return self._fold_byte_stream(mapped_value, **kwargs) def GetSizeHint(self, context=None, **unused_kwargs): """Retrieves a hint about the size. Args: context (Optional[DataTypeMapContext]): data type map context, used to determine the size hint. Returns: int: hint of the number of bytes needed from the byte stream or None. """ context_state = getattr(context, 'state', {}) subcontext = context_state.get('context', None) if not subcontext: mapped_values = context_state.get('mapped_values', None) subcontext = DataTypeMapContext(values={ type(mapped_values).__name__: mapped_values}) size_hint = 0 for data_type_map in self._data_type_maps: data_type_size = data_type_map.GetSizeHint(context=subcontext) if data_type_size is None: break size_hint += data_type_size return size_hint def GetStructFormatString(self): """Retrieves the Python struct format string. Returns: str: format string as used by Python struct or None if format string cannot be determined. """ if self._format_string is None and self._data_type_maps: format_strings = [] for member_data_type_map in self._data_type_maps: if member_data_type_map is None: return None member_format_string = member_data_type_map.GetStructFormatString() if member_format_string is None: return None format_strings.append(member_format_string) self._format_string = ''.join(format_strings) return self._format_string def MapByteStream(self, byte_stream, **kwargs): """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. Returns: object: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ return self._map_byte_stream(byte_stream, **kwargs) class SemanticDataTypeMap(DataTypeMap): """Semantic data type map.""" def FoldByteStream(self, mapped_value, **unused_kwargs): # pylint: disable=redundant-returns-doc """Folds the data type into a byte stream. 
Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ raise errors.FoldingError( 'Unable to fold {0:s} data type into byte stream'.format( self._data_type_definition.TYPE_INDICATOR)) def MapByteStream(self, byte_stream, **unused_kwargs): # pylint: disable=redundant-returns-doc """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. Returns: object: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ raise errors.MappingError( 'Unable to map {0:s} data type to byte stream'.format( self._data_type_definition.TYPE_INDICATOR)) class ConstantMap(SemanticDataTypeMap): """Constant data type map.""" class EnumerationMap(SemanticDataTypeMap): """Enumeration data type map.""" def GetName(self, number): """Retrieves the name of an enumeration value by number. Args: number (int): number. Returns: str: name of the enumeration value or None if no corresponding enumeration value was found. """ value = self._data_type_definition.values_per_number.get(number, None) if not value: return None return value.name class DataTypeMapFactory(object): """Factory for data type maps.""" # TODO: add support for definitions.TYPE_INDICATOR_FORMAT _MAP_PER_DEFINITION = { definitions.TYPE_INDICATOR_BOOLEAN: BooleanMap, definitions.TYPE_INDICATOR_CHARACTER: CharacterMap, definitions.TYPE_INDICATOR_CONSTANT: ConstantMap, definitions.TYPE_INDICATOR_ENUMERATION: EnumerationMap, definitions.TYPE_INDICATOR_FLOATING_POINT: FloatingPointMap, definitions.TYPE_INDICATOR_INTEGER: IntegerMap, definitions.TYPE_INDICATOR_SEQUENCE: SequenceMap, definitions.TYPE_INDICATOR_STREAM: StreamMap, definitions.TYPE_INDICATOR_STRING: StringMap, definitions.TYPE_INDICATOR_STRUCTURE: StructureMap, definitions.TYPE_INDICATOR_UUID: UUIDMap} def __init__(self, definitions_registry): """Initializes a data type maps factory. 
Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. """ super(DataTypeMapFactory, self).__init__() self._definitions_registry = definitions_registry def CreateDataTypeMap(self, definition_name): """Creates a specific data type map by name. Args: definition_name (str): name of the data type definition. Returns: DataTypeMap: data type map or None if the date type definition is not available. """ data_type_definition = self._definitions_registry.GetDefinitionByName( definition_name) if not data_type_definition: return None return DataTypeMapFactory.CreateDataTypeMapByType(data_type_definition) @classmethod def CreateDataTypeMapByType(cls, data_type_definition): """Creates a specific data type map by type indicator. Args: data_type_definition (DataTypeDefinition): data type definition. Returns: DataTypeMap: data type map or None if the date type definition is not available. """ data_type_map_class = cls._MAP_PER_DEFINITION.get( data_type_definition.TYPE_INDICATOR, None) if not data_type_map_class: return None return data_type_map_class(data_type_definition) dtfabric-20190120/dtfabric/runtime/fabric.py000066400000000000000000000013731342102721300205700ustar00rootroot00000000000000# -*- coding: utf-8 -*- """dtFabric helper objects.""" import io from dtfabric import reader from dtfabric import registry from dtfabric.runtime import data_maps class DataTypeFabric(data_maps.DataTypeMapFactory): """Data type fabric.""" def __init__(self, yaml_definition=None): """Initializes a data type fabric. Args: yaml_definition (str): YAML formatted data type definitions. 
""" definitions_registry = registry.DataTypeDefinitionsRegistry() if yaml_definition: definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() file_object = io.BytesIO(yaml_definition) definitions_reader.ReadFileObject(definitions_registry, file_object) super(DataTypeFabric, self).__init__(definitions_registry) dtfabric-20190120/dtfabric/runtime/runtime.py000066400000000000000000000141721342102721300210260ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Run-time objects.""" from __future__ import unicode_literals try: import __builtin__ as builtins except ImportError: import builtins import keyword import sys from dtfabric import data_types from dtfabric import definitions class StructureValuesClassFactory(object): """Structure values class factory.""" _CLASS_TEMPLATE = '\n'.join([ 'class {type_name:s}(object):', ' """{type_description:s}.', '', ' Attributes:', '{class_attributes_description:s}', ' """', '', ' def __init__(self, {init_arguments:s}):', ' """Initializes an instance of {type_name:s}."""', ' super({type_name:s}, self).__init__()', '{instance_attributes:s}', '']) _PYTHON_NATIVE_TYPES = { definitions.TYPE_INDICATOR_BOOLEAN: 'bool', definitions.TYPE_INDICATOR_CHARACTER: 'str', definitions.TYPE_INDICATOR_FLOATING_POINT: 'float', definitions.TYPE_INDICATOR_INTEGER: 'int', definitions.TYPE_INDICATOR_UUID: 'uuid.UUID'} @classmethod def _CreateClassTemplate(cls, data_type_definition): """Creates the class template. Args: data_type_definition (DataTypeDefinition): data type definition. Returns: str: class template. 
""" type_name = data_type_definition.name type_description = data_type_definition.description or type_name while type_description.endswith('.'): type_description = type_description[:-1] class_attributes_description = [] init_arguments = [] instance_attributes = [] for member_definition in data_type_definition.members: attribute_name = member_definition.name description = member_definition.description or attribute_name while description.endswith('.'): description = description[:-1] member_data_type = getattr(member_definition, 'member_data_type', '') if isinstance(member_definition, data_types.MemberDataTypeDefinition): member_definition = member_definition.member_data_type_definition member_type_indicator = member_definition.TYPE_INDICATOR if member_type_indicator == definitions.TYPE_INDICATOR_SEQUENCE: element_type_indicator = member_definition.element_data_type member_type_indicator = 'tuple[{0:s}]'.format(element_type_indicator) else: member_type_indicator = cls._PYTHON_NATIVE_TYPES.get( member_type_indicator, member_data_type) argument = '{0:s}=None'.format(attribute_name) definition = ' self.{0:s} = {0:s}'.format(attribute_name) description = ' {0:s} ({1:s}): {2:s}.'.format( attribute_name, member_type_indicator, description) class_attributes_description.append(description) init_arguments.append(argument) instance_attributes.append(definition) class_attributes_description = '\n'.join( sorted(class_attributes_description)) init_arguments = ', '.join(init_arguments) instance_attributes = '\n'.join(sorted(instance_attributes)) template_values = { 'class_attributes_description': class_attributes_description, 'init_arguments': init_arguments, 'instance_attributes': instance_attributes, 'type_description': type_description, 'type_name': type_name} return cls._CLASS_TEMPLATE.format(**template_values) @classmethod def _IsIdentifier(cls, string): """Checks if a string contains an identifier. Args: string (str): string to check. 
Returns: bool: True if the string contains an identifier, False otherwise. """ return ( string and not string[0].isdigit() and all(character.isalnum() or character == '_' for character in string)) @classmethod def _ValidateDataTypeDefinition(cls, data_type_definition): """Validates the data type definition. Args: data_type_definition (DataTypeDefinition): data type definition. Raises: ValueError: if the data type definition is not considered valid. """ if not cls._IsIdentifier(data_type_definition.name): raise ValueError( 'Data type definition name: {0!s} not a valid identifier'.format( data_type_definition.name)) if keyword.iskeyword(data_type_definition.name): raise ValueError( 'Data type definition name: {0!s} matches keyword'.format( data_type_definition.name)) members = getattr(data_type_definition, 'members', None) if not members: raise ValueError( 'Data type definition name: {0!s} missing members'.format( data_type_definition.name)) defined_attribute_names = set() for member_definition in members: attribute_name = member_definition.name if not cls._IsIdentifier(attribute_name): raise ValueError('Attribute name: {0!s} not a valid identifier'.format( attribute_name)) if attribute_name.startswith('_'): raise ValueError('Attribute name: {0!s} starts with underscore'.format( attribute_name)) if keyword.iskeyword(attribute_name): raise ValueError('Attribute name: {0!s} matches keyword'.format( attribute_name)) if attribute_name in defined_attribute_names: raise ValueError('Attribute name: {0!s} already defined'.format( attribute_name)) defined_attribute_names.add(attribute_name) @classmethod def CreateClass(cls, data_type_definition): """Creates a new structure values class. Args: data_type_definition (DataTypeDefinition): data type definition. Returns: class: structure values class. 
""" cls._ValidateDataTypeDefinition(data_type_definition) class_definition = cls._CreateClassTemplate(data_type_definition) namespace = { '__builtins__' : { 'object': builtins.object, 'super': builtins.super}, '__name__': '{0:s}'.format(data_type_definition.name)} if sys.version_info[0] >= 3: # pylint: disable=no-member namespace['__builtins__']['__build_class__'] = builtins.__build_class__ exec(class_definition, namespace) # pylint: disable=exec-used return namespace[data_type_definition.name] dtfabric-20190120/requirements.txt000066400000000000000000000000341342102721300170040ustar00rootroot00000000000000pip >= 7.0.0 PyYAML >= 3.10 dtfabric-20190120/run_tests.py000077500000000000000000000014221342102721300161250ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- """Script to run the tests.""" import sys import unittest # Change PYTHONPATH to include dtFabric. sys.path.insert(0, '.') if __name__ == '__main__': print('Using Python version {0!s}'.format(sys.version)) fail_unless_has_test_file = '--fail-unless-has-test-file' in sys.argv setattr(unittest, 'fail_unless_has_test_file', fail_unless_has_test_file) if fail_unless_has_test_file: # Remove --fail-unless-has-test-file otherwise it will conflict with # the argparse tests. 
sys.argv.remove('--fail-unless-has-test-file') test_suite = unittest.TestLoader().discover('tests', pattern='*.py') test_results = unittest.TextTestRunner(verbosity=2).run(test_suite) if not test_results.wasSuccessful(): sys.exit(1) dtfabric-20190120/scripts/000077500000000000000000000000001342102721300152125ustar00rootroot00000000000000dtfabric-20190120/scripts/validate-definitions.py000077500000000000000000000064651342102721300217040ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # pylint: disable=invalid-name """Script to validate dtFabric format definitions.""" from __future__ import print_function from __future__ import unicode_literals import argparse import glob import logging import os import sys from dtfabric import errors from dtfabric import reader from dtfabric import registry class DefinitionsValidator(object): """dtFabric definitions validator.""" def CheckDirectory(self, path, extension='yaml'): """Validates definition files in a directory. Args: path (str): path of the definition file. extension (Optional[str]): extension of the filenames to read. Returns: bool: True if the directory contains valid definitions. """ result = True if extension: glob_spec = os.path.join(path, '*.{0:s}'.format(extension)) else: glob_spec = os.path.join(path, '*') for definition_file in sorted(glob.glob(glob_spec)): if not self.CheckFile(definition_file): result = False return result def CheckFile(self, path): """Validates the definition in a file. Args: path (str): path of the definition file. Returns: bool: True if the file contains valid definitions. 
""" print('Checking: {0:s}'.format(path)) definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() result = False try: definitions_reader.ReadFile(definitions_registry, path) result = True except KeyError as exception: logging.warning(( 'Unable to register data type definition in file: {0:s} with ' 'error: {1:s}').format(path, exception)) except errors.FormatError as exception: logging.warning( 'Unable to validate file: {0:s} with error: {1:s}'.format( path, exception)) return result def Main(): """The main program function. Returns: bool: True if successful or False if not. """ argument_parser = argparse.ArgumentParser( description='Validates dtFabric format definitions.') argument_parser.add_argument( 'source', nargs='?', action='store', metavar='PATH', default=None, help=( 'path of the file or directory containing the dtFabric format ' 'definitions.')) options = argument_parser.parse_args() if not options.source: print('Source value is missing.') print('') argument_parser.print_help() print('') return False if not os.path.exists(options.source): print('No such file: {0:s}'.format(options.source)) print('') return False logging.basicConfig( level=logging.INFO, format='[%(levelname)s] %(message)s') source_is_directory = os.path.isdir(options.source) validator = DefinitionsValidator() if source_is_directory: source_description = os.path.join(options.source, '*.yaml') else: source_description = options.source print('Validating dtFabric definitions in: {0:s}'.format(source_description)) if source_is_directory: result = validator.CheckDirectory(options.source) else: result = validator.CheckFile(options.source) if not result: print('FAILURE') else: print('SUCCESS') return result if __name__ == '__main__': if not Main(): sys.exit(1) else: sys.exit(0) dtfabric-20190120/setup.cfg000066400000000000000000000004471342102721300153510ustar00rootroot00000000000000[metadata] license_file = LICENSE [bdist_rpm] 
release = 1 packager = Joachim Metz doc_files = ACKNOWLEDGEMENTS AUTHORS LICENSE README build_requires = python-setuptools requires = python2-pyyaml >= 3.10 [bdist_wheel] universal = 1 dtfabric-20190120/setup.py000077500000000000000000000160621342102721300152450ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- """Installation and deployment script.""" from __future__ import print_function import glob import locale import os import sys try: from setuptools import find_packages, setup except ImportError: from distutils.core import find_packages, setup try: from distutils.command.bdist_msi import bdist_msi except ImportError: bdist_msi = None try: from distutils.command.bdist_rpm import bdist_rpm except ImportError: bdist_rpm = None version_tuple = (sys.version_info[0], sys.version_info[1]) if version_tuple[0] not in (2, 3): print('Unsupported Python version: {0:s}.'.format(sys.version)) sys.exit(1) elif version_tuple[0] == 2 and version_tuple < (2, 7): print(( 'Unsupported Python 2 version: {0:s}, version 2.7 or higher ' 'required.').format(sys.version)) sys.exit(1) elif version_tuple[0] == 3 and version_tuple < (3, 4): print(( 'Unsupported Python 3 version: {0:s}, version 3.4 or higher ' 'required.').format(sys.version)) sys.exit(1) # Change PYTHONPATH to include dtfabric so that we can get the version. sys.path.insert(0, '.') import dtfabric # pylint: disable=wrong-import-position if not bdist_msi: BdistMSICommand = None else: class BdistMSICommand(bdist_msi): """Custom handler for the bdist_msi command.""" def run(self): """Builds an MSI.""" # Command bdist_msi does not support the library version, neither a date # as a version but if we suffix it with .1 everything is fine. self.distribution.metadata.version += '.1' bdist_msi.run(self) if not bdist_rpm: BdistRPMCommand = None else: class BdistRPMCommand(bdist_rpm): """Custom handler for the bdist_rpm command.""" def _make_spec_file(self): """Generates the text of an RPM spec file. 
Returns: list[str]: lines of the RPM spec file. """ # Note that bdist_rpm can be an old style class. if issubclass(BdistRPMCommand, object): spec_file = super(BdistRPMCommand, self)._make_spec_file() else: spec_file = bdist_rpm._make_spec_file(self) if sys.version_info[0] < 3: python_package = 'python2' else: python_package = 'python3' description = [] requires = '' summary = '' in_description = False python_spec_file = [] for line in iter(spec_file): if line.startswith('Summary: '): summary = line elif line.startswith('BuildRequires: '): line = 'BuildRequires: {0:s}-setuptools, {0:s}-devel'.format( python_package) elif line.startswith('Requires: '): requires = line[10:] if python_package == 'python3': requires = requires.replace('python-', 'python3-') requires = requires.replace('python2-', 'python3-') elif line.startswith('%description'): in_description = True elif line.startswith('python setup.py build'): if python_package == 'python3': line = '%py3_build' else: line = '%py2_build' elif line.startswith('python setup.py install'): if python_package == 'python3': line = '%py3_install' else: line = '%py2_install' elif line.startswith('%files'): lines = [ '%files -n {0:s}-%{{name}}'.format(python_package), '%defattr(644,root,root,755)', '%license LICENSE', '%doc ACKNOWLEDGEMENTS AUTHORS README'] if python_package == 'python3': lines.extend([ '%{python3_sitelib}/dtfabric/*.py', '%{python3_sitelib}/dtfabric/*/*.py', '%{python3_sitelib}/dtfabric*.egg-info/*', '', '%exclude %{_prefix}/share/doc/*', '%exclude %{python3_sitelib}/dtfabric/__pycache__/*', '%exclude %{python3_sitelib}/dtfabric/*/__pycache__/*', '%exclude %{_bindir}/*.py']) else: lines.extend([ '%{python2_sitelib}/dtfabric/*.py', '%{python2_sitelib}/dtfabric/*/*.py', '%{python2_sitelib}/dtfabric*.egg-info/*', '', '%exclude %{_prefix}/share/doc/*', '%exclude %{python2_sitelib}/dtfabric/*.pyc', '%exclude %{python2_sitelib}/dtfabric/*.pyo', '%exclude %{python2_sitelib}/dtfabric/*/*.pyc', '%exclude 
%{python2_sitelib}/dtfabric/*/*.pyo', '%exclude %{_bindir}/*.py']) python_spec_file.extend(lines) break elif line.startswith('%prep'): in_description = False python_spec_file.append( '%package -n {0:s}-%{{name}}'.format(python_package)) if python_package == 'python2': python_spec_file.extend([ 'Obsoletes: python-dtfabric < %{version}', 'Provides: python-dtfabric = %{version}']) python_spec_file.extend([ 'Requires: {0:s}'.format(requires), '{0:s}'.format(summary), '', '%description -n {0:s}-%{{name}}'.format(python_package)]) python_spec_file.extend(description) elif in_description: # Ignore leading white lines in the description. if not description and not line: continue description.append(line) python_spec_file.append(line) return python_spec_file if version_tuple[0] == 2: encoding = sys.stdin.encoding # pylint: disable=invalid-name # Note that sys.stdin.encoding can be None. if not encoding: encoding = locale.getpreferredencoding() # Make sure the default encoding is set correctly otherwise on Python 2 # setup.py sdist will fail to include filenames with Unicode characters. 
reload(sys) # pylint: disable=undefined-variable sys.setdefaultencoding(encoding) # pylint: disable=no-member dtfabric_description = ( 'Data type fabric (dtfabric)') dtfabric_long_description = ( 'dtFabric, or data type fabric, is a project to manage data types and ' 'structures, as used in the libyal projects.') setup( name='dtfabric', version=dtfabric.__version__, description=dtfabric_description, long_description=dtfabric_long_description, license='Apache License, Version 2.0', url='https://github.com/libyal/dtfabric', maintainer='Joachim Metz', maintainer_email='joachim.metz@gmail.com', cmdclass={ 'bdist_msi': BdistMSICommand, 'bdist_rpm': BdistRPMCommand}, classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Console', 'Operating System :: OS Independent', 'Programming Language :: Python', ], packages=find_packages('.', exclude=[ 'scripts', 'tests', 'tests.*', 'utils']), package_dir={ 'dtfabric': 'dtfabric' }, scripts=glob.glob(os.path.join('scripts', '[a-z]*.py')), data_files=[ ('share/doc/dtfabric', [ 'ACKNOWLEDGEMENTS', 'AUTHORS', 'LICENSE', 'README']), ], ) dtfabric-20190120/test_data/000077500000000000000000000000001342102721300154735ustar00rootroot00000000000000dtfabric-20190120/test_data/Notepad.lnk000066400000000000000000000026221342102721300175750ustar00rootroot00000000000000LÀFŸ `€$ð~Äâ˜d~ÄH%ó"zÄóPàOÐ ê:i¢Ø+00/C:\<1 1 }WINDOWS&ï¾ 1 } 1¸WINDOWS@1 1 }system32(ï¾ 1 } 1¸system32H21h notepad.exe.ï¾ 1Ã| 1¸notepad.exeQ0PØk5HDDC:\WINDOWS\system32\notepad.exe)@%SystemRoot%\system32\shell32.dll,-22563+..\..\..\..\..\WINDOWS\system32\notepad.exe%HOMEDRIVE%%HOMEPATH% %SystemRoot%\system32\notepad.exeP;´ÏF‘|P;(;ˆÐ\ ‘|E ‘|N ‘|Ö$LÑ(A‘|pý|ÄÏ”Ðî|p ‘|Àä—|o>‘|b>‘|%SystemRoot%\system32\notepad.exe>C> ÔäÑ6Ôp‘|ÿÿÿÿ8ÓÐìÐî|=‘|ÿÿÿÿb>‘|¨m‘|dÑÔÜÐE ‘|N ‘|pýôÐpý|tÃF‘|$ÔøÑ.žŸv´Ñù£€|ø,äÑ Ô Ô9š€|ÑÜÿî|ˆD‘|äÑD‘|ÚE‘||шڞŸvÔÄÑf|ˆÚq| Ô9š€| Ñ¤ÑÜÿó™ƒ|x|ÿÿÿÿq|zëŸvˆÚ$ÔˆÚ€ØÐëŸvˆÚC:\WINDOWSÂÂ@Ò3R‘| 
%©dtfabric-20190120/test_data/boolean.yaml000066400000000000000000000000761342102721300200010ustar00rootroot00000000000000name: bool type: boolean attributes: size: 1 units: bytes dtfabric-20190120/test_data/character.yaml000066400000000000000000000001001342102721300203020ustar00rootroot00000000000000name: char type: character attributes: size: 1 units: bytes dtfabric-20190120/test_data/constant.yaml000066400000000000000000000003671342102721300202160ustar00rootroot00000000000000name: maximum_number_of_back_traces aliases: [AVRF_MAX_TRACES] type: constant description: Application verifier resource enumeration maximum number of back traces urls: ['https://msdn.microsoft.com/en-us/library/bb432193(v=vs.85).aspx'] value: 32 dtfabric-20190120/test_data/definitions/000077500000000000000000000000001342102721300200065ustar00rootroot00000000000000dtfabric-20190120/test_data/definitions/booleans.yaml000066400000000000000000000007471342102721300225040ustar00rootroot00000000000000name: bool8 aliases: [BOOLEAN] type: boolean description: 8-bit boolean type attributes: size: 1 units: bytes false_value: 0 true_value: 1 --- name: bool16 type: boolean description: 16-bit boolean type attributes: byte_order: little-endian size: 2 units: bytes false_value: 0 true_value: 1 --- name: bool32 aliases: [BOOL] type: boolean description: 32-bit boolean type attributes: byte_order: little-endian size: 4 units: bytes false_value: 0 true_value: 1 dtfabric-20190120/test_data/definitions/characters.yaml000066400000000000000000000006401342102721300230110ustar00rootroot00000000000000name: char aliases: [CHAR] type: character description: 8-bit narrow character type attributes: size: 1 units: bytes --- name: wchar16 aliases: [WCHAR] type: character description: 16-bit wide character type attributes: byte_order: little-endian size: 2 units: bytes --- name: wchar32 type: character description: 32-bit wide character type attributes: byte_order: little-endian size: 4 units: bytes 
dtfabric-20190120/test_data/definitions/floating-points.yaml000066400000000000000000000005601342102721300240100ustar00rootroot00000000000000name: float32 aliases: [float, FLOAT] type: floating-point description: 32-bit single precision floating-point type attributes: byte_order: little-endian size: 4 units: bytes --- name: float64 aliases: [double, DOUBLE] type: floating-point description: 64-bit double precision floating-point type attributes: byte_order: little-endian size: 8 units: bytes dtfabric-20190120/test_data/definitions/integers.yaml000066400000000000000000000024571342102721300225220ustar00rootroot00000000000000name: int8 type: integer description: 8-bit signed integer type attributes: format: signed size: 1 units: bytes --- name: int16 type: integer description: 16-bit signed integer type attributes: byte_order: little-endian format: signed size: 2 units: bytes --- name: int32 aliases: [LONG, LONG32] type: integer description: 32-bit signed integer type attributes: byte_order: little-endian format: signed size: 4 units: bytes --- name: int64 aliases: [LONG64] type: integer description: 64-bit signed integer type attributes: byte_order: little-endian format: signed size: 8 units: bytes --- name: uint8 aliases: [BYTE] type: integer description: 8-bit unsigned integer type attributes: format: unsigned size: 1 units: bytes --- name: uint16 aliases: [WORD] type: integer description: 16-bit unsigned integer type attributes: byte_order: little-endian format: unsigned size: 2 units: bytes --- name: uint32 aliases: [DWORD, DWORD32, ULONG, ULONG32] type: integer description: 32-bit unsigned integer type attributes: byte_order: little-endian format: unsigned size: 4 units: bytes --- name: uint64 aliases: [DWORDLONG, DWORD64, ULONG64] type: integer description: 64-bit unsigned integer type attributes: byte_order: little-endian format: unsigned size: 8 units: bytes 
dtfabric-20190120/test_data/enumeration.yaml000066400000000000000000000014071342102721300207070ustar00rootroot00000000000000name: object_information_type aliases: [MINIDUMP_HANDLE_OBJECT_INFORMATION_TYPE] type: enumeration description: Minidump object information type urls: ['https://msdn.microsoft.com/en-us/library/windows/desktop/ms680376(v=vs.85).aspx'] values: - name: MiniHandleObjectInformationNone number: 0 description: No object-specific information available - name: MiniThreadInformation1 number: 1 description: Thread object information - name: MiniMutantInformation1 number: 2 description: Mutant object information - name: MiniMutantInformation2 number: 3 description: Mutant object information - name: MiniProcessInformation1 number: 4 description: Process object information - name: MiniProcessInformation2 number: 5 description: Process object information dtfabric-20190120/test_data/floating-point.yaml000066400000000000000000000002651342102721300213140ustar00rootroot00000000000000name: float32 aliases: [float, FLOAT] type: floating-point description: 32-bit single precision floating-point type attributes: byte_order: little-endian size: 4 units: bytes dtfabric-20190120/test_data/integer.yaml000066400000000000000000000005521342102721300200160ustar00rootroot00000000000000name: int32 type: integer attributes: format: signed size: 4 units: bytes --- name: int32be type: integer attributes: byte_order: big-endian format: signed size: 4 units: bytes --- name: int32le type: integer attributes: byte_order: little-endian format: signed size: 4 units: bytes --- name: int type: integer attributes: format: signed dtfabric-20190120/test_data/sequence.yaml000066400000000000000000000006441342102721300201730ustar00rootroot00000000000000name: int32 type: integer description: 32-bit signed integer type attributes: byte_order: little-endian format: signed size: 4 units: bytes --- name: vector4 aliases: [VECTOR] type: sequence description: 4-dimensional vector element_data_type: 
int32 number_of_elements: 4 --- name: triangle4 aliases: [TRIANGLE] type: sequence description: 4-dimensional triangle element_data_type: vector4 number_of_elements: 3 dtfabric-20190120/test_data/sequence_with_structure.yaml000066400000000000000000000006141342102721300233430ustar00rootroot00000000000000name: int32 type: integer attributes: format: signed size: 4 units: bytes --- name: vector type: structure attributes: byte_order: little-endian members: - name: number_of_elements data_type: int32 - name: valuse type: sequence element_data_type: int32 number_of_elements: vector.number_of_elements --- name: vectors type: sequence element_data_type: vector number_of_elements: 3 dtfabric-20190120/test_data/stream.yaml000066400000000000000000000011541342102721300176530ustar00rootroot00000000000000name: wchar16 aliases: [WCHAR] type: character description: 16-bit wide character type attributes: byte_order: little-endian size: 2 units: bytes --- name: utf16le_stream aliases: [UTF16LE] type: stream description: UTF-16 little-endian stream element_data_type: wchar16 number_of_elements: 8 --- name: utf16le_stream_with_size type: stream description: UTF-16 little-endian stream with size element_data_type: wchar16 elements_data_size: size --- name: utf16le_stream_with_terminator type: stream description: UTF-16 little-endian stream with terminator element_data_type: wchar16 elements_terminator: "\x00\x00" dtfabric-20190120/test_data/string.yaml000066400000000000000000000007741342102721300176750ustar00rootroot00000000000000name: char type: character attributes: byte_order: little-endian size: 1 units: bytes --- name: wchar16 aliases: [WCHAR] type: character description: 16-bit wide character type attributes: byte_order: little-endian size: 2 units: bytes --- name: utf8_string type: string description: UTF-8 string encoding: utf8 element_data_type: char elements_terminator: "\x00" --- name: utf16_string type: string description: UTF-16 string encoding: utf-16-le 
element_data_type: wchar16 number_of_elements: 8 dtfabric-20190120/test_data/string_array.yaml000066400000000000000000000014151342102721300210640ustar00rootroot00000000000000name: char type: integer attributes: format: signed size: 1 units: bytes --- name: uint32 type: integer attributes: format: signed size: 4 units: bytes --- name: cstring type: string encoding: ascii element_data_type: char elements_terminator: "\x00" --- name: string_array type: structure attributes: byte_order: little-endian members: - name: number_of_strings data_type: uint32 - name: strings type: sequence element_data_type: cstring number_of_elements: string_array.number_of_strings --- name: string_array_with_size type: structure attributes: byte_order: little-endian members: - name: strings_data_size data_type: uint32 - name: strings type: sequence element_data_type: cstring elements_data_size: string_array_with_size.strings_data_size dtfabric-20190120/test_data/structure.yaml000066400000000000000000000017261342102721300204250ustar00rootroot00000000000000name: int32 type: integer description: 32-bit signed integer type attributes: format: signed size: 4 units: bytes --- name: point3d aliases: [POINT] type: structure description: Point in 3 dimensional space. attributes: byte_order: little-endian members: - name: x aliases: [XCOORD] data_type: int32 - name: y data_type: int32 - name: z data_type: int32 --- name: triangle3d type: structure description: Triangle in 3 dimensional space. members: - name: a data_type: point3d - name: b data_type: point3d - name: c data_type: point3d --- name: box3d type: structure description: Box in 3 dimensional space. members: - name: triangles type: sequence element_data_type: triangle3d number_of_elements: 12 --- name: sphere3d type: structure description: Sphere in 3 dimensional space. 
members: - name: number_of_triangles data_type: int32 - name: triangles type: sequence element_data_type: triangle3d number_of_elements: sphere3d.number_of_triangles dtfabric-20190120/test_data/structure_family.yaml000066400000000000000000000060371342102721300217660ustar00rootroot00000000000000name: byte type: integer attributes: format: unsigned size: 1 units: bytes --- name: uint16 type: integer attributes: format: unsigned size: 2 units: bytes --- name: uint32 type: integer attributes: format: unsigned size: 4 units: bytes --- name: uint64 type: integer attributes: format: unsigned size: 8 units: bytes --- name: group_descriptor_ext2 type: structure description: Group descriptor members: - name: block_bitmap_block_number data_type: uint32 - name: inode_bitmap_block_number data_type: uint32 - name: inode_table_block_number data_type: uint32 - name: number_of_unallocated_blocks data_type: uint16 - name: number_of_unallocated_inodes data_type: uint16 - name: number_of_directories data_type: uint16 - name: padding1 data_type: uint16 - name: reserved1 type: stream element_data_type: byte elements_data_size: 12 --- name: group_descriptor_ext4 type: structure description: Group descriptor members: - name: block_bitmap_block_number_lower data_type: uint32 - name: inode_bitmap_block_number_lower data_type: uint32 - name: inode_table_block_number_lower data_type: uint32 - name: number_of_unallocated_blocks_lower data_type: uint16 - name: number_of_unallocated_inodes_lower data_type: uint16 - name: number_of_directories_lower data_type: uint16 - name: block_group_flags data_type: uint16 - name: exclude_bitmap_block_number_lower data_type: uint32 - name: block_bitmap_checksum_lower data_type: uint16 - name: inode_bitmap_checksum_lower data_type: uint16 - name: number_of_unused_inodes data_type: uint16 - name: checksum data_type: uint16 - name: block_bitmap_block_number_upper data_type: uint32 - name: inode_bitmap_block_number_upper data_type: uint32 - name: 
inode_table_block_number_upper data_type: uint32 - name: number_of_unallocated_blocks_upper data_type: uint16 - name: number_of_unallocated_inodes_upper data_type: uint16 - name: number_of_directories_upper data_type: uint16 - name: number_of_unused_inodes_upper data_type: uint16 - name: exclude_bitmap_block_number_upper data_type: uint32 - name: block_bitmap_checksum_upper data_type: uint16 - name: inode_bitmap_checksum_upper data_type: uint16 - name: reserved1 data_type: uint32 --- name: group_descriptor_runtime type: structure description: Group descriptor members: - name: block_bitmap_block_number data_type: uint64 - name: inode_bitmap_block_number data_type: uint64 - name: inode_table_block_number data_type: uint64 - name: number_of_unallocated_blocks data_type: uint32 - name: number_of_unallocated_inodes data_type: uint32 - name: number_of_directories data_type: uint32 - name: block_group_flags data_type: uint16 - name: exclude_bitmap_block_number data_type: uint64 - name: block_bitmap_checksum data_type: uint32 - name: inode_bitmap_checksum data_type: uint32 - name: number_of_unused_inodes data_type: uint32 --- name: group_descriptor type: structure-family description: Group descriptor runtime: group_descriptor_runtime members: - group_descriptor_ext2 - group_descriptor_ext4 dtfabric-20190120/test_data/structure_with_sections.yaml000066400000000000000000000004351342102721300233630ustar00rootroot00000000000000name: int32 type: integer attributes: format: signed size: 4 units: bytes --- name: 3dsphere type: structure members: - section: 3dcoordinate - name: x data_type: int32 - name: y data_type: int32 - name: z data_type: int32 - section: size - name: radius data_type: int32 dtfabric-20190120/test_data/structure_with_sequence.yaml000066400000000000000000000006071342102721300233450ustar00rootroot00000000000000name: byte type: integer attributes: format: unsigned size: 1 units: bytes --- name: uint32 type: integer attributes: format: unsigned size: 4 units: 
bytes --- name: extension_block type: structure members: - name: size data_type: uint32 - name: data type: sequence element_data_type: byte number_of_elements: 0 if extension_block.size == 0 else extension_block.size - 4 dtfabric-20190120/test_data/structure_with_stream.yaml000066400000000000000000000006051342102721300230260ustar00rootroot00000000000000name: byte type: integer attributes: format: unsigned size: 1 units: bytes --- name: uint32 type: integer attributes: format: unsigned size: 4 units: bytes --- name: extension_block type: structure members: - name: size data_type: uint32 - name: data type: stream element_data_type: byte elements_data_size: 0 if extension_block.size == 0 else extension_block.size - 4 dtfabric-20190120/test_data/structure_with_string.yaml000066400000000000000000000005611342102721300230420ustar00rootroot00000000000000name: wchar16 type: integer attributes: format: signed size: 2 units: bytes --- name: uint16 type: integer attributes: format: unsigned size: 2 units: bytes --- name: utf16_string type: structure members: - name: size data_type: uint16 - name: text type: string encoding: utf-16-le element_data_type: wchar16 elements_data_size: utf16_string.size dtfabric-20190120/test_data/structure_with_union.yaml000066400000000000000000000005151342102721300226630ustar00rootroot00000000000000name: uint32 type: integer attributes: format: unsigned size: 4 units: bytes --- name: float32 type: floating-point attributes: byte_order: little-endian size: 4 units: bytes --- name: intfloat32 type: structure members: - type: union members: - name: int data_type: uint32 - name: float data_type: float32 dtfabric-20190120/test_data/union.yaml000066400000000000000000000005351342102721300175120ustar00rootroot00000000000000name: int16 type: integer description: 16-bit signed integer type attributes: format: signed size: 2 units: bytes --- name: int32 type: integer description: 32-bit signed integer type attributes: format: signed size: 4 units: bytes --- 
name: my_union type: union members: - name: long data_type: int32 - name: short data_type: int16 dtfabric-20190120/test_data/uuid.yaml000066400000000000000000000002431342102721300173240ustar00rootroot00000000000000name: uuid aliases: [guid, GUID, UUID] type: uuid description: Globally or Universal unique identifier (GUID or UUID) type attributes: byte_order: little-endian dtfabric-20190120/test_dependencies.ini000066400000000000000000000006461342102721300177170ustar00rootroot00000000000000[funcsigs] dpkg_name: python-funcsigs minimum_version: 1.0.2 python2_only: true rpm_name: python2-funcsigs version_property: __version__ [mock] dpkg_name: python-mock minimum_version: 2.0.0 rpm_name: python2-mock version_property: __version__ [pbr] dpkg_name: python-pbr minimum_version: 4.2.0 rpm_name: python2-pbr [six] dpkg_name: python-six minimum_version: 1.1.0 rpm_name: python2-six version_property: __version__ dtfabric-20190120/tests/000077500000000000000000000000001342102721300146655ustar00rootroot00000000000000dtfabric-20190120/tests/__init__.py000066400000000000000000000000301342102721300167670ustar00rootroot00000000000000# -*- coding: utf-8 -*- dtfabric-20190120/tests/data_types.py000066400000000000000000000354421342102721300174040ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Tests for the data type definitions.""" from __future__ import unicode_literals import unittest from dtfabric import data_types from dtfabric import definitions from tests import test_lib class DataTypeDefinitionTest(test_lib.BaseTestCase): """Data type definition tests.""" def testIsComposite(self): """Tests the IsComposite function.""" data_type_definition = data_types.DataTypeDefinition( 'int32', aliases=['LONG', 'LONG32'], description='signed 32-bit integer') result = data_type_definition.IsComposite() self.assertFalse(result) class StorageDataTypeDefinitionTest(test_lib.BaseTestCase): """Storage data type definition tests.""" def testInitialize(self): """Tests the __init__ function.""" 
data_type_definition = data_types.StorageDataTypeDefinition( 'int32', aliases=['LONG', 'LONG32'], description='signed 32-bit integer') self.assertIsNotNone(data_type_definition) class FixedSizeDataTypeDefinitionTest(test_lib.BaseTestCase): """Fixed-size data type definition tests.""" def testInitialize(self): """Tests the __init__ function.""" data_type_definition = data_types.FixedSizeDataTypeDefinition( 'int32', aliases=['LONG', 'LONG32'], description='signed 32-bit integer') self.assertIsNotNone(data_type_definition) def testGetByteSize(self): """Tests the GetByteSize function.""" data_type_definition = data_types.FixedSizeDataTypeDefinition( 'int32', aliases=['LONG', 'LONG32'], description='signed 32-bit integer') byte_size = data_type_definition.GetByteSize() self.assertIsNone(byte_size) data_type_definition.size = 4 byte_size = data_type_definition.GetByteSize() self.assertEqual(byte_size, 4) class BooleanDefinitionTest(test_lib.BaseTestCase): """Boolean data type definition tests.""" def testInitialize(self): """Tests the __init__ function.""" data_type_definition = data_types.BooleanDefinition( 'bool32', aliases=['BOOL'], description='boolean') self.assertIsNotNone(data_type_definition) class CharacterDefinitionTest(test_lib.BaseTestCase): """Character data type definition tests.""" class FloatingPointDefinitionTest(test_lib.BaseTestCase): """Floating-point data type definition tests.""" class IntegerDefinitionTest(test_lib.BaseTestCase): """Integer data type definition tests.""" def testInitialize(self): """Tests the __init__ function.""" data_type_definition = data_types.IntegerDefinition( 'int32', aliases=['LONG', 'LONG32'], description='signed 32-bit integer') self.assertIsNotNone(data_type_definition) class UUIDDefinitionTest(test_lib.BaseTestCase): """UUID data type definition tests.""" def testInitialize(self): """Tests the __init__ function.""" data_type_definition = data_types.UUIDDefinition( 'guid', aliases=['GUID'], description='GUID') 
self.assertIsNotNone(data_type_definition) def testIsComposite(self): """Tests the IsComposite function.""" data_type_definition = data_types.UUIDDefinition( 'guid', aliases=['GUID'], description='GUID') result = data_type_definition.IsComposite() self.assertTrue(result) class ElementSequenceDataTypeDefinitionTest(test_lib.BaseTestCase): """Element sequence data type definition tests.""" def testInitialize(self): """Tests the __init__ function.""" element_definition = data_types.IntegerDefinition('int32') data_type_definition = data_types.ElementSequenceDataTypeDefinition( 'offsets', element_definition, description='offsets array') self.assertIsNotNone(data_type_definition) def testGetByteSize(self): """Tests the GetByteSize function.""" data_type_definition = data_types.ElementSequenceDataTypeDefinition( 'offsets', None, description='offsets array') byte_size = data_type_definition.GetByteSize() self.assertIsNone(byte_size) element_definition = data_types.IntegerDefinition('int32') element_definition.format = definitions.FORMAT_SIGNED element_definition.size = 4 data_type_definition.element_data_type_definition = element_definition byte_size = data_type_definition.GetByteSize() self.assertIsNone(byte_size) data_type_definition.elements_data_size = 0 data_type_definition.number_of_elements = 32 byte_size = data_type_definition.GetByteSize() self.assertEqual(byte_size, 128) data_type_definition.elements_data_size = 128 data_type_definition.number_of_elements = 0 byte_size = data_type_definition.GetByteSize() self.assertEqual(byte_size, 128) # TODO: test self.element_data_type_definition.GetByteSize() returns None class SequenceDefinitionTest(test_lib.BaseTestCase): """Sequence data type definition tests.""" class StreamDefinitionTest(test_lib.BaseTestCase): """Stream data type definition tests.""" class StringDefinitionTest(test_lib.BaseTestCase): """String data type definition tests.""" def testInitialize(self): """Tests the __init__ function.""" element_definition 
= data_types.IntegerDefinition('wchar16') data_type_definition = data_types.StringDefinition( 'utf16', element_definition, description='UTF-16 formatted string') self.assertIsNotNone(data_type_definition) def testGetByteSize(self): """Tests the GetByteSize function.""" data_type_definition = data_types.StringDefinition( 'utf16', None, description='UTF-16 formatted string') byte_size = data_type_definition.GetByteSize() self.assertIsNone(byte_size) element_definition = data_types.IntegerDefinition('wchar16') element_definition.format = definitions.FORMAT_SIGNED element_definition.size = 2 data_type_definition.element_data_type_definition = element_definition byte_size = data_type_definition.GetByteSize() self.assertIsNone(byte_size) data_type_definition.number_of_elements = 32 byte_size = data_type_definition.GetByteSize() self.assertEqual(byte_size, 64) def testIsComposite(self): """Tests the IsComposite function.""" data_type_definition = data_types.StringDefinition( 'utf16', None, description='UTF-16 formatted string') result = data_type_definition.IsComposite() self.assertTrue(result) class DataTypeDefinitionWithMembersTest(test_lib.BaseTestCase): """Data type definition with members tests.""" # TODO: add tests for AddMemberDefinition @test_lib.skipUnlessHasTestFile(['structure.yaml']) class MemberDataTypeDefinitionTest(test_lib.BaseTestCase): """Member data type definition tests.""" # pylint: disable=protected-access def testInitialize(self): """Tests the __init__ function.""" definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) member_definition = definitions_registry.GetDefinitionByName('int32') data_type_definition = data_types.MemberDataTypeDefinition( 'my_struct_member', member_definition, aliases=['MY_STRUCT_MEMBER'], data_type='int32', description='my structure member') self.assertIsNotNone(data_type_definition) def testGetByteSize(self): """Tests the GetByteSize 
function.""" definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) member_definition = definitions_registry.GetDefinitionByName('int32') data_type_definition = data_types.MemberDataTypeDefinition( 'my_struct_member', None, aliases=['MY_STRUCT_MEMBER'], data_type='int32', description='my structure member') byte_size = data_type_definition.GetByteSize() self.assertIsNone(byte_size) data_type_definition.member_data_type_definition = member_definition byte_size = data_type_definition.GetByteSize() self.assertEqual(byte_size, 4) def testIsComposite(self): """Tests the IsComposite function.""" definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) member_definition = definitions_registry.GetDefinitionByName('int32') data_type_definition = data_types.MemberDataTypeDefinition( 'my_struct_member', None, aliases=['MY_STRUCT_MEMBER'], data_type='int32', description='my structure member') result = data_type_definition.IsComposite() self.assertIsNone(result) data_type_definition.member_data_type_definition = member_definition result = data_type_definition.IsComposite() self.assertFalse(result) class MemberSectionDefinitionTest(test_lib.BaseTestCase): """Member section definition tests.""" def testInitialize(self): """Tests the __init__ function.""" section_definition = data_types.MemberSectionDefinition( 'my_struct_section') self.assertIsNotNone(section_definition) class StructureDefinitionTest(test_lib.BaseTestCase): """Structure data type definition tests.""" @test_lib.skipUnlessHasTestFile(['structure.yaml']) def testGetByteSize(self): """Tests the GetByteSize function.""" data_type_definition = data_types.StructureDefinition( 'my_struct_type', aliases=['MY_STRUCT_TYPE'], description='my structure type') byte_size = data_type_definition.GetByteSize() self.assertIsNone(byte_size) definitions_file = 
self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) member_definition = definitions_registry.GetDefinitionByName('int32') structure_member_definition = data_types.MemberDataTypeDefinition( 'my_struct_member', member_definition, aliases=['MY_STRUCT_MEMBER'], data_type='int32', description='my structure member') data_type_definition.AddMemberDefinition(structure_member_definition) byte_size = data_type_definition.GetByteSize() self.assertEqual(byte_size, 4) def testIsComposite(self): """Tests the IsComposite function.""" data_type_definition = data_types.StructureDefinition( 'my_struct_type', aliases=['MY_STRUCT_TYPE'], description='my structure type') result = data_type_definition.IsComposite() self.assertTrue(result) class UnionDefinitionTest(test_lib.BaseTestCase): """Union data type definition tests.""" @test_lib.skipUnlessHasTestFile(['union.yaml']) def testGetByteSize(self): """Tests the GetByteSize function.""" data_type_definition = data_types.UnionDefinition( 'my_union_type', aliases=['MY_UNION_TYPE'], description='my union type') byte_size = data_type_definition.GetByteSize() self.assertIsNone(byte_size) definitions_file = self._GetTestFilePath(['union.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) member_definition = definitions_registry.GetDefinitionByName('int32') union_member_definition = data_types.MemberDataTypeDefinition( 'my_union_member', member_definition, aliases=['MY_UNION_MEMBER'], data_type='int32', description='my union member') data_type_definition.AddMemberDefinition(union_member_definition) byte_size = data_type_definition.GetByteSize() self.assertEqual(byte_size, 4) # TODO: test member_definition.GetByteSize() returns None def testIsComposite(self): """Tests the IsComposite function.""" data_type_definition = data_types.UnionDefinition( 'my_union_type', aliases=['MY_UNION_TYPE'], description='my union type') result = 
data_type_definition.IsComposite() self.assertTrue(result) class SemanticDataTypeDefinitionTest(test_lib.BaseTestCase): """Semantic data type definition tests.""" # pylint: disable=assignment-from-none def testGetByteSize(self): """Tests the GetByteSize function.""" data_type_definition = data_types.SemanticDataTypeDefinition( 'enum', description='enumeration') byte_size = data_type_definition.GetByteSize() self.assertIsNone(byte_size) class ConstantDefinitionTest(test_lib.BaseTestCase): """Constant data type definition tests.""" def testInitialize(self): """Tests the __init__ function.""" data_type_definition = data_types.ConstantDefinition( 'const', description='contant') self.assertIsNotNone(data_type_definition) class EnumerationValueTest(test_lib.BaseTestCase): """Enumeration value tests.""" def testInitialize(self): """Tests the __init__ function.""" enumeration_value = data_types.EnumerationValue('enum_value', 5) self.assertIsNotNone(enumeration_value) class EnumerationDefinitionTest(test_lib.BaseTestCase): """Enumeration data type definition tests.""" def testInitialize(self): """Tests the __init__ function.""" data_type_definition = data_types.EnumerationDefinition( 'enum', description='enumeration') self.assertIsNotNone(data_type_definition) def testAddValue(self): """Tests the AddValue function.""" data_type_definition = data_types.EnumerationDefinition( 'enum', description='enumeration') data_type_definition.AddValue('enum_value', 5, aliases=['value5']) with self.assertRaises(KeyError): data_type_definition.AddValue('enum_value', 7, aliases=['value7']) with self.assertRaises(KeyError): data_type_definition.AddValue('myenum', 5, aliases=['value7']) with self.assertRaises(KeyError): data_type_definition.AddValue('myenum', 7, aliases=['value5']) class LayoutDataTypeDefinitionTest(test_lib.BaseTestCase): """Layout data type definition tests.""" # pylint: disable=assignment-from-none def testGetByteSize(self): """Tests the GetByteSize function.""" 
data_type_definition = data_types.LayoutDataTypeDefinition( 'format', description='data format') byte_size = data_type_definition.GetByteSize() self.assertIsNone(byte_size) def testIsComposite(self): """Tests the IsComposite function.""" data_type_definition = data_types.FormatDefinition( 'format', description='data format') result = data_type_definition.IsComposite() self.assertTrue(result) class FormatDefinitionTest(test_lib.BaseTestCase): """Data format definition tests.""" def testInitialize(self): """Tests the __init__ function.""" data_type_definition = data_types.FormatDefinition( 'format', description='data format') self.assertIsNotNone(data_type_definition) class StructureFamilyDefinitionTest(test_lib.BaseTestCase): """Structure family definition tests.""" def testInitialize(self): """Tests the __init__ function.""" data_type_definition = data_types.StructureFamilyDefinition( 'family', description='structure family') self.assertIsNotNone(data_type_definition) # TODO: add tests for AddMemberDefinition # TODO: add tests for AddRuntimeDefinition if __name__ == '__main__': unittest.main() dtfabric-20190120/tests/py2to3.py000066400000000000000000000026761342102721300164120ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- """Tests for the Python 2 and 3 compatible type definitions.""" from __future__ import unicode_literals import sys import unittest # pylint: disable=no-name-in-module,wrong-import-position if sys.version_info[0] < 3: from mock import MagicMock # pylint: disable=import-error else: from unittest.mock import MagicMock # pylint: disable=import-error from importlib import reload # pylint: disable=redefined-builtin from dtfabric import py2to3 from tests import test_lib as shared_test_lib class Py2To3Test(shared_test_lib.BaseTestCase): """Tests for the Python 2 and 3 compatible type definitions.""" _SYS_MODULE = sys @unittest.skipIf(sys.version_info[0] > 2, 'Python version not supported') def testPython2Definitions(self): 
"""Tests the Python 2 definitions.""" mock_sys = MagicMock(version_info=[2, 7]) self._SYS_MODULE.modules['sys'] = mock_sys reload(py2to3) self._SYS_MODULE.modules['sys'] = self._SYS_MODULE # Make sure to reload the module after clearing the mock. reload(py2to3) def testPython3Definitions(self): """Tests the Python 3 definitions.""" mock_sys = MagicMock(version_info=[3, 4]) self._SYS_MODULE.modules['sys'] = mock_sys reload(py2to3) self._SYS_MODULE.modules['sys'] = self._SYS_MODULE # Make sure to reload the module after clearing the mock. reload(py2to3) if __name__ == '__main__': unittest.main() dtfabric-20190120/tests/reader.py000066400000000000000000001464621342102721300165160ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Tests for the data type definitions readers.""" from __future__ import unicode_literals import io import unittest from dtfabric import data_types from dtfabric import definitions from dtfabric import errors from dtfabric import reader from dtfabric import registry from tests import test_lib # TODO: test errors, such as duplicate structure members. 
class DataTypeDefinitionsReaderTest(test_lib.BaseTestCase): """Data type definitions reader tests.""" # pylint: disable=protected-access def testReadBooleanDataTypeDefinition(self): """Tests the _ReadBooleanDataTypeDefinition function.""" definition_values = { 'aliases': ['BOOL'], 'attributes': { 'size': 4, }, 'description': '32-bit boolean type', } definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.DataTypeDefinitionsReader() data_type_definition = definitions_reader._ReadBooleanDataTypeDefinition( definitions_registry, definition_values, 'bool') self.assertIsNotNone(data_type_definition) self.assertIsInstance(data_type_definition, data_types.BooleanDefinition) def testReadCharacterDataTypeDefinition(self): """Tests the _ReadCharacterDataTypeDefinition function.""" definition_values = { 'aliases': ['CHAR'], 'attributes': { 'size': 1, }, 'description': '8-bit character type', } definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.DataTypeDefinitionsReader() data_type_definition = definitions_reader._ReadCharacterDataTypeDefinition( definitions_registry, definition_values, 'char') self.assertIsNotNone(data_type_definition) self.assertIsInstance(data_type_definition, data_types.CharacterDefinition) def testReadConstantDataTypeDefinition(self): """Tests the _ReadConstantDataTypeDefinition function.""" definition_values = { 'aliases': ['AVRF_MAX_TRACES'], 'description': ( 'Application verifier resource enumeration maximum number of ' 'back traces'), 'value': 32, } definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.DataTypeDefinitionsReader() data_type_definition = ( definitions_reader._ReadConstantDataTypeDefinition( definitions_registry, definition_values, 'const')) self.assertIsNotNone(data_type_definition) self.assertIsInstance(data_type_definition, data_types.ConstantDefinition) # Test with missing value definition. 
del definition_values['value'] with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadConstantDataTypeDefinition( definitions_registry, definition_values, 'const') def testReadDataTypeDefinition(self): """Tests the _ReadDataTypeDefinition function.""" definition_values = { 'aliases': ['LONG', 'LONG32'], 'description': 'signed 32-bit integer type', } definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.DataTypeDefinitionsReader() data_type_definition = definitions_reader._ReadDataTypeDefinition( definitions_registry, definition_values, data_types.IntegerDefinition, 'int32') self.assertIsNotNone(data_type_definition) self.assertIsInstance(data_type_definition, data_types.IntegerDefinition) def testReadDataTypeDefinitionWithMembers(self): """Tests the _ReadDataTypeDefinitionWithMembers function.""" definition_values = { 'aliases': ['POINT'], 'attributes': { 'byte_order': 'big-endian', }, 'description': 'Point in 3 dimensional space.', 'members': [ {'name': 'x', 'data_type': 'int32'}, {'name': 'y', 'data_type': 'int32'}, {'name': 'z', 'data_type': 'int32'}], } definitions_file = self._GetTestFilePath(['definitions', 'integers.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) definitions_reader = reader.DataTypeDefinitionsReader() definition_object = definitions_reader._ReadDataTypeDefinitionWithMembers( definitions_registry, definition_values, data_types.StructureDefinition, 'point3d') self.assertIsNotNone(definition_object) # Test with incorrect byte order. 
definition_values['attributes']['byte_order'] = 'bogus' with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadDataTypeDefinitionWithMembers( definitions_registry, definition_values, data_types.StructureDefinition, 'point3d') definition_values['attributes']['byte_order'] = 'big-endian' @test_lib.skipUnlessHasTestFile(['definitions', 'integers.yaml']) def testReadElementSequenceDataTypeDefinition(self): """Tests the _ReadElementSequenceDataTypeDefinition function.""" definition_values = { 'description': 'vector with 4 elements', 'element_data_type': 'int32', 'number_of_elements': 4, } definitions_file = self._GetTestFilePath(['definitions', 'integers.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) definitions_reader = reader.DataTypeDefinitionsReader() data_type_definition = ( definitions_reader._ReadElementSequenceDataTypeDefinition( definitions_registry, definition_values, data_types.SequenceDefinition, 'vector4')) self.assertIsNotNone(data_type_definition) self.assertIsInstance(data_type_definition, data_types.SequenceDefinition) # Test with attributes. definition_values['attributes'] = {} with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadElementSequenceDataTypeDefinition( definitions_registry, definition_values, data_types.SequenceDefinition, 'vector4') definition_values['attributes'] = None # Test with undefined element data type. definition_values['element_data_type'] = 'bogus' with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadElementSequenceDataTypeDefinition( definitions_registry, definition_values, data_types.SequenceDefinition, 'vector4') definition_values['element_data_type'] = 'int32' # Test with missing element data type definition. 
del definition_values['element_data_type'] with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadElementSequenceDataTypeDefinition( definitions_registry, definition_values, data_types.SequenceDefinition, 'vector4') definition_values['element_data_type'] = 'int32' # Test with missing number of elements definition. del definition_values['number_of_elements'] with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadElementSequenceDataTypeDefinition( definitions_registry, definition_values, data_types.SequenceDefinition, 'vector4') definition_values['number_of_elements'] = 4 # Test with elements data size and number of elements definition set at # at the same time. definition_values['elements_data_size'] = 32 with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadElementSequenceDataTypeDefinition( definitions_registry, definition_values, data_types.SequenceDefinition, 'vector4') del definition_values['elements_data_size'] # Test with unsupported attributes definition. definition_values['attributes'] = {'byte_order': 'little-endian'} with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadElementSequenceDataTypeDefinition( definitions_registry, definition_values, data_types.SequenceDefinition, 'vector4') del definition_values['attributes'] # Test with elements terminator. definition_values = { 'description': 'vector with terminator', 'element_data_type': 'int32', 'elements_terminator': b'\xff\xff\xff\xff', } data_type_definition = ( definitions_reader._ReadElementSequenceDataTypeDefinition( definitions_registry, definition_values, data_types.SequenceDefinition, 'vector4')) self.assertIsNotNone(data_type_definition) self.assertIsInstance(data_type_definition, data_types.SequenceDefinition) # Test with (Unicode) string elements terminator. 
definition_values['elements_terminator'] = '\0' data_type_definition = ( definitions_reader._ReadElementSequenceDataTypeDefinition( definitions_registry, definition_values, data_types.SequenceDefinition, 'vector4')) self.assertIsNotNone(data_type_definition) self.assertIsInstance(data_type_definition, data_types.SequenceDefinition) def testReadEnumerationDataTypeDefinition(self): """Tests the _ReadEnumerationDataTypeDefinition function.""" definition_values = { 'description': 'Minidump object information type', 'values': [ {'description': 'No object-specific information available', 'name': 'MiniHandleObjectInformationNone', 'number': 0}, {'description': 'Thread object information', 'name': 'MiniThreadInformation1', 'number': 1}, ], } definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.DataTypeDefinitionsReader() data_type_definition = ( definitions_reader._ReadEnumerationDataTypeDefinition( definitions_registry, definition_values, 'enum')) self.assertIsNotNone(data_type_definition) self.assertIsInstance( data_type_definition, data_types.EnumerationDefinition) # Test with missing name in first enumeration value definition. del definition_values['values'][0]['name'] with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadEnumerationDataTypeDefinition( definitions_registry, definition_values, 'enum') definition_values['values'][0]['name'] = 'MiniHandleObjectInformationNone' # Test with missing name in successive enumeration value definition. del definition_values['values'][-1]['name'] with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadEnumerationDataTypeDefinition( definitions_registry, definition_values, 'enum') definition_values['values'][-1]['name'] = 'MiniThreadInformation1' # Test with missing value in enumeration number definition. 
del definition_values['values'][-1]['number'] with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadEnumerationDataTypeDefinition( definitions_registry, definition_values, 'enum') definition_values['values'][-1]['number'] = 1 # Test with duplicate enumeration number definition. definition_values['values'].append({ 'description': 'Thread object information', 'name': 'MiniThreadInformation1', 'number': 1}) with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadEnumerationDataTypeDefinition( definitions_registry, definition_values, 'enum') del definition_values['values'][-1] # Test with missing enumeration values definitions. del definition_values['values'] with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadEnumerationDataTypeDefinition( definitions_registry, definition_values, 'enum') def testReadFixedSizeDataTypeDefinition(self): """Tests the _ReadFixedSizeDataTypeDefinition function.""" definition_values = { 'aliases': ['LONG', 'LONG32'], 'attributes': { 'byte_order': 'little-endian', 'size': 4, }, 'description': 'signed 32-bit integer type', } definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.DataTypeDefinitionsReader() data_type_definition = definitions_reader._ReadFixedSizeDataTypeDefinition( definitions_registry, definition_values, data_types.IntegerDefinition, 'int32') self.assertIsNotNone(data_type_definition) self.assertIsInstance(data_type_definition, data_types.IntegerDefinition) self.assertEqual(data_type_definition.size, 4) # Test with incorrect size. 
definition_values['attributes']['size'] = 'bogus' with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadFixedSizeDataTypeDefinition( definitions_registry, definition_values, data_types.IntegerDefinition, 'int32') definition_values['attributes']['size'] = 4 def testReadFloatingPointDataTypeDefinition(self): """Tests the _ReadFloatingPointDataTypeDefinition function.""" definition_values = { 'aliases': ['float', 'FLOAT'], 'attributes': { 'size': 4, }, 'description': '32-bit floating-point type', } definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.DataTypeDefinitionsReader() data_type_definition = ( definitions_reader._ReadFloatingPointDataTypeDefinition( definitions_registry, definition_values, 'float32')) self.assertIsNotNone(data_type_definition) self.assertIsInstance( data_type_definition, data_types.FloatingPointDefinition) def testReadFormatDataTypeDefinition(self): """Tests the _ReadFormatDataTypeDefinition function.""" definition_values = { 'description': 'Windows Shortcut (LNK) file format', 'type': 'format', } definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.DataTypeDefinitionsReader() data_type_definition = definitions_reader._ReadFormatDataTypeDefinition( definitions_registry, definition_values, 'lnk') self.assertIsNotNone(data_type_definition) self.assertIsInstance(data_type_definition, data_types.FormatDefinition) def testReadIntegerDataTypeDefinition(self): """Tests the _ReadIntegerDataTypeDefinition function.""" definition_values = { 'aliases': ['LONG', 'LONG32'], 'attributes': { 'format': 'signed', 'size': 4, }, 'description': 'signed 32-bit integer type', } definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.DataTypeDefinitionsReader() data_type_definition = definitions_reader._ReadIntegerDataTypeDefinition( definitions_registry, definition_values, 'int32') self.assertIsNotNone(data_type_definition) 
self.assertIsInstance(data_type_definition, data_types.IntegerDefinition) # Test with unsupported format attribute. definition_values['attributes']['format'] = 'bogus' with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadIntegerDataTypeDefinition( definitions_registry, definition_values, 'int32') def testReadLayoutDataTypeDefinition(self): """Tests the _ReadLayoutDataTypeDefinition function.""" definition_values = { 'description': 'layout data type', 'attributes': { 'byte_order': 'little-endian', }, } definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.DataTypeDefinitionsReader() data_type_definition = definitions_reader._ReadLayoutDataTypeDefinition( definitions_registry, definition_values, data_types.EnumerationDefinition, 'format') self.assertIsNotNone(data_type_definition) self.assertIsInstance( data_type_definition, data_types.EnumerationDefinition) # Test with incorrect byte order. definition_values['attributes']['byte_order'] = 'bogus' with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadLayoutDataTypeDefinition( definitions_registry, definition_values, data_types.EnumerationDefinition, 'format') definition_values['attributes']['byte_order'] = 'big-endian' @test_lib.skipUnlessHasTestFile(['definitions', 'integers.yaml']) def testReadMemberDataTypeDefinitionMember(self): """Tests the _ReadMemberDataTypeDefinitionMember function.""" definition_values = {'name': 'x', 'data_type': 'int32'} definition_object = data_types.StructureDefinition('point3d') definitions_file = self._GetTestFilePath(['definitions', 'integers.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) definitions_reader = reader.DataTypeDefinitionsReader() definitions_reader._ReadMemberDataTypeDefinitionMember( definitions_registry, definition_values, 'point3d') # TODO: implement. _ = definition_object # Test without definitions values. 
definition_values = {} with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadMemberDataTypeDefinitionMember( definitions_registry, definition_values, 'point3d') # Test definitions values without name. definition_values = {'bogus': 'BOGUS'} with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadMemberDataTypeDefinitionMember( definitions_registry, definition_values, 'point3d') # Test definitions values without data type and type. definition_values = {'name': 'x'} with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadMemberDataTypeDefinitionMember( definitions_registry, definition_values, 'point3d') # Test definitions values with both data type and type. definition_values = {'name': 'x', 'data_type': 'int32', 'type': 'bogus'} with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadMemberDataTypeDefinitionMember( definitions_registry, definition_values, 'point3d') # Test definitions values with unresolvable type. definition_values = {'name': 'x', 'type': 'bogus'} with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadMemberDataTypeDefinitionMember( definitions_registry, definition_values, 'point3d') def testReadSemanticDataTypeDefinition(self): """Tests the _ReadSemanticDataTypeDefinition function.""" definition_values = { 'description': 'semantic data type', } definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.DataTypeDefinitionsReader() data_type_definition = definitions_reader._ReadSemanticDataTypeDefinition( definitions_registry, definition_values, data_types.EnumerationDefinition, 'enum') self.assertIsNotNone(data_type_definition) self.assertIsInstance( data_type_definition, data_types.EnumerationDefinition) # Test with attributes. 
definition_values['attributes'] = {} with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadSemanticDataTypeDefinition( definitions_registry, definition_values, data_types.EnumerationDefinition, 'enum') definition_values['attributes'] = None @test_lib.skipUnlessHasTestFile(['definitions', 'integers.yaml']) def testReadSequenceDataTypeDefinition(self): """Tests the _ReadSequenceDataTypeDefinition function.""" definition_values = { 'description': 'vector with 4 elements', 'element_data_type': 'int32', 'number_of_elements': 4, } definitions_file = self._GetTestFilePath(['definitions', 'integers.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) definitions_reader = reader.DataTypeDefinitionsReader() data_type_definition = ( definitions_reader._ReadSequenceDataTypeDefinition( definitions_registry, definition_values, 'vector4')) self.assertIsNotNone(data_type_definition) self.assertIsInstance(data_type_definition, data_types.SequenceDefinition) def testReadStorageDataTypeDefinition(self): """Tests the _ReadStorageDataTypeDefinition function.""" definition_values = { 'aliases': ['LONG', 'LONG32'], 'attributes': { 'byte_order': 'little-endian', }, 'description': 'signed 32-bit integer type', } definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.DataTypeDefinitionsReader() data_type_definition = definitions_reader._ReadStorageDataTypeDefinition( definitions_registry, definition_values, data_types.IntegerDefinition, 'int32') self.assertIsNotNone(data_type_definition) self.assertIsInstance(data_type_definition, data_types.IntegerDefinition) self.assertEqual( data_type_definition.byte_order, definitions.BYTE_ORDER_LITTLE_ENDIAN) # Test with incorrect byte-order. 
definition_values['attributes']['byte_order'] = 'bogus' with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadStorageDataTypeDefinition( definitions_registry, definition_values, data_types.IntegerDefinition, 'int32') definition_values['attributes']['byte_order'] = 'little-endian' @test_lib.skipUnlessHasTestFile(['definitions', 'integers.yaml']) def testReadStreamDataTypeDefinition(self): """Tests the _ReadStreamDataTypeDefinition function.""" definition_values = { 'description': 'stream with 4 elements', 'element_data_type': 'uint8', 'number_of_elements': 4, } definitions_file = self._GetTestFilePath(['definitions', 'integers.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) definitions_reader = reader.DataTypeDefinitionsReader() data_type_definition = ( definitions_reader._ReadStreamDataTypeDefinition( definitions_registry, definition_values, 'array4')) self.assertIsNotNone(data_type_definition) self.assertIsInstance(data_type_definition, data_types.StreamDefinition) @test_lib.skipUnlessHasTestFile(['definitions', 'characters.yaml']) def testReadStringDataTypeDefinition(self): """Tests the _ReadStringDataTypeDefinition function.""" definition_values = { 'description': 'string with 4 characters', 'encoding': 'ascii', 'element_data_type': 'char', 'number_of_elements': 4, } definitions_file = self._GetTestFilePath([ 'definitions', 'characters.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) definitions_reader = reader.DataTypeDefinitionsReader() data_type_definition = ( definitions_reader._ReadStringDataTypeDefinition( definitions_registry, definition_values, 'string4')) self.assertIsNotNone(data_type_definition) self.assertIsInstance(data_type_definition, data_types.StringDefinition) # Test definitions values without encoding. 
del definition_values['encoding'] with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadStringDataTypeDefinition( definitions_registry, definition_values, 'string4') definition_values['encoding'] = 'ascii' @test_lib.skipUnlessHasTestFile(['definitions', 'integers.yaml']) def testReadStructureDataTypeDefinition(self): """Tests the _ReadStructureDataTypeDefinition function.""" definition_values = { 'aliases': ['POINT'], 'attributes': { 'byte_order': 'big-endian', }, 'description': 'Point in 3 dimensional space.', 'members': [ {'name': 'x', 'data_type': 'int32'}, {'name': 'y', 'data_type': 'int32'}, {'name': 'z', 'data_type': 'int32'}], } definitions_file = self._GetTestFilePath(['definitions', 'integers.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) definitions_reader = reader.DataTypeDefinitionsReader() data_type_definition = ( definitions_reader._ReadStructureDataTypeDefinition( definitions_registry, definition_values, 'point3d')) self.assertIsNotNone(data_type_definition) self.assertIsInstance( data_type_definition, data_types.StructureDefinition) self.assertEqual( data_type_definition.byte_order, definitions.BYTE_ORDER_BIG_ENDIAN) # Test with undefined data type. definition_values['members'][1]['data_type'] = 'bogus' with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadStructureDataTypeDefinition( definitions_registry, definition_values, 'point3d') # Test with missing member definitions. 
del definition_values['members'] with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadStructureDataTypeDefinition( definitions_registry, definition_values, 'point3d') # TODO: add tests for _ReadStructureFamilyDataTypeDefinition @test_lib.skipUnlessHasTestFile(['definitions', 'integers.yaml']) def testReadUnionDataTypeDefinition(self): """Tests the _ReadUnionDataTypeDefinition function.""" definition_values = { 'members': [ {'name': 'long', 'data_type': 'int32'}, {'name': 'short', 'data_type': 'int16'}], } definitions_file = self._GetTestFilePath(['definitions', 'integers.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) definitions_reader = reader.DataTypeDefinitionsReader() data_type_definition = ( definitions_reader._ReadStructureDataTypeDefinition( definitions_registry, definition_values, 'union')) self.assertIsNotNone(data_type_definition) self.assertIsInstance( data_type_definition, data_types.StructureDefinition) # Test with undefined data type. definition_values['members'][1]['data_type'] = 'bogus' with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadStructureDataTypeDefinition( definitions_registry, definition_values, 'point3d') # Test with missing member definitions. 
del definition_values['members'] with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadStructureDataTypeDefinition( definitions_registry, definition_values, 'point3d') def testReadUUIDDataTypeDefinition(self): """Tests the _ReadUUIDDataTypeDefinition function.""" definition_values = { 'aliases': ['guid', 'GUID', 'UUID'], 'attributes': { 'byte_order': 'little-endian', }, 'description': ( 'Globally or Universal unique identifier (GUID or UUID) type'), } definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.DataTypeDefinitionsReader() data_type_definition = definitions_reader._ReadUUIDDataTypeDefinition( definitions_registry, definition_values, 'uuid') self.assertIsNotNone(data_type_definition) self.assertIsInstance(data_type_definition, data_types.UUIDDefinition) # Test with unsupported size. definition_values['attributes']['size'] = 32 with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadUUIDDataTypeDefinition( definitions_registry, definition_values, 'uuid') class DataTypeDefinitionsFileReaderTest(test_lib.BaseTestCase): """Data type definitions file reader tests.""" # pylint: disable=protected-access def testReadDefinition(self): """Tests the _ReadDefinition function.""" definition_values = { 'aliases': ['LONG', 'LONG32'], 'attributes': { 'format': 'signed', 'size': 4, }, 'description': 'signed 32-bit integer type', 'name': 'int32', 'type': 'integer', } definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.DataTypeDefinitionsFileReader() data_type_definition = definitions_reader._ReadDefinition( definitions_registry, definition_values) self.assertIsNotNone(data_type_definition) self.assertIsInstance(data_type_definition, data_types.IntegerDefinition) with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadDefinition(definitions_registry, None) definition_values['type'] = 'bogus' with 
self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadDefinition( definitions_registry, definition_values) @test_lib.skipUnlessHasTestFile(['definitions', 'integers.yaml']) def testReadFile(self): """Tests the ReadFile function.""" definitions_file = self._GetTestFilePath(['definitions', 'integers.yaml']) definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.DataTypeDefinitionsFileReader() definitions_reader.ReadFile(definitions_registry, definitions_file) class YAMLDataTypeDefinitionsFileReaderTest(test_lib.BaseTestCase): """YAML data type definitions reader tests.""" # pylint: disable=protected-access # TODO: add tests for _GetFormatErrorLocation @test_lib.skipUnlessHasTestFile(['boolean.yaml']) def testReadFileObjectBoolean(self): """Tests the ReadFileObject function of a boolean data type.""" definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() definitions_file = self._GetTestFilePath(['boolean.yaml']) with open(definitions_file, 'rb') as file_object: definitions_reader.ReadFileObject(definitions_registry, file_object) self.assertEqual(len(definitions_registry._definitions), 1) data_type_definition = definitions_registry.GetDefinitionByName('bool') self.assertIsInstance(data_type_definition, data_types.BooleanDefinition) self.assertEqual(data_type_definition.name, 'bool') self.assertEqual(data_type_definition.size, 1) self.assertEqual(data_type_definition.units, 'bytes') byte_size = data_type_definition.GetByteSize() self.assertEqual(byte_size, 1) @test_lib.skipUnlessHasTestFile(['character.yaml']) def testReadFileObjectCharacter(self): """Tests the ReadFileObject function of a character data type.""" definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() definitions_file = self._GetTestFilePath(['character.yaml']) with open(definitions_file, 'rb') as file_object: 
definitions_reader.ReadFileObject(definitions_registry, file_object) self.assertEqual(len(definitions_registry._definitions), 1) data_type_definition = definitions_registry.GetDefinitionByName('char') self.assertIsInstance(data_type_definition, data_types.CharacterDefinition) self.assertEqual(data_type_definition.name, 'char') self.assertEqual(data_type_definition.size, 1) self.assertEqual(data_type_definition.units, 'bytes') byte_size = data_type_definition.GetByteSize() self.assertEqual(byte_size, 1) @test_lib.skipUnlessHasTestFile(['constant.yaml']) def testReadFileObjectConstant(self): """Tests the ReadFileObject function of a constant data type.""" definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() definitions_file = self._GetTestFilePath(['constant.yaml']) with open(definitions_file, 'rb') as file_object: definitions_reader.ReadFileObject(definitions_registry, file_object) self.assertEqual(len(definitions_registry._definitions), 1) data_type_definition = definitions_registry.GetDefinitionByName( 'maximum_number_of_back_traces') self.assertIsInstance(data_type_definition, data_types.ConstantDefinition) self.assertEqual( data_type_definition.name, 'maximum_number_of_back_traces') self.assertEqual(data_type_definition.value, 32) @test_lib.skipUnlessHasTestFile(['enumeration.yaml']) def testReadFileObjectEnumeration(self): """Tests the ReadFileObject function of an enumeration data type.""" definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() definitions_file = self._GetTestFilePath(['enumeration.yaml']) with open(definitions_file, 'rb') as file_object: definitions_reader.ReadFileObject(definitions_registry, file_object) self.assertEqual(len(definitions_registry._definitions), 1) data_type_definition = definitions_registry.GetDefinitionByName( 'object_information_type') self.assertIsInstance( data_type_definition, 
data_types.EnumerationDefinition) self.assertEqual(data_type_definition.name, 'object_information_type') self.assertEqual(len(data_type_definition.values), 6) byte_size = data_type_definition.GetByteSize() self.assertIsNone(byte_size) @test_lib.skipUnlessHasTestFile(['floating-point.yaml']) def testReadFileObjectFloatingPoint(self): """Tests the ReadFileObject function of a floating-point data type.""" definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() definitions_file = self._GetTestFilePath(['floating-point.yaml']) with open(definitions_file, 'rb') as file_object: definitions_reader.ReadFileObject(definitions_registry, file_object) self.assertEqual(len(definitions_registry._definitions), 1) data_type_definition = definitions_registry.GetDefinitionByName('float32') self.assertIsInstance( data_type_definition, data_types.FloatingPointDefinition) self.assertEqual(data_type_definition.name, 'float32') self.assertEqual( data_type_definition.byte_order, definitions.BYTE_ORDER_LITTLE_ENDIAN) self.assertEqual(data_type_definition.size, 4) self.assertEqual(data_type_definition.units, 'bytes') byte_size = data_type_definition.GetByteSize() self.assertEqual(byte_size, 4) def testReadFileObjectInteger(self): """Tests the ReadFileObject function of an integer data type.""" definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() yaml_data = '\n'.join([ 'name: int32le', 'type: integer', 'attributes:', ' byte_order: little-endian', ' format: signed', ' size: 4', ' units: bytes']).encode('ascii') with io.BytesIO(initial_bytes=yaml_data) as file_object: definitions_reader.ReadFileObject(definitions_registry, file_object) data_type_definition = definitions_registry.GetDefinitionByName('int32le') self.assertIsInstance(data_type_definition, data_types.IntegerDefinition) self.assertEqual(data_type_definition.name, 'int32le') self.assertEqual( 
data_type_definition.byte_order, definitions.BYTE_ORDER_LITTLE_ENDIAN) self.assertEqual(data_type_definition.format, 'signed') self.assertEqual(data_type_definition.size, 4) self.assertEqual(data_type_definition.units, 'bytes') byte_size = data_type_definition.GetByteSize() self.assertEqual(byte_size, 4) yaml_data = '\n'.join([ 'name: int', 'type: integer', 'attributes:', ' format: signed']).encode('ascii') with io.BytesIO(initial_bytes=yaml_data) as file_object: definitions_reader.ReadFileObject(definitions_registry, file_object) data_type_definition = definitions_registry.GetDefinitionByName('int') byte_size = data_type_definition.GetByteSize() self.assertIsNone(byte_size) yaml_data = '\n'.join([ 'name: int32le', 'type: integer', 'attributes:', ' format: bogus', ' size: 4', ' units: bytes']).encode('ascii') with self.assertRaises(errors.FormatError): with io.BytesIO(initial_bytes=yaml_data) as file_object: definitions_reader.ReadFileObject(definitions_registry, file_object) yaml_data = '\n'.join([ 'name: int32le', 'type: integer', 'attributes:', ' format: signed', ' size: bogus', ' units: bytes']).encode('ascii') with self.assertRaises(errors.FormatError): with io.BytesIO(initial_bytes=yaml_data) as file_object: definitions_reader.ReadFileObject(definitions_registry, file_object) def testReadFileObjectMissingName(self): """Tests the ReadFileObject function with a missing name.""" definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() yaml_data = '\n'.join([ 'type: integer', 'attributes:', ' format: signed', ' size: 1', ' units: bytes']).encode('ascii') file_object = io.BytesIO(initial_bytes=yaml_data) with self.assertRaises(errors.FormatError): definitions_reader.ReadFileObject(definitions_registry, file_object) def testReadFileObjectMissingType(self): """Tests the ReadFileObject function with a missing type.""" definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = 
reader.YAMLDataTypeDefinitionsFileReader() yaml_data = '\n'.join([ 'name: int8', 'attributes:', ' format: signed', ' size: 1', ' units: bytes']).encode('ascii') file_object = io.BytesIO(initial_bytes=yaml_data) with self.assertRaises(errors.FormatError): definitions_reader.ReadFileObject(definitions_registry, file_object) yaml_data = '\n'.join([ 'name: int8', 'type: integer', 'attributes:', ' format: signed', ' size: 1', ' units: bytes', '---', 'name: int16', 'attributes:', ' format: signed', ' size: 2', ' units: bytes']).encode('ascii') file_object = io.BytesIO(initial_bytes=yaml_data) with self.assertRaises(errors.FormatError): definitions_reader.ReadFileObject(definitions_registry, file_object) @test_lib.skipUnlessHasTestFile(['sequence.yaml']) @test_lib.skipUnlessHasTestFile(['sequence_with_structure.yaml']) def testReadFileObjectSequence(self): """Tests the ReadFileObject function of a sequence data type.""" definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() definitions_file = self._GetTestFilePath(['sequence.yaml']) with open(definitions_file, 'rb') as file_object: definitions_reader.ReadFileObject(definitions_registry, file_object) self.assertEqual(len(definitions_registry._definitions), 3) data_type_definition = definitions_registry.GetDefinitionByName('vector4') self.assertIsInstance(data_type_definition, data_types.SequenceDefinition) self.assertEqual(data_type_definition.name, 'vector4') self.assertEqual(data_type_definition.description, '4-dimensional vector') self.assertEqual(data_type_definition.aliases, ['VECTOR']) self.assertEqual(data_type_definition.element_data_type, 'int32') self.assertIsNotNone(data_type_definition.element_data_type_definition) self.assertEqual(data_type_definition.number_of_elements, 4) byte_size = data_type_definition.GetByteSize() self.assertEqual(byte_size, 16) definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = 
reader.YAMLDataTypeDefinitionsFileReader() definitions_file = self._GetTestFilePath(['sequence_with_structure.yaml']) with self.assertRaises(errors.FormatError): with open(definitions_file, 'rb') as file_object: definitions_reader.ReadFileObject(definitions_registry, file_object) @test_lib.skipUnlessHasTestFile(['stream.yaml']) def testReadFileObjectStream(self): """Tests the ReadFileObject function of a stream data type.""" definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() definitions_file = self._GetTestFilePath(['stream.yaml']) with open(definitions_file, 'rb') as file_object: definitions_reader.ReadFileObject(definitions_registry, file_object) self.assertEqual(len(definitions_registry._definitions), 4) data_type_definition = definitions_registry.GetDefinitionByName( 'utf16le_stream') self.assertIsInstance(data_type_definition, data_types.StreamDefinition) self.assertEqual(data_type_definition.name, 'utf16le_stream') self.assertEqual( data_type_definition.description, 'UTF-16 little-endian stream') self.assertEqual(data_type_definition.aliases, ['UTF16LE']) self.assertEqual(data_type_definition.element_data_type, 'wchar16') self.assertIsNotNone(data_type_definition.element_data_type_definition) self.assertEqual(data_type_definition.number_of_elements, 8) byte_size = data_type_definition.GetByteSize() self.assertEqual(byte_size, 16) @test_lib.skipUnlessHasTestFile(['string.yaml']) def testReadFileObjectString(self): """Tests the ReadFileObject function of a string data type.""" definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() definitions_file = self._GetTestFilePath(['string.yaml']) with open(definitions_file, 'rb') as file_object: definitions_reader.ReadFileObject(definitions_registry, file_object) self.assertEqual(len(definitions_registry._definitions), 4) data_type_definition = 
definitions_registry.GetDefinitionByName( 'utf8_string') self.assertIsInstance(data_type_definition, data_types.StringDefinition) self.assertEqual(data_type_definition.name, 'utf8_string') self.assertEqual( data_type_definition.description, 'UTF-8 string') self.assertEqual(data_type_definition.element_data_type, 'char') self.assertIsNotNone(data_type_definition.element_data_type_definition) self.assertEqual(data_type_definition.elements_terminator, b'\x00') self.assertEqual(data_type_definition.encoding, 'utf8') byte_size = data_type_definition.GetByteSize() self.assertIsNone(byte_size) @test_lib.skipUnlessHasTestFile(['structure.yaml']) def testReadFileObjectStructure(self): """Tests the ReadFileObject function of a structure data type.""" definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() definitions_file = self._GetTestFilePath(['structure.yaml']) with open(definitions_file, 'rb') as file_object: definitions_reader.ReadFileObject(definitions_registry, file_object) self.assertEqual(len(definitions_registry._definitions), 5) data_type_definition = definitions_registry.GetDefinitionByName('point3d') self.assertIsInstance(data_type_definition, data_types.StructureDefinition) self.assertEqual(data_type_definition.name, 'point3d') self.assertEqual( data_type_definition.description, 'Point in 3 dimensional space.') self.assertEqual(data_type_definition.aliases, ['POINT']) self.assertEqual(len(data_type_definition.members), 3) member_definition = data_type_definition.members[0] self.assertIsInstance( member_definition, data_types.MemberDataTypeDefinition) self.assertEqual(member_definition.name, 'x') self.assertEqual(member_definition.aliases, ['XCOORD']) self.assertEqual(member_definition.member_data_type, 'int32') self.assertIsNotNone(member_definition.member_data_type_definition) byte_size = data_type_definition.GetByteSize() self.assertEqual(byte_size, 12) 
@test_lib.skipUnlessHasTestFile(['structure.yaml']) def testReadFileObjectStructureWithSequence(self): """Tests the ReadFileObject function of a structure with a sequence.""" definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() definitions_file = self._GetTestFilePath(['structure.yaml']) with open(definitions_file, 'rb') as file_object: definitions_reader.ReadFileObject(definitions_registry, file_object) self.assertEqual(len(definitions_registry._definitions), 5) data_type_definition = definitions_registry.GetDefinitionByName('box3d') self.assertIsInstance(data_type_definition, data_types.StructureDefinition) self.assertEqual(data_type_definition.name, 'box3d') self.assertEqual( data_type_definition.description, 'Box in 3 dimensional space.') self.assertEqual(len(data_type_definition.members), 1) member_definition = data_type_definition.members[0] self.assertIsInstance(member_definition, data_types.SequenceDefinition) self.assertEqual(member_definition.name, 'triangles') self.assertEqual(member_definition.element_data_type, 'triangle3d') self.assertIsNotNone(member_definition.element_data_type_definition) byte_size = data_type_definition.GetByteSize() self.assertEqual(byte_size, 432) @test_lib.skipUnlessHasTestFile(['structure.yaml']) def testReadFileObjectStructureWithSequenceWithExpression(self): """Tests the ReadFileObject function of a structure with a sequence.""" definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() definitions_file = self._GetTestFilePath(['structure.yaml']) with open(definitions_file, 'rb') as file_object: definitions_reader.ReadFileObject(definitions_registry, file_object) self.assertEqual(len(definitions_registry._definitions), 5) data_type_definition = definitions_registry.GetDefinitionByName('sphere3d') self.assertIsInstance(data_type_definition, data_types.StructureDefinition) 
self.assertEqual(data_type_definition.name, 'sphere3d') self.assertEqual( data_type_definition.description, 'Sphere in 3 dimensional space.') self.assertEqual(len(data_type_definition.members), 2) member_definition = data_type_definition.members[1] self.assertIsInstance(member_definition, data_types.SequenceDefinition) self.assertEqual(member_definition.name, 'triangles') self.assertEqual(member_definition.element_data_type, 'triangle3d') self.assertIsNotNone(member_definition.element_data_type_definition) byte_size = data_type_definition.GetByteSize() self.assertIsNone(byte_size) @test_lib.skipUnlessHasTestFile(['structure_with_sections.yaml']) def testReadFileObjectStructureWithSections(self): """Tests the ReadFileObject function of a structure with sections.""" definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() definitions_file = self._GetTestFilePath(['structure_with_sections.yaml']) with open(definitions_file, 'rb') as file_object: definitions_reader.ReadFileObject(definitions_registry, file_object) self.assertEqual(len(definitions_registry._definitions), 2) data_type_definition = definitions_registry.GetDefinitionByName('3dsphere') self.assertIsInstance(data_type_definition, data_types.StructureDefinition) self.assertEqual(data_type_definition.name, '3dsphere') self.assertEqual(len(data_type_definition.members), 4) self.assertEqual(len(data_type_definition.sections), 2) byte_size = data_type_definition.GetByteSize() self.assertEqual(byte_size, 16) @test_lib.skipUnlessHasTestFile(['structure_with_union.yaml']) def testReadFileObjectStructureWithUnion(self): """Tests the ReadFileObject function of a structure with an union.""" definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() definitions_file = self._GetTestFilePath(['structure_with_union.yaml']) with open(definitions_file, 'rb') as file_object: 
definitions_reader.ReadFileObject(definitions_registry, file_object) self.assertEqual(len(definitions_registry._definitions), 3) data_type_definition = definitions_registry.GetDefinitionByName( 'intfloat32') self.assertIsInstance(data_type_definition, data_types.StructureDefinition) self.assertEqual(data_type_definition.name, 'intfloat32') self.assertEqual(len(data_type_definition.members), 1) member_definition = data_type_definition.members[0] self.assertIsInstance(member_definition, data_types.UnionDefinition) self.assertIsNone(member_definition.name) byte_size = data_type_definition.GetByteSize() self.assertEqual(byte_size, 4) @test_lib.skipUnlessHasTestFile(['string_array.yaml']) def testReadFileObjectStructureWithStringArray(self): """Tests the ReadFileObject function of a string array.""" definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() definitions_file = self._GetTestFilePath(['string_array.yaml']) with open(definitions_file, 'rb') as file_object: definitions_reader.ReadFileObject(definitions_registry, file_object) self.assertEqual(len(definitions_registry._definitions), 5) data_type_definition = definitions_registry.GetDefinitionByName( 'string_array') self.assertIsInstance(data_type_definition, data_types.StructureDefinition) self.assertEqual(data_type_definition.name, 'string_array') self.assertEqual(len(data_type_definition.members), 2) byte_size = data_type_definition.GetByteSize() self.assertIsNone(byte_size) @test_lib.skipUnlessHasTestFile(['structure_family.yaml']) def testReadFileObjectStructureFamily(self): """Tests the ReadFileObject function of a structure family data type.""" definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() definitions_file = self._GetTestFilePath(['structure_family.yaml']) with open(definitions_file, 'rb') as file_object: definitions_reader.ReadFileObject(definitions_registry, 
file_object) self.assertEqual(len(definitions_registry._definitions), 8) data_type_definition = definitions_registry.GetDefinitionByName( 'group_descriptor') self.assertIsInstance( data_type_definition, data_types.StructureFamilyDefinition) self.assertEqual(data_type_definition.name, 'group_descriptor') self.assertEqual(data_type_definition.description, 'Group descriptor') self.assertEqual(len(data_type_definition.members), 2) member_definition = data_type_definition.members[0] self.assertIsInstance(member_definition, data_types.StructureDefinition) self.assertEqual(member_definition.name, 'group_descriptor_ext2') byte_size = data_type_definition.GetByteSize() # TODO: determine the size of the largest family member. self.assertIsNone(byte_size) # TODO: add test for member already part of a family. @test_lib.skipUnlessHasTestFile(['uuid.yaml']) def testReadFileObjectUUID(self): """Tests the ReadFileObject function of an UUID data type.""" definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() definitions_file = self._GetTestFilePath(['uuid.yaml']) with open(definitions_file, 'rb') as file_object: definitions_reader.ReadFileObject(definitions_registry, file_object) self.assertEqual(len(definitions_registry._definitions), 1) data_type_definition = definitions_registry.GetDefinitionByName('uuid') self.assertIsInstance( data_type_definition, data_types.UUIDDefinition) self.assertEqual(data_type_definition.name, 'uuid') self.assertEqual( data_type_definition.byte_order, definitions.BYTE_ORDER_LITTLE_ENDIAN) self.assertEqual(data_type_definition.size, 16) self.assertEqual(data_type_definition.units, 'bytes') byte_size = data_type_definition.GetByteSize() self.assertEqual(byte_size, 16) if __name__ == '__main__': unittest.main() dtfabric-20190120/tests/registry.py000066400000000000000000000057431342102721300171200ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Tests for the data type definitions 
registry.""" from __future__ import unicode_literals import unittest from dtfabric import data_types from dtfabric import registry from tests import test_lib class DataTypeDefinitionsRegistryTest(test_lib.BaseTestCase): """Data type definitions registry tests.""" def testRegistration(self): """Tests the RegisterDefinition and DeregisterDefinition functions.""" definitions_registry = registry.DataTypeDefinitionsRegistry() data_type_definition = data_types.IntegerDefinition( 'int32', aliases=['LONG', 'LONG32'], description='signed 32-bit integer') definitions_registry.RegisterDefinition(data_type_definition) with self.assertRaises(KeyError): definitions_registry.RegisterDefinition(data_type_definition) test_definition = data_types.IntegerDefinition( 'LONG', description='long integer') with self.assertRaises(KeyError): definitions_registry.RegisterDefinition(test_definition) test_definition = data_types.IntegerDefinition( 'test', aliases=['LONG'], description='long integer') with self.assertRaises(KeyError): definitions_registry.RegisterDefinition(test_definition) definitions_registry.DeregisterDefinition(data_type_definition) with self.assertRaises(KeyError): definitions_registry.DeregisterDefinition(data_type_definition) def testGetDefinitionByName(self): """Tests the GetDefinitionByName function.""" definitions_registry = registry.DataTypeDefinitionsRegistry() data_type_definition = data_types.IntegerDefinition( 'int32', aliases=['LONG', 'LONG32'], description='signed 32-bit integer') definitions_registry.RegisterDefinition(data_type_definition) test_definition = definitions_registry.GetDefinitionByName('int32') self.assertIsNotNone(test_definition) self.assertIsInstance(test_definition, data_types.IntegerDefinition) test_definition = definitions_registry.GetDefinitionByName('LONG32') self.assertIsNotNone(test_definition) self.assertIsInstance(test_definition, data_types.IntegerDefinition) test_definition = definitions_registry.GetDefinitionByName('bogus') 
self.assertIsNone(test_definition) definitions_registry.DeregisterDefinition(data_type_definition) def testGetDefinitions(self): """Tests the GetDefinitions function.""" definitions_registry = registry.DataTypeDefinitionsRegistry() test_definitions = definitions_registry.GetDefinitions() self.assertEqual(len(test_definitions), 0) data_type_definition = data_types.IntegerDefinition( 'int32', aliases=['LONG', 'LONG32'], description='signed 32-bit integer') definitions_registry.RegisterDefinition(data_type_definition) test_definitions = definitions_registry.GetDefinitions() self.assertEqual(len(test_definitions), 1) definitions_registry.DeregisterDefinition(data_type_definition) if __name__ == '__main__': unittest.main() dtfabric-20190120/tests/runtime/000077500000000000000000000000001342102721300163505ustar00rootroot00000000000000dtfabric-20190120/tests/runtime/__init__.py000066400000000000000000000000301342102721300204520ustar00rootroot00000000000000# -*- coding: utf-8 -*- dtfabric-20190120/tests/runtime/byte_operations.py000066400000000000000000000032241342102721300221310ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Tests for the byte stream operations.""" from __future__ import unicode_literals import unittest from dtfabric import errors from dtfabric.runtime import byte_operations from tests import test_lib class StructOperationTest(test_lib.BaseTestCase): """Python struct-base byte stream operation tests.""" def testInitialize(self): """Tests the __init__ function.""" byte_stream_operation = byte_operations.StructOperation('b') self.assertIsNotNone(byte_stream_operation) with self.assertRaises(errors.FormatError): byte_operations.StructOperation(None) with self.assertRaises(errors.FormatError): byte_operations.StructOperation('z') def testReadFrom(self): """Tests the ReadFrom function.""" byte_stream_operation = byte_operations.StructOperation('i') value = byte_stream_operation.ReadFrom(b'\x12\x34\x56\x78') self.assertEqual(value, tuple([0x78563412])) 
with self.assertRaises(IOError): byte_stream_operation.ReadFrom(None) with self.assertRaises(IOError): byte_stream_operation.ReadFrom(b'\x12\x34\x56') def testWriteTo(self): """Tests the WriteTo function.""" byte_stream_operation = byte_operations.StructOperation('i') byte_stream = byte_stream_operation.WriteTo(tuple([0x78563412])) self.assertEqual(byte_stream, b'\x12\x34\x56\x78') with self.assertRaises(IOError): byte_stream_operation.WriteTo(None) with self.assertRaises(IOError): byte_stream_operation.WriteTo(0x78563412) with self.assertRaises(IOError): byte_stream_operation.WriteTo(tuple([0x9078563412])) if __name__ == '__main__': unittest.main() dtfabric-20190120/tests/runtime/data_maps.py000066400000000000000000001756251342102721300206730ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Tests for the data type maps.""" from __future__ import unicode_literals import unittest import uuid from dtfabric import data_types from dtfabric import definitions from dtfabric import errors from dtfabric.runtime import byte_operations from dtfabric.runtime import data_maps from tests import test_lib class EmptyDataTypeDefinition(data_types.DataTypeDefinition): """Empty data type definition for testing.""" def GetByteSize(self): # pylint: disable=redundant-returns-doc """Determines the byte size of the data type definition. Returns: int: data type size in bytes or None if size cannot be determined. """ return None class TestDataTypeDefinition(data_types.DataTypeDefinition): """Data type definition for testing.""" def GetByteSize(self): # pylint: disable=redundant-returns-doc """Determines the byte size of the data type definition. Returns: int: data type size in bytes or None if size cannot be determined. 
""" return None class DataTypeMapContextTest(test_lib.BaseTestCase): """Data type map context tests.""" def testInitialize(self): """Tests the __init__ function.""" data_type_map_context = data_maps.DataTypeMapContext() self.assertIsNotNone(data_type_map_context) @test_lib.skipUnlessHasTestFile(['integer.yaml']) class DataTypeMapTest(test_lib.BaseTestCase): """Data type map tests.""" def testInitialize(self): """Tests the __init__ function.""" definitions_file = self._GetTestFilePath(['integer.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('int32le') data_type_map = data_maps.DataTypeMap(data_type_definition) self.assertIsNotNone(data_type_map) def testName(self): """Tests the name property.""" definitions_file = self._GetTestFilePath(['integer.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('int32le') data_type_map = data_maps.DataTypeMap(data_type_definition) self.assertEqual(data_type_map.name, 'int32le') def testGetByteSize(self): """Tests the GetByteSize function.""" definitions_file = self._GetTestFilePath(['integer.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('int32le') data_type_map = data_maps.DataTypeMap(data_type_definition) byte_size = data_type_map.GetByteSize() self.assertEqual(byte_size, 4) def testGetSizeHint(self): """Tests the GetSizeHint function.""" definitions_file = self._GetTestFilePath(['integer.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('int32le') data_type_map = data_maps.DataTypeMap(data_type_definition) size_hint = data_type_map.GetSizeHint() self.assertEqual(size_hint, 4) 
@test_lib.skipUnlessHasTestFile(['integer.yaml'])
class StorageDataTypeMapTest(test_lib.BaseTestCase):
  """Storage data type map tests."""

  # pylint: disable=assignment-from-none,protected-access

  # TODO: add tests for _CheckByteStreamSize

  def testGetByteStreamOperation(self):
    """Tests the _GetByteStreamOperation function."""
    definitions_file = self._GetTestFilePath(['integer.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)
    data_type_definition = definitions_registry.GetDefinitionByName('int32le')

    data_type_map = data_maps.StorageDataTypeMap(data_type_definition)
    # The base storage data type map does not define a byte stream operation.
    map_operation = data_type_map._GetByteStreamOperation()
    self.assertIsNone(map_operation)

  def testGetStructByteOrderString(self):
    """Tests the GetStructByteOrderString function."""
    definitions_file = self._GetTestFilePath(['integer.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)

    # Native byte order maps to '='.
    data_type_definition = definitions_registry.GetDefinitionByName('int32')
    data_type_map = data_maps.StorageDataTypeMap(data_type_definition)
    byte_order_string = data_type_map.GetStructByteOrderString()
    self.assertEqual(byte_order_string, '=')

    # Big-endian maps to '>'.
    data_type_definition = definitions_registry.GetDefinitionByName('int32be')
    data_type_map = data_maps.StorageDataTypeMap(data_type_definition)
    byte_order_string = data_type_map.GetStructByteOrderString()
    self.assertEqual(byte_order_string, '>')

    # Little-endian maps to '<'.
    data_type_definition = definitions_registry.GetDefinitionByName('int32le')
    data_type_map = data_maps.StorageDataTypeMap(data_type_definition)
    byte_order_string = data_type_map.GetStructByteOrderString()
    self.assertEqual(byte_order_string, '<')

  def testGetStructFormatString(self):
    """Tests the GetStructFormatString function."""
    definitions_file = self._GetTestFilePath(['integer.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)
    data_type_definition = definitions_registry.GetDefinitionByName('int32le')

    data_type_map = data_maps.StorageDataTypeMap(data_type_definition)
    # The base storage data type map does not define a struct format string.
    format_string = data_type_map.GetStructFormatString()
    self.assertIsNone(format_string)


@test_lib.skipUnlessHasTestFile(['integer.yaml'])
class PrimitiveDataTypeMapTest(test_lib.BaseTestCase):
  """Primitive data type map tests."""

  def testFoldByteStream(self):
    """Tests the FoldByteStream function."""
    definitions_file = self._GetTestFilePath(['integer.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)
    data_type_definition = definitions_registry.GetDefinitionByName('int32le')

    data_type_map = data_maps.PrimitiveDataTypeMap(data_type_definition)
    # The base primitive data type map cannot fold values.
    with self.assertRaises(errors.FoldingError):
      data_type_map.FoldByteStream(1)

  def testFoldValue(self):
    """Tests the FoldValue function."""
    definitions_file = self._GetTestFilePath(['integer.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)
    data_type_definition = definitions_registry.GetDefinitionByName('int32le')

    data_type_map = data_maps.PrimitiveDataTypeMap(data_type_definition)
    # The base primitive data type map passes values through unchanged.
    integer_value = data_type_map.FoldValue(1)
    self.assertEqual(integer_value, 1)

  def testMapByteStream(self):
    """Tests the MapByteStream function."""
    definitions_file = self._GetTestFilePath(['integer.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)
    data_type_definition = definitions_registry.GetDefinitionByName('int32le')

    data_type_map = data_maps.PrimitiveDataTypeMap(data_type_definition)
    # The base primitive data type map cannot map a byte stream.
    with self.assertRaises(errors.MappingError):
      data_type_map.MapByteStream(b'\x01\x00\x00\x00')

  def testMapValue(self):
    """Tests the MapValue function."""
    definitions_file = self._GetTestFilePath(['integer.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)
    data_type_definition = definitions_registry.GetDefinitionByName('int32le')

    data_type_map = data_maps.PrimitiveDataTypeMap(data_type_definition)
    integer_value = data_type_map.MapValue(1)
    self.assertEqual(integer_value,
        1)


@test_lib.skipUnlessHasTestFile(['definitions', 'booleans.yaml'])
class BooleanMapTest(test_lib.BaseTestCase):
  """Boolean map tests."""

  def testInitialize(self):
    """Tests the __init__ function."""
    definitions_file = self._GetTestFilePath(['definitions', 'booleans.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)
    data_type_definition = definitions_registry.GetDefinitionByName('bool32')
    data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN

    # A boolean map requires at least a false_value or a true_value.
    data_type_definition.false_value = None
    data_type_definition.true_value = None
    with self.assertRaises(errors.FormatError):
      data_maps.BooleanMap(data_type_definition)

  def testGetStructFormatString(self):
    """Tests the GetStructFormatString function."""
    definitions_file = self._GetTestFilePath(['definitions', 'booleans.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)

    data_type_definition = definitions_registry.GetDefinitionByName('bool8')
    data_type_map = data_maps.BooleanMap(data_type_definition)
    struct_format_string = data_type_map.GetStructFormatString()
    self.assertEqual(struct_format_string, 'B')

    data_type_definition = definitions_registry.GetDefinitionByName('bool16')
    data_type_map = data_maps.BooleanMap(data_type_definition)
    struct_format_string = data_type_map.GetStructFormatString()
    self.assertEqual(struct_format_string, 'H')

    data_type_definition = definitions_registry.GetDefinitionByName('bool32')
    data_type_map = data_maps.BooleanMap(data_type_definition)
    struct_format_string = data_type_map.GetStructFormatString()
    self.assertEqual(struct_format_string, 'I')

  def testFoldByteStream(self):
    """Tests the FoldByteStream function."""
    definitions_file = self._GetTestFilePath(['definitions', 'booleans.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)

    data_type_definition = definitions_registry.GetDefinitionByName('bool8')
    data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN
    data_type_map = data_maps.BooleanMap(data_type_definition)
    data_type_definition.false_value = 0
    data_type_definition.true_value = 1

    byte_stream = data_type_map.FoldByteStream(False)
    self.assertEqual(byte_stream, b'\x00')

    byte_stream = data_type_map.FoldByteStream(True)
    self.assertEqual(byte_stream, b'\x01')

    # Only boolean values can be folded.
    with self.assertRaises(errors.FoldingError):
      data_type_map.FoldByteStream(None)

    data_type_definition = definitions_registry.GetDefinitionByName('bool16')
    data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN
    data_type_definition.false_value = 0xffff
    data_type_definition.true_value = 1
    data_type_map = data_maps.BooleanMap(data_type_definition)

    byte_stream = data_type_map.FoldByteStream(False)
    self.assertEqual(byte_stream, b'\xff\xff')

    byte_stream = data_type_map.FoldByteStream(True)
    self.assertEqual(byte_stream, b'\x01\x00')

    data_type_definition = definitions_registry.GetDefinitionByName('bool32')
    data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN
    data_type_definition.false_value = 0
    data_type_definition.true_value = None
    data_type_map = data_maps.BooleanMap(data_type_definition)

    byte_stream = data_type_map.FoldByteStream(False)
    self.assertEqual(byte_stream, b'\x00\x00\x00\x00')

    # True cannot be folded when no true_value is defined.
    with self.assertRaises(errors.FoldingError):
      data_type_map.FoldByteStream(True)

  def testMapByteStream(self):
    """Tests the MapByteStream function."""
    definitions_file = self._GetTestFilePath(['definitions', 'booleans.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)

    data_type_definition = definitions_registry.GetDefinitionByName('bool8')
    data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN
    data_type_map = data_maps.BooleanMap(data_type_definition)
    data_type_definition.true_value = 1

    bool_value = data_type_map.MapByteStream(b'\x00')
    self.assertFalse(bool_value)

    bool_value = data_type_map.MapByteStream(b'\x01')
    self.assertTrue(bool_value)

    # A value that matches neither false_value nor true_value cannot be
    # mapped.
    with self.assertRaises(errors.MappingError):
      data_type_map.MapByteStream(b'\xff')

    data_type_definition = definitions_registry.GetDefinitionByName('bool16')
    data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN
    data_type_definition.false_value = None
    data_type_definition.true_value = 1
    data_type_map = data_maps.BooleanMap(data_type_definition)

    # With false_value None any value that is not true_value maps to False.
    bool_value = data_type_map.MapByteStream(b'\xff\xff')
    self.assertFalse(bool_value)

    bool_value = data_type_map.MapByteStream(b'\x01\x00')
    self.assertTrue(bool_value)

    data_type_definition = definitions_registry.GetDefinitionByName('bool32')
    data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN
    data_type_definition.true_value = None
    data_type_map = data_maps.BooleanMap(data_type_definition)

    bool_value = data_type_map.MapByteStream(b'\x00\x00\x00\x00')
    self.assertFalse(bool_value)

    # With true_value None any value that is not false_value maps to True.
    bool_value = data_type_map.MapByteStream(b'\xff\xff\xff\xff')
    self.assertTrue(bool_value)

    # A 32-bit boolean requires 4 bytes of data.
    with self.assertRaises(errors.ByteStreamTooSmallError):
      data_type_map.MapByteStream(b'\x01\x00')


@test_lib.skipUnlessHasTestFile(['definitions', 'characters.yaml'])
class CharacterMapTest(test_lib.BaseTestCase):
  """Character map tests."""

  def testGetStructFormatString(self):
    """Tests the GetStructFormatString function."""
    definitions_file = self._GetTestFilePath([
        'definitions', 'characters.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)

    data_type_definition = definitions_registry.GetDefinitionByName('char')
    data_type_map = data_maps.CharacterMap(data_type_definition)
    struct_format_string = data_type_map.GetStructFormatString()
    self.assertEqual(struct_format_string, 'b')

    data_type_definition = definitions_registry.GetDefinitionByName('wchar16')
    data_type_map = data_maps.CharacterMap(data_type_definition)
    struct_format_string = data_type_map.GetStructFormatString()
    self.assertEqual(struct_format_string, 'h')

    data_type_definition = definitions_registry.GetDefinitionByName('wchar32')
    data_type_map = data_maps.CharacterMap(data_type_definition)
    struct_format_string = data_type_map.GetStructFormatString()
    self.assertEqual(struct_format_string, 'i')

  def testFoldByteStream(self):
    """Tests the FoldByteStream function."""
    definitions_file = self._GetTestFilePath([
        'definitions', 'characters.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)

    data_type_definition = definitions_registry.GetDefinitionByName('char')
    data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN
    data_type_map = data_maps.CharacterMap(data_type_definition)

    byte_stream = data_type_map.FoldByteStream('A')
    self.assertEqual(byte_stream, b'\x41')

    data_type_definition = definitions_registry.GetDefinitionByName('wchar16')
    data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN
    data_type_map = data_maps.CharacterMap(data_type_definition)

    byte_stream = data_type_map.FoldByteStream('\u24b6')
    self.assertEqual(byte_stream, b'\xb6\x24')

    data_type_definition = definitions_registry.GetDefinitionByName('wchar32')
    data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN
    data_type_map = data_maps.CharacterMap(data_type_definition)

    byte_stream = data_type_map.FoldByteStream('\u24b6')
    self.assertEqual(byte_stream, b'\xb6\x24\x00\x00')

  def testMapByteStream(self):
    """Tests the MapByteStream function."""
    definitions_file = self._GetTestFilePath([
        'definitions', 'characters.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)

    data_type_definition = definitions_registry.GetDefinitionByName('char')
    data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN
    data_type_map = data_maps.CharacterMap(data_type_definition)

    string_value = data_type_map.MapByteStream(b'\x41')
    self.assertEqual(string_value, 'A')

    data_type_definition = definitions_registry.GetDefinitionByName('wchar16')
    data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN
    data_type_map = data_maps.CharacterMap(data_type_definition)

    string_value = data_type_map.MapByteStream(b'\xb6\x24')
    self.assertEqual(string_value, '\u24b6')

    data_type_definition = definitions_registry.GetDefinitionByName('wchar32')
    data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN
    data_type_map = data_maps.CharacterMap(data_type_definition)

    string_value = data_type_map.MapByteStream(b'\xb6\x24\x00\x00')
    self.assertEqual(string_value, '\u24b6')

    # A 32-bit character requires 4 bytes of data.
    with self.assertRaises(errors.ByteStreamTooSmallError):
      data_type_map.MapByteStream(b'\xb6\x24')


@test_lib.skipUnlessHasTestFile(['definitions', 'floating-points.yaml'])
class FloatingPointMapTest(test_lib.BaseTestCase):
  """Floating-point map tests."""

  def testGetStructFormatString(self):
    """Tests the GetStructFormatString function."""
    definitions_file = self._GetTestFilePath([
        'definitions', 'floating-points.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)

    data_type_definition = definitions_registry.GetDefinitionByName('float32')
    data_type_map = data_maps.FloatingPointMap(data_type_definition)
    struct_format_string = data_type_map.GetStructFormatString()
    self.assertEqual(struct_format_string, 'f')

    data_type_definition = definitions_registry.GetDefinitionByName('float64')
    data_type_map = data_maps.FloatingPointMap(data_type_definition)
    struct_format_string = data_type_map.GetStructFormatString()
    self.assertEqual(struct_format_string, 'd')

  def testFoldByteStream(self):
    """Tests the FoldByteStream function."""
    definitions_file = self._GetTestFilePath([
        'definitions', 'floating-points.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)

    data_type_definition = definitions_registry.GetDefinitionByName('float32')
    data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN
    data_type_map = data_maps.FloatingPointMap(data_type_definition)

    byte_stream = data_type_map.FoldByteStream(12.34000015258789)
    self.assertEqual(byte_stream,
        b'\xa4\x70\x45\x41')

    data_type_definition = definitions_registry.GetDefinitionByName('float64')
    data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN
    data_type_map = data_maps.FloatingPointMap(data_type_definition)

    byte_stream = data_type_map.FoldByteStream(12.34)
    self.assertEqual(byte_stream, b'\xae\x47\xe1\x7a\x14\xae\x28\x40')

  def testMapByteStream(self):
    """Tests the MapByteStream function."""
    definitions_file = self._GetTestFilePath([
        'definitions', 'floating-points.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)

    data_type_definition = definitions_registry.GetDefinitionByName('float32')
    data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN
    data_type_map = data_maps.FloatingPointMap(data_type_definition)

    float_value = data_type_map.MapByteStream(b'\xa4\x70\x45\x41')
    self.assertEqual(float_value, 12.34000015258789)

    data_type_definition = definitions_registry.GetDefinitionByName('float64')
    data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN
    data_type_map = data_maps.FloatingPointMap(data_type_definition)

    float_value = data_type_map.MapByteStream(
        b'\xae\x47\xe1\x7a\x14\xae\x28\x40')
    self.assertEqual(float_value, 12.34)

    # A 64-bit floating-point requires 8 bytes of data.
    with self.assertRaises(errors.ByteStreamTooSmallError):
      data_type_map.MapByteStream(b'\xa4\x70\x45\x41')


class IntegerMapTest(test_lib.BaseTestCase):
  """Integer map tests."""

  # pylint: disable=protected-access

  @test_lib.skipUnlessHasTestFile(['integer.yaml'])
  def testGetByteStreamOperation(self):
    """Tests the _GetByteStreamOperation function."""
    definitions_file = self._GetTestFilePath(['integer.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)
    data_type_definition = definitions_registry.GetDefinitionByName('int32le')

    data_type_map = data_maps.IntegerMap(data_type_definition)
    map_operation = data_type_map._GetByteStreamOperation()
    self.assertIsInstance(map_operation, byte_operations.StructOperation)
@test_lib.skipUnlessHasTestFile(['definitions', 'integers.yaml']) def testGetStructFormatString(self): """Tests the GetStructFormatString function.""" definitions_file = self._GetTestFilePath(['definitions', 'integers.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('int8') data_type_map = data_maps.IntegerMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, 'b') data_type_definition = definitions_registry.GetDefinitionByName('int16') data_type_map = data_maps.IntegerMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, 'h') data_type_definition = definitions_registry.GetDefinitionByName('int32') data_type_map = data_maps.IntegerMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, 'i') data_type_definition = definitions_registry.GetDefinitionByName('int64') data_type_map = data_maps.IntegerMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, 'q') data_type_definition = definitions_registry.GetDefinitionByName('uint8') data_type_map = data_maps.IntegerMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, 'B') data_type_definition = definitions_registry.GetDefinitionByName('uint16') data_type_map = data_maps.IntegerMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, 'H') data_type_definition = definitions_registry.GetDefinitionByName('uint32') data_type_map = data_maps.IntegerMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, 'I') data_type_definition = 
definitions_registry.GetDefinitionByName('uint64') data_type_map = data_maps.IntegerMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, 'Q') def testFoldByteStream(self): """Tests the FoldByteStream function.""" definitions_file = self._GetTestFilePath(['definitions', 'integers.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('uint8') data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_map = data_maps.IntegerMap(data_type_definition) byte_stream = data_type_map.FoldByteStream(0x12) self.assertEqual(byte_stream, b'\x12') data_type_definition = definitions_registry.GetDefinitionByName('uint16') data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_map = data_maps.IntegerMap(data_type_definition) byte_stream = data_type_map.FoldByteStream(0x3412) self.assertEqual(byte_stream, b'\x12\x34') data_type_definition = definitions_registry.GetDefinitionByName('uint32') data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_map = data_maps.IntegerMap(data_type_definition) byte_stream = data_type_map.FoldByteStream(0x78563412) self.assertEqual(byte_stream, b'\x12\x34\x56\x78') data_type_definition = definitions_registry.GetDefinitionByName('uint32') data_type_definition.byte_order = definitions.BYTE_ORDER_BIG_ENDIAN data_type_map = data_maps.IntegerMap(data_type_definition) byte_stream = data_type_map.FoldByteStream(0x12345678) self.assertEqual(byte_stream, b'\x12\x34\x56\x78') data_type_definition = definitions_registry.GetDefinitionByName('uint64') data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_map = data_maps.IntegerMap(data_type_definition) byte_stream = data_type_map.FoldByteStream(0xf0debc9a78563412) self.assertEqual(byte_stream, b'\x12\x34\x56\x78\x9a\xbc\xde\xf0') 
  @test_lib.skipUnlessHasTestFile(['definitions', 'integers.yaml'])
  def testMapByteStream(self):
    """Tests the MapByteStream function."""
    definitions_file = self._GetTestFilePath(['definitions', 'integers.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)

    data_type_definition = definitions_registry.GetDefinitionByName('uint8')
    data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN
    data_type_map = data_maps.IntegerMap(data_type_definition)

    integer_value = data_type_map.MapByteStream(b'\x12')
    self.assertEqual(integer_value, 0x12)

    data_type_definition = definitions_registry.GetDefinitionByName('uint16')
    data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN
    data_type_map = data_maps.IntegerMap(data_type_definition)

    integer_value = data_type_map.MapByteStream(b'\x12\x34')
    self.assertEqual(integer_value, 0x3412)

    data_type_definition = definitions_registry.GetDefinitionByName('uint32')
    data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN
    data_type_map = data_maps.IntegerMap(data_type_definition)

    integer_value = data_type_map.MapByteStream(b'\x12\x34\x56\x78')
    self.assertEqual(integer_value, 0x78563412)

    data_type_definition = definitions_registry.GetDefinitionByName('uint32')
    data_type_definition.byte_order = definitions.BYTE_ORDER_BIG_ENDIAN
    data_type_map = data_maps.IntegerMap(data_type_definition)

    integer_value = data_type_map.MapByteStream(b'\x12\x34\x56\x78')
    self.assertEqual(integer_value, 0x12345678)

    data_type_definition = definitions_registry.GetDefinitionByName('uint64')
    data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN
    data_type_map = data_maps.IntegerMap(data_type_definition)

    integer_value = data_type_map.MapByteStream(
        b'\x12\x34\x56\x78\x9a\xbc\xde\xf0')
    self.assertEqual(integer_value, 0xf0debc9a78563412)

    # A 64-bit integer requires 8 bytes of data.
    with self.assertRaises(errors.ByteStreamTooSmallError):
      data_type_map.MapByteStream(b'\x12\x34\x56\x78')


@test_lib.skipUnlessHasTestFile(['uuid.yaml'])
class UUIDMapTest(test_lib.BaseTestCase):
  """UUID map tests."""

  def testFoldByteStream(self):
    """Tests the FoldByteStream function."""
    definitions_file = self._GetTestFilePath(['uuid.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)
    data_type_definition = definitions_registry.GetDefinitionByName('uuid')
    data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN
    data_type_map = data_maps.UUIDMap(data_type_definition)

    uuid_value = uuid.UUID('{00021401-0000-0000-c000-000000000046}')
    byte_stream = data_type_map.FoldByteStream(uuid_value)

    expected_byte_stream = (
        b'\x01\x14\x02\x00\x00\x00\x00\x00\xc0\x00\x00\x00\x00\x00\x00\x46')
    self.assertEqual(byte_stream, expected_byte_stream)

  def testMapByteStream(self):
    """Tests the MapByteStream function."""
    definitions_file = self._GetTestFilePath(['uuid.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)
    data_type_definition = definitions_registry.GetDefinitionByName('uuid')
    data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN
    data_type_map = data_maps.UUIDMap(data_type_definition)

    expected_uuid_value = uuid.UUID('{00021401-0000-0000-c000-000000000046}')
    uuid_value = data_type_map.MapByteStream(
        b'\x01\x14\x02\x00\x00\x00\x00\x00\xc0\x00\x00\x00\x00\x00\x00\x46')
    self.assertEqual(uuid_value, expected_uuid_value)


@test_lib.skipUnlessHasTestFile(['sequence.yaml'])
class ElementSequenceDataTypeMapTest(test_lib.BaseTestCase):
  """Element sequence data type map tests."""

  # pylint: disable=protected-access

  def testInitialize(self):
    """Tests the __init__ function."""
    definitions_file = self._GetTestFilePath(['sequence.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)
    data_type_definition = definitions_registry.GetDefinitionByName('vector4')

    data_type_map = data_maps.ElementSequenceDataTypeMap(data_type_definition)
    self.assertIsNotNone(data_type_map)

  # TODO: add tests for _CalculateElementsDataSize.
  # TODO: add tests for _EvaluateElementsDataSize.
  # TODO: add tests for _EvaluateNumberOfElements.

  def testGetElementDataTypeDefinition(self):
    """Tests the _GetElementDataTypeDefinition function."""
    definitions_file = self._GetTestFilePath(['sequence.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)
    data_type_definition = definitions_registry.GetDefinitionByName('vector4')

    data_type_map = data_maps.ElementSequenceDataTypeMap(data_type_definition)
    element_data_type_definition = data_type_map._GetElementDataTypeDefinition(
        data_type_definition)
    self.assertIsNotNone(element_data_type_definition)

    with self.assertRaises(errors.FormatError):
      data_type_map._GetElementDataTypeDefinition(None)

    # A definition without an element data type cannot be resolved.
    with self.assertRaises(errors.FormatError):
      data_type_definition = EmptyDataTypeDefinition('empty')
      data_type_map._GetElementDataTypeDefinition(data_type_definition)

  # TODO: add tests for _HasElementsDataSize.
  # TODO: add tests for _HasElementsTerminator.
  # TODO: add tests for _HasNumberOfElements.
  # TODO: add tests for GetSizeHint.
  def testGetStructByteOrderString(self):
    """Tests the GetStructByteOrderString function."""
    definitions_file = self._GetTestFilePath(['sequence.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)
    data_type_definition = definitions_registry.GetDefinitionByName('vector4')

    data_type_map = data_maps.ElementSequenceDataTypeMap(data_type_definition)
    # NOTE(review): '<' presumably comes from the element data type's
    # little-endian byte order — confirm against sequence.yaml.
    byte_order_string = data_type_map.GetStructByteOrderString()
    self.assertEqual(byte_order_string, '<')


@test_lib.skipUnlessHasTestFile(['sequence.yaml'])
class SequenceMapTest(test_lib.BaseTestCase):
  """Sequence map tests."""

  # pylint: disable=protected-access

  def testInitialize(self):
    """Tests the __init__ function."""
    definitions_file = self._GetTestFilePath(['sequence.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)
    data_type_definition = definitions_registry.GetDefinitionByName('vector4')

    data_type_map = data_maps.SequenceMap(data_type_definition)
    self.assertIsNotNone(data_type_map)

  # TODO: add tests for _CompositeFoldByteStream once implemented.
  def testCompositeMapByteStream(self):
    """Tests the _CompositeMapByteStream function."""
    definitions_file = self._GetTestFilePath(['sequence.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)
    data_type_definition = definitions_registry.GetDefinitionByName(
        'triangle4')

    data_type_map = data_maps.SequenceMap(data_type_definition)

    # Build 12 consecutive 32-bit little-endian integers: 1, 2, ..., 12.
    byte_values = []
    for value in range(1, 13):
      byte_value_upper, byte_value_lower = divmod(value, 256)
      byte_values.extend([byte_value_lower, byte_value_upper, 0, 0])

    byte_stream = bytes(bytearray(byte_values))

    sequence_value = data_type_map._CompositeMapByteStream(byte_stream)
    self.assertEqual(
        sequence_value, ((1, 2, 3, 4), (5, 6, 7, 8), (9, 10, 11, 12)))

    with self.assertRaises(errors.MappingError):
      data_type_map._CompositeMapByteStream(None)

    with self.assertRaises(errors.ByteStreamTooSmallError):
      data_type_map._CompositeMapByteStream(b'\x12\x34\x56')

  def testLinearFoldByteStream(self):
    """Tests the _LinearFoldByteStream function."""
    definitions_file = self._GetTestFilePath(['sequence.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)
    data_type_definition = definitions_registry.GetDefinitionByName('vector4')

    data_type_map = data_maps.SequenceMap(data_type_definition)

    byte_stream = data_type_map._LinearFoldByteStream((1, 2, 3, 4))
    expected_sequence_value = (
        b'\x01\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00')
    self.assertEqual(byte_stream, expected_sequence_value)

  def testLinearMapByteStream(self):
    """Tests the _LinearMapByteStream function."""
    definitions_file = self._GetTestFilePath(['sequence.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)
    data_type_definition = definitions_registry.GetDefinitionByName('vector4')

    data_type_map = data_maps.SequenceMap(data_type_definition)

    sequence_value = data_type_map._LinearMapByteStream(
        b'\x01\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00')
    self.assertEqual(sequence_value, (1, 2, 3, 4))

    with self.assertRaises(errors.MappingError):
      data_type_map._LinearMapByteStream(None)

    with self.assertRaises(errors.ByteStreamTooSmallError):
      data_type_map._LinearMapByteStream(b'\x12\x34\x56')

  def testFoldByteStream(self):
    """Tests the FoldByteStream function."""
    definitions_file = self._GetTestFilePath(['sequence.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)
    data_type_definition = definitions_registry.GetDefinitionByName('vector4')

    data_type_map = data_maps.SequenceMap(data_type_definition)

    byte_stream = data_type_map.FoldByteStream((1, 2, 3, 4))
    expected_sequence_value = (
        b'\x01\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00')
    self.assertEqual(byte_stream, expected_sequence_value)

  def testGetStructFormatString(self):
    """Tests the GetStructFormatString function."""
    definitions_file = self._GetTestFilePath(['sequence.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)
    data_type_definition = definitions_registry.GetDefinitionByName('vector4')

    data_type_map = data_maps.SequenceMap(data_type_definition)
    # Four 32-bit integers map to the struct format '4i'.
    struct_format_string = data_type_map.GetStructFormatString()
    self.assertEqual(struct_format_string, '4i')

    data_type_definition.elements_data_size = 16
    data_type_definition.number_of_elements = 0
    data_type_map = data_maps.SequenceMap(data_type_definition)
    struct_format_string = data_type_map.GetStructFormatString()
    self.assertEqual(struct_format_string, '4i')

    # Without elements data size and number of elements there is no format.
    data_type_definition.elements_data_size = 0
    data_type_definition.number_of_elements = 0
    data_type_map = data_maps.SequenceMap(data_type_definition)
    struct_format_string = data_type_map.GetStructFormatString()
    self.assertIsNone(struct_format_string)

  def testMapByteStream(self):
    """Tests the MapByteStream function."""
    definitions_file = self._GetTestFilePath(['sequence.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)
    data_type_definition = definitions_registry.GetDefinitionByName('vector4')

    data_type_map = data_maps.SequenceMap(data_type_definition)

    sequence_value = data_type_map.MapByteStream(
        b'\x01\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00')
    self.assertEqual(sequence_value, (1, 2, 3, 4))


@test_lib.skipUnlessHasTestFile(['stream.yaml'])
class StreamMapTest(test_lib.BaseTestCase):
  """Stream map tests."""

  # pylint: disable=protected-access

  def testInitialize(self):
    """Tests the __init__ function."""
    definitions_file = self._GetTestFilePath(['stream.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)
    data_type_definition = definitions_registry.GetDefinitionByName(
        'utf16le_stream')

    data_type_map = data_maps.StreamMap(data_type_definition)
    self.assertIsNotNone(data_type_map)

  def testFoldByteStream(self):
    """Tests the FoldByteStream function."""
    definitions_file = self._GetTestFilePath(['stream.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)
    data_type_definition = definitions_registry.GetDefinitionByName(
        'utf16le_stream')
    data_type_map = data_maps.StreamMap(data_type_definition)

    expected_byte_stream = b'd\x00t\x00F\x00a\x00b\x00r\x00i\x00c\x00'
    byte_stream = data_type_map.FoldByteStream(expected_byte_stream)
    self.assertEqual(byte_stream, expected_byte_stream)

    # Test with data type definition with elements data size.
data_type_definition = definitions_registry.GetDefinitionByName( 'utf16le_stream_with_size') data_type_map = data_maps.StreamMap(data_type_definition) context = data_maps.DataTypeMapContext({'size': 16}) expected_byte_stream = b'd\x00t\x00F\x00a\x00b\x00r\x00i\x00c\x00' byte_stream = data_type_map.FoldByteStream( expected_byte_stream, context=context) self.assertEqual(byte_stream, expected_byte_stream) context = data_maps.DataTypeMapContext({'size': 8}) with self.assertRaises(errors.FoldingError): data_type_map.FoldByteStream(expected_byte_stream, context=context) # Test with data type definition with elements terminator. data_type_definition = definitions_registry.GetDefinitionByName( 'utf16le_stream_with_terminator') data_type_map = data_maps.StreamMap(data_type_definition) expected_byte_stream = b'd\x00t\x00F\x00a\x00b\x00r\x00i\x00c\x00\x00\x00' byte_stream = data_type_map.FoldByteStream(expected_byte_stream) self.assertEqual(byte_stream, expected_byte_stream) def testGetStructFormatString(self): """Tests the GetStructFormatString function.""" definitions_file = self._GetTestFilePath(['stream.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'utf16le_stream') data_type_map = data_maps.StreamMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, '16B') data_type_definition.elements_data_size = 16 data_type_definition.number_of_elements = 0 data_type_map = data_maps.StreamMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, '16B') data_type_definition.elements_data_size = 0 data_type_definition.number_of_elements = 0 data_type_map = data_maps.StreamMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertIsNone(struct_format_string) def testMapByteStream(self): """Tests the 
MapByteStream function.""" definitions_file = self._GetTestFilePath(['stream.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'utf16le_stream') data_type_map = data_maps.StreamMap(data_type_definition) byte_stream = 'dtFabric'.encode('utf-16-le') stream_value = data_type_map.MapByteStream(byte_stream) self.assertEqual(stream_value, b'd\x00t\x00F\x00a\x00b\x00r\x00i\x00c\x00') with self.assertRaises(errors.MappingError): data_type_map.MapByteStream(None) with self.assertRaises(errors.ByteStreamTooSmallError): data_type_map.MapByteStream(b'\x12\x34\x56') @test_lib.skipUnlessHasTestFile(['string.yaml']) class StringMapTest(test_lib.BaseTestCase): """String map tests.""" def testFoldByteStream(self): """Tests the FoldByteStream function.""" definitions_file = self._GetTestFilePath(['string.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'utf16_string') data_type_map = data_maps.StringMap(data_type_definition) expected_byte_stream = 'dtFabric'.encode('utf-16-le') byte_stream = data_type_map.FoldByteStream('dtFabric') self.assertEqual(byte_stream, expected_byte_stream) def testGetStructFormatString(self): """Tests the GetStructFormatString function.""" definitions_file = self._GetTestFilePath(['string.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'utf16_string') data_type_map = data_maps.StreamMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, '16B') data_type_definition.elements_data_size = 16 data_type_definition.number_of_elements = 0 data_type_map = data_maps.StringMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() 
self.assertEqual(struct_format_string, '16B') data_type_definition.elements_data_size = 0 data_type_definition.number_of_elements = 0 data_type_map = data_maps.StringMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertIsNone(struct_format_string) def testMapByteStream(self): """Tests the MapByteStream function.""" definitions_file = self._GetTestFilePath(['string.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'utf16_string') data_type_map = data_maps.StringMap(data_type_definition) byte_stream = 'dtFabric'.encode('utf-16-le') string_value = data_type_map.MapByteStream(byte_stream) self.assertEqual(string_value, 'dtFabric') with self.assertRaises(errors.MappingError): data_type_map.MapByteStream(None) with self.assertRaises(errors.ByteStreamTooSmallError): data_type_map.MapByteStream(b'\x12\x34\x56') data_type_definition = definitions_registry.GetDefinitionByName( 'utf8_string') data_type_map = data_maps.StringMap(data_type_definition) byte_stream = 'dtFabric\x00and more'.encode('utf8') string_value = data_type_map.MapByteStream(byte_stream) self.assertEqual(string_value, 'dtFabric') with self.assertRaises(errors.ByteStreamTooSmallError): data_type_map.MapByteStream(byte_stream[:7]) class StructureMapTest(test_lib.BaseTestCase): """Structure map tests.""" # pylint: disable=protected-access @test_lib.skipUnlessHasTestFile(['structure.yaml']) def testInitialize(self): """Tests the __init__ function.""" definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('point3d') data_type_map = data_maps.StructureMap(data_type_definition) self.assertIsNotNone(data_type_map) @test_lib.skipUnlessHasTestFile(['structure.yaml']) def testCheckCompositeMap(self): """Tests the 
_CheckCompositeMap function.""" definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('point3d') data_type_map = data_maps.StructureMap(data_type_definition) result = data_type_map._CheckCompositeMap(data_type_definition) self.assertFalse(result) with self.assertRaises(errors.FormatError): data_type_map._CheckCompositeMap(None) with self.assertRaises(errors.FormatError): data_type_definition = EmptyDataTypeDefinition('empty') data_type_map._CheckCompositeMap(data_type_definition) data_type_definition = definitions_registry.GetDefinitionByName( 'triangle3d') data_type_map = data_maps.StructureMap(data_type_definition) result = data_type_map._CheckCompositeMap(data_type_definition) self.assertTrue(result) data_type_definition = definitions_registry.GetDefinitionByName('box3d') data_type_map = data_maps.StructureMap(data_type_definition) result = data_type_map._CheckCompositeMap(data_type_definition) self.assertTrue(result) data_type_definition = definitions_registry.GetDefinitionByName( 'sphere3d') data_type_map = data_maps.StructureMap(data_type_definition) result = data_type_map._CheckCompositeMap(data_type_definition) self.assertTrue(result) # TODO: add tests for _CompositeFoldByteStream. # TODO: add tests for _CompositeMapByteStream. 
@test_lib.skipUnlessHasTestFile(['structure.yaml']) def testGetAttributeNames(self): """Tests the _GetAttributeNames function.""" definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('point3d') data_type_map = data_maps.StructureMap(data_type_definition) attribute_names = data_type_map._GetAttributeNames(data_type_definition) self.assertEqual(attribute_names, ['x', 'y', 'z']) with self.assertRaises(errors.FormatError): data_type_map._GetAttributeNames(None) @test_lib.skipUnlessHasTestFile(['structure.yaml']) def testGetMemberDataTypeMaps(self): """Tests the _GetMemberDataTypeMaps function.""" definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('point3d') data_type_map = data_maps.StructureMap(data_type_definition) members_data_type_maps = data_type_map._GetMemberDataTypeMaps( data_type_definition, {}) self.assertIsNotNone(members_data_type_maps) with self.assertRaises(errors.FormatError): data_type_map._GetMemberDataTypeMaps(None, {}) with self.assertRaises(errors.FormatError): data_type_definition = EmptyDataTypeDefinition('empty') data_type_map._GetMemberDataTypeMaps(data_type_definition, {}) @test_lib.skipUnlessHasTestFile(['structure.yaml']) def testLinearFoldByteStream(self): """Tests the _LinearFoldByteStream function.""" definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('point3d') data_type_map = data_maps.StructureMap(data_type_definition) byte_values = [] for value in range(1, 4): byte_value_upper, byte_value_lower = divmod(value, 256) byte_values.extend([byte_value_lower, byte_value_upper, 0, 0]) 
point3d = data_type_map.CreateStructureValues(x=1, y=2, z=3) expected_byte_stream = bytes(bytearray(byte_values)) byte_stream = data_type_map._LinearFoldByteStream(point3d) self.assertEqual(byte_stream, expected_byte_stream) @test_lib.skipUnlessHasTestFile(['structure.yaml']) def testLinearMapByteStream(self): """Tests the _LinearMapByteStream function.""" definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('point3d') data_type_map = data_maps.StructureMap(data_type_definition) byte_values = [] for value in range(1, 4): byte_value_upper, byte_value_lower = divmod(value, 256) byte_values.extend([byte_value_lower, byte_value_upper, 0, 0]) byte_stream = bytes(bytearray(byte_values)) point3d = data_type_map._LinearMapByteStream(byte_stream) self.assertEqual(point3d.x, 1) self.assertEqual(point3d.y, 2) self.assertEqual(point3d.z, 3) data_type_definition = definitions_registry.GetDefinitionByName('point3d') data_type_definition.byte_order = definitions.BYTE_ORDER_BIG_ENDIAN data_type_map = data_maps.StructureMap(data_type_definition) point3d = data_type_map._LinearMapByteStream(byte_stream) self.assertEqual(point3d.x, 0x01000000) self.assertEqual(point3d.y, 0x02000000) self.assertEqual(point3d.z, 0x03000000) # TODO: add tests for CreateStructureValues. def testGetStructFormatString(self): """Tests the GetStructFormatString function.""" definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('point3d') data_type_map = data_maps.StructureMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, 'iii') # Test with member without a struct format string. 
data_type_definition = data_types.StructureDefinition( 'my_struct_type', aliases=['MY_STRUCT_TYPE'], description='my structure type') member_definition = TestDataTypeDefinition('test') structure_member_definition = data_types.MemberDataTypeDefinition( 'my_struct_member', member_definition, aliases=['MY_STRUCT_MEMBER'], data_type='test', description='my structure member') data_type_definition.AddMemberDefinition(structure_member_definition) data_type_map = data_maps.StructureMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertIsNone(struct_format_string) @test_lib.skipUnlessHasTestFile(['structure.yaml']) def testMapByteStream(self): """Tests the MapByteStream function.""" definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('point3d') data_type_map = data_maps.StructureMap(data_type_definition) byte_values = [] for value in range(1, 4): byte_value_upper, byte_value_lower = divmod(value, 256) byte_values.extend([byte_value_lower, byte_value_upper, 0, 0]) byte_stream = bytes(bytearray(byte_values)) point3d = data_type_map.MapByteStream(byte_stream) self.assertEqual(point3d.x, 1) self.assertEqual(point3d.y, 2) self.assertEqual(point3d.z, 3) data_type_definition = definitions_registry.GetDefinitionByName('point3d') data_type_definition.byte_order = definitions.BYTE_ORDER_BIG_ENDIAN data_type_map = data_maps.StructureMap(data_type_definition) point3d = data_type_map.MapByteStream(byte_stream) self.assertEqual(point3d.x, 0x01000000) self.assertEqual(point3d.y, 0x02000000) self.assertEqual(point3d.z, 0x03000000) @test_lib.skipUnlessHasTestFile(['structure.yaml']) def testMapByteStreamWithSequence(self): """Tests the MapByteStream function with a sequence.""" definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = 
self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('box3d') data_type_map = data_maps.StructureMap(data_type_definition) byte_values = [] for value in range(1, 433): byte_value_upper, byte_value_lower = divmod(value, 256) byte_values.extend([byte_value_lower, byte_value_upper, 0, 0]) byte_stream = bytes(bytearray(byte_values)) box = data_type_map.MapByteStream(byte_stream) self.assertEqual(box.triangles[0].a.x, 1) self.assertEqual(box.triangles[0].a.y, 2) self.assertEqual(box.triangles[0].a.z, 3) @test_lib.skipUnlessHasTestFile(['structure.yaml']) def testMapByteStreamWithSequenceWithExpression(self): """Tests the MapByteStream function with a sequence with expression.""" definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('sphere3d') data_type_map = data_maps.StructureMap(data_type_definition) byte_values = [3, 0, 0, 0] for value in range(1, 113): byte_value_upper, byte_value_lower = divmod(value, 256) byte_values.extend([byte_value_lower, byte_value_upper, 0, 0]) byte_stream = bytes(bytearray(byte_values)) sphere = data_type_map.MapByteStream(byte_stream) self.assertEqual(sphere.number_of_triangles, 3) self.assertEqual(sphere.triangles[0].a.x, 1) self.assertEqual(sphere.triangles[0].a.y, 2) self.assertEqual(sphere.triangles[0].a.z, 3) self.assertEqual(sphere.triangles[0].b.x, 4) self.assertEqual(sphere.triangles[0].b.y, 5) self.assertEqual(sphere.triangles[0].b.z, 6) self.assertEqual(sphere.triangles[0].c.x, 7) self.assertEqual(sphere.triangles[0].c.y, 8) self.assertEqual(sphere.triangles[0].c.z, 9) self.assertEqual(sphere.triangles[2].c.x, 25) self.assertEqual(sphere.triangles[2].c.y, 26) self.assertEqual(sphere.triangles[2].c.z, 27) # Test incremental map. 
context = data_maps.DataTypeMapContext() with self.assertRaises(errors.ByteStreamTooSmallError): data_type_map.MapByteStream(byte_stream[:64], context=context) sphere = data_type_map.MapByteStream(byte_stream[64:], context=context) self.assertEqual(sphere.number_of_triangles, 3) self.assertEqual(sphere.triangles[0].a.x, 1) self.assertEqual(sphere.triangles[0].a.y, 2) self.assertEqual(sphere.triangles[0].a.z, 3) self.assertEqual(sphere.triangles[2].c.x, 25) self.assertEqual(sphere.triangles[2].c.y, 26) self.assertEqual(sphere.triangles[2].c.z, 27) @test_lib.skipUnlessHasTestFile(['structure_with_sequence.yaml']) def testMapByteStreamWithSequenceWithExpression2(self): """Tests the MapByteStream function with a sequence with expression.""" definitions_file = self._GetTestFilePath(['structure_with_sequence.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'extension_block') data_type_map = data_maps.StructureMap(data_type_definition) byte_values = [4, 1, 0, 0] for byte_value in range(0, 256): byte_values.extend([byte_value]) byte_stream = bytes(bytearray(byte_values)) extension_block = data_type_map.MapByteStream(byte_stream) self.assertEqual(extension_block.size, 260) self.assertEqual(extension_block.data[0], 0) self.assertEqual(extension_block.data[-1], 255) byte_values = [3, 0, 0, 0] for byte_value in range(0, 256): byte_values.extend([byte_value]) byte_stream = bytes(bytearray(byte_values)) with self.assertRaises(errors.MappingError): data_type_map.MapByteStream(byte_stream) @test_lib.skipUnlessHasTestFile(['structure_with_stream.yaml']) def testMapByteStreamWithStream(self): """Tests the MapByteStream function with a stream.""" definitions_file = self._GetTestFilePath(['structure_with_stream.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'extension_block') 
data_type_map = data_maps.StructureMap(data_type_definition) byte_values = [4, 1, 0, 0] for byte_value in range(0, 256): byte_values.extend([byte_value]) byte_stream = bytes(bytearray(byte_values)) extension_block = data_type_map.MapByteStream(byte_stream) self.assertEqual(extension_block.size, 260) self.assertEqual(extension_block.data, byte_stream[4:]) byte_values = [3, 0, 0, 0] for byte_value in range(0, 256): byte_values.extend([byte_value]) byte_stream = bytes(bytearray(byte_values)) with self.assertRaises(errors.MappingError): data_type_map.MapByteStream(byte_stream) @test_lib.skipUnlessHasTestFile(['structure_with_string.yaml']) def testMapByteStreamWithString(self): """Tests the MapByteStream function with a string.""" definitions_file = self._GetTestFilePath(['structure_with_string.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'utf16_string') data_type_map = data_maps.StructureMap(data_type_definition) text_stream = 'dtFabric'.encode('utf-16-le') byte_stream = b''.join([ bytes(bytearray([len(text_stream), 0])), text_stream]) utf16_string = data_type_map.MapByteStream(byte_stream) self.assertEqual(utf16_string.size, len(text_stream)) self.assertEqual(utf16_string.text, 'dtFabric') byte_stream = b''.join([bytes(bytearray([3, 0])), text_stream]) with self.assertRaises(errors.MappingError): data_type_map.MapByteStream(byte_stream) @test_lib.skipUnlessHasTestFile(['string_array.yaml']) def testMapByteStreamWithStringArray(self): """Tests the MapByteStream function with a string array.""" definitions_file = self._GetTestFilePath(['string_array.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'string_array') data_type_map = data_maps.StructureMap(data_type_definition) text_stream1 = 'dtFabric\x00'.encode('ascii') text_stream2 = 
'supports\x00'.encode('ascii') text_stream3 = 'a string array\x00'.encode('ascii') byte_stream = b''.join([ bytes(bytearray([3, 0, 0, 0])), text_stream1, text_stream2, text_stream3]) string_array = data_type_map.MapByteStream(byte_stream) self.assertEqual(string_array.number_of_strings, 3) self.assertEqual(string_array.strings[0], 'dtFabric') self.assertEqual(string_array.strings[1], 'supports') self.assertEqual(string_array.strings[2], 'a string array') byte_stream = b''.join([ bytes(bytearray([3, 0, 0, 0])), text_stream1, text_stream2]) with self.assertRaises(errors.ByteStreamTooSmallError): data_type_map.MapByteStream(byte_stream) data_type_definition = definitions_registry.GetDefinitionByName( 'string_array_with_size') data_type_map = data_maps.StructureMap(data_type_definition) text_stream1 = 'dtFabric\x00'.encode('ascii') text_stream2 = 'supports\x00'.encode('ascii') text_stream3 = 'a string array\x00'.encode('ascii') byte_stream = b''.join([ bytes(bytearray([33, 0, 0, 0])), text_stream1, text_stream2, text_stream3]) string_array = data_type_map.MapByteStream(byte_stream) self.assertEqual(string_array.strings_data_size, 33) self.assertEqual(string_array.strings[0], 'dtFabric') self.assertEqual(string_array.strings[1], 'supports') self.assertEqual(string_array.strings[2], 'a string array') byte_stream = b''.join([ bytes(bytearray([33, 0, 0, 0])), text_stream1, text_stream2]) with self.assertRaises(errors.ByteStreamTooSmallError): data_type_map.MapByteStream(byte_stream) @test_lib.skipUnlessHasTestFile(['structure_with_string.yaml']) def testGetSizeHint(self): """Tests the GetSizeHint function with a string.""" definitions_file = self._GetTestFilePath(['structure_with_string.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'utf16_string') data_type_map = data_maps.StructureMap(data_type_definition) context = data_maps.DataTypeMapContext() text_stream = 
'dtFabric'.encode('utf-16-le') byte_stream = b''.join([ bytes(bytearray([len(text_stream), 0])), text_stream]) size_hint = data_type_map.GetSizeHint(context=context) self.assertEqual(size_hint, 2) with self.assertRaises(errors.ByteStreamTooSmallError): data_type_map.MapByteStream(byte_stream[:size_hint], context=context) size_hint = data_type_map.GetSizeHint(context=context) self.assertEqual(size_hint, 18) @test_lib.skipUnlessHasTestFile(['constant.yaml']) class SemanticDataTypeMapTest(test_lib.BaseTestCase): """Semantic data type map tests.""" def testFoldByteStream(self): """Tests the FoldByteStream function.""" definitions_file = self._GetTestFilePath(['constant.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'maximum_number_of_back_traces') data_type_map = data_maps.SemanticDataTypeMap(data_type_definition) with self.assertRaises(errors.FoldingError): data_type_map.FoldByteStream(1) def testMapByteStream(self): """Tests the MapByteStream function.""" definitions_file = self._GetTestFilePath(['constant.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'maximum_number_of_back_traces') data_type_map = data_maps.SemanticDataTypeMap(data_type_definition) with self.assertRaises(errors.MappingError): data_type_map.MapByteStream(b'\x01\x00\x00\x00') class ConstantMapTest(test_lib.BaseTestCase): """Constant map tests.""" @test_lib.skipUnlessHasTestFile(['enumeration.yaml']) class EnumerationMapTest(test_lib.BaseTestCase): """Enumeration map tests.""" def testGetName(self): """Tests the GetName function.""" definitions_file = self._GetTestFilePath(['enumeration.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'object_information_type') 
data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_map = data_maps.EnumerationMap(data_type_definition) name = data_type_map.GetName(2) self.assertEqual(name, 'MiniMutantInformation1') name = data_type_map.GetName(-1) self.assertIsNone(name) @test_lib.skipUnlessHasTestFile(['integer.yaml']) class DataTypeMapFactoryTest(test_lib.BaseTestCase): """Data type map factory tests.""" def testCreateDataTypeMap(self): """Tests the CreateDataTypeMap function.""" definitions_file = self._GetTestFilePath(['integer.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = EmptyDataTypeDefinition('empty') definitions_registry.RegisterDefinition(data_type_definition) factory = data_maps.DataTypeMapFactory(definitions_registry) data_type_map = factory.CreateDataTypeMap('int32le') self.assertIsNotNone(data_type_map) data_type_map = factory.CreateDataTypeMap('empty') self.assertIsNone(data_type_map) data_type_map = factory.CreateDataTypeMap('bogus') self.assertIsNone(data_type_map) def testCreateDataTypeMapByType(self): """Tests the CreateDataTypeMapByType function.""" definitions_file = self._GetTestFilePath(['integer.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('int32le') data_type_map = data_maps.DataTypeMapFactory.CreateDataTypeMapByType( data_type_definition) self.assertIsNotNone(data_type_map) data_type_definition = EmptyDataTypeDefinition('empty') data_type_map = data_maps.DataTypeMapFactory.CreateDataTypeMapByType( data_type_definition) self.assertIsNone(data_type_map) if __name__ == '__main__': unittest.main() dtfabric-20190120/tests/runtime/fabric.py000066400000000000000000000013021342102721300201440ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Tests for the dtFabric helper objects.""" from __future__ import unicode_literals import unittest from dtfabric.runtime import 
fabric from tests import test_lib class DataTypeFabricTest(test_lib.BaseTestCase): """Data type fabric tests.""" @test_lib.skipUnlessHasTestFile(['integer.yaml']) def testInitialize(self): """Tests the __init__ function.""" definitions_file = self._GetTestFilePath(['integer.yaml']) with open(definitions_file, 'rb') as file_object: yaml_definition = file_object.read() factory = fabric.DataTypeFabric(yaml_definition=yaml_definition) self.assertIsNotNone(factory) if __name__ == '__main__': unittest.main() dtfabric-20190120/tests/runtime/runtime.py000066400000000000000000000050351342102721300204100ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Tests for the run-time object.""" from __future__ import unicode_literals import unittest from dtfabric.runtime import runtime from tests import test_lib class StructureValuesClassFactoryTest(test_lib.BaseTestCase): """Structure values class factory tests.""" # pylint: disable=protected-access @test_lib.skipUnlessHasTestFile(['structure.yaml']) def testCreateClassTemplate(self): """Tests the _CreateClassTemplate function.""" definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('point3d') class_template = runtime.StructureValuesClassFactory._CreateClassTemplate( data_type_definition) self.assertIsNotNone(class_template) # TODO: implement error conditions. 
def testIsIdentifier(self): """Tests the _IsIdentifier function.""" result = runtime.StructureValuesClassFactory._IsIdentifier('valid') self.assertTrue(result) result = runtime.StructureValuesClassFactory._IsIdentifier('_valid') self.assertTrue(result) result = runtime.StructureValuesClassFactory._IsIdentifier('valid1') self.assertTrue(result) result = runtime.StructureValuesClassFactory._IsIdentifier('') self.assertFalse(result) result = runtime.StructureValuesClassFactory._IsIdentifier('0invalid') self.assertFalse(result) result = runtime.StructureValuesClassFactory._IsIdentifier('in-valid') self.assertFalse(result) def testValidateDataTypeDefinition(self): """Tests the _ValidateDataTypeDefinition function.""" definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('point3d') runtime.StructureValuesClassFactory._ValidateDataTypeDefinition( data_type_definition) # TODO: implement error conditions. def testCreateClass(self): """Tests the CreateClass function.""" definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('point3d') structure_values_class = runtime.StructureValuesClassFactory.CreateClass( data_type_definition) self.assertIsNotNone(structure_values_class) if __name__ == '__main__': unittest.main() dtfabric-20190120/tests/test_lib.py000066400000000000000000000047471342102721300170600ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Shared test case.""" from __future__ import unicode_literals import os import sys import unittest from dtfabric import reader from dtfabric import registry def skipUnlessHasTestFile(path_segments): # pylint: disable=invalid-name """Decorator to skip a test if the test file does not exist. 
Args: path_segments (list[str]): path segments inside the test data directory. Returns: function: to invoke. """ fail_unless_has_test_file = getattr( unittest, 'fail_unless_has_test_file', False) path = os.path.join('test_data', *path_segments) if fail_unless_has_test_file or os.path.exists(path): return lambda function: function if sys.version_info[0] < 3: path = path.encode('utf-8') # Note that the message should be of type str which is different for # different versions of Python. return unittest.skip('missing test file: {0:s}'.format(path)) class BaseTestCase(unittest.TestCase): """The base test case.""" _TEST_DATA_PATH = os.path.join(os.getcwd(), 'test_data') # Show full diff results, part of TestCase so does not follow our naming # conventions. maxDiff = None def _CreateDefinitionRegistryFromFile(self, path): """Creates a data type definition registry from a file. Args: path (str): path to the data definition file. Returns: DataTypeDefinitionsRegistry: data type definition registry or None on error. """ definitions_registry = registry.DataTypeDefinitionsRegistry() self._FillDefinitionRegistryFromFile(definitions_registry, path) return definitions_registry def _FillDefinitionRegistryFromFile(self, definitions_registry, path): """Fills a data type definition registry from a file. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. path (str): path to the data definition file. """ definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() with open(path, 'rb') as file_object: definitions_reader.ReadFileObject(definitions_registry, file_object) def _GetTestFilePath(self, path_segments): """Retrieves the path of a test file in the test data directory. Args: path_segments (list[str]): path segments inside the test data directory. Returns: str: path of the test file. """ # Note that we need to pass the individual path segments to os.path.join # and not a list. 
return os.path.join(self._TEST_DATA_PATH, *path_segments) dtfabric-20190120/tox.ini000066400000000000000000000010161342102721300150340ustar00rootroot00000000000000[tox] envlist = py2, py3 [testenv] pip_pre = True setenv = PYTHONPATH = {toxinidir} deps = funcsigs ; python_version < '3.0' mock pbr six pytest -rrequirements.txt commands = ./run_tests.py [testenv:py27] pip_pre = True setenv = PYTHONPATH = {toxinidir} deps = coverage funcsigs ; python_version < '3.0' mock pbr six pytest -rrequirements.txt commands = coverage erase coverage run --source=dtfabric --omit="*_test*,*__init__*,*test_lib*" run_tests.py dtfabric-20190120/utils/000077500000000000000000000000001342102721300146635ustar00rootroot00000000000000dtfabric-20190120/utils/__init__.py000066400000000000000000000001061342102721300167710ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Data formats.""" __version__ = '20170423' dtfabric-20190120/utils/check_dependencies.py000077500000000000000000000006461342102721300210310ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- """Script to check for the availability and version of dependencies.""" import sys # Change PYTHONPATH to include dependencies. sys.path.insert(0, '.') import utils.dependencies # pylint: disable=wrong-import-position if __name__ == '__main__': dependency_helper = utils.dependencies.DependencyHelper() if not dependency_helper.CheckDependencies(): sys.exit(1) dtfabric-20190120/utils/dependencies.py000066400000000000000000000311661342102721300176720ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Helper to check for availability and version of dependencies.""" from __future__ import print_function from __future__ import unicode_literals import re try: import ConfigParser as configparser except ImportError: import configparser # pylint: disable=import-error class DependencyDefinition(object): """Dependency definition. Attributes: dpkg_name (str): name of the dpkg package that provides the dependency. 
is_optional (bool): True if the dependency is optional. l2tbinaries_macos_name (str): name of the l2tbinaries macos package that provides the dependency. l2tbinaries_name (str): name of the l2tbinaries package that provides the dependency. maximum_version (str): maximum supported version. minimum_version (str): minimum supported version. name (str): name of (the Python module that provides) the dependency. pypi_name (str): name of the PyPI package that provides the dependency. python2_only (bool): True if the dependency is only supported by Python 2. python3_only (bool): True if the dependency is only supported by Python 3. rpm_name (str): name of the rpm package that provides the dependency. version_property (str): name of the version attribute or function. """ def __init__(self, name): """Initializes a dependency configuration. Args: name (str): name of the dependency. """ super(DependencyDefinition, self).__init__() self.dpkg_name = None self.is_optional = False self.l2tbinaries_macos_name = None self.l2tbinaries_name = None self.maximum_version = None self.minimum_version = None self.name = name self.pypi_name = None self.python2_only = False self.python3_only = False self.rpm_name = None self.version_property = None class DependencyDefinitionReader(object): """Dependency definition reader.""" _VALUE_NAMES = frozenset([ 'dpkg_name', 'is_optional', 'l2tbinaries_macos_name', 'l2tbinaries_name', 'maximum_version', 'minimum_version', 'pypi_name', 'python2_only', 'python3_only', 'rpm_name', 'version_property']) def _GetConfigValue(self, config_parser, section_name, value_name): """Retrieves a value from the config parser. Args: config_parser (ConfigParser): configuration parser. section_name (str): name of the section that contains the value. value_name (str): name of the value. Returns: object: configuration value or None if the value does not exists. 
""" try: return config_parser.get(section_name, value_name) except configparser.NoOptionError: return None def Read(self, file_object): """Reads dependency definitions. Args: file_object (file): file-like object to read from. Yields: DependencyDefinition: dependency definition. """ config_parser = configparser.RawConfigParser() # pylint: disable=deprecated-method # TODO: replace readfp by read_file, check if Python 2 compatible config_parser.readfp(file_object) for section_name in config_parser.sections(): dependency_definition = DependencyDefinition(section_name) for value_name in self._VALUE_NAMES: value = self._GetConfigValue(config_parser, section_name, value_name) setattr(dependency_definition, value_name, value) yield dependency_definition class DependencyHelper(object): """Dependency helper. Attributes: dependencies (dict[str, DependencyDefinition]): dependencies. """ _VERSION_NUMBERS_REGEX = re.compile(r'[0-9.]+') _VERSION_SPLIT_REGEX = re.compile(r'\.|\-') def __init__(self, configuration_file='dependencies.ini'): """Initializes a dependency helper. Args: configuration_file (Optional[str]): path to the dependencies configuration file. """ super(DependencyHelper, self).__init__() self._test_dependencies = {} self.dependencies = {} dependency_reader = DependencyDefinitionReader() with open(configuration_file, 'r') as file_object: for dependency in dependency_reader.Read(file_object): self.dependencies[dependency.name] = dependency dependency = DependencyDefinition('mock') dependency.minimum_version = '0.7.1' dependency.version_property = '__version__' self._test_dependencies['mock'] = dependency def _CheckPythonModule(self, dependency): """Checks the availability of a Python module. Args: dependency (DependencyDefinition): dependency definition. Returns: tuple: consists: bool: True if the Python module is available and conforms to the minimum required version, False otherwise. str: status message. 
""" module_object = self._ImportPythonModule(dependency.name) if not module_object: status_message = 'missing: {0:s}'.format(dependency.name) return False, status_message if not dependency.version_property: return True, dependency.name return self._CheckPythonModuleVersion( dependency.name, module_object, dependency.version_property, dependency.minimum_version, dependency.maximum_version) def _CheckPythonModuleVersion( self, module_name, module_object, version_property, minimum_version, maximum_version): """Checks the version of a Python module. Args: module_object (module): Python module. module_name (str): name of the Python module. version_property (str): version attribute or function. minimum_version (str): minimum version. maximum_version (str): maximum version. Returns: tuple: consists: bool: True if the Python module is available and conforms to the minimum required version, False otherwise. str: status message. """ module_version = None if not version_property.endswith('()'): module_version = getattr(module_object, version_property, None) else: version_method = getattr( module_object, version_property[:-2], None) if version_method: module_version = version_method() if not module_version: status_message = ( 'unable to determine version information for: {0:s}').format( module_name) return False, status_message # Make sure the module version is a string. module_version = '{0!s}'.format(module_version) # Split the version string and convert every digit into an integer. # A string compare of both version strings will yield an incorrect result. # Strip any semantic suffixes such as a1, b1, pre, post, rc, dev. 
module_version = self._VERSION_NUMBERS_REGEX.findall(module_version)[0] if module_version[-1] == '.': module_version = module_version[:-1] try: module_version_map = list( map(int, self._VERSION_SPLIT_REGEX.split(module_version))) except ValueError: status_message = 'unable to parse module version: {0:s} {1:s}'.format( module_name, module_version) return False, status_message if minimum_version: try: minimum_version_map = list( map(int, self._VERSION_SPLIT_REGEX.split(minimum_version))) except ValueError: status_message = 'unable to parse minimum version: {0:s} {1:s}'.format( module_name, minimum_version) return False, status_message if module_version_map < minimum_version_map: status_message = ( '{0:s} version: {1!s} is too old, {2!s} or later required').format( module_name, module_version, minimum_version) return False, status_message if maximum_version: try: maximum_version_map = list( map(int, self._VERSION_SPLIT_REGEX.split(maximum_version))) except ValueError: status_message = 'unable to parse maximum version: {0:s} {1:s}'.format( module_name, maximum_version) return False, status_message if module_version_map > maximum_version_map: status_message = ( '{0:s} version: {1!s} is too recent, {2!s} or earlier ' 'required').format(module_name, module_version, maximum_version) return False, status_message status_message = '{0:s} version: {1!s}'.format(module_name, module_version) return True, status_message def _CheckSQLite3(self): """Checks the availability of sqlite3. Returns: tuple: consists: bool: True if the Python module is available and conforms to the minimum required version, False otherwise. str: status message. """ # On Windows sqlite3 can be provided by both pysqlite2.dbapi2 and # sqlite3. sqlite3 is provided with the Python installation and # pysqlite2.dbapi2 by the pysqlite2 Python module. Typically # pysqlite2.dbapi2 would contain a newer version of sqlite3, hence # we check for its presence first. 
module_name = 'pysqlite2.dbapi2' minimum_version = '3.7.8' module_object = self._ImportPythonModule(module_name) if not module_object: module_name = 'sqlite3' module_object = self._ImportPythonModule(module_name) if not module_object: status_message = 'missing: {0:s}.'.format(module_name) return False, status_message return self._CheckPythonModuleVersion( module_name, module_object, 'sqlite_version', minimum_version, None) def _ImportPythonModule(self, module_name): """Imports a Python module. Args: module_name (str): name of the module. Returns: module: Python module or None if the module cannot be imported. """ try: module_object = list(map(__import__, [module_name]))[0] except ImportError: return None # If the module name contains dots get the upper most module object. if '.' in module_name: for submodule_name in module_name.split('.')[1:]: module_object = getattr(module_object, submodule_name, None) return module_object def _PrintCheckDependencyStatus( self, dependency, result, status_message, verbose_output=True): """Prints the check dependency status. Args: dependency (DependencyDefinition): dependency definition. result (bool): True if the Python module is available and conforms to the minimum required version, False otherwise. status_message (str): status message. verbose_output (Optional[bool]): True if output should be verbose. """ if not result or dependency.is_optional: if dependency.is_optional: status_indicator = '[OPTIONAL]' else: status_indicator = '[FAILURE]' print('{0:s}\t{1:s}'.format(status_indicator, status_message)) elif verbose_output: print('[OK]\t\t{0:s}'.format(status_message)) def CheckDependencies(self, verbose_output=True): """Checks the availability of the dependencies. Args: verbose_output (Optional[bool]): True if output should be verbose. Returns: bool: True if the dependencies are available, False otherwise. 
""" print('Checking availability and versions of dependencies.') check_result = True for module_name, dependency in sorted(self.dependencies.items()): if module_name == 'sqlite3': result, status_message = self._CheckSQLite3() else: result, status_message = self._CheckPythonModule(dependency) if not result and module_name == 'lzma': dependency.name = 'backports.lzma' result, status_message = self._CheckPythonModule(dependency) if not result and not dependency.is_optional: check_result = False self._PrintCheckDependencyStatus( dependency, result, status_message, verbose_output=verbose_output) if check_result and not verbose_output: print('[OK]') print('') return check_result def CheckTestDependencies(self, verbose_output=True): """Checks the availability of the dependencies when running tests. Args: verbose_output (Optional[bool]): True if output should be verbose. Returns: bool: True if the dependencies are available, False otherwise. """ if not self.CheckDependencies(verbose_output=verbose_output): return False print('Checking availability and versions of test dependencies.') check_result = True for dependency in sorted( self._test_dependencies.values(), key=lambda dependency: dependency.name): result, status_message = self._CheckPythonModule(dependency) if not result: check_result = False self._PrintCheckDependencyStatus( dependency, result, status_message, verbose_output=verbose_output) if check_result and not verbose_output: print('[OK]') print('') return check_result dtfabric-20190120/utils/update_version.sh000077500000000000000000000007371342102721300202600ustar00rootroot00000000000000#!/bin/bash # Script to update the version information. 
DATE_VERSION=`date +"%Y%m%d"`; DATE_DPKG=`date -R`; EMAIL_DPKG="Joachim Metz "; sed -i -e "s/^\(__version__ = \)'[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]'$/\1'${DATE_VERSION}'/" dtfabric/__init__.py sed -i -e "s/^\(dtfabric \)([0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-1)/\1(${DATE_VERSION}-1)/" config/dpkg/changelog sed -i -e "s/^\( -- ${EMAIL_DPKG} \).*$/\1${DATE_DPKG}/" config/dpkg/changelog