pax_global_header00006660000000000000000000000064145620472570014526gustar00rootroot0000000000000052 comment=75ef3ca004f6c168cbf55e7f1ba5eaf66b5377d0 dtfabric-20240211/000077500000000000000000000000001456204725700135415ustar00rootroot00000000000000dtfabric-20240211/.github/000077500000000000000000000000001456204725700151015ustar00rootroot00000000000000dtfabric-20240211/.github/workflows/000077500000000000000000000000001456204725700171365ustar00rootroot00000000000000dtfabric-20240211/.github/workflows/test_docker.yml000066400000000000000000000051371456204725700221750ustar00rootroot00000000000000# Run tests on Fedora and Ubuntu Docker images using GIFT CORP and GIFT PPA on commit name: test_docker on: [push] permissions: read-all jobs: test_fedora: runs-on: ubuntu-latest strategy: matrix: version: ['38'] container: image: registry.fedoraproject.org/fedora:${{ matrix.version }} steps: - uses: actions/checkout@v3 - name: Set up container run: | dnf install -y dnf-plugins-core langpacks-en - name: Install dependencies run: | dnf copr -y enable @gift/dev dnf install -y @development-tools python3 python3-devel python3-mock python3-pyyaml python3-setuptools - name: Run tests env: LANG: C.utf8 run: | python3 ./run_tests.py - name: Run end-to-end tests run: | if test -f tests/end-to-end.py; then PYTHONPATH=. 
python3 ./tests/end-to-end.py --debug -c config/end-to-end.ini; fi - name: Build source distribution run: | python3 ./setup.py sdist - name: Build binary distribution run: | python3 ./setup.py bdist - name: Run build and install test run: | python3 ./setup.py build python3 ./setup.py install test_ubuntu: runs-on: ubuntu-latest strategy: matrix: version: ['22.04'] container: image: ubuntu:${{ matrix.version }} steps: - uses: actions/checkout@v3 - name: Set up container env: DEBIAN_FRONTEND: noninteractive run: | apt-get update -q apt-get install -y libterm-readline-gnu-perl locales software-properties-common locale-gen en_US.UTF-8 ln -f -s /usr/share/zoneinfo/UTC /etc/localtime - name: Install dependencies run: | add-apt-repository -y ppa:gift/dev apt-get update -q apt-get install -y build-essential python3 python3-dev python3-distutils python3-mock python3-pip python3-setuptools python3-wheel python3-yaml - name: Run tests env: LANG: en_US.UTF-8 run: | python3 ./run_tests.py - name: Run end-to-end tests env: LANG: en_US.UTF-8 run: | if test -f tests/end-to-end.py; then PYTHONPATH=. 
python3 ./tests/end-to-end.py --debug -c config/end-to-end.ini; fi - name: Update setuptools run: | python3 -m pip install -U setuptools - name: Build source distribution run: | python3 ./setup.py sdist - name: Build binary distribution run: | python3 ./setup.py bdist - name: Run build and install test run: | python3 ./setup.py build python3 ./setup.py install dtfabric-20240211/.github/workflows/test_docs.yml000066400000000000000000000025301456204725700216500ustar00rootroot00000000000000# Run docs tox tests on Ubuntu Docker images using GIFT PPA name: test_docs on: pull_request: branches: - main push: branches: - main permissions: read-all jobs: build: runs-on: ubuntu-latest strategy: matrix: include: - python-version: '3.12' toxenv: 'docs' container: image: ubuntu:22.04 steps: - uses: actions/checkout@v3 - name: Set up container env: DEBIAN_FRONTEND: noninteractive run: | apt-get update -q apt-get install -y libterm-readline-gnu-perl locales software-properties-common locale-gen en_US.UTF-8 ln -f -s /usr/share/zoneinfo/UTC /etc/localtime - name: Install dependencies env: DEBIAN_FRONTEND: noninteractive run: | add-apt-repository -y universe add-apt-repository -y ppa:deadsnakes/ppa add-apt-repository -y ppa:gift/dev apt-get update -q apt-get install -y build-essential git python${{ matrix.python-version }} python${{ matrix.python-version }}-dev python${{ matrix.python-version }}-venv python3-distutils python3-mock python3-pip python3-setuptools python3-yaml - name: Install tox run: | python3 -m pip install tox - name: Run tests env: LANG: en_US.UTF-8 run: | tox -e${{ matrix.toxenv }} dtfabric-20240211/.github/workflows/test_tox.yml000066400000000000000000000100061456204725700215270ustar00rootroot00000000000000# Run tox tests on Ubuntu Docker images using GIFT PPA name: test_tox on: pull_request: branches: - main push: branches: - main permissions: read-all jobs: build: runs-on: ubuntu-latest strategy: matrix: include: - python-version: '3.8' toxenv: 'py38,wheel' - 
python-version: '3.9' toxenv: 'py39,wheel' - python-version: '3.10' toxenv: 'py310,wheel' - python-version: '3.11' toxenv: 'py311,wheel' - python-version: '3.12' toxenv: 'py312,wheel' container: image: ubuntu:22.04 steps: - uses: actions/checkout@v3 - name: Set up container env: DEBIAN_FRONTEND: noninteractive run: | apt-get update -q apt-get install -y libterm-readline-gnu-perl locales software-properties-common locale-gen en_US.UTF-8 ln -f -s /usr/share/zoneinfo/UTC /etc/localtime - name: Install dependencies env: DEBIAN_FRONTEND: noninteractive run: | add-apt-repository -y universe add-apt-repository -y ppa:deadsnakes/ppa add-apt-repository -y ppa:gift/dev apt-get update -q apt-get install -y build-essential git python${{ matrix.python-version }} python${{ matrix.python-version }}-dev python${{ matrix.python-version }}-venv python3-distutils python3-mock python3-pip python3-setuptools python3-yaml - name: Install tox run: | python3 -m pip install tox - name: Run tests env: LANG: en_US.UTF-8 run: | tox -e${{ matrix.toxenv }} coverage: runs-on: ubuntu-latest strategy: matrix: include: - python-version: '3.10' toxenv: 'coverage' container: image: ubuntu:22.04 steps: - uses: actions/checkout@v3 - name: Set up container env: DEBIAN_FRONTEND: noninteractive run: | apt-get update -q apt-get install -y libterm-readline-gnu-perl locales software-properties-common locale-gen en_US.UTF-8 ln -f -s /usr/share/zoneinfo/UTC /etc/localtime - name: Install dependencies env: DEBIAN_FRONTEND: noninteractive run: | add-apt-repository -y universe add-apt-repository -y ppa:deadsnakes/ppa add-apt-repository -y ppa:gift/dev apt-get update -q apt-get install -y build-essential git python${{ matrix.python-version }} python${{ matrix.python-version }}-dev python${{ matrix.python-version }}-venv python3-distutils python3-mock python3-pip python3-setuptools python3-yaml - name: Install tox run: | python3 -m pip install tox - name: Run tests with coverage env: LANG: en_US.UTF-8 run: | tox 
-e${{ matrix.toxenv }} - name: Upload coverage report to Codecov uses: codecov/codecov-action@v3 lint: runs-on: ubuntu-latest strategy: matrix: include: - python-version: '3.12' toxenv: 'lint' container: image: ubuntu:22.04 steps: - uses: actions/checkout@v3 - name: Set up container env: DEBIAN_FRONTEND: noninteractive run: | apt-get update -q apt-get install -y libterm-readline-gnu-perl locales software-properties-common locale-gen en_US.UTF-8 ln -f -s /usr/share/zoneinfo/UTC /etc/localtime - name: Install dependencies env: DEBIAN_FRONTEND: noninteractive run: | add-apt-repository -y universe add-apt-repository -y ppa:deadsnakes/ppa add-apt-repository -y ppa:gift/dev apt-get update -q apt-get install -y build-essential git python${{ matrix.python-version }} python${{ matrix.python-version }}-dev python${{ matrix.python-version }}-venv python3-distutils python3-mock python3-pip python3-setuptools python3-yaml - name: Install tox run: | python3 -m pip install tox - name: Run linter env: LANG: en_US.UTF-8 run: | tox -e${{ matrix.toxenv }} dtfabric-20240211/.gitignore000066400000000000000000000004631456204725700155340ustar00rootroot00000000000000# Files to ignore by git. # Back-up files *~ *.swp # Generic auto-generated build files *.pyc *.pyo # Specific auto-generated build files /.tox /__pycache__ /build /dtfabric.egg-info /dist # Code review files /.review # Type check files /.mypy_cache # Test coverage files .coverage tests-coverage.txt dtfabric-20240211/.pylintrc000066400000000000000000000551471456204725700154220ustar00rootroot00000000000000# Pylint 3.0.x configuration file # # This file is generated by l2tdevtools update-dependencies.py, any dependency # related changes should be made in dependencies.ini. [MAIN] # Analyse import fallback blocks. This can be used to support both Python 2 and # 3 compatible code, which means that the block might have code that exists # only in one or another interpreter, leading to false positives when analysed. 
analyse-fallback-blocks=no # Clear in-memory caches upon conclusion of linting. Useful if running pylint # in a server-like mode. clear-cache-post-run=no # Load and enable all available extensions. Use --list-extensions to see a list # all available extensions. #enable-all-extensions= # In error mode, messages with a category besides ERROR or FATAL are # suppressed, and no reports are done by default. Error mode is compatible with # disabling specific errors. #errors-only= # Always return a 0 (non-error) status code, even if lint errors are found. # This is primarily useful in continuous integration scripts. #exit-zero= # A comma-separated list of package or module names from where C extensions may # be loaded. Extensions are loading into the active Python interpreter and may # run arbitrary code. extension-pkg-allow-list= # A comma-separated list of package or module names from where C extensions may # be loaded. Extensions are loading into the active Python interpreter and may # run arbitrary code. (This is an alternative name to extension-pkg-allow-list # for backward compatibility.) extension-pkg-whitelist= # Return non-zero exit code if any of these messages/categories are detected, # even if score is above --fail-under value. Syntax same as enable. Messages # specified are enabled, while categories only check already-enabled messages. fail-on= # Specify a score threshold under which the program will exit with error. fail-under=10 # Interpret the stdin as a python script, whose filename needs to be passed as # the module_or_package argument. #from-stdin= # Files or directories to be skipped. They should be base names, not paths. ignore=CVS # Add files or directories matching the regular expressions patterns to the # ignore-list. The regex matches against paths and can be in Posix or Windows # format. Because '\\' represents the directory delimiter on Windows systems, # it can't be used as an escape character. 
ignore-paths= # Files or directories matching the regular expression patterns are skipped. # The regex matches against base names, not paths. The default value ignores # Emacs file locks ignore-patterns=^\.# # List of module names for which member attributes should not be checked # (useful for modules/projects where namespaces are manipulated during runtime # and thus existing member attributes cannot be deduced by static analysis). It # supports qualified module names, as well as Unix pattern matching. ignored-modules= # Python code to execute, usually for sys.path manipulation such as # pygtk.require(). #init-hook= # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the # number of processors available to use, and will cap the count on Windows to # avoid hangs. jobs=1 # Control the amount of potential inferred values when inferring a single # object. This can help the performance when dealing with large functions or # complex, nested conditions. limit-inference-results=100 # List of plugins (as comma separated values of python module names) to load, # usually to register additional checkers. load-plugins=pylint.extensions.docparams # Pickle collected data for later comparisons. persistent=yes # Minimum Python version to use for version dependent checks. Will default to # the version used to run pylint. py-version=3.12 # Discover python modules and packages in the file system subtree. # recursive=no recursive=yes # Add paths to the list of the source roots. Supports globbing patterns. The # source root is an absolute path or a path relative to the current working # directory used to determine a package namespace for modules located under the # source root. source-roots= # When enabled, pylint would attempt to guess common misconfiguration and emit # user-friendly hints instead of false-positive error messages. suggestion-mode=yes # Allow loading of arbitrary C extensions. 
Extensions are imported into the # active Python interpreter and may run arbitrary code. unsafe-load-any-extension=no # In verbose mode, extra non-checker-related info will be displayed. #verbose= [BASIC] # Naming style matching correct argument names. argument-naming-style=snake_case # Regular expression matching correct argument names. Overrides argument- # naming-style. If left empty, argument names will be checked with the set # naming style. #argument-rgx= argument-rgx=(([a-z][a-z0-9_]*)|(_[a-z0-9_]*))$ # Naming style matching correct attribute names. attr-naming-style=snake_case # Regular expression matching correct attribute names. Overrides attr-naming- # style. If left empty, attribute names will be checked with the set naming # style. #attr-rgx= attr-rgx=(([a-z][a-z0-9_]*)|(_[a-z0-9_]*))$ # Bad variable names which should always be refused, separated by a comma. bad-names=foo, bar, baz, toto, tutu, tata # Bad variable names regexes, separated by a comma. If names match any regex, # they will always be refused bad-names-rgxs= # Naming style matching correct class attribute names. class-attribute-naming-style=any # Regular expression matching correct class attribute names. Overrides class- # attribute-naming-style. If left empty, class attribute names will be checked # with the set naming style. #class-attribute-rgx= class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]*|(__.*__))$ # Naming style matching correct class constant names. class-const-naming-style=UPPER_CASE # Regular expression matching correct class constant names. Overrides class- # const-naming-style. If left empty, class constant names will be checked with # the set naming style. #class-const-rgx= # Naming style matching correct class names. class-naming-style=PascalCase # Regular expression matching correct class names. Overrides class-naming- # style. If left empty, class names will be checked with the set naming style. 
#class-rgx= class-rgx=[A-Z_][a-zA-Z0-9]+$ # Naming style matching correct constant names. const-naming-style=UPPER_CASE # Regular expression matching correct constant names. Overrides const-naming- # style. If left empty, constant names will be checked with the set naming # style. #const-rgx= const-rgx=(([a-zA-Z_][a-zA-Z0-9_]*)|(__.*__))$ # Minimum line length for functions/classes that require docstrings, shorter # ones are exempt. docstring-min-length=-1 # Naming style matching correct function names. function-naming-style=snake_case # Regular expression matching correct function names. Overrides function- # naming-style. If left empty, function names will be checked with the set # naming style. #function-rgx= function-rgx=[A-Z_][a-zA-Z0-9_]*$ # Good variable names which should always be accepted, separated by a comma. good-names=i, j, k, ex, Run, _ # Good variable names regexes, separated by a comma. If names match any regex, # they will always be accepted good-names-rgxs= # Include a hint for the correct naming format with invalid-name. include-naming-hint=no # Naming style matching correct inline iteration names. inlinevar-naming-style=any # Regular expression matching correct inline iteration names. Overrides # inlinevar-naming-style. If left empty, inline iteration names will be checked # with the set naming style. #inlinevar-rgx= inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ # Naming style matching correct method names. method-naming-style=snake_case # Regular expression matching correct method names. Overrides method-naming- # style. If left empty, method names will be checked with the set naming style. #method-rgx= method-rgx=(test|[A-Z_])[a-zA-Z0-9_]*$ # Naming style matching correct module names. module-naming-style=snake_case # Regular expression matching correct module names. Overrides module-naming- # style. If left empty, module names will be checked with the set naming style. 
#module-rgx= module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ # Colon-delimited sets of names that determine each other's naming style when # the name regexes allow several styles. name-group= # Regular expression which should only match function or class names that do # not require a docstring. no-docstring-rgx=^_ # List of decorators that produce properties, such as abc.abstractproperty. Add # to this list to register other decorators that produce valid properties. # These decorators are taken in consideration only for invalid-name. property-classes=abc.abstractproperty # Regular expression matching correct type alias names. If left empty, type # alias names will be checked with the set naming style. #typealias-rgx= # Regular expression matching correct type variable names. If left empty, type # variable names will be checked with the set naming style. #typevar-rgx= # Naming style matching correct variable names. variable-naming-style=snake_case # Regular expression matching correct variable names. Overrides variable- # naming-style. If left empty, variable names will be checked with the set # naming style. #variable-rgx= variable-rgx=(([a-z][a-z0-9_]*)|(_[a-z0-9_]*))$ [CLASSES] # Warn about protected attribute access inside special methods check-protected-access-in-special-methods=no # List of method names used to declare (i.e. assign) instance attributes. defining-attr-methods=__init__, __new__, setUp, asyncSetUp, __post_init__ # List of member names, which should be excluded from the protected access # warning. exclude-protected=_asdict,_fields,_replace,_source,_make,os._exit # List of valid names for the first argument in a class method. valid-classmethod-first-arg=cls # List of valid names for the first argument in a metaclass class method. 
# valid-metaclass-classmethod-first-arg=mcs valid-metaclass-classmethod-first-arg=cls [DESIGN] # List of regular expressions of class ancestor names to ignore when counting # public methods (see R0903) exclude-too-few-public-methods= # List of qualified class names to ignore when counting class parents (see # R0901) ignored-parents= # Maximum number of arguments for function / method. # max-args=5 max-args=10 # Maximum number of attributes for a class (see R0902). max-attributes=7 # Maximum number of boolean expressions in an if statement (see R0916). max-bool-expr=5 # Maximum number of branch for function / method body. max-branches=12 # Maximum number of locals for function / method body. max-locals=15 # Maximum number of parents for a class (see R0901). max-parents=7 # Maximum number of public methods for a class (see R0904). max-public-methods=20 # Maximum number of return / yield for function / method body. max-returns=6 # Maximum number of statements in function / method body. max-statements=50 # Minimum number of public methods for a class (see R0903). min-public-methods=2 [EXCEPTIONS] # Exceptions that will emit a warning when caught. overgeneral-exceptions=builtins.BaseException,builtins.Exception [FORMAT] # Expected format of line ending, e.g. empty (any line ending), LF or CRLF. expected-line-ending-format= # Regexp for a line that is allowed to be longer than the limit. ignore-long-lines=^\s*(# )??$ # Number of spaces of indent required inside a hanging or continued line. indent-after-paren=4 # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 # tab). # indent-string=' ' indent-string=' ' # Maximum number of characters on a single line. # max-line-length=100 max-line-length=80 # Maximum number of lines in a module. max-module-lines=1000 # Allow the body of a class to be on the same line as the declaration if body # contains single statement. 
single-line-class-stmt=no # Allow the body of an if to be on the same line as the test if there is no # else. single-line-if-stmt=no [IMPORTS] # List of modules that can be imported at any level, not just the top level # one. allow-any-import-level= # Allow explicit reexports by alias from a package __init__. allow-reexport-from-package=no # Allow wildcard imports from modules that define __all__. allow-wildcard-with-all=no # Deprecated modules which should not be used, separated by a comma. deprecated-modules= # Output a graph (.gv or any supported image format) of external dependencies # to the given file (report RP0402 must not be disabled). ext-import-graph= # Output a graph (.gv or any supported image format) of all (i.e. internal and # external) dependencies to the given file (report RP0402 must not be # disabled). import-graph= # Output a graph (.gv or any supported image format) of internal dependencies # to the given file (report RP0402 must not be disabled). int-import-graph= # Force import order to recognize a module as part of the standard # compatibility libraries. known-standard-library= # Force import order to recognize a module as part of a third party library. known-third-party=enchant # Couples of modules and preferred modules, separated by a comma. preferred-modules= [LOGGING] # The type of string formatting that logging methods do. `old` means using % # formatting, `new` is for `{}` formatting. logging-format-style=old # Logging modules to check that the string format arguments are in logging # function parameter format. logging-modules=logging [MESSAGES CONTROL] # Only show warnings with the listed confidence levels. Leave empty to show # all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, # UNDEFINED. confidence=HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, UNDEFINED # Disable the message, report, category or checker with the given id(s). 
You # can either give multiple identifiers separated by comma (,) or put this # option multiple times (only on the command line, not in the configuration # file where it should appear only once). You can also use "--disable=all" to # disable everything first and then re-enable specific checks. For example, if # you want to run only the similarities checker, you can use "--disable=all # --enable=similarities". If you want to run only the classes checker, but have # no Warning level messages displayed, use "--disable=all --enable=classes # --disable=W". disable=assignment-from-none, bad-inline-option, consider-using-f-string, deprecated-pragma, duplicate-code, file-ignored, fixme, locally-disabled, logging-format-interpolation, logging-fstring-interpolation, missing-param-doc, raise-missing-from, raw-checker-failed, super-with-arguments, suppressed-message, too-few-public-methods, too-many-ancestors, too-many-boolean-expressions, too-many-branches, too-many-instance-attributes, too-many-lines, too-many-locals, too-many-nested-blocks, too-many-public-methods, too-many-return-statements, too-many-statements, unsubscriptable-object, use-implicit-booleaness-not-comparison-to-string, use-implicit-booleaness-not-comparison-to-zero, useless-object-inheritance, useless-suppression, use-symbolic-message-instead # Enable the message, report, category or checker with the given id(s). You can # either give multiple identifier separated by comma (,) or put this option # multiple time (only on the command line, not in the configuration file where # it should appear only once). See also the "--disable" option for examples. enable=c-extension-no-member [METHOD_ARGS] # List of qualified names (i.e., library.method) which require a timeout # parameter e.g. 
'requests.api.get,requests.api.post' timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request [MISCELLANEOUS] # List of note tags to take in consideration, separated by a comma. notes=FIXME, XXX, TODO # Regular expression of note tags to take in consideration. notes-rgx= [REFACTORING] # Maximum number of nested blocks for function / method body max-nested-blocks=5 # Complete name of functions that never returns. When checking for # inconsistent-return-statements if a never returning function is called then # it will be considered as an explicit return statement and no message will be # printed. never-returning-functions=sys.exit,argparse.parse_error [REPORTS] # Python expression which should return a score less than or equal to 10. You # have access to the variables 'fatal', 'error', 'warning', 'refactor', # 'convention', and 'info' which contain the number of messages in each # category, as well as 'statement' which is the total number of statements # analyzed. This score is used by the global evaluation report (RP0004). evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)) # Template used to display messages. This is a python new-style format string # used to format the message information. See doc for all details. msg-template= # Set the output format. Available formats are: text, parseable, colorized, # json2 (improved json format), json (old json format) and msvs (visual # studio). You can also give a reporter class, e.g. # mypackage.mymodule.MyReporterClass. #output-format= # Tells whether to display a full report or only the messages. reports=no # Activate the evaluation score. 
# score=yes score=no [SIMILARITIES] # Comments are removed from the similarity computation ignore-comments=yes # Docstrings are removed from the similarity computation ignore-docstrings=yes # Imports are removed from the similarity computation ignore-imports=yes # Signatures are removed from the similarity computation ignore-signatures=yes # Minimum lines number of a similarity. min-similarity-lines=4 [SPELLING] # Limits count of emitted suggestions for spelling mistakes. max-spelling-suggestions=4 # Spelling dictionary name. Available dictionaries: en_AG (hunspell), en_AU # (hunspell), en_BS (hunspell), en_BW (hunspell), en_BZ (hunspell), en_CA # (hunspell), en_DK (hunspell), en_GB (hunspell), en_GH (hunspell), en_HK # (hunspell), en_IE (hunspell), en_IN (hunspell), en_JM (hunspell), en_MW # (hunspell), en_NA (hunspell), en_NG (hunspell), en_NZ (hunspell), en_PH # (hunspell), en_SG (hunspell), en_TT (hunspell), en_US (hunspell), en_ZA # (hunspell), en_ZM (hunspell), en_ZW (hunspell). spelling-dict= # List of comma separated words that should be considered directives if they # appear at the beginning of a comment and should not be checked. spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy: # List of comma separated words that should not be checked. spelling-ignore-words= # A path to a file that contains the private dictionary; one word per line. spelling-private-dict-file= # Tells whether to store unknown words to the private dictionary (see the # --spelling-private-dict-file option) instead of raising a message. spelling-store-unknown-words=no [STRING] # This flag controls whether inconsistent-quotes generates a warning when the # character used as a quote delimiter is used inconsistently within a module. check-quote-consistency=no # This flag controls whether the implicit-str-concat should generate a warning # on implicit string concatenation in sequences defined over several lines. 
check-str-concat-over-line-jumps=no [TYPECHECK] # List of decorators that produce context managers, such as # contextlib.contextmanager. Add to this list to register other decorators that # produce valid context managers. contextmanager-decorators=contextlib.contextmanager # List of members which are set dynamically and missed by pylint inference # system, and so shouldn't trigger E1101 when accessed. Python regular # expressions are accepted. generated-members= # Tells whether to warn about missing members when the owner of the attribute # is inferred to be None. ignore-none=yes # This flag controls whether pylint should warn about no-member and similar # checks whenever an opaque object is returned when inferring. The inference # can return multiple potential results while evaluating a Python object, but # some branches might not be evaluated, which results in partial inference. In # that case, it might be useful to still emit no-member and other checks for # the rest of the inferred objects. ignore-on-opaque-inference=yes # List of symbolic message names to ignore for Mixin members. ignored-checks-for-mixins=no-member, not-async-context-manager, not-context-manager, attribute-defined-outside-init # List of class names for which member attributes should not be checked (useful # for classes with dynamically set attributes). This supports the use of # qualified names. ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace # Show a hint with possible names when a member name was not found. The aspect # of finding the hint is based on edit distance. missing-member-hint=yes # The minimum edit distance a name should have in order to be considered a # similar match for a missing member name. missing-member-hint-distance=1 # The total number of similar names that should be taken in consideration when # showing a hint for a missing member. missing-member-max-choices=1 # Regex pattern to define which classes are considered mixins. 
mixin-class-rgx=.*[Mm]ixin # List of decorators that change the signature of a decorated function. signature-mutators= [VARIABLES] # List of additional names supposed to be defined in builtins. Remember that # you should avoid defining new builtins when possible. additional-builtins= # Tells whether unused global variables should be treated as a violation. allow-global-unused-variables=yes # List of names allowed to shadow builtins allowed-redefined-builtins= # List of strings which can identify a callback function by name. A callback # name must start or end with one of those strings. callbacks=cb_, _cb # A regular expression matching the name of dummy variables (i.e. expected to # not be used). dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ # Argument names that match this expression will be ignored. ignored-argument-names=_.*|^ignored_|^unused_ # Tells whether we should check for unused import in __init__ files. init-import=no # List of qualified module names which can have objects that can redefine # builtins. redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io dtfabric-20240211/.yamllint.yaml000066400000000000000000000002251456204725700163330ustar00rootroot00000000000000extends: default rules: line-length: disable indentation: spaces: consistent indent-sequences: false check-multi-line-strings: true dtfabric-20240211/ACKNOWLEDGEMENTS000066400000000000000000000001341456204725700160140ustar00rootroot00000000000000Acknowledgements: dtfabric Copyright (c) 2016-2022, Joachim Metz dtfabric-20240211/AUTHORS000066400000000000000000000004111456204725700146050ustar00rootroot00000000000000# Names should be added to this file with this pattern: # # For individuals: # Name (email address) # # For organizations: # Organization (fnmatch pattern) # # See python fnmatch module documentation for more information. 
Joachim Metz (joachim.metz@gmail.com) dtfabric-20240211/LICENSE000066400000000000000000000261361456204725700145560ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). 
"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the 
Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. 
Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. 
To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. dtfabric-20240211/MANIFEST.in000066400000000000000000000011201456204725700152710ustar00rootroot00000000000000include AUTHORS LICENSE README include dependencies.ini run_tests.py utils/__init__.py utils/dependencies.py include utils/check_dependencies.py include requirements.txt test_requirements.txt include run_tests.py exclude .gitignore exclude *.pyc recursive-include config * recursive-exclude dtfabric *.pyc recursive-include scripts *.py recursive-exclude scripts *.pyc recursive-include test_data * # The test scripts are not required in a binary distribution package they # are considered source distribution files and excluded in find_package() # in setup.py. recursive-include tests *.py dtfabric-20240211/README000066400000000000000000000003031456204725700144150ustar00rootroot00000000000000Data types fabric (dtFabric) is a YAML-based definition language to specify format and data types. 
For more information see: * Project documentation: https://dtfabric.readthedocs.io/en/latest dtfabric-20240211/appveyor.yml000066400000000000000000000033351456204725700161350ustar00rootroot00000000000000environment: matrix: - DESCRIPTION: "Run tests on Windows with 32-bit Python 3.12" MACHINE_TYPE: "x86" APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2022 PYTHON: "C:\\Python312" PYTHON_VERSION: "3.12" L2TBINARIES_TRACK: "dev" TARGET: tests - DESCRIPTION: "Run tests on Windows with 64-bit Python 3.12" MACHINE_TYPE: "amd64" APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2022 PYTHON: "C:\\Python312-x64" PYTHON_VERSION: "3.12" L2TBINARIES_TRACK: "dev" TARGET: tests - DESCRIPTION: "Build wheel on Windows with 32-bit Python 3.12" MACHINE_TYPE: "amd64" APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2022 PYTHON: "C:\\Python312-x64" PYTHON_VERSION: "3.12" L2TBINARIES_TRACK: "dev" TARGET: wheel - DESCRIPTION: "Build wheel on Windows with 64-bit Python 3.12" MACHINE_TYPE: "amd64" APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2022 PYTHON: "C:\\Python312-x64" PYTHON_VERSION: "3.12" L2TBINARIES_TRACK: "dev" TARGET: wheel - DESCRIPTION: "Run tests on Mac OS with Python 3.12" APPVEYOR_BUILD_WORKER_IMAGE: macos-monterey HOMEBREW_NO_INSTALL_CLEANUP: 1 TARGET: tests install: - cmd: "%PYTHON%\\python.exe -m pip install -U build pip setuptools twine wheel" - ps: If ($isWindows) { .\config\appveyor\install.ps1 } - sh: config/appveyor/install.sh build_script: - cmd: IF [%TARGET%]==[wheel] ( "%PYTHON%\\python.exe" -m build --wheel ) test_script: - cmd: IF [%TARGET%]==[tests] ( "%PYTHON%\\python.exe" run_tests.py && IF EXIST "tests\\end-to-end.py" ( set PYTHONPATH=. 
&& "%PYTHON%\\python.exe" "tests\\end-to-end.py" --debug -c "config\\end-to-end.ini" ) ) - sh: config/appveyor/runtests.sh artifacts: - path: dist\*.whl dtfabric-20240211/config/000077500000000000000000000000001456204725700150065ustar00rootroot00000000000000dtfabric-20240211/config/appveyor/000077500000000000000000000000001456204725700166535ustar00rootroot00000000000000dtfabric-20240211/config/appveyor/install.ps1000066400000000000000000000014511456204725700207470ustar00rootroot00000000000000# Script to set up tests on AppVeyor Windows. $Dependencies = "PyYAML mock" If ($Dependencies.Length -gt 0) { $Dependencies = ${Dependencies} -split " " $Output = Invoke-Expression -Command "git clone https://github.com/log2timeline/l2tdevtools.git ..\l2tdevtools 2>&1" | %{ "$_" } Write-Host (${Output} | Out-String) If ($env:APPVEYOR_REPO_BRANCH -eq "main") { $Track = "stable" } Else { $Track = $env:APPVEYOR_REPO_BRANCH } New-Item -ItemType "directory" -Name "dependencies" $env:PYTHONPATH = "..\l2tdevtools" $Output = Invoke-Expression -Command "& '${env:PYTHON}\python.exe' ..\l2tdevtools\tools\update.py --download-directory dependencies --machine-type ${env:MACHINE_TYPE} --track ${env:L2TBINARIES_TRACK} ${Dependencies} 2>&1" | %{ "$_" } Write-Host (${Output} | Out-String) } dtfabric-20240211/config/appveyor/install.sh000077500000000000000000000001751456204725700206630ustar00rootroot00000000000000# Script to set up tests on AppVeyor MacOS. set -e brew update -q brew install -q gettext gnu-sed python@3.12 tox || true dtfabric-20240211/config/appveyor/runtests.sh000077500000000000000000000011541456204725700211020ustar00rootroot00000000000000#!/bin/sh # Script to run tests # Set the following environment variables to build libyal with gettext. export CPPFLAGS="-I/usr/local/include -I/usr/local/opt/gettext/include ${CPPFLAGS}"; export LDFLAGS="-L/usr/local/lib -L/usr/local/opt/gettext/lib ${LDFLAGS}"; # Set the following environment variables to build pycrypto and yara-python. 
export CPPFLAGS="-I/usr/local/opt/openssl@1.1/include ${CPPFLAGS}"; export LDFLAGS="-L/usr/local/opt/openssl@1.1/lib ${LDFLAGS}"; # Set the following environment variables to ensure tox can find Python 3.12. export PATH="/usr/local/opt/python@3.12/bin:${PATH}"; tox -e py312 dtfabric-20240211/config/dpkg/000077500000000000000000000000001456204725700157335ustar00rootroot00000000000000dtfabric-20240211/config/dpkg/changelog000066400000000000000000000002141456204725700176020ustar00rootroot00000000000000dtfabric (20240211-1) unstable; urgency=low * Auto-generated -- Joachim Metz Sun, 11 Feb 2024 05:34:33 +0100 dtfabric-20240211/config/dpkg/clean000066400000000000000000000000461456204725700167400ustar00rootroot00000000000000dtfabric/*.pyc dtfabric/*/*.pyc *.pyc dtfabric-20240211/config/dpkg/compat000066400000000000000000000000021456204725700171310ustar00rootroot000000000000009 dtfabric-20240211/config/dpkg/control000066400000000000000000000014221456204725700173350ustar00rootroot00000000000000Source: dtfabric Section: python Priority: extra Maintainer: Joachim Metz Build-Depends: debhelper (>= 9), dh-python, python3-all (>= 3.6~), python3-setuptools Standards-Version: 4.1.4 X-Python3-Version: >= 3.6 Homepage: https://github.com/libyal/dtfabric Package: python3-dtfabric Architecture: all Depends: python3-yaml (>= 3.10), ${misc:Depends} Description: Python 3 module of dtFabric dtFabric, or data type fabric, is a project to manage data types and structures, as used in the libyal projects. Package: dtfabric-tools Architecture: all Depends: python3-dtfabric (>= ${binary:Version}), ${misc:Depends} Description: Tools of dtFabric dtFabric, or data type fabric, is a project to manage data types and structures, as used in the libyal projects. 
dtfabric-20240211/config/dpkg/copyright000066400000000000000000000015641456204725700176740ustar00rootroot00000000000000Format: http://dep.debian.net/deps/dep5 Upstream-Name: dtfabric Source: https://github.com/libyal/dtfabric Files: * Copyright: 2016-2017, Joachim Metz License: Apache-2.0 License: Apache-2.0 Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at . http://www.apache.org/licenses/LICENSE-2.0 . Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. . On Debian systems, the complete text of the Apache version 2.0 license can be found in "/usr/share/common-licenses/Apache-2.0". dtfabric-20240211/config/dpkg/dtfabric-data.dirs000066400000000000000000000000241456204725700212770ustar00rootroot00000000000000/usr/share/dtfabric dtfabric-20240211/config/dpkg/dtfabric-data.install000066400000000000000000000000321456204725700220030ustar00rootroot00000000000000data/* usr/share/dtfabric dtfabric-20240211/config/dpkg/python-dtfabric.install000066400000000000000000000002201456204725700224120ustar00rootroot00000000000000usr/lib/python2*/dist-packages/dtfabric/*.py usr/lib/python2*/dist-packages/dtfabric/*/*.py usr/lib/python2*/dist-packages/dtfabric*.egg-info/* dtfabric-20240211/config/dpkg/python3-dtfabric.install000066400000000000000000000002201456204725700224750ustar00rootroot00000000000000usr/lib/python3*/dist-packages/dtfabric/*.py usr/lib/python3*/dist-packages/dtfabric/*/*.py usr/lib/python3*/dist-packages/dtfabric*.egg-info/* dtfabric-20240211/config/dpkg/rules000077500000000000000000000001721456204725700170130ustar00rootroot00000000000000#!/usr/bin/make -f %: dh $@ --buildsystem=pybuild 
--with=python3 .PHONY: override_dh_auto_test override_dh_auto_test: dtfabric-20240211/config/dpkg/source/000077500000000000000000000000001456204725700172335ustar00rootroot00000000000000dtfabric-20240211/config/dpkg/source/format000066400000000000000000000000141456204725700204410ustar00rootroot000000000000003.0 (quilt) dtfabric-20240211/config/pylint/000077500000000000000000000000001456204725700163255ustar00rootroot00000000000000dtfabric-20240211/config/pylint/spelling-private-dict000066400000000000000000000005521456204725700224600ustar00rootroot00000000000000apidoc argparse args backports bool boolean config datatypedefinition datatypemap datatypemapcontext deregisters dev docstrings docutils dpkg dtfabric enumerationvalue filenames linkcheck lzma macos md membersectiondefinition metadata msi os pre py pygments rc readfp readthedocs runtime sdist stdin str struct sys toctree tuple unresolvable urls uuid validator dtfabric-20240211/dependencies.ini000066400000000000000000000002271456204725700166710ustar00rootroot00000000000000[yaml] dpkg_name: python3-yaml l2tbinaries_name: PyYAML minimum_version: 3.10 pypi_name: PyYAML rpm_name: python3-pyyaml version_property: __version__ dtfabric-20240211/docs/000077500000000000000000000000001456204725700144715ustar00rootroot00000000000000dtfabric-20240211/docs/conf.py000066400000000000000000000117531456204725700157770ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Sphinx build configuration file.""" import os import sys from sphinx.ext import apidoc from docutils import nodes from docutils import transforms # Change PYTHONPATH to include dtfabric module and dependencies. sys.path.insert(0, os.path.abspath('..')) import dtfabric # pylint: disable=wrong-import-position import utils.dependencies # pylint: disable=wrong-import-position # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. 
needs_sphinx = '2.0.1' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'recommonmark', 'sphinx.ext.autodoc', 'sphinx.ext.coverage', 'sphinx.ext.doctest', 'sphinx.ext.napoleon', 'sphinx.ext.viewcode', 'sphinx_markdown_tables', 'sphinx_rtd_theme', ] # We cannot install architecture dependent Python modules on readthedocs, # therefore we mock most imports. pip_installed_modules = set() dependency_helper = utils.dependencies.DependencyHelper( dependencies_file=os.path.join('..', 'dependencies.ini'), test_dependencies_file=os.path.join('..', 'test_dependencies.ini')) modules_to_mock = set(dependency_helper.dependencies.keys()) modules_to_mock = modules_to_mock.difference(pip_installed_modules) autodoc_mock_imports = sorted(modules_to_mock) # Options for the Sphinx Napoleon extension, which reads Google-style # docstrings. napoleon_google_docstring = True napoleon_numpy_docstring = False napoleon_include_init_with_doc = True napoleon_include_private_with_doc = False napoleon_include_special_with_doc = True # General information about the project. # pylint: disable=redefined-builtin project = 'dtFabric' copyright = 'The dtFabric authors' version = dtfabric.__version__ release = dtfabric.__version__ # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The master toctree document. master_doc = 'index' # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'sphinx_rtd_theme' # Output file base name for HTML help builder. 
htmlhelp_basename = 'dtfabricdoc' # -- Options linkcheck ---------------------------------------------------- linkcheck_ignore = [ ] # -- Code to rewrite links for readthedocs -------------------------------- # This function is a Sphinx core event callback, the format of which is detailed # here: https://www.sphinx-doc.org/en/master/extdev/appapi.html#events # pylint: disable=unused-argument def RunSphinxAPIDoc(app): """Runs sphinx-apidoc to auto-generate documentation. Args: app (sphinx.application.Sphinx): Sphinx application. Required by the the Sphinx event callback API. """ current_directory = os.path.abspath(os.path.dirname(__file__)) module_path = os.path.join(current_directory, '..', 'dtfabric') api_directory = os.path.join(current_directory, 'sources', 'api') apidoc.main(['-o', api_directory, module_path, '--force']) class MarkdownLinkFixer(transforms.Transform): """Transform definition to parse .md references to internal pages.""" default_priority = 1000 _URI_PREFIXES = [] def _FixLinks(self, node): """Corrects links to .md files not part of the documentation. Args: node (docutils.nodes.Node): docutils node. Returns: docutils.nodes.Node: docutils node, with correct URIs outside of Markdown pages outside the documentation. """ if isinstance(node, nodes.reference) and 'refuri' in node: reference_uri = node['refuri'] for uri_prefix in self._URI_PREFIXES: if (reference_uri.startswith(uri_prefix) and not ( reference_uri.endswith('.asciidoc') or reference_uri.endswith('.md'))): node['refuri'] = reference_uri + '.md' break return node def _Traverse(self, node): """Traverses the document tree rooted at node. Args: node (docutils.nodes.Node): docutils node. """ self._FixLinks(node) for child_node in node.children: self._Traverse(child_node) # pylint: disable=arguments-differ def apply(self): """Applies this transform on document tree.""" self._Traverse(self.document) # pylint: invalid-name def setup(app): """Called at Sphinx initialization. 
Args: app (sphinx.application.Sphinx): Sphinx application. """ # Triggers sphinx-apidoc to generate API documentation. app.connect('builder-inited', RunSphinxAPIDoc) app.add_config_value( 'recommonmark_config', {'enable_auto_toc_tree': True}, True) app.add_transform(MarkdownLinkFixer) dtfabric-20240211/docs/index.rst000066400000000000000000000010661456204725700163350ustar00rootroot00000000000000Welcome to the dtFabric documentation ======================================== Data types fabric (dtFabric) is a YAML-based definition language to specify format and data types. The source code is available from the `project page `__. .. toctree:: :maxdepth: 2 sources/user/index .. toctree:: :maxdepth: 2 Format specification .. toctree:: :maxdepth: 2 API documentation Indices and tables ================== * :ref:`genindex` * :ref:`modindex` dtfabric-20240211/docs/requirements.txt000066400000000000000000000001661456204725700177600ustar00rootroot00000000000000certifi >= 2023.11.17 docutils Markdown recommonmark sphinx >= 4.1.0 sphinx-markdown-tables sphinx-rtd-theme >= 0.5.1 dtfabric-20240211/docs/sources/000077500000000000000000000000001456204725700161545ustar00rootroot00000000000000dtfabric-20240211/docs/sources/Format-specification.md000066400000000000000000000373321456204725700225540ustar00rootroot00000000000000# Format specification ## Overview Data types fabric (dtFabric) is a YAML-based definition language to specify format and data types. * storage data types, such as integers, characters, structures * semantic data types, such as constants, enumerations * layout data types, such as format, vectors, trees ## Data type definition Attribute name | Attribute type | Required | Description --- | --- | --- | --- aliases | List of strings | No | List of alternative names for the data type description | string | No | Description of the data type name | string | Yes | Name of the data type type | string | Yes | Definition type
See section: [Data type definition types](#data-type-definition-types) urls | List of strings | No | List of URLS that contain more information about the data type ### Data type definition types Identifier | Description --- | --- boolean | Boolean character | Character constant | Constant enumeration | Enumeration floating-point | Floating-point format | Data format metadata
See section: [Data format](#data-format) integer | Integer padding | Alignment padding, only supported as a member definition of a structure data type stream | Stream string | String structure | Structure structure-family | **TODO: add description** union | Union data type uuid | UUID (or GUID) **TODO: consider adding the following types** Identifier | Description --- | --- bit-field | Bit field (or group of bits) fixed-point | Fixed-point data type reference | **TODO: add description** ## Storage data types Storage data types are data types that represent stored (or serialized) values. In addition to the [Data type definition attributes](#data-type-definition) storage data types also define: Attribute name | Attribute type | Required | Description --- | --- | --- | --- attributes | mapping | No | Data type attributes
See section: [Storage data type definition attributes](#storage-data-type-definition-attributes) ### Storage data type definition attributes Attribute name | Attribute type | Required | Description --- | --- | --- | --- byte_order | string | No | Byte-order of the data type
Valid options are: "big-endian", "little-endian", "native"
The default is native --- **NOTE:** middle-endian is a valid byte-ordering but currently not supported. --- ### Fixed-size data types In addition to the [Storage data type definition attributes](#storage-data-type-definition-attributes) fixed-size data types also define the following attributes: Attribute name | Attribute type | Required | Description --- | --- | --- | --- size | integer or string | No | size of data type in number of units or "native" if architecture dependent
The default is "native" units | string | No | units of the size of the data type
The default is bytes #### Boolean A boolean is a data type to represent true-or-false values. ```yaml name: bool32 aliases: [BOOL] type: boolean description: 32-bit boolean type attributes: size: 4 units: bytes false_value: 0 true_value: 1 ``` Boolean data type specfic attributes: Attribute name | Attribute type | Required | Description --- | --- | --- | --- false_value | integer | No | Integer value that represents False
The default is 0 true_value | integer | No | Integer value that represents True
The default is not-set, which represent any value except for the false_value Currently supported size attribute values are: 1, 2 and 4 bytes. #### Character A character is a data type to represent elements of textual strings. ```yaml name: wchar16 aliases: [WCHAR] type: character description: 16-bit wide character type attributes: size: 2 units: bytes ``` Currently supported size attribute values are: 1, 2 and 4 bytes. #### Fixed-point A fixed-point is a data type to represent elements of fixed-point values. **TODO: add example** #### Floating-point A floating-point is a data type to represent elements of floating-point values. ```yaml name: float64 aliases: [double, DOUBLE] type: floating-point description: 64-bit double precision floating-point type attributes: size: 8 units: bytes ``` Currently supported size attribute values are: 4 and 8 bytes. #### Integer An integer is a data type to represent elements of integer values. ```yaml name: int32le aliases: [LONG, LONG32] type: integer description: 32-bit little-endian signed integer type attributes: byte_order: little-endian format: signed size: 4 units: bytes ``` Integer data type specfic attributes: Attribute name | Attribute type | Required | Description --- | --- | --- | --- format | string | No | Signed or unsiged
The default is signed Currently supported size attribute values are: 1, 2, 4 and 8 bytes. #### UUID (or GUID) An UUID (or GUID) is a data type to represent a Globally or Universal unique identifier (GUID or UUID) data types. ```yaml name: known_folder_identifier type: uuid description: Known folder identifier. attributes: byte_order: little-endian ``` Currently supported size attribute values are: 16 bytes. ### Variable-sized data types #### Sequence A sequence is a data type to represent a sequence of individual elements such as an array of integers. ```yaml name: page_numbers type: sequence description: Array of 32-bit page numbers. element_data_type: int32 number_of_elements: 32 ``` Sequence data type specfic attributes: Attribute name | Attribute type | Required | Description --- | --- | --- | --- element_data_type | string | Yes | Data type of sequence element elements_data_size | integer or string | See note | Integer value or expression to determine the data size of the elements in the sequence elements_terminator | integer | See note | element value that indicates the end-of-string number_of_elements | integer or string | See note | Integer value or expression to determine the number of elements in the sequence --- **NOTE:** At least one of the elements attributes: "elements_data_size", "elements_terminator" or "number_of_elements" must be set. As of version 20200621 "elements_terminator" can be set in combination with "elements_data_size" or "number_of_elements". --- **TODO: describe expressions and the map context** #### Stream A stream is a data type to represent a continous sequence of elements such as a byte stream. 
```yaml name: data type: stream element_data_type: byte number_of_elements: data_size ``` Stream data type specfic attributes: Attribute name | Attribute type | Required | Description --- | --- | --- | --- element_data_type | string | Yes | Data type of stream element elements_data_size | integer or string | See note | Integer value or expression to determine the data size of the elements in the stream elements_terminator | integer | See note | element value that indicates the end-of-string number_of_elements | integer or string | See note | Integer value or expression to determine the number of elements in the stream --- **NOTE:** At least one of the elements attributes: "elements_data_size", "elements_terminator" or "number_of_elements" must be set. As of version 20200621 "elements_terminator" can be set in combination with "elements_data_size" or "number_of_elements". --- **TODO: describe expressions and the map context** #### String A string is a data type to represent a continous sequence of elements with a known encoding such as an UTF-16 formatted string. 
```yaml name: utf16le_string_with_size type: string ecoding: utf-16-le element_data_type: wchar16 elements_data_size: string_data_size ``` ```yaml name: utf16le_string_with_terminator type: string ecoding: utf-16-le element_data_type: wchar16 elements_terminator: "\x00\x00" ``` String data type specfic attributes: Attribute name | Attribute type | Required | Description --- | --- | --- | --- encoding | string | Yes | Encoding of the string element_data_type | string | Yes | Data type of string element elements_data_size | integer or string | See note | Integer value or expression to determine the data size of the elements in the string elements_terminator | integer | See note | element value that indicates the end-of-string number_of_elements | integer or string | See note | Integer value or expression to determine the number of elements in the string --- **NOTE:** At least one of the elements attributes: "elements_data_size", "elements_terminator" or "number_of_elements" must be set. As of version 20200621 "elements_terminator" can be set in combination with "elements_data_size" or "number_of_elements". --- **TODO: describe elements_data_size and number_of_elements expressions and the map context** ### Storage data types with members In addition to the [Storage data type definition attributes](#storage-data-type-definition-attributes) storage data types with member also define the following attributes: Attribute name | Attribute type | Required | Description --- | --- | --- | --- members | list | Yes | List of member definitions
See section: [Member definition](#member-definition) #### Member definition A member definition supports the following attributes: Attribute name | Attribute type | Required | Description --- | --- | --- | --- aliases | List of strings | No | List of alternative names for the member condition | string | No | Condition under which the member is condisidered to be present data_type | string | See note | Name of the data type definition of the member description | string | No | Description of the member name | string | See note | Name of the member type | string | See note | Name of the definition type of the member
See section: [Data type definition types](#data-type-definition-types) value | integer or string | See note | Supported value values | List of integers or strings | See note | Supported values --- **NOTE:** The name attribute: "name" must be set for storage data types with members except for the Union type where it is optional. --- --- **NOTE:** One of the type attributes: "data_type" or "type" must be set. The following definition types cannot be directly defined as a member definition: "constant", "enumeration", "format" and "structure". --- **TODO: describe member definition not supporting attributes.** --- **NOTE:** Both the value attributes: "value" and "values" are optional but only one is supported at a time. --- **TODO: describe conditions** #### Structure A structure is a data type to represent a composition of members of other data types. **TODO: add structure size hint?** ```yaml name: point3d aliases: [POINT] type: structure description: Point in 3 dimensional space. attributes: byte_order: little-endian members: - name: x aliases: [XCOORD] data_type: int32 - name: y data_type: int32 - name: z data_type: int32 ``` ```yaml name: sphere3d type: structure description: Sphere in 3 dimensional space. members: - name: number_of_triangles data_type: int32 - name: triangles type: sequence element_data_type: triangle3d number_of_elements: sphere3d.number_of_triangles ``` #### Padding Padding is a member definition to represent (alignment) padding as a byte stream. ```yaml name: padding1 type: padding alignment_size: 8 ``` Padding data type specfic attributes: Attribute name | Attribute type | Required | Description --- | --- | --- | --- alignment_size | integer | Yes | Alignment size Currently supported alignment_size attribute values are: 2, 4, 8 and 16 bytes. --- **NOTE:** The padding is currently considered as required in the data stream. 
--- #### Union **TODO: describe union** ## Semantic types ### Constant A constant is a data type to provide meaning (semantic value) to a single predefined value. The value of a constant is typically not stored in a byte stream but used at compile time. ```yaml name: maximum_number_of_back_traces aliases: [AVRF_MAX_TRACES] type: constant description: Application verifier resource enumeration maximum number of back traces urls: ['https://msdn.microsoft.com/en-us/library/bb432193(v=vs.85).aspx'] value: 13 ``` Constant data type specfic attributes: Attribute name | Attribute type | Required | Description --- | --- | --- | --- value | integer or string | Yes | Integer or string value that the constant represents ### Enumeration An enumeration is a data type to provide meaning (semantic value) to one or more predefined values. ```yaml name: handle_trace_operation_types aliases: [eHANDLE_TRACE_OPERATIONS] type: enumeration description: Application verifier resource enumeration handle trace operation types urls: ['https://msdn.microsoft.com/en-us/library/bb432251(v=vs.85).aspx'] values: - name: OperationDbUnused number: 0 description: Unused - name: OperationDbOPEN number: 1 description: Open (create) handle operation - name: OperationDbCLOSE number: 2 description: Close handle operation - name: OperationDbBADREF number: 3 description: Invalid handle operation ``` Enumeration value attributes: Attribute name | Attribute type | Required | Description --- | --- | --- | --- aliases | list of strings | No | List of alternative names for the enumeration description | string | No | Description of the enumeration value name | string | Yes | Name the enumeration value maps to number | integer | Yes | Number the enumeration value maps to **TODO: add description** ## Layout types ### Data format Attribute name | Attribute type | Required | Description --- | --- | --- | --- attributes | mapping | No | Data type attributes
See section: [Data format attributes](#data-format-attributes) description | string | No | Description of the format layout | mapping | Yes | Format layout definition metadata | mapping | No | Metadata name | string | Yes | Name of the format type | string | Yes | Definition type
See section: [Data type definition types](#data-type-definition-types) urls | List of strings | No | List of URLS that contain more information about the format Example: ```yaml name: mdmp type: format description: Minidump file format urls: ['https://docs.microsoft.com/en-us/windows/win32/debug/minidump-files'] metadata: authors: ['John Doe '] year: 2022 attributes: byte_order: big-endian layout: - data_type: file_header offset: 0 ``` #### Data format attributes Attribute name | Attribute type | Required | Description --- | --- | --- | --- byte_order | string | No | Byte-order of the data type
Valid options are: "big-endian", "little-endian", "native"
The default is native --- **NOTE:** middle-endian is a valid byte-ordering but currently not supported. --- ### Structure family A structure family is a layout type to represent multiple generations (versions) of the same structure. ```yaml name: group_descriptor type: structure-family description: Group descriptor of Extended File System version 2, 3 and 4 base: group_descriptor_base members: - group_descriptor_ext2 - group_descriptor_ext4 ``` The structure members defined in the base structure are exposed at runtime. **TODO:** define behavior if a structure family member does not define a structure member defined in the base structure. ### Structure group A structure group is a layout type to represent a group structures that share a common trait. ```yaml name: bsm_token type: structure-group description: BSM token group base: bsm_token_base identifier: token_type members: - bsm_token_arg32 - bsm_token_arg64 ``` The structure group members are required to define the identifier structure member with its values specific to the group member. Attribute name | Attribute type | Required | Description --- | --- | --- | --- base | string | Yes | Base data type. Note that this must be a structure data type. default | string | None | Default data type as fallback if no corresponding member data type is defined. Note that this must be a structure data type. identifier | string | Yes | Name of the member in the base (structure) data type that identified a (group) member. members | list | Yes | List of (group) member data types. Note that these must be a structure data types. dtfabric-20240211/docs/sources/api/000077500000000000000000000000001456204725700167255ustar00rootroot00000000000000dtfabric-20240211/docs/sources/api/dtfabric.rst000066400000000000000000000021111456204725700212300ustar00rootroot00000000000000dtfabric package ================ Subpackages ----------- .. 
toctree:: :maxdepth: 4 dtfabric.runtime Submodules ---------- dtfabric.data\_types module --------------------------- .. automodule:: dtfabric.data_types :members: :undoc-members: :show-inheritance: dtfabric.decorators module -------------------------- .. automodule:: dtfabric.decorators :members: :undoc-members: :show-inheritance: dtfabric.definitions module --------------------------- .. automodule:: dtfabric.definitions :members: :undoc-members: :show-inheritance: dtfabric.errors module ---------------------- .. automodule:: dtfabric.errors :members: :undoc-members: :show-inheritance: dtfabric.reader module ---------------------- .. automodule:: dtfabric.reader :members: :undoc-members: :show-inheritance: dtfabric.registry module ------------------------ .. automodule:: dtfabric.registry :members: :undoc-members: :show-inheritance: Module contents --------------- .. automodule:: dtfabric :members: :undoc-members: :show-inheritance: dtfabric-20240211/docs/sources/api/dtfabric.runtime.rst000066400000000000000000000015451456204725700227240ustar00rootroot00000000000000dtfabric.runtime package ======================== Submodules ---------- dtfabric.runtime.byte\_operations module ---------------------------------------- .. automodule:: dtfabric.runtime.byte_operations :members: :undoc-members: :show-inheritance: dtfabric.runtime.data\_maps module ---------------------------------- .. automodule:: dtfabric.runtime.data_maps :members: :undoc-members: :show-inheritance: dtfabric.runtime.fabric module ------------------------------ .. automodule:: dtfabric.runtime.fabric :members: :undoc-members: :show-inheritance: dtfabric.runtime.runtime module ------------------------------- .. automodule:: dtfabric.runtime.runtime :members: :undoc-members: :show-inheritance: Module contents --------------- .. 
automodule:: dtfabric.runtime :members: :undoc-members: :show-inheritance: dtfabric-20240211/docs/sources/api/modules.rst000066400000000000000000000000751456204725700211310ustar00rootroot00000000000000dtfabric ======== .. toctree:: :maxdepth: 4 dtfabric dtfabric-20240211/docs/sources/user/000077500000000000000000000000001456204725700171325ustar00rootroot00000000000000dtfabric-20240211/docs/sources/user/Installation-instructions.md000066400000000000000000000031501456204725700246560ustar00rootroot00000000000000# Installation instructions ## pip **Note that using pip outside virtualenv is not recommended since it ignores your systems package manager. If you aren't comfortable debugging package installation issues, this is not the option for you.** Create and activate a virtualenv: ```bash virtualenv dtfabricenv cd dtfabricenv source ./bin/activate ``` Upgrade pip and install dtFabric dependencies: ```bash pip install --upgrade pip pip install dtfabric ``` To deactivate the virtualenv run: ```bash deactivate ``` ## Ubuntu 22.04 LTS To install dtFabric from the [GIFT Personal Package Archive (PPA)](https://launchpad.net/~gift): ```bash sudo add-apt-repository ppa:gift/stable ``` Update and install dtFabric: ```bash sudo apt-get update sudo apt-get install python3-dtfabric ``` ## Windows The [l2tbinaries](https://github.com/log2timeline/l2tbinaries) contains the necessary packages for running dtFabric. l2tbinaries provides the following branches: * master; branch intended for the "packaged release" of dtFabric and dependencies; * staging; branch intended for testing pre-releases of dtFabric; * dev; branch intended for the "development release" of dtFabric; * testing; branch intended for testing newly created packages. The l2tdevtools project provides [an update script](https://github.com/log2timeline/l2tdevtools/wiki/Update-script) to ease the process of keeping the dependencies up to date. The script requires [pywin32](https://github.com/mhammond/pywin32/releases). 
To install the release versions of the dependencies run: ``` set PYTHONPATH=. C:\Python3\python.exe tools\update.py --preset dtfabric ``` dtfabric-20240211/docs/sources/user/index.rst000066400000000000000000000004401456204725700207710ustar00rootroot00000000000000############### Getting started ############### To be able to use dtFabric you first need to install it. There are multiple ways to install dtFabric, check the following instructions for more detail. .. toctree:: :maxdepth: 2 Installation instructions dtfabric-20240211/dtfabric.ini000066400000000000000000000006131456204725700160200ustar00rootroot00000000000000[project] name: dtfabric status: alpha name_description: dtFabric maintainer: Joachim Metz homepage_url: https://github.com/libyal/dtfabric git_url: https://github.com/libyal/dtfabric.git description_short: Data type fabric (dtfabric) description_long: dtFabric, or data type fabric, is a project to manage data types and structures, as used in the libyal projects. dtfabric-20240211/dtfabric/000077500000000000000000000000001456204725700153175ustar00rootroot00000000000000dtfabric-20240211/dtfabric/__init__.py000066400000000000000000000001121456204725700174220ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Data type fabric.""" __version__ = '20240211' dtfabric-20240211/dtfabric/data_types.py000066400000000000000000000763071456204725700200430ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Data type definitions.""" import abc import collections import typing from typing import Dict, List, Optional, Union # pylint: disable=unused-import from dtfabric import definitions class DataTypeDefinition(object): """Data type definition interface. Attributes: aliases (list[str]): aliases. description (str): description. name (str): name. urls (list[str]): URLs. """ # Note that redundant-returns-doc is broken for pylint 1.7.x for abstract # methods. 
# pylint: disable=redundant-returns-doc TYPE_INDICATOR: 'Union[str, None]' = None _IS_COMPOSITE: 'bool' = False def __init__( self, name: 'str', aliases: 'Optional[List[str]]' = None, description: 'Optional[str]' = None, urls: 'Optional[List[str]]' = None) -> 'None': """Initializes a data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. urls (Optional[list[str]]): URLs. """ super(DataTypeDefinition, self).__init__() self.aliases: 'List[str]' = aliases or [] self.description: 'Union[str, None]' = description self.name: 'str' = name self.urls: 'Union[List[str], None]' = urls @abc.abstractmethod def GetByteSize(self) -> 'Union[int, None]': """Retrieves the byte size of the data type definition. Returns: int: data type size in bytes or None if size cannot be determined. """ def IsComposite(self) -> 'bool': """Determines if the data type is composite. A composite data type consists of other data types. Returns: bool: True if the data type is composite, False otherwise. """ return self._IS_COMPOSITE class StorageDataTypeDefinition(DataTypeDefinition): """Storage data type definition interface. Attributes: byte_order (str): byte-order the data type. """ # Note that redundant-returns-doc is broken for pylint 1.7.x for abstract # methods. # pylint: disable=redundant-returns-doc def __init__( self, name: 'str', aliases: 'Optional[List[str]]' = None, description: 'Optional[str]' = None, urls: 'Optional[List[str]]' = None) -> 'None': """Initializes a storage data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. urls (Optional[list[str]]): URLs. """ super(StorageDataTypeDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.byte_order: 'str' = definitions.BYTE_ORDER_NATIVE @abc.abstractmethod def GetByteSize(self) -> 'Union[int, None]': """Retrieves the byte size of the data type definition. 
Returns: int: data type size in bytes or None if size cannot be determined. """ class FixedSizeDataTypeDefinition(StorageDataTypeDefinition): """Fixed-size data type definition. Attributes: size (int|str): size of the data type or SIZE_NATIVE. units (str): units of the size of the data type. """ def __init__( self, name: 'str', aliases: 'Optional[List[str]]' = None, description: 'Optional[str]' = None, urls: 'Optional[List[str]]' = None) -> 'None': """Initializes a fixed-size data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. urls (Optional[list[str]]): URLs. """ super(FixedSizeDataTypeDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.size: 'Union[int, str]' = definitions.SIZE_NATIVE self.units: 'str' = 'bytes' def GetByteSize(self) -> 'Union[int, None]': """Retrieves the byte size of the data type definition. Returns: int: data type size in bytes or None if size cannot be determined. """ if self.size == definitions.SIZE_NATIVE or self.units != 'bytes': return None return typing.cast('Union[int, None]', self.size) class BooleanDefinition(FixedSizeDataTypeDefinition): """Boolean data type definition. Attributes: false_value (int): value of False, None represents any value except that defined by true_value. true_value (int): value of True, None represents any value except that defined by false_value. """ TYPE_INDICATOR: 'Union[str, None]' = definitions.TYPE_INDICATOR_BOOLEAN def __init__( self, name: 'str', aliases: 'Optional[List[str]]' = None, description: 'Optional[str]' = None, false_value: 'int' = 0, urls: 'Optional[List[str]]' = None) -> 'None': """Initializes a boolean data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. false_value (Optional[int]): value that represents false. urls (Optional[list[str]]): URLs. 
""" super(BooleanDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.false_value: 'Union[int, None]' = false_value self.true_value: 'Union[int, None]' = None class CharacterDefinition(FixedSizeDataTypeDefinition): """Character data type definition.""" TYPE_INDICATOR: 'Union[str, None]' = definitions.TYPE_INDICATOR_CHARACTER class FloatingPointDefinition(FixedSizeDataTypeDefinition): """Floating point data type definition.""" TYPE_INDICATOR: 'Union[str, None]' = ( definitions.TYPE_INDICATOR_FLOATING_POINT) class IntegerDefinition(FixedSizeDataTypeDefinition): """Integer data type definition. Attributes: format (str): format of the data type. maximum_value (int): maximum allowed value of the integer data type. minimum_value (int): minimum allowed value of the integer data type. """ TYPE_INDICATOR: 'Union[str, None]' = definitions.TYPE_INDICATOR_INTEGER def __init__( self, name: 'str', aliases: 'Optional[List[str]]' = None, description: 'Optional[str]' = None, maximum_value: 'Optional[int]' = None, minimum_value: 'Optional[int]' = None, urls: 'Optional[List[str]]' = None) -> 'None': """Initializes an integer data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. maximum_value (Optional[int]): maximum allowed value of the integer data type. minimum_value (Optional[int]): minimum allowed value of the integer data type. urls (Optional[list[str]]): URLs. 
""" super(IntegerDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.format: 'str' = definitions.FORMAT_SIGNED self.maximum_value: 'Union[int, None]' = maximum_value self.minimum_value: 'Union[int, None]' = minimum_value class UUIDDefinition(FixedSizeDataTypeDefinition): """UUID (or GUID) data type definition.""" TYPE_INDICATOR: 'Union[str, None]' = definitions.TYPE_INDICATOR_UUID _IS_COMPOSITE: 'bool' = True def __init__( self, name: 'str', aliases: 'Optional[List[str]]' = None, description: 'Optional[str]' = None, urls: 'Optional[List[str]]' = None) -> 'None': """Initializes an UUID data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. urls (Optional[list[str]]): URLs. """ super(UUIDDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.size: 'Union[int, str]' = 16 class PaddingDefinition(StorageDataTypeDefinition): """Padding data type definition. Attributes: alignment_size (int): alignment size. """ # Note that redundant-returns-doc is broken for pylint 1.7.x for abstract # methods. # pylint: disable=redundant-returns-doc TYPE_INDICATOR: 'Union[str, None]' = definitions.TYPE_INDICATOR_PADDING def __init__( self, name: 'str', aliases: 'Optional[List[str]]' = None, alignment_size: 'Optional[int]' = None, description: 'Optional[str]' = None, urls: 'Optional[List[str]]' = None) -> 'None': """Initializes a padding data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. alignment_size (Optional[int]): alignment size. description (Optional[str]): description. urls (Optional[list[str]]): URLs. """ super(PaddingDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.alignment_size: 'Union[int, None]' = alignment_size def GetByteSize(self) -> 'Union[int, None]': """Retrieves the byte size of the data type definition. 
Returns: int: data type size in bytes or None if size cannot be determined. """ return None class ElementSequenceDataTypeDefinition(StorageDataTypeDefinition): """Element sequence data type definition. Attributes: byte_order (str): byte-order the data type. elements_data_size (int): data size of the sequence elements. elements_data_size_expression (str): expression to determine the data size of the sequence elements. element_data_type (str): name of the sequence element data type. element_data_type_definition (DataTypeDefinition): sequence element data type definition. elements_terminator (bytes|int): element value that indicates the end-of-sequence. number_of_elements (int): number of sequence elements. number_of_elements_expression (str): expression to determine the number of sequence elements. """ _IS_COMPOSITE: 'bool' = True def __init__( self, name: 'str', data_type_definition: 'DataTypeDefinition', aliases: 'Optional[List[str]]' = None, data_type: 'Optional[str]' = None, description: 'Optional[str]' = None, urls: 'Optional[List[str]]' = None) -> 'None': """Initializes a sequence data type definition. Args: name (str): name. data_type_definition (DataTypeDefinition): sequence element data type definition. aliases (Optional[list[str]]): aliases. data_type (Optional[str]): name of the sequence element data type. description (Optional[str]): description. urls (Optional[list[str]]): URLs. 
""" super(ElementSequenceDataTypeDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.byte_order: 'str' = getattr( data_type_definition, 'byte_order', definitions.BYTE_ORDER_NATIVE) self.elements_data_size: 'Union[int, None]' = None self.elements_data_size_expression: 'Union[str, None]' = None self.element_data_type: 'Union[str, None]' = data_type self.element_data_type_definition: 'DataTypeDefinition' = ( data_type_definition) self.elements_terminator: 'Union[str, None]' = None self.number_of_elements: 'Union[int, None]' = None self.number_of_elements_expression: 'Union[str, None]' = None def GetByteSize(self) -> 'Union[int, None]': """Retrieves the byte size of the data type definition. Returns: int: data type size in bytes or None if size cannot be determined. """ if not self.element_data_type_definition: return None if self.elements_data_size: return self.elements_data_size if not self.number_of_elements: return None element_byte_size = self.element_data_type_definition.GetByteSize() if not element_byte_size: return None return element_byte_size * self.number_of_elements class SequenceDefinition(ElementSequenceDataTypeDefinition): """Sequence data type definition.""" TYPE_INDICATOR: 'Union[str, None]' = definitions.TYPE_INDICATOR_SEQUENCE class StreamDefinition(ElementSequenceDataTypeDefinition): """Stream data type definition.""" TYPE_INDICATOR: 'Union[str, None]' = definitions.TYPE_INDICATOR_STREAM class StringDefinition(ElementSequenceDataTypeDefinition): """String data type definition. Attributes: encoding (str): string encoding. """ TYPE_INDICATOR: 'Union[str, None]' = definitions.TYPE_INDICATOR_STRING def __init__( self, name: 'str', data_type_definition: 'DataTypeDefinition', aliases: 'Optional[List[str]]' = None, data_type: 'Optional[str]' = None, description: 'Optional[str]' = None, urls: 'Optional[List[str]]' = None) -> 'None': """Initializes a string data type definition. Args: name (str): name. 
data_type_definition (DataTypeDefinition): string element data type definition. aliases (Optional[list[str]]): aliases. data_type (Optional[str]): name of the string element data type. description (Optional[str]): description. urls (Optional[list[str]]): URLs. """ super(StringDefinition, self).__init__( name, data_type_definition, aliases=aliases, data_type=data_type, description=description, urls=urls) self.encoding: 'str' = 'ascii' class DataTypeDefinitionWithMembers(StorageDataTypeDefinition): """Data type definition with members. Attributes: members (list[DataTypeDefinition]): member data type definitions. sections (list[MemberSectionDefinition]): member section definitions. """ # Note that redundant-returns-doc is broken for pylint 1.7.x for abstract # methods. # pylint: disable=redundant-returns-doc _IS_COMPOSITE: 'bool' = True def __init__( self, name: 'str', aliases: 'Optional[List[str]]' = None, description: 'Optional[str]' = None, urls: 'Optional[List[str]]' = None) -> 'None': """Initializes a data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. urls (Optional[list[str]]): URLs. """ super(DataTypeDefinitionWithMembers, self).__init__( name, aliases=aliases, description=description, urls=urls) self._byte_size: 'Union[int, None]' = None self._members_by_name: 'OrderedDict[str, DataTypeDefinition]' = ( collections.OrderedDict()) self.sections: 'List[MemberSectionDefinition]' = [] @property def members(self) -> 'List[DataTypeDefinition]': """Retrieves the member data type definitions. Returns: list[DataTypeDefinition]: member data type definitions. """ return list(self._members_by_name.values()) def AddMemberDefinition( self, member_definition: 'DataTypeDefinition') -> 'None': """Adds a member definition. Args: member_definition (DataTypeDefinition): member data type definition. Raises: KeyError: if a member with the name already exists. 
""" if member_definition.name in self._members_by_name: raise KeyError(f'Member: {member_definition.name:s} already set.') self._byte_size = None self._members_by_name[member_definition.name] = member_definition if self.sections: section_definition = self.sections[-1] section_definition.members.append(member_definition) def AddSectionDefinition( self, section_definition: 'MemberSectionDefinition') -> 'None': """Adds a section definition. Args: section_definition (MemberSectionDefinition): member section definition. """ self.sections.append(section_definition) @abc.abstractmethod def GetByteSize(self) -> 'Union[int, None]': """Retrieves the byte size of the data type definition. Returns: int: data type size in bytes or None if size cannot be determined. """ def GetMemberDefinitionByName( self, name: 'str') -> 'Union[int, DataTypeDefinition]': """Retrieve a specific member definition. Args: name (str): name of the member definition. Returns: DataTypeDefinition: member data type definition or None if not available. """ return self._members_by_name.get(name, None) class MemberDataTypeDefinition(StorageDataTypeDefinition): """Member data type definition. Attributes: byte_order (str): byte-order the data type. condition (str): condition under which the data type applies. member_data_type (str): member data type. member_data_type_definition (DataTypeDefinition): member data type definition. values (list[int|str]): supported values. """ def __init__( self, name: 'str', data_type_definition: 'DataTypeDefinition', aliases: 'Optional[List[str]]' = None, condition: 'Optional[str]' = None, data_type: 'Optional[str]' = None, description: 'Optional[str]' = None, urls: 'Optional[List[str]]' = None, values: 'Optional[List[Union[int, str]]]' = None) -> 'None': """Initializes a member data type definition. Args: name (str): name. data_type_definition (DataTypeDefinition): member data type definition. aliases (Optional[list[str]]): aliases. 
condition (Optional[str]): condition under which the member is considered present. data_type (Optional[str]): member data type. description (Optional[str]): description. urls (Optional[list[str]]): URLs. values (Optional[list[int|str]]): supported values defined. """ super(MemberDataTypeDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.byte_order: 'str' = getattr( data_type_definition, 'byte_order', definitions.BYTE_ORDER_NATIVE) self.condition: 'Union[str, None]' = condition self.member_data_type: 'Union[str, None]' = data_type self.member_data_type_definition: 'DataTypeDefinition' = ( data_type_definition) self.values: 'Union[List[Union[int, str]], None]' = values def GetByteSize(self) -> 'Union[int, None]': """Retrieves the byte size of the data type definition. Returns: int: data type size in bytes or None if size cannot be determined. """ if self.condition or not self.member_data_type_definition: return None return self.member_data_type_definition.GetByteSize() def IsComposite(self) -> 'bool': """Determines if the data type is composite. A composite data type consists of other data types. Returns: bool: True if the data type is composite, False otherwise. """ return bool(self.condition) or bool( self.member_data_type_definition and self.member_data_type_definition.IsComposite()) class MemberSectionDefinition(object): """Member section definition. Attributes: name (str): name of the section. members (list[DataTypeDefinition]): member data type definitions of the section. """ def __init__(self, name: 'str') -> 'None': """Initializes a member section definition. Args: name (str): name. 
""" super(MemberSectionDefinition, self).__init__() self.name: 'str' = name self.members: 'List[DataTypeDefinition]' = [] class StructureDefinition(DataTypeDefinitionWithMembers): """Structure data type definition.""" TYPE_INDICATOR: 'Union[str, None]' = definitions.TYPE_INDICATOR_STRUCTURE def GetByteSize(self) -> 'Union[int, None]': """Retrieves the byte size of the data type definition. Returns: int: data type size in bytes or None if size cannot be determined. """ if self._byte_size is None and self._members_by_name: self._byte_size = 0 for member_definition in self._members_by_name.values(): if (not isinstance(member_definition, PaddingDefinition) or not member_definition.alignment_size): byte_size = member_definition.GetByteSize() if byte_size is None: self._byte_size = None break else: _, byte_size = divmod( self._byte_size, member_definition.alignment_size) if byte_size > 0: byte_size = member_definition.alignment_size - byte_size self._byte_size += byte_size return self._byte_size class UnionDefinition(DataTypeDefinitionWithMembers): """Union data type definition.""" TYPE_INDICATOR: 'Union[str, None]' = definitions.TYPE_INDICATOR_UNION def GetByteSize(self) -> 'Union[int, None]': """Retrieves the byte size of the data type definition. Returns: int: data type size in bytes or None if size cannot be determined. """ if self._byte_size is None and self._members_by_name: self._byte_size = 0 for member_definition in self._members_by_name.values(): byte_size = member_definition.GetByteSize() if byte_size is None: self._byte_size = None break self._byte_size = max(self._byte_size, byte_size) return self._byte_size class SemanticDataTypeDefinition(DataTypeDefinition): """Semantic data type definition interface.""" # Note that redundant-returns-doc is broken for pylint 1.7.x for abstract # methods. # pylint: disable=redundant-returns-doc def GetByteSize(self) -> 'Union[int, None]': """Retrieves the byte size of the data type definition. 
Returns: int: data type size in bytes or None if size cannot be determined. """ return None class ConstantDefinition(SemanticDataTypeDefinition): """Constant data type definition. Attributes: value (int): constant value. """ TYPE_INDICATOR: 'Union[str, None]' = definitions.TYPE_INDICATOR_CONSTANT def __init__( self, name: 'str', aliases: 'Optional[List[str]]' = None, description: 'Optional[str]' = None, urls: 'Optional[List[str]]' = None) -> 'None': """Initializes an enumeration data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. urls (Optional[list[str]]): URLs. """ super(ConstantDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.value: 'Union[int, None]' = None class EnumerationValue(object): """Enumeration value. Attributes: aliases (list[str]): aliases. description (str): description. name (str): name. number (int): number. """ def __init__( self, name: 'str', number: 'int', aliases: 'Optional[List[str]]' = None, description: 'Optional[str]' = None) -> 'None': """Initializes an enumeration value. Args: name (str): name. number (int): number. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. """ super(EnumerationValue, self).__init__() self.aliases: 'List[str]' = aliases or [] self.description: 'Union[str, None]' = description self.name: 'str' = name self.number: 'int' = number class EnumerationDefinition(SemanticDataTypeDefinition): """Enumeration data type definition. Attributes: values (list[EnumerationValue]): enumeration values. values_per_alias (dict[str, EnumerationValue]): enumeration values per alias. values_per_name (dict[str, EnumerationValue]): enumeration values per name. values_per_number (dict[int, EnumerationValue]): enumeration values per number. 
""" TYPE_INDICATOR: 'Union[str, None]' = ( definitions.TYPE_INDICATOR_ENUMERATION) def __init__( self, name: 'str', aliases: 'Optional[List[str]]' = None, description: 'Optional[str]' = None, urls: 'Optional[List[str]]' = None) -> 'None': """Initializes an enumeration data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. urls (Optional[list[str]]): URLs. """ super(EnumerationDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.values: 'List[EnumerationValue]' = [] self.values_per_alias: 'Dict[str, EnumerationValue]' = {} self.values_per_name: 'Dict[str, EnumerationValue]' = {} self.values_per_number: 'Dict[int, EnumerationValue]' = {} def AddValue( self, name: 'str', number: 'int', aliases: 'Optional[List[str]]' = None, description: 'Optional[str]' = None) -> 'None': """Adds an enumeration value. Args: name (str): name. number (int): number. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. Raises: KeyError: if the enumeration value already exists. """ if name in self.values_per_name: raise KeyError(f'Value with name: {name:s} already exists.') if number in self.values_per_number: raise KeyError(f'Value with number: {number!s} already exists.') for alias in aliases or []: if alias in self.values_per_alias: raise KeyError(f'Value with alias: {alias:s} already exists.') enumeration_value = EnumerationValue( name, number, aliases=aliases, description=description) self.values.append(enumeration_value) self.values_per_name[name] = enumeration_value self.values_per_number[number] = enumeration_value for alias in aliases or []: self.values_per_alias[alias] = enumeration_value class LayoutDataTypeDefinition(DataTypeDefinition): """Layout data type definition interface.""" # Note that redundant-returns-doc is broken for pylint 1.7.x for abstract # methods. 
# pylint: disable=redundant-returns-doc _IS_COMPOSITE: 'bool' = True def GetByteSize(self) -> 'Union[int, None]': """Retrieves the byte size of the data type definition. Returns: int: data type size in bytes or None if size cannot be determined. """ return None class LayoutElementDefinition(object): """Layout element definition. Attributes: data_type (str): name of the data type definition of the layout element. offset (int): offset of the layout element. """ def __init__( self, data_type: 'str', offset: 'Optional[int]' = None) -> 'None': """Initializes a layout element definition. Args: data_type (str): name of the data type of the layout element. offset (Optional[int]): offset of the layout element. """ super(LayoutElementDefinition, self).__init__() self.data_type: 'str' = data_type self.offset: 'Union[int, None]' = offset class FormatDefinition(LayoutDataTypeDefinition): """Data format definition. Attributes: metadata (dict[str, object]): metadata. layout (list[LayoutElementDefinition]): layout element definitions. """ TYPE_INDICATOR: 'Union[str, None]' = definitions.TYPE_INDICATOR_FORMAT def __init__( self, name: 'str', aliases: 'Optional[List[str]]' = None, description: 'Optional[str]' = None, urls: 'Optional[List[str]]' = None) -> 'None': """Initializes a format data type definition. Args: name (str): name. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. urls (Optional[list[str]]): URLs. """ super(FormatDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self.layout: 'List[LayoutElementDefinition]' = [] self.metadata: 'Dict[str, object]' = {} class StructureFamilyDefinition(LayoutDataTypeDefinition): """Structure family definition. Attributes: base (DataTypeDefinition): base data type definition. members (list[DataTypeDefinition]): member data type definitions. 
""" TYPE_INDICATOR: 'Union[str, None]' = ( definitions.TYPE_INDICATOR_STRUCTURE_FAMILY) def __init__( self, name: 'str', base_definition: 'StructureDefinition', aliases: 'Optional[List[str]]' = None, description: 'Optional[str]' = None, urls: 'Optional[List[str]]' = None) -> 'None': """Initializes a structure family data type definition. Args: name (str): name. base_definition (StructureDefinition): base data type definition. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. urls (Optional[list[str]]): URLs. """ super(StructureFamilyDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self._members_by_name: 'OrderedDict[str, DataTypeDefinition]' = ( collections.OrderedDict()) self.base: 'Union[DataTypeDefinition, None]' = base_definition @property def members(self) -> 'List[DataTypeDefinition]': """Retrieves the member data type definitions. Returns: list[DataTypeDefinition]: member data type definitions. """ return list(self._members_by_name.values()) def AddMemberDefinition( self, member_definition: 'StructureDefinition') -> 'None': """Adds a member definition. Args: member_definition (StructureDefinition): member data type definition. Raises: KeyError: if a member with the name already exists. """ if member_definition.name in self._members_by_name: raise KeyError(f'Member: {member_definition.name:s} already set.') self._members_by_name[member_definition.name] = member_definition def SetBaseDefinition( self, base_definition: 'StructureDefinition') -> 'None': """Sets a base definition. Args: base_definition (StructureDefinition): base data type definition. """ self.base = base_definition class StructureGroupDefinition(LayoutDataTypeDefinition): """Structure group definition. Attributes: base (DataTypeDefinition): base data type definition. byte_order (str): byte-order the data type. default (DataTypeDefinition): default data type definition. 
identifier (str): name of the base structure member to identify the group members. members (list[DataTypeDefinition]): member data type definitions. """ TYPE_INDICATOR: 'Union[str, None]' = ( definitions.TYPE_INDICATOR_STRUCTURE_GROUP) def __init__( self, name: 'str', base_definition: 'StructureDefinition', identifier: 'str', default_definition: 'StructureDefinition', aliases: 'Optional[List[str]]' = None, description: 'Optional[str]' = None, urls: 'Optional[List[str]]' = None) -> 'None': """Initializes a structure group data type definition. Args: name (str): name. base_definition (StructureDefinition): base data type definition. identifier (str): name of the base structure member to identify the group members. default_definition (StructureDefinition): default data type definition. aliases (Optional[list[str]]): aliases. description (Optional[str]): description. urls (Optional[list[str]]): URLs. """ super(StructureGroupDefinition, self).__init__( name, aliases=aliases, description=description, urls=urls) self._members_by_name: 'OrderedDict[str, DataTypeDefinition]' = ( collections.OrderedDict()) self.base: 'Union[DataTypeDefinition, None]' = base_definition self.byte_order: 'str' = getattr( base_definition, 'byte_order', definitions.BYTE_ORDER_NATIVE) self.default: 'Union[DataTypeDefinition, None]' = default_definition self.identifier: 'Union[str, None]' = identifier @property def members(self) -> 'List[DataTypeDefinition]': """Retrieves the member data type definitions. Returns: list[DataTypeDefinition]: member data type definitions. """ return list(self._members_by_name.values()) def AddMemberDefinition( self, member_definition: 'StructureDefinition') -> 'None': """Adds a member definition. Args: member_definition (StructureDefinition): member data type definition. Raises: KeyError: if a member with the name already exists. 
""" if member_definition.name in self._members_by_name: raise KeyError(f'Member: {member_definition.name:s} already set.') self._members_by_name[member_definition.name] = member_definition dtfabric-20240211/dtfabric/decorators.py000066400000000000000000000013061456204725700200360ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Function decorators.""" import warnings def deprecated(function): # pylint: disable=invalid-name """Decorator to mark functions or methods as deprecated.""" def IssueDeprecationWarning(*args, **kwargs): """Issue a deprecation warning.""" warnings.simplefilter('default', DeprecationWarning) warnings.warn( f'Call to deprecated function: {function.__name__:s}.', category=DeprecationWarning, stacklevel=2) return function(*args, **kwargs) IssueDeprecationWarning.__name__ = function.__name__ IssueDeprecationWarning.__doc__ = function.__doc__ IssueDeprecationWarning.__dict__.update(function.__dict__) return IssueDeprecationWarning dtfabric-20240211/dtfabric/definitions.py000066400000000000000000000027361456204725700202140ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Definitions.""" BYTE_ORDER_BIG_ENDIAN = 'big-endian' BYTE_ORDER_LITTLE_ENDIAN = 'little-endian' BYTE_ORDER_MIDDLE_ENDIAN = 'middle-endian' BYTE_ORDER_NATIVE = 'native' BYTE_ORDERS = frozenset([ BYTE_ORDER_BIG_ENDIAN, BYTE_ORDER_LITTLE_ENDIAN, BYTE_ORDER_NATIVE]) FORMAT_SIGNED = 'signed' FORMAT_UNSIGNED = 'unsigned' SIZE_NATIVE = 'native' TYPE_INDICATOR_BOOLEAN = 'boolean' TYPE_INDICATOR_CHARACTER = 'character' TYPE_INDICATOR_CONSTANT = 'constant' TYPE_INDICATOR_ENUMERATION = 'enumeration' TYPE_INDICATOR_FLOATING_POINT = 'floating-point' TYPE_INDICATOR_FORMAT = 'format' TYPE_INDICATOR_INTEGER = 'integer' TYPE_INDICATOR_PADDING = 'padding' TYPE_INDICATOR_SEQUENCE = 'sequence' TYPE_INDICATOR_STREAM = 'stream' TYPE_INDICATOR_STRING = 'string' TYPE_INDICATOR_STRUCTURE = 'structure' TYPE_INDICATOR_STRUCTURE_FAMILY = 'structure-family' TYPE_INDICATOR_STRUCTURE_GROUP = 
'structure-group' TYPE_INDICATOR_UNION = 'union' TYPE_INDICATOR_UUID = 'uuid' TYPE_INDICATORS = frozenset([ TYPE_INDICATOR_BOOLEAN, TYPE_INDICATOR_CHARACTER, TYPE_INDICATOR_CONSTANT, TYPE_INDICATOR_ENUMERATION, TYPE_INDICATOR_FLOATING_POINT, TYPE_INDICATOR_FORMAT, TYPE_INDICATOR_INTEGER, TYPE_INDICATOR_PADDING, TYPE_INDICATOR_SEQUENCE, TYPE_INDICATOR_STREAM, TYPE_INDICATOR_STRING, TYPE_INDICATOR_STRUCTURE, TYPE_INDICATOR_STRUCTURE_FAMILY, TYPE_INDICATOR_STRUCTURE_GROUP, TYPE_INDICATOR_UNION, TYPE_INDICATOR_UUID]) dtfabric-20240211/dtfabric/errors.py000066400000000000000000000017701456204725700172120ustar00rootroot00000000000000# -*- coding: utf-8 -*- """The error objects.""" class Error(Exception): """The error interface.""" class ByteStreamTooSmallError(Error): """Error that is raised when the byte stream is too small.""" class DefinitionReaderError(Error): """Error that is raised by the definition reader. Attributes: name (str): name of the definition. message (str): error message. """ def __init__(self, name: str, message: str) -> None: """Initializes an error. Args: name (str): name of the definition. message (str): error message. """ # pylint: disable=super-init-not-called # Do not call initialize of the super class. 
self.name: str = name self.message: str = message class FoldingError(Error): """Error that is raised when the definition cannot be folded.""" class FormatError(Error): """Error that is raised when the definition format is incorrect.""" class MappingError(Error): """Error that is raised when the definition cannot be mapped.""" dtfabric-20240211/dtfabric/reader.py000066400000000000000000001500031456204725700171320ustar00rootroot00000000000000# -*- coding: utf-8 -*- """The data type definition reader objects.""" import abc import yaml from dtfabric import data_types from dtfabric import definitions from dtfabric import errors class DataTypeDefinitionsReader(object): """Data type definitions reader.""" _DATA_TYPE_CALLBACKS = { definitions.TYPE_INDICATOR_BOOLEAN: '_ReadBooleanDataTypeDefinition', definitions.TYPE_INDICATOR_CHARACTER: '_ReadCharacterDataTypeDefinition', definitions.TYPE_INDICATOR_CONSTANT: '_ReadConstantDataTypeDefinition', definitions.TYPE_INDICATOR_ENUMERATION: ( '_ReadEnumerationDataTypeDefinition'), definitions.TYPE_INDICATOR_FLOATING_POINT: ( '_ReadFloatingPointDataTypeDefinition'), definitions.TYPE_INDICATOR_FORMAT: '_ReadFormatDataTypeDefinition', definitions.TYPE_INDICATOR_INTEGER: '_ReadIntegerDataTypeDefinition', definitions.TYPE_INDICATOR_PADDING: '_ReadPaddingDataTypeDefinition', definitions.TYPE_INDICATOR_SEQUENCE: '_ReadSequenceDataTypeDefinition', definitions.TYPE_INDICATOR_STREAM: '_ReadStreamDataTypeDefinition', definitions.TYPE_INDICATOR_STRING: '_ReadStringDataTypeDefinition', definitions.TYPE_INDICATOR_STRUCTURE: '_ReadStructureDataTypeDefinition', definitions.TYPE_INDICATOR_STRUCTURE_FAMILY: ( '_ReadStructureFamilyDataTypeDefinition'), definitions.TYPE_INDICATOR_STRUCTURE_GROUP: ( '_ReadStructureGroupDataTypeDefinition'), definitions.TYPE_INDICATOR_UNION: '_ReadUnionDataTypeDefinition', definitions.TYPE_INDICATOR_UUID: '_ReadUUIDDataTypeDefinition', } _INTEGER_FORMAT_ATTRIBUTES = frozenset([ definitions.FORMAT_SIGNED, 
definitions.FORMAT_UNSIGNED]) _SUPPORTED_DEFINITION_VALUES_DATA_TYPE = set([ 'aliases', 'description', 'name', 'type', 'urls']) _SUPPORTED_DEFINITION_VALUES_LAYOUT_ELEMENT = set([ 'data_type', 'offset']) _SUPPORTED_DEFINITION_VALUES_MEMBER_DATA_TYPE = set([ 'aliases', 'condition', 'data_type', 'description', 'name', 'type', 'value', 'values']) _SUPPORTED_DEFINITION_VALUES_STORAGE_DATA_TYPE = set([ 'attributes']).union(_SUPPORTED_DEFINITION_VALUES_DATA_TYPE) _SUPPORTED_DEFINITION_VALUES_STORAGE_DATA_TYPE_WITH_MEMBERS = set([ 'members']).union(_SUPPORTED_DEFINITION_VALUES_STORAGE_DATA_TYPE) _SUPPORTED_DEFINITION_VALUES_CONSTANT = set([ 'value']).union(_SUPPORTED_DEFINITION_VALUES_DATA_TYPE) _SUPPORTED_DEFINITION_VALUES_ENUMERATION = set([ 'values']).union(_SUPPORTED_DEFINITION_VALUES_DATA_TYPE) _SUPPORTED_DEFINITION_VALUES_ELEMENTS_DATA_TYPE = set([ 'element_data_type', 'elements_data_size', 'elements_terminator', 'number_of_elements']).union(_SUPPORTED_DEFINITION_VALUES_DATA_TYPE) _SUPPORTED_DEFINITION_VALUES_ELEMENTS_MEMBER_DATA_TYPE = set([ 'element_data_type', 'elements_data_size', 'elements_terminator', 'number_of_elements']).union( _SUPPORTED_DEFINITION_VALUES_MEMBER_DATA_TYPE) _SUPPORTED_DEFINITION_VALUES_FORMAT = set([ 'attributes', 'layout', 'metadata']).union( _SUPPORTED_DEFINITION_VALUES_DATA_TYPE) _SUPPORTED_DEFINITION_VALUES_PADDING = set([ 'alignment_size']).union(_SUPPORTED_DEFINITION_VALUES_MEMBER_DATA_TYPE) _SUPPORTED_DEFINITION_VALUES_STRING = set([ 'encoding']).union(_SUPPORTED_DEFINITION_VALUES_ELEMENTS_DATA_TYPE) _SUPPORTED_DEFINITION_VALUES_STRING_MEMBER = set([ 'encoding']).union(_SUPPORTED_DEFINITION_VALUES_ELEMENTS_MEMBER_DATA_TYPE) _SUPPORTED_DEFINITION_VALUES_STRUCTURE_FAMILY = set([ 'base', 'members']).union(_SUPPORTED_DEFINITION_VALUES_DATA_TYPE) _SUPPORTED_DEFINITION_VALUES_STRUCTURE_GROUP = set([ 'base', 'default', 'identifier', 'members']).union( _SUPPORTED_DEFINITION_VALUES_DATA_TYPE) _SUPPORTED_ATTRIBUTES_STORAGE_DATA_TYPE = set([ 
'byte_order']) _SUPPORTED_ATTRIBUTES_FIXED_SIZE_DATA_TYPE = set([ 'size', 'units']).union(_SUPPORTED_ATTRIBUTES_STORAGE_DATA_TYPE) _SUPPORTED_ATTRIBUTES_BOOLEAN = set([ 'false_value', 'true_value']).union( _SUPPORTED_ATTRIBUTES_FIXED_SIZE_DATA_TYPE) _SUPPORTED_ATTRIBUTES_FORMAT = set([ 'byte_order']) _SUPPORTED_ATTRIBUTES_INTEGER = set([ 'format']).union(_SUPPORTED_ATTRIBUTES_FIXED_SIZE_DATA_TYPE) def _ReadBooleanDataTypeDefinition( self, definitions_registry, definition_values, definition_name, is_member=False): """Reads a boolean data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. is_member (Optional[bool]): True if the data type definition is a member data type definition. Returns: BooleanDataTypeDefinition: boolean data type definition. """ return self._ReadFixedSizeDataTypeDefinition( definitions_registry, definition_values, data_types.BooleanDefinition, definition_name, self._SUPPORTED_ATTRIBUTES_BOOLEAN, is_member=is_member, supported_size_values=(1, 2, 4)) def _ReadCharacterDataTypeDefinition( self, definitions_registry, definition_values, definition_name, is_member=False): """Reads a character data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. is_member (Optional[bool]): True if the data type definition is a member data type definition. Returns: CharacterDataTypeDefinition: character data type definition. 
""" return self._ReadFixedSizeDataTypeDefinition( definitions_registry, definition_values, data_types.CharacterDefinition, definition_name, self._SUPPORTED_ATTRIBUTES_FIXED_SIZE_DATA_TYPE, is_member=is_member, supported_size_values=(1, 2, 4)) def _ReadConstantDataTypeDefinition( self, definitions_registry, definition_values, definition_name, is_member=False): """Reads a constant data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. is_member (Optional[bool]): True if the data type definition is a member data type definition. Returns: ConstantDataTypeDefinition: constant data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. """ if is_member: raise errors.DefinitionReaderError(definition_name, ( 'data type not supported as member')) value = definition_values.get('value', None) if value is None: raise errors.DefinitionReaderError(definition_name, 'missing value') definition_object = self._ReadSemanticDataTypeDefinition( definitions_registry, definition_values, data_types.ConstantDefinition, definition_name, self._SUPPORTED_DEFINITION_VALUES_CONSTANT) definition_object.value = value return definition_object # pylint: disable=unused-argument def _ReadDataTypeDefinition( self, definitions_registry, definition_values, data_type_definition_class, definition_name, supported_definition_values): """Reads a data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. data_type_definition_class (str): data type definition class. definition_name (str): name of the definition. supported_definition_values (set[str]): names of the supported definition values. Returns: DataTypeDefinition: data type definition. 
Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. """ unsupported_definition_values = set(definition_values.keys()).difference( supported_definition_values) if unsupported_definition_values: values_string = ', '.join(unsupported_definition_values) raise errors.DefinitionReaderError(definition_name, ( f'unsupported definition values: {values_string:s}')) aliases = definition_values.get('aliases', None) description = definition_values.get('description', None) urls = definition_values.get('urls', None) return data_type_definition_class( definition_name, aliases=aliases, description=description, urls=urls) def _ReadDataTypeDefinitionWithMembers( self, definitions_registry, definition_values, data_type_definition_class, definition_name, supports_conditions=False): """Reads a data type definition with members. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. data_type_definition_class (str): data type definition class. definition_name (str): name of the definition. supports_conditions (Optional[bool]): True if conditions are supported by the data type definition. Returns: StringDefinition: string data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. 
""" members = definition_values.get('members', None) if not members: raise errors.DefinitionReaderError(definition_name, 'missing members') supported_definition_values = ( self._SUPPORTED_DEFINITION_VALUES_STORAGE_DATA_TYPE_WITH_MEMBERS) definition_object = self._ReadDataTypeDefinition( definitions_registry, definition_values, data_type_definition_class, definition_name, supported_definition_values) attributes = definition_values.get('attributes', None) if attributes: unsupported_attributes = set(attributes.keys()).difference( self._SUPPORTED_ATTRIBUTES_STORAGE_DATA_TYPE) if unsupported_attributes: attributes_string = ', '.join(unsupported_attributes) raise errors.DefinitionReaderError(definition_name, ( f'unsupported attributes: {attributes_string:s}')) byte_order = attributes.get('byte_order', definitions.BYTE_ORDER_NATIVE) if byte_order not in definitions.BYTE_ORDERS: raise errors.DefinitionReaderError(definition_name, ( f'unsupported byte-order attribute: {byte_order!s}')) definition_object.byte_order = byte_order for member in members: section = member.get('section', None) if section: member_section_definition = data_types.MemberSectionDefinition(section) definition_object.AddSectionDefinition(member_section_definition) else: member_data_type_definition = self._ReadMemberDataTypeDefinitionMember( definitions_registry, member, definition_object.name, supports_conditions=supports_conditions) try: definition_object.AddMemberDefinition(member_data_type_definition) except KeyError as exception: raise errors.DefinitionReaderError(definition_name, f'{exception!s}') return definition_object def _ReadEnumerationDataTypeDefinition( self, definitions_registry, definition_values, definition_name, is_member=False): """Reads an enumeration data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. 
is_member (Optional[bool]): True if the data type definition is a member data type definition. Returns: EnumerationDataTypeDefinition: enumeration data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. """ if is_member: raise errors.DefinitionReaderError(definition_name, ( 'data type not supported as member')) values = definition_values.get('values') if not values: raise errors.DefinitionReaderError(definition_name, 'missing values') definition_object = self._ReadSemanticDataTypeDefinition( definitions_registry, definition_values, data_types.EnumerationDefinition, definition_name, self._SUPPORTED_DEFINITION_VALUES_ENUMERATION) last_name = None for enumeration_value in values: aliases = enumeration_value.get('aliases', None) description = enumeration_value.get('description', None) name = enumeration_value.get('name', None) number = enumeration_value.get('number', None) if not name or number is None: if last_name: error_location = f'after: {last_name:s}' else: error_location = 'at start' raise errors.DefinitionReaderError(definition_name, ( f'{error_location:s} missing name or number')) try: definition_object.AddValue( name, number, aliases=aliases, description=description) except KeyError as exception: raise errors.DefinitionReaderError(definition_name, f'{exception!s}') last_name = name return definition_object def _ReadElementSequenceDataTypeDefinition( self, definitions_registry, definition_values, data_type_definition_class, definition_name, supported_definition_values): """Reads an element sequence data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. data_type_definition_class (str): data type definition class. definition_name (str): name of the definition. supported_definition_values (set[str]): names of the supported definition values. 
Returns: SequenceDefinition: sequence data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. """ unsupported_definition_values = set(definition_values.keys()).difference( supported_definition_values) if unsupported_definition_values: values_string = ', '.join(unsupported_definition_values) raise errors.DefinitionReaderError(definition_name, ( f'unsupported definition values: {values_string:s}')) element_data_type = definition_values.get('element_data_type', None) if not element_data_type: raise errors.DefinitionReaderError(definition_name, ( 'missing element data type')) elements_data_size = definition_values.get('elements_data_size', None) elements_terminator = definition_values.get('elements_terminator', None) number_of_elements = definition_values.get('number_of_elements', None) size_values = (elements_data_size, elements_terminator, number_of_elements) size_values = [value for value in size_values if value is not None] if not size_values: raise errors.DefinitionReaderError(definition_name, ( 'missing element data size, elements terminator and number of ' 'elements')) if elements_data_size is not None and number_of_elements is not None: raise errors.DefinitionReaderError(definition_name, ( 'element data size and number of elements not allowed to be set ' 'at the same time')) element_data_type_definition = definitions_registry.GetDefinitionByName( element_data_type) if not element_data_type_definition: raise errors.DefinitionReaderError(definition_name, ( f'undefined element data type: {element_data_type:s}')) aliases = definition_values.get('aliases', None) description = definition_values.get('description', None) urls = definition_values.get('urls', None) definition_object = data_type_definition_class( definition_name, element_data_type_definition, aliases=aliases, data_type=element_data_type, description=description, urls=urls) if elements_data_size is not None: try: 
definition_object.elements_data_size = int(elements_data_size) except ValueError: definition_object.elements_data_size_expression = elements_data_size elif number_of_elements is not None: try: definition_object.number_of_elements = int(number_of_elements) except ValueError: definition_object.number_of_elements_expression = number_of_elements if elements_terminator is not None: if isinstance(elements_terminator, str): elements_terminator = elements_terminator.encode('ascii') definition_object.elements_terminator = elements_terminator return definition_object def _ReadFixedSizeDataTypeDefinition( self, definitions_registry, definition_values, data_type_definition_class, definition_name, supported_attributes, default_size=definitions.SIZE_NATIVE, default_units='bytes', is_member=False, supported_size_values=None): """Reads a fixed-size data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. data_type_definition_class (str): data type definition class. definition_name (str): name of the definition. supported_attributes (set[str]): names of the supported attributes. default_size (Optional[int]): default size. default_units (Optional[str]): default units. is_member (Optional[bool]): True if the data type definition is a member data type definition. supported_size_values (Optional[tuple[int]]): supported size values, or None if not set. Returns: FixedSizeDataTypeDefinition: fixed-size data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. 
""" definition_object = self._ReadStorageDataTypeDefinition( definitions_registry, definition_values, data_type_definition_class, definition_name, supported_attributes, is_member=is_member) attributes = definition_values.get('attributes', None) if attributes: size = attributes.get('size', default_size) if size != definitions.SIZE_NATIVE: try: int(size) except ValueError: raise errors.DefinitionReaderError(definition_name, ( f'unuspported size attribute: {size!s}')) if supported_size_values and size not in supported_size_values: raise errors.DefinitionReaderError(definition_name, ( f'unuspported size value: {size!s}')) definition_object.size = size definition_object.units = attributes.get('units', default_units) return definition_object def _ReadFloatingPointDataTypeDefinition( self, definitions_registry, definition_values, definition_name, is_member=False): """Reads a floating-point data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. is_member (Optional[bool]): True if the data type definition is a member data type definition. Returns: FloatingPointDefinition: floating-point data type definition. """ return self._ReadFixedSizeDataTypeDefinition( definitions_registry, definition_values, data_types.FloatingPointDefinition, definition_name, self._SUPPORTED_ATTRIBUTES_FIXED_SIZE_DATA_TYPE, is_member=is_member, supported_size_values=(4, 8)) def _ReadFormatDataTypeDefinition( self, definitions_registry, definition_values, definition_name, is_member=False): """Reads a format data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. is_member (Optional[bool]): True if the data type definition is a member data type definition. 
Returns: FormatDefinition: format definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. """ if is_member: raise errors.DefinitionReaderError(definition_name, ( 'data type not supported as member')) definition_object = self._ReadLayoutDataTypeDefinition( definitions_registry, definition_values, data_types.FormatDefinition, definition_name, self._SUPPORTED_DEFINITION_VALUES_FORMAT) layout = definition_values.get('layout', []) definition_object.layout = self._ReadFormatLayout( definitions_registry, layout, definition_name) definition_object.metadata = definition_values.get('metadata', {}) attributes = definition_values.get('attributes', None) if attributes: unsupported_attributes = set(attributes.keys()).difference( self._SUPPORTED_ATTRIBUTES_FORMAT) if unsupported_attributes: attributes_string = ', '.join(unsupported_attributes) raise errors.DefinitionReaderError(definition_name, ( f'unsupported attributes: {attributes_string:s}')) byte_order = attributes.get('byte_order', definitions.BYTE_ORDER_NATIVE) if byte_order not in definitions.BYTE_ORDERS: raise errors.DefinitionReaderError(definition_name, ( f'unsupported byte-order attribute: {byte_order!s}')) definition_object.byte_order = byte_order return definition_object def _ReadFormatLayout( self, definitions_registry, definition_values, definition_name): """Reads the layout of a format data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. is_member (Optional[bool]): True if the data type definition is a member data type definition. Returns: FormatDefinition: format definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. 
""" layout_elements = [] for index, layout_element in enumerate(definition_values): data_type = layout_element.get('data_type', None) offset = layout_element.get('offset', None) if not data_type: raise errors.DefinitionReaderError(definition_name, ( f'invalid layout element: {index:d} missing data type')) unsupported_definition_values = set(layout_element.keys()).difference( self._SUPPORTED_DEFINITION_VALUES_LAYOUT_ELEMENT) if unsupported_definition_values: values_string = ', '.join(unsupported_definition_values) raise errors.DefinitionReaderError(definition_name, ( f'unsupported definition values: {values_string:s}')) definition_object = data_types.LayoutElementDefinition( data_type=data_type, offset=offset) layout_elements.append(definition_object) return layout_elements def _ReadIntegerDataTypeDefinition( self, definitions_registry, definition_values, definition_name, is_member=False): """Reads an integer data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. is_member (Optional[bool]): True if the data type definition is a member data type definition. Returns: IntegerDataTypeDefinition: integer data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. 
""" definition_object = self._ReadFixedSizeDataTypeDefinition( definitions_registry, definition_values, data_types.IntegerDefinition, definition_name, self._SUPPORTED_ATTRIBUTES_INTEGER, is_member=is_member, supported_size_values=(1, 2, 4, 8)) attributes = definition_values.get('attributes', None) if attributes: format_attribute = attributes.get('format', definitions.FORMAT_SIGNED) if format_attribute not in self._INTEGER_FORMAT_ATTRIBUTES: raise errors.DefinitionReaderError(definition_name, ( f'unsupported format attribute: {format_attribute!s}')) definition_object.format = format_attribute return definition_object def _ReadLayoutDataTypeDefinition( self, definitions_registry, definition_values, data_type_definition_class, definition_name, supported_definition_values): """Reads a layout data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. data_type_definition_class (str): data type definition class. definition_name (str): name of the definition. supported_definition_values (set[str]): names of the supported definition values. Returns: LayoutDataTypeDefinition: layout data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. """ return self._ReadDataTypeDefinition( definitions_registry, definition_values, data_type_definition_class, definition_name, supported_definition_values) def _ReadMemberDataTypeDefinitionMember( self, definitions_registry, definition_values, definition_name, supports_conditions=False): """Reads a member data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. supports_conditions (Optional[bool]): True if conditions are supported by the data type definition. 
Returns: DataTypeDefinition: structure member data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. """ if not definition_values: raise errors.DefinitionReaderError(definition_name, ( 'invalid structure member missing definition values')) name = definition_values.get('name', None) type_indicator = definition_values.get('type', None) if not name and type_indicator != definitions.TYPE_INDICATOR_UNION: raise errors.DefinitionReaderError(definition_name, ( 'invalid structure member missing name')) # TODO: detect duplicate names. data_type = definition_values.get('data_type', None) type_values = (data_type, type_indicator) type_values = [value for value in type_values if value is not None] if not type_values: name = name or '' raise errors.DefinitionReaderError(definition_name, ( f'invalid structure member: {name:s} both data type and type are ' f'missing')) if len(type_values) > 1: name = name or '' raise errors.DefinitionReaderError(definition_name, ( f'invalid structure member: {name:s} data type and type not allowed ' f'to be set at the same time')) condition = definition_values.get('condition', None) if not supports_conditions and condition: name = name or '' raise errors.DefinitionReaderError(definition_name, ( f'invalid structure member: {name:s} unsupported condition')) value = definition_values.get('value', None) values = definition_values.get('values', None) if None not in (value, values): name = name or '' raise errors.DefinitionReaderError(definition_name, ( f'invalid structure member: {name:s} value and values not allowed to ' f'be set at the same time')) if value is not None and values is None: values = [value] supported_values = None if values: supported_values = [] for value in values: if isinstance(value, str): # Note that latin1 is used here since the ascii encoding is limited # to 127 characters. 
value = value.encode('latin1') supported_values.append(value) if type_indicator is not None: data_type_callback = self._DATA_TYPE_CALLBACKS.get(type_indicator, None) if data_type_callback: data_type_callback = getattr(self, data_type_callback, None) if not data_type_callback: raise errors.DefinitionReaderError(name, ( f'unuspported data type definition: {type_indicator:s}')) try: data_type_definition = data_type_callback( definitions_registry, definition_values, name, is_member=True) except errors.DefinitionReaderError as exception: exception_name = exception.name or '' raise errors.DefinitionReaderError(definition_name, ( f'in: {exception_name:s} {exception.message:s}')) if condition or supported_values: definition_object = data_types.MemberDataTypeDefinition( name, data_type_definition, condition=condition, values=supported_values) else: definition_object = data_type_definition elif data_type is not None: data_type_definition = definitions_registry.GetDefinitionByName( data_type) if not data_type_definition: name = name or '' raise errors.DefinitionReaderError(definition_name, ( f'invalid structure member: {name:s} undefined data type: ' f'{data_type:s}')) unsupported_definition_values = set(definition_values.keys()).difference( self._SUPPORTED_DEFINITION_VALUES_MEMBER_DATA_TYPE) if unsupported_definition_values: values_string = ', '.join(unsupported_definition_values) raise errors.DefinitionReaderError(definition_name, ( f'unsupported definition values: {values_string:s}')) aliases = definition_values.get('aliases', None) description = definition_values.get('description', None) definition_object = data_types.MemberDataTypeDefinition( name, data_type_definition, aliases=aliases, condition=condition, data_type=data_type, description=description, values=supported_values) return definition_object def _ReadPaddingDataTypeDefinition( self, definitions_registry, definition_values, definition_name, is_member=False): """Reads a padding data type definition. 
Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. is_member (Optional[bool]): True if the data type definition is a member data type definition. Returns: PaddingtDefinition: padding definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. """ if not is_member: raise errors.DefinitionReaderError(definition_name, ( 'data type only supported as member')) definition_object = self._ReadDataTypeDefinition( definitions_registry, definition_values, data_types.PaddingDefinition, definition_name, self._SUPPORTED_DEFINITION_VALUES_PADDING) alignment_size = definition_values.get('alignment_size', None) if not alignment_size: raise errors.DefinitionReaderError(definition_name, ( 'missing alignment_size')) try: int(alignment_size) except ValueError: raise errors.DefinitionReaderError(definition_name, ( f'unuspported alignment size attribute: {alignment_size!s}')) if alignment_size not in (2, 4, 8, 16): raise errors.DefinitionReaderError(definition_name, ( f'unuspported alignment size value: {alignment_size!s}')) definition_object.alignment_size = alignment_size return definition_object def _ReadSemanticDataTypeDefinition( self, definitions_registry, definition_values, data_type_definition_class, definition_name, supported_definition_values): """Reads a semantic data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. data_type_definition_class (str): data type definition class. definition_name (str): name of the definition. supported_definition_values (set[str]): names of the supported definition values. Returns: SemanticDataTypeDefinition: semantic data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. 
""" return self._ReadDataTypeDefinition( definitions_registry, definition_values, data_type_definition_class, definition_name, supported_definition_values) def _ReadSequenceDataTypeDefinition( self, definitions_registry, definition_values, definition_name, is_member=False): """Reads a sequence data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. is_member (Optional[bool]): True if the data type definition is a member data type definition. Returns: SequenceDefinition: sequence data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. """ if is_member: supported_definition_values = ( self._SUPPORTED_DEFINITION_VALUES_ELEMENTS_MEMBER_DATA_TYPE) else: supported_definition_values = ( self._SUPPORTED_DEFINITION_VALUES_ELEMENTS_DATA_TYPE) return self._ReadElementSequenceDataTypeDefinition( definitions_registry, definition_values, data_types.SequenceDefinition, definition_name, supported_definition_values) def _ReadStorageDataTypeDefinition( self, definitions_registry, definition_values, data_type_definition_class, definition_name, supported_attributes, is_member=False): """Reads a storage data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. data_type_definition_class (str): data type definition class. definition_name (str): name of the definition. supported_attributes (set[str]): names of the supported attributes. is_member (Optional[bool]): True if the data type definition is a member data type definition. Returns: StorageDataTypeDefinition: storage data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. 
""" if is_member: supported_definition_values = ( self._SUPPORTED_DEFINITION_VALUES_MEMBER_DATA_TYPE) else: supported_definition_values = ( self._SUPPORTED_DEFINITION_VALUES_STORAGE_DATA_TYPE) definition_object = self._ReadDataTypeDefinition( definitions_registry, definition_values, data_type_definition_class, definition_name, supported_definition_values) attributes = definition_values.get('attributes', None) if attributes: unsupported_attributes = set(attributes.keys()).difference( supported_attributes) if unsupported_attributes: attributes_string = ', '.join(unsupported_attributes) raise errors.DefinitionReaderError(definition_name, ( f'unsupported attributes: {attributes_string:s}')) byte_order = attributes.get('byte_order', definitions.BYTE_ORDER_NATIVE) if byte_order not in definitions.BYTE_ORDERS: raise errors.DefinitionReaderError(definition_name, ( f'unsupported byte-order attribute: {byte_order!s}')) definition_object.byte_order = byte_order return definition_object def _ReadStreamDataTypeDefinition( self, definitions_registry, definition_values, definition_name, is_member=False): """Reads a stream data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. is_member (Optional[bool]): True if the data type definition is a member data type definition. Returns: StreamDefinition: stream data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. 
""" if is_member: supported_definition_values = ( self._SUPPORTED_DEFINITION_VALUES_ELEMENTS_MEMBER_DATA_TYPE) else: supported_definition_values = ( self._SUPPORTED_DEFINITION_VALUES_ELEMENTS_DATA_TYPE) return self._ReadElementSequenceDataTypeDefinition( definitions_registry, definition_values, data_types.StreamDefinition, definition_name, supported_definition_values) def _ReadStringDataTypeDefinition( self, definitions_registry, definition_values, definition_name, is_member=False): """Reads a string data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. is_member (Optional[bool]): True if the data type definition is a member data type definition. Returns: StringDefinition: string data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. """ if is_member: supported_definition_values = ( self._SUPPORTED_DEFINITION_VALUES_STRING_MEMBER) else: supported_definition_values = self._SUPPORTED_DEFINITION_VALUES_STRING definition_object = self._ReadElementSequenceDataTypeDefinition( definitions_registry, definition_values, data_types.StringDefinition, definition_name, supported_definition_values) encoding = definition_values.get('encoding', None) if not encoding: raise errors.DefinitionReaderError(definition_name, 'missing encoding') definition_object.encoding = encoding return definition_object def _ReadStructureDataTypeDefinition( self, definitions_registry, definition_values, definition_name, is_member=False): """Reads a structure data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. is_member (Optional[bool]): True if the data type definition is a member data type definition. 
Returns: StructureDefinition: structure data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. """ if is_member: raise errors.DefinitionReaderError(definition_name, ( 'data type not supported as member')) return self._ReadDataTypeDefinitionWithMembers( definitions_registry, definition_values, data_types.StructureDefinition, definition_name, supports_conditions=True) def _ReadStructureFamilyDataTypeDefinition( self, definitions_registry, definition_values, definition_name, is_member=False): """Reads a structure family data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. is_member (Optional[bool]): True if the data type definition is a member data type definition. Returns: StructureDefinition: structure data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. 
""" if is_member: raise errors.DefinitionReaderError(definition_name, ( 'data type not supported as member')) unsupported_definition_values = set(definition_values.keys()).difference( self._SUPPORTED_DEFINITION_VALUES_STRUCTURE_FAMILY) if unsupported_definition_values: values_string = ', '.join(unsupported_definition_values) raise errors.DefinitionReaderError(definition_name, ( f'unsupported definition values: {values_string:s}')) base = definition_values.get('base', None) if not base: raise errors.DefinitionReaderError(definition_name, 'missing base') base_data_type_definition = definitions_registry.GetDefinitionByName(base) if not base_data_type_definition: raise errors.DefinitionReaderError(definition_name, ( f'undefined base: {base:s}')) aliases = definition_values.get('aliases', None) description = definition_values.get('description', None) urls = definition_values.get('urls', None) definition_object = data_types.StructureFamilyDefinition( definition_name, base_data_type_definition, aliases=aliases, description=description, urls=urls) members = definition_values.get('members', None) if not members: raise errors.DefinitionReaderError(definition_name, 'missing members') for member in members: member_data_type_definition = definitions_registry.GetDefinitionByName( member) if not member_data_type_definition: raise errors.DefinitionReaderError(definition_name, ( f'undefined member: {member:s}')) try: definition_object.AddMemberDefinition(member_data_type_definition) except KeyError as exception: raise errors.DefinitionReaderError(definition_name, f'{exception!s}') return definition_object def _ReadStructureGroupDataTypeDefinition( self, definitions_registry, definition_values, definition_name, is_member=False): """Reads a structure group data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. 
is_member (Optional[bool]): True if the data type definition is a member data type definition. Returns: StructureDefinition: structure data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. """ if is_member: raise errors.DefinitionReaderError(definition_name, ( 'data type not supported as member')) unsupported_definition_values = set(definition_values.keys()).difference( self._SUPPORTED_DEFINITION_VALUES_STRUCTURE_GROUP) if unsupported_definition_values: values_string = ', '.join(unsupported_definition_values) raise errors.DefinitionReaderError(definition_name, ( f'unsupported definition values: {values_string:s}')) base = definition_values.get('base', None) if not base: raise errors.DefinitionReaderError(definition_name, 'missing base') base_data_type_definition = definitions_registry.GetDefinitionByName(base) if not base_data_type_definition: raise errors.DefinitionReaderError(definition_name, ( f'undefined base: {base:s}')) identifier = definition_values.get('identifier', None) if not identifier: raise errors.DefinitionReaderError(definition_name, 'missing identifier') default = definition_values.get('default', None) if not default: default_data_type_definition = None else: default_data_type_definition = definitions_registry.GetDefinitionByName( default) if not default_data_type_definition: raise errors.DefinitionReaderError(definition_name, ( f'undefined default: {default:s}')) aliases = definition_values.get('aliases', None) description = definition_values.get('description', None) urls = definition_values.get('urls', None) definition_object = data_types.StructureGroupDefinition( definition_name, base_data_type_definition, identifier, default_data_type_definition, aliases=aliases, description=description, urls=urls) members = definition_values.get('members', None) if not members: raise errors.DefinitionReaderError(definition_name, 'missing members') for member in members: 
member_data_type_definition = definitions_registry.GetDefinitionByName( member) if not member_data_type_definition: raise errors.DefinitionReaderError(definition_name, ( f'undefined member: {member:s}')) member_names = [ structure_member.name for structure_member in member_data_type_definition.members] if definition_object.identifier not in member_names: raise errors.DefinitionReaderError(definition_name, ( f'member: {member:s} has no identifier: ' f'{definition_object.identifier:s}')) try: definition_object.AddMemberDefinition(member_data_type_definition) except KeyError as exception: raise errors.DefinitionReaderError(definition_name, f'{exception!s}') return definition_object def _ReadUnionDataTypeDefinition( self, definitions_registry, definition_values, definition_name, is_member=False): """Reads an union data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. is_member (Optional[bool]): True if the data type definition is a member data type definition. Returns: UnionDefinition: union data type definition. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. """ return self._ReadDataTypeDefinitionWithMembers( definitions_registry, definition_values, data_types.UnionDefinition, definition_name, supports_conditions=False) def _ReadUUIDDataTypeDefinition( self, definitions_registry, definition_values, definition_name, is_member=False): """Reads an UUID data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. is_member (Optional[bool]): True if the data type definition is a member data type definition. Returns: UUIDDataTypeDefinition: UUID data type definition. 
Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. """ return self._ReadFixedSizeDataTypeDefinition( definitions_registry, definition_values, data_types.UUIDDefinition, definition_name, self._SUPPORTED_ATTRIBUTES_FIXED_SIZE_DATA_TYPE, default_size=16, is_member=is_member, supported_size_values=(16, )) class DataTypeDefinitionsFileReader(DataTypeDefinitionsReader): """Data type definitions file reader.""" def _ReadDefinition(self, definitions_registry, definition_values): """Reads a data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. Returns: DataTypeDefinition: data type definition or None. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. """ if not definition_values: raise errors.DefinitionReaderError(None, 'missing definition values') name = definition_values.get('name', None) if not name: raise errors.DefinitionReaderError(None, 'missing name') type_indicator = definition_values.get('type', None) if not type_indicator: raise errors.DefinitionReaderError(name, ( 'invalid definition missing type')) data_type_callback = self._DATA_TYPE_CALLBACKS.get(type_indicator, None) if data_type_callback: data_type_callback = getattr(self, data_type_callback, None) if not data_type_callback: raise errors.DefinitionReaderError(name, ( f'unuspported data type definition: {type_indicator:s}')) return data_type_callback(definitions_registry, definition_values, name) def ReadFile(self, definitions_registry, path): """Reads data type definitions from a file into the registry. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. path (str): path of the file to read from. 
""" with open(path, 'r', encoding='utf-8') as file_object: self.ReadFileObject(definitions_registry, file_object) @abc.abstractmethod def ReadFileObject(self, definitions_registry, file_object): """Reads data type definitions from a file-like object into the registry. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. file_object (file): file-like object to read from. """ class YAMLDataTypeDefinitionsFileReader(DataTypeDefinitionsFileReader): """YAML data type definitions file reader. Attributes: dict[str, object]: metadata. """ def __init__(self): """Initializes a YAML data type definitions file reader.""" super(YAMLDataTypeDefinitionsFileReader, self).__init__() self.metadata = {} def _GetFormatErrorLocation( self, yaml_definition, last_definition_object): """Retrieves a format error location. Args: yaml_definition (dict[str, object]): current YAML definition. last_definition_object (DataTypeDefinition): previous data type definition. Returns: str: format error location. """ name = yaml_definition.get('name', None) if name: name = name or '' error_location = f'in: {name:s}' elif last_definition_object: error_location = f'after: {last_definition_object.name:s}' else: error_location = 'at start' return error_location def ReadFileObject(self, definitions_registry, file_object): """Reads data type definitions from a file-like object into the registry. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. file_object (file): file-like object to read from. Raises: DefinitionReaderError: if the definitions values are missing or if the format is incorrect. FormatError: if the definitions values are missing or if the format is incorrect. 
""" last_definition_object = None error_location = None try: yaml_generator = yaml.safe_load_all(file_object) for yaml_definition in yaml_generator: definition_object = self._ReadDefinition( definitions_registry, yaml_definition) if not definition_object: error_location = self._GetFormatErrorLocation( yaml_definition, last_definition_object) raise errors.FormatError( f'{error_location:s} missing definition object') definitions_registry.RegisterDefinition(definition_object) last_definition_object = definition_object except errors.DefinitionReaderError as exception: exception_name = exception.name or '' raise errors.FormatError(f'in: {exception_name:s} {exception.message:s}') except (yaml.reader.ReaderError, yaml.scanner.ScannerError) as exception: error_location = self._GetFormatErrorLocation({}, last_definition_object) raise errors.FormatError(f'{error_location:s} {exception!s}') dtfabric-20240211/dtfabric/registry.py000066400000000000000000000061701456204725700175450ustar00rootroot00000000000000# -*- coding: utf-8 -*- """The data type definitions registry.""" import typing from typing import Dict, List, Union # pylint: disable=unused-import from dtfabric import definitions if typing.TYPE_CHECKING: from dtfabric import data_types class DataTypeDefinitionsRegistry(object): """Data type definitions registry.""" def __init__(self) -> 'None': """Initializes a data type definitions registry.""" super(DataTypeDefinitionsRegistry, self).__init__() self._aliases: 'Dict[str, str]' = {} self._definitions: 'Dict[str, data_types.DataTypeDefinition]' = {} self._format_definitions: 'List[str]' = [] def DeregisterDefinition( self, data_type_definition: 'data_types.DataTypeDefinition') -> 'None': """Deregisters a data type definition. The data type definitions are identified based on their lower case name. Args: data_type_definition (DataTypeDefinition): data type definition. Raises: KeyError: if a data type definition is not set for the corresponding name. 
""" name = data_type_definition.name.lower() if name not in self._definitions: raise KeyError( f'Definition not set for name: {data_type_definition.name:s}.') del self._definitions[name] def GetDefinitionByName( self, name: 'str') -> 'Union[data_types.DataTypeDefinition, None]': """Retrieves a specific data type definition by name. Args: name (str): name of the data type definition. Returns: DataTypeDefinition: data type definition or None if not available. """ lookup_name = name.lower() if lookup_name not in self._definitions: lookup_name = self._aliases.get(name, lookup_name) return self._definitions.get(lookup_name, None) def GetDefinitions(self) -> 'List[data_types.DataTypeDefinition]': """Retrieves the data type definitions. Returns: list[DataTypeDefinition]: data type definitions. """ return list(self._definitions.values()) def RegisterDefinition( self, data_type_definition: 'data_types.DataTypeDefinition') -> 'None': """Registers a data type definition. The data type definitions are identified based on their lower case name. Args: data_type_definition (DataTypeDefinition): data type definitions. Raises: KeyError: if data type definition is already set for the corresponding name. 
""" name_lower = data_type_definition.name.lower() if name_lower in self._definitions: raise KeyError( f'Definition already set for name: {data_type_definition.name:s}.') if data_type_definition.name in self._aliases: raise KeyError( f'Alias already set for name: {data_type_definition.name:s}.') for alias in data_type_definition.aliases: if alias in self._aliases: raise KeyError(f'Alias already set for name: {alias:s}.') self._definitions[name_lower] = data_type_definition for alias in data_type_definition.aliases: self._aliases[alias] = name_lower if data_type_definition.TYPE_INDICATOR == definitions.TYPE_INDICATOR_FORMAT: self._format_definitions.append(name_lower) dtfabric-20240211/dtfabric/runtime/000077500000000000000000000000001456204725700170025ustar00rootroot00000000000000dtfabric-20240211/dtfabric/runtime/__init__.py000066400000000000000000000000301456204725700211040ustar00rootroot00000000000000# -*- coding: utf-8 -*- dtfabric-20240211/dtfabric/runtime/byte_operations.py000066400000000000000000000047331456204725700225710ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Byte stream operations.""" import abc import struct from dtfabric import errors class ByteStreamOperation(object): """Byte stream operation.""" # Note that redundant-returns-doc is broken for pylint 1.7.x for abstract # methods. # pylint: disable=redundant-returns-doc @abc.abstractmethod def ReadFrom(self, byte_stream): """Read values from a byte stream. Args: byte_stream (bytes): byte stream. Returns: tuple[object, ...]: values copies from the byte stream. """ @abc.abstractmethod def WriteTo(self, values): """Writes values to a byte stream. Args: values (tuple[object, ...]): values to copy to the byte stream. Returns: bytes: byte stream. """ class StructOperation(ByteStreamOperation): """Python struct-base byte stream operation.""" def __init__(self, format_string): """Initializes a Python struct-base byte stream operation. 
Args: format_string (str): format string as used by Python struct. Raises: FormatError: if the struct operation cannot be determined from the data type definition. """ try: struct_object = struct.Struct(format_string) except (TypeError, struct.error) as exception: raise errors.FormatError( f'Unable to create struct object from data type definition ' f'with error: {exception!s}') super(StructOperation, self).__init__() self._struct = struct_object self._struct_format_string = format_string def ReadFrom(self, byte_stream): """Read values from a byte stream. Args: byte_stream (bytes): byte stream. Returns: tuple[object, ...]: values copies from the byte stream. Raises: IOError: if byte stream cannot be read. OSError: if byte stream cannot be read. """ try: return self._struct.unpack_from(byte_stream) except (TypeError, struct.error) as exception: raise IOError(f'Unable to read byte stream with error: {exception!s}') def WriteTo(self, values): """Writes values to a byte stream. Args: values (tuple[object, ...]): values to copy to the byte stream. Returns: bytes: byte stream. Raises: IOError: if byte stream cannot be written. OSError: if byte stream cannot be read. """ try: return self._struct.pack(*values) except (TypeError, struct.error) as exception: raise IOError(f'Unable to write stream with error: {exception!s}') dtfabric-20240211/dtfabric/runtime/data_maps.py000066400000000000000000002261161456204725700213150ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Data type maps.""" import abc import ast import copy import uuid from dtfabric import data_types from dtfabric import decorators from dtfabric import definitions from dtfabric import errors from dtfabric.runtime import byte_operations from dtfabric.runtime import runtime class DataTypeMapContext(object): """Data type map context. Attributes: byte_size (int): byte size. requested_size (int): requested size. state (dict[str, object]): state values per name. values (dict[str, object]): values per name. 
""" def __init__(self, values=None): """Initializes a data type map context. Args: values (dict[str, object]): values per name. """ super(DataTypeMapContext, self).__init__() self.byte_size = None self.requested_size = None self.state = {} self.values = values or {} class DataTypeMapSizeHint(object): """Data type map size hint. Attributes: byte_size (int): byte size. is_complete (bool): True if the size is the complete size of the data type. """ def __init__(self, byte_size, is_complete=False): """Initializes a data type map size hint. Args: byte_size (int): byte size. is_complete (optional[bool]): True if the size is the complete size of the data type. """ super(DataTypeMapSizeHint, self).__init__() self.byte_size = byte_size self.is_complete = is_complete class DataTypeMap(object): """Data type map.""" _MAXIMUM_RECURSION_DEPTH = 10 def __init__(self, data_type_definition): """Initializes a data type map. Args: data_type_definition (DataTypeDefinition): data type definition. Raises: FormatError: if the data type map cannot be determined from the data type definition. """ super(DataTypeMap, self).__init__() self._data_type_definition = data_type_definition @property def name(self): """str: name of the data type definition or None if not available.""" return getattr(self._data_type_definition, 'name', None) @decorators.deprecated def GetByteSize(self): """Retrieves the byte size of the data type map. This method is deprecated use GetSizeHint instead. Returns: int: data type size in bytes or None if size cannot be determined. """ if not self._data_type_definition: return None return self._data_type_definition.GetByteSize() def GetSizeHint(self, **unused_kwargs): """Retrieves a hint about the size. Returns: int: hint of the number of bytes needed from the byte stream or None. 
""" if not self._data_type_definition: return None return self._data_type_definition.GetByteSize() @abc.abstractmethod def FoldByteStream(self, mapped_value, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ @abc.abstractmethod def MapByteStream(self, byte_stream, **unused_kwargs): """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. Returns: object: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ class StorageDataTypeMap(DataTypeMap): """Storage data type map.""" _BYTE_ORDER_STRINGS = { definitions.BYTE_ORDER_BIG_ENDIAN: '>', definitions.BYTE_ORDER_LITTLE_ENDIAN: '<', definitions.BYTE_ORDER_NATIVE: '='} def _CheckByteStreamSize(self, byte_stream, byte_offset, data_type_size): """Checks if the byte stream is large enough for the data type. Args: byte_stream (bytes): byte stream. byte_offset (int): offset into the byte stream where to start. data_type_size (int): data type size. Raises: ByteStreamTooSmallError: if the byte stream is too small. MappingError: if the size of the byte stream cannot be determined. """ try: byte_stream_size = len(byte_stream) except Exception as exception: raise errors.MappingError(exception) if byte_stream_size - byte_offset < data_type_size: raise errors.ByteStreamTooSmallError( f'Byte stream too small requested: {data_type_size:d} available: ' f'{byte_stream_size:d}') def _GetByteStreamOperation(self): """Retrieves the byte stream operation. Returns: ByteStreamOperation: byte stream operation or None if unable to determine. 
""" byte_order_string = self.GetStructByteOrderString() format_string = self.GetStructFormatString() # pylint: disable=assignment-from-none if not format_string: return None format_string = ''.join([byte_order_string, format_string]) return byte_operations.StructOperation(format_string) def GetStructByteOrderString(self): """Retrieves the Python struct format string. Returns: str: format string as used by Python struct or None if format string cannot be determined. """ if not self._data_type_definition: return None return self._BYTE_ORDER_STRINGS.get( self._data_type_definition.byte_order, None) def GetStructFormatString(self): # pylint: disable=redundant-returns-doc """Retrieves the Python struct format string. Returns: str: format string as used by Python struct or None if format string cannot be determined. """ return None @abc.abstractmethod def FoldByteStream(self, mapped_value, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ @abc.abstractmethod def MapByteStream(self, byte_stream, **unused_kwargs): """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. Returns: object: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ class PrimitiveDataTypeMap(StorageDataTypeMap): """Primitive data type map.""" def __init__(self, data_type_definition): """Initializes a primitive data type map. Args: data_type_definition (DataTypeDefinition): data type definition. """ super(PrimitiveDataTypeMap, self).__init__(data_type_definition) self._operation = self._GetByteStreamOperation() def FoldByteStream(self, mapped_value, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. 
Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ try: value = self.FoldValue(mapped_value) return self._operation.WriteTo(tuple([value])) except Exception as exception: raise errors.FoldingError( f'Unable to write: {self._data_type_definition.name:s} to byte ' f'stream with error: {exception!s}') def FoldValue(self, value): """Folds the data type into a value. Args: value (object): value. Returns: object: folded value. Raises: ValueError: if the data type definition cannot be folded into the value. """ return value def MapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: object: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ data_type_size = self._data_type_definition.GetByteSize() self._CheckByteStreamSize(byte_stream, byte_offset, data_type_size) if context: context.byte_size = None context.requested_size = data_type_size try: struct_tuple = self._operation.ReadFrom(byte_stream[byte_offset:]) mapped_value = self.MapValue(*struct_tuple) except Exception as exception: raise errors.MappingError( f'Unable to read: {self._data_type_definition.name:s} from byte ' f'stream at offset: {byte_offset:d} with error: {exception!s}') if context: context.byte_size = data_type_size return mapped_value def MapValue(self, value): """Maps the data type on a value. Args: value (object): value. Returns: object: mapped value. Raises: ValueError: if the data type definition cannot be mapped on the value. """ return value class BooleanMap(PrimitiveDataTypeMap): """Boolean data type map.""" # We use 'I' here instead of 'L' because 'L' behaves architecture dependent. 
  _FORMAT_STRINGS_UNSIGNED = {
      1: 'B',
      2: 'H',
      4: 'I',
  }

  def __init__(self, data_type_definition):
    """Initializes a boolean data type map.

    Args:
      data_type_definition (DataTypeDefinition): data type definition.

    Raises:
      FormatError: if the data type map cannot be determined from the data
          type definition.
    """
    if (data_type_definition.false_value is None and
        data_type_definition.true_value is None):
      raise errors.FormatError(
          'Boolean data type has no True or False values.')

    super(BooleanMap, self).__init__(data_type_definition)

  def GetStructFormatString(self):
    """Retrieves the Python struct format string.

    Returns:
      str: format string as used by Python struct or None if format string
          cannot be determined.
    """
    return self._FORMAT_STRINGS_UNSIGNED.get(
        self._data_type_definition.size, None)

  def FoldValue(self, value):
    """Folds the data type into a value.

    Args:
      value (object): value.

    Returns:
      object: folded value.

    Raises:
      ValueError: if the data type definition cannot be folded into the
          value.
    """
    if value is False and self._data_type_definition.false_value is not None:
      return self._data_type_definition.false_value

    if value is True and self._data_type_definition.true_value is not None:
      return self._data_type_definition.true_value

    raise ValueError('No matching True and False values')

  def MapValue(self, value):
    """Maps the data type on a value.

    Args:
      value (object): value.

    Returns:
      bool: mapped value.

    Raises:
      ValueError: if the data type definition cannot be mapped on the value.
    """
    if self._data_type_definition.false_value == value:
      return False

    if self._data_type_definition.true_value == value:
      return True

    # A None false_value (or true_value) acts as a wildcard: any value that
    # did not match the other, explicitly defined, value maps to it.
    if self._data_type_definition.false_value is None:
      return False

    if self._data_type_definition.true_value is None:
      return True

    raise ValueError('No matching True and False values')


class CharacterMap(PrimitiveDataTypeMap):
  """Character data type map."""

  # We use 'i' here instead of 'l' because 'l' behaves architecture dependent.
  _FORMAT_STRINGS = {
      1: 'b',
      2: 'h',
      4: 'i',
  }

  def GetStructFormatString(self):
    """Retrieves the Python struct format string.

    Returns:
      str: format string as used by Python struct or None if format string
          cannot be determined.
    """
    return self._FORMAT_STRINGS.get(
        self._data_type_definition.size, None)

  def FoldValue(self, value):
    """Folds the data type into a value.

    Args:
      value (object): value.

    Returns:
      object: folded value.

    Raises:
      ValueError: if the data type definition cannot be folded into the
          value.
    """
    # Character is stored as its ordinal (code point) value.
    return ord(value)

  def MapValue(self, value):
    """Maps the data type on a value.

    Args:
      value (object): value.

    Returns:
      str: mapped value.

    Raises:
      ValueError: if the data type definition cannot be mapped on the value.
    """
    return chr(value)


class FloatingPointMap(PrimitiveDataTypeMap):
  """Floating-point data type map."""

  _FORMAT_STRINGS = {
      4: 'f',
      8: 'd',
  }

  def GetStructFormatString(self):
    """Retrieves the Python struct format string.

    Returns:
      str: format string as used by Python struct or None if format string
          cannot be determined.
    """
    return self._FORMAT_STRINGS.get(
        self._data_type_definition.size, None)


class IntegerMap(PrimitiveDataTypeMap):
  """Integer data type map."""

  # We use 'i' here instead of 'l' because 'l' behaves architecture dependent.
  _FORMAT_STRINGS_SIGNED = {
      1: 'b',
      2: 'h',
      4: 'i',
      8: 'q',
  }

  # We use 'I' here instead of 'L' because 'L' behaves architecture dependent.
  _FORMAT_STRINGS_UNSIGNED = {
      1: 'B',
      2: 'H',
      4: 'I',
      8: 'Q',
  }

  def GetStructFormatString(self):
    """Retrieves the Python struct format string.

    Returns:
      str: format string as used by Python struct or None if format string
          cannot be determined.
""" if self._data_type_definition.format == definitions.FORMAT_UNSIGNED: return self._FORMAT_STRINGS_UNSIGNED.get( self._data_type_definition.size, None) return self._FORMAT_STRINGS_SIGNED.get( self._data_type_definition.size, None) class UUIDMap(StorageDataTypeMap): """UUID (or GUID) data type map.""" def __init__(self, data_type_definition): """Initializes an UUID (or GUID) data type map. Args: data_type_definition (DataTypeDefinition): data type definition. """ super(UUIDMap, self).__init__(data_type_definition) self._byte_order = data_type_definition.byte_order def FoldByteStream(self, mapped_value, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ value = None try: if self._byte_order == definitions.BYTE_ORDER_BIG_ENDIAN: value = mapped_value.bytes elif self._byte_order == definitions.BYTE_ORDER_LITTLE_ENDIAN: value = mapped_value.bytes_le except Exception as exception: raise errors.FoldingError( f'Unable to write: {self._data_type_definition.name:s} to byte ' f'stream with error: {exception!s}') return value def MapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: uuid.UUID: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. 
""" data_type_size = self._data_type_definition.GetByteSize() self._CheckByteStreamSize(byte_stream, byte_offset, data_type_size) if context: context.byte_size = None context.requested_size = data_type_size try: if self._byte_order == definitions.BYTE_ORDER_BIG_ENDIAN: mapped_value = uuid.UUID( bytes=byte_stream[byte_offset:byte_offset + 16]) elif self._byte_order == definitions.BYTE_ORDER_LITTLE_ENDIAN: mapped_value = uuid.UUID( bytes_le=byte_stream[byte_offset:byte_offset + 16]) except Exception as exception: raise errors.MappingError( f'Unable to read: {self._data_type_definition.name:s} from byte ' f'stream at offset: {byte_offset:d} with error: {exception!s}') if context: context.byte_size = data_type_size return mapped_value class ElementSequenceDataTypeMap(StorageDataTypeMap): """Element sequence data type map.""" def __init__(self, data_type_definition): """Initializes a sequence data type map. Args: data_type_definition (DataTypeDefinition): data type definition. """ super(ElementSequenceDataTypeMap, self).__init__(data_type_definition) self._element_data_type_map = None self._element_data_type_definition = None self._elements_data_size_expression = None self._number_of_elements_expression = None self._GetElementDataTypeMap(data_type_definition) if data_type_definition.elements_data_size_expression: expression_ast = ast.parse( data_type_definition.elements_data_size_expression, mode='eval') self._elements_data_size_expression = compile( expression_ast, '', mode='eval') if data_type_definition.number_of_elements_expression: expression_ast = ast.parse( data_type_definition.number_of_elements_expression, mode='eval') self._number_of_elements_expression = compile( expression_ast, '', mode='eval') def _CalculateElementsDataSize(self, context): """Calculates the elements data size. Args: context (Optional[DataTypeMapContext]): data type map context, used to determine the size hint. Returns: int: the elements data size or None if not available. 
""" context_state = getattr(context, 'state', {}) elements_data_size = context_state.get('elements_data_size', None) if elements_data_size: return elements_data_size if self._HasElementsDataSize(): elements_data_size = self._EvaluateElementsDataSize(context) elif self._HasNumberOfElements(): element_byte_size = self._element_data_type_definition.GetByteSize() if element_byte_size is not None: number_of_elements = context_state.get('number_of_elements', None) if number_of_elements is None: number_of_elements = self._EvaluateNumberOfElements(context) if number_of_elements is not None: elements_data_size = number_of_elements * element_byte_size return elements_data_size def _EvaluateElementsDataSize(self, context): """Evaluates elements data size. Args: context (DataTypeMapContext): data type map context. Returns: int: elements data size. Raises: MappingError: if the elements data size cannot be determined. """ elements_data_size = None if self._data_type_definition.elements_data_size: elements_data_size = self._data_type_definition.elements_data_size elif self._elements_data_size_expression: expression = self._elements_data_size_expression namespace = {} if context and context.values: namespace.update(context.values) # Make sure __builtins__ contains an empty dictionary. namespace['__builtins__'] = {} try: elements_data_size = eval(expression, namespace) # pylint: disable=eval-used except Exception as exception: raise errors.MappingError( f'Unable to determine elements data size with error: {exception!s}') try: invalid_value = elements_data_size is None or elements_data_size < 0 except TypeError: invalid_value = True if invalid_value: raise errors.MappingError( f'Invalid elements data size: {elements_data_size!s}') return elements_data_size def _EvaluateNumberOfElements(self, context): """Evaluates number of elements. Args: context (DataTypeMapContext): data type map context. Returns: int: number of elements. 
Raises: MappingError: if the number of elements cannot be determined. """ number_of_elements = None if self._data_type_definition.number_of_elements: number_of_elements = self._data_type_definition.number_of_elements elif self._number_of_elements_expression: expression = self._number_of_elements_expression namespace = getattr(context, 'values', {}) # Make sure __builtins__ contains an empty dictionary. namespace['__builtins__'] = {} try: number_of_elements = eval(expression, namespace) # pylint: disable=eval-used except Exception as exception: raise errors.MappingError( f'Unable to determine number of elements with error: {exception!s}') try: invalid_value = number_of_elements is None or number_of_elements < 0 except TypeError: invalid_value = True if invalid_value: raise errors.MappingError( f'Invalid number of elements: {number_of_elements!s}') return number_of_elements def _GetElementDataTypeDefinition(self, data_type_definition): """Retrieves the element data type definition. Args: data_type_definition (DataTypeDefinition): data type definition. Returns: DataTypeDefinition: element data type definition. Raises: FormatError: if the element data type cannot be determined from the data type definition. """ if not data_type_definition: raise errors.FormatError('Missing data type definition') element_data_type_definition = getattr( data_type_definition, 'element_data_type_definition', None) if not element_data_type_definition: raise errors.FormatError( 'Invalid data type definition missing element') return element_data_type_definition def _GetElementDataTypeMap(self, data_type_definition): """Retrieves the element data type map. Args: data_type_definition (DataTypeDefinition): data type definition. Raises: FormatError: if the data type map cannot be determined from the data type definition. 
""" element_data_type_definition = self._GetElementDataTypeDefinition( data_type_definition) element_byte_order = element_data_type_definition.byte_order if (data_type_definition.byte_order != definitions.BYTE_ORDER_NATIVE and element_byte_order == definitions.BYTE_ORDER_NATIVE and not element_data_type_definition.IsComposite()): # Make a copy of the data type definition where byte-order can be # safely changed. element_data_type_definition = copy.copy(element_data_type_definition) element_data_type_definition.name = ( f'_{data_type_definition.name:s}' f'_{element_data_type_definition.name:s}') element_data_type_definition.byte_order = data_type_definition.byte_order self._element_data_type_map = DataTypeMapFactory.CreateDataTypeMapByType( element_data_type_definition) self._element_data_type_definition = element_data_type_definition def _HasElementsDataSize(self): """Checks if the data type defines an elements data size. Returns: bool: True if the data types defines an elements data size. """ return ( self._data_type_definition.elements_data_size is not None or self._data_type_definition.elements_data_size_expression is not None) def _HasElementsTerminator(self): """Checks if the data type defines an elements terminator. Returns: bool: True if the data types defines an elements terminator. """ return self._data_type_definition.elements_terminator is not None def _HasNumberOfElements(self): """Checks if the data type defines a number of elements. Returns: bool: True if the data types defines a number of elements. """ return( self._data_type_definition.number_of_elements is not None or self._data_type_definition.number_of_elements_expression is not None) @abc.abstractmethod def FoldByteStream(self, mapped_value, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. 
""" def GetSizeHint(self, context=None, **unused_kwargs): """Retrieves a hint about the size. Args: context (Optional[DataTypeMapContext]): data type map context, used to determine the size hint. Returns: int: hint of the number of bytes needed from the byte stream or None. """ size_hint = getattr(context, 'byte_size', None) if size_hint is not None: return size_hint context_state = getattr(context, 'state', {}) elements_data_size = context_state.get('elements_data_size', None) if elements_data_size: return elements_data_size if self._HasElementsDataSize(): elements_data_size = self._data_type_definition.GetByteSize() if elements_data_size is None: try: elements_data_size = self._EvaluateElementsDataSize(context) except errors.MappingError: pass elif self._HasElementsTerminator(): element_size_hint = self._element_data_type_definition.GetByteSize() if element_size_hint is None: subcontext = context_state.get('context', None) element_size_hint = self._element_data_type_map.GetSizeHint( context=subcontext) if element_size_hint is not None: elements_data_size = context_state.get('elements_data_offset', 0) elements_data_size += element_size_hint elif self._HasNumberOfElements(): number_of_elements = context_state.get('number_of_elements', None) if number_of_elements is None: try: number_of_elements = self._EvaluateNumberOfElements(context) except errors.MappingError: pass if number_of_elements is not None: element_byte_size = self._element_data_type_definition.GetByteSize() if element_byte_size: elements_data_size = number_of_elements * element_byte_size else: subcontext = context_state.get('context', None) element_size_hint = self._element_data_type_map.GetSizeHint( context=subcontext) if element_size_hint is not None: elements_data_size = context_state.get('elements_data_offset', 0) elements_data_size += element_size_hint return elements_data_size def GetStructByteOrderString(self): """Retrieves the Python struct format string. 
Returns: str: format string as used by Python struct or None if format string cannot be determined. """ if not self._element_data_type_map: return None return self._element_data_type_map.GetStructByteOrderString() @abc.abstractmethod def MapByteStream(self, byte_stream, **unused_kwargs): """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. Returns: object: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ class SequenceMap(ElementSequenceDataTypeMap): """Sequence data type map.""" def __init__(self, data_type_definition): """Initializes a sequence data type map. Args: data_type_definition (DataTypeDefinition): data type definition. """ super(SequenceMap, self).__init__(data_type_definition) self._fold_byte_stream = None self._map_byte_stream = None self._operation = None if (self._element_data_type_definition.IsComposite() or data_type_definition.elements_data_size_expression is not None or data_type_definition.elements_terminator is not None or data_type_definition.number_of_elements_expression is not None): self._fold_byte_stream = self._CompositeFoldByteStream self._map_byte_stream = self._CompositeMapByteStream else: self._fold_byte_stream = self._LinearFoldByteStream self._map_byte_stream = self._LinearMapByteStream self._operation = self._GetByteStreamOperation() def _CompositeFoldByteStream( self, mapped_value, byte_offset=0, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. byte_offset (Optional[int]): offset into the byte stream where to start. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ # TODO: implement. def _CompositeMapByteStream( self, byte_stream, byte_offset=0, context=None, recursion_depth=0, **unused_kwargs): """Maps a sequence of composite data types on a byte stream. Args: byte_stream (bytes): byte stream. 
      byte_offset (Optional[int]): offset into the byte stream where to start.
      context (Optional[DataTypeMapContext]): data type map context.
      recursion_depth (Optional[int]): recursion depth.

    Returns:
      tuple[object, ...]: mapped values.

    Raises:
      ByteStreamTooSmallError: if the byte stream is too small.
      MappingError: if the data type definition cannot be mapped on the byte
          stream.
    """
    if recursion_depth > self._MAXIMUM_RECURSION_DEPTH:
      raise errors.MappingError('At maximum recursion depth')

    elements_data_size = None
    elements_terminator = None
    number_of_elements = None

    if self._HasElementsDataSize():
      elements_data_size = self._EvaluateElementsDataSize(context)

      element_byte_size = self._element_data_type_definition.GetByteSize()
      if element_byte_size is not None:
        number_of_elements, _ = divmod(elements_data_size, element_byte_size)
      else:
        elements_terminator = (
            self._element_data_type_definition.elements_terminator)

    elif self._HasElementsTerminator():
      elements_terminator = self._data_type_definition.elements_terminator

    elif self._HasNumberOfElements():
      number_of_elements = self._EvaluateNumberOfElements(context)

    if elements_terminator is None and number_of_elements is None:
      raise errors.MappingError(
          'Unable to determine element terminator or number of elements')

    # Resume from any partial state left by a previous ByteStreamTooSmallError
    # so mapping can continue once more data is available.
    context_state = getattr(context, 'state', {})

    elements_data_offset = context_state.get('elements_data_offset', 0)
    element_index = context_state.get('element_index', 0)
    element_value = None
    mapped_values = context_state.get('mapped_values', [])
    subcontext = context_state.get('context', None)
    if not subcontext:
      subcontext = DataTypeMapContext()

    if context:
      context.byte_size = None

    try:
      while byte_stream[byte_offset:]:
        if (number_of_elements is not None and
            element_index == number_of_elements):
          break

        if (elements_data_size is not None and
            elements_data_offset >= elements_data_size):
          break

        element_value = self._element_data_type_map.MapByteStream(
            byte_stream, byte_offset=byte_offset, context=subcontext,
            recursion_depth=recursion_depth + 1)

        byte_offset += subcontext.byte_size
        elements_data_offset += subcontext.byte_size
        element_index += 1
        mapped_values.append(element_value)

        if (elements_terminator is not None and
            element_value == elements_terminator):
          break

    except errors.ByteStreamTooSmallError:
      # Preserve progress so a retry with more data can resume.
      context_state['context'] = subcontext
      context_state['element_index'] = element_index
      context_state['elements_data_offset'] = elements_data_offset
      context_state['elements_data_size'] = elements_data_size
      context_state['mapped_values'] = mapped_values
      context_state['number_of_elements'] = number_of_elements

      requested_size = byte_offset + (subcontext.requested_size or 0)
      byte_stream_size = len(byte_stream)

      raise errors.ByteStreamTooSmallError(
          f'Byte stream too small requested: {requested_size:d} available: '
          f'{byte_stream_size:d}')

    except Exception as exception:
      raise errors.MappingError(exception)

    if number_of_elements is not None and element_index != number_of_elements:
      context_state['context'] = subcontext
      context_state['element_index'] = element_index
      context_state['elements_data_offset'] = elements_data_offset
      context_state['elements_data_size'] = elements_data_size
      context_state['mapped_values'] = mapped_values
      context_state['number_of_elements'] = number_of_elements

      error_element_index = element_index - 1
      raise errors.ByteStreamTooSmallError(
          f'Unable to read: {self._data_type_definition.name:s} from byte '
          f'stream at offset: {byte_offset:d} with error: missing element: '
          f'{error_element_index:d}')

    if (elements_terminator is not None and
        element_value != elements_terminator and (
            elements_data_size is None or
            elements_data_offset < elements_data_size)):
      context_state['context'] = subcontext
      context_state['element_index'] = element_index
      context_state['elements_data_offset'] = elements_data_offset
      context_state['elements_data_size'] = elements_data_size
      context_state['mapped_values'] = mapped_values
      context_state['number_of_elements'] = number_of_elements

      raise errors.ByteStreamTooSmallError(
          f'Unable to read: {self._data_type_definition.name:s} from byte '
          f'stream at offset: {byte_offset:d} with error: unable to find '
          f'elements terminator')

    if context:
      context.byte_size = elements_data_offset
      context.state = {}

    return tuple(mapped_values)

  def _LinearFoldByteStream(self, mapped_value, **unused_kwargs):
    """Folds the data type into a byte stream.

    Args:
      mapped_value (object): mapped value.

    Returns:
      bytes: byte stream.

    Raises:
      FoldingError: if the data type definition cannot be folded into the
          byte stream.
    """
    try:
      return self._operation.WriteTo(mapped_value)

    except Exception as exception:
      raise errors.FoldingError(
          f'Unable to write: {self._data_type_definition.name:s} to byte '
          f'stream with error: {exception!s}')

  def _LinearMapByteStream(
      self, byte_stream, byte_offset=0, context=None, **unused_kwargs):
    """Maps a data type sequence on a byte stream.

    Args:
      byte_stream (bytes): byte stream.
      byte_offset (Optional[int]): offset into the byte stream where to start.
      context (Optional[DataTypeMapContext]): data type map context.

    Returns:
      tuple[object, ...]: mapped values.

    Raises:
      ByteStreamTooSmallError: if the byte stream is too small.
      MappingError: if the data type definition cannot be mapped on the byte
          stream.
""" context_state = getattr(context, 'state', {}) elements_data_size = self._data_type_definition.GetByteSize() if context: context.byte_size = None context.requested_size = elements_data_size try: byte_stream_size = len(byte_stream) except Exception as exception: raise errors.MappingError(exception) if byte_stream_size - byte_offset < elements_data_size: context_state['elements_data_size'] = elements_data_size raise errors.ByteStreamTooSmallError( f'Byte stream too small requested: {elements_data_size:d} ' f'available: {byte_stream_size:d}') try: struct_tuple = self._operation.ReadFrom(byte_stream[byte_offset:]) mapped_values = map(self._element_data_type_map.MapValue, struct_tuple) except Exception as exception: raise errors.MappingError( f'Unable to read: {self._data_type_definition.name:s} from byte ' f'stream at offset: {byte_offset:d} with error: {exception!s}') if context: context.byte_size = elements_data_size context.state = {} return tuple(mapped_values) def FoldByteStream(self, mapped_value, **kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ return self._fold_byte_stream(mapped_value, **kwargs) def GetStructFormatString(self): """Retrieves the Python struct format string. Returns: str: format string as used by Python struct or None if format string cannot be determined. 
""" if not self._element_data_type_map: return None number_of_elements = None if self._data_type_definition.elements_data_size: element_byte_size = self._element_data_type_definition.GetByteSize() if element_byte_size is None: return None number_of_elements, _ = divmod( self._data_type_definition.elements_data_size, element_byte_size) elif self._data_type_definition.number_of_elements: number_of_elements = self._data_type_definition.number_of_elements format_string = self._element_data_type_map.GetStructFormatString() if not number_of_elements or not format_string: return None return f'{number_of_elements:d}{format_string:s}' def MapByteStream(self, byte_stream, **kwargs): """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. Returns: tuple[object, ...]: mapped values. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ return self._map_byte_stream(byte_stream, **kwargs) class StreamMap(ElementSequenceDataTypeMap): """Stream data type map.""" def __init__(self, data_type_definition): """Initializes a stream data type map. Args: data_type_definition (DataTypeDefinition): data type definition. Raises: FormatError: if the data type map cannot be determined from the data type definition. """ super(StreamMap, self).__init__(data_type_definition) self._fold_byte_stream = None self._map_byte_stream = None if self._element_data_type_definition.IsComposite(): raise errors.FormatError('Unsupported composite element data type') def FoldByteStream(self, mapped_value, context=None, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. context (Optional[DataTypeMapContext]): data type map context. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. 
""" elements_data_size = self._CalculateElementsDataSize(context) if elements_data_size is not None: if elements_data_size != len(mapped_value): raise errors.FoldingError( 'Mismatch between elements data size and mapped value size') elif not self._HasElementsTerminator(): raise errors.FoldingError('Unable to determine elements data size') else: elements_terminator = self._data_type_definition.elements_terminator elements_terminator_size = len(elements_terminator) if mapped_value[-elements_terminator_size:] != elements_terminator: mapped_value = b''.join([mapped_value, elements_terminator]) return mapped_value def GetStructFormatString(self): """Retrieves the Python struct format string. Returns: str: format string as used by Python struct or None if format string cannot be determined. """ byte_size = self._data_type_definition.GetByteSize() if not byte_size: return None return f'{byte_size:d}B' def MapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: tuple[object, ...]: mapped values. Raises: ByteStreamTooSmallError: if the byte stream is too small. MappingError: if the data type definition cannot be mapped on the byte stream. 
""" context_state = getattr(context, 'state', {}) elements_data_size = self._CalculateElementsDataSize(context) if context: context.byte_size = None context.requested_size = elements_data_size if elements_data_size is not None: try: byte_stream_size = len(byte_stream) except Exception as exception: raise errors.MappingError(exception) if byte_stream_size - byte_offset < elements_data_size: context_state['elements_data_size'] = elements_data_size raise errors.ByteStreamTooSmallError( f'Byte stream too small requested: {elements_data_size:d} ' f'available: {byte_stream_size:d}') elif not self._HasElementsTerminator(): raise errors.MappingError( 'Unable to determine elements data size and missing elements ' 'terminator') else: element_byte_size = self._element_data_type_definition.GetByteSize() elements_data_offset = byte_offset next_elements_data_offset = elements_data_offset + element_byte_size elements_terminator = self._data_type_definition.elements_terminator element_value = byte_stream[ elements_data_offset:next_elements_data_offset] while byte_stream[elements_data_offset:]: elements_data_offset = next_elements_data_offset if element_value == elements_terminator: elements_data_size = elements_data_offset - byte_offset break next_elements_data_offset += element_byte_size element_value = byte_stream[ elements_data_offset:next_elements_data_offset] if element_value != elements_terminator: context_state['elements_data_offset'] = ( elements_data_offset - byte_offset) raise errors.ByteStreamTooSmallError( f'Unable to read: {self._data_type_definition.name:s} from byte ' f'stream at offset: {byte_offset:d} with error: unable to find ' f'elements terminator') if context: context.byte_size = elements_data_size context.state = {} return byte_stream[byte_offset:byte_offset + elements_data_size] class PaddingMap(DataTypeMap): """Padding data type map.""" def _CalculatePaddingSize(self, byte_offset): """Calculates the padding size. 
Args: byte_offset (int): offset into the byte stream where to start. Returns: int: size of the padding in number of bytes. """ alignment_size = self._data_type_definition.alignment_size _, byte_size = divmod(byte_offset, alignment_size) if byte_size > 0: byte_size = alignment_size - byte_size return byte_size def FoldByteStream(self, mapped_value, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ return mapped_value def FoldValue(self, value): """Folds the data type into a value. Args: value (object): value. Returns: object: folded value. Raises: ValueError: if the data type definition cannot be folded into the value. """ return value def GetSizeHint(self, byte_offset=0, context=None, **unused_kwargs): """Retrieves a hint about the size. Args: byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context, used to determine the size hint. Returns: int: hint of the number of bytes needed from the byte stream or None. """ size_hint = getattr(context, 'byte_size', None) if size_hint is not None: return size_hint return self._CalculatePaddingSize(byte_offset) def GetStructFormatString(self): # pylint: disable=redundant-returns-doc """Retrieves the Python struct format string. Returns: str: format string as used by Python struct or None if format string cannot be determined. """ return None def MapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: object: mapped value. Raises: ByteStreamTooSmallError: if the byte stream is too small. 
""" padding_size = self._CalculatePaddingSize(byte_offset) if context: context.byte_size = None context.requested_size = padding_size byte_stream_size = len(byte_stream) if byte_stream_size - byte_offset < padding_size: raise errors.ByteStreamTooSmallError( f'Byte stream too small requested: {padding_size:d} available: ' f'{byte_stream_size:d}') mapped_value = byte_stream[byte_offset:byte_offset + padding_size] if context: context.byte_size = padding_size return mapped_value def MapValue(self, value): """Maps the data type on a value. Args: value (object): value. Returns: object: mapped value. Raises: ValueError: if the data type definition cannot be mapped on the value. """ return value class StringMap(StreamMap): """String data type map.""" def FoldByteStream(self, mapped_value, **kwargs): # pylint: disable=arguments-differ """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ try: byte_stream = mapped_value.encode(self._data_type_definition.encoding) except Exception as exception: raise errors.FoldingError( f'Unable to write: {self._data_type_definition.name:s} to byte ' f'stream with error: {exception!s}') return super(StringMap, self).FoldByteStream(byte_stream, **kwargs) def MapByteStream(self, byte_stream, byte_offset=0, **kwargs): # pylint: disable=arguments-differ """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. Returns: str: mapped values. Raises: ByteStreamTooSmallError: if the byte stream is too small. MappingError: if the data type definition cannot be mapped on the byte stream. """ byte_stream = super(StringMap, self).MapByteStream( byte_stream, byte_offset=byte_offset, **kwargs) if self._HasElementsTerminator(): # Remove the elements terminator and any trailing data from # the byte stream. 
elements_terminator = self._data_type_definition.elements_terminator elements_terminator_size = len(elements_terminator) byte_offset = 0 byte_stream_size = len(byte_stream) while byte_offset < byte_stream_size: end_offset = byte_offset + elements_terminator_size if byte_stream[byte_offset:end_offset] == elements_terminator: break byte_offset += elements_terminator_size byte_stream = byte_stream[:byte_offset] try: return byte_stream.decode(self._data_type_definition.encoding) except Exception as exception: raise errors.MappingError( f'Unable to read: {self._data_type_definition.name:s} from byte ' f'stream at offset: {byte_offset:d} with error: {exception!s}') class StructureMap(StorageDataTypeMap): """Structure data type map.""" def __init__(self, data_type_definition): """Initializes a structure data type map. Args: data_type_definition (DataTypeDefinition): data type definition. """ super(StructureMap, self).__init__(data_type_definition) self._attribute_names = None self._data_type_maps = None self._fold_byte_stream = None self._format_string = None self._map_byte_stream = None self._number_of_attributes = None self._operation = None self._structure_values_class = ( runtime.StructureValuesClassFactory.CreateClass( data_type_definition)) self._GetMemberDataTypeMaps(data_type_definition) if self._CheckLinearMap(data_type_definition): self._fold_byte_stream = self._LinearFoldByteStream self._map_byte_stream = self._LinearMapByteStream self._operation = self._GetByteStreamOperation() else: self._fold_byte_stream = self._CompositeFoldByteStream self._map_byte_stream = self._CompositeMapByteStream def _CheckLinearMap(self, data_type_definition): """Determines if the data type definition supports using a linear map. Args: data_type_definition (DataTypeDefinition): structure data type definition. Returns: bool: True if a composite map is needed, False otherwise. Raises: FormatError: if a composite map is needed cannot be determined from the data type definition. 
""" if not data_type_definition: raise errors.FormatError('Missing data type definition') members = getattr(data_type_definition, 'members', None) if not members: raise errors.FormatError('Invalid data type definition missing members') supports_linear_map = True last_member_byte_order = data_type_definition.byte_order for member_definition in members: if (member_definition.IsComposite() or isinstance(member_definition, data_types.PaddingDefinition)): supports_linear_map = False break if (last_member_byte_order != definitions.BYTE_ORDER_NATIVE and member_definition.byte_order != definitions.BYTE_ORDER_NATIVE and last_member_byte_order != member_definition.byte_order): supports_linear_map = False break last_member_byte_order = member_definition.byte_order return supports_linear_map def _CompositeFoldByteStream( self, mapped_value, context=None, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. context (Optional[DataTypeMapContext]): data type map context. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. 
""" context_state = getattr(context, 'state', {}) attribute_index = context_state.get('attribute_index', 0) subcontext = context_state.get('context', None) if not subcontext: subcontext = DataTypeMapContext(values={ type(mapped_value).__name__: mapped_value}) data_attributes = [] for attribute_index in range(attribute_index, self._number_of_attributes): attribute_name = self._attribute_names[attribute_index] data_type_map = self._data_type_maps[attribute_index] member_value = getattr(mapped_value, attribute_name, None) if data_type_map is None or member_value is None: continue member_data = data_type_map.FoldByteStream( member_value, context=subcontext) if member_data is None: return None data_attributes.append(member_data) if context: context.state = {} return b''.join(data_attributes) def _CompositeMapByteStream( self, byte_stream, byte_offset=0, context=None, recursion_depth=0, **unused_kwargs): """Maps a sequence of composite data types on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. recursion_depth (Optional[int]): recursion depth. Returns: object: mapped value. Raises: ByteStreamTooSmallError: if the byte stream is too small. MappingError: if the data type definition cannot be mapped on the byte stream. """ if recursion_depth > self._MAXIMUM_RECURSION_DEPTH: raise errors.MappingError('At maximum recursion depth') context_state = getattr(context, 'state', {}) context_values = getattr(context, 'values', {}) attribute_index = context_state.get('attribute_index', 0) mapped_values = context_state.get('mapped_values', None) subcontext = context_state.get('context', None) if not mapped_values: mapped_values = self._structure_values_class() if not subcontext: subcontext_values = {type(mapped_values).__name__: mapped_values} # Pass externally defined values. 
subcontext_values.update(context_values) subcontext = DataTypeMapContext(values=subcontext_values) if context: context.byte_size = None members_data_size = 0 for attribute_index in range(attribute_index, self._number_of_attributes): attribute_name = self._attribute_names[attribute_index] data_type_map = self._data_type_maps[attribute_index] member_definition = self._data_type_definition.members[attribute_index] # TODO: pre-compile condition condition = getattr(member_definition, 'condition', None) if condition: namespace = dict(subcontext.values) # Make sure __builtins__ contains an empty dictionary. namespace['__builtins__'] = {} try: condition_result = eval(condition, namespace) # pylint: disable=eval-used except Exception as exception: raise errors.MappingError( f'Unable to evaluate condition with error: {exception!s}') if not isinstance(condition_result, bool): raise errors.MappingError( 'Condition does not result in a boolean value') if not condition_result: continue try: value = data_type_map.MapByteStream( byte_stream, byte_offset=byte_offset, context=subcontext, recursion_depth=recursion_depth + 1) setattr(mapped_values, attribute_name, value) except errors.ByteStreamTooSmallError: context_state['attribute_index'] = attribute_index context_state['context'] = subcontext context_state['mapped_values'] = mapped_values context_state['members_data_size'] = members_data_size requested_size = byte_offset + (subcontext.requested_size or 0) byte_stream_size = len(byte_stream) raise errors.ByteStreamTooSmallError( f'Byte stream too small requested: {requested_size:d} available: ' f'{byte_stream_size:d}') except Exception as exception: raise errors.MappingError(exception) supported_values = getattr(member_definition, 'values', None) if supported_values and value not in supported_values: supported_values_string = ', '.join([ f'{value!s}' for value in supported_values]) raise errors.MappingError( f'Value: {value!s} not in supported values: ' 
f'{supported_values_string:s}') byte_offset += subcontext.byte_size members_data_size += subcontext.byte_size if attribute_index != (self._number_of_attributes - 1): context_state['attribute_index'] = attribute_index context_state['context'] = subcontext context_state['mapped_values'] = mapped_values context_state['members_data_size'] = members_data_size raise errors.ByteStreamTooSmallError( f'Unable to read: {self._data_type_definition.name:s} from byte ' f'stream at offset: {byte_offset:d} with error: missing attribute: ' f'{attribute_index:d}') if context: context.byte_size = members_data_size context.state = {} return mapped_values def _GetMemberDataTypeMaps(self, data_type_definition): """Retrieves the member data type maps. Args: data_type_definition (DataTypeDefinition): data type definition. Raises: FormatError: if the data type maps cannot be determined from the data type definition. """ if not data_type_definition: raise errors.FormatError('Missing data type definition') members = getattr(data_type_definition, 'members', None) if not members: raise errors.FormatError('Invalid data type definition missing members') self._attribute_names = [] self._data_type_maps = [] self._number_of_attributes = 0 data_type_map_cache = {} members_data_size = 0 for member_definition in members: member_name = member_definition.name if isinstance(member_definition, data_types.MemberDataTypeDefinition): member_definition = member_definition.member_data_type_definition if (data_type_definition.byte_order != definitions.BYTE_ORDER_NATIVE and member_definition.byte_order == definitions.BYTE_ORDER_NATIVE): # Make a copy of the data type definition where byte-order can be # safely changed. 
member_definition = copy.copy(member_definition) member_definition.name = ( f'_{data_type_definition.name:s}_{member_definition.name:s}') member_definition.byte_order = data_type_definition.byte_order if member_definition.name not in data_type_map_cache: data_type_map = DataTypeMapFactory.CreateDataTypeMapByType( member_definition) data_type_map_cache[member_definition.name] = data_type_map data_type_map = data_type_map_cache[member_definition.name] if members_data_size is not None: byte_size = member_definition.GetByteSize() if byte_size is None: members_data_size = None else: members_data_size += byte_size self._attribute_names.append(member_name) self._data_type_maps.append(data_type_map) self._number_of_attributes += 1 def _LinearFoldByteStream(self, mapped_value, **unused_kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ try: attribute_values = [ getattr(mapped_value, attribute_name, None) for attribute_name in self._attribute_names] attribute_values = [ value for value in attribute_values if value is not None] return self._operation.WriteTo(tuple(attribute_values)) except Exception as exception: raise errors.FoldingError( f'Unable to write: {self._data_type_definition.name:s} to byte ' f'stream with error: {exception!s}') def _LinearMapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): """Maps a data type sequence on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: object: mapped value. Raises: ByteStreamTooSmallError: if the byte stream is too small. MappingError: if the data type definition cannot be mapped on the byte stream. 
""" members_data_size = self._data_type_definition.GetByteSize() context_state = getattr(context, 'state', {}) if context: context.byte_size = None context.requested_size = members_data_size try: byte_stream_size = len(byte_stream) except Exception as exception: raise errors.MappingError(exception) if byte_stream_size - byte_offset < members_data_size: context_state['attribute_index'] = self._number_of_attributes context_state['members_data_size'] = members_data_size raise errors.ByteStreamTooSmallError( f'Byte stream too small requested: {members_data_size:d} available: ' f'{byte_stream_size:d}') try: struct_tuple = self._operation.ReadFrom(byte_stream[byte_offset:]) struct_values = [] for attribute_index, value in enumerate(struct_tuple): data_type_map = self._data_type_maps[attribute_index] member_definition = self._data_type_definition.members[attribute_index] value = data_type_map.MapValue(value) supported_values = getattr(member_definition, 'values', None) if supported_values and value not in supported_values: supported_values_string = ', '.join([ f'{value!s}' for value in supported_values]) raise errors.MappingError( f'Value: {value!s} not in supported values: ' f'{supported_values_string:s}') struct_values.append(value) mapped_value = self._structure_values_class(*struct_values) except Exception as exception: raise errors.MappingError( f'Unable to read: {self._data_type_definition.name:s} from byte ' f'stream at offset: {byte_offset:d} with error: {exception!s}') if context: context.byte_size = members_data_size context.state = {} return mapped_value def CreateStructureValues(self, *args, **kwargs): """Creates a structure values object. Returns: object: structure values. """ return self._structure_values_class(*args, **kwargs) def FoldByteStream(self, mapped_value, **kwargs): """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. 
Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ return self._fold_byte_stream(mapped_value, **kwargs) def GetSizeHint(self, context=None, **unused_kwargs): """Retrieves a hint about the size. Args: context (Optional[DataTypeMapContext]): data type map context, used to determine the size hint. Returns: int: hint of the number of bytes needed from the byte stream or None. """ size_hint = getattr(context, 'byte_size', None) if size_hint is not None: return size_hint context_state = getattr(context, 'state', {}) attribute_index = context_state.get('attribute_index', 0) mapped_values = context_state.get('mapped_values', None) subcontext = context_state.get('context', None) if not mapped_values: mapped_values = self._structure_values_class() if not subcontext: subcontext_values = {type(mapped_values).__name__: mapped_values} subcontext = DataTypeMapContext(values=subcontext_values) size_hint = context_state.get('members_data_size', 0) for attribute_index in range(attribute_index, self._number_of_attributes): data_type_map = self._data_type_maps[attribute_index] attribute_size_hint = data_type_map.GetSizeHint( byte_offset=size_hint, context=subcontext) if attribute_size_hint is None: break size_hint += attribute_size_hint # A new subcontext is created to prevent the current state affecting # the size hint calculation of subsequent attributes. subcontext = DataTypeMapContext() return size_hint def GetStructFormatString(self): """Retrieves the Python struct format string. Returns: str: format string as used by Python struct or None if format string cannot be determined. 
""" if self._format_string is None and self._data_type_maps: format_strings = [] for member_data_type_map in self._data_type_maps: if member_data_type_map is None: return None member_format_string = member_data_type_map.GetStructFormatString() if member_format_string is None: return None format_strings.append(member_format_string) self._format_string = ''.join(format_strings) return self._format_string def MapByteStream(self, byte_stream, **kwargs): """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. Returns: object: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ return self._map_byte_stream(byte_stream, **kwargs) class SemanticDataTypeMap(DataTypeMap): """Semantic data type map.""" def FoldByteStream(self, mapped_value, **unused_kwargs): # pylint: disable=redundant-returns-doc """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ raise errors.FoldingError( f'Unable to fold {self._data_type_definition.TYPE_INDICATOR:s} data ' f'type into byte stream') def MapByteStream(self, byte_stream, **unused_kwargs): # pylint: disable=redundant-returns-doc """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. Returns: object: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ raise errors.MappingError( f'Unable to map {self._data_type_definition.TYPE_INDICATOR:s} data ' f'type to byte stream') class ConstantMap(SemanticDataTypeMap): """Constant data type map.""" class EnumerationMap(SemanticDataTypeMap): """Enumeration data type map.""" def GetName(self, number): """Retrieves the name of an enumeration value by number. Args: number (int): number. Returns: str: name of the enumeration value or None if no corresponding enumeration value was found. 
""" value = self._data_type_definition.values_per_number.get(number, None) if not value: return None return value.name class LayoutDataTypeMap(DataTypeMap): """Layout data type map.""" def FoldByteStream(self, mapped_value, **unused_kwargs): # pylint: disable=redundant-returns-doc """Folds the data type into a byte stream. Args: mapped_value (object): mapped value. Returns: bytes: byte stream. Raises: FoldingError: if the data type definition cannot be folded into the byte stream. """ raise errors.FoldingError( f'Unable to fold {self._data_type_definition.TYPE_INDICATOR:s} data ' f'type into byte stream') @abc.abstractmethod def MapByteStream(self, byte_stream, **unused_kwargs): """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. Returns: object: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ class FormatMap(LayoutDataTypeMap): """Format data type map.""" @property def layout(self): """Retrieves the layout element definitions. Returns: list[LayoutElementDefinition]: layout element definitions. """ return getattr(self._data_type_definition, 'layout', []) def MapByteStream(self, byte_stream, **unused_kwargs): # pylint: disable=redundant-returns-doc """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. Returns: object: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ raise errors.MappingError( f'Unable to map {self._data_type_definition.TYPE_INDICATOR:s} data ' f'type to byte stream') class StructureGroupMap(LayoutDataTypeMap): """Structure group data type map.""" def __init__(self, data_type_definition): """Initializes a data type map. Args: data_type_definition (DataTypeDefinition): data type definition. Raises: FormatError: if the data type map cannot be determined from the data type definition. 
""" default_data_type_map = None if data_type_definition.default: default_data_type_map = DataTypeMapFactory.CreateDataTypeMapByType( data_type_definition.default) super(StructureGroupMap, self).__init__(data_type_definition) self._base_data_type_map = DataTypeMapFactory.CreateDataTypeMapByType( data_type_definition.base) self._data_type_maps = None self._default_data_type_map = default_data_type_map self._GetMemberDataTypeMaps(data_type_definition) def _GetMemberDataTypeMaps(self, data_type_definition): """Retrieves the member data type maps. Args: data_type_definition (DataTypeDefinition): data type definition. Raises: FormatError: if the data type maps cannot be determined from the data type definition. """ if not data_type_definition: raise errors.FormatError('Missing data type definition') members = getattr(data_type_definition, 'members', None) if not members: raise errors.FormatError('Invalid data type definition missing members') self._data_type_maps = {} for group_member_definition in members: struct_member_definition = ( group_member_definition.GetMemberDefinitionByName( data_type_definition.identifier)) if not struct_member_definition: raise errors.FormatError( f'No such member: {data_type_definition.identifier:s} of: ' f'{group_member_definition.name:s}') if struct_member_definition.values is None: raise errors.FormatError( f'No values defined for member: ' f'{data_type_definition.identifier:s} of: ' f'{group_member_definition.name:s}') for value in struct_member_definition.values: if value in self._data_type_maps: raise errors.FormatError( f'Duplicate value: {value!s} for member: ' f'{data_type_definition.identifier:s} of: ' f'{group_member_definition.name:s}') data_type_map = DataTypeMapFactory.CreateDataTypeMapByType( group_member_definition) self._data_type_maps[value] = data_type_map @decorators.deprecated def GetByteSize(self): # pylint: disable=redundant-returns-doc """Retrieves the byte size of the data type map. 
This method is deprecated use GetSizeHint instead. Returns: int: data type size in bytes or None if size cannot be determined. """ return None def GetSizeHint(self, context=None, **kwargs): """Retrieves a hint about the size. Args: context (Optional[DataTypeMapContext]): data type map context, used to determine the size hint. Returns: int: hint of the number of bytes needed from the byte stream or None. """ size_hint = getattr(context, 'byte_size', None) if size_hint is not None: return size_hint context_state = getattr(context, 'state', {}) member_identifier = context_state.get('member_identifier', None) subcontext = context_state.get('context', None) if not subcontext: subcontext = DataTypeMapContext() member_data_type_map = self._data_type_maps.get(member_identifier, None) if member_data_type_map: return member_data_type_map.GetSizeHint(context=subcontext, **kwargs) return self._base_data_type_map.GetSizeHint(context=subcontext, **kwargs) def MapByteStream(self, byte_stream, context=None, **kwargs): """Maps the data type on a byte stream. Args: byte_stream (bytes): byte stream. context (Optional[DataTypeMapContext]): data type map context. Returns: object: mapped value. Raises: ByteStreamTooSmallError: if the byte stream is too small. MappingError: if the data type definition cannot be mapped on the byte stream. 
""" context_state = getattr(context, 'state', {}) member_identifier = context_state.get('member_identifier', None) if member_identifier is None: subcontext = context_state.get('context', None) if not subcontext: subcontext = DataTypeMapContext() try: mapped_base_value = self._base_data_type_map.MapByteStream( byte_stream, context=subcontext, **kwargs) except errors.ByteStreamTooSmallError as exception: context_state['context'] = subcontext raise exception except Exception as exception: raise errors.MappingError(exception) member_identifier = getattr( mapped_base_value, self._data_type_definition.identifier, None) if member_identifier is None: raise errors.MappingError( f'Unable to determine value of ' f'{self._data_type_definition.identifier:s}') context_state['member_identifier'] = member_identifier member_data_type_map = self._data_type_maps.get( member_identifier, self._default_data_type_map) if member_data_type_map is None: raise errors.MappingError( f'Missing member data type map for ' f'{self._data_type_definition.identifier:s}: ' f'{member_identifier!s}') subcontext = context_state.get('context', None) if not subcontext: subcontext = DataTypeMapContext() if context: context.byte_size = None try: value = member_data_type_map.MapByteStream( byte_stream, context=subcontext, **kwargs) except errors.ByteStreamTooSmallError as exception: context_state['context'] = subcontext raise exception except Exception as exception: raise errors.MappingError(exception) if context: context.byte_size = subcontext.byte_size context.state = {} return value class DataTypeMapFactory(object): """Factory for data type maps.""" # TODO: add support for definitions.TYPE_INDICATOR_FORMAT # TODO: add support for definitions.TYPE_INDICATOR_STRUCTURE_FAMILY _MAP_PER_DEFINITION = { definitions.TYPE_INDICATOR_BOOLEAN: BooleanMap, definitions.TYPE_INDICATOR_CHARACTER: CharacterMap, definitions.TYPE_INDICATOR_CONSTANT: ConstantMap, definitions.TYPE_INDICATOR_ENUMERATION: EnumerationMap, 
definitions.TYPE_INDICATOR_FLOATING_POINT: FloatingPointMap, definitions.TYPE_INDICATOR_FORMAT: FormatMap, definitions.TYPE_INDICATOR_INTEGER: IntegerMap, definitions.TYPE_INDICATOR_PADDING: PaddingMap, definitions.TYPE_INDICATOR_SEQUENCE: SequenceMap, definitions.TYPE_INDICATOR_STREAM: StreamMap, definitions.TYPE_INDICATOR_STRING: StringMap, definitions.TYPE_INDICATOR_STRUCTURE: StructureMap, definitions.TYPE_INDICATOR_STRUCTURE_GROUP: StructureGroupMap, definitions.TYPE_INDICATOR_UUID: UUIDMap} def __init__(self, definitions_registry): """Initializes a data type maps factory. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. """ super(DataTypeMapFactory, self).__init__() self._definitions_registry = definitions_registry def CreateDataTypeMap(self, definition_name): """Creates a specific data type map by name. Args: definition_name (str): name of the data type definition. Returns: DataTypeMap: data type map or None if the date type definition is not available. """ data_type_map = None data_type_definition = self.GetDataTypeDefinition(definition_name) if data_type_definition: data_type_map = DataTypeMapFactory.CreateDataTypeMapByType( data_type_definition) return data_type_map @classmethod def CreateDataTypeMapByType(cls, data_type_definition): """Creates a specific data type map by type indicator. Args: data_type_definition (DataTypeDefinition): data type definition. Returns: DataTypeMap: data type map or None if the date type definition is not available. """ data_type_map_class = cls._MAP_PER_DEFINITION.get( data_type_definition.TYPE_INDICATOR, None) if not data_type_map_class: return None return data_type_map_class(data_type_definition) def GetDataTypeDefinition(self, definition_name): """Retrieves a specific data type definition by name. Args: definition_name (str): name of the data type definition. Returns: DataTypeDefinition: data type definition or None if the date type definition is not available. 
""" return self._definitions_registry.GetDefinitionByName(definition_name) dtfabric-20240211/dtfabric/runtime/fabric.py000066400000000000000000000022051456204725700206010ustar00rootroot00000000000000# -*- coding: utf-8 -*- """dtFabric helper objects.""" import io from dtfabric import reader from dtfabric import registry from dtfabric.runtime import data_maps class DataTypeFabric(data_maps.DataTypeMapFactory): """Data type fabric.""" def __init__(self, yaml_definition=None): """Initializes a data type fabric. Args: yaml_definition (bytes): UTF-8 and YAML formatted data type definitions. """ definitions_registry = registry.DataTypeDefinitionsRegistry() if yaml_definition: definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() file_object = io.BytesIO(yaml_definition) definitions_reader.ReadFileObject(definitions_registry, file_object) super(DataTypeFabric, self).__init__(definitions_registry) def GetDefinitionByName( self, name: 'str') -> 'Union[data_types.DataTypeDefinition, None]': """Retrieves a specific data type definition by name. Args: name (str): name of the data type definition. Returns: DataTypeDefinition: data type definition or None if not available. 
""" return self._definitions_registry.GetDefinitionByName(name) dtfabric-20240211/dtfabric/runtime/runtime.py000066400000000000000000000141311456204725700210370ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Run-time objects.""" try: import __builtin__ as builtins except ImportError: import builtins import keyword import sys from dtfabric import data_types from dtfabric import definitions class StructureValuesClassFactory(object): """Structure values class factory.""" _CLASS_TEMPLATE = '\n'.join([ 'class {type_name:s}(object):', ' """{type_description:s}.', '', ' Attributes:', '{class_attributes_description:s}', ' """', '', ' def __init__(self, {init_arguments:s}):', ' """Initializes an instance of {type_name:s}."""', ' super({type_name:s}, self).__init__()', '{instance_attributes:s}', '']) _PYTHON_NATIVE_TYPES = { definitions.TYPE_INDICATOR_BOOLEAN: 'bool', definitions.TYPE_INDICATOR_CHARACTER: 'str', definitions.TYPE_INDICATOR_FLOATING_POINT: 'float', definitions.TYPE_INDICATOR_INTEGER: 'int', definitions.TYPE_INDICATOR_STREAM: 'bytes', definitions.TYPE_INDICATOR_STRING: 'str', definitions.TYPE_INDICATOR_UUID: 'uuid.UUID'} @classmethod def _CreateClassTemplate(cls, data_type_definition): """Creates the class template. Args: data_type_definition (DataTypeDefinition): data type definition. Returns: str: class template. 
""" type_name = data_type_definition.name type_description = data_type_definition.description or type_name while type_description.endswith('.'): type_description = type_description[:-1] class_attributes_description = [] init_arguments = [] instance_attributes = [] for member_definition in data_type_definition.members: attribute_name = member_definition.name description = member_definition.description or attribute_name while description.endswith('.'): description = description[:-1] member_data_type = getattr(member_definition, 'member_data_type', '') if isinstance(member_definition, data_types.MemberDataTypeDefinition): member_definition = member_definition.member_data_type_definition member_type_indicator = member_definition.TYPE_INDICATOR if member_type_indicator == definitions.TYPE_INDICATOR_SEQUENCE: element_type_indicator = member_definition.element_data_type member_type_indicator = f'tuple[{element_type_indicator:s}]' else: member_type_indicator = cls._PYTHON_NATIVE_TYPES.get( member_type_indicator, member_data_type) argument = f'{attribute_name:s}=None' definition = f' self.{attribute_name:s} = {attribute_name:s}' description = ( f' {attribute_name:s} ({member_type_indicator:s}): ' f'{description:s}.') class_attributes_description.append(description) init_arguments.append(argument) instance_attributes.append(definition) class_attributes_description = '\n'.join( sorted(class_attributes_description)) init_arguments = ', '.join(init_arguments) instance_attributes = '\n'.join(sorted(instance_attributes)) template_values = { 'class_attributes_description': class_attributes_description, 'init_arguments': init_arguments, 'instance_attributes': instance_attributes, 'type_description': type_description, 'type_name': type_name} return cls._CLASS_TEMPLATE.format(**template_values) @classmethod def _IsIdentifier(cls, string): """Checks if a string contains an identifier. Args: string (str): string to check. 
Returns: bool: True if the string contains an identifier, False otherwise. """ return ( string and not string[0].isdigit() and all(character.isalnum() or character == '_' for character in string)) @classmethod def _ValidateDataTypeDefinition(cls, data_type_definition): """Validates the data type definition. Args: data_type_definition (DataTypeDefinition): data type definition. Raises: ValueError: if the data type definition is not considered valid. """ if not cls._IsIdentifier(data_type_definition.name): raise ValueError( f'Data type definition name: {data_type_definition.name!s} not ' f'a valid identifier') if keyword.iskeyword(data_type_definition.name): raise ValueError( f'Data type definition name: {data_type_definition.name!s} matches ' f'keyword') members = getattr(data_type_definition, 'members', None) if not members: raise ValueError( f'Data type definition name: {data_type_definition.name!s} missing ' f'members') defined_attribute_names = set() for member_definition in members: attribute_name = member_definition.name if not cls._IsIdentifier(attribute_name): raise ValueError( f'Attribute name: {attribute_name!s} not a valid identifier') if attribute_name.startswith('_'): raise ValueError( f'Attribute name: {attribute_name!s} starts with underscore') if keyword.iskeyword(attribute_name): raise ValueError( f'Attribute name: {attribute_name!s} matches keyword') if attribute_name in defined_attribute_names: raise ValueError( f'Attribute name: {attribute_name!s} already defined') defined_attribute_names.add(attribute_name) @classmethod def CreateClass(cls, data_type_definition): """Creates a new structure values class. Args: data_type_definition (DataTypeDefinition): data type definition. Returns: class: structure values class. 
""" cls._ValidateDataTypeDefinition(data_type_definition) class_definition = cls._CreateClassTemplate(data_type_definition) namespace = { '__builtins__' : { 'object': builtins.object, 'super': builtins.super}, '__name__': f'{data_type_definition.name:s}'} if sys.version_info[0] >= 3: # pylint: disable=no-member namespace['__builtins__']['__build_class__'] = builtins.__build_class__ exec(class_definition, namespace) # pylint: disable=exec-used return namespace[data_type_definition.name] dtfabric-20240211/pyproject.toml000066400000000000000000000004501456204725700164540ustar00rootroot00000000000000[build-system] requires = ["setuptools", "wheel"] build-backend = "setuptools.build_meta" [tool.docformatter] black = false non-cap = ["dfDateTime", "dfImageTools", "dfVFS", "dfWinReg", "dtFabric", "iMessage", "iOS", "iPod", "mDNS"] non-strict = false wrap-summaries = 80 wrap-descriptions = 80 dtfabric-20240211/requirements.txt000066400000000000000000000000171456204725700170230ustar00rootroot00000000000000PyYAML >= 3.10 dtfabric-20240211/run_tests.py000077500000000000000000000014111456204725700161410ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- """Script to run the tests.""" import sys import unittest # Change PYTHONPATH to include dtFabric. sys.path.insert(0, '.') if __name__ == '__main__': print(f'Using Python version {sys.version!s}') fail_unless_has_test_file = '--fail-unless-has-test-file' in sys.argv setattr(unittest, 'fail_unless_has_test_file', fail_unless_has_test_file) if fail_unless_has_test_file: # Remove --fail-unless-has-test-file otherwise it will conflict with # the argparse tests. 
sys.argv.remove('--fail-unless-has-test-file') test_suite = unittest.TestLoader().discover('tests', pattern='*.py') test_results = unittest.TextTestRunner(verbosity=2).run(test_suite) if not test_results.wasSuccessful(): sys.exit(1) dtfabric-20240211/scripts/000077500000000000000000000000001456204725700152305ustar00rootroot00000000000000dtfabric-20240211/scripts/validate-definitions.py000077500000000000000000000062221456204725700217110ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # pylint: disable=invalid-name """Script to validate dtFabric format definitions.""" import argparse import glob import logging import os import sys from dtfabric import errors from dtfabric import reader from dtfabric import registry class DefinitionsValidator(object): """dtFabric definitions validator.""" def CheckDirectory(self, path, extension='yaml'): """Validates definition files in a directory. Args: path (str): path of the definition file. extension (Optional[str]): extension of the filenames to read. Returns: bool: True if the directory contains valid definitions. """ result = True if extension: glob_spec = os.path.join(path, f'*.{extension:s}') else: glob_spec = os.path.join(path, '*') for definition_file in sorted(glob.glob(glob_spec)): if not self.CheckFile(definition_file): result = False return result def CheckFile(self, path): """Validates the definition in a file. Args: path (str): path of the definition file. Returns: bool: True if the file contains valid definitions. 
""" print(f'Checking: {path:s}') definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() result = False try: definitions_reader.ReadFile(definitions_registry, path) result = True except KeyError as exception: logging.warning( f'Unable to register data type definition in file: {path:s} with ' f'error: {exception!s}') except errors.FormatError as exception: logging.warning( f'Unable to validate file: {path:s} with error: {exception!s}') return result def Main(): """The main program function. Returns: bool: True if successful or False if not. """ argument_parser = argparse.ArgumentParser( description='Validates dtFabric format definitions.') argument_parser.add_argument( 'source', nargs='?', action='store', metavar='PATH', default=None, help=('path of the file or directory containing the dtFabric format ' 'definitions.')) options = argument_parser.parse_args() if not options.source: print('Source value is missing.') print('') argument_parser.print_help() print('') return False if not os.path.exists(options.source): print(f'No such file: {options.source:s}') print('') return False logging.basicConfig( level=logging.INFO, format='[%(levelname)s] %(message)s') source_is_directory = os.path.isdir(options.source) validator = DefinitionsValidator() if source_is_directory: source_description = os.path.join(options.source, '*.yaml') else: source_description = options.source print(f'Validating dtFabric definitions in: {source_description:s}') if source_is_directory: result = validator.CheckDirectory(options.source) else: result = validator.CheckFile(options.source) if not result: print('FAILURE') else: print('SUCCESS') return result if __name__ == '__main__': if not Main(): sys.exit(1) else: sys.exit(0) dtfabric-20240211/setup.cfg000066400000000000000000000020211456204725700153550ustar00rootroot00000000000000[metadata] name = dtfabric version = 20240211 description = Data type fabric (dtfabric) 
long_description = dtFabric, or data type fabric, is a project to manage data types and structures, as used in the libyal projects. long_description_content_type = text/plain url = https://github.com/libyal/dtfabric maintainer = Joachim Metz maintainer_email = joachim.metz@gmail.com license = Apache License, Version 2.0 license_files = ACKNOWLEDGEMENTS AUTHORS LICENSE README classifiers = Development Status :: 3 - Alpha Programming Language :: Python [options] install_requires = file:requirements.txt package_dir = dtfabric = dtfabric packages = find: python_requires = >=3.8 scripts = scripts/validate-definitions.py [options.packages.find] exclude = docs tests tests.* utils where = . [bdist_rpm] release = 1 packager = Joachim Metz doc_files = ACKNOWLEDGEMENTS AUTHORS LICENSE README build_requires = python3-setuptools requires = python3-pyyaml >= 3.10 [bdist_wheel] universal = 1 dtfabric-20240211/setup.py000077500000000000000000000002001456204725700152460ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- """Installation and deployment script.""" from setuptools import setup setup() dtfabric-20240211/test_data/000077500000000000000000000000001456204725700155115ustar00rootroot00000000000000dtfabric-20240211/test_data/Notepad.lnk000066400000000000000000000026221456204725700176130ustar00rootroot00000000000000LÀFŸ `€$ð~Äâ˜d~ÄH%ó"zÄóPàOÐ ê:i¢Ø+00/C:\<1 1 }WINDOWS&ï¾ 1 } 1¸WINDOWS@1 1 }system32(ï¾ 1 } 1¸system32H21h notepad.exe.ï¾ 1Ã| 1¸notepad.exeQ0PØk5HDDC:\WINDOWS\system32\notepad.exe)@%SystemRoot%\system32\shell32.dll,-22563+..\..\..\..\..\WINDOWS\system32\notepad.exe%HOMEDRIVE%%HOMEPATH% %SystemRoot%\system32\notepad.exeP;´ÏF‘|P;(;ˆÐ\ ‘|E ‘|N ‘|Ö$LÑ(A‘|pý|ÄÏ”Ðî|p ‘|Àä—|o>‘|b>‘|%SystemRoot%\system32\notepad.exe>C> ÔäÑ6Ôp‘|ÿÿÿÿ8ÓÐìÐî|=‘|ÿÿÿÿb>‘|¨m‘|dÑÔÜÐE ‘|N ‘|pýôÐpý|tÃF‘|$ÔøÑ.žŸv´Ñù£€|ø,äÑ Ô Ô9š€|ÑÜÿî|ˆD‘|äÑD‘|ÚE‘||шڞŸvÔÄÑf|ˆÚq| Ô9š€| Ñ¤ÑÜÿó™ƒ|x|ÿÿÿÿq|zëŸvˆÚ$ÔˆÚ€ØÐëŸvˆÚC:\WINDOWSÂÂ@Ò3R‘| 
%©dtfabric-20240211/test_data/boolean.yaml000066400000000000000000000001431456204725700200120ustar00rootroot00000000000000# dtFabric format specification. --- name: bool type: boolean attributes: size: 1 units: bytes dtfabric-20240211/test_data/character.yaml000066400000000000000000000001451456204725700203310ustar00rootroot00000000000000# dtFabric format specification. --- name: char type: character attributes: size: 1 units: bytes dtfabric-20240211/test_data/constant.yaml000066400000000000000000000004341456204725700202270ustar00rootroot00000000000000# dtFabric format specification. --- name: maximum_number_of_back_traces aliases: [AVRF_MAX_TRACES] type: constant description: Application verifier resource enumeration maximum number of back traces urls: ['https://msdn.microsoft.com/en-us/library/bb432193(v=vs.85).aspx'] value: 32 dtfabric-20240211/test_data/definitions/000077500000000000000000000000001456204725700200245ustar00rootroot00000000000000dtfabric-20240211/test_data/definitions/booleans.yaml000066400000000000000000000010141456204725700225060ustar00rootroot00000000000000# dtFabric format specification. --- name: bool8 aliases: [BOOLEAN] type: boolean description: 8-bit boolean type attributes: size: 1 units: bytes false_value: 0 true_value: 1 --- name: bool16 type: boolean description: 16-bit boolean type attributes: byte_order: little-endian size: 2 units: bytes false_value: 0 true_value: 1 --- name: bool32 aliases: [BOOL] type: boolean description: 32-bit boolean type attributes: byte_order: little-endian size: 4 units: bytes false_value: 0 true_value: 1 dtfabric-20240211/test_data/definitions/characters.yaml000066400000000000000000000007051456204725700230310ustar00rootroot00000000000000# dtFabric format specification. 
--- name: char aliases: [CHAR] type: character description: 8-bit narrow character type attributes: size: 1 units: bytes --- name: wchar16 aliases: [WCHAR] type: character description: 16-bit wide character type attributes: byte_order: little-endian size: 2 units: bytes --- name: wchar32 type: character description: 32-bit wide character type attributes: byte_order: little-endian size: 4 units: bytes dtfabric-20240211/test_data/definitions/floating-points.yaml000066400000000000000000000006251456204725700240300ustar00rootroot00000000000000# dtFabric format specification. --- name: float32 aliases: [float, FLOAT] type: floating-point description: 32-bit single precision floating-point type attributes: byte_order: little-endian size: 4 units: bytes --- name: float64 aliases: [double, DOUBLE] type: floating-point description: 64-bit double precision floating-point type attributes: byte_order: little-endian size: 8 units: bytes dtfabric-20240211/test_data/definitions/integers.yaml000066400000000000000000000025241456204725700225330ustar00rootroot00000000000000# dtFabric format specification. 
--- name: int8 type: integer description: 8-bit signed integer type attributes: format: signed size: 1 units: bytes --- name: int16 type: integer description: 16-bit signed integer type attributes: byte_order: little-endian format: signed size: 2 units: bytes --- name: int32 aliases: [LONG, LONG32] type: integer description: 32-bit signed integer type attributes: byte_order: little-endian format: signed size: 4 units: bytes --- name: int64 aliases: [LONG64] type: integer description: 64-bit signed integer type attributes: byte_order: little-endian format: signed size: 8 units: bytes --- name: uint8 aliases: [BYTE] type: integer description: 8-bit unsigned integer type attributes: format: unsigned size: 1 units: bytes --- name: uint16 aliases: [WORD] type: integer description: 16-bit unsigned integer type attributes: byte_order: little-endian format: unsigned size: 2 units: bytes --- name: uint32 aliases: [DWORD, DWORD32, ULONG, ULONG32] type: integer description: 32-bit unsigned integer type attributes: byte_order: little-endian format: unsigned size: 4 units: bytes --- name: uint64 aliases: [DWORDLONG, DWORD64, ULONG64] type: integer description: 64-bit unsigned integer type attributes: byte_order: little-endian format: unsigned size: 8 units: bytes dtfabric-20240211/test_data/enumeration.yaml000066400000000000000000000014541456204725700207270ustar00rootroot00000000000000# dtFabric format specification. 
--- name: object_information_type aliases: [MINIDUMP_HANDLE_OBJECT_INFORMATION_TYPE] type: enumeration description: Minidump object information type urls: ['https://msdn.microsoft.com/en-us/library/windows/desktop/ms680376(v=vs.85).aspx'] values: - name: MiniHandleObjectInformationNone number: 0 description: No object-specific information available - name: MiniThreadInformation1 number: 1 description: Thread object information - name: MiniMutantInformation1 number: 2 description: Mutant object information - name: MiniMutantInformation2 number: 3 description: Mutant object information - name: MiniProcessInformation1 number: 4 description: Process object information - name: MiniProcessInformation2 number: 5 description: Process object information dtfabric-20240211/test_data/floating-point.yaml000066400000000000000000000003321456204725700213250ustar00rootroot00000000000000# dtFabric format specification. --- name: float32 aliases: [float, FLOAT] type: floating-point description: 32-bit single precision floating-point type attributes: byte_order: little-endian size: 4 units: bytes dtfabric-20240211/test_data/format.yaml000066400000000000000000000010531456204725700176640ustar00rootroot00000000000000# dtFabric format specification. --- name: format_with_layout type: format description: Test format with layout attributes: byte_order: big-endian layout: - data_type: file_header offset: 0 --- name: byte type: integer attributes: format: unsigned size: 1 units: bytes --- name: uint32 type: integer attributes: format: unsigned size: 4 units: bytes --- name: file_header type: structure members: - name: signature type: stream element_data_type: byte elements_data_size: 4 value: "1234" - name: format_version data_type: uint32 dtfabric-20240211/test_data/integer.yaml000066400000000000000000000006171456204725700200360ustar00rootroot00000000000000# dtFabric format specification. 
--- name: int32 type: integer attributes: format: signed size: 4 units: bytes --- name: int32be type: integer attributes: byte_order: big-endian format: signed size: 4 units: bytes --- name: int32le type: integer attributes: byte_order: little-endian format: signed size: 4 units: bytes --- name: int type: integer attributes: format: signed dtfabric-20240211/test_data/padding.yaml000066400000000000000000000001231456204725700177770ustar00rootroot00000000000000# dtFabric format specification. --- name: padding type: padding alignment_size: 8 dtfabric-20240211/test_data/sequence.yaml000066400000000000000000000007111456204725700202040ustar00rootroot00000000000000# dtFabric format specification. --- name: int32 type: integer description: 32-bit signed integer type attributes: byte_order: little-endian format: signed size: 4 units: bytes --- name: vector4 aliases: [VECTOR] type: sequence description: 4-dimensional vector element_data_type: int32 number_of_elements: 4 --- name: triangle4 aliases: [TRIANGLE] type: sequence description: 4-dimensional triangle element_data_type: vector4 number_of_elements: 3 dtfabric-20240211/test_data/sequence_with_context.yaml000066400000000000000000000010741456204725700230060ustar00rootroot00000000000000# dtFabric format specification. --- name: int32 type: integer description: 32-bit signed integer type attributes: byte_order: little-endian format: signed size: 4 units: bytes --- name: nvector type: sequence description: n-dimensional vector element_data_type: int32 number_of_elements: n --- name: fixed_size_vector type: sequence description: vector with a fixed size element_data_type: int32 elements_data_size: 32 --- name: variable_size_vector type: sequence description: vector with a variable size element_data_type: int32 elements_data_size: vector_size dtfabric-20240211/test_data/sequence_with_structure.yaml000066400000000000000000000006611456204725700233630ustar00rootroot00000000000000# dtFabric format specification. 
--- name: int32 type: integer attributes: format: signed size: 4 units: bytes --- name: vector type: structure attributes: byte_order: little-endian members: - name: number_of_elements data_type: int32 - name: values type: sequence element_data_type: int32 number_of_elements: vector.number_of_elements --- name: vectors type: sequence element_data_type: vector number_of_elements: 3 dtfabric-20240211/test_data/stream.yaml000066400000000000000000000012211456204725700176640ustar00rootroot00000000000000# dtFabric format specification. --- name: wchar16 aliases: [WCHAR] type: character description: 16-bit wide character type attributes: byte_order: little-endian size: 2 units: bytes --- name: utf16le_stream aliases: [UTF16LE] type: stream description: UTF-16 little-endian stream element_data_type: wchar16 number_of_elements: 8 --- name: utf16le_stream_with_size type: stream description: UTF-16 little-endian stream with size element_data_type: wchar16 elements_data_size: size --- name: utf16le_stream_with_terminator type: stream description: UTF-16 little-endian stream with terminator element_data_type: wchar16 elements_terminator: "\x00\x00" dtfabric-20240211/test_data/string.yaml000066400000000000000000000013031456204725700177000ustar00rootroot00000000000000# dtFabric format specification. 
--- name: char type: character attributes: byte_order: little-endian size: 1 units: bytes --- name: wchar16 aliases: [WCHAR] type: character description: 16-bit wide character type attributes: byte_order: little-endian size: 2 units: bytes --- name: utf8_string type: string description: UTF-8 string encoding: utf8 element_data_type: char elements_terminator: "\x00" --- name: utf8_string_fixed_size type: string description: UTF-8 string encoding: utf8 element_data_type: char elements_data_size: 16 elements_terminator: "\x00" --- name: utf16_string type: string description: UTF-16 string encoding: utf-16-le element_data_type: wchar16 number_of_elements: 8 dtfabric-20240211/test_data/string_array.yaml000066400000000000000000000014621456204725700211040ustar00rootroot00000000000000# dtFabric format specification. --- name: char type: integer attributes: format: signed size: 1 units: bytes --- name: uint32 type: integer attributes: format: signed size: 4 units: bytes --- name: cstring type: string encoding: ascii element_data_type: char elements_terminator: "\x00" --- name: string_array type: structure attributes: byte_order: little-endian members: - name: number_of_strings data_type: uint32 - name: strings type: sequence element_data_type: cstring number_of_elements: string_array.number_of_strings --- name: string_array_with_size type: structure attributes: byte_order: little-endian members: - name: strings_data_size data_type: uint32 - name: strings type: sequence element_data_type: cstring elements_data_size: string_array_with_size.strings_data_size dtfabric-20240211/test_data/structure.yaml000066400000000000000000000020431456204725700204340ustar00rootroot00000000000000# dtFabric format specification. --- name: int32 type: integer description: 32-bit signed integer type attributes: format: signed size: 4 units: bytes --- name: point3d aliases: [POINT] type: structure description: Point in 3 dimensional space. 
attributes: byte_order: little-endian members: - name: x aliases: [XCOORD] data_type: int32 - name: y data_type: int32 - name: z data_type: int32 --- name: triangle3d type: structure description: Triangle in 3 dimensional space. members: - name: a data_type: point3d - name: b data_type: point3d - name: c data_type: point3d --- name: box3d type: structure description: Box in 3 dimensional space. members: - name: triangles type: sequence element_data_type: triangle3d number_of_elements: 12 --- name: sphere3d type: structure attributes: byte_order: little-endian description: Sphere in 3 dimensional space. members: - name: number_of_triangles data_type: int32 - name: triangles type: sequence element_data_type: triangle3d number_of_elements: sphere3d.number_of_triangles dtfabric-20240211/test_data/structure_family.yaml000066400000000000000000000060731456204725700220040ustar00rootroot00000000000000# dtFabric format specification. --- name: byte type: integer attributes: format: unsigned size: 1 units: bytes --- name: uint16 type: integer attributes: format: unsigned size: 2 units: bytes --- name: uint32 type: integer attributes: format: unsigned size: 4 units: bytes --- name: uint64 type: integer attributes: format: unsigned size: 8 units: bytes --- name: group_descriptor_ext2 type: structure description: Group descriptor members: - name: block_bitmap_block_number data_type: uint32 - name: inode_bitmap_block_number data_type: uint32 - name: inode_table_block_number data_type: uint32 - name: number_of_unallocated_blocks data_type: uint16 - name: number_of_unallocated_inodes data_type: uint16 - name: number_of_directories data_type: uint16 - name: padding1 data_type: uint16 - name: reserved1 type: stream element_data_type: byte elements_data_size: 12 --- name: group_descriptor_ext4 type: structure description: Group descriptor members: - name: block_bitmap_block_number_lower data_type: uint32 - name: inode_bitmap_block_number_lower data_type: uint32 - name: 
inode_table_block_number_lower data_type: uint32 - name: number_of_unallocated_blocks_lower data_type: uint16 - name: number_of_unallocated_inodes_lower data_type: uint16 - name: number_of_directories_lower data_type: uint16 - name: block_group_flags data_type: uint16 - name: exclude_bitmap_block_number_lower data_type: uint32 - name: block_bitmap_checksum_lower data_type: uint16 - name: inode_bitmap_checksum_lower data_type: uint16 - name: number_of_unused_inodes data_type: uint16 - name: checksum data_type: uint16 - name: block_bitmap_block_number_upper data_type: uint32 - name: inode_bitmap_block_number_upper data_type: uint32 - name: inode_table_block_number_upper data_type: uint32 - name: number_of_unallocated_blocks_upper data_type: uint16 - name: number_of_unallocated_inodes_upper data_type: uint16 - name: number_of_directories_upper data_type: uint16 - name: number_of_unused_inodes_upper data_type: uint16 - name: exclude_bitmap_block_number_upper data_type: uint32 - name: block_bitmap_checksum_upper data_type: uint16 - name: inode_bitmap_checksum_upper data_type: uint16 - name: reserved1 data_type: uint32 --- name: group_descriptor_base type: structure description: Group descriptor members: - name: block_bitmap_block_number data_type: uint64 - name: inode_bitmap_block_number data_type: uint64 - name: inode_table_block_number data_type: uint64 - name: number_of_unallocated_blocks data_type: uint32 - name: number_of_unallocated_inodes data_type: uint32 - name: number_of_directories data_type: uint32 - name: block_group_flags data_type: uint16 - name: exclude_bitmap_block_number data_type: uint64 - name: block_bitmap_checksum data_type: uint32 - name: inode_bitmap_checksum data_type: uint32 - name: number_of_unused_inodes data_type: uint32 --- name: group_descriptor type: structure-family description: Group descriptor base: group_descriptor_base members: - group_descriptor_ext2 - group_descriptor_ext4 
dtfabric-20240211/test_data/structure_group.yaml000066400000000000000000000030501456204725700216470ustar00rootroot00000000000000# dtFabric format specification. --- name: char type: integer attributes: format: signed size: 1 units: bytes --- name: uint8 type: integer attributes: format: unsigned size: 1 units: bytes --- name: uint16 type: integer attributes: format: unsigned size: 2 units: bytes --- name: uint32 type: integer attributes: format: unsigned size: 4 units: bytes --- name: uint64 type: integer attributes: format: unsigned size: 8 units: bytes --- name: bsm_token_arg32 type: structure attributes: byte_order: big-endian members: - name: token_type data_type: uint8 value: 0x2d - name: argument_index data_type: uint8 - name: argument_name data_type: uint32 - name: argument_value_size data_type: uint16 - name: argument_value type: string encoding: ascii element_data_type: char elements_data_size: bsm_token_data_arg32.argument_value_size --- name: bsm_token_arg64 type: structure attributes: byte_order: big-endian members: - name: token_type data_type: uint8 value: 0x71 - name: argument_index data_type: uint8 - name: argument_name data_type: uint64 - name: argument_value_size data_type: uint16 - name: argument_value type: string encoding: ascii element_data_type: char elements_data_size: bsm_token_data_arg64.argument_value_size --- name: bsm_token_base type: structure attributes: byte_order: big-endian members: - name: token_type data_type: uint8 --- name: bsm_token type: structure-group description: BSM token base: bsm_token_base identifier: token_type members: - bsm_token_arg32 - bsm_token_arg64 dtfabric-20240211/test_data/structure_with_condition.yaml000066400000000000000000000012131456204725700235330ustar00rootroot00000000000000# dtFabric format specification. 
--- name: uint16 type: integer attributes: format: unsigned size: 2 units: bytes --- name: uint32 type: integer attributes: format: unsigned size: 4 units: bytes --- name: structure_with_condition type: structure attributes: byte_order: little-endian members: - name: flags data_type: uint16 - name: data1 data_type: uint32 - name: conditional_data1 data_type: uint32 condition: structure_with_condition.flags & 0x0001 != 0 - name: data2 data_type: uint32 - name: conditional_data2 data_type: uint32 condition: structure_with_condition.flags & 0x8000 != 0 - name: data3 data_type: uint32 dtfabric-20240211/test_data/structure_with_context.yaml000066400000000000000000000007601456204725700232370ustar00rootroot00000000000000# dtFabric format specification. --- name: byte type: integer attributes: format: unsigned size: 1 units: bytes --- name: uint32 type: integer attributes: format: unsigned size: 4 units: bytes --- name: instance_block_header type: structure attributes: byte_order: little-endian members: - name: name_offset data_type: uint32 - name: unknown1 data_type: byte - name: property_value_offsets type: sequence element_data_type: uint32 number_of_elements: number_of_properties dtfabric-20240211/test_data/structure_with_padding.yaml000066400000000000000000000013101456204725700231510ustar00rootroot00000000000000# dtFabric format specification. 
--- name: byte type: integer attributes: format: unsigned size: 1 units: bytes --- name: uint16 type: integer attributes: format: unsigned size: 2 units: bytes --- name: structure_with_padding type: structure attributes: byte_order: little-endian members: - name: data_size data_type: uint16 - name: padding type: padding alignment_size: 8 --- name: structure_with_padding_and_stream type: structure attributes: byte_order: little-endian members: - name: data_size data_type: uint16 - name: data type: stream element_data_type: byte elements_data_size: structure_with_padding_and_stream.data_size - name: padding type: padding alignment_size: 8 dtfabric-20240211/test_data/structure_with_section.yaml000066400000000000000000000005021456204725700232110ustar00rootroot00000000000000# dtFabric format specification. --- name: int32 type: integer attributes: format: signed size: 4 units: bytes --- name: 3dsphere type: structure members: - section: 3dcoordinate - name: x data_type: int32 - name: y data_type: int32 - name: z data_type: int32 - section: size - name: radius data_type: int32 dtfabric-20240211/test_data/structure_with_sequence.yaml000066400000000000000000000007241456204725700233630ustar00rootroot00000000000000# dtFabric format specification. --- name: byte type: integer attributes: format: unsigned size: 1 units: bytes --- name: uint32 type: integer attributes: format: unsigned size: 4 units: bytes --- name: extension_block type: structure attributes: byte_order: little-endian members: - name: size data_type: uint32 - name: data type: sequence element_data_type: byte number_of_elements: 0 if extension_block.size == 0 else extension_block.size - 4 dtfabric-20240211/test_data/structure_with_stream.yaml000066400000000000000000000007221456204725700230440ustar00rootroot00000000000000# dtFabric format specification. 
--- name: byte type: integer attributes: format: unsigned size: 1 units: bytes --- name: uint32 type: integer attributes: format: unsigned size: 4 units: bytes --- name: extension_block type: structure attributes: byte_order: little-endian members: - name: size data_type: uint32 - name: data type: stream element_data_type: byte elements_data_size: 0 if extension_block.size == 0 else extension_block.size - 4 dtfabric-20240211/test_data/structure_with_string.yaml000066400000000000000000000006761456204725700230670ustar00rootroot00000000000000# dtFabric format specification. --- name: wchar16 type: integer attributes: format: signed size: 2 units: bytes --- name: uint16 type: integer attributes: format: unsigned size: 2 units: bytes --- name: utf16_string type: structure attributes: byte_order: little-endian members: - name: size data_type: uint16 - name: text type: string encoding: utf-16-le element_data_type: wchar16 elements_data_size: utf16_string.size dtfabric-20240211/test_data/structure_with_union.yaml000066400000000000000000000005621456204725700227030ustar00rootroot00000000000000# dtFabric format specification. --- name: uint32 type: integer attributes: format: unsigned size: 4 units: bytes --- name: float32 type: floating-point attributes: byte_order: little-endian size: 4 units: bytes --- name: intfloat32 type: structure members: - type: union members: - name: int data_type: uint32 - name: float data_type: float32 dtfabric-20240211/test_data/structure_with_values.yaml000066400000000000000000000012741456204725700230530ustar00rootroot00000000000000# dtFabric format specification. 
--- name: byte type: integer attributes: format: unsigned size: 1 units: bytes --- name: uint32 type: integer attributes: format: unsigned size: 4 units: bytes --- name: structure_with_value type: structure members: - name: signature type: stream element_data_type: byte elements_data_size: 4 value: "test" - name: data_size data_type: uint32 - name: data type: stream element_data_type: byte elements_data_size: structure_with_value.data_size --- name: structure_with_values type: structure attributes: byte_order: little-endian members: - name: format_version data_type: uint32 values: [2, 3] - name: data_size data_type: uint32 dtfabric-20240211/test_data/union.yaml000066400000000000000000000005771456204725700175360ustar00rootroot00000000000000# dtFabric format specification. --- name: int16 type: integer description: 16-bit signed integer type attributes: format: signed size: 2 units: bytes --- name: int32 type: integer description: 32-bit signed integer type attributes: format: signed size: 4 units: bytes --- name: union type: union members: - name: long data_type: int32 - name: short data_type: int16 dtfabric-20240211/test_data/union_with_condition.yaml000066400000000000000000000005741456204725700226340ustar00rootroot00000000000000# dtFabric format specification. --- name: uint16 type: integer attributes: format: unsigned size: 2 units: bytes --- name: uint32 type: integer attributes: format: unsigned size: 4 units: bytes --- name: union_with_condition type: union members: - name: flags data_type: uint16 - name: long data_type: uint32 condition: union_with_condition.flags & 0x8000 != 0 dtfabric-20240211/test_data/uuid.yaml000066400000000000000000000003101456204725700173350ustar00rootroot00000000000000# dtFabric format specification. 
--- name: uuid aliases: [guid, GUID, UUID] type: uuid description: Globally or Universal unique identifier (GUID or UUID) type attributes: byte_order: little-endian dtfabric-20240211/test_dependencies.ini000066400000000000000000000001531456204725700177260ustar00rootroot00000000000000[mock] dpkg_name: python3-mock minimum_version: 2.0.0 rpm_name: python3-mock version_property: __version__ dtfabric-20240211/test_requirements.txt000066400000000000000000000000161456204725700200610ustar00rootroot00000000000000mock >= 2.0.0 dtfabric-20240211/tests/000077500000000000000000000000001456204725700147035ustar00rootroot00000000000000dtfabric-20240211/tests/__init__.py000066400000000000000000000000301456204725700170050ustar00rootroot00000000000000# -*- coding: utf-8 -*- dtfabric-20240211/tests/data_types.py000066400000000000000000000464011456204725700174170ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Tests for the data type definitions.""" import unittest from dtfabric import data_types from dtfabric import definitions from tests import test_lib class DataTypeDefinitionTest(test_lib.BaseTestCase): """Data type definition tests.""" def testIsComposite(self): """Tests the IsComposite function.""" data_type_definition = data_types.DataTypeDefinition( 'int32', aliases=['LONG', 'LONG32'], description='signed 32-bit integer') result = data_type_definition.IsComposite() self.assertFalse(result) class StorageDataTypeDefinitionTest(test_lib.BaseTestCase): """Storage data type definition tests.""" def testInitialize(self): """Tests the __init__ function.""" data_type_definition = data_types.StorageDataTypeDefinition( 'int32', aliases=['LONG', 'LONG32'], description='signed 32-bit integer') self.assertIsNotNone(data_type_definition) class FixedSizeDataTypeDefinitionTest(test_lib.BaseTestCase): """Fixed-size data type definition tests.""" def testInitialize(self): """Tests the __init__ function.""" data_type_definition = data_types.FixedSizeDataTypeDefinition( 'int32', 
aliases=['LONG', 'LONG32'], description='signed 32-bit integer') self.assertIsNotNone(data_type_definition) def testGetByteSize(self): """Tests the GetByteSize function.""" data_type_definition = data_types.FixedSizeDataTypeDefinition( 'int32', aliases=['LONG', 'LONG32'], description='signed 32-bit integer') byte_size = data_type_definition.GetByteSize() self.assertIsNone(byte_size) data_type_definition.size = 4 byte_size = data_type_definition.GetByteSize() self.assertEqual(byte_size, 4) class BooleanDefinitionTest(test_lib.BaseTestCase): """Boolean data type definition tests.""" def testInitialize(self): """Tests the __init__ function.""" data_type_definition = data_types.BooleanDefinition( 'bool32', aliases=['BOOL'], description='boolean') self.assertIsNotNone(data_type_definition) class CharacterDefinitionTest(test_lib.BaseTestCase): """Character data type definition tests.""" class FloatingPointDefinitionTest(test_lib.BaseTestCase): """Floating-point data type definition tests.""" class IntegerDefinitionTest(test_lib.BaseTestCase): """Integer data type definition tests.""" def testInitialize(self): """Tests the __init__ function.""" data_type_definition = data_types.IntegerDefinition( 'int32', aliases=['LONG', 'LONG32'], description='signed 32-bit integer') self.assertIsNotNone(data_type_definition) class UUIDDefinitionTest(test_lib.BaseTestCase): """UUID data type definition tests.""" def testInitialize(self): """Tests the __init__ function.""" data_type_definition = data_types.UUIDDefinition( 'guid', aliases=['GUID'], description='GUID') self.assertIsNotNone(data_type_definition) def testIsComposite(self): """Tests the IsComposite function.""" data_type_definition = data_types.UUIDDefinition( 'guid', aliases=['GUID'], description='GUID') result = data_type_definition.IsComposite() self.assertTrue(result) class ElementSequenceDataTypeDefinitionTest(test_lib.BaseTestCase): """Element sequence data type definition tests.""" def testInitialize(self): """Tests 
the __init__ function.""" element_definition = data_types.IntegerDefinition('int32') data_type_definition = data_types.ElementSequenceDataTypeDefinition( 'offsets', element_definition, description='offsets array') self.assertIsNotNone(data_type_definition) def testGetByteSize(self): """Tests the GetByteSize function.""" data_type_definition = data_types.ElementSequenceDataTypeDefinition( 'offsets', None, description='offsets array') byte_size = data_type_definition.GetByteSize() self.assertIsNone(byte_size) element_definition = data_types.IntegerDefinition('int32') element_definition.format = definitions.FORMAT_SIGNED element_definition.size = 4 data_type_definition.element_data_type_definition = element_definition byte_size = data_type_definition.GetByteSize() self.assertIsNone(byte_size) data_type_definition.elements_data_size = 0 data_type_definition.number_of_elements = 32 byte_size = data_type_definition.GetByteSize() self.assertEqual(byte_size, 128) data_type_definition.elements_data_size = 128 data_type_definition.number_of_elements = 0 byte_size = data_type_definition.GetByteSize() self.assertEqual(byte_size, 128) # TODO: test self.element_data_type_definition.GetByteSize() returns None class SequenceDefinitionTest(test_lib.BaseTestCase): """Sequence data type definition tests.""" class StreamDefinitionTest(test_lib.BaseTestCase): """Stream data type definition tests.""" class StringDefinitionTest(test_lib.BaseTestCase): """String data type definition tests.""" def testInitialize(self): """Tests the __init__ function.""" element_definition = data_types.IntegerDefinition('wchar16') data_type_definition = data_types.StringDefinition( 'utf16', element_definition, description='UTF-16 formatted string') self.assertIsNotNone(data_type_definition) def testGetByteSize(self): """Tests the GetByteSize function.""" data_type_definition = data_types.StringDefinition( 'utf16', None, description='UTF-16 formatted string') byte_size = data_type_definition.GetByteSize() 
self.assertIsNone(byte_size) element_definition = data_types.IntegerDefinition('wchar16') element_definition.format = definitions.FORMAT_SIGNED element_definition.size = 2 data_type_definition.element_data_type_definition = element_definition byte_size = data_type_definition.GetByteSize() self.assertIsNone(byte_size) data_type_definition.number_of_elements = 32 byte_size = data_type_definition.GetByteSize() self.assertEqual(byte_size, 64) def testIsComposite(self): """Tests the IsComposite function.""" data_type_definition = data_types.StringDefinition( 'utf16', None, description='UTF-16 formatted string') result = data_type_definition.IsComposite() self.assertTrue(result) class DataTypeDefinitionWithMembersTest(test_lib.BaseTestCase): """Data type definition with members tests.""" def testAddMemberDefinition(self): """Tests the AddMemberDefinition function.""" data_type_definition = data_types.DataTypeDefinitionWithMembers( 'my_type_with_member', aliases=['MY_TYPE_WITH_MEMBERS'], description='my type with members') definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) member_definition = definitions_registry.GetDefinitionByName('int32') structure_member_definition = data_types.MemberDataTypeDefinition( 'my_struct_member', member_definition, aliases=['MY_STRUCT_MEMBER'], data_type='int32', description='my structure member') data_type_definition.AddMemberDefinition(structure_member_definition) with self.assertRaises(KeyError): data_type_definition.AddMemberDefinition(structure_member_definition) # TODO: add tests for AddSectionDefinition def testGetMemberDefinitionByName(self): """Tests the GetMemberDefinitionByName function.""" data_type_definition = data_types.DataTypeDefinitionWithMembers( 'my_type_with_member', aliases=['MY_TYPE_WITH_MEMBERS'], description='my type with members') definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = 
self._CreateDefinitionRegistryFromFile( definitions_file) member_definition = definitions_registry.GetDefinitionByName('int32') structure_member_definition = data_types.MemberDataTypeDefinition( 'my_struct_member', member_definition, aliases=['MY_STRUCT_MEMBER'], data_type='int32', description='my structure member') data_type_definition.AddMemberDefinition(structure_member_definition) test_member_definition = data_type_definition.GetMemberDefinitionByName( 'my_struct_member') self.assertIsNotNone(test_member_definition) test_member_definition = data_type_definition.GetMemberDefinitionByName( 'bogus') self.assertIsNone(test_member_definition) class MemberDataTypeDefinitionTest(test_lib.BaseTestCase): """Member data type definition tests.""" # pylint: disable=protected-access def testInitialize(self): """Tests the __init__ function.""" definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) member_definition = definitions_registry.GetDefinitionByName('int32') data_type_definition = data_types.MemberDataTypeDefinition( 'my_struct_member', member_definition, aliases=['MY_STRUCT_MEMBER'], data_type='int32', description='my structure member') self.assertIsNotNone(data_type_definition) def testGetByteSize(self): """Tests the GetByteSize function.""" definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) member_definition = definitions_registry.GetDefinitionByName('int32') data_type_definition = data_types.MemberDataTypeDefinition( 'my_struct_member', None, aliases=['MY_STRUCT_MEMBER'], data_type='int32', description='my structure member') byte_size = data_type_definition.GetByteSize() self.assertIsNone(byte_size) data_type_definition.member_data_type_definition = member_definition byte_size = data_type_definition.GetByteSize() self.assertEqual(byte_size, 4) data_type_definition.condition = 'bogus' 
byte_size = data_type_definition.GetByteSize() self.assertIsNone(byte_size) def testIsComposite(self): """Tests the IsComposite function.""" definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) member_definition = definitions_registry.GetDefinitionByName('int32') data_type_definition = data_types.MemberDataTypeDefinition( 'my_struct_member', None, aliases=['MY_STRUCT_MEMBER'], data_type='int32', description='my structure member') result = data_type_definition.IsComposite() self.assertFalse(result) data_type_definition.member_data_type_definition = member_definition result = data_type_definition.IsComposite() self.assertFalse(result) data_type_definition.condition = 'bogus' result = data_type_definition.IsComposite() self.assertTrue(result) class MemberSectionDefinitionTest(test_lib.BaseTestCase): """Member section definition tests.""" def testInitialize(self): """Tests the __init__ function.""" section_definition = data_types.MemberSectionDefinition( 'my_struct_section') self.assertIsNotNone(section_definition) class StructureDefinitionTest(test_lib.BaseTestCase): """Structure data type definition tests.""" def testGetByteSize(self): """Tests the GetByteSize function.""" data_type_definition = data_types.StructureDefinition( 'my_struct_type', aliases=['MY_STRUCT_TYPE'], description='my structure type') byte_size = data_type_definition.GetByteSize() self.assertIsNone(byte_size) definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) member_definition = definitions_registry.GetDefinitionByName('int32') structure_member_definition = data_types.MemberDataTypeDefinition( 'my_struct_member', member_definition, aliases=['MY_STRUCT_MEMBER'], data_type='int32', description='my structure member') data_type_definition.AddMemberDefinition(structure_member_definition) byte_size = 
data_type_definition.GetByteSize() self.assertEqual(byte_size, 4) def testIsComposite(self): """Tests the IsComposite function.""" data_type_definition = data_types.StructureDefinition( 'my_struct_type', aliases=['MY_STRUCT_TYPE'], description='my structure type') result = data_type_definition.IsComposite() self.assertTrue(result) class PaddingDefinitionTest(test_lib.BaseTestCase): """Padding data type definition tests.""" def testInitialize(self): """Tests the __init__ function.""" data_type_definition = data_types.PaddingDefinition( 'padding', alignment_size=4, description='alignment_padding') self.assertIsNotNone(data_type_definition) def testGetByteSize(self): """Tests the GetByteSize function.""" data_type_definition = data_types.PaddingDefinition( 'padding', alignment_size=4, description='alignment_padding') byte_size = data_type_definition.GetByteSize() self.assertIsNone(byte_size) class UnionDefinitionTest(test_lib.BaseTestCase): """Union data type definition tests.""" def testGetByteSize(self): """Tests the GetByteSize function.""" data_type_definition = data_types.UnionDefinition( 'my_union_type', aliases=['MY_UNION_TYPE'], description='my union type') byte_size = data_type_definition.GetByteSize() self.assertIsNone(byte_size) definitions_file = self._GetTestFilePath(['union.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) member_definition = definitions_registry.GetDefinitionByName('int32') union_member_definition = data_types.MemberDataTypeDefinition( 'my_union_member', member_definition, aliases=['MY_UNION_MEMBER'], data_type='int32', description='my union member') data_type_definition.AddMemberDefinition(union_member_definition) byte_size = data_type_definition.GetByteSize() self.assertEqual(byte_size, 4) # TODO: test member_definition.GetByteSize() returns None def testIsComposite(self): """Tests the IsComposite function.""" data_type_definition = data_types.UnionDefinition( 'my_union_type', 
aliases=['MY_UNION_TYPE'], description='my union type') result = data_type_definition.IsComposite() self.assertTrue(result) class SemanticDataTypeDefinitionTest(test_lib.BaseTestCase): """Semantic data type definition tests.""" # pylint: disable=assignment-from-none def testGetByteSize(self): """Tests the GetByteSize function.""" data_type_definition = data_types.SemanticDataTypeDefinition( 'enum', description='enumeration') byte_size = data_type_definition.GetByteSize() self.assertIsNone(byte_size) class ConstantDefinitionTest(test_lib.BaseTestCase): """Constant data type definition tests.""" def testInitialize(self): """Tests the __init__ function.""" data_type_definition = data_types.ConstantDefinition( 'const', description='contant') self.assertIsNotNone(data_type_definition) class EnumerationValueTest(test_lib.BaseTestCase): """Enumeration value tests.""" def testInitialize(self): """Tests the __init__ function.""" enumeration_value = data_types.EnumerationValue('enum_value', 5) self.assertIsNotNone(enumeration_value) class EnumerationDefinitionTest(test_lib.BaseTestCase): """Enumeration data type definition tests.""" def testInitialize(self): """Tests the __init__ function.""" data_type_definition = data_types.EnumerationDefinition( 'enum', description='enumeration') self.assertIsNotNone(data_type_definition) def testAddValue(self): """Tests the AddValue function.""" data_type_definition = data_types.EnumerationDefinition( 'enum', description='enumeration') data_type_definition.AddValue('enum_value', 5, aliases=['value5']) with self.assertRaises(KeyError): data_type_definition.AddValue('enum_value', 7, aliases=['value7']) with self.assertRaises(KeyError): data_type_definition.AddValue('myenum', 5, aliases=['value7']) with self.assertRaises(KeyError): data_type_definition.AddValue('myenum', 7, aliases=['value5']) class LayoutDataTypeDefinitionTest(test_lib.BaseTestCase): """Layout data type definition tests.""" # pylint: disable=assignment-from-none def 
testGetByteSize(self): """Tests the GetByteSize function.""" data_type_definition = data_types.LayoutDataTypeDefinition( 'format', description='data format') byte_size = data_type_definition.GetByteSize() self.assertIsNone(byte_size) def testIsComposite(self): """Tests the IsComposite function.""" data_type_definition = data_types.FormatDefinition( 'format', description='data format') result = data_type_definition.IsComposite() self.assertTrue(result) class FormatDefinitionTest(test_lib.BaseTestCase): """Data format definition tests.""" def testInitialize(self): """Tests the __init__ function.""" data_type_definition = data_types.FormatDefinition( 'format', description='data format') self.assertIsNotNone(data_type_definition) class StructureFamilyDefinitionTest(test_lib.BaseTestCase): """Structure family definition tests.""" def testInitialize(self): """Tests the __init__ function.""" base_definition = data_types.StructureDefinition( 'base', description='my base structure type') data_type_definition = data_types.StructureFamilyDefinition( 'family', base_definition, description='structure family') self.assertIsNotNone(data_type_definition) def testAddMemberDefinition(self): """Tests the AddMemberDefinition function.""" base_definition = data_types.StructureDefinition( 'base', description='my base structure type') data_type_definition = data_types.StructureFamilyDefinition( 'family', base_definition, description='structure family') self.assertIsNotNone(data_type_definition) group_member_definition = data_types.StructureDefinition( 'member', description='my member structure type') data_type_definition.AddMemberDefinition(group_member_definition) with self.assertRaises(KeyError): data_type_definition.AddMemberDefinition(group_member_definition) class StructureGroupDefinitionTest(test_lib.BaseTestCase): """Structure group definition tests.""" def testInitialize(self): """Tests the __init__ function.""" base_definition = data_types.StructureDefinition( 'base', 
description='my base structure type') data_type_definition = data_types.StructureGroupDefinition( 'group', base_definition, 'identifier', None, description='structure group') self.assertIsNotNone(data_type_definition) def testAddMemberDefinition(self): """Tests the AddMemberDefinition function.""" base_definition = data_types.StructureDefinition( 'base', description='my base structure type') data_type_definition = data_types.StructureGroupDefinition( 'group', base_definition, 'identifier', None, description='structure group') self.assertIsNotNone(data_type_definition) group_member_definition = data_types.StructureDefinition( 'member', description='my member structure type') data_type_definition.AddMemberDefinition(group_member_definition) with self.assertRaises(KeyError): data_type_definition.AddMemberDefinition(group_member_definition) if __name__ == '__main__': unittest.main() dtfabric-20240211/tests/reader.py000066400000000000000000001714321456204725700165270ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Tests for the data type definitions readers.""" import io import unittest from dtfabric import data_types from dtfabric import definitions from dtfabric import errors from dtfabric import reader from dtfabric import registry from tests import test_lib # TODO: test errors, such as duplicate structure members. 
class DataTypeDefinitionsReaderTest(test_lib.BaseTestCase): """Data type definitions reader tests.""" # pylint: disable=protected-access def testReadBooleanDataTypeDefinition(self): """Tests the _ReadBooleanDataTypeDefinition function.""" definition_values = { 'aliases': ['BOOL'], 'attributes': { 'size': 4, }, 'description': '32-bit boolean type', } definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.DataTypeDefinitionsReader() data_type_definition = definitions_reader._ReadBooleanDataTypeDefinition( definitions_registry, definition_values, 'bool') self.assertIsNotNone(data_type_definition) self.assertIsInstance(data_type_definition, data_types.BooleanDefinition) def testReadCharacterDataTypeDefinition(self): """Tests the _ReadCharacterDataTypeDefinition function.""" definition_values = { 'aliases': ['CHAR'], 'attributes': { 'size': 1, }, 'description': '8-bit character type', } definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.DataTypeDefinitionsReader() data_type_definition = definitions_reader._ReadCharacterDataTypeDefinition( definitions_registry, definition_values, 'char') self.assertIsNotNone(data_type_definition) self.assertIsInstance(data_type_definition, data_types.CharacterDefinition) def testReadConstantDataTypeDefinition(self): """Tests the _ReadConstantDataTypeDefinition function.""" definition_values = { 'aliases': ['AVRF_MAX_TRACES'], 'description': ( 'Application verifier resource enumeration maximum number of ' 'back traces'), 'value': 32, } definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.DataTypeDefinitionsReader() data_type_definition = ( definitions_reader._ReadConstantDataTypeDefinition( definitions_registry, definition_values, 'const')) self.assertIsNotNone(data_type_definition) self.assertIsInstance(data_type_definition, data_types.ConstantDefinition) # Test with missing value definition. 
del definition_values['value'] with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadConstantDataTypeDefinition( definitions_registry, definition_values, 'const') def testReadDataTypeDefinition(self): """Tests the _ReadDataTypeDefinition function.""" definition_values = { 'aliases': ['LONG', 'LONG32'], 'description': 'signed 32-bit integer type', } definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.DataTypeDefinitionsReader() data_type_definition = definitions_reader._ReadDataTypeDefinition( definitions_registry, definition_values, data_types.IntegerDefinition, 'int32', definitions_reader._SUPPORTED_DEFINITION_VALUES_DATA_TYPE) self.assertIsNotNone(data_type_definition) self.assertIsInstance(data_type_definition, data_types.IntegerDefinition) def testReadDataTypeDefinitionWithMembers(self): """Tests the _ReadDataTypeDefinitionWithMembers function.""" definition_values = { 'aliases': ['POINT'], 'attributes': { 'byte_order': 'big-endian', }, 'description': 'Point in 3 dimensional space.', 'members': [ {'name': 'x', 'data_type': 'int32'}, {'name': 'y', 'data_type': 'int32'}, {'name': 'z', 'data_type': 'int32'}], } definitions_file = self._GetTestFilePath(['definitions', 'integers.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) definitions_reader = reader.DataTypeDefinitionsReader() definition_object = definitions_reader._ReadDataTypeDefinitionWithMembers( definitions_registry, definition_values, data_types.StructureDefinition, 'point3d') self.assertIsNotNone(definition_object) # Test with incorrect byte order. 
definition_values['attributes']['byte_order'] = 'bogus' with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadDataTypeDefinitionWithMembers( definitions_registry, definition_values, data_types.StructureDefinition, 'point3d') definition_values['attributes']['byte_order'] = 'big-endian' def testReadElementSequenceDataTypeDefinition(self): """Tests the _ReadElementSequenceDataTypeDefinition function.""" definition_values = { 'description': 'vector with 4 elements', 'element_data_type': 'int32', 'number_of_elements': 4, } definitions_file = self._GetTestFilePath(['definitions', 'integers.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) definitions_reader = reader.DataTypeDefinitionsReader() data_type_definition = ( definitions_reader._ReadElementSequenceDataTypeDefinition( definitions_registry, definition_values, data_types.SequenceDefinition, 'vector4', definitions_reader._SUPPORTED_DEFINITION_VALUES_ELEMENTS_DATA_TYPE)) self.assertIsNotNone(data_type_definition) self.assertIsInstance(data_type_definition, data_types.SequenceDefinition) # Test with attributes. definition_values['attributes'] = {} with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadElementSequenceDataTypeDefinition( definitions_registry, definition_values, data_types.SequenceDefinition, 'vector4', definitions_reader._SUPPORTED_DEFINITION_VALUES_ELEMENTS_DATA_TYPE) definition_values['attributes'] = None # Test with undefined element data type. definition_values['element_data_type'] = 'bogus' with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadElementSequenceDataTypeDefinition( definitions_registry, definition_values, data_types.SequenceDefinition, 'vector4', definitions_reader._SUPPORTED_DEFINITION_VALUES_ELEMENTS_DATA_TYPE) definition_values['element_data_type'] = 'int32' # Test with missing element data type definition. 
del definition_values['element_data_type'] with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadElementSequenceDataTypeDefinition( definitions_registry, definition_values, data_types.SequenceDefinition, 'vector4', definitions_reader._SUPPORTED_DEFINITION_VALUES_ELEMENTS_DATA_TYPE) definition_values['element_data_type'] = 'int32' # Test with missing number of elements definition. del definition_values['number_of_elements'] with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadElementSequenceDataTypeDefinition( definitions_registry, definition_values, data_types.SequenceDefinition, 'vector4', definitions_reader._SUPPORTED_DEFINITION_VALUES_ELEMENTS_DATA_TYPE) definition_values['number_of_elements'] = 4 # Test with elements data size and number of elements definition set at # at the same time. definition_values['elements_data_size'] = 32 with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadElementSequenceDataTypeDefinition( definitions_registry, definition_values, data_types.SequenceDefinition, 'vector4', definitions_reader._SUPPORTED_DEFINITION_VALUES_ELEMENTS_DATA_TYPE) del definition_values['elements_data_size'] # Test with unsupported attributes definition. definition_values['attributes'] = {'byte_order': 'little-endian'} with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadElementSequenceDataTypeDefinition( definitions_registry, definition_values, data_types.SequenceDefinition, 'vector4', definitions_reader._SUPPORTED_DEFINITION_VALUES_ELEMENTS_DATA_TYPE) del definition_values['attributes'] # Test with elements terminator. 
definition_values = { 'description': 'vector with terminator', 'element_data_type': 'int32', 'elements_terminator': b'\xff\xff\xff\xff', } data_type_definition = ( definitions_reader._ReadElementSequenceDataTypeDefinition( definitions_registry, definition_values, data_types.SequenceDefinition, 'vector4', definitions_reader._SUPPORTED_DEFINITION_VALUES_ELEMENTS_DATA_TYPE)) self.assertIsNotNone(data_type_definition) self.assertIsInstance(data_type_definition, data_types.SequenceDefinition) # Test with (Unicode) string elements terminator. definition_values['elements_terminator'] = '\0' data_type_definition = ( definitions_reader._ReadElementSequenceDataTypeDefinition( definitions_registry, definition_values, data_types.SequenceDefinition, 'vector4', definitions_reader._SUPPORTED_DEFINITION_VALUES_ELEMENTS_DATA_TYPE)) self.assertIsNotNone(data_type_definition) self.assertIsInstance(data_type_definition, data_types.SequenceDefinition) def testReadEnumerationDataTypeDefinition(self): """Tests the _ReadEnumerationDataTypeDefinition function.""" definition_values = { 'description': 'Minidump object information type', 'values': [ {'description': 'No object-specific information available', 'name': 'MiniHandleObjectInformationNone', 'number': 0}, {'description': 'Thread object information', 'name': 'MiniThreadInformation1', 'number': 1}, ], } definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.DataTypeDefinitionsReader() data_type_definition = ( definitions_reader._ReadEnumerationDataTypeDefinition( definitions_registry, definition_values, 'enum')) self.assertIsNotNone(data_type_definition) self.assertIsInstance( data_type_definition, data_types.EnumerationDefinition) # Test with missing name in first enumeration value definition. 
del definition_values['values'][0]['name'] with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadEnumerationDataTypeDefinition( definitions_registry, definition_values, 'enum') definition_values['values'][0]['name'] = 'MiniHandleObjectInformationNone' # Test with missing name in successive enumeration value definition. del definition_values['values'][-1]['name'] with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadEnumerationDataTypeDefinition( definitions_registry, definition_values, 'enum') definition_values['values'][-1]['name'] = 'MiniThreadInformation1' # Test with missing value in enumeration number definition. del definition_values['values'][-1]['number'] with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadEnumerationDataTypeDefinition( definitions_registry, definition_values, 'enum') definition_values['values'][-1]['number'] = 1 # Test with duplicate enumeration number definition. definition_values['values'].append({ 'description': 'Thread object information', 'name': 'MiniThreadInformation1', 'number': 1}) with self.assertRaises(errors.DefinitionReaderError): definitions_reader._ReadEnumerationDataTypeDefinition( definitions_registry, definition_values, 'enum') del definition_values['values'][-1] # Test with missing enumeration values definitions. 
    del definition_values['values']

    with self.assertRaises(errors.DefinitionReaderError):
      definitions_reader._ReadEnumerationDataTypeDefinition(
          definitions_registry, definition_values, 'enum')

  def testReadFixedSizeDataTypeDefinition(self):
    """Tests the _ReadFixedSizeDataTypeDefinition function."""
    definition_values = {
        'aliases': ['LONG', 'LONG32'],
        'attributes': {
            'byte_order': 'little-endian',
            'size': 4,
        },
        'description': 'signed 32-bit integer type',
    }

    definitions_registry = registry.DataTypeDefinitionsRegistry()
    definitions_reader = reader.DataTypeDefinitionsReader()

    data_type_definition = definitions_reader._ReadFixedSizeDataTypeDefinition(
        definitions_registry, definition_values, data_types.IntegerDefinition,
        'int32', definitions_reader._SUPPORTED_ATTRIBUTES_INTEGER)
    self.assertIsNotNone(data_type_definition)
    self.assertIsInstance(data_type_definition, data_types.IntegerDefinition)
    self.assertEqual(data_type_definition.size, 4)

    # Test with incorrect size.
    definition_values['attributes']['size'] = 'bogus'

    with self.assertRaises(errors.DefinitionReaderError):
      definitions_reader._ReadFixedSizeDataTypeDefinition(
          definitions_registry, definition_values,
          data_types.IntegerDefinition, 'int32',
          definitions_reader._SUPPORTED_ATTRIBUTES_INTEGER)

    definition_values['attributes']['size'] = 4

  def testReadFloatingPointDataTypeDefinition(self):
    """Tests the _ReadFloatingPointDataTypeDefinition function."""
    definition_values = {
        'aliases': ['float', 'FLOAT'],
        'attributes': {
            'size': 4,
        },
        'description': '32-bit floating-point type',
    }

    definitions_registry = registry.DataTypeDefinitionsRegistry()
    definitions_reader = reader.DataTypeDefinitionsReader()

    data_type_definition = (
        definitions_reader._ReadFloatingPointDataTypeDefinition(
            definitions_registry, definition_values, 'float32'))
    self.assertIsNotNone(data_type_definition)
    self.assertIsInstance(
        data_type_definition, data_types.FloatingPointDefinition)

  def testReadFormatDataTypeDefinition(self):
    """Tests the _ReadFormatDataTypeDefinition function."""
    definition_values = {
        'description': 'Windows Shortcut (LNK) file format',
        'type': 'format',
        'layout': [
            {'data_type': 'file_header', 'offset': 0},
        ],
    }

    definitions_registry = registry.DataTypeDefinitionsRegistry()
    definitions_reader = reader.DataTypeDefinitionsReader()

    data_type_definition = definitions_reader._ReadFormatDataTypeDefinition(
        definitions_registry, definition_values, 'lnk')
    self.assertIsNotNone(data_type_definition)
    self.assertIsInstance(data_type_definition, data_types.FormatDefinition)
    self.assertEqual(len(data_type_definition.layout), 1)

    # The single layout entry should be materialized as a layout element
    # definition with the data type and offset from the definition values.
    layout_element = data_type_definition.layout[0]
    self.assertIsNotNone(layout_element)
    self.assertIsInstance(layout_element, data_types.LayoutElementDefinition)
    self.assertEqual(layout_element.data_type, 'file_header')
    self.assertEqual(layout_element.offset, 0)

  def testReadIntegerDataTypeDefinition(self):
    """Tests the _ReadIntegerDataTypeDefinition function."""
    definition_values = {
        'aliases': ['LONG', 'LONG32'],
        'attributes': {
            'format': 'signed',
            'size': 4,
        },
        'description': 'signed 32-bit integer type',
    }

    definitions_registry = registry.DataTypeDefinitionsRegistry()
    definitions_reader = reader.DataTypeDefinitionsReader()

    data_type_definition = definitions_reader._ReadIntegerDataTypeDefinition(
        definitions_registry, definition_values, 'int32')
    self.assertIsNotNone(data_type_definition)
    self.assertIsInstance(data_type_definition, data_types.IntegerDefinition)

    # Test with unsupported format attribute.
    definition_values['attributes']['format'] = 'bogus'

    with self.assertRaises(errors.DefinitionReaderError):
      definitions_reader._ReadIntegerDataTypeDefinition(
          definitions_registry, definition_values, 'int32')

    definition_values['attributes']['format'] = 'signed'

    # Test with unsupported size.
    definition_values['attributes']['size'] = 3

    with self.assertRaises(errors.DefinitionReaderError):
      definitions_reader._ReadIntegerDataTypeDefinition(
          definitions_registry, definition_values, 'int32')

    definition_values['attributes']['size'] = 4

  def testReadLayoutDataTypeDefinition(self):
    """Tests the _ReadLayoutDataTypeDefinition function."""
    definition_values = {
        'description': 'layout data type',
    }

    definitions_registry = registry.DataTypeDefinitionsRegistry()
    definitions_reader = reader.DataTypeDefinitionsReader()

    data_type_definition = definitions_reader._ReadLayoutDataTypeDefinition(
        definitions_registry, definition_values, data_types.FormatDefinition,
        'format', definitions_reader._SUPPORTED_DEFINITION_VALUES_DATA_TYPE)
    self.assertIsNotNone(data_type_definition)
    self.assertIsInstance(data_type_definition, data_types.FormatDefinition)

  def testReadMemberDataTypeDefinitionMember(self):
    """Tests the _ReadMemberDataTypeDefinitionMember function."""
    definition_values = {'name': 'x', 'data_type': 'int32'}

    definition_object = data_types.StructureDefinition('point3d')

    definitions_file = self._GetTestFilePath(['definitions', 'integers.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)

    definitions_reader = reader.DataTypeDefinitionsReader()

    definitions_reader._ReadMemberDataTypeDefinitionMember(
        definitions_registry, definition_values, 'point3d')

    # TODO: implement.
    _ = definition_object

    # Test without definitions values.
    definition_values = {}

    with self.assertRaises(errors.DefinitionReaderError):
      definitions_reader._ReadMemberDataTypeDefinitionMember(
          definitions_registry, definition_values, 'point3d')

    # Test definitions values without name.
    definition_values = {'bogus': 'BOGUS'}

    with self.assertRaises(errors.DefinitionReaderError):
      definitions_reader._ReadMemberDataTypeDefinitionMember(
          definitions_registry, definition_values, 'point3d')

    # Test definitions values without data type and type.
    definition_values = {'name': 'x'}

    with self.assertRaises(errors.DefinitionReaderError):
      definitions_reader._ReadMemberDataTypeDefinitionMember(
          definitions_registry, definition_values, 'point3d')

    # Test definitions values with both data type and type.
    definition_values = {'name': 'x', 'data_type': 'int32', 'type': 'bogus'}

    with self.assertRaises(errors.DefinitionReaderError):
      definitions_reader._ReadMemberDataTypeDefinitionMember(
          definitions_registry, definition_values, 'point3d')

    # Test definitions values with unresolvable type.
    definition_values = {'name': 'x', 'type': 'bogus'}

    with self.assertRaises(errors.DefinitionReaderError):
      definitions_reader._ReadMemberDataTypeDefinitionMember(
          definitions_registry, definition_values, 'point3d')

  def testReadSemanticDataTypeDefinition(self):
    """Tests the _ReadSemanticDataTypeDefinition function."""
    definition_values = {
        'description': 'semantic data type',
    }

    definitions_registry = registry.DataTypeDefinitionsRegistry()
    definitions_reader = reader.DataTypeDefinitionsReader()

    data_type_definition = definitions_reader._ReadSemanticDataTypeDefinition(
        definitions_registry, definition_values,
        data_types.EnumerationDefinition, 'enum',
        definitions_reader._SUPPORTED_DEFINITION_VALUES_ENUMERATION)
    self.assertIsNotNone(data_type_definition)
    self.assertIsInstance(
        data_type_definition, data_types.EnumerationDefinition)

    # Test with attributes, which semantic data types do not support.
    definition_values['attributes'] = {}

    with self.assertRaises(errors.DefinitionReaderError):
      definitions_reader._ReadSemanticDataTypeDefinition(
          definitions_registry, definition_values,
          data_types.EnumerationDefinition, 'enum',
          definitions_reader._SUPPORTED_DEFINITION_VALUES_ENUMERATION)

    definition_values['attributes'] = None

  def testReadSequenceDataTypeDefinition(self):
    """Tests the _ReadSequenceDataTypeDefinition function."""
    definition_values = {
        'description': 'vector with 4 elements',
        'element_data_type': 'int32',
        'number_of_elements': 4,
    }

    definitions_file = self._GetTestFilePath(['definitions', 'integers.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)

    definitions_reader = reader.DataTypeDefinitionsReader()

    data_type_definition = (
        definitions_reader._ReadSequenceDataTypeDefinition(
            definitions_registry, definition_values, 'vector4'))
    self.assertIsNotNone(data_type_definition)
    self.assertIsInstance(data_type_definition, data_types.SequenceDefinition)

  def testReadStorageDataTypeDefinition(self):
    """Tests the _ReadStorageDataTypeDefinition function."""
    definition_values = {
        'aliases': ['LONG', 'LONG32'],
        'attributes': {
            'byte_order': 'little-endian',
        },
        'description': 'signed 32-bit integer type',
    }

    definitions_registry = registry.DataTypeDefinitionsRegistry()
    definitions_reader = reader.DataTypeDefinitionsReader()

    data_type_definition = definitions_reader._ReadStorageDataTypeDefinition(
        definitions_registry, definition_values, data_types.IntegerDefinition,
        'int32', definitions_reader._SUPPORTED_ATTRIBUTES_INTEGER)
    self.assertIsNotNone(data_type_definition)
    self.assertIsInstance(data_type_definition, data_types.IntegerDefinition)
    self.assertEqual(
        data_type_definition.byte_order, definitions.BYTE_ORDER_LITTLE_ENDIAN)

    # Test with incorrect byte-order.
    definition_values['attributes']['byte_order'] = 'bogus'

    with self.assertRaises(errors.DefinitionReaderError):
      definitions_reader._ReadStorageDataTypeDefinition(
          definitions_registry, definition_values,
          data_types.IntegerDefinition, 'int32',
          definitions_reader._SUPPORTED_ATTRIBUTES_INTEGER)

    definition_values['attributes']['byte_order'] = 'little-endian'

  def testReadStreamDataTypeDefinition(self):
    """Tests the _ReadStreamDataTypeDefinition function."""
    definition_values = {
        'description': 'stream with 4 elements',
        'element_data_type': 'uint8',
        'number_of_elements': 4,
    }

    definitions_file = self._GetTestFilePath(['definitions', 'integers.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)

    definitions_reader = reader.DataTypeDefinitionsReader()

    data_type_definition = (
        definitions_reader._ReadStreamDataTypeDefinition(
            definitions_registry, definition_values, 'array4'))
    self.assertIsNotNone(data_type_definition)
    self.assertIsInstance(data_type_definition, data_types.StreamDefinition)

  def testReadStringDataTypeDefinition(self):
    """Tests the _ReadStringDataTypeDefinition function."""
    definition_values = {
        'description': 'string with 4 characters',
        'encoding': 'ascii',
        'element_data_type': 'char',
        'number_of_elements': 4,
    }

    definitions_file = self._GetTestFilePath([
        'definitions', 'characters.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)

    definitions_reader = reader.DataTypeDefinitionsReader()

    data_type_definition = (
        definitions_reader._ReadStringDataTypeDefinition(
            definitions_registry, definition_values, 'string4'))
    self.assertIsNotNone(data_type_definition)
    self.assertIsInstance(data_type_definition, data_types.StringDefinition)

    # Test definitions values without encoding.
    del definition_values['encoding']

    with self.assertRaises(errors.DefinitionReaderError):
      definitions_reader._ReadStringDataTypeDefinition(
          definitions_registry, definition_values, 'string4')

    definition_values['encoding'] = 'ascii'

  def testReadStructureDataTypeDefinition(self):
    """Tests the _ReadStructureDataTypeDefinition function."""
    definition_values = {
        'aliases': ['POINT'],
        'attributes': {
            'byte_order': 'big-endian',
        },
        'description': 'Point in 3 dimensional space.',
        'members': [
            {'name': 'x', 'data_type': 'int32'},
            {'name': 'y', 'data_type': 'int32'},
            {'name': 'z', 'data_type': 'int32'}],
    }

    definitions_file = self._GetTestFilePath(['definitions', 'integers.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)

    definitions_reader = reader.DataTypeDefinitionsReader()

    data_type_definition = (
        definitions_reader._ReadStructureDataTypeDefinition(
            definitions_registry, definition_values, 'point3d'))
    self.assertIsNotNone(data_type_definition)
    self.assertIsInstance(
        data_type_definition, data_types.StructureDefinition)
    self.assertEqual(
        data_type_definition.byte_order, definitions.BYTE_ORDER_BIG_ENDIAN)

    # Test with undefined data type.
    definition_values['members'][1]['data_type'] = 'bogus'

    with self.assertRaises(errors.DefinitionReaderError):
      definitions_reader._ReadStructureDataTypeDefinition(
          definitions_registry, definition_values, 'point3d')

    # Test with missing member definitions.
    del definition_values['members']

    with self.assertRaises(errors.DefinitionReaderError):
      definitions_reader._ReadStructureDataTypeDefinition(
          definitions_registry, definition_values, 'point3d')

  # TODO: add tests for _ReadStructureFamilyDataTypeDefinition
  # TODO: add tests for _ReadStructureGroupDataTypeDefinition

  def testReadUnionDataTypeDefinition(self):
    """Tests the _ReadUnionDataTypeDefinition function."""
    definition_values = {
        'members': [
            {'name': 'long', 'data_type': 'int32'},
            {'name': 'short', 'data_type': 'int16'}],
    }

    definitions_file = self._GetTestFilePath(['definitions', 'integers.yaml'])
    definitions_registry = self._CreateDefinitionRegistryFromFile(
        definitions_file)

    definitions_reader = reader.DataTypeDefinitionsReader()

    # NOTE(review): despite the docstring, this test exercises
    # _ReadStructureDataTypeDefinition (also with name 'point3d' in the error
    # sub-tests below) — looks like a copy-paste from
    # testReadStructureDataTypeDefinition; confirm whether it should call
    # _ReadUnionDataTypeDefinition instead.
    data_type_definition = (
        definitions_reader._ReadStructureDataTypeDefinition(
            definitions_registry, definition_values, 'union'))
    self.assertIsNotNone(data_type_definition)
    self.assertIsInstance(
        data_type_definition, data_types.StructureDefinition)

    # Test with undefined data type.
    definition_values['members'][1]['data_type'] = 'bogus'

    with self.assertRaises(errors.DefinitionReaderError):
      definitions_reader._ReadStructureDataTypeDefinition(
          definitions_registry, definition_values, 'point3d')

    # Test with missing member definitions.
    del definition_values['members']

    with self.assertRaises(errors.DefinitionReaderError):
      definitions_reader._ReadStructureDataTypeDefinition(
          definitions_registry, definition_values, 'point3d')

  def testReadUUIDDataTypeDefinition(self):
    """Tests the _ReadUUIDDataTypeDefinition function."""
    definition_values = {
        'aliases': ['guid', 'GUID', 'UUID'],
        'attributes': {
            'byte_order': 'little-endian',
        },
        'description': (
            'Globally or Universal unique identifier (GUID or UUID) type'),
    }

    definitions_registry = registry.DataTypeDefinitionsRegistry()
    definitions_reader = reader.DataTypeDefinitionsReader()

    data_type_definition = definitions_reader._ReadUUIDDataTypeDefinition(
        definitions_registry, definition_values, 'uuid')
    self.assertIsNotNone(data_type_definition)
    self.assertIsInstance(data_type_definition, data_types.UUIDDefinition)

    # Test with unsupported size.
    definition_values['attributes']['size'] = 32

    with self.assertRaises(errors.DefinitionReaderError):
      definitions_reader._ReadUUIDDataTypeDefinition(
          definitions_registry, definition_values, 'uuid')


class DataTypeDefinitionsFileReaderTest(test_lib.BaseTestCase):
  """Data type definitions file reader tests."""

  # pylint: disable=protected-access

  def testReadDefinition(self):
    """Tests the _ReadDefinition function."""
    definition_values = {
        'aliases': ['LONG', 'LONG32'],
        'attributes': {
            'format': 'signed',
            'size': 4,
        },
        'description': 'signed 32-bit integer type',
        'name': 'int32',
        'type': 'integer',
    }

    definitions_registry = registry.DataTypeDefinitionsRegistry()
    definitions_reader = reader.DataTypeDefinitionsFileReader()

    data_type_definition = definitions_reader._ReadDefinition(
        definitions_registry, definition_values)
    self.assertIsNotNone(data_type_definition)
    self.assertIsInstance(data_type_definition, data_types.IntegerDefinition)

    # Test with missing definition values.
    with self.assertRaises(errors.DefinitionReaderError):
      definitions_reader._ReadDefinition(definitions_registry, None)

    # Test with unsupported type.
    definition_values['type'] = 'bogus'
    with self.assertRaises(errors.DefinitionReaderError):
      definitions_reader._ReadDefinition(
          definitions_registry, definition_values)

  def testReadFile(self):
    """Tests the ReadFile function."""
    definitions_file = self._GetTestFilePath(['definitions', 'integers.yaml'])
    self._SkipIfPathNotExists(definitions_file)

    definitions_registry = registry.DataTypeDefinitionsRegistry()
    definitions_reader = reader.DataTypeDefinitionsFileReader()

    definitions_reader.ReadFile(definitions_registry, definitions_file)


class YAMLDataTypeDefinitionsFileReaderTest(test_lib.BaseTestCase):
  """YAML data type definitions reader tests."""

  # pylint: disable=protected-access

  # TODO: add tests for _GetFormatErrorLocation

  def testReadFileObjectBoolean(self):
    """Tests the ReadFileObject function of a boolean data type."""
    definitions_registry = registry.DataTypeDefinitionsRegistry()
    definitions_reader = reader.YAMLDataTypeDefinitionsFileReader()

    definitions_file = self._GetTestFilePath(['boolean.yaml'])
    self._SkipIfPathNotExists(definitions_file)

    with open(definitions_file, 'rb') as file_object:
      definitions_reader.ReadFileObject(definitions_registry, file_object)

    self.assertEqual(len(definitions_registry._definitions), 1)

    data_type_definition = definitions_registry.GetDefinitionByName('bool')
    self.assertIsInstance(data_type_definition, data_types.BooleanDefinition)
    self.assertEqual(data_type_definition.name, 'bool')
    self.assertEqual(data_type_definition.size, 1)
    self.assertEqual(data_type_definition.units, 'bytes')

    byte_size = data_type_definition.GetByteSize()
    self.assertEqual(byte_size, 1)

  def testReadFileObjectCharacter(self):
    """Tests the ReadFileObject function of a character data type."""
    definitions_registry = registry.DataTypeDefinitionsRegistry()
    definitions_reader = reader.YAMLDataTypeDefinitionsFileReader()

    definitions_file = self._GetTestFilePath(['character.yaml'])
    self._SkipIfPathNotExists(definitions_file)

    with open(definitions_file, 'rb') as file_object:
      definitions_reader.ReadFileObject(definitions_registry, file_object)

    self.assertEqual(len(definitions_registry._definitions), 1)

    data_type_definition = definitions_registry.GetDefinitionByName('char')
    self.assertIsInstance(data_type_definition, data_types.CharacterDefinition)
    self.assertEqual(data_type_definition.name, 'char')
    self.assertEqual(data_type_definition.size, 1)
    self.assertEqual(data_type_definition.units, 'bytes')

    byte_size = data_type_definition.GetByteSize()
    self.assertEqual(byte_size, 1)

  def testReadFileObjectConstant(self):
    """Tests the ReadFileObject function of a constant data type."""
    definitions_registry = registry.DataTypeDefinitionsRegistry()
    definitions_reader = reader.YAMLDataTypeDefinitionsFileReader()

    definitions_file = self._GetTestFilePath(['constant.yaml'])
    self._SkipIfPathNotExists(definitions_file)

    with open(definitions_file, 'rb') as file_object:
      definitions_reader.ReadFileObject(definitions_registry, file_object)

    self.assertEqual(len(definitions_registry._definitions), 1)

    data_type_definition = definitions_registry.GetDefinitionByName(
        'maximum_number_of_back_traces')
    self.assertIsInstance(data_type_definition, data_types.ConstantDefinition)
    self.assertEqual(
        data_type_definition.name, 'maximum_number_of_back_traces')
    self.assertEqual(data_type_definition.value, 32)

  def testReadFileObjectEnumeration(self):
    """Tests the ReadFileObject function of an enumeration data type."""
    definitions_registry = registry.DataTypeDefinitionsRegistry()
    definitions_reader = reader.YAMLDataTypeDefinitionsFileReader()

    definitions_file = self._GetTestFilePath(['enumeration.yaml'])
    self._SkipIfPathNotExists(definitions_file)

    with open(definitions_file, 'rb') as file_object:
      definitions_reader.ReadFileObject(definitions_registry, file_object)

    self.assertEqual(len(definitions_registry._definitions), 1)

    data_type_definition = definitions_registry.GetDefinitionByName(
        'object_information_type')
    self.assertIsInstance(
        data_type_definition, data_types.EnumerationDefinition)
    self.assertEqual(data_type_definition.name, 'object_information_type')
    self.assertEqual(len(data_type_definition.values), 6)

    # An enumeration has no intrinsic storage size.
    byte_size = data_type_definition.GetByteSize()
    self.assertIsNone(byte_size)

  def testReadFileObjectFloatingPoint(self):
    """Tests the ReadFileObject function of a floating-point data type."""
    definitions_registry = registry.DataTypeDefinitionsRegistry()
    definitions_reader = reader.YAMLDataTypeDefinitionsFileReader()

    definitions_file = self._GetTestFilePath(['floating-point.yaml'])
    self._SkipIfPathNotExists(definitions_file)

    with open(definitions_file, 'rb') as file_object:
      definitions_reader.ReadFileObject(definitions_registry, file_object)

    self.assertEqual(len(definitions_registry._definitions), 1)

    data_type_definition = definitions_registry.GetDefinitionByName('float32')
    self.assertIsInstance(
        data_type_definition, data_types.FloatingPointDefinition)
    self.assertEqual(data_type_definition.name, 'float32')
    self.assertEqual(
        data_type_definition.byte_order, definitions.BYTE_ORDER_LITTLE_ENDIAN)
    self.assertEqual(data_type_definition.size, 4)
    self.assertEqual(data_type_definition.units, 'bytes')

    byte_size = data_type_definition.GetByteSize()
    self.assertEqual(byte_size, 4)

  def testReadFileObjectFormat(self):
    """Tests the ReadFileObject function of a format data type."""
    definitions_registry = registry.DataTypeDefinitionsRegistry()
    definitions_reader = reader.YAMLDataTypeDefinitionsFileReader()

    definitions_file = self._GetTestFilePath(['format.yaml'])
    self._SkipIfPathNotExists(definitions_file)

    with open(definitions_file, 'rb') as file_object:
      definitions_reader.ReadFileObject(definitions_registry, file_object)

    self.assertEqual(len(definitions_registry._definitions), 4)

    data_type_definition = definitions_registry.GetDefinitionByName(
        'format_with_layout')
    self.assertIsInstance(data_type_definition, data_types.FormatDefinition)
    self.assertEqual(data_type_definition.name, 'format_with_layout')
    self.assertEqual(
        data_type_definition.byte_order, definitions.BYTE_ORDER_BIG_ENDIAN)
    self.assertEqual(len(data_type_definition.layout), 1)

    layout_element = data_type_definition.layout[0]
    self.assertIsNotNone(layout_element)
    self.assertIsInstance(layout_element, data_types.LayoutElementDefinition)
    self.assertEqual(layout_element.data_type, 'file_header')
    self.assertEqual(layout_element.offset, 0)

    byte_size = data_type_definition.GetByteSize()
    self.assertIsNone(byte_size)

  def testReadFileObjectInteger(self):
    """Tests the ReadFileObject function of an integer data type."""
    definitions_registry = registry.DataTypeDefinitionsRegistry()
    definitions_reader = reader.YAMLDataTypeDefinitionsFileReader()

    yaml_data = '\n'.join([
        'name: int32le',
        'type: integer',
        'attributes:',
        '  byte_order: little-endian',
        '  format: signed',
        '  size: 4',
        '  units: bytes']).encode('ascii')

    with io.BytesIO(initial_bytes=yaml_data) as file_object:
      definitions_reader.ReadFileObject(definitions_registry, file_object)

    data_type_definition = definitions_registry.GetDefinitionByName('int32le')
    self.assertIsInstance(data_type_definition, data_types.IntegerDefinition)
    self.assertEqual(data_type_definition.name, 'int32le')
    self.assertEqual(
        data_type_definition.byte_order, definitions.BYTE_ORDER_LITTLE_ENDIAN)
    self.assertEqual(data_type_definition.format, 'signed')
    self.assertEqual(data_type_definition.size, 4)
    self.assertEqual(data_type_definition.units, 'bytes')

    byte_size = data_type_definition.GetByteSize()
    self.assertEqual(byte_size, 4)

    # An integer without a size attribute has no determinable byte size.
    yaml_data = '\n'.join([
        'name: int',
        'type: integer',
        'attributes:',
        '  format: signed']).encode('ascii')

    with io.BytesIO(initial_bytes=yaml_data) as file_object:
      definitions_reader.ReadFileObject(definitions_registry, file_object)

    data_type_definition = definitions_registry.GetDefinitionByName('int')
    byte_size = data_type_definition.GetByteSize()
    self.assertIsNone(byte_size)

    # Test with an unsupported format attribute value.
    yaml_data = '\n'.join([
        'name: int32le',
        'type: integer',
        'attributes:',
        '  format: bogus',
        '  size: 4',
        '  units: bytes']).encode('ascii')

    with self.assertRaises(errors.FormatError):
      with io.BytesIO(initial_bytes=yaml_data) as file_object:
        definitions_reader.ReadFileObject(definitions_registry, file_object)

    # Test with an unsupported size attribute value.
    yaml_data = '\n'.join([
        'name: int32le',
        'type: integer',
        'attributes:',
        '  format: signed',
        '  size: bogus',
        '  units: bytes']).encode('ascii')

    with self.assertRaises(errors.FormatError):
      with io.BytesIO(initial_bytes=yaml_data) as file_object:
        definitions_reader.ReadFileObject(definitions_registry, file_object)

  def testReadFileObjectMissingName(self):
    """Tests the ReadFileObject function with a missing name."""
    definitions_registry = registry.DataTypeDefinitionsRegistry()
    definitions_reader = reader.YAMLDataTypeDefinitionsFileReader()

    yaml_data = '\n'.join([
        'type: integer',
        'attributes:',
        '  format: signed',
        '  size: 1',
        '  units: bytes']).encode('ascii')

    file_object = io.BytesIO(initial_bytes=yaml_data)

    with self.assertRaises(errors.FormatError):
      definitions_reader.ReadFileObject(definitions_registry, file_object)

  def testReadFileObjectMissingType(self):
    """Tests the ReadFileObject function with a missing type."""
    definitions_registry = registry.DataTypeDefinitionsRegistry()
    definitions_reader = reader.YAMLDataTypeDefinitionsFileReader()

    yaml_data = '\n'.join([
        'name: int8',
        'attributes:',
        '  format: signed',
        '  size: 1',
        '  units: bytes']).encode('ascii')

    file_object = io.BytesIO(initial_bytes=yaml_data)

    with self.assertRaises(errors.FormatError):
      definitions_reader.ReadFileObject(definitions_registry, file_object)

    # A missing type in a successive document should also raise.
    yaml_data = '\n'.join([
        'name: int8',
        'type: integer',
        'attributes:',
        '  format: signed',
        '  size: 1',
        '  units: bytes',
        '---',
        'name: int16',
        'attributes:',
        '  format: signed',
        '  size: 2',
        '  units: bytes']).encode('ascii')

    file_object = io.BytesIO(initial_bytes=yaml_data)

    with self.assertRaises(errors.FormatError):
      definitions_reader.ReadFileObject(definitions_registry, file_object)

  def testReadFileObjectPadding(self):
    """Tests the ReadFileObject function of a padding data type."""
    definitions_registry = registry.DataTypeDefinitionsRegistry()
    definitions_reader = reader.YAMLDataTypeDefinitionsFileReader()

    definitions_file = self._GetTestFilePath(['padding.yaml'])
    self._SkipIfPathNotExists(definitions_file)

    # Reading a stand-alone padding definition is expected to fail.
    with open(definitions_file, 'rb') as file_object:
      with self.assertRaises(errors.FormatError):
        definitions_reader.ReadFileObject(definitions_registry, file_object)

  def testReadFileObjectSequence(self):
    """Tests the ReadFileObject function of a sequence data type."""
    definitions_registry = registry.DataTypeDefinitionsRegistry()
    definitions_reader = reader.YAMLDataTypeDefinitionsFileReader()

    definitions_file = self._GetTestFilePath(['sequence.yaml'])
    self._SkipIfPathNotExists(definitions_file)

    with open(definitions_file, 'rb') as file_object:
      definitions_reader.ReadFileObject(definitions_registry, file_object)

    self.assertEqual(len(definitions_registry._definitions), 3)

    data_type_definition = definitions_registry.GetDefinitionByName('vector4')
    self.assertIsInstance(data_type_definition, data_types.SequenceDefinition)
    self.assertEqual(data_type_definition.name, 'vector4')
    self.assertEqual(data_type_definition.description, '4-dimensional vector')
    self.assertEqual(data_type_definition.aliases, ['VECTOR'])
    self.assertEqual(data_type_definition.element_data_type, 'int32')
    self.assertIsNotNone(data_type_definition.element_data_type_definition)
    self.assertEqual(data_type_definition.number_of_elements, 4)

    byte_size = data_type_definition.GetByteSize()
    self.assertEqual(byte_size, 16)

    # Also test a sequence whose element data type is a structure.
    definitions_registry = registry.DataTypeDefinitionsRegistry()
    definitions_reader = reader.YAMLDataTypeDefinitionsFileReader()

    definitions_file = self._GetTestFilePath(['sequence_with_structure.yaml'])
    self._SkipIfPathNotExists(definitions_file)

    with open(definitions_file, 'rb') as file_object:
      definitions_reader.ReadFileObject(definitions_registry, file_object)

    self.assertEqual(len(definitions_registry._definitions), 3)

    data_type_definition = definitions_registry.GetDefinitionByName('vectors')
    self.assertIsInstance(data_type_definition, data_types.SequenceDefinition)

  def testReadFileObjectStream(self):
    """Tests the ReadFileObject function of a stream data type."""
    definitions_registry = registry.DataTypeDefinitionsRegistry()
    definitions_reader = reader.YAMLDataTypeDefinitionsFileReader()

    definitions_file = self._GetTestFilePath(['stream.yaml'])
    self._SkipIfPathNotExists(definitions_file)

    with open(definitions_file, 'rb') as file_object:
      definitions_reader.ReadFileObject(definitions_registry, file_object)

    self.assertEqual(len(definitions_registry._definitions), 4)

    data_type_definition = definitions_registry.GetDefinitionByName(
        'utf16le_stream')
    self.assertIsInstance(data_type_definition, data_types.StreamDefinition)
    self.assertEqual(data_type_definition.name, 'utf16le_stream')
    self.assertEqual(
        data_type_definition.description, 'UTF-16 little-endian stream')
    self.assertEqual(data_type_definition.aliases, ['UTF16LE'])
    self.assertEqual(data_type_definition.element_data_type, 'wchar16')
    self.assertIsNotNone(data_type_definition.element_data_type_definition)
    self.assertEqual(data_type_definition.number_of_elements, 8)

    byte_size = data_type_definition.GetByteSize()
    self.assertEqual(byte_size, 16)

  def testReadFileObjectString(self):
    """Tests the ReadFileObject function of a string data type."""
    definitions_registry = registry.DataTypeDefinitionsRegistry()
    definitions_reader = reader.YAMLDataTypeDefinitionsFileReader()

    definitions_file = self._GetTestFilePath(['string.yaml'])
    self._SkipIfPathNotExists(definitions_file)

    with open(definitions_file, 'rb') as file_object:
      definitions_reader.ReadFileObject(definitions_registry, file_object)

    self.assertEqual(len(definitions_registry._definitions), 5)

    data_type_definition = definitions_registry.GetDefinitionByName(
        'utf8_string')
    self.assertIsInstance(data_type_definition, data_types.StringDefinition)
    self.assertEqual(data_type_definition.name, 'utf8_string')
    self.assertEqual(
        data_type_definition.description, 'UTF-8 string')
    self.assertEqual(data_type_definition.element_data_type, 'char')
    self.assertIsNotNone(data_type_definition.element_data_type_definition)
    self.assertEqual(data_type_definition.elements_terminator, b'\x00')
    self.assertEqual(data_type_definition.encoding, 'utf8')

    # A terminator-delimited string has no fixed byte size.
    byte_size = data_type_definition.GetByteSize()
    self.assertIsNone(byte_size)

  def testReadFileObjectStructure(self):
    """Tests the ReadFileObject function of a structure data type."""
    definitions_registry = registry.DataTypeDefinitionsRegistry()
    definitions_reader = reader.YAMLDataTypeDefinitionsFileReader()

    definitions_file = self._GetTestFilePath(['structure.yaml'])
    self._SkipIfPathNotExists(definitions_file)

    with open(definitions_file, 'rb') as file_object:
      definitions_reader.ReadFileObject(definitions_registry, file_object)

    self.assertEqual(len(definitions_registry._definitions), 5)

    data_type_definition = definitions_registry.GetDefinitionByName('point3d')
    self.assertIsInstance(data_type_definition, data_types.StructureDefinition)
    self.assertEqual(data_type_definition.name, 'point3d')
    self.assertEqual(
        data_type_definition.description, 'Point in 3 dimensional space.')
    self.assertEqual(data_type_definition.aliases, ['POINT'])
    self.assertEqual(len(data_type_definition.members), 3)

    member_definition = data_type_definition.members[0]
    self.assertIsInstance(
        member_definition, data_types.MemberDataTypeDefinition)
    self.assertEqual(member_definition.name, 'x')
    self.assertEqual(member_definition.aliases, ['XCOORD'])
    self.assertEqual(member_definition.member_data_type, 'int32')
    self.assertIsNotNone(member_definition.member_data_type_definition)

    byte_size = data_type_definition.GetByteSize()
    self.assertEqual(byte_size, 12)

  def testReadFileObjectStructureWithSequence(self):
    """Tests the ReadFileObject function of a structure with a sequence."""
    definitions_registry = registry.DataTypeDefinitionsRegistry()
    definitions_reader = reader.YAMLDataTypeDefinitionsFileReader()

    definitions_file = self._GetTestFilePath(['structure.yaml'])
    self._SkipIfPathNotExists(definitions_file)

    with open(definitions_file, 'rb') as file_object:
      definitions_reader.ReadFileObject(definitions_registry, file_object)

    self.assertEqual(len(definitions_registry._definitions), 5)

    data_type_definition = definitions_registry.GetDefinitionByName('box3d')
    self.assertIsInstance(data_type_definition, data_types.StructureDefinition)
    self.assertEqual(data_type_definition.name, 'box3d')
    self.assertEqual(
        data_type_definition.description, 'Box in 3 dimensional space.')
    self.assertEqual(len(data_type_definition.members), 1)

    member_definition = data_type_definition.members[0]
    self.assertIsInstance(member_definition, data_types.SequenceDefinition)
    self.assertEqual(member_definition.name, 'triangles')
    self.assertEqual(member_definition.element_data_type, 'triangle3d')
    self.assertIsNotNone(member_definition.element_data_type_definition)

    byte_size = data_type_definition.GetByteSize()
    self.assertEqual(byte_size, 432)

  def testReadFileObjectStructureWithSequenceWithExpression(self):
    """Tests the ReadFileObject function of a structure with a sequence."""
    definitions_registry = registry.DataTypeDefinitionsRegistry()
    definitions_reader = reader.YAMLDataTypeDefinitionsFileReader()

    definitions_file = self._GetTestFilePath(['structure.yaml'])
    self._SkipIfPathNotExists(definitions_file)

    with open(definitions_file, 'rb') as file_object:
      definitions_reader.ReadFileObject(definitions_registry, file_object)

    self.assertEqual(len(definitions_registry._definitions), 5)

    data_type_definition = definitions_registry.GetDefinitionByName('sphere3d')
    self.assertIsInstance(data_type_definition, data_types.StructureDefinition)
    self.assertEqual(data_type_definition.name, 'sphere3d')
    self.assertEqual(
        data_type_definition.description, 'Sphere in 3 dimensional space.')
    self.assertEqual(len(data_type_definition.members), 2)

    member_definition = data_type_definition.members[1]
    self.assertIsInstance(member_definition, data_types.SequenceDefinition)
    self.assertEqual(member_definition.name, 'triangles')
    self.assertEqual(member_definition.element_data_type, 'triangle3d')
    self.assertIsNotNone(member_definition.element_data_type_definition)

    # With an expression-based number of elements no fixed byte size can be
    # determined.
    byte_size = data_type_definition.GetByteSize()
    self.assertIsNone(byte_size)

  def testReadFileObjectStructureWithCondition(self):
    """Tests the ReadFileObject function of a structure with condition."""
    definitions_registry = registry.DataTypeDefinitionsRegistry()
    definitions_reader = reader.YAMLDataTypeDefinitionsFileReader()

    definitions_file = self._GetTestFilePath(['structure_with_condition.yaml'])
    self._SkipIfPathNotExists(definitions_file)

    with open(definitions_file, 'rb') as file_object:
      definitions_reader.ReadFileObject(definitions_registry, file_object)

    self.assertEqual(len(definitions_registry._definitions), 3)

    data_type_definition = definitions_registry.GetDefinitionByName(
        'structure_with_condition')
    self.assertIsInstance(data_type_definition, data_types.StructureDefinition)
    self.assertEqual(data_type_definition.name, 'structure_with_condition')

    self.assertEqual(len(data_type_definition.members), 6)

    byte_size = data_type_definition.GetByteSize()
    self.assertIsNone(byte_size)

  def testReadFileObjectStructureWithPadding(self):
    """Tests the ReadFileObject function of a structure with padding."""
    definitions_registry = registry.DataTypeDefinitionsRegistry()
    definitions_reader = reader.YAMLDataTypeDefinitionsFileReader()

    definitions_file = self._GetTestFilePath(['structure_with_padding.yaml'])
    self._SkipIfPathNotExists(definitions_file)

    with open(definitions_file, 'rb') as file_object:
      definitions_reader.ReadFileObject(definitions_registry, file_object)

    self.assertEqual(len(definitions_registry._definitions), 4)

    data_type_definition = definitions_registry.GetDefinitionByName(
        'structure_with_padding')
    self.assertIsInstance(data_type_definition, data_types.StructureDefinition)
self.assertEqual(data_type_definition.name, 'structure_with_padding') self.assertEqual(len(data_type_definition.members), 2) member_definition = data_type_definition.members[1] self.assertIsInstance(member_definition, data_types.PaddingDefinition) self.assertEqual(member_definition.name, 'padding') self.assertEqual(member_definition.alignment_size, 8) byte_size = data_type_definition.GetByteSize() self.assertEqual(byte_size, 8) # TODO: add test with composite structure with padding? def testReadFileObjectStructureWithSection(self): """Tests the ReadFileObject function of a structure with section.""" definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() definitions_file = self._GetTestFilePath(['structure_with_section.yaml']) self._SkipIfPathNotExists(definitions_file) with open(definitions_file, 'rb') as file_object: definitions_reader.ReadFileObject(definitions_registry, file_object) self.assertEqual(len(definitions_registry._definitions), 2) data_type_definition = definitions_registry.GetDefinitionByName('3dsphere') self.assertIsInstance(data_type_definition, data_types.StructureDefinition) self.assertEqual(data_type_definition.name, '3dsphere') self.assertEqual(len(data_type_definition.members), 4) self.assertEqual(len(data_type_definition.sections), 2) byte_size = data_type_definition.GetByteSize() self.assertEqual(byte_size, 16) def testReadFileObjectStructureWithUnion(self): """Tests the ReadFileObject function of a structure with an union.""" definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() definitions_file = self._GetTestFilePath(['structure_with_union.yaml']) self._SkipIfPathNotExists(definitions_file) with open(definitions_file, 'rb') as file_object: definitions_reader.ReadFileObject(definitions_registry, file_object) self.assertEqual(len(definitions_registry._definitions), 3) data_type_definition = 
definitions_registry.GetDefinitionByName( 'intfloat32') self.assertIsInstance(data_type_definition, data_types.StructureDefinition) self.assertEqual(data_type_definition.name, 'intfloat32') self.assertEqual(len(data_type_definition.members), 1) member_definition = data_type_definition.members[0] self.assertIsInstance(member_definition, data_types.UnionDefinition) self.assertIsNone(member_definition.name) byte_size = data_type_definition.GetByteSize() self.assertEqual(byte_size, 4) def testReadFileObjectStructureWithValues(self): """Tests the ReadFileObject function of a structure with values.""" definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() definitions_file = self._GetTestFilePath(['structure_with_values.yaml']) self._SkipIfPathNotExists(definitions_file) with open(definitions_file, 'rb') as file_object: definitions_reader.ReadFileObject(definitions_registry, file_object) self.assertEqual(len(definitions_registry._definitions), 4) data_type_definition = definitions_registry.GetDefinitionByName( 'structure_with_value') self.assertIsInstance(data_type_definition, data_types.StructureDefinition) self.assertEqual(data_type_definition.name, 'structure_with_value') self.assertEqual(len(data_type_definition.members), 3) byte_size = data_type_definition.GetByteSize() self.assertIsNone(byte_size) data_type_definition = definitions_registry.GetDefinitionByName( 'structure_with_values') self.assertIsInstance(data_type_definition, data_types.StructureDefinition) self.assertEqual(data_type_definition.name, 'structure_with_values') self.assertEqual(len(data_type_definition.members), 2) byte_size = data_type_definition.GetByteSize() self.assertEqual(byte_size, 8) def testReadFileObjectStructureWithStringArray(self): """Tests the ReadFileObject function of a string array.""" definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() 
definitions_file = self._GetTestFilePath(['string_array.yaml']) self._SkipIfPathNotExists(definitions_file) with open(definitions_file, 'rb') as file_object: definitions_reader.ReadFileObject(definitions_registry, file_object) self.assertEqual(len(definitions_registry._definitions), 5) data_type_definition = definitions_registry.GetDefinitionByName( 'string_array') self.assertIsInstance(data_type_definition, data_types.StructureDefinition) self.assertEqual(data_type_definition.name, 'string_array') self.assertEqual(len(data_type_definition.members), 2) byte_size = data_type_definition.GetByteSize() self.assertIsNone(byte_size) def testReadFileObjectStructureFamily(self): """Tests the ReadFileObject function of a structure family data type.""" definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() definitions_file = self._GetTestFilePath(['structure_family.yaml']) self._SkipIfPathNotExists(definitions_file) with open(definitions_file, 'rb') as file_object: definitions_reader.ReadFileObject(definitions_registry, file_object) self.assertEqual(len(definitions_registry._definitions), 8) data_type_definition = definitions_registry.GetDefinitionByName( 'group_descriptor') self.assertIsInstance( data_type_definition, data_types.StructureFamilyDefinition) self.assertEqual(data_type_definition.name, 'group_descriptor') self.assertEqual(data_type_definition.description, 'Group descriptor') self.assertEqual(len(data_type_definition.members), 2) member_definition = data_type_definition.members[0] self.assertIsInstance(member_definition, data_types.StructureDefinition) self.assertEqual(member_definition.name, 'group_descriptor_ext2') byte_size = data_type_definition.GetByteSize() # TODO: determine the size of the largest family member. 
self.assertIsNone(byte_size) def testReadFileObjectStructureGroup(self): """Tests the ReadFileObject function of a structure group data type.""" definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() definitions_file = self._GetTestFilePath(['structure_group.yaml']) self._SkipIfPathNotExists(definitions_file) with open(definitions_file, 'rb') as file_object: definitions_reader.ReadFileObject(definitions_registry, file_object) self.assertEqual(len(definitions_registry._definitions), 9) data_type_definition = definitions_registry.GetDefinitionByName('bsm_token') self.assertIsInstance( data_type_definition, data_types.StructureGroupDefinition) self.assertEqual(data_type_definition.name, 'bsm_token') self.assertEqual(data_type_definition.description, 'BSM token') base_definition = data_type_definition.base self.assertIsInstance(base_definition, data_types.StructureDefinition) self.assertEqual(base_definition.name, 'bsm_token_base') self.assertEqual(data_type_definition.identifier, 'token_type') self.assertEqual(len(data_type_definition.members), 2) member_definition = data_type_definition.members[0] self.assertIsInstance(member_definition, data_types.StructureDefinition) self.assertEqual(member_definition.name, 'bsm_token_arg32') def testReadFileObjectUnion(self): """Tests the ReadFileObject function of an union data type.""" definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() definitions_file = self._GetTestFilePath(['union.yaml']) self._SkipIfPathNotExists(definitions_file) with open(definitions_file, 'rb') as file_object: definitions_reader.ReadFileObject(definitions_registry, file_object) self.assertEqual(len(definitions_registry._definitions), 3) data_type_definition = definitions_registry.GetDefinitionByName('union') self.assertIsInstance(data_type_definition, data_types.UnionDefinition) 
self.assertEqual(data_type_definition.name, 'union') self.assertEqual(len(data_type_definition.members), 2) member_definition = data_type_definition.members[0] self.assertIsInstance( member_definition, data_types.MemberDataTypeDefinition) self.assertEqual(member_definition.name, 'long') self.assertEqual(member_definition.member_data_type, 'int32') self.assertIsNotNone(member_definition.member_data_type_definition) byte_size = data_type_definition.GetByteSize() self.assertEqual(byte_size, 4) def testReadFileObjectUnionWithCondition(self): """Tests the ReadFileObject function of an union with condition.""" definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() definitions_file = self._GetTestFilePath(['union_with_condition.yaml']) self._SkipIfPathNotExists(definitions_file) with self.assertRaises(errors.FormatError): with open(definitions_file, 'rb') as file_object: definitions_reader.ReadFileObject(definitions_registry, file_object) def testReadFileObjectUUID(self): """Tests the ReadFileObject function of an UUID data type.""" definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() definitions_file = self._GetTestFilePath(['uuid.yaml']) self._SkipIfPathNotExists(definitions_file) with open(definitions_file, 'rb') as file_object: definitions_reader.ReadFileObject(definitions_registry, file_object) self.assertEqual(len(definitions_registry._definitions), 1) data_type_definition = definitions_registry.GetDefinitionByName('uuid') self.assertIsInstance( data_type_definition, data_types.UUIDDefinition) self.assertEqual(data_type_definition.name, 'uuid') self.assertEqual( data_type_definition.byte_order, definitions.BYTE_ORDER_LITTLE_ENDIAN) self.assertEqual(data_type_definition.size, 16) self.assertEqual(data_type_definition.units, 'bytes') byte_size = data_type_definition.GetByteSize() self.assertEqual(byte_size, 16) if __name__ == 
'__main__': unittest.main() dtfabric-20240211/tests/registry.py000066400000000000000000000056721456204725700171370ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Tests for the data type definitions registry.""" import unittest from dtfabric import data_types from dtfabric import registry from tests import test_lib class DataTypeDefinitionsRegistryTest(test_lib.BaseTestCase): """Data type definitions registry tests.""" def testRegistration(self): """Tests the RegisterDefinition and DeregisterDefinition functions.""" definitions_registry = registry.DataTypeDefinitionsRegistry() data_type_definition = data_types.IntegerDefinition( 'int32', aliases=['LONG', 'LONG32'], description='signed 32-bit integer') definitions_registry.RegisterDefinition(data_type_definition) with self.assertRaises(KeyError): definitions_registry.RegisterDefinition(data_type_definition) test_definition = data_types.IntegerDefinition( 'LONG', description='long integer') with self.assertRaises(KeyError): definitions_registry.RegisterDefinition(test_definition) test_definition = data_types.IntegerDefinition( 'test', aliases=['LONG'], description='long integer') with self.assertRaises(KeyError): definitions_registry.RegisterDefinition(test_definition) definitions_registry.DeregisterDefinition(data_type_definition) with self.assertRaises(KeyError): definitions_registry.DeregisterDefinition(data_type_definition) def testGetDefinitionByName(self): """Tests the GetDefinitionByName function.""" definitions_registry = registry.DataTypeDefinitionsRegistry() data_type_definition = data_types.IntegerDefinition( 'int32', aliases=['LONG', 'LONG32'], description='signed 32-bit integer') definitions_registry.RegisterDefinition(data_type_definition) test_definition = definitions_registry.GetDefinitionByName('int32') self.assertIsNotNone(test_definition) self.assertIsInstance(test_definition, data_types.IntegerDefinition) test_definition = definitions_registry.GetDefinitionByName('LONG32') 
self.assertIsNotNone(test_definition) self.assertIsInstance(test_definition, data_types.IntegerDefinition) test_definition = definitions_registry.GetDefinitionByName('bogus') self.assertIsNone(test_definition) definitions_registry.DeregisterDefinition(data_type_definition) def testGetDefinitions(self): """Tests the GetDefinitions function.""" definitions_registry = registry.DataTypeDefinitionsRegistry() test_definitions = definitions_registry.GetDefinitions() self.assertEqual(len(test_definitions), 0) data_type_definition = data_types.IntegerDefinition( 'int32', aliases=['LONG', 'LONG32'], description='signed 32-bit integer') definitions_registry.RegisterDefinition(data_type_definition) test_definitions = definitions_registry.GetDefinitions() self.assertEqual(len(test_definitions), 1) definitions_registry.DeregisterDefinition(data_type_definition) if __name__ == '__main__': unittest.main() dtfabric-20240211/tests/runtime/000077500000000000000000000000001456204725700163665ustar00rootroot00000000000000dtfabric-20240211/tests/runtime/__init__.py000066400000000000000000000000301456204725700204700ustar00rootroot00000000000000# -*- coding: utf-8 -*- dtfabric-20240211/tests/runtime/byte_operations.py000066400000000000000000000031551456204725700221520ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Tests for the byte stream operations.""" import unittest from dtfabric import errors from dtfabric.runtime import byte_operations from tests import test_lib class StructOperationTest(test_lib.BaseTestCase): """Python struct-base byte stream operation tests.""" def testInitialize(self): """Tests the __init__ function.""" byte_stream_operation = byte_operations.StructOperation('b') self.assertIsNotNone(byte_stream_operation) with self.assertRaises(errors.FormatError): byte_operations.StructOperation(None) with self.assertRaises(errors.FormatError): byte_operations.StructOperation('z') def testReadFrom(self): """Tests the ReadFrom function.""" byte_stream_operation = 
byte_operations.StructOperation('') data_type_definition = definitions_registry.GetDefinitionByName('int32le') data_type_map = data_maps.StorageDataTypeMap(data_type_definition) byte_order_string = data_type_map.GetStructByteOrderString() self.assertEqual(byte_order_string, '<') def testGetStructFormatString(self): """Tests the GetStructFormatString function.""" definitions_file = self._GetTestFilePath(['integer.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('int32le') data_type_map = data_maps.StorageDataTypeMap(data_type_definition) format_string = data_type_map.GetStructFormatString() self.assertIsNone(format_string) class PrimitiveDataTypeMapTest(test_lib.BaseTestCase): """Primitive data type map tests.""" def testFoldByteStream(self): """Tests the FoldByteStream function.""" definitions_file = self._GetTestFilePath(['integer.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('int32le') data_type_map = data_maps.PrimitiveDataTypeMap(data_type_definition) with self.assertRaises(errors.FoldingError): data_type_map.FoldByteStream(1) def testFoldValue(self): """Tests the FoldValue function.""" definitions_file = self._GetTestFilePath(['integer.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('int32le') data_type_map = data_maps.PrimitiveDataTypeMap(data_type_definition) integer_value = data_type_map.FoldValue(1) self.assertEqual(integer_value, 1) def testMapByteStream(self): """Tests the MapByteStream function.""" definitions_file = self._GetTestFilePath(['integer.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('int32le') data_type_map = 
data_maps.PrimitiveDataTypeMap(data_type_definition) with self.assertRaises(errors.MappingError): data_type_map.MapByteStream(b'\x01\x00\x00\x00') def testMapValue(self): """Tests the MapValue function.""" definitions_file = self._GetTestFilePath(['integer.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('int32le') data_type_map = data_maps.PrimitiveDataTypeMap(data_type_definition) integer_value = data_type_map.MapValue(1) self.assertEqual(integer_value, 1) class BooleanMapTest(test_lib.BaseTestCase): """Boolean map tests.""" def testInitialize(self): """Tests the __init__ function.""" definitions_file = self._GetTestFilePath(['definitions', 'booleans.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('bool32') data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_definition.false_value = None data_type_definition.true_value = None with self.assertRaises(errors.FormatError): data_maps.BooleanMap(data_type_definition) def testGetStructFormatString(self): """Tests the GetStructFormatString function.""" definitions_file = self._GetTestFilePath(['definitions', 'booleans.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('bool8') data_type_map = data_maps.BooleanMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, 'B') data_type_definition = definitions_registry.GetDefinitionByName('bool16') data_type_map = data_maps.BooleanMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, 'H') data_type_definition = definitions_registry.GetDefinitionByName('bool32') data_type_map = 
data_maps.BooleanMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, 'I') def testFoldByteStream(self): """Tests the FoldByteStream function.""" definitions_file = self._GetTestFilePath(['definitions', 'booleans.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('bool8') data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_map = data_maps.BooleanMap(data_type_definition) data_type_definition.false_value = 0 data_type_definition.true_value = 1 byte_stream = data_type_map.FoldByteStream(False) self.assertEqual(byte_stream, b'\x00') byte_stream = data_type_map.FoldByteStream(True) self.assertEqual(byte_stream, b'\x01') with self.assertRaises(errors.FoldingError): data_type_map.FoldByteStream(None) data_type_definition = definitions_registry.GetDefinitionByName('bool16') data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_definition.false_value = 0xffff data_type_definition.true_value = 1 data_type_map = data_maps.BooleanMap(data_type_definition) byte_stream = data_type_map.FoldByteStream(False) self.assertEqual(byte_stream, b'\xff\xff') byte_stream = data_type_map.FoldByteStream(True) self.assertEqual(byte_stream, b'\x01\x00') data_type_definition = definitions_registry.GetDefinitionByName('bool32') data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_definition.false_value = 0 data_type_definition.true_value = None data_type_map = data_maps.BooleanMap(data_type_definition) byte_stream = data_type_map.FoldByteStream(False) self.assertEqual(byte_stream, b'\x00\x00\x00\x00') with self.assertRaises(errors.FoldingError): data_type_map.FoldByteStream(True) def testMapByteStream(self): """Tests the MapByteStream function.""" definitions_file = self._GetTestFilePath(['definitions', 'booleans.yaml']) 
definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('bool8') data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_map = data_maps.BooleanMap(data_type_definition) data_type_definition.true_value = 1 bool_value = data_type_map.MapByteStream(b'\x00') self.assertFalse(bool_value) bool_value = data_type_map.MapByteStream(b'\x01') self.assertTrue(bool_value) with self.assertRaises(errors.MappingError): data_type_map.MapByteStream(b'\xff') data_type_definition = definitions_registry.GetDefinitionByName('bool16') data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_definition.false_value = None data_type_definition.true_value = 1 data_type_map = data_maps.BooleanMap(data_type_definition) bool_value = data_type_map.MapByteStream(b'\xff\xff') self.assertFalse(bool_value) bool_value = data_type_map.MapByteStream(b'\x01\x00') self.assertTrue(bool_value) data_type_definition = definitions_registry.GetDefinitionByName('bool32') data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_definition.true_value = None data_type_map = data_maps.BooleanMap(data_type_definition) bool_value = data_type_map.MapByteStream(b'\x00\x00\x00\x00') self.assertFalse(bool_value) bool_value = data_type_map.MapByteStream(b'\xff\xff\xff\xff') self.assertTrue(bool_value) with self.assertRaises(errors.ByteStreamTooSmallError): data_type_map.MapByteStream(b'\x01\x00') class CharacterMapTest(test_lib.BaseTestCase): """Character map tests.""" def testGetStructFormatString(self): """Tests the GetStructFormatString function.""" definitions_file = self._GetTestFilePath(['definitions', 'characters.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('char') data_type_map = data_maps.CharacterMap(data_type_definition) struct_format_string = 
data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, 'b') data_type_definition = definitions_registry.GetDefinitionByName('wchar16') data_type_map = data_maps.CharacterMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, 'h') data_type_definition = definitions_registry.GetDefinitionByName('wchar32') data_type_map = data_maps.CharacterMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, 'i') def testFoldByteStream(self): """Tests the FoldByteStream function.""" definitions_file = self._GetTestFilePath(['definitions', 'characters.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('char') data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_map = data_maps.CharacterMap(data_type_definition) byte_stream = data_type_map.FoldByteStream('A') self.assertEqual(byte_stream, b'\x41') data_type_definition = definitions_registry.GetDefinitionByName('wchar16') data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_map = data_maps.CharacterMap(data_type_definition) byte_stream = data_type_map.FoldByteStream('\u24b6') self.assertEqual(byte_stream, b'\xb6\x24') data_type_definition = definitions_registry.GetDefinitionByName('wchar32') data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_map = data_maps.CharacterMap(data_type_definition) byte_stream = data_type_map.FoldByteStream('\u24b6') self.assertEqual(byte_stream, b'\xb6\x24\x00\x00') def testMapByteStream(self): """Tests the MapByteStream function.""" definitions_file = self._GetTestFilePath(['definitions', 'characters.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('char') 
data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_map = data_maps.CharacterMap(data_type_definition) string_value = data_type_map.MapByteStream(b'\x41') self.assertEqual(string_value, 'A') data_type_definition = definitions_registry.GetDefinitionByName('wchar16') data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_map = data_maps.CharacterMap(data_type_definition) string_value = data_type_map.MapByteStream(b'\xb6\x24') self.assertEqual(string_value, '\u24b6') data_type_definition = definitions_registry.GetDefinitionByName('wchar32') data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_map = data_maps.CharacterMap(data_type_definition) string_value = data_type_map.MapByteStream(b'\xb6\x24\x00\x00') self.assertEqual(string_value, '\u24b6') with self.assertRaises(errors.ByteStreamTooSmallError): data_type_map.MapByteStream(b'\xb6\x24') class FloatingPointMapTest(test_lib.BaseTestCase): """Floating-point map tests.""" def testGetStructFormatString(self): """Tests the GetStructFormatString function.""" definitions_file = self._GetTestFilePath([ 'definitions', 'floating-points.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('float32') data_type_map = data_maps.FloatingPointMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, 'f') data_type_definition = definitions_registry.GetDefinitionByName('float64') data_type_map = data_maps.FloatingPointMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, 'd') def testFoldByteStream(self): """Tests the FoldByteStream function.""" definitions_file = self._GetTestFilePath([ 'definitions', 'floating-points.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) 
data_type_definition = definitions_registry.GetDefinitionByName('float32') data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_map = data_maps.FloatingPointMap(data_type_definition) byte_stream = data_type_map.FoldByteStream(12.34000015258789) self.assertEqual(byte_stream, b'\xa4\x70\x45\x41') data_type_definition = definitions_registry.GetDefinitionByName('float64') data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_map = data_maps.FloatingPointMap(data_type_definition) byte_stream = data_type_map.FoldByteStream(12.34) self.assertEqual(byte_stream, b'\xae\x47\xe1\x7a\x14\xae\x28\x40') def testMapByteStream(self): """Tests the MapByteStream function.""" definitions_file = self._GetTestFilePath([ 'definitions', 'floating-points.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('float32') data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_map = data_maps.FloatingPointMap(data_type_definition) float_value = data_type_map.MapByteStream(b'\xa4\x70\x45\x41') self.assertEqual(float_value, 12.34000015258789) data_type_definition = definitions_registry.GetDefinitionByName('float64') data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_map = data_maps.FloatingPointMap(data_type_definition) float_value = data_type_map.MapByteStream( b'\xae\x47\xe1\x7a\x14\xae\x28\x40') self.assertEqual(float_value, 12.34) with self.assertRaises(errors.ByteStreamTooSmallError): data_type_map.MapByteStream(b'\xa4\x70\x45\x41') class IntegerMapTest(test_lib.BaseTestCase): """Integer map tests.""" # pylint: disable=protected-access def testGetByteStreamOperation(self): """Tests the _GetByteStreamOperation function.""" definitions_file = self._GetTestFilePath(['integer.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = 
definitions_registry.GetDefinitionByName('int32le') data_type_map = data_maps.IntegerMap(data_type_definition) map_operation = data_type_map._GetByteStreamOperation() self.assertIsInstance(map_operation, byte_operations.StructOperation) def testGetStructFormatString(self): """Tests the GetStructFormatString function.""" definitions_file = self._GetTestFilePath(['definitions', 'integers.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('int8') data_type_map = data_maps.IntegerMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, 'b') data_type_definition = definitions_registry.GetDefinitionByName('int16') data_type_map = data_maps.IntegerMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, 'h') data_type_definition = definitions_registry.GetDefinitionByName('int32') data_type_map = data_maps.IntegerMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, 'i') data_type_definition = definitions_registry.GetDefinitionByName('int64') data_type_map = data_maps.IntegerMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, 'q') data_type_definition = definitions_registry.GetDefinitionByName('uint8') data_type_map = data_maps.IntegerMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, 'B') data_type_definition = definitions_registry.GetDefinitionByName('uint16') data_type_map = data_maps.IntegerMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, 'H') data_type_definition = definitions_registry.GetDefinitionByName('uint32') data_type_map = 
data_maps.IntegerMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, 'I') data_type_definition = definitions_registry.GetDefinitionByName('uint64') data_type_map = data_maps.IntegerMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, 'Q') def testFoldByteStream(self): """Tests the FoldByteStream function.""" definitions_file = self._GetTestFilePath(['definitions', 'integers.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('uint8') data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_map = data_maps.IntegerMap(data_type_definition) byte_stream = data_type_map.FoldByteStream(0x12) self.assertEqual(byte_stream, b'\x12') data_type_definition = definitions_registry.GetDefinitionByName('uint16') data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_map = data_maps.IntegerMap(data_type_definition) byte_stream = data_type_map.FoldByteStream(0x3412) self.assertEqual(byte_stream, b'\x12\x34') data_type_definition = definitions_registry.GetDefinitionByName('uint32') data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_map = data_maps.IntegerMap(data_type_definition) byte_stream = data_type_map.FoldByteStream(0x78563412) self.assertEqual(byte_stream, b'\x12\x34\x56\x78') data_type_definition = definitions_registry.GetDefinitionByName('uint32') data_type_definition.byte_order = definitions.BYTE_ORDER_BIG_ENDIAN data_type_map = data_maps.IntegerMap(data_type_definition) byte_stream = data_type_map.FoldByteStream(0x12345678) self.assertEqual(byte_stream, b'\x12\x34\x56\x78') data_type_definition = definitions_registry.GetDefinitionByName('uint64') data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_map = 
data_maps.IntegerMap(data_type_definition) byte_stream = data_type_map.FoldByteStream(0xf0debc9a78563412) self.assertEqual(byte_stream, b'\x12\x34\x56\x78\x9a\xbc\xde\xf0') def testMapByteStream(self): """Tests the MapByteStream function.""" definitions_file = self._GetTestFilePath(['definitions', 'integers.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('uint8') data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_map = data_maps.IntegerMap(data_type_definition) integer_value = data_type_map.MapByteStream(b'\x12') self.assertEqual(integer_value, 0x12) data_type_definition = definitions_registry.GetDefinitionByName('uint16') data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_map = data_maps.IntegerMap(data_type_definition) integer_value = data_type_map.MapByteStream(b'\x12\x34') self.assertEqual(integer_value, 0x3412) data_type_definition = definitions_registry.GetDefinitionByName('uint32') data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_map = data_maps.IntegerMap(data_type_definition) integer_value = data_type_map.MapByteStream(b'\x12\x34\x56\x78') self.assertEqual(integer_value, 0x78563412) data_type_definition = definitions_registry.GetDefinitionByName('uint32') data_type_definition.byte_order = definitions.BYTE_ORDER_BIG_ENDIAN data_type_map = data_maps.IntegerMap(data_type_definition) integer_value = data_type_map.MapByteStream(b'\x12\x34\x56\x78') self.assertEqual(integer_value, 0x12345678) data_type_definition = definitions_registry.GetDefinitionByName('uint64') data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_map = data_maps.IntegerMap(data_type_definition) integer_value = data_type_map.MapByteStream( b'\x12\x34\x56\x78\x9a\xbc\xde\xf0') self.assertEqual(integer_value, 0xf0debc9a78563412) with 
self.assertRaises(errors.ByteStreamTooSmallError): data_type_map.MapByteStream(b'\x12\x34\x56\x78') class UUIDMapTest(test_lib.BaseTestCase): """UUID map tests.""" def testFoldByteStream(self): """Tests the FoldByteStream function.""" definitions_file = self._GetTestFilePath(['uuid.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('uuid') data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_map = data_maps.UUIDMap(data_type_definition) uuid_value = uuid.UUID('{00021401-0000-0000-c000-000000000046}') byte_stream = data_type_map.FoldByteStream(uuid_value) expected_byte_stream = ( b'\x01\x14\x02\x00\x00\x00\x00\x00\xc0\x00\x00\x00\x00\x00\x00\x46') self.assertEqual(byte_stream, expected_byte_stream) def testMapByteStream(self): """Tests the MapByteStream function.""" definitions_file = self._GetTestFilePath(['uuid.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('uuid') data_type_definition.byte_order = definitions.BYTE_ORDER_LITTLE_ENDIAN data_type_map = data_maps.UUIDMap(data_type_definition) expected_uuid_value = uuid.UUID('{00021401-0000-0000-c000-000000000046}') uuid_value = data_type_map.MapByteStream( b'\x01\x14\x02\x00\x00\x00\x00\x00\xc0\x00\x00\x00\x00\x00\x00\x46') self.assertEqual(uuid_value, expected_uuid_value) class ElementSequenceDataTypeMapTest(test_lib.BaseTestCase): """Element sequence data type map tests.""" # pylint: disable=protected-access def testInitialize(self): """Tests the __init__ function.""" definitions_file = self._GetTestFilePath(['sequence.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('vector4') data_type_map = data_maps.ElementSequenceDataTypeMap(data_type_definition) 
self.assertIsNotNone(data_type_map) def testCalculateElementsDataSize(self): """Tests the _CalculateElementsDataSize function.""" definitions_file = self._GetTestFilePath(['sequence.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('triangle4') data_type_map = data_maps.ElementSequenceDataTypeMap(data_type_definition) context = data_maps.DataTypeMapContext() elements_data_size = data_type_map._CalculateElementsDataSize(context) self.assertEqual(elements_data_size, 48) definitions_file = self._GetTestFilePath(['sequence_with_context.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('nvector') data_type_map = data_maps.ElementSequenceDataTypeMap(data_type_definition) context = data_maps.DataTypeMapContext(values={'n': 99}) elements_data_size = data_type_map._CalculateElementsDataSize(context) self.assertEqual(elements_data_size, 396) data_type_definition = definitions_registry.GetDefinitionByName( 'variable_size_vector') data_type_map = data_maps.ElementSequenceDataTypeMap(data_type_definition) context = data_maps.DataTypeMapContext(values={'vector_size': 404}) elements_data_size = data_type_map._CalculateElementsDataSize(context) self.assertEqual(elements_data_size, 404) def testEvaluateElementsDataSize(self): """Tests the _EvaluateElementsDataSize function.""" definitions_file = self._GetTestFilePath(['sequence_with_context.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'fixed_size_vector') data_type_map = data_maps.ElementSequenceDataTypeMap(data_type_definition) context = data_maps.DataTypeMapContext() elements_data_size = data_type_map._EvaluateElementsDataSize(context) self.assertEqual(elements_data_size, 32) data_type_definition = 
definitions_registry.GetDefinitionByName( 'variable_size_vector') data_type_map = data_maps.ElementSequenceDataTypeMap(data_type_definition) context = data_maps.DataTypeMapContext(values={'vector_size': 404}) elements_data_size = data_type_map._EvaluateElementsDataSize(context) self.assertEqual(elements_data_size, 404) with self.assertRaises(errors.MappingError): context = data_maps.DataTypeMapContext() data_type_map._EvaluateElementsDataSize(context) with self.assertRaises(errors.MappingError): context = data_maps.DataTypeMapContext(values={'vector_size': -404}) data_type_map._EvaluateElementsDataSize(context) with self.assertRaises(errors.MappingError): context = data_maps.DataTypeMapContext(values={'vector_size': 'bogus'}) data_type_map._EvaluateElementsDataSize(context) def testEvaluateNumberOfElements(self): """Tests the _EvaluateNumberOfElements function.""" definitions_file = self._GetTestFilePath(['sequence.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('triangle4') data_type_map = data_maps.ElementSequenceDataTypeMap(data_type_definition) context = data_maps.DataTypeMapContext() number_of_elements = data_type_map._EvaluateNumberOfElements(context) self.assertEqual(number_of_elements, 3) definitions_file = self._GetTestFilePath(['sequence_with_context.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('nvector') data_type_map = data_maps.ElementSequenceDataTypeMap(data_type_definition) context = data_maps.DataTypeMapContext(values={'n': 99}) number_of_elements = data_type_map._EvaluateNumberOfElements(context) self.assertEqual(number_of_elements, 99) with self.assertRaises(errors.MappingError): context = data_maps.DataTypeMapContext() data_type_map._EvaluateNumberOfElements(context) with self.assertRaises(errors.MappingError): context = 
data_maps.DataTypeMapContext(values={'n': -99}) data_type_map._EvaluateNumberOfElements(context) with self.assertRaises(errors.MappingError): context = data_maps.DataTypeMapContext(values={'n': 'bogus'}) data_type_map._EvaluateNumberOfElements(context) def testGetElementDataTypeDefinition(self): """Tests the _GetElementDataTypeDefinition function.""" definitions_file = self._GetTestFilePath(['sequence.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('vector4') data_type_map = data_maps.ElementSequenceDataTypeMap(data_type_definition) element_data_type_definition = data_type_map._GetElementDataTypeDefinition( data_type_definition) self.assertIsNotNone(element_data_type_definition) with self.assertRaises(errors.FormatError): data_type_map._GetElementDataTypeDefinition(None) with self.assertRaises(errors.FormatError): data_type_definition = EmptyDataTypeDefinition('empty') data_type_map._GetElementDataTypeDefinition(data_type_definition) def testHasElementsDataSize(self): """Tests the _HasElementsDataSize function.""" definitions_file = self._GetTestFilePath(['sequence.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('vector4') data_type_map = data_maps.ElementSequenceDataTypeMap(data_type_definition) result = data_type_map._HasElementsDataSize() self.assertFalse(result) def testHasElementsTerminator(self): """Tests the _HasElementsTerminator function.""" definitions_file = self._GetTestFilePath(['sequence.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('vector4') data_type_map = data_maps.ElementSequenceDataTypeMap(data_type_definition) result = data_type_map._HasElementsTerminator() self.assertFalse(result) def testHasNumberOfElements(self): """Tests the 
_HasNumberOfElements function.""" definitions_file = self._GetTestFilePath(['sequence.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('vector4') data_type_map = data_maps.ElementSequenceDataTypeMap(data_type_definition) result = data_type_map._HasNumberOfElements() self.assertTrue(result) def testGetSizeHint(self): """Tests the GetSizeHint function.""" definitions_file = self._GetTestFilePath(['sequence.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('vector4') data_type_map = data_maps.ElementSequenceDataTypeMap(data_type_definition) size_hint = data_type_map.GetSizeHint() self.assertEqual(size_hint, 16) definitions_file = self._GetTestFilePath(['sequence_with_structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('vectors') data_type_map = data_maps.ElementSequenceDataTypeMap(data_type_definition) size_hint = data_type_map.GetSizeHint() self.assertEqual(size_hint, 4) def testGetStructByteOrderString(self): """Tests the GetStructByteOrderString function.""" definitions_file = self._GetTestFilePath(['sequence.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('vector4') data_type_map = data_maps.ElementSequenceDataTypeMap(data_type_definition) byte_order_string = data_type_map.GetStructByteOrderString() self.assertEqual(byte_order_string, '<') class SequenceMapTest(test_lib.BaseTestCase): """Sequence map tests.""" # pylint: disable=protected-access def testInitialize(self): """Tests the __init__ function.""" definitions_file = self._GetTestFilePath(['sequence.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) 
data_type_definition = definitions_registry.GetDefinitionByName('vector4') data_type_map = data_maps.SequenceMap(data_type_definition) self.assertIsNotNone(data_type_map) # TODO: add tests for _CompositeFoldByteStream once implemented. def testCompositeMapByteStream(self): """Tests the _CompositeMapByteStream function.""" definitions_file = self._GetTestFilePath(['sequence.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'triangle4') data_type_map = data_maps.SequenceMap(data_type_definition) byte_values = [ value.to_bytes(4, byteorder='little') for value in range(1, 13)] byte_stream = b''.join(byte_values) sequence_value = data_type_map._CompositeMapByteStream(byte_stream) self.assertEqual( sequence_value, ((1, 2, 3, 4), (5, 6, 7, 8), (9, 10, 11, 12))) with self.assertRaises(errors.MappingError): data_type_map._CompositeMapByteStream(None) with self.assertRaises(errors.MappingError): data_type_map._CompositeMapByteStream(byte_stream, recursion_depth=999) with self.assertRaises(errors.ByteStreamTooSmallError): data_type_map._CompositeMapByteStream(b'\x12\x34\x56') def testLinearFoldByteStream(self): """Tests the _LinearFoldByteStream function.""" definitions_file = self._GetTestFilePath(['sequence.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('vector4') data_type_map = data_maps.SequenceMap(data_type_definition) byte_stream = data_type_map._LinearFoldByteStream((1, 2, 3, 4)) expected_sequence_value = ( b'\x01\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00') self.assertEqual(byte_stream, expected_sequence_value) def testLinearMapByteStream(self): """Tests the _LinearMapByteStream function.""" definitions_file = self._GetTestFilePath(['sequence.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) 
data_type_definition = definitions_registry.GetDefinitionByName('vector4') data_type_map = data_maps.SequenceMap(data_type_definition) sequence_value = data_type_map._LinearMapByteStream( b'\x01\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00') self.assertEqual(sequence_value, (1, 2, 3, 4)) with self.assertRaises(errors.MappingError): data_type_map._LinearMapByteStream(None) with self.assertRaises(errors.ByteStreamTooSmallError): data_type_map._LinearMapByteStream(b'\x12\x34\x56') def testFoldByteStream(self): """Tests the FoldByteStream function.""" definitions_file = self._GetTestFilePath(['sequence.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('vector4') data_type_map = data_maps.SequenceMap(data_type_definition) byte_stream = data_type_map.FoldByteStream((1, 2, 3, 4)) expected_sequence_value = ( b'\x01\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00') self.assertEqual(byte_stream, expected_sequence_value) def testGetStructFormatString(self): """Tests the GetStructFormatString function.""" definitions_file = self._GetTestFilePath(['sequence.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('vector4') data_type_map = data_maps.SequenceMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, '4i') data_type_definition.elements_data_size = 16 data_type_definition.number_of_elements = 0 data_type_map = data_maps.SequenceMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, '4i') data_type_definition.elements_data_size = 0 data_type_definition.number_of_elements = 0 data_type_map = data_maps.SequenceMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() 
self.assertIsNone(struct_format_string) def testMapByteStream(self): """Tests the MapByteStream function.""" definitions_file = self._GetTestFilePath(['sequence.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('vector4') data_type_map = data_maps.SequenceMap(data_type_definition) sequence_value = data_type_map.MapByteStream( b'\x01\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00') self.assertEqual(sequence_value, (1, 2, 3, 4)) class StreamMapTest(test_lib.BaseTestCase): """Stream map tests.""" # pylint: disable=protected-access def testInitialize(self): """Tests the __init__ function.""" definitions_file = self._GetTestFilePath(['stream.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'utf16le_stream') data_type_map = data_maps.StreamMap(data_type_definition) self.assertIsNotNone(data_type_map) def testFoldByteStream(self): """Tests the FoldByteStream function.""" definitions_file = self._GetTestFilePath(['stream.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'utf16le_stream') data_type_map = data_maps.StreamMap(data_type_definition) expected_byte_stream = b'd\x00t\x00F\x00a\x00b\x00r\x00i\x00c\x00' byte_stream = data_type_map.FoldByteStream(expected_byte_stream) self.assertEqual(byte_stream, expected_byte_stream) # Test with data type definition with elements date size. 
data_type_definition = definitions_registry.GetDefinitionByName( 'utf16le_stream_with_size') data_type_map = data_maps.StreamMap(data_type_definition) context = data_maps.DataTypeMapContext({'size': 16}) expected_byte_stream = b'd\x00t\x00F\x00a\x00b\x00r\x00i\x00c\x00' byte_stream = data_type_map.FoldByteStream( expected_byte_stream, context=context) self.assertEqual(byte_stream, expected_byte_stream) context = data_maps.DataTypeMapContext({'size': 8}) with self.assertRaises(errors.FoldingError): data_type_map.FoldByteStream(expected_byte_stream, context=context) # Test with data type definition with elements terminator. data_type_definition = definitions_registry.GetDefinitionByName( 'utf16le_stream_with_terminator') data_type_map = data_maps.StreamMap(data_type_definition) expected_byte_stream = b'd\x00t\x00F\x00a\x00b\x00r\x00i\x00c\x00\x00\x00' byte_stream = data_type_map.FoldByteStream(expected_byte_stream) self.assertEqual(byte_stream, expected_byte_stream) def testGetStructFormatString(self): """Tests the GetStructFormatString function.""" definitions_file = self._GetTestFilePath(['stream.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'utf16le_stream') data_type_map = data_maps.StreamMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, '16B') data_type_definition.elements_data_size = 16 data_type_definition.number_of_elements = 0 data_type_map = data_maps.StreamMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, '16B') data_type_definition.elements_data_size = 0 data_type_definition.number_of_elements = 0 data_type_map = data_maps.StreamMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertIsNone(struct_format_string) def testMapByteStream(self): """Tests the 
MapByteStream function.""" definitions_file = self._GetTestFilePath(['stream.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'utf16le_stream') data_type_map = data_maps.StreamMap(data_type_definition) byte_stream = 'dtFabric'.encode('utf-16-le') stream_value = data_type_map.MapByteStream(byte_stream) self.assertEqual(stream_value, b'd\x00t\x00F\x00a\x00b\x00r\x00i\x00c\x00') with self.assertRaises(errors.MappingError): data_type_map.MapByteStream(None) with self.assertRaises(errors.ByteStreamTooSmallError): data_type_map.MapByteStream(b'\x12\x34\x56') class StringMapTest(test_lib.BaseTestCase): """String map tests.""" def testFoldByteStream(self): """Tests the FoldByteStream function.""" definitions_file = self._GetTestFilePath(['string.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'utf16_string') data_type_map = data_maps.StringMap(data_type_definition) expected_byte_stream = 'dtFabric'.encode('utf-16-le') byte_stream = data_type_map.FoldByteStream('dtFabric') self.assertEqual(byte_stream, expected_byte_stream) def testGetStructFormatString(self): """Tests the GetStructFormatString function.""" definitions_file = self._GetTestFilePath(['string.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'utf16_string') data_type_map = data_maps.StreamMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, '16B') data_type_definition.elements_data_size = 16 data_type_definition.number_of_elements = 0 data_type_map = data_maps.StringMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, '16B') 
data_type_definition.elements_data_size = 0 data_type_definition.number_of_elements = 0 data_type_map = data_maps.StringMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertIsNone(struct_format_string) def testMapByteStream(self): """Tests the MapByteStream function.""" definitions_file = self._GetTestFilePath(['string.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'utf16_string') data_type_map = data_maps.StringMap(data_type_definition) byte_stream = 'dtFabric'.encode('utf-16-le') string_value = data_type_map.MapByteStream(byte_stream) self.assertEqual(string_value, 'dtFabric') with self.assertRaises(errors.MappingError): data_type_map.MapByteStream(None) with self.assertRaises(errors.ByteStreamTooSmallError): data_type_map.MapByteStream(b'\x12\x34\x56') data_type_definition = definitions_registry.GetDefinitionByName( 'utf8_string') data_type_map = data_maps.StringMap(data_type_definition) byte_stream = 'dtFabric\x00and more'.encode('utf8') string_value = data_type_map.MapByteStream(byte_stream) self.assertEqual(string_value, 'dtFabric') with self.assertRaises(errors.ByteStreamTooSmallError): data_type_map.MapByteStream(byte_stream[:7]) data_type_definition = definitions_registry.GetDefinitionByName( 'utf8_string_fixed_size') data_type_map = data_maps.StringMap(data_type_definition) byte_stream = 'dtFabric\x00and so.'.encode('utf8') string_value = data_type_map.MapByteStream(byte_stream) self.assertEqual(string_value, 'dtFabric') with self.assertRaises(errors.ByteStreamTooSmallError): data_type_map.MapByteStream(byte_stream[:15]) class StructureMapTest(test_lib.BaseTestCase): """Structure map tests.""" # pylint: disable=protected-access def testInitialize(self): """Tests the __init__ function.""" definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = 
self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('point3d') data_type_map = data_maps.StructureMap(data_type_definition) self.assertIsNotNone(data_type_map) def testCheckLinearMap(self): """Tests the _CheckLinearMap function.""" definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('point3d') data_type_map = data_maps.StructureMap(data_type_definition) result = data_type_map._CheckLinearMap(data_type_definition) self.assertTrue(result) with self.assertRaises(errors.FormatError): data_type_map._CheckLinearMap(None) with self.assertRaises(errors.FormatError): data_type_definition = EmptyDataTypeDefinition('empty') data_type_map._CheckLinearMap(data_type_definition) data_type_definition = definitions_registry.GetDefinitionByName( 'triangle3d') data_type_map = data_maps.StructureMap(data_type_definition) result = data_type_map._CheckLinearMap(data_type_definition) self.assertFalse(result) data_type_definition = definitions_registry.GetDefinitionByName('box3d') data_type_map = data_maps.StructureMap(data_type_definition) result = data_type_map._CheckLinearMap(data_type_definition) self.assertFalse(result) data_type_definition = definitions_registry.GetDefinitionByName( 'sphere3d') data_type_map = data_maps.StructureMap(data_type_definition) result = data_type_map._CheckLinearMap(data_type_definition) self.assertFalse(result) # TODO: add test with padding # TODO: add tests for _CompositeFoldByteStream. 
def testCompositeMapByteStream(self): """Tests the _CompositeMapByteStream function.""" definitions_file = self._GetTestFilePath(['structure_with_string.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'utf16_string') data_type_map = data_maps.StructureMap(data_type_definition) text_stream = 'dtFabric'.encode('utf-16-le') byte_stream = b''.join([ bytes(bytearray([len(text_stream), 0])), text_stream]) utf16_string = data_type_map._CompositeMapByteStream(byte_stream) self.assertEqual(utf16_string.size, len(text_stream)) self.assertEqual(utf16_string.text, 'dtFabric') with self.assertRaises(errors.MappingError): data_type_map._CompositeMapByteStream(byte_stream, recursion_depth=999) byte_stream = b''.join([bytes(bytearray([3, 0])), text_stream]) with self.assertRaises(errors.MappingError): data_type_map._CompositeMapByteStream(byte_stream) definitions_file = self._GetTestFilePath(['structure_with_context.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'instance_block_header') data_type_map = data_maps.StructureMap(data_type_definition) context = data_maps.DataTypeMapContext(values={'number_of_properties': 3}) byte_stream = bytes(bytearray([ 10, 0, 0, 0, 128, 1, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 0])) instance_block_header = data_type_map._CompositeMapByteStream( byte_stream, context=context) self.assertEqual(instance_block_header.name_offset, 10) self.assertEqual(instance_block_header.unknown1, 0x80) self.assertEqual(instance_block_header.property_value_offsets, (1, 2, 3)) def testGetMemberDataTypeMaps(self): """Tests the _GetMemberDataTypeMaps function.""" definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = 
definitions_registry.GetDefinitionByName('point3d') data_type_map = data_maps.StructureMap(data_type_definition) data_type_map._attribute_names = None data_type_map._data_type_maps = None data_type_map._number_of_attributes = None data_type_map._GetMemberDataTypeMaps(data_type_definition) self.assertIsNotNone(data_type_map._data_type_maps) self.assertEqual(data_type_map._number_of_attributes, 3) self.assertEqual(data_type_map._attribute_names, ['x', 'y', 'z']) with self.assertRaises(errors.FormatError): data_type_map._GetMemberDataTypeMaps(None) with self.assertRaises(errors.FormatError): data_type_definition = EmptyDataTypeDefinition('empty') data_type_map._GetMemberDataTypeMaps(data_type_definition) def testLinearFoldByteStream(self): """Tests the _LinearFoldByteStream function.""" definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('point3d') data_type_map = data_maps.StructureMap(data_type_definition) expected_byte_values = [ value.to_bytes(4, byteorder='little') for value in range(1, 4)] expected_byte_stream = b''.join(expected_byte_values) point3d = data_type_map.CreateStructureValues(x=1, y=2, z=3) byte_stream = data_type_map._LinearFoldByteStream(point3d) self.assertEqual(byte_stream, expected_byte_stream) def testLinearMapByteStream(self): """Tests the _LinearMapByteStream function.""" definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('point3d') data_type_map = data_maps.StructureMap(data_type_definition) byte_values = [ value.to_bytes(4, byteorder='little') for value in range(1, 4)] byte_stream = b''.join(byte_values) point3d = data_type_map._LinearMapByteStream(byte_stream) self.assertEqual(point3d.x, 1) self.assertEqual(point3d.y, 2) 
self.assertEqual(point3d.z, 3) data_type_definition = definitions_registry.GetDefinitionByName('point3d') data_type_definition.byte_order = definitions.BYTE_ORDER_BIG_ENDIAN data_type_map = data_maps.StructureMap(data_type_definition) point3d = data_type_map._LinearMapByteStream(byte_stream) self.assertEqual(point3d.x, 0x01000000) self.assertEqual(point3d.y, 0x02000000) self.assertEqual(point3d.z, 0x03000000) # TODO: add tests for CreateStructureValues. def testGetStructFormatString(self): """Tests the GetStructFormatString function.""" definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('point3d') data_type_map = data_maps.StructureMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertEqual(struct_format_string, 'iii') # Test with member without a struct format string. data_type_definition = data_types.StructureDefinition( 'my_struct_type', aliases=['MY_STRUCT_TYPE'], description='my structure type') member_definition = TestDataTypeDefinition('test') structure_member_definition = data_types.MemberDataTypeDefinition( 'my_struct_member', member_definition, aliases=['MY_STRUCT_MEMBER'], data_type='test', description='my structure member') data_type_definition.AddMemberDefinition(structure_member_definition) data_type_map = data_maps.StructureMap(data_type_definition) struct_format_string = data_type_map.GetStructFormatString() self.assertIsNone(struct_format_string) def testMapByteStream(self): """Tests the MapByteStream function.""" definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('point3d') data_type_map = data_maps.StructureMap(data_type_definition) byte_values = [ value.to_bytes(4, byteorder='little') for value in 
range(1, 4)] byte_stream = b''.join(byte_values) point3d = data_type_map.MapByteStream(byte_stream) self.assertEqual(point3d.x, 1) self.assertEqual(point3d.y, 2) self.assertEqual(point3d.z, 3) data_type_definition = definitions_registry.GetDefinitionByName('point3d') data_type_definition.byte_order = definitions.BYTE_ORDER_BIG_ENDIAN data_type_map = data_maps.StructureMap(data_type_definition) point3d = data_type_map.MapByteStream(byte_stream) self.assertEqual(point3d.x, 0x01000000) self.assertEqual(point3d.y, 0x02000000) self.assertEqual(point3d.z, 0x03000000) def testMapByteStreamWithSequence(self): """Tests the MapByteStream function with a sequence.""" definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('box3d') data_type_map = data_maps.StructureMap(data_type_definition) byte_values = [ value.to_bytes(4, byteorder='little') for value in range(1, 433)] byte_stream = b''.join(byte_values) box = data_type_map.MapByteStream(byte_stream) self.assertEqual(box.triangles[0].a.x, 1) self.assertEqual(box.triangles[0].a.y, 2) self.assertEqual(box.triangles[0].a.z, 3) def testMapByteStreamWithSequenceWithCondition(self): """Tests the MapByteStream function with a sequence with condition.""" definitions_file = self._GetTestFilePath(['structure_with_condition.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'structure_with_condition') data_type_map = data_maps.StructureMap(data_type_definition) byte_values = [0x8001.to_bytes(2, byteorder='little')] byte_values.extend([ value.to_bytes(4, byteorder='little') for value in range(1, 6)]) byte_stream = b''.join(byte_values) structure_with_condition = data_type_map.MapByteStream(byte_stream) self.assertEqual(structure_with_condition.flags, 0x8001) 
self.assertEqual(structure_with_condition.data1, 1) self.assertEqual(structure_with_condition.conditional_data1, 2) self.assertEqual(structure_with_condition.data2, 3) self.assertEqual(structure_with_condition.conditional_data2, 4) self.assertEqual(structure_with_condition.data3, 5) byte_values = [0x0001.to_bytes(2, byteorder='little')] byte_values.extend([ value.to_bytes(4, byteorder='little') for value in range(1, 6)]) byte_stream = b''.join(byte_values) structure_with_condition = data_type_map.MapByteStream(byte_stream) self.assertEqual(structure_with_condition.flags, 0x0001) self.assertEqual(structure_with_condition.data1, 1) self.assertEqual(structure_with_condition.conditional_data1, 2) self.assertEqual(structure_with_condition.data2, 3) self.assertIsNone(structure_with_condition.conditional_data2) self.assertEqual(structure_with_condition.data3, 4) def testMapByteStreamWithSequenceWithValues(self): """Tests the MapByteStream function with a sequence with values.""" definitions_file = self._GetTestFilePath(['structure_with_values.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'structure_with_values') data_type_map = data_maps.StructureMap(data_type_definition) byte_values = [0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00] byte_stream = bytes(bytearray(byte_values)) structure_with_values = data_type_map.MapByteStream(byte_stream) self.assertEqual(structure_with_values.format_version, 2) self.assertEqual(structure_with_values.data_size, 0) byte_values = [0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00] byte_stream = bytes(bytearray(byte_values)) with self.assertRaises(errors.MappingError): data_type_map.MapByteStream(byte_stream) data_type_definition = definitions_registry.GetDefinitionByName( 'structure_with_value') data_type_map = data_maps.StructureMap(data_type_definition) byte_values = [0x74, 0x65, 0x73, 0x74, 0x00, 0x00, 0x00, 0x00] byte_stream = 
bytes(bytearray(byte_values)) structure_with_values = data_type_map.MapByteStream(byte_stream) self.assertEqual(structure_with_values.signature, b'test') self.assertEqual(structure_with_values.data_size, 0) self.assertEqual(structure_with_values.data, b'') byte_values = [0x54, 0x45, 0x53, 0x54, 0x00, 0x00, 0x00, 0x00] byte_stream = bytes(bytearray(byte_values)) with self.assertRaises(errors.MappingError): data_type_map.MapByteStream(byte_stream) def testMapByteStreamWithSequenceWithExpression(self): """Tests the MapByteStream function with a sequence with expression.""" definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('sphere3d') data_type_map = data_maps.StructureMap(data_type_definition) # Note that 3.to_bytes() is not supported as syntax. byte_values = [0x3.to_bytes(4, byteorder='little')] byte_values.extend([ value.to_bytes(4, byteorder='little') for value in range(1, 113)]) byte_stream = b''.join(byte_values) sphere = data_type_map.MapByteStream(byte_stream) self.assertEqual(sphere.number_of_triangles, 3) self.assertEqual(sphere.triangles[0].a.x, 1) self.assertEqual(sphere.triangles[0].a.y, 2) self.assertEqual(sphere.triangles[0].a.z, 3) self.assertEqual(sphere.triangles[0].b.x, 4) self.assertEqual(sphere.triangles[0].b.y, 5) self.assertEqual(sphere.triangles[0].b.z, 6) self.assertEqual(sphere.triangles[0].c.x, 7) self.assertEqual(sphere.triangles[0].c.y, 8) self.assertEqual(sphere.triangles[0].c.z, 9) self.assertEqual(sphere.triangles[2].c.x, 25) self.assertEqual(sphere.triangles[2].c.y, 26) self.assertEqual(sphere.triangles[2].c.z, 27) # Test incremental map. 
context = data_maps.DataTypeMapContext() with self.assertRaises(errors.ByteStreamTooSmallError): data_type_map.MapByteStream(byte_stream[:64], context=context) sphere = data_type_map.MapByteStream(byte_stream[64:], context=context) self.assertEqual(sphere.number_of_triangles, 3) self.assertEqual(sphere.triangles[0].a.x, 1) self.assertEqual(sphere.triangles[0].a.y, 2) self.assertEqual(sphere.triangles[0].a.z, 3) self.assertEqual(sphere.triangles[2].c.x, 25) self.assertEqual(sphere.triangles[2].c.y, 26) self.assertEqual(sphere.triangles[2].c.z, 27) def testMapByteStreamWithPadding(self): """Tests the MapByteStream function with padding.""" definitions_file = self._GetTestFilePath(['structure_with_padding.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'structure_with_padding') data_type_map = data_maps.StructureMap(data_type_definition) byte_values = [0, 1, 2, 3, 4, 5, 6, 7] byte_stream = bytes(bytearray(byte_values)) structure = data_type_map.MapByteStream(byte_stream) self.assertEqual(structure.data_size, 256) self.assertEqual(structure.padding, b'\x02\x03\x04\x05\x06\x07') with self.assertRaises(errors.ByteStreamTooSmallError): data_type_map.MapByteStream(byte_stream[:7]) data_type_definition = definitions_registry.GetDefinitionByName( 'structure_with_padding_and_stream') data_type_map = data_maps.StructureMap(data_type_definition) byte_values = [1, 0, 2, 3, 4, 5, 6, 7] byte_stream = bytes(bytearray(byte_values)) structure = data_type_map.MapByteStream(byte_stream) self.assertEqual(structure.data_size, 1) self.assertEqual(structure.data, b'\x02') self.assertEqual(structure.padding, b'\x03\x04\x05\x06\x07') def testMapByteStreamWithSequenceWithExpression2(self): """Tests the MapByteStream function with a sequence with expression.""" definitions_file = self._GetTestFilePath(['structure_with_sequence.yaml']) definitions_registry = 
self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'extension_block') data_type_map = data_maps.StructureMap(data_type_definition) byte_values = [4, 1, 0, 0] for byte_value in range(0, 256): byte_values.extend([byte_value]) byte_stream = bytes(bytearray(byte_values)) extension_block = data_type_map.MapByteStream(byte_stream) self.assertEqual(extension_block.size, 260) self.assertEqual(extension_block.data[0], 0) self.assertEqual(extension_block.data[-1], 255) byte_values = [3, 0, 0, 0] for byte_value in range(0, 256): byte_values.extend([byte_value]) byte_stream = bytes(bytearray(byte_values)) with self.assertRaises(errors.MappingError): data_type_map.MapByteStream(byte_stream) def testMapByteStreamWithStream(self): """Tests the MapByteStream function with a stream.""" definitions_file = self._GetTestFilePath(['structure_with_stream.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'extension_block') data_type_map = data_maps.StructureMap(data_type_definition) byte_values = [4, 1, 0, 0] for byte_value in range(0, 256): byte_values.extend([byte_value]) byte_stream = bytes(bytearray(byte_values)) extension_block = data_type_map.MapByteStream(byte_stream) self.assertEqual(extension_block.size, 260) self.assertEqual(extension_block.data, byte_stream[4:]) byte_values = [3, 0, 0, 0] for byte_value in range(0, 256): byte_values.extend([byte_value]) byte_stream = bytes(bytearray(byte_values)) with self.assertRaises(errors.MappingError): data_type_map.MapByteStream(byte_stream) def testMapByteStreamWithString(self): """Tests the MapByteStream function with a string.""" definitions_file = self._GetTestFilePath(['structure_with_string.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'utf16_string') 
data_type_map = data_maps.StructureMap(data_type_definition) text_stream = 'dtFabric'.encode('utf-16-le') byte_stream = b''.join([ bytes(bytearray([len(text_stream), 0])), text_stream]) utf16_string = data_type_map.MapByteStream(byte_stream) self.assertEqual(utf16_string.size, len(text_stream)) self.assertEqual(utf16_string.text, 'dtFabric') byte_stream = b''.join([bytes(bytearray([3, 0])), text_stream]) with self.assertRaises(errors.MappingError): data_type_map.MapByteStream(byte_stream) def testMapByteStreamWithStringArray(self): """Tests the MapByteStream function with a string array.""" definitions_file = self._GetTestFilePath(['string_array.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'string_array') data_type_map = data_maps.StructureMap(data_type_definition) text_stream1 = 'dtFabric\x00'.encode('ascii') text_stream2 = 'supports\x00'.encode('ascii') text_stream3 = 'a string array\x00'.encode('ascii') byte_stream = b''.join([ bytes(bytearray([3, 0, 0, 0])), text_stream1, text_stream2, text_stream3]) string_array = data_type_map.MapByteStream(byte_stream) self.assertEqual(string_array.number_of_strings, 3) self.assertEqual(string_array.strings[0], 'dtFabric') self.assertEqual(string_array.strings[1], 'supports') self.assertEqual(string_array.strings[2], 'a string array') byte_stream = b''.join([ bytes(bytearray([3, 0, 0, 0])), text_stream1, text_stream2]) with self.assertRaises(errors.ByteStreamTooSmallError): data_type_map.MapByteStream(byte_stream) data_type_definition = definitions_registry.GetDefinitionByName( 'string_array_with_size') data_type_map = data_maps.StructureMap(data_type_definition) text_stream1 = 'dtFabric\x00'.encode('ascii') text_stream2 = 'supports\x00'.encode('ascii') text_stream3 = 'a string array\x00'.encode('ascii') byte_stream = b''.join([ bytes(bytearray([33, 0, 0, 0])), text_stream1, text_stream2, text_stream3]) string_array = 
data_type_map.MapByteStream(byte_stream) self.assertEqual(string_array.strings_data_size, 33) self.assertEqual(string_array.strings[0], 'dtFabric') self.assertEqual(string_array.strings[1], 'supports') self.assertEqual(string_array.strings[2], 'a string array') byte_stream = b''.join([ bytes(bytearray([33, 0, 0, 0])), text_stream1, text_stream2]) with self.assertRaises(errors.ByteStreamTooSmallError): data_type_map.MapByteStream(byte_stream) def testGetSizeHint(self): """Tests the GetSizeHint function.""" definitions_file = self._GetTestFilePath(['structure_with_string.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'utf16_string') data_type_map = data_maps.StructureMap(data_type_definition) context = data_maps.DataTypeMapContext() text_stream = 'dtFabric'.encode('utf-16-le') byte_stream = b''.join([ bytes(bytearray([len(text_stream), 0])), text_stream]) size_hint = data_type_map.GetSizeHint(context=context) self.assertEqual(size_hint, 2) with self.assertRaises(errors.ByteStreamTooSmallError): data_type_map.MapByteStream(byte_stream[:size_hint], context=context) size_hint = data_type_map.GetSizeHint(context=context) self.assertEqual(size_hint, 18) class PaddingMapTest(test_lib.BaseTestCase): """Padding map tests.""" def testFoldByteStream(self): """Tests the FoldByteStream function.""" data_type_definition = data_types.PaddingDefinition( 'padding', alignment_size=4, description='alignment_padding') data_type_map = data_maps.PaddingMap(data_type_definition) byte_stream = data_type_map.FoldByteStream(b'\x11\x22\x33') self.assertEqual(byte_stream, b'\x11\x22\x33') def testMapByteStream(self): """Tests the MapByteStream function.""" data_type_definition = data_types.PaddingDefinition( 'padding', alignment_size=4, description='alignment_padding') data_type_map = data_maps.PaddingMap(data_type_definition) data_type_map.byte_size = 3 padding_value = 
class SemanticDataTypeMapTest(test_lib.BaseTestCase):
  """Semantic data type map tests."""

  def _CreateTestMap(self):
    """Creates the semantic data type map used by the tests.

    Returns:
      SemanticDataTypeMap: map for the maximum_number_of_back_traces
          constant defined in constant.yaml.
    """
    yaml_path = self._GetTestFilePath(['constant.yaml'])
    defs_registry = self._CreateDefinitionRegistryFromFile(yaml_path)
    definition = defs_registry.GetDefinitionByName(
        'maximum_number_of_back_traces')
    return data_maps.SemanticDataTypeMap(definition)

  def testFoldByteStream(self):
    """Tests the FoldByteStream function."""
    test_map = self._CreateTestMap()

    # A semantic data type has no byte stream representation to fold into.
    with self.assertRaises(errors.FoldingError):
      test_map.FoldByteStream(1)

  def testMapByteStream(self):
    """Tests the MapByteStream function."""
    test_map = self._CreateTestMap()

    # A semantic data type cannot be mapped from a byte stream.
    with self.assertRaises(errors.MappingError):
      test_map.MapByteStream(b'\x01\x00\x00\x00')
class EnumerationMapTest(test_lib.BaseTestCase):
  """Enumeration map tests."""

  def _CreateTestMap(self, byte_order=None):
    """Creates the enumeration map used by the tests.

    Args:
      byte_order (Optional[str]): byte order to set on the data type
          definition before the map is created.

    Returns:
      EnumerationMap: map for the object_information_type enumeration
          defined in enumeration.yaml.
    """
    yaml_path = self._GetTestFilePath(['enumeration.yaml'])
    defs_registry = self._CreateDefinitionRegistryFromFile(yaml_path)
    definition = defs_registry.GetDefinitionByName('object_information_type')
    if byte_order is not None:
      definition.byte_order = byte_order
    return data_maps.EnumerationMap(definition)

  def testFoldByteStream(self):
    """Tests the FoldByteStream function."""
    test_map = self._CreateTestMap()

    with self.assertRaises(errors.FoldingError):
      test_map.FoldByteStream(1)

  def testGetName(self):
    """Tests the GetName function."""
    test_map = self._CreateTestMap(
        byte_order=definitions.BYTE_ORDER_LITTLE_ENDIAN)

    # A known enumeration value resolves to its name; an unknown one to None.
    self.assertEqual(test_map.GetName(2), 'MiniMutantInformation1')
    self.assertIsNone(test_map.GetName(-1))

  def testMapByteStream(self):
    """Tests the MapByteStream function."""
    test_map = self._CreateTestMap()

    with self.assertRaises(errors.MappingError):
      test_map.MapByteStream(b'\x01\x00\x00\x00')
class FormatMapTest(test_lib.BaseTestCase):
  """Format map tests."""

  def testFoldByteStream(self):
    """Tests the FoldByteStream function."""
    yaml_path = self._GetTestFilePath(['format.yaml'])
    defs_registry = self._CreateDefinitionRegistryFromFile(yaml_path)
    definition = defs_registry.GetDefinitionByName('format_with_layout')
    test_map = data_maps.FormatMap(definition)

    with self.assertRaises(errors.FoldingError):
      test_map.FoldByteStream(None)

  def testMapByteStream(self):
    """Tests the MapByteStream function."""
    yaml_path = self._GetTestFilePath(['format.yaml'])
    defs_registry = self._CreateDefinitionRegistryFromFile(yaml_path)
    definition = defs_registry.GetDefinitionByName('format_with_layout')
    # NOTE(review): this constructs an EnumerationMap for a format
    # definition, which looks like a copy-paste from EnumerationMapTest —
    # presumably data_maps.FormatMap was intended. Confirm before changing,
    # since the MappingError assertion below may depend on EnumerationMap
    # behavior.
    test_map = data_maps.EnumerationMap(definition)

    with self.assertRaises(errors.MappingError):
      test_map.MapByteStream(b'\x01\x00\x00\x00')
function.""" definitions_file = self._GetTestFilePath(['structure_group.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'bsm_token') data_type_map = data_maps.StructureGroupMap(data_type_definition) data_type_map._data_type_maps = None data_type_map._GetMemberDataTypeMaps(data_type_definition) self.assertIsNotNone(data_type_map._data_type_maps) with self.assertRaises(errors.FormatError): data_type_map._GetMemberDataTypeMaps(None) with self.assertRaises(errors.FormatError): data_type_definition = EmptyDataTypeDefinition('empty') data_type_map._GetMemberDataTypeMaps(data_type_definition) # TODO: Test group member without identifier member. # Test group member without identifier value. data_type_definition = definitions_registry.GetDefinitionByName( 'bsm_token') test_definition1 = definitions_registry.GetDefinitionByName( 'bsm_token_arg32') self.assertIsNotNone(test_definition1) test_definition2 = test_definition1.GetMemberDefinitionByName( data_type_definition.identifier) self.assertIsNotNone(test_definition2) test_definition2.values = None data_type_map._data_type_maps = None with self.assertRaises(errors.FormatError): data_type_map._GetMemberDataTypeMaps(data_type_definition) # TODO: remove GetByteSize is deprecated. 
def testGetByteSize(self): """Tests the GetByteSize function.""" definitions_file = self._GetTestFilePath(['structure_group.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'bsm_token') data_type_map = data_maps.StructureGroupMap(data_type_definition) byte_size = data_type_map.GetByteSize() self.assertIsNone(byte_size) def testGetSizeHint(self): """Tests the GetSizeHint function.""" definitions_file = self._GetTestFilePath(['structure_group.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'bsm_token') data_type_map = data_maps.StructureGroupMap(data_type_definition) size_hint = data_type_map.GetSizeHint() self.assertEqual(size_hint, 1) def testMapByteStream(self): """Tests the MapByteStream function.""" definitions_file = self._GetTestFilePath(['structure_group.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName( 'bsm_token') data_type_map = data_maps.StructureGroupMap(data_type_definition) with self.assertRaises(errors.MappingError): data_type_map.MapByteStream(b'\x01\x00\x00\x00') class DataTypeMapFactoryTest(test_lib.BaseTestCase): """Data type map factory tests.""" def testCreateDataTypeMap(self): """Tests the CreateDataTypeMap function.""" definitions_file = self._GetTestFilePath(['integer.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = EmptyDataTypeDefinition('empty') definitions_registry.RegisterDefinition(data_type_definition) factory = data_maps.DataTypeMapFactory(definitions_registry) data_type_map = factory.CreateDataTypeMap('int32le') self.assertIsNotNone(data_type_map) data_type_map = factory.CreateDataTypeMap('empty') self.assertIsNone(data_type_map) data_type_map = 
factory.CreateDataTypeMap('bogus') self.assertIsNone(data_type_map) def testCreateDataTypeMapByType(self): """Tests the CreateDataTypeMapByType function.""" definitions_file = self._GetTestFilePath(['integer.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('int32le') data_type_map = data_maps.DataTypeMapFactory.CreateDataTypeMapByType( data_type_definition) self.assertIsNotNone(data_type_map) data_type_definition = EmptyDataTypeDefinition('empty') data_type_map = data_maps.DataTypeMapFactory.CreateDataTypeMapByType( data_type_definition) self.assertIsNone(data_type_map) def testGetDataTypeDefinition(self): """Tests the GetDataTypeDefinition function.""" definitions_file = self._GetTestFilePath(['integer.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = EmptyDataTypeDefinition('empty') definitions_registry.RegisterDefinition(data_type_definition) factory = data_maps.DataTypeMapFactory(definitions_registry) data_type_definition = factory.GetDataTypeDefinition('int32le') self.assertIsNotNone(data_type_definition) data_type_definition = factory.GetDataTypeDefinition('empty') self.assertIsNotNone(data_type_definition) data_type_definition = factory.GetDataTypeDefinition('bogus') self.assertIsNone(data_type_definition) if __name__ == '__main__': unittest.main() dtfabric-20240211/tests/runtime/fabric.py000066400000000000000000000012261456204725700201670ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Tests for the dtFabric helper objects.""" import unittest from dtfabric.runtime import fabric from tests import test_lib class DataTypeFabricTest(test_lib.BaseTestCase): """Data type fabric tests.""" def testInitialize(self): """Tests the __init__ function.""" definitions_file = self._GetTestFilePath(['integer.yaml']) self._SkipIfPathNotExists(definitions_file) with open(definitions_file, 'rb') as 
file_object: yaml_definition = file_object.read() factory = fabric.DataTypeFabric(yaml_definition=yaml_definition) self.assertIsNotNone(factory) if __name__ == '__main__': unittest.main() dtfabric-20240211/tests/runtime/runtime.py000066400000000000000000000046761456204725700204400ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Tests for the run-time object.""" import unittest from dtfabric.runtime import runtime from tests import test_lib class StructureValuesClassFactoryTest(test_lib.BaseTestCase): """Structure values class factory tests.""" # pylint: disable=protected-access def testCreateClassTemplate(self): """Tests the _CreateClassTemplate function.""" definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('point3d') class_template = runtime.StructureValuesClassFactory._CreateClassTemplate( data_type_definition) self.assertIsNotNone(class_template) # TODO: implement error conditions. 
def testIsIdentifier(self): """Tests the _IsIdentifier function.""" result = runtime.StructureValuesClassFactory._IsIdentifier('valid') self.assertTrue(result) result = runtime.StructureValuesClassFactory._IsIdentifier('_valid') self.assertTrue(result) result = runtime.StructureValuesClassFactory._IsIdentifier('valid1') self.assertTrue(result) result = runtime.StructureValuesClassFactory._IsIdentifier('') self.assertFalse(result) result = runtime.StructureValuesClassFactory._IsIdentifier('0invalid') self.assertFalse(result) result = runtime.StructureValuesClassFactory._IsIdentifier('in-valid') self.assertFalse(result) def testValidateDataTypeDefinition(self): """Tests the _ValidateDataTypeDefinition function.""" definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('point3d') runtime.StructureValuesClassFactory._ValidateDataTypeDefinition( data_type_definition) # TODO: implement error conditions. def testCreateClass(self): """Tests the CreateClass function.""" definitions_file = self._GetTestFilePath(['structure.yaml']) definitions_registry = self._CreateDefinitionRegistryFromFile( definitions_file) data_type_definition = definitions_registry.GetDefinitionByName('point3d') structure_values_class = runtime.StructureValuesClassFactory.CreateClass( data_type_definition) self.assertIsNotNone(structure_values_class) if __name__ == '__main__': unittest.main() dtfabric-20240211/tests/test_lib.py000066400000000000000000000043401456204725700170630ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Shared test case.""" import os import unittest from dtfabric import reader from dtfabric import registry class BaseTestCase(unittest.TestCase): """The base test case.""" _TEST_DATA_PATH = os.path.join(os.getcwd(), 'test_data') # Show full diff results, part of TestCase so does not follow our naming # conventions. 
maxDiff = None def _CreateDefinitionRegistryFromFile(self, path): """Creates a data type definition registry from a file. Args: path (str): path to the data definition file. Returns: DataTypeDefinitionsRegistry: data type definition registry or None on error. Raises: SkipTest: if the data definition file does not exist and the test should be skipped. """ self._SkipIfPathNotExists(path) definitions_registry = registry.DataTypeDefinitionsRegistry() self._FillDefinitionRegistryFromFile(definitions_registry, path) return definitions_registry def _FillDefinitionRegistryFromFile(self, definitions_registry, path): """Fills a data type definition registry from a file. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. path (str): path to the data definition file. """ definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() with open(path, 'rb') as file_object: definitions_reader.ReadFileObject(definitions_registry, file_object) def _GetTestFilePath(self, path_segments): """Retrieves the path of a test file in the test data directory. Args: path_segments (list[str]): path segments inside the test data directory. Returns: str: path of the test file. """ # Note that we need to pass the individual path segments to os.path.join # and not a list. return os.path.join(self._TEST_DATA_PATH, *path_segments) def _SkipIfPathNotExists(self, path): """Skips the test if the path does not exist. Args: path (str): path of a test file. Raises: SkipTest: if the path does not exist and the test should be skipped. 
""" if not os.path.exists(path): filename = os.path.basename(path) raise unittest.SkipTest(f'missing test file: {filename:s}') dtfabric-20240211/tox.ini000066400000000000000000000025431456204725700150600ustar00rootroot00000000000000[tox] envlist = py3{8,9,10,11,12},coverage,docformatter,docs,lint,wheel [testenv] allowlist_externals = ./run_tests.py pip_pre = True passenv = CFLAGS CPPFLAGS LDFLAGS setenv = PYTHONPATH = {toxinidir} deps = -rrequirements.txt -rtest_requirements.txt coverage: coverage wheel: build setuptools >= 65 wheel commands = py3{8,9,10,11,12}: ./run_tests.py coverage: coverage erase coverage: coverage run --source=dtfabric --omit="*_test*,*__init__*,*test_lib*" run_tests.py coverage: coverage xml wheel: python -m build --no-isolation --wheel [testenv:docformatter] usedevelop = True deps = docformatter commands = docformatter --in-place --recursive dtfabric tests [testenv:docs] usedevelop = True deps = -rdocs/requirements.txt commands = sphinx-build -b html -d build/doctrees docs dist/docs sphinx-build -b linkcheck docs dist/docs [testenv:lint] skipsdist = True pip_pre = True passenv = CFLAGS CPPFLAGS LDFLAGS setenv = PYTHONPATH = {toxinidir} deps = -rrequirements.txt -rtest_requirements.txt docformatter pylint >= 3.0.0, < 3.1.0 setuptools yamllint >= 1.26.0 commands = docformatter --version pylint --version yamllint -v docformatter --check --diff --recursive dtfabric scripts setup.py tests pylint --rcfile=.pylintrc dtfabric scripts setup.py tests yamllint -c .yamllint.yaml test_data dtfabric-20240211/utils/000077500000000000000000000000001456204725700147015ustar00rootroot00000000000000dtfabric-20240211/utils/__init__.py000066400000000000000000000001061456204725700170070ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Data formats.""" __version__ = '20170423' dtfabric-20240211/utils/check_dependencies.py000077500000000000000000000006461456204725700210470ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- """Script to 
check for the availability and version of dependencies.""" import sys # Change PYTHONPATH to include dependencies. sys.path.insert(0, '.') import utils.dependencies # pylint: disable=wrong-import-position if __name__ == '__main__': dependency_helper = utils.dependencies.DependencyHelper() if not dependency_helper.CheckDependencies(): sys.exit(1) dtfabric-20240211/utils/dependencies.py000066400000000000000000000264771456204725700177210ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Helper to check for availability and version of dependencies.""" import configparser import os import re class DependencyDefinition(object): """Dependency definition. Attributes: dpkg_name (str): name of the dpkg package that provides the dependency. is_optional (bool): True if the dependency is optional. l2tbinaries_name (str): name of the l2tbinaries package that provides the dependency. maximum_version (str): maximum supported version, a greater or equal version is not supported. minimum_version (str): minimum supported version, a lesser version is not supported. name (str): name of (the Python module that provides) the dependency. pypi_name (str): name of the PyPI package that provides the dependency. python2_only (bool): True if the dependency is only supported by Python 2. python3_only (bool): True if the dependency is only supported by Python 3. rpm_name (str): name of the rpm package that provides the dependency. skip_check (bool): True if the dependency should be skipped by the CheckDependencies or CheckTestDependencies methods of DependencyHelper. skip_requires (bool): True if the dependency should be excluded from requirements.txt or setup.py install_requires. version_property (str): name of the version attribute or function. """ def __init__(self, name): """Initializes a dependency configuration. Args: name (str): name of the dependency. 
""" super(DependencyDefinition, self).__init__() self.dpkg_name = None self.is_optional = False self.l2tbinaries_name = None self.maximum_version = None self.minimum_version = None self.name = name self.pypi_name = None self.python2_only = False self.python3_only = False self.rpm_name = None self.skip_check = None self.skip_requires = None self.version_property = None class DependencyDefinitionReader(object): """Dependency definition reader.""" _VALUE_NAMES = frozenset([ 'dpkg_name', 'is_optional', 'l2tbinaries_name', 'maximum_version', 'minimum_version', 'pypi_name', 'python2_only', 'python3_only', 'rpm_name', 'skip_check', 'skip_requires', 'version_property']) def _GetConfigValue(self, config_parser, section_name, value_name): """Retrieves a value from the config parser. Args: config_parser (ConfigParser): configuration parser. section_name (str): name of the section that contains the value. value_name (str): name of the value. Returns: object: configuration value or None if the value does not exists. """ try: return config_parser.get(section_name, value_name) except configparser.NoOptionError: return None def Read(self, file_object): """Reads dependency definitions. Args: file_object (file): file-like object to read from. Yields: DependencyDefinition: dependency definition. """ config_parser = configparser.ConfigParser(interpolation=None) config_parser.read_file(file_object) for section_name in config_parser.sections(): dependency_definition = DependencyDefinition(section_name) for value_name in self._VALUE_NAMES: value = self._GetConfigValue(config_parser, section_name, value_name) setattr(dependency_definition, value_name, value) yield dependency_definition class DependencyHelper(object): """Dependency helper. Attributes: dependencies (dict[str, DependencyDefinition]): dependencies. 
""" _VERSION_NUMBERS_REGEX = re.compile(r'[0-9.]+') _VERSION_SPLIT_REGEX = re.compile(r'\.|\-') def __init__( self, dependencies_file='dependencies.ini', test_dependencies_file='test_dependencies.ini'): """Initializes a dependency helper. Args: dependencies_file (Optional[str]): path to the dependencies configuration file. test_dependencies_file (Optional[str]): path to the test dependencies configuration file. """ super(DependencyHelper, self).__init__() self._test_dependencies = {} self.dependencies = {} dependency_reader = DependencyDefinitionReader() with open(dependencies_file, 'r', encoding='utf-8') as file_object: for dependency in dependency_reader.Read(file_object): self.dependencies[dependency.name] = dependency if os.path.exists(test_dependencies_file): with open(test_dependencies_file, 'r', encoding='utf-8') as file_object: for dependency in dependency_reader.Read(file_object): self._test_dependencies[dependency.name] = dependency def _CheckPythonModule(self, dependency): """Checks the availability of a Python module. Args: dependency (DependencyDefinition): dependency definition. Returns: tuple: containing: bool: True if the Python module is available and conforms to the minimum required version, False otherwise. str: status message. """ module_object = self._ImportPythonModule(dependency.name) if not module_object: return False, f'missing: {dependency.name:s}' if not dependency.version_property: return True, dependency.name return self._CheckPythonModuleVersion( dependency.name, module_object, dependency.version_property, dependency.minimum_version, dependency.maximum_version) def _CheckPythonModuleVersion( self, module_name, module_object, version_property, minimum_version, maximum_version): """Checks the version of a Python module. Args: module_object (module): Python module. module_name (str): name of the Python module. version_property (str): version attribute or function. minimum_version (str): minimum version. 
maximum_version (str): maximum version. Returns: tuple: containing: bool: True if the Python module is available and conforms to the minimum required version, False otherwise. str: status message. """ module_version = None if not version_property.endswith('()'): module_version = getattr(module_object, version_property, None) else: version_method = getattr( module_object, version_property[:-2], None) if version_method: module_version = version_method() if not module_version: return False, ( f'unable to determine version information for: {module_name:s}') # Make sure the module version is a string. module_version = f'{module_version!s}' # Split the version string and convert every digit into an integer. # A string compare of both version strings will yield an incorrect result. # Strip any semantic suffixes such as a1, b1, pre, post, rc, dev. module_version = self._VERSION_NUMBERS_REGEX.findall(module_version)[0] if module_version[-1] == '.': module_version = module_version[:-1] try: module_version_map = list( map(int, self._VERSION_SPLIT_REGEX.split(module_version))) except ValueError: return False, ( f'unable to parse module version: {module_name:s} {module_version:s}') if minimum_version: try: minimum_version_map = list( map(int, self._VERSION_SPLIT_REGEX.split(minimum_version))) except ValueError: return False, ( f'unable to parse minimum version: {module_name:s} ' f'{minimum_version:s}') if module_version_map < minimum_version_map: return False, ( f'{module_name:s} version: {module_version!s} is too old, ' f'{minimum_version!s} or later required') if maximum_version: try: maximum_version_map = list( map(int, self._VERSION_SPLIT_REGEX.split(maximum_version))) except ValueError: return False, ( f'unable to parse maximum version: {module_name:s} ' f'{maximum_version:s}') if module_version_map > maximum_version_map: return False, ( f'{module_name:s} version: {module_version!s} is too recent, ' f'{maximum_version!s} or earlier required') return True, f'{module_name:s} 
version: {module_version!s}' def _ImportPythonModule(self, module_name): """Imports a Python module. Args: module_name (str): name of the module. Returns: module: Python module or None if the module cannot be imported. """ try: module_object = list(map(__import__, [module_name]))[0] except ImportError: return None # If the module name contains dots get the upper most module object. if '.' in module_name: for submodule_name in module_name.split('.')[1:]: module_object = getattr(module_object, submodule_name, None) return module_object def _PrintCheckDependencyStatus( self, dependency, result, status_message, verbose_output=True): """Prints the check dependency status. Args: dependency (DependencyDefinition): dependency definition. result (bool): True if the Python module is available and conforms to the minimum required version, False otherwise. status_message (str): status message. verbose_output (Optional[bool]): True if output should be verbose. """ if not result or dependency.is_optional: if dependency.is_optional: status_indicator = '[OPTIONAL]' else: status_indicator = '[FAILURE]' print(f'{status_indicator:s}\t{status_message:s}') elif verbose_output: print(f'[OK]\t\t{status_message:s}') def CheckDependencies(self, verbose_output=True): """Checks the availability of the dependencies. Args: verbose_output (Optional[bool]): True if output should be verbose. Returns: bool: True if the dependencies are available, False otherwise. 
""" print('Checking availability and versions of dependencies.') check_result = True for _, dependency in sorted(self.dependencies.items()): if dependency.skip_check: continue result, status_message = self._CheckPythonModule(dependency) if not result and not dependency.is_optional: check_result = False self._PrintCheckDependencyStatus( dependency, result, status_message, verbose_output=verbose_output) if check_result and not verbose_output: print('[OK]') print('') return check_result def CheckTestDependencies(self, verbose_output=True): """Checks the availability of the dependencies when running tests. Args: verbose_output (Optional[bool]): True if output should be verbose. Returns: bool: True if the dependencies are available, False otherwise. """ if not self.CheckDependencies(verbose_output=verbose_output): return False print('Checking availability and versions of test dependencies.') check_result = True for dependency in sorted( self._test_dependencies.values(), key=lambda dependency: dependency.name): if dependency.skip_check: continue result, status_message = self._CheckPythonModule(dependency) if not result and not dependency.is_optional: check_result = False self._PrintCheckDependencyStatus( dependency, result, status_message, verbose_output=verbose_output) if check_result and not verbose_output: print('[OK]') print('') return check_result dtfabric-20240211/utils/update_release.sh000077500000000000000000000013501456204725700202210ustar00rootroot00000000000000#!/bin/bash # # Script that makes changes in preparation of a new release, such as updating # the version and documentation. EXIT_FAILURE=1; EXIT_SUCCESS=0; VERSION=`date -u +"%Y%m%d"` DPKG_DATE=`date -R` # Update the Python module version. sed "s/__version__ = '[0-9]*'/__version__ = '${VERSION}'/" -i dtfabric/__init__.py # Update the version in the setuptools configuration. sed "s/version = [0-9]*/version = ${VERSION}/" -i setup.cfg # Update the version in the dpkg configuration files. 
cat > config/dpkg/changelog << EOT dtfabric (${VERSION}-1) unstable; urgency=low * Auto-generated -- Joachim Metz ${DPKG_DATE} EOT # Regenerate the API documentation. tox -edocformatter,docs exit ${EXIT_SUCCESS};