[rtems-central commit] directorystate: New

Sebastian Huber sebh at rtems.org
Tue Nov 21 13:35:41 UTC 2023


Module:    rtems-central
Branch:    master
Commit:    6302cd9eeea1db1829d463d8b0a57f4c4c2dbecb
Changeset: http://git.rtems.org/rtems-central/commit/?id=6302cd9eeea1db1829d463d8b0a57f4c4c2dbecb

Author:    Sebastian Huber <sebastian.huber at embedded-brains.de>
Date:      Tue Nov 21 11:13:16 2023 +0100

directorystate: New

---

 rtemsspec/directorystate.py                        | 460 +++++++++++++++++++++
 rtemsspec/packagebuildfactory.py                   |   5 +
 rtemsspec/tests/test-files/archive.tar.xz          | Bin 0 -> 372 bytes
 rtemsspec/tests/test_directorystate.py             | 361 ++++++++++++++++
 rtemsspec/util.py                                  |  15 +
 spec-qdp/spec/qdp-copyrights-by-license.yml        |  30 ++
 spec-qdp/spec/qdp-directory-state-exclude-role.yml |  25 ++
 spec-qdp/spec/qdp-directory-state-generic.yml      |  22 +
 spec-qdp/spec/qdp-directory-state-pattern-list.yml |  16 +
 spec-qdp/spec/qdp-directory-state-patterns.yml     |  37 ++
 spec-qdp/spec/qdp-directory-state.yml              |  48 +++
 spec-qdp/spec/qdp-file-state-list.yml              |  16 +
 spec-qdp/spec/qdp-file-state.yml                   |  30 ++
 spec-qdp/spec/qdp-optional-sha512.yml              |  20 +
 spec-qdp/spec/qdp-repository-role.yml              |  23 ++
 spec-qdp/spec/qdp-repository.yml                   |  50 +++
 spec-qdp/spec/qdp-sha512.yml                       |  18 +
 spec-qdp/spec/qdp-unpacked-archive.yml             |  46 +++
 18 files changed, 1222 insertions(+)

diff --git a/rtemsspec/directorystate.py b/rtemsspec/directorystate.py
new file mode 100644
index 00000000..0a966fb7
--- /dev/null
+++ b/rtemsspec/directorystate.py
@@ -0,0 +1,460 @@
+# SPDX-License-Identifier: BSD-2-Clause
+""" This module provides support for directory states. """
+
+# Copyright (C) 2020, 2023 embedded brains GmbH & Co. KG
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+# 1. Redistributions of source code must retain the above copyright
+#    notice, this list of conditions and the following disclaimer.
+# 2. Redistributions in binary form must reproduce the above copyright
+#    notice, this list of conditions and the following disclaimer in the
+#    documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+
+import base64
+import fnmatch
+import hashlib
+import json
+import logging
+import os
+from pathlib import Path
+import shutil
+import tarfile
+from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, \
+    Set, Tuple, Union
+
+from rtemsspec.items import Item, ItemGetValueContext, Link
+from rtemsspec.packagebuild import BuildItem, BuildItemFactory, \
+    PackageBuildDirector
+from rtemsspec.util import hash_file
+
+_Path = Union[Path, str]
+
+
+def _get_file_path(ctx: ItemGetValueContext) -> str:
+    index = max(ctx.index, 0)
+    return f"{ctx.item['directory']}/{ctx.item['files'][index]['file']}"
+
+
+def _get_file_path_without_extension(ctx: ItemGetValueContext) -> str:
+    return os.path.splitext(_get_file_path(ctx))[0]
+
+
+def _file_nop(_source: _Path, _target: _Path) -> None:
+    pass
+
+
+class DirectoryState(BuildItem):
+    """ Maintains a directory state. """
+
+    # pylint: disable=too-many-public-methods
+    @classmethod
+    def prepare_factory(cls, factory: BuildItemFactory,
+                        type_name: str) -> None:
+        BuildItem.prepare_factory(factory, type_name)
+        factory.add_get_value(f"{type_name}:/file", _get_file_path)
+        factory.add_get_value(f"{type_name}:/file-without-extension",
+                              _get_file_path_without_extension)
+
+    def __init__(self, director: PackageBuildDirector, item: Item):
+        super().__init__(director, item)
+        self._discarded_files: Set[str] = set()
+        self._files: Dict[str, Union[str, None]] = dict(
+            (file_info["file"], file_info["hash"])
+            for file_info in item["files"])
+
+    def __iter__(self):
+        yield from self.files()
+
+    @property
+    def directory(self) -> str:
+        """ Returns the base directory of the directory state. """
+        return self["directory"]
+
+    @property
+    def digest(self) -> str:
+        the_digest = self.item["hash"]
+        if the_digest is None:
+            raise ValueError(f"{self.uid}: directory state hash is not set")
+        return the_digest
+
+    def _get_hash(self, _base: str, relative_file_path: str) -> str:
+        digest = self._files[relative_file_path]
+        assert digest is not None
+        return digest
+
+    def _hash_file(self, base: str, relative_file_path: str) -> str:
+        file_path = os.path.join(base, relative_file_path)
+        digest = hash_file(file_path)
+        logging.debug("%s: file '%s' hash is %s", self.uid, file_path, digest)
+        self._files[relative_file_path] = digest
+        return digest
+
+    def _add_hashes(self, base: str, hash_file_handler: Callable[[str, str],
+                                                                 str]) -> str:
+        overall_hash = hashlib.sha512()
+        overall_hash.update(base.encode("utf-8"))
+        for relative_file_path in sorted(self._files):
+            digest = hash_file_handler(base, relative_file_path)
+            overall_hash.update(relative_file_path.encode("utf-8"))
+            overall_hash.update(digest.encode("utf-8"))
+        self._update_item_files()
+        digest = base64.urlsafe_b64encode(
+            overall_hash.digest()).decode("ascii")
+        logging.info("%s: directory '%s' hash is %s", self.uid, base, digest)
+        self.item["hash"] = digest
+        return digest
+
+    def _directory_state_exclude(self, base: str, files: Set[str]) -> None:
+        for exclude_item in self.item.parents("directory-state-exclude"):
+            exclude_state = self.director[exclude_item.uid]
+            assert isinstance(exclude_state, DirectoryState)
+            exclude_files = files.intersection(
+                os.path.relpath(path, base) for path in exclude_state)
+            logging.info(
+                "%s: exclude files of directory state %s: %s", self.uid,
+                exclude_item.uid,
+                [os.path.join(base, path) for path in sorted(exclude_files)])
+            files.difference_update(exclude_files)
+
+    def _load_from_patterns(self, base: str,
+                            patterns: List[Dict[str, Any]]) -> None:
+        logging.info("%s: load pattern defined directory state: %s", self.uid,
+                     base)
+        files: Set[str] = set()
+        base_path = Path(base)
+        for include_exclude in patterns:
+            include = include_exclude["include"]
+            logging.info("%s: add files matching '%s' in: %s", self.uid,
+                         include, base)
+            more = set(
+                os.path.relpath(path, base) for path in base_path.glob(include)
+                if not path.is_dir())
+            for exclude in include_exclude["exclude"]:
+                exclude_files = set(
+                    path for path in more
+                    if fnmatch.fnmatch(os.path.join("/", path), exclude))
+                logging.info("%s: exclude files for pattern '%s': %s",
+                             self.uid, exclude, [
+                                 os.path.join(base, path)
+                                 for path in sorted(exclude_files)
+                             ])
+                more.difference_update(exclude_files)
+            files.update(more)
+        self._directory_state_exclude(base, files)
+        self._files = dict.fromkeys(files, None)
+
+    def load(self) -> str:
+        """ Loads the directory state and returns the overall hash. """
+        base = self.directory
+        patterns = self.item["patterns"]
+        if patterns:
+            self._load_from_patterns(base, patterns)
+        else:
+            logging.info("%s: load explicit directory state: %s", self.uid,
+                         base)
+        return self._add_hashes(base, self._hash_file)
+
+    def lazy_load(self) -> str:
+        """
+        Loads the directory state if the overall hash is not present and
+        returns the overall hash.
+        """
+        digest = self.item["hash"]
+        if digest is not None:
+            return digest
+        return self.load()
+
+    @property
+    def file(self) -> str:
+        """ Is the path of the first file of the file state. """
+        return next(self.files())
+
+    def files(self, base: Optional[str] = None) -> Iterator[str]:
+        """ Yields the file paths of the directory state. """
+        if base is None:
+            base = self.directory
+        for file_path in sorted(self._files):
+            yield os.path.join(base, file_path)
+
+    def files_and_hashes(
+            self,
+            base: Optional[str] = None) -> Iterator[Tuple[str, Optional[str]]]:
+        """ Yields the file paths and hashes of the directory state. """
+        if base is None:
+            base = self.directory
+        for file_path, file_hash in sorted(self._files.items()):
+            yield os.path.join(base, file_path), file_hash
+
+    def compact(self) -> None:
+        """
+        Removes the common prefix from the files and adds it to the base
+        directory.
+        """
+        prefix = os.path.commonprefix(list(self._files.keys())).rstrip("/")
+        if prefix and not os.path.isabs(prefix):
+            self.item["directory"] = os.path.join(self.item["directory"],
+                                                  prefix)
+            self.item["hash"] = None
+            self._files = dict(
+                (os.path.relpath(path, prefix), None) for path in self._files)
+            self._update_item_files()
+
+    def _update_item_files(self):
+        self.item["files"] = list({
+            "file": path,
+            "hash": digest
+        } for path, digest in sorted(self._files.items()))
+
+    def clear(self) -> None:
+        """ Clears the file set of the directory state. """
+        logging.info("%s: clear directory state", self.uid)
+        self.item["hash"] = None
+        self._files.clear()
+        self._update_item_files()
+
+    def invalidate(self) -> None:
+        """ Invalidates the directory state. """
+        logging.info("%s: invalidate directory state", self.uid)
+        self.item["hash"] = None
+        if self.item["patterns"]:
+            self._files.clear()
+        else:
+            self._files = dict.fromkeys(self._files.keys(), None)
+        self._update_item_files()
+
+    def remove_files(self) -> None:
+        """ Removes the files of the directory state. """
+        for file in self.files():
+            try:
+                logging.info("%s: remove: %s", self.uid, file)
+                os.remove(file)
+            except FileNotFoundError:
+                if self.item["patterns"]:
+                    logging.warning("%s: file not found: %s", self.uid, file)
+                else:
+                    logging.debug("%s: file not found: %s", self.uid, file)
+
+    def add_files(self, files: Iterable[_Path]) -> None:
+        """ Adds the files to the file set of the directory state. """
+        self.item["hash"] = None
+        more = set(os.path.normpath(name) for name in files)
+        self._directory_state_exclude(self.directory, more)
+        self._files.update(dict.fromkeys(more, None))
+        self._update_item_files()
+
+    def set_files(self, files: Iterable[_Path]) -> None:
+        """ Sets the file set of the directory state to the files. """
+        self.clear()
+        self.add_files(files)
+
+    def _copy_file(self, source: _Path, target: _Path) -> None:
+        logging.info("%s: copy '%s' to '%s'", self.uid, source, target)
+        os.makedirs(os.path.dirname(target), exist_ok=True)
+        shutil.copy2(source, target)
+
+    def _move_file(self, source: _Path, target: _Path) -> None:
+        logging.info("%s: move '%s' to '%s'", self.uid, source, target)
+        os.makedirs(os.path.dirname(target), exist_ok=True)
+        os.replace(source, target)
+
+    def copy_file(self, source: _Path, target: _Path) -> None:
+        """
+        Copies the file from the source path to the target path.
+
+        Adds the target file to the file set of the directory state.  The
+        target path is relative to the base directory of the directory state.
+        """
+        self._copy_file(source, os.path.join(self.directory, target))
+        self.add_files([target])
+
+    def copy_files(self,
+                   root_dir: _Path,
+                   files: Iterable[_Path],
+                   prefix: _Path = ".") -> None:
+        """
+        Copies the files relative to the root directory to the base directory
+        of the directory state using the prefix.
+
+        The base directory of the directory state and the prefix are prepended
+        to the file path for each file before it is added to the directory
+        state.  Adds the target files to the file set of the directory state.
+        """
+        file_list: List[str] = []
+        base = self.directory
+        for name in files:
+            file_source = os.path.join(root_dir, name)
+            file_list_path = os.path.join(prefix, name)
+            file_list.append(file_list_path)
+            file_target = os.path.join(base, file_list_path)
+            self._copy_file(file_source, file_target)
+        self.add_files(file_list)
+
+    def _add_tree(self,
+                  root_dir: _Path,
+                  prefix: _Path,
+                  file_op: Callable[[_Path, _Path], None],
+                  excludes: Optional[List[str]] = None) -> None:
+        file_list: List[str] = []
+        base = self.directory
+        for path, _, files in os.walk(os.path.abspath(root_dir)):
+            for name in files:
+                file_source = os.path.join(path, name)
+                file_list_path = os.path.join(
+                    prefix, os.path.relpath(file_source, root_dir))
+                file_target = os.path.join(base, file_list_path)
+                if excludes is None:
+                    file_list.append(file_list_path)
+                    file_op(file_source, file_target)
+                else:
+                    match_path = os.path.normpath(
+                        os.path.join("/", file_list_path))
+                    for exclude in excludes:
+                        if fnmatch.fnmatch(match_path, exclude):
+                            logging.info(
+                                "%s: exclude file for pattern '%s': %s",
+                                self.uid, exclude, file_target)
+                            break
+                    else:
+                        file_list.append(file_list_path)
+                        file_op(file_source, file_target)
+        self.add_files(file_list)
+
+    def add_tree(self,
+                 root_dir: _Path,
+                 prefix: _Path = ".",
+                 excludes: Optional[List[str]] = None) -> None:
+        """
+        Adds the files of the directory tree starting at the root directory
+        to the file set of the directory state.
+
+        The added file path is relative to the root directory.  The prefix is
+        prepended to the file path for each file before it is added to the
+        directory state.  The files are not copied or moved.
+        """
+        self._add_tree(root_dir, prefix, _file_nop, excludes)
+
+    def copy_tree(self,
+                  root_dir: _Path,
+                  prefix: _Path = ".",
+                  excludes: Optional[List[str]] = None) -> None:
+        """
+        Adds the files of the directory tree starting at the root directory
+        to the file set of the directory state.
+
+        The added file path is relative to the root directory.  The prefix is
+        prepended to the file path for each file before it is added to the
+        directory state.  The files are copied.
+        """
+        self._add_tree(root_dir, prefix, self._copy_file, excludes)
+
+    def move_tree(self,
+                  root_dir: _Path,
+                  prefix: _Path = ".",
+                  excludes: Optional[List[str]] = None) -> None:
+        """
+        Adds the files of the directory tree starting at the root directory
+        to the file set of the directory state.
+
+        The added file path is relative to the root directory.  The prefix is
+        prepended to the file path for each file before it is added to the
+        directory state.  The files are moved.
+        """
+        self._add_tree(root_dir, prefix, self._move_file, excludes)
+
+    def add_tarfile_members(self, archive: _Path, prefix: _Path,
+                            extract: bool) -> None:
+        """
+        Appends the members of the archive to the file list of the directory
+        state.
+
+        For each member the prefix path and the member path are joined and then
+        added to the file list of the directory state.  If extract is true,
+        then the members of the archive are extracted to the prefix path.
+        """
+        extract_info = "and extract " if extract else ""
+        logging.info("%s: add %smembers of '%s' using prefix '%s'", self.uid,
+                     extract_info, archive, prefix)
+        with tarfile.open(archive, "r") as tar_file:
+            base = self.directory
+            file_list = [
+                os.path.relpath(os.path.join(prefix, info.name), base)
+                for info in tar_file.getmembers() if not info.isdir()
+            ]
+            if extract:
+                tar_file.extractall(prefix)
+            self.add_files(file_list)
+
+    def lazy_clone(self, other: "DirectoryState") -> str:
+        """ Lazily clones the directory state. """
+        logging.info("%s: lazy clone from: %s", self.uid, other.uid)
+        # pylint: disable=protected-access
+        current = set(self._files.keys())
+        new = set(other._files.keys())
+        base = self.directory
+        other_base = other.directory
+        for file in sorted(current.difference(new)):
+            target = os.path.join(base, file)
+            try:
+                logging.info("%s: remove: %s", self.uid, target)
+                os.remove(target)
+            except FileNotFoundError:
+                logging.warning("%s: file not found: %s", self.uid, target)
+        for file in sorted(new.difference(current)):
+            target = os.path.join(base, file)
+            self._copy_file(os.path.join(other_base, file), target)
+        for file in sorted(current.intersection(new)):
+            target = os.path.join(base, file)
+            if self._files[file] == other._files[file]:
+                logging.info("%s: keep as is: %s", self.uid, target)
+            else:
+                self._copy_file(os.path.join(other_base, file), target)
+        self._files = other._files.copy()
+        return self._add_hashes(base, self._get_hash)
+
+    def json_dump(self, data: Any) -> None:
+        """ Dumps the data into the file of the directory state. """
+        file_path = self.file
+        os.makedirs(os.path.dirname(file_path), exist_ok=True)
+        with open(file_path, "w", encoding="utf-8") as file:
+            json.dump(data, file, sort_keys=True, indent=2)
+
+    def json_load(self) -> Any:
+        """ Loads the data from the file of the directory state. """
+        with open(self.file, "r", encoding="utf-8") as file:
+            return json.load(file)
+
+    def save(self) -> None:
+        """ Saves the directory state to the item file. """
+        self.item.save()
+
+    def has_changed(self, link: Link) -> bool:
+        digest = self.digest
+        return link["hash"] is None or digest != link["hash"]
+
+    def discard(self) -> None:
+        """ Discards the directory state. """
+        logging.info("%s: discard", self.uid)
+        self._discarded_files = set(self._files.keys())
+        self.remove_files()
+        self.invalidate()
+        self.save()
+
+    def refresh(self) -> None:
+        """ Refreshes the directory state. """
+        logging.info("%s: refresh", self.uid)
+        self.load()
+        self.commit("Update directory state")
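
A minimal usage sketch of the new class, assuming the rtemsspec package is
importable and that a directory "spec-glossary" containing "doc.rst" exists
relative to the current working directory (the directory and file names are
taken from the test data and are illustrative here; see
rtemsspec/tests/test_directorystate.py below for the authoritative usage):

from rtemsspec.items import EmptyItemCache
from rtemsspec.packagebuild import PackageBuildDirector
from rtemsspec.packagebuildfactory import create_build_item_factory

# Illustrative in-memory directory state item with one file.
data = {
    "directory": "spec-glossary",
    "enabled-by": True,
    "files": [{"file": "doc.rst", "hash": None}],
    "hash": None,
    "links": [],
    "patterns": [],
}
item_cache = EmptyItemCache()
director = PackageBuildDirector(item_cache, create_build_item_factory())
item = item_cache.add_volatile_item("/directory-state", data)
item["_type"] = "qdp/directory-state/generic"
dir_state = director["/directory-state"]

# Hash the listed file and compute the overall directory state hash.
overall_hash = dir_state.load()
print(overall_hash)
for path, digest in dir_state.files_and_hashes():
    print(path, digest)

# The ":/file" getter registered by prepare_factory() resolves file paths.
print(dir_state.substitute("${.:/file}"))  # spec-glossary/doc.rst
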
diff --git a/rtemsspec/packagebuildfactory.py b/rtemsspec/packagebuildfactory.py
index d419e28f..8cd430ab 100644
--- a/rtemsspec/packagebuildfactory.py
+++ b/rtemsspec/packagebuildfactory.py
@@ -24,11 +24,16 @@
 # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 # POSSIBILITY OF SUCH DAMAGE.
 
+from rtemsspec.directorystate import DirectoryState
 from rtemsspec.packagebuild import BuildItemFactory, PackageVariant
 
 
 def create_build_item_factory() -> BuildItemFactory:
     """ Creates the default build item factory. """
     factory = BuildItemFactory()
+    factory.add_constructor("qdp/directory-state/generic", DirectoryState)
+    factory.add_constructor("qdp/directory-state/repository", DirectoryState)
+    factory.add_constructor("qdp/directory-state/unpacked-archive",
+                            DirectoryState)
     factory.add_constructor("qdp/variant", PackageVariant)
     return factory
diff --git a/rtemsspec/tests/test-files/archive.tar.xz b/rtemsspec/tests/test-files/archive.tar.xz
new file mode 100644
index 00000000..ba0f8e7b
Binary files /dev/null and b/rtemsspec/tests/test-files/archive.tar.xz differ
diff --git a/rtemsspec/tests/test_directorystate.py b/rtemsspec/tests/test_directorystate.py
new file mode 100644
index 00000000..3b998ad1
--- /dev/null
+++ b/rtemsspec/tests/test_directorystate.py
@@ -0,0 +1,361 @@
+# SPDX-License-Identifier: BSD-2-Clause
+""" Tests for the rtemsspec.directorystate module. """
+
+# Copyright (C) 2020, 2023 embedded brains GmbH & Co. KG
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+# 1. Redistributions of source code must retain the above copyright
+#    notice, this list of conditions and the following disclaimer.
+# 2. Redistributions in binary form must reproduce the above copyright
+#    notice, this list of conditions and the following disclaimer in the
+#    documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+
+import logging
+import os
+import pytest
+
+from rtemsspec.items import Item, EmptyItemCache, Link
+from rtemsspec.directorystate import DirectoryState
+from rtemsspec.packagebuild import BuildItemFactory, PackageBuildDirector
+from rtemsspec.tests.util import get_and_clear_log
+
+
+class _TestState(DirectoryState):
+    pass
+
+
+@pytest.fixture
+def _change_cwd():
+    cwd = os.getcwd()
+    os.chdir(os.path.dirname(__file__))
+    yield
+    os.chdir(cwd)
+
+
+_DOC_RST_HASH = "Cm41zmS2o7TF6FBxnQxWxmPDVhufFst7pFkkQriQnEOwJWXS_zjEwKLVsgBT4L-v1iWzRUCilifIdY4uqkg5Gw=="
+_T_YML_HASH = "_FTeBKV04q5fMTETF65lBzv6dNeHTMLT3dZmHF1BEAOLtmxvPdAJc_7-RDmGRiv3GU_uddvkFc005S0EeSx0PA=="
+_INCLUDE_ALL = [{"include": "**/*", "exclude": []}]
+
+
+def test_directorystate(caplog, tmpdir, _change_cwd):
+    item_cache = EmptyItemCache()
+    factory = BuildItemFactory()
+    factory.add_constructor("qdp/directory-state/generic", _TestState)
+    director = PackageBuildDirector(item_cache, factory)
+    base = "spec-glossary"
+
+    data = {
+        "SPDX-License-Identifier":
+        "CC-BY-SA-4.0 OR BSD-2-Clause",
+        "copyrights":
+        ["Copyright (C) 2020, 2023 embedded brains GmbH & Co. KG"],
+        "copyrights-by-license": {},
+        "directory":
+        base,
+        "enabled-by":
+        True,
+        "files": [
+            {
+                "file": "doc.rst",
+                "hash": None
+            },
+            {
+                "file": "glossary/t.yml",
+                "hash": None
+            },
+        ],
+        "hash":
+        None,
+        "links": [],
+        "patterns": [],
+    }
+    item = item_cache.add_volatile_item("/directory-state", data)
+    item["_type"] = "qdp/directory-state/generic"
+    item_file = os.path.join(tmpdir, "item.yml")
+    item.file = str(item_file)
+    dir_state = director["/directory-state"]
+    assert dir_state.directory == base
+    with pytest.raises(ValueError):
+        dir_state.digest
+    with pytest.raises(ValueError):
+        dir_state.has_changed(Link(item, {"hash": "blub"}))
+    overall_hash = dir_state.lazy_load()
+    assert overall_hash == "SrJDe4-ewVrM9BV9ttASllPsrXz2r_-ts9urtVeBa9s7JuBORQrvuPyW-hvsef80a8HvKvfeNSOmAh2eQ2_aag=="
+    assert dir_state.digest == overall_hash
+    dir_state.save()
+    with open(item_file, "r") as src:
+        assert f"""SPDX-License-Identifier: CC-BY-SA-4.0 OR BSD-2-Clause
+copyrights:
+- Copyright (C) 2020, 2023 embedded brains GmbH & Co. KG
+copyrights-by-license: {{}}
+directory: {base}
+enabled-by: true
+files:
+- file: doc.rst
+  hash: {_DOC_RST_HASH}
+- file: glossary/t.yml
+  hash: {_T_YML_HASH}
+hash: SrJDe4-ewVrM9BV9ttASllPsrXz2r_-ts9urtVeBa9s7JuBORQrvuPyW-hvsef80a8HvKvfeNSOmAh2eQ2_aag==
+links: []
+patterns: []
+""" == src.read()
+    assert dir_state.file == "spec-glossary/doc.rst"
+    assert dir_state.substitute("${.:/file}") == "spec-glossary/doc.rst"
+    assert dir_state.substitute("${.:/file[0]}") == "spec-glossary/doc.rst"
+    assert dir_state.substitute(
+        "${.:/file[1]}") == "spec-glossary/glossary/t.yml"
+    assert dir_state.substitute(
+        "${.:/file-without-extension[1]}") == "spec-glossary/glossary/t"
+    assert list(dir_state.files(".")) == ["./doc.rst", "./glossary/t.yml"]
+    assert list(dir_state.files()) == [
+        str(os.path.join(base, "doc.rst")),
+        str(os.path.join(base, "glossary/t.yml"))
+    ]
+    assert list(dir_state.files_and_hashes(".")) == [
+        ("./doc.rst", _DOC_RST_HASH), ("./glossary/t.yml", _T_YML_HASH)
+    ]
+    assert list(dir_state.files_and_hashes()) == [
+        (str(os.path.join(base, "doc.rst")), _DOC_RST_HASH),
+        (str(os.path.join(base, "glossary/t.yml")), _T_YML_HASH)
+    ]
+
+    dir_state.set_files(["doc.rst"])
+    dir_state.add_files(["glossary/t.yml"])
+    with pytest.raises(ValueError):
+        dir_state.digest
+    overall_hash = dir_state.lazy_load()
+    assert overall_hash == "SrJDe4-ewVrM9BV9ttASllPsrXz2r_-ts9urtVeBa9s7JuBORQrvuPyW-hvsef80a8HvKvfeNSOmAh2eQ2_aag=="
+    overall_hash = dir_state.lazy_load()
+    assert overall_hash == "SrJDe4-ewVrM9BV9ttASllPsrXz2r_-ts9urtVeBa9s7JuBORQrvuPyW-hvsef80a8HvKvfeNSOmAh2eQ2_aag=="
+
+    caplog.set_level(logging.DEBUG)
+    data_2 = {
+        "directory": base,
+        "enabled-by": True,
+        "files": [],
+        "links": [{
+            "role": "directory-state-exclude",
+            "uid": "directory-state"
+        }],
+        "patterns": [],
+    }
+    item_2 = item_cache.add_volatile_item("/directory-state-2", data_2)
+    dir_state_2 = DirectoryState(director, item_2)
+    dir_state_2.add_files(
+        os.path.relpath(path, dir_state_2.directory) for path in dir_state)
+    assert list(dir_state_2.files()) == []
+    dir_state_2.set_files(
+        os.path.relpath(path, dir_state_2.directory) for path in dir_state)
+    assert list(dir_state_2.files()) == []
+    dir_state_2.set_files([])
+    assert list(dir_state_2.files()) == []
+    with pytest.raises(ValueError):
+        dir_state_2.digest
+    overall_hash = dir_state_2.load()
+    assert overall_hash == "YtmDhTiLc9q20OthwE35dnsoPQz5gkQqajQQC2K3h5_yzY67hX35LlnhuR_kEx-_blEsjQlT1ijdP5YwUwb3bw=="
+
+    dir_state_2["patterns"] = _INCLUDE_ALL
+    overall_hash = dir_state_2.load()
+    assert overall_hash == "GSGvDhHq3M-csmWHrXBLJPB7yFB1hjxiZt3hROQP_dltVlHvCslNii9PzzbSiEYCEsi5qnOUp1OOs916PUvX4g=="
+
+    item["patterns"] = [{
+        "include": "**/*",
+        "exclude": ["*/glossary.rst", "*/[guv].yml", "*/sub/*"]
+    }]
+    overall_hash = dir_state.load()
+    assert overall_hash == "SrJDe4-ewVrM9BV9ttASllPsrXz2r_-ts9urtVeBa9s7JuBORQrvuPyW-hvsef80a8HvKvfeNSOmAh2eQ2_aag=="
+
+    item["patterns"] = [{
+        "include": "**/doc.rst",
+        "exclude": []
+    }, {
+        "include": "**/t.yml",
+        "exclude": []
+    }]
+    overall_hash = dir_state.load()
+    assert overall_hash == "SrJDe4-ewVrM9BV9ttASllPsrXz2r_-ts9urtVeBa9s7JuBORQrvuPyW-hvsef80a8HvKvfeNSOmAh2eQ2_aag=="
+
+    item["patterns"] = [{"include": "**/foo", "exclude": []}]
+    overall_hash = dir_state.load()
+    assert overall_hash == "YtmDhTiLc9q20OthwE35dnsoPQz5gkQqajQQC2K3h5_yzY67hX35LlnhuR_kEx-_blEsjQlT1ijdP5YwUwb3bw=="
+
+    caplog.set_level(logging.DEBUG)
+    data_3 = {
+        "directory": str(tmpdir),
+        "enabled-by": True,
+        "patterns": [],
+        "files": [],
+        "links": [],
+    }
+    item_3 = item_cache.add_volatile_item("/directory-state-3", data_3)
+    item_3["_type"] = "qdp/directory-state/generic"
+    item_3_file = os.path.join(tmpdir, "item-3.yml")
+    item_3.file = str(item_3_file)
+    dir_state_3 = director["/directory-state-3"]
+
+    src_file = os.path.join(base, "doc.rst")
+    dir_state_3.copy_file(src_file, "doc.rst")
+    dst_file = os.path.join(tmpdir, "doc.rst")
+    assert os.path.exists(dst_file)
+    log = get_and_clear_log(caplog)
+    assert f"INFO /directory-state-3: copy '{src_file}' to '{dst_file}'" in log
+    dir_state_3.load()
+
+    dir_state_3.remove_files()
+    assert not os.path.exists(dst_file)
+    log = get_and_clear_log(caplog)
+    assert f"INFO /directory-state-3: remove: {dst_file}" in log
+
+    dir_state_3.remove_files()
+    log = get_and_clear_log(caplog)
+    assert f"DEBUG /directory-state-3: file not found: {dst_file}" in log
+
+    dir_state_3["patterns"] = _INCLUDE_ALL
+    dir_state_3.remove_files()
+    dir_state_3["patterns"] = []
+    log = get_and_clear_log(caplog)
+    assert f"WARNING /directory-state-3: file not found: {dst_file}" in log
+
+    assert dir_state_3.digest
+    assert list(dir_state_3.files_and_hashes()) == [(str(dst_file),
+                                                     _DOC_RST_HASH)]
+    dir_state_3.invalidate()
+    with pytest.raises(ValueError):
+        dir_state_3.digest
+    assert list(dir_state_3.files_and_hashes()) == [(str(dst_file), None)]
+    dir_state_3["patterns"] = _INCLUDE_ALL
+    dir_state_3.invalidate()
+    dir_state_3["patterns"] = []
+    assert list(dir_state_3.files_and_hashes()) == []
+
+    dir_state_3.copy_tree(base, "x")
+    for path in [
+            "doc.rst", "g.yml", "glossary.rst", "glossary/sub/g.yml",
+            "glossary/sub/x.yml", "glossary/t.yml", "glossary/u.yml",
+            "glossary/v.yml"
+    ]:
+        assert os.path.exists(os.path.join(tmpdir, "x", path))
+
+    dir_state_3["patterns"] = _INCLUDE_ALL
+    dir_state_3.invalidate()
+    dir_state_3["patterns"] = []
+    dir_state_3.add_tree(os.path.join("spec-glossary", "glossary", "sub"),
+                         excludes=["/x.*"])
+    assert list(dir_state_3.files()) == [f"{tmpdir}/g.yml"]
+    assert not os.path.exists(os.path.join(tmpdir, "g.yml"))
+    assert os.path.exists(os.path.join(tmpdir, "x", "glossary", "sub",
+                                       "g.yml"))
+    dir_state_3.move_tree(os.path.join(tmpdir, "x", "glossary", "sub"))
+    assert list(dir_state_3.files()) == [f"{tmpdir}/g.yml", f"{tmpdir}/x.yml"]
+    assert not os.path.exists(
+        os.path.join(tmpdir, "x", "glossary", "sub", "g.yml"))
+    assert os.path.exists(os.path.join(tmpdir, "g.yml"))
+
+    link = Link(item_3, {"hash": None})
+    dir_state_3.load()
+    assert dir_state_3.has_changed(link)
+    dir_state_3.refresh_link(link)
+    assert not dir_state_3.has_changed(link)
+
+    dir_state_3.discard()
+    log = get_and_clear_log(caplog)
+    assert f"INFO /directory-state-3: discard" in log
+
+    dir_state_3.clear()
+    dir_state_3.refresh()
+    log = get_and_clear_log(caplog)
+    assert f"INFO /directory-state-3: refresh" in log
+
+    dir_state_3.clear()
+    dir_state_3.add_tarfile_members("test-files/archive.tar.xz", tmpdir, False)
+    assert list(dir_state_3.files()) == [
+        f"{tmpdir}/member-dir/dir-member.txt", f"{tmpdir}/member.txt"
+    ]
+    assert not os.path.exists(os.path.join(tmpdir, "member.txt"))
+    dir_state_3.add_tarfile_members("test-files/archive.tar.xz", tmpdir, True)
+    assert list(dir_state_3.files()) == [
+        f"{tmpdir}/member-dir/dir-member.txt", f"{tmpdir}/member.txt"
+    ]
+    assert os.path.exists(os.path.join(tmpdir, "member.txt"))
+
+    dir_state_3.clear()
+    src_file = os.path.join(base, "doc.rst")
+    dir_state_3.copy_files(base, ["doc.rst"], "uvw")
+    dst_file = os.path.join(tmpdir, "uvw", "doc.rst")
+    assert os.path.exists(dst_file)
+    log = get_and_clear_log(caplog)
+    assert f"INFO /directory-state-3: copy '{src_file}' to '{dst_file}'" in log
+    assert list(name for name in dir_state_3) == [dst_file]
+
+    symlink = os.path.join(tmpdir, "symlink")
+    os.symlink("foobar", symlink)
+    dir_state_3.set_files(["symlink"])
+    dir_state_3.load()
+    assert list(dir_state_3.files_and_hashes()) == [(
+        symlink,
+        "ClAmHr0aOQ_tK_Mm8mc8FFWCpjQtUjIElz0CGTN_gWFqgGmwElh89WNfaSXxtWw2AjDBmyc1AO4BPgMGAb8kJQ=="
+    )]
+
+    get_and_clear_log(caplog)
+
+    item["patterns"] = [{"include": "**/t.yml", "exclude": []}]
+    dir_state.load()
+    dir_state_3.lazy_clone(dir_state)
+    assert list(dir_state_3.files(".")) == ["./glossary/t.yml"]
+    log = get_and_clear_log(caplog)
+    assert f"INFO /directory-state-3: copy" in log
+
+    item["patterns"] = [{"include": "**/x.yml", "exclude": []}]
+    dir_state.load()
+    dir_state_3.lazy_clone(dir_state)
+    assert list(dir_state_3.files(".")) == ["./glossary/sub/x.yml"]
+    log = get_and_clear_log(caplog)
+    assert f"INFO /directory-state-3: remove" in log
+
+    os.unlink(dir_state_3.file)
+    item["patterns"] = [{"include": "**/t.yml", "exclude": []}]
+    dir_state.load()
+    dir_state_3.lazy_clone(dir_state)
+    log = get_and_clear_log(caplog)
+    assert f"WARNING /directory-state-3: file not found" in log
+
+    dir_state_3.invalidate()
+    dir_state_3.lazy_clone(dir_state)
+    log = get_and_clear_log(caplog)
+    assert f"INFO /directory-state-3: copy" in log
+
+    dir_state_3.lazy_clone(dir_state)
+    assert list(dir_state_3.files(".")) == ["./glossary/t.yml"]
+    log = get_and_clear_log(caplog)
+    assert f"INFO /directory-state-3: keep as is" in log
+
+    assert dir_state_3.directory == tmpdir
+    dir_state_3.set_files(["/a/b", "/a/c"])
+    dir_state_3.compact()
+    assert dir_state_3.directory == tmpdir
+    dir_state_3.set_files(["a/b", "c/d"])
+    dir_state_3.compact()
+    assert dir_state_3.directory == tmpdir
+    dir_state_3.set_files(["a/b", "a/c"])
+    dir_state_3.compact()
+    assert dir_state_3.directory == f"{tmpdir}/a"
+
+    dir_state_3.set_files(["data.json"])
+    assert not os.path.exists(dir_state_3.file)
+    dir_state_3.json_dump({"foo": "bar"})
+    assert os.path.exists(dir_state_3.file)
+    assert dir_state_3.json_load() == {"foo": "bar"}
diff --git a/rtemsspec/util.py b/rtemsspec/util.py
index 1f3b1d51..2e491244 100644
--- a/rtemsspec/util.py
+++ b/rtemsspec/util.py
@@ -28,6 +28,7 @@ import argparse
 import base64
 import binascii
 import logging
+import hashlib
 import os
 from pathlib import Path
 import shutil
@@ -42,6 +43,20 @@ def base64_to_hex(data: str) -> str:
     return binascii.hexlify(binary).decode('ascii')
 
 
+def hash_file(path: str) -> str:
+    """ Return a hash of the file specified by path. """
+    file_hash = hashlib.sha512()
+    if os.path.islink(path):
+        file_hash.update(os.readlink(path).encode("utf-8"))
+    else:
+        buf = bytearray(65536)
+        memview = memoryview(buf)
+        with open(path, "rb", buffering=0) as src:
+            for size in iter(lambda: src.readinto(memview), 0):  # type: ignore
+                file_hash.update(memview[:size])
+    return base64.urlsafe_b64encode(file_hash.digest()).decode("ascii")
+
+
 def copy_file(src_file: str, dst_file: str, log_context: str) -> None:
     """ Copies the source file to the destination file. """
     os.makedirs(os.path.dirname(dst_file), exist_ok=True)
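
The new hash_file() helper returns a base64url-encoded SHA512 digest and, for
symbolic links, hashes the link target string rather than the file content.  A
small self-contained check (the temporary file and its content are
illustrative):

import base64
import hashlib
import tempfile

from rtemsspec.util import hash_file

# Create a throwaway file with known content.
with tempfile.NamedTemporaryFile(delete=False) as tmp:
    tmp.write(b"hello")
    path = tmp.name

# hash_file() agrees with hashing the content directly.
expected = base64.urlsafe_b64encode(
    hashlib.sha512(b"hello").digest()).decode("ascii")
assert hash_file(path) == expected
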
diff --git a/spec-qdp/spec/qdp-copyrights-by-license.yml b/spec-qdp/spec/qdp-copyrights-by-license.yml
new file mode 100644
index 00000000..fbed9d61
--- /dev/null
+++ b/spec-qdp/spec/qdp-copyrights-by-license.yml
@@ -0,0 +1,30 @@
+SPDX-License-Identifier: CC-BY-SA-4.0 OR BSD-2-Clause
+copyrights:
+- Copyright (C) 2023 embedded brains GmbH & Co. KG
+enabled-by: true
+links:
+- role: spec-member
+  uid: root
+spec-description: null
+spec-example: null
+spec-info:
+  dict:
+    attributes:
+      description:
+        description: |
+          If the value is present, then it shall be the licensing description.
+        spec-type: str
+      files:
+        description: |
+          If the value is present, then it shall be the list of license files.
+        spec-type: list-str
+    description: |
+      This set of attributes contains associated copyrights by license.
+    generic-attributes:
+      description: null
+      key-spec-type: spdx-license-identifier
+      value-spec-type: copyrights
+    mandatory-attributes: none
+spec-name: Copyrights by License
+spec-type: qdp-copyrights-by-license
+type: spec
diff --git a/spec-qdp/spec/qdp-directory-state-exclude-role.yml b/spec-qdp/spec/qdp-directory-state-exclude-role.yml
new file mode 100644
index 00000000..07e5aaaf
--- /dev/null
+++ b/spec-qdp/spec/qdp-directory-state-exclude-role.yml
@@ -0,0 +1,25 @@
+SPDX-License-Identifier: CC-BY-SA-4.0 OR BSD-2-Clause
+copyrights:
+- Copyright (C) 2023 embedded brains GmbH & Co. KG
+enabled-by: true
+links:
+- role: spec-member
+  uid: root
+- role: spec-refinement
+  spec-key: role
+  spec-value: directory-state-exclude
+  uid: link
+spec-description: null
+spec-example: null
+spec-info:
+  dict:
+    attributes: {}
+    description: |
+      It defines the directory state exclude role of links.  Such links are
+      used to exclude the files of the referenced
+      ${qdp-directory-state:/spec-name} from the pattern defined directory
+      state.
+    mandatory-attributes: all
+spec-name: Directory State Exclude Link Role
+spec-type: qdp-directory-state-exclude-role
+type: spec
diff --git a/spec-qdp/spec/qdp-directory-state-generic.yml b/spec-qdp/spec/qdp-directory-state-generic.yml
new file mode 100644
index 00000000..0bad0795
--- /dev/null
+++ b/spec-qdp/spec/qdp-directory-state-generic.yml
@@ -0,0 +1,22 @@
+SPDX-License-Identifier: CC-BY-SA-4.0 OR BSD-2-Clause
+copyrights:
+- Copyright (C) 2023 embedded brains GmbH & Co. KG
+enabled-by: true
+links:
+- role: spec-member
+  uid: root
+- role: spec-refinement
+  spec-key: directory-state-type
+  spec-value: generic
+  uid: qdp-directory-state
+spec-description: null
+spec-example: null
+spec-info:
+  dict:
+    attributes: {}
+    description: |
+      This set of attributes specifies a generic directory state.
+    mandatory-attributes: all
+spec-name: Generic Directory State Item Type
+spec-type: qdp-directory-state-generic
+type: spec
diff --git a/spec-qdp/spec/qdp-directory-state-pattern-list.yml b/spec-qdp/spec/qdp-directory-state-pattern-list.yml
new file mode 100644
index 00000000..b0cafee3
--- /dev/null
+++ b/spec-qdp/spec/qdp-directory-state-pattern-list.yml
@@ -0,0 +1,16 @@
+SPDX-License-Identifier: CC-BY-SA-4.0 OR BSD-2-Clause
+copyrights:
+- Copyright (C) 2023 embedded brains GmbH & Co. KG
+enabled-by: true
+links:
+- role: spec-member
+  uid: root
+spec-description: null
+spec-example: null
+spec-info:
+  list:
+    description: null
+    spec-type: qdp-directory-state-patterns
+spec-name: Directory State Pattern List
+spec-type: qdp-directory-state-pattern-list
+type: spec
diff --git a/spec-qdp/spec/qdp-directory-state-patterns.yml b/spec-qdp/spec/qdp-directory-state-patterns.yml
new file mode 100644
index 00000000..4cbcde6d
--- /dev/null
+++ b/spec-qdp/spec/qdp-directory-state-patterns.yml
@@ -0,0 +1,37 @@
+SPDX-License-Identifier: CC-BY-SA-4.0 OR BSD-2-Clause
+copyrights:
+- Copyright (C) 2020, 2023 embedded brains GmbH & Co. KG
+enabled-by: true
+links:
+- role: spec-member
+  uid: root
+spec-description: null
+spec-example: null
+spec-info:
+  dict:
+    attributes:
+      include:
+        description: |
+          It shall be a Python ``pathlib`` glob pattern.  If a file path
+          relative to the base directory of the directory state matches with
+          the pattern, then the file is added to the directory state if it is
+          not rejected by an exclude pattern.
+        spec-type: str
+      exclude:
+        description: |
+          It shall be a list of Python ``fnmatch`` patterns.  If a file path
+          matches with the pattern, then the file is not added to the directory
+          state.  The file paths are absolute paths relative to the base
+          directory of the directory state.  For example, if the directory
+          state has a base of ``/base`` and a file to include has the path
+          ``/base/abc``, then the file path ``/abc`` is used to match with the
+          exclude patterns.
+        spec-type: list-str
+    description: |
+      This set of attributes specifies a set of patterns used to load the file
+      list of the directory state.
+    mandatory-attributes: all
+  none: null
+spec-name: Directory State Patterns
+spec-type: qdp-directory-state-patterns
+type: spec
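
To make the matching rules concrete: the include pattern is resolved with
pathlib globbing relative to the base directory, while each exclude pattern is
matched with fnmatch against the relative file path joined onto "/", mirroring
_load_from_patterns() in rtemsspec/directorystate.py.  A sketch of that step
with illustrative patterns taken from the test suite:

import fnmatch
import os
from pathlib import Path

base = "spec-glossary"  # illustrative base directory
include = "**/*"
excludes = ["*/glossary.rst", "*/[guv].yml", "*/sub/*"]

# Include: pathlib glob relative to the base directory, files only.
files = set(
    os.path.relpath(path, base) for path in Path(base).glob(include)
    if not path.is_dir())

# Exclude: fnmatch against the relative path prefixed with "/".
for exclude in excludes:
    exclude_files = set(
        path for path in files
        if fnmatch.fnmatch(os.path.join("/", path), exclude))
    files.difference_update(exclude_files)

print(sorted(files))
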
diff --git a/spec-qdp/spec/qdp-directory-state.yml b/spec-qdp/spec/qdp-directory-state.yml
new file mode 100644
index 00000000..838eeaf7
--- /dev/null
+++ b/spec-qdp/spec/qdp-directory-state.yml
@@ -0,0 +1,48 @@
+SPDX-License-Identifier: CC-BY-SA-4.0 OR BSD-2-Clause
+copyrights:
+- Copyright (C) 2020, 2023 embedded brains GmbH & Co. KG
+enabled-by: true
+links:
+- role: spec-member
+  uid: root
+- role: spec-refinement
+  spec-key: qdp-type
+  spec-value: directory-state
+  uid: qdp-root
+spec-description: null
+spec-example: null
+spec-info:
+  dict:
+    attributes:
+      copyrights-by-license:
+        description: null
+        spec-type: qdp-copyrights-by-license
+      directory:
+        description: |
+          It shall be the path to the directory.  A variable substitution is
+          performed on the value.  For example, you can use
+          ``$${/variant:/build-directory}/some/path/in/build/directory`` or
+          ``$${/variant:/deployment-directory}/some/path/in/deployment/directory``.
+        spec-type: str
+      directory-state-type:
+        description: |
+          It shall be the directory state type.
+        spec-type: name
+      patterns:
+        description: null
+        spec-type: qdp-directory-state-pattern-list
+      files:
+        description: null
+        spec-type: qdp-file-state-list
+      hash:
+        description: |
+          If the value is present, then it shall be the hash of the file state
+          list and the directory path, otherwise the directory state is
+          invalid.
+        spec-type: qdp-optional-sha512
+    description: |
+      This set of attributes specifies a directory state.
+    mandatory-attributes: all
+spec-name: Directory State Item Type
+spec-type: qdp-directory-state
+type: spec
diff --git a/spec-qdp/spec/qdp-file-state-list.yml b/spec-qdp/spec/qdp-file-state-list.yml
new file mode 100644
index 00000000..963a4d42
--- /dev/null
+++ b/spec-qdp/spec/qdp-file-state-list.yml
@@ -0,0 +1,16 @@
+SPDX-License-Identifier: CC-BY-SA-4.0 OR BSD-2-Clause
+copyrights:
+- Copyright (C) 2020 embedded brains GmbH & Co. KG
+enabled-by: true
+links:
+- role: spec-member
+  uid: root
+spec-description: null
+spec-example: null
+spec-info:
+  list:
+    description: null
+    spec-type: qdp-file-state
+spec-name: File State List
+spec-type: qdp-file-state-list
+type: spec
diff --git a/spec-qdp/spec/qdp-file-state.yml b/spec-qdp/spec/qdp-file-state.yml
new file mode 100644
index 00000000..b3dd8489
--- /dev/null
+++ b/spec-qdp/spec/qdp-file-state.yml
@@ -0,0 +1,30 @@
+SPDX-License-Identifier: CC-BY-SA-4.0 OR BSD-2-Clause
+copyrights:
+- Copyright (C) 2022 embedded brains GmbH & Co. KG
+enabled-by: true
+links:
+- role: spec-member
+  uid: root
+spec-description: null
+spec-example: null
+spec-info:
+  dict:
+    attributes:
+      file:
+        description: |
+          It shall be the path to the file.  A variable substitution is
+          performed on the value.  For example, you can use
+          ``$${/variant:/build-directory}/some/path/in/build/file`` or
+          ``$${/variant:/deployment-directory}/some/path/in/deployment/file``.
+        spec-type: str
+      hash:
+        description: |
+          If the value is present, then it shall be the hash of the file,
+          otherwise the file state is invalid.
+        spec-type: qdp-optional-sha512
+    description: |
+      This set of attributes specifies a file state.
+    mandatory-attributes: all
+spec-name: File State
+spec-type: qdp-file-state
+type: spec
diff --git a/spec-qdp/spec/qdp-optional-sha512.yml b/spec-qdp/spec/qdp-optional-sha512.yml
new file mode 100644
index 00000000..c3291cd4
--- /dev/null
+++ b/spec-qdp/spec/qdp-optional-sha512.yml
@@ -0,0 +1,20 @@
+SPDX-License-Identifier: CC-BY-SA-4.0 OR BSD-2-Clause
+copyrights:
+- Copyright (C) 2020 embedded brains GmbH & Co. KG
+enabled-by: true
+links:
+- role: spec-member
+  uid: root
+spec-description: null
+spec-example: null
+spec-info:
+  none: null
+  str:
+    assert:
+    - re: ^[A-Za-z0-9+_=-]{88}$
+    description: |
+      If the value is present, then it shall be a SHA512 hash value encoded in
+      base64url.
+spec-name: Optional SHA512
+spec-type: qdp-optional-sha512
+type: spec
diff --git a/spec-qdp/spec/qdp-repository-role.yml b/spec-qdp/spec/qdp-repository-role.yml
new file mode 100644
index 00000000..bcba5cff
--- /dev/null
+++ b/spec-qdp/spec/qdp-repository-role.yml
@@ -0,0 +1,23 @@
+SPDX-License-Identifier: CC-BY-SA-4.0 OR BSD-2-Clause
+copyrights:
+- Copyright (C) 2020 embedded brains GmbH & Co. KG
+enabled-by: true
+links:
+- role: spec-member
+  uid: root
+- role: spec-refinement
+  spec-key: role
+  spec-value: repository
+  uid: link
+spec-description: null
+spec-example: null
+spec-info:
+  dict:
+    attributes: {}
+    description: |
+      It defines the repository role of links and is used to define the
+      deployed repositories of a package variant.
+    mandatory-attributes: all
+spec-name: Repository Link Role
+spec-type: qdp-repository-role
+type: spec
diff --git a/spec-qdp/spec/qdp-repository.yml b/spec-qdp/spec/qdp-repository.yml
new file mode 100644
index 00000000..f4e27795
--- /dev/null
+++ b/spec-qdp/spec/qdp-repository.yml
@@ -0,0 +1,50 @@
+SPDX-License-Identifier: CC-BY-SA-4.0 OR BSD-2-Clause
+copyrights:
+- Copyright (C) 2020, 2023 embedded brains GmbH & Co. KG
+enabled-by: true
+links:
+- role: spec-member
+  uid: root
+- role: spec-refinement
+  spec-key: directory-state-type
+  spec-value: repository
+  uid: qdp-directory-state
+spec-description: null
+spec-example: null
+spec-info:
+  dict:
+    attributes:
+      branch:
+        description: |
+          It shall be a branch in the source repository for the commit.
+        spec-type: str
+      commit:
+        description: |
+          It shall be the commit of the branch in the source repository.
+        spec-type: qdp-git-hash
+      description:
+        description: |
+          It shall be the description of the repository.
+        spec-type: str
+      origin-branch:
+        description: |
+          It shall be the origin branch name.
+        spec-type: optional-str
+      origin-commit:
+        description: |
+          It shall be the commit of the origin branch.
+        spec-type: qdp-optional-git-hash
+      origin-commit-url:
+        description: |
+          It shall be the URL of the commit web page at the origin.
+        spec-type: optional-str
+      origin-url:
+        description: |
+          It shall be the repository URL of the origin.
+        spec-type: optional-str
+    description: |
+      This set of attributes specifies a repository.
+    mandatory-attributes: all
+spec-name: Repository Item Type
+spec-type: qdp-repository
+type: spec
diff --git a/spec-qdp/spec/qdp-sha512.yml b/spec-qdp/spec/qdp-sha512.yml
new file mode 100644
index 00000000..4eec0c80
--- /dev/null
+++ b/spec-qdp/spec/qdp-sha512.yml
@@ -0,0 +1,18 @@
+SPDX-License-Identifier: CC-BY-SA-4.0 OR BSD-2-Clause
+copyrights:
+- Copyright (C) 2020 embedded brains GmbH & Co. KG
+enabled-by: true
+links:
+- role: spec-member
+  uid: root
+spec-description: null
+spec-example: null
+spec-info:
+  str:
+    assert:
+    - re: ^[A-Za-z0-9+_=-]{88}$
+    description: |
+      It shall be a SHA512 hash value encoded in base64url.
+spec-name: SHA512
+spec-type: qdp-sha512
+type: spec
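
The assert pattern above (and its optional counterpart in
qdp-optional-sha512.yml) accepts exactly the 88 characters produced by
base64url encoding a 64-byte SHA512 digest (86 payload characters plus "=="
padding), which matches what hash_file() in rtemsspec/util.py emits.  A quick
consistency check with illustrative input:

import base64
import hashlib
import re

digest = base64.urlsafe_b64encode(
    hashlib.sha512(b"example").digest()).decode("ascii")
assert len(digest) == 88
assert re.fullmatch(r"^[A-Za-z0-9+_=-]{88}$", digest)
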
diff --git a/spec-qdp/spec/qdp-unpacked-archive.yml b/spec-qdp/spec/qdp-unpacked-archive.yml
new file mode 100644
index 00000000..c808770a
--- /dev/null
+++ b/spec-qdp/spec/qdp-unpacked-archive.yml
@@ -0,0 +1,46 @@
+SPDX-License-Identifier: CC-BY-SA-4.0 OR BSD-2-Clause
+copyrights:
+- Copyright (C) 2023 embedded brains GmbH & Co. KG
+enabled-by: true
+links:
+- role: spec-member
+  uid: root
+- role: spec-refinement
+  spec-key: directory-state-type
+  spec-value: unpacked-archive
+  uid: qdp-directory-state
+spec-description: null
+spec-example: null
+spec-info:
+  dict:
+    attributes:
+      description:
+        description: |
+          It shall be the description of the unpacked archive.
+        spec-type: str
+      archive-file:
+        description: |
+          It shall be the archive file.
+        spec-type: str
+      archive-hash:
+        description: |
+          It shall be the archive hash.
+        spec-type: qdp-sha512
+      archive-patches:
+        description: |
+          It shall be the list of patches applied to the archive.
+        spec-type: any
+      archive-symbolic-links:
+        description: |
+          It shall be the list of symbolic links added to the archive.
+        spec-type: any
+      archive-url:
+        description: |
+          It shall be the archive URL.
+        spec-type: optional-str
+    description: |
+      This set of attributes specifies an unpacked archive.
+    mandatory-attributes: all
+spec-name: Unpacked Archive Item Type
+spec-type: qdp-unpacked-archive
+type: spec


