diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml new file mode 100644 index 0000000..58c9e27 --- /dev/null +++ b/.github/workflows/main.yml @@ -0,0 +1,134 @@ +on: push + +permissions: { contents: read } + +jobs: + flake8: + runs-on: ubuntu-latest + steps: + - uses: actions/setup-python@v3 + - name: Installing dependencies + run: | + # installing dependencies + python -m pip install --upgrade pip + pip install --upgrade flake8 + - uses: actions/checkout@v3 + with: { path: snapshot } + - name: Executing `flake8` + run: | + # executing `flake8` + # stop the build if there are Python syntax errors or undefined names + flake8 ./snapshot --count --select=E9,F63,F7,F82 --show-source --statistics + # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide + flake8 ./snapshot --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics + + util-linux: + runs-on: ubuntu-latest + outputs: + version: ${{ steps.version.outputs.debian }} + steps: + - id: version + run: | + _upstream=2.38.1; _debian="${_upstream}-5" + echo "debian=${_debian}" >> "$GITHUB_OUTPUT" + echo "upstream=${_upstream}" >> "$GITHUB_OUTPUT" + - id: cache + uses: actions/cache@v3 + with: + key: 3rd-party/util-linux/${{ steps.version.outputs.debian }} + path: 3rd-party/util-linux + - if: steps.cache.outputs.cache-hit != 'true' + name: Installing dependencies + run: | + # installing dependencies + export DEBIAN_FRONTEND=noninteractive + sudo apt update --assume-yes + sudo apt install --assume-yes devscripts dpkg-dev \ + asciidoctor debhelper-compat dh-exec po-debconf socat \ + libaudit-dev libcap-ng-dev libcryptsetup-dev libpam0g-dev libsystemd-dev libudev-dev + sudo apt autoremove --assume-yes + sudo apt purge --assume-yes '~c' + - if: steps.cache.outputs.cache-hit != 'true' + name: Building via debuild + run: | + # building via debuild + mkdir --parents ./3rd-party/util-linux/build + cd ./3rd-party/util-linux/build + _debian_version="${{ 
steps.version.outputs.debian }}" + _upstream_version="${{ steps.version.outputs.upstream }}" + for _url in \ + "http://deb.debian.org/debian/pool/main/u/util-linux/util-linux_${_upstream_version}.orig.tar.xz" \ + "http://deb.debian.org/debian/pool/main/u/util-linux/util-linux_${_debian_version}.debian.tar.xz" \ + ; do curl --remote-name "${_url}"; done + tar --extract --file="util-linux_${_upstream_version}.orig.tar.xz" + tar --extract --file="util-linux_${_debian_version}.debian.tar.xz" --directory="util-linux-${_upstream_version}" + (cd "./util-linux-${_upstream_version}" && debuild --no-sign) + cd .. + mv ./build/*.deb ./ + rm --force --recursive ./build + - uses: actions/upload-artifact@v3 + with: + name: util-linux + path: 3rd-party/util-linux + + pytest: + runs-on: ubuntu-latest + needs: [util-linux] + steps: + - uses: actions/cache@v3 + with: + key: 3rd-party/util-linux/${{ needs.util-linux.outputs.version }} + path: 3rd-party/util-linux + fail-on-cache-miss: true + - uses: actions/setup-python@v3 + - name: Installing dependencies + run: | + # installing dependencies + export DEBIAN_FRONTEND=noninteractive + sudo apt update --assume-yes + sudo apt purge --assume-yes moby-runc moby-containerd + sudo apt autoremove --assume-yes + sudo apt purge --assume-yes '~c' + sudo apt install --assume-yes oci-image-tool + sudo apt install --assume-yes buildah podman docker.io skopeo + _util_linux_version="${{ needs.util-linux.outputs.version }}" + (cd ./3rd-party/util-linux && sudo apt install --assume-yes \ + "./util-linux_${_util_linux_version}_amd64.deb" "./util-linux-extra_${_util_linux_version}_amd64.deb" \ + "./libmount1_${_util_linux_version}_amd64.deb" "./libsmartcols1_${_util_linux_version}_amd64.deb" \ + ) + python -m pip install --upgrade pip + pip install --upgrade pytest pyyaml + - uses: actions/checkout@v3 + with: { path: snapshot } + - name: Installing this package + run: pip install --upgrade ./snapshot + - id: cache + uses: actions/cache@v3 + with: + key: 
oci/examples/nginx/${{ hashFiles('snapshot/examples/nginx') }} + path: temporary/oci + - if: steps.cache.outputs.cache-hit != 'true' + name: Generating oci images + run: | + # generating oci images + mkdir --parents temporary/oci + docker build --pull --tag=example -- ./snapshot/examples/nginx/docker + buildah build --pull --layers --tag=example -- ./snapshot/examples/nginx/docker + buildah push localhost/example oci-archive:temporary/oci/buildah.tar + podman image save --format=oci-archive -- localhost/example > temporary/oci/podman.tar + skopeo copy docker-daemon:example:latest oci-archive:temporary/oci/docker.tar + - name: Executing `pytest` + run: | + # executing `pytest` + pytest -ra --basetemp=temporary/pytest \ + --tests.examples.nginx-source=temporary/oci/docker.tar \ + --tests.examples.nginx-source=temporary/oci/podman.tar \ + --tests.examples.nginx-source=temporary/oci/buildah.tar \ + ./snapshot/tests + - if: failure() + run: sudo tar --create --directory=temporary -- . | xz > test-context.tar.xz + - if: failure() + uses: actions/upload-artifact@v3 + with: + name: test-context + path: test-context.tar.xz diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..e69de29 diff --git a/CHANGELOG b/CHANGELOG new file mode 100644 index 0000000..e69de29 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..e69de29 diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..0e259d4 --- /dev/null +++ b/LICENSE @@ -0,0 +1,121 @@ +Creative Commons Legal Code + +CC0 1.0 Universal + + CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE + LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE AN + ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS + INFORMATION ON AN "AS-IS" BASIS. 
CREATIVE COMMONS MAKES NO WARRANTIES + REGARDING THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS + PROVIDED HEREUNDER, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM + THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED + HEREUNDER. + +Statement of Purpose + +The laws of most jurisdictions throughout the world automatically confer +exclusive Copyright and Related Rights (defined below) upon the creator +and subsequent owner(s) (each and all, an "owner") of an original work of +authorship and/or a database (each, a "Work"). + +Certain owners wish to permanently relinquish those rights to a Work for +the purpose of contributing to a commons of creative, cultural and +scientific works ("Commons") that the public can reliably and without fear +of later claims of infringement build upon, modify, incorporate in other +works, reuse and redistribute as freely as possible in any form whatsoever +and for any purposes, including without limitation commercial purposes. +These owners may contribute to the Commons to promote the ideal of a free +culture and the further production of creative, cultural and scientific +works, or to gain reputation or greater distribution for their Work in +part through the use and efforts of others. + +For these and/or other purposes and motivations, and without any +expectation of additional consideration or compensation, the person +associating CC0 with a Work (the "Affirmer"), to the extent that he or she +is an owner of Copyright and Related Rights in the Work, voluntarily +elects to apply CC0 to the Work and publicly distribute the Work under its +terms, with knowledge of his or her Copyright and Related Rights in the +Work and the meaning and intended legal effect of CC0 on those rights. + +1. Copyright and Related Rights. A Work made available under CC0 may be +protected by copyright and related or neighboring rights ("Copyright and +Related Rights"). 
Copyright and Related Rights include, but are not +limited to, the following: + + i. the right to reproduce, adapt, distribute, perform, display, + communicate, and translate a Work; + ii. moral rights retained by the original author(s) and/or performer(s); +iii. publicity and privacy rights pertaining to a person's image or + likeness depicted in a Work; + iv. rights protecting against unfair competition in regards to a Work, + subject to the limitations in paragraph 4(a), below; + v. rights protecting the extraction, dissemination, use and reuse of data + in a Work; + vi. database rights (such as those arising under Directive 96/9/EC of the + European Parliament and of the Council of 11 March 1996 on the legal + protection of databases, and under any national implementation + thereof, including any amended or successor version of such + directive); and +vii. other similar, equivalent or corresponding rights throughout the + world based on applicable law or treaty, and any national + implementations thereof. + +2. Waiver. To the greatest extent permitted by, but not in contravention +of, applicable law, Affirmer hereby overtly, fully, permanently, +irrevocably and unconditionally waives, abandons, and surrenders all of +Affirmer's Copyright and Related Rights and associated claims and causes +of action, whether now known or unknown (including existing as well as +future claims and causes of action), in the Work (i) in all territories +worldwide, (ii) for the maximum duration provided by applicable law or +treaty (including future time extensions), (iii) in any current or future +medium and for any number of copies, and (iv) for any purpose whatsoever, +including without limitation commercial, advertising or promotional +purposes (the "Waiver"). 
Affirmer makes the Waiver for the benefit of each +member of the public at large and to the detriment of Affirmer's heirs and +successors, fully intending that such Waiver shall not be subject to +revocation, rescission, cancellation, termination, or any other legal or +equitable action to disrupt the quiet enjoyment of the Work by the public +as contemplated by Affirmer's express Statement of Purpose. + +3. Public License Fallback. Should any part of the Waiver for any reason +be judged legally invalid or ineffective under applicable law, then the +Waiver shall be preserved to the maximum extent permitted taking into +account Affirmer's express Statement of Purpose. In addition, to the +extent the Waiver is so judged Affirmer hereby grants to each affected +person a royalty-free, non transferable, non sublicensable, non exclusive, +irrevocable and unconditional license to exercise Affirmer's Copyright and +Related Rights in the Work (i) in all territories worldwide, (ii) for the +maximum duration provided by applicable law or treaty (including future +time extensions), (iii) in any current or future medium and for any number +of copies, and (iv) for any purpose whatsoever, including without +limitation commercial, advertising or promotional purposes (the +"License"). The License shall be deemed effective as of the date CC0 was +applied by Affirmer to the Work. Should any part of the License for any +reason be judged legally invalid or ineffective under applicable law, such +partial invalidity or ineffectiveness shall not invalidate the remainder +of the License, and in such case Affirmer hereby affirms that he or she +will not (i) exercise any of his or her remaining Copyright and Related +Rights in the Work or (ii) assert any associated claims and causes of +action with respect to the Work, in either case contrary to Affirmer's +express Statement of Purpose. + +4. Limitations and Disclaimers. + + a. 
No trademark or patent rights held by Affirmer are waived, abandoned, + surrendered, licensed or otherwise affected by this document. + b. Affirmer offers the Work as-is and makes no representations or + warranties of any kind concerning the Work, express, implied, + statutory or otherwise, including without limitation warranties of + title, merchantability, fitness for a particular purpose, non + infringement, or the absence of latent or other defects, accuracy, or + the present or absence of errors, whether or not discoverable, all to + the greatest extent permissible under applicable law. + c. Affirmer disclaims responsibility for clearing rights of other persons + that may apply to the Work or any use thereof, including without + limitation any person's Copyright and Related Rights in the Work. + Further, Affirmer disclaims responsibility for obtaining any necessary + consents, permissions or other rights required for any use of the + Work. + d. Affirmer understands and acknowledges that Creative Commons is not a + party to this document and has no duty or obligation with respect to + this CC0 or use of the Work. diff --git a/README.md b/README.md new file mode 100644 index 0000000..c13c82d --- /dev/null +++ b/README.md @@ -0,0 +1,9 @@ +# p5-python3-libvirt_lxc_helper +python3 libvirt_lxc helper tool + +## Install +- from source code directory: `cd path/to/source/code/directory && pip install .` +- from source code archive file: `pip install path/to/source/code/archive/file` + +## Examples +Located in [examples](examples) directory. 
diff --git a/examples/nginx/README.md b/examples/nginx/README.md new file mode 100644 index 0000000..1b88efa --- /dev/null +++ b/examples/nginx/README.md @@ -0,0 +1,37 @@ +### produce oci-image and generate rootfs tar archive +``` +podman build --tag=example -- examples/nginx/docker +podman image save --format=oci-archive -- example | python -m p5.libvirt_lxc_helper make-rootfs --destination=rootfs.tar +``` +- For producing oci-image you can use `docker`, `podman`, `buildah`, etc ... +- For more options check: `python -m p5.libvirt_lxc_helper --help` + +### helper script + `Dockerfile` instructions do not always lead to the desired result. + For example, it is not so easy to replace the contents of `/etc/hosts`. + But you can use a special script (`yaml` or `json` config file) - in this example it is named [`.p5.libvirt_lxc_helper.script.yml`](docker/files/.p5.libvirt_lxc_helper.script.yml). + It should be marked as `p5.libvirt_lxc_helper.script` `LABEL` in `Dockerfile` like this: + ``` + LABEL p5.libvirt_lxc_helper.script=.p5.libvirt_lxc_helper.script.yml + ``` + In this case `.p5.libvirt_lxc_helper.script.yml` is the path (relative to the container root) to the script. + Path will be normalized automatically - don't worry about `/`, `./`, etc... 
+ For example, there you can do something like this: + ``` + - command: /bin/sh -xe + input: | + rm --force /etc/hostname && echo nginx > /etc/hostname + rm --force /etc/resolv.conf && ln --symbolic /run/systemd/resolve/stub-resolv.conf /etc/resolv.conf + + rm --force /etc/hosts && cat > /etc/hosts << EOF + 127.0.0.1 localhost + + # The following lines are desirable for IPv6 capable hosts + ::1 localhost ip6-localhost ip6-loopback + ff02::1 ip6-allnodes + ff02::2 ip6-allrouters + + ::1 nginx + 127.0.0.1 nginx + EOF + ``` diff --git a/examples/nginx/docker/Dockerfile b/examples/nginx/docker/Dockerfile new file mode 100644 index 0000000..152aa1f --- /dev/null +++ b/examples/nginx/docker/Dockerfile @@ -0,0 +1,38 @@ +ARG base_image=debian:bullseye + +FROM ${base_image} + +ENV LC_ALL C +ENV DEBIAN_FRONTEND noninteractive + +# watch .p5.libvirt_lxc_helper.script.yml +ENV EXAMPLE_A should be inherited +ENV EXAMPLE_B should be inherited +ENV EXAMPLE_C should be removed +ENV EXAMPLE_D should be replaced + +RUN rm --force /etc/apt/sources.list +COPY files/etc/apt/sources.list.d/debian.sources /etc/apt/sources.list.d/debian.sources +RUN _packages=`apt-mark showmanual`; test -n "${_packages}" || exit 0; apt-mark auto ${_packages} +RUN apt update --assume-yes && apt install --assume-yes apt-utils +RUN apt update --assume-yes && apt full-upgrade --assume-yes + +RUN apt purge --assume-yes ifupdown iproute2 +RUN rm --force --recursive /etc/network + +RUN apt update --assume-yes && apt install --assume-yes systemd systemd-sysv +RUN apt update --assume-yes && apt install --assume-yes nginx + +RUN systemctl enable systemd-networkd.service systemd-resolved.service + +RUN rm --force /etc/*- + +RUN apt update --assume-yes && apt --assume-yes full-upgrade && apt autoremove --assume-yes && apt purge --assume-yes '~c' +RUN apt clean --assume-yes && for _path in /tmp /var/tmp /var/lib/apt/lists; do \ + find "${_path}" -mindepth 1 -maxdepth 1 -exec rm --recursive --force {} \;; \ + done + 
+COPY files/.p5.libvirt_lxc_helper.script.yml .p5.libvirt_lxc_helper.script.yml +LABEL p5.libvirt_lxc_helper.script=.p5.libvirt_lxc_helper.script.yml + +ENTRYPOINT ["/sbin/init"] diff --git a/examples/nginx/docker/files/.p5.libvirt_lxc_helper.script.yml b/examples/nginx/docker/files/.p5.libvirt_lxc_helper.script.yml new file mode 100644 index 0000000..ad9a51f --- /dev/null +++ b/examples/nginx/docker/files/.p5.libvirt_lxc_helper.script.yml @@ -0,0 +1,109 @@ +# JSON also supported + +exclude: true # exclude (remove) this file from final rootfs archive (`true` by default) + +environment: # `{inherit: true, payload: {}}` by default + # `environment: true` will be parsed like `environment: {inherit: true, payload: {}}` + # `environment: false` will be parsed like `environment: {inherit: false, payload: {}}` + + inherit: true # inherit oci-image environment variables (`true` by default) + + payload: # overload/add/remove variables + A: A + B: B + # `EXAMPLE_A` - will be inherited by default (check `Dockerfile`) because global `environment.inherit` is `true` + # `EXAMPLE_B`, `EXAMPLE_C`, `EXAMPLE_D` - global `environment.inherit` option will be ignored for these variables + EXAMPLE_B: true # will be force inherited + EXAMPLE_C: false # will be removed + EXAMPLE_D: "replaced by .p5.libvirt_lxc_helper.script.yml" # will be replaced + +body: # a list with commands + # a string will be interpreted as `command` key by default + # any space symbol as argument separator + # will be parsed like `{command: [mkdir, --parents, /.p5.libvirt_lxc_helper.script.example]}` + - mkdir --parents /.p5.libvirt_lxc_helper.script.example + + # a list will be interpreted as `command` key by default + # will be parsed like `{command: [touch, /.p5.libvirt_lxc_helper.script.example/empty.txt]}` + - [touch, /.p5.libvirt_lxc_helper.script.example/empty.txt] + + - command: /bin/sh + input: env | sort > /.p5.libvirt_lxc_helper.script.example/0.txt # command stdin (`/bin/sh` code) + + - command: /bin/sh + # 
command-level environment, will be parsed same as global option + # default is `{inherit: true, payload: {}}` + environment: + # if global `environment.inherit` option is `false` then oci-image environment will be ignored + inherit: true # inherit script-level environment (`true` by default) + input: env | sort > /.p5.libvirt_lxc_helper.script.example/1.txt + + - command: /bin/sh + environment: true # will be parsed like `environment: {inherit: true, payload: {}}` (same as `1.txt`) + input: env | sort > /.p5.libvirt_lxc_helper.script.example/2.txt + + - command: /bin/sh + # empty environment (only `PWD=/` expected) + # will be parsed like `environment: {inherit: false, payload: {}}` + environment: false + input: env | sort > /.p5.libvirt_lxc_helper.script.example/3.txt + + - command: /bin/sh + environment: + # if `payload` option is not dictionary (or omitted) or `inherit` is not boolean + # then `environment` option will be parsed like `environment.payload` + # in this case it will be parsed like `{inherit: true, payload: {inherit: "false", payload: 4, B: 4, C: 4}}` + inherit: "false" # will be parsed like `payload.inherit = "false"`, because not boolean + payload: 4 # will be parsed like `payload.payload = 4`, because not dictionary + B: 4 # will be replaced to "4" + C: 4 # will be added as "4" + input: env | sort > /.p5.libvirt_lxc_helper.script.example/4.txt + + - command: /bin/sh + environment: + # list of dictionaries will be merged + # will be parsed like `{inherit: true, payload: {B: 5, C: 5}}` + - B: 5 # will be replaced to "5" + - {C: 5} # will be added as "5" + input: env | sort > /.p5.libvirt_lxc_helper.script.example/5.txt + + - command: /bin/sh + environment: + inherit: false + payload: {B: true, C: 6} # A will be removed, B will be inherited as "B", C will be added as "6" + input: env | sort > /.p5.libvirt_lxc_helper.script.example/6.txt + + - command: /bin/sh + environment: + inherit: true + payload: + A: false # will be removed + B: 7 # will be 
replaced to "7" + C: 7 # will be added as "7" + input: env | sort > /.p5.libvirt_lxc_helper.script.example/7.txt + + - command: + - tar + - --create + - --directory=/.p5.libvirt_lxc_helper.script.example + - --file=/.p5.libvirt_lxc_helper.script.example.tar + - . + + - command: rm --force --recursive /.p5.libvirt_lxc_helper.script.example + + - command: /bin/sh -xe + input: | + rm --force /etc/hostname && echo nginx > /etc/hostname + rm --force /etc/resolv.conf && ln --symbolic /run/systemd/resolve/stub-resolv.conf /etc/resolv.conf + + rm --force /etc/hosts && cat > /etc/hosts << EOF + 127.0.0.1 localhost + + # The following lines are desirable for IPv6 capable hosts + ::1 localhost ip6-localhost ip6-loopback + ff02::1 ip6-allnodes + ff02::2 ip6-allrouters + + ::1 nginx + 127.0.0.1 nginx + EOF diff --git a/examples/nginx/docker/files/etc/apt/sources.list.d/debian.sources b/examples/nginx/docker/files/etc/apt/sources.list.d/debian.sources new file mode 100644 index 0000000..c564d48 --- /dev/null +++ b/examples/nginx/docker/files/etc/apt/sources.list.d/debian.sources @@ -0,0 +1,11 @@ +Types: deb deb-src +URIs: http://deb.debian.org/debian +Suites: bullseye bullseye-updates bullseye-proposed-updates bullseye-backports bullseye-backports-sloppy +Components: main contrib non-free +Signed-By: /usr/share/keyrings/debian-archive-keyring.gpg + +Types: deb deb-src +URIs: http://security.debian.org +Suites: bullseye-security +Components: main contrib non-free +Signed-By: /usr/share/keyrings/debian-archive-keyring.gpg diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..5fe3169 --- /dev/null +++ b/setup.py @@ -0,0 +1,72 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" == __name__ + + +def _private(): + import os + import setuptools + import setuptools.command.build_py + + def _make_long_description(): + with open(os.path.join(os.path.dirname(__file__), "README.md"), "r") as _stream: return _stream.read() + + def _make_packages(): + 
_packages = setuptools.find_namespace_packages(where = "src") + _packages.remove("p5") + return _packages + + class _Generators(object): + @classmethod + def get(cls): return tuple() + + class _Commands(object): + @staticmethod + def build_py(): + # noinspection PyShadowingNames + class _Result(setuptools.command.build_py.build_py): + def run(self): + # noinspection PyNoneFunctionAssignment + _original_result = super().run() + for _generator in _Generators.get(): _generator(command_interface = self) + return _original_result + + def get_outputs(self, *args, **kwargs): + _original_result = super().get_outputs(*args, **kwargs) + return (type(_original_result))((*_original_result, *[os.path.join(self.build_lib, _generated.path) for _generated in _Generators.get()])) + + return _Result + + def _routine(): setuptools.setup( + name = "p5.libvirt_lxc_helper", + url = "https://github.com/p5-vbnekit/p5-python3-libvirt_lxc_helper", + license = "CC0", + version = "0.0.0", + author = "Nikita Pushchin", + author_email = "vbnekit@gmail.com", + description = "libvirt_lxc helper tool", + long_description = _make_long_description(), + long_description_content_type = "text/markdown", + package_dir = {"": "src"}, + packages = _make_packages(), + cmdclass = { + "build_py": _Commands.build_py() + }, + entry_points = { + "console_scripts": ( + "p5-libvirt_lxc_helper=p5.libvirt_lxc_helper:entry_point", + ), + }, + install_requires = ("python-magic", ), + setup_requires = ("wheel", ) + ) + + class _Result(object): + routine = _routine + + return _Result + + +try: _private().routine() +finally: del _private diff --git a/src/p5/libvirt_lxc_helper/__init__.py b/src/p5/libvirt_lxc_helper/__init__.py new file mode 100644 index 0000000..a515627 --- /dev/null +++ b/src/p5/libvirt_lxc_helper/__init__.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + from . 
_common import module_helpers as _module_helpers_module + + class _Result(object): + module_getter = _module_helpers_module.lazy_attributes.make_getter(dictionary = { + "entry_point": lambda module: getattr(module, "_entry_point").routine + }) + + return _Result + + +_private = _private() + +__all__ = _private.module_getter.keys +__date__ = None +__author__ = None +__version__ = None +__credits__ = None +_fields = tuple() +__bases__ = tuple() + + +def __getattr__(name: str): return _private.module_getter(name = name) diff --git a/src/p5/libvirt_lxc_helper/__main__.py b/src/p5/libvirt_lxc_helper/__main__.py new file mode 100644 index 0000000..6432913 --- /dev/null +++ b/src/p5/libvirt_lxc_helper/__main__.py @@ -0,0 +1,19 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" == __name__ + + +def _private(): + # noinspection PyUnresolvedReferences + from . import entry_point as _entry_point + + class _Result(object): + entry_point = _entry_point + + return _Result + + +_private = _private() +try: _private.entry_point() +finally: del _private diff --git a/src/p5/libvirt_lxc_helper/_actions/__init__.py b/src/p5/libvirt_lxc_helper/_actions/__init__.py new file mode 100644 index 0000000..49c09ce --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_actions/__init__.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + from .. 
_common import module_helpers as _module_helpers_module + + class _Result(object): + module_getter = _module_helpers_module.lazy_attributes.make_getter(dictionary = { + "MakeRootfs": lambda module: module.make_rootfs.Class, + "InstallRootfs": lambda module: module.make_rootfs.Class + }) + + return _Result + + +_private = _private() + +__all__ = _private.module_getter.keys +__date__ = None +__author__ = None +__version__ = None +__credits__ = None +_fields = tuple() +__bases__ = tuple() + + +def __getattr__(name: str): return _private.module_getter(name = name) diff --git a/src/p5/libvirt_lxc_helper/_actions/install_rootfs/__init__.py b/src/p5/libvirt_lxc_helper/_actions/install_rootfs/__init__.py new file mode 100644 index 0000000..beaf931 --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_actions/install_rootfs/__init__.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + from ... _common import module_helpers as _module_helpers_module + + class _Result(object): + module_getter = _module_helpers_module.lazy_attributes.make_getter(dictionary = { + "Class": lambda module: getattr(module, "_class").Class, + "make": lambda module: getattr(module, "_class").make + }) + + return _Result + + +_private = _private() + +__all__ = _private.module_getter.keys +__date__ = None +__author__ = None +__version__ = None +__credits__ = None +_fields = tuple() +__bases__ = tuple() + + +def __getattr__(name: str): return _private.module_getter(name = name) diff --git a/src/p5/libvirt_lxc_helper/_actions/install_rootfs/_class.py b/src/p5/libvirt_lxc_helper/_actions/install_rootfs/_class.py new file mode 100644 index 0000000..bc2b838 --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_actions/install_rootfs/_class.py @@ -0,0 +1,91 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + import typing + import pathlib + import argparse + + from . 
import _logic as _logic_module + from ... import _common as _common_module + + _name = __package__.split(".")[-1] + _name = _name.replace("_", "-") + + _cli_validator_module = _common_module.cli_validator + + _routine = _logic_module.routine + + class _Class(_common_module.Action): + @property + def name(self) -> str: return _name + + def setup_cli(self, parser: argparse.ArgumentParser): + assert isinstance(parser, argparse.ArgumentParser) + + # noinspection PyShadowingNames + @self.__cli_validator.decorator(key = parser.add_argument( + "--dry-mode", required = False, action = "store_true", help = "do not write anything", + dest = f"{self.name}/dry-mode" + ).dest) + def _routine(value: typing.Optional[bool]): + if value is None: return False + assert isinstance(value, bool) + return value + + # noinspection PyShadowingNames + @self.__cli_validator.decorator(key = parser.add_argument( + "-s", "--source", required = False, help = "source (oci image)", + dest = f"{self.name}/source" + ).dest) + def _routine(value: typing.Optional[str]): + if value is not None: + assert isinstance(value, str) + assert value + value, = f"{value}\r\n".splitlines() + assert pathlib.Path(value).resolve(strict = True) + return value + + # noinspection PyShadowingNames + @self.__cli_validator.decorator(key = parser.add_argument( + "-d", "--destination", required = True, help = "lxc container", + dest = f"{self.name}/destination" + ).dest) + def _routine(value: str): + assert isinstance(value, str) + assert value + value, = f"{value}\r\n".splitlines() + return value + + def validate_cli(self, arguments: dict): + assert isinstance(arguments, dict) + self.__cli_validator(arguments, allow_unknown = True) + + def __call__(self, cli: dict): + assert isinstance(cli, dict) + _routine( + dry = cli[f"{self.name}/dry-mode"], + source = cli[f"{self.name}/source"], + destination = cli[f"{self.name}/destination"] + ) + + def __init__(self): + super().__init__() + self.__cli_validator = 
_cli_validator_module.make() + + class _Result(object): + Class = _Class + + return _Result + + +_private = _private() +try: Class = _private.Class +finally: del _private + + +# noinspection PyArgumentList +def make(*args, **kwargs): return Class(*args, **kwargs) diff --git a/src/p5/libvirt_lxc_helper/_actions/install_rootfs/_connection.py b/src/p5/libvirt_lxc_helper/_actions/install_rootfs/_connection.py new file mode 100644 index 0000000..384072c --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_actions/install_rootfs/_connection.py @@ -0,0 +1,233 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + import typing + import asyncio + import libvirt + import xml.etree.ElementTree + + from . import _id_map as _id_map_module + + from ... import _common as _common_module + + _Asynchronizer = _common_module.Asynchronizer + + _normalize_path = _common_module.path.normalize + _make_id_map = _id_map_module.make + _make_id_map_item = _id_map_module.item.make + + def _check_domain(instance: libvirt.virDomain): + assert isinstance(instance, libvirt.virDomain) + instance = instance.isActive() + assert isinstance(instance, int) + if 0 != instance: raise RuntimeError("unexpected container activity detected") + + def _parse_id_map_element(value: xml.etree.ElementTree.Element): + assert isinstance(value, xml.etree.ElementTree.Element) + _start = value.attrib["start"] + assert isinstance(_start, str) + _start = int(_start) + assert 0 <= _start + _target = value.attrib["target"] + _target = int(_target) + assert 0 <= _target + _count = value.attrib["count"] + _count = int(_count) + assert 0 < _count + return _make_id_map_item(container = _start, host = _target, size = _count) + + def _parse_id_map(value: xml.etree.ElementTree.Element): + assert isinstance(value, xml.etree.ElementTree.Element) + _iterator = value.iter(tag = "idmap") + try: value = next(_iterator) + except StopIteration: return None + try: next(_iterator) + except 
StopIteration: pass + else: raise OverflowError() + _user = [_parse_id_map_element(value = _item) for _item in value.iter(tag = "uid")] + _group = [_parse_id_map_element(value = _item) for _item in value.iter(tag = "gid")] + if _user or _group: return _make_id_map(user = _user, group = _group) + return None + + def _parse_filesystem(value: xml.etree.ElementTree.Element): + assert isinstance(value, xml.etree.ElementTree.Element) + _value = None + try: _type = value.attrib["type"] + except KeyError: return None + if "mount" != _type: return None + _iterator = iter(value.iter(tag = "target")) + try: _target = next(_iterator) + except StopIteration: raise KeyError("target") + try: next(_iterator) + except StopIteration: pass + else: raise OverflowError() + try: _target = _target.attrib["dir"] + except KeyError: raise KeyError("target.dir") + assert isinstance(_target, str) + _normalized = _normalize_path(value = _target) + assert _target == _normalized.as_posix() + assert _normalized.is_absolute() + _iterator = iter(value.iter(tag = "source")) + try: _source = next(_iterator) + except StopIteration: raise KeyError("source") + try: next(_iterator) + except StopIteration: pass + else: raise OverflowError() + try: _source = _source.attrib["dir"] + except KeyError: raise KeyError("source.dir") + assert isinstance(_source, str) + _normalized = _normalize_path(value = _source) + assert _source == _normalized.as_posix() + assert _normalized.is_absolute() + return _target, _source + + def _parse_description(value: str): + assert isinstance(value, str) + assert value + value = xml.etree.ElementTree.fromstring(value) + assert isinstance(value, xml.etree.ElementTree.Element) + assert "domain" == value.tag + assert "lxc" == value.attrib["type"] + _id_map = _parse_id_map(value = value) + _iterator = iter(value.iter(tag = "devices")) + value = next(_iterator) + try: next(_iterator) + except StopIteration: pass + else: raise OverflowError() + _targets = set() + _root_directory = None 
+ for _filesystem in value.iter(tag = "filesystem"): + _filesystem = _parse_filesystem(value = _filesystem) + if _filesystem is None: continue + _target, _source = _filesystem + assert _target not in _targets + _targets.add(_target) + if "/" == _target: _root_directory = _source + if _root_directory is None: raise KeyError("domain.devices.filesystem.target.dir: /") + return _id_map, _root_directory + + class _Context(object): + def __init__(self): + super().__init__() + self.path = None + self.id_map = None + self.domain = None + self.connection = None + self.monitoring_task = None + + class _Helper(object): + @property + def asynchronizer(self): return self.__asynchronizer + + async def close_context(self, instance: _Context): + assert isinstance(instance, _Context) + + try: + _monitoring_task = instance.monitoring_task + if _monitoring_task is None: return + _monitoring_task.cancel() + await asyncio.gather(_monitoring_task, return_exceptions = True) + + finally: + _connection = instance.connection + if _connection is not None: await self.__asynchronizer(instance.connection.close) + + def make_domain_monitoring_task(self, instance: libvirt.virDomain): + assert isinstance(instance, libvirt.virDomain) + + _asynchronizer = self.__asynchronizer + + async def _coroutine(): + while True: + await _asynchronizer(lambda: _check_domain(instance = instance)) + await asyncio.sleep(1 / +3.0e+0) + + return asyncio.create_task(_coroutine()) + + def __init__(self, asynchronizer: _Asynchronizer): + super().__init__() + assert isinstance(asynchronizer, _Asynchronizer) + self.__asynchronizer = asynchronizer + + class _Class(object): + @property + def path(self): return None if self.__context is None else self.__context.path + + @property + def id_map(self): return None if self.__context is None else self.__context.id_map + + async def open(self): + assert self.__context is None + _context = _Context() + try: + _context.connection = typing.cast( + libvirt.virConnect, await 
self.__helper.asynchronizer(lambda: libvirt.open("lxc://")) + ) + _context.domain = typing.cast(libvirt.virDomain, await self.__helper.asynchronizer( + lambda: _context.connection.lookupByName(self.__domain) + )) + await self.__helper.asynchronizer(lambda: _check_domain(instance = _context.domain)) + _context.monitoring_task = self.__helper.make_domain_monitoring_task(instance = _context.domain) + + _context.id_map, _context.path = await self.__helper.asynchronizer( + lambda: _parse_description(value = _context.domain.XMLDesc()) + ) + + assert not _context.monitoring_task.done() + + self.__context = _context + + finally: + if self.__context is None: await self.__helper.close_context(instance = _context) + + async def close(self): + _context = self.__context + assert isinstance(_context, _Context) + self.__context = None + await self.__helper.close_context(instance = _context) + + async def __call__(self, task: asyncio.Task): + assert isinstance(task, asyncio.Task) + if task.done(): return await task + _context = self.__context + assert isinstance(_context, _Context) + _monitoring_task = _context.monitoring_task + await asyncio.wait((task, _monitoring_task), return_when = asyncio.FIRST_COMPLETED) + if _monitoring_task.done(): raise RuntimeError("unexpected container activity detected") + return await task + + async def __aenter__(self): return self + + async def __aexit__(self, exception_type, exception_instance, exception_traceback): + _context = self.__context + if _context is None: return + self.__context = None + await self.__helper.close_context(instance = _context) + + def __init__(self, domain: str, asynchronizer: _Asynchronizer): + super().__init__() + assert isinstance(domain, str) + assert domain + assert isinstance(asynchronizer, _Asynchronizer) + _helper = _Helper(asynchronizer = asynchronizer) + self.__helper = _helper + self.__domain = domain + self.__context = None + + class _Result(object): + Class = _Class + + return _Result + + +_private = 
_private() +try: Class = _private.Class +finally: del _private + + +# noinspection PyArgumentList +def make(*args, **kwargs): return Class(*args, **kwargs) diff --git a/src/p5/libvirt_lxc_helper/_actions/install_rootfs/_id_map/__init__.py b/src/p5/libvirt_lxc_helper/_actions/install_rootfs/_id_map/__init__.py new file mode 100644 index 0000000..4e01a41 --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_actions/install_rootfs/_id_map/__init__.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + from .... _common import module_helpers as _module_helpers_module + + class _Result(object): + module_getter = _module_helpers_module.lazy_attributes.make_getter(dictionary = { + "Item": lambda module: module.item.Class, + "Class": lambda module: getattr(module, "_class").Class, + "make": lambda module: getattr(module, "_class").make + }) + + return _Result + + +_private = _private() + +__all__ = _private.module_getter.keys +__date__ = None +__author__ = None +__version__ = None +__credits__ = None +_fields = tuple() +__bases__ = tuple() + + +def __getattr__(name: str): return _private.module_getter(name = name) diff --git a/src/p5/libvirt_lxc_helper/_actions/install_rootfs/_id_map/_class.py b/src/p5/libvirt_lxc_helper/_actions/install_rootfs/_id_map/_class.py new file mode 100644 index 0000000..fc5495d --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_actions/install_rootfs/_id_map/_class.py @@ -0,0 +1,118 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + import typing + + from . import item as _item_module + from . 
import _range as _range_module + + _Item = _item_module.Class + _Range = _range_module.Class + + _make_item = _item_module.make + _make_range = _range_module.make + + class _CollisionError(Exception): pass + + def _make_range_text(value: _Range): + assert isinstance(value, _Range) + return f"{value.offset}:{value.last}" + + def _make_collision_text(first: _Range, second: _Range): + return format(" | ".join(_make_range_text(value = _item) for _item in (first, second))) + + def _is_compatible(first: _Range, second: _Range): + assert isinstance(first, _Range) + assert isinstance(second, _Range) + if first.last < second.offset: return True + return second.last < first.offset + + def _check_collision(first: _Item, second: _Item): + assert isinstance(first, _Item) + assert isinstance(second, _Item) + + try: + _first = _make_range(offset = first.host, size = first.size) + _second = _make_range(offset = second.host, size = second.size) + if not _is_compatible(first = _first, second = _second): raise _CollisionError( + f"host: {_make_collision_text(first = _first, second = _second)}" + ) + + finally: + _first = _make_range(offset = first.container, size = first.size) + _second = _make_range(offset = second.container, size = second.size) + if not _is_compatible(first = _first, second = _second): raise _CollisionError( + f"container: {_make_collision_text(first = _first, second = _second)}" + ) + + def _validate_items(value: typing.Iterable[_Item]): + _collector = list() + + for _first in value: + for _second in value: + if _first is not _second: _check_collision(first = _first, second = _second) + _collector.append(_first) + + _collector.sort(key = lambda _item: _item.container) + + return tuple(_collector) + + def _map(value: int, collector: typing.Iterable[_Item]): + assert isinstance(value, int) + assert 0 <= value + for _candidate in collector: + assert isinstance(_candidate, _Item) + _first = _candidate.container + _last = _first + _candidate.size - 1 + if value < 
_first: continue + if value > _last: continue + return _candidate.host + (value - _first) + raise KeyError(value) + + class _Class(object): + Item = _Item + + def user(self, value: int): + assert isinstance(value, int) + try: return self.__user_cache[value] + except KeyError: pass + _value = _map(value = value, collector = self.__user) + self.__user_cache[value] = _value + return _value + + def group(self, value: int): + assert isinstance(value, int) + try: return self.__group_cache[value] + except KeyError: pass + _value = _map(value = value, collector = self.__group) + self.__group_cache[value] = _value + return _value + + def __init__(self, user: typing.Iterable[_Item], group: typing.Iterable[_Item]): + super().__init__() + user = _validate_items(value = user) + group = _validate_items(value = group) + self.__user = user + self.__group = group + self.__user_cache = dict() + self.__group_cache = dict() + + class _Result(object): + Item = _Item + Class = _Class + + return _Result + + +_private = _private() + +try: Class = _private.Class +finally: del _private + + +# noinspection PyArgumentList +def make(*args, **kwargs): return Class(*args, **kwargs) diff --git a/src/p5/libvirt_lxc_helper/_actions/install_rootfs/_id_map/_range.py b/src/p5/libvirt_lxc_helper/_actions/install_rootfs/_id_map/_range.py new file mode 100644 index 0000000..5310d4c --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_actions/install_rootfs/_id_map/_range.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + class _Class(object): + @property + def offset(self): return self.__offset + + @property + def size(self): return self.__size + + @property + def last(self): return self.__last + + def __init__(self, offset: int, size: int): + super().__init__() + assert isinstance(offset, int) + assert 0 <= offset + assert isinstance(size, int) + assert 0 <= size + assert isinstance(size, int) + assert 0 < size + self.__size = size + 
self.__last = offset + size - 1 + self.__offset = offset + + class _Result(object): + Class = _Class + + return _Result + + +_private = _private() + +try: Class = _private.Class +finally: del _private + + +# noinspection PyArgumentList +def make(*args, **kwargs): return Class(*args, **kwargs) diff --git a/src/p5/libvirt_lxc_helper/_actions/install_rootfs/_id_map/item.py b/src/p5/libvirt_lxc_helper/_actions/install_rootfs/_id_map/item.py new file mode 100644 index 0000000..4823ec1 --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_actions/install_rootfs/_id_map/item.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + class _Class(object): + @property + def container(self): return self.__container + + @property + def host(self): return self.__host + + @property + def size(self): return self.__size + + def __init__(self, container: int, host: int, size: int): + super().__init__() + assert isinstance(container, int) + assert 0 <= container + assert isinstance(host, int) + assert 0 <= host + assert isinstance(size, int) + assert 0 < size + self.__size = size + self.__host = host + self.__container = container + + class _Result(object): + Class = _Class + + return _Result + + +_private = _private() + +try: Class = _private.Class +finally: del _private + + +# noinspection PyArgumentList +def make(*args, **kwargs): return Class(*args, **kwargs) diff --git a/src/p5/libvirt_lxc_helper/_actions/install_rootfs/_installer.py b/src/p5/libvirt_lxc_helper/_actions/install_rootfs/_installer.py new file mode 100644 index 0000000..c4f1f98 --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_actions/install_rootfs/_installer.py @@ -0,0 +1,256 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + import os + import sys + import stat + import typing + import tarfile + import pathlib + import asyncio + import traceback + import contextlib + + from . 
import _id_map as _id_map_module + + from ... import _common as _common_module + + _IdMap = _id_map_module.Class + _TarReader = _common_module.tar.Reader + _Asynchronizer = _common_module.Asynchronizer + + _make_id_map = _id_map_module.make + _normalize_path = _common_module.path.normalize + _make_tar_reader = _common_module.tar.reader.make + _make_tar_writer = _common_module.tar.writer.make + + @contextlib.contextmanager + def _prepare_source(path: typing.Optional[str]): + if path is None: + yield None + return + + assert isinstance(path, str) + assert path + path, = f"{path}\r\n".splitlines() + path = pathlib.Path(path).resolve(strict = True).as_posix() + + with open(path, mode = "rb") as _stream: yield _stream + + def _prepare_destination(path: str, dry: bool): + assert isinstance(path, str) + assert isinstance(dry, bool) + + assert path + path, = f"{path}\r\n".splitlines() + path = pathlib.Path(path).resolve().as_posix() + + if dry: + try: assert not os.listdir(path), f"directory is not empty: {path}" + except AssertionError: + print(traceback.format_exc(), file = sys.stderr, flush = True) + pass + else: + try: os.makedirs(path, exist_ok = False) + except FileExistsError: assert not os.listdir(path), f"directory is not empty: {path}" + + return path + + @contextlib.contextmanager + def _make_fifo(): + _reader, _writer = os.pipe() + + try: + with ( + os.fdopen(_reader, mode = "rb", closefd = True) as _reader, + os.fdopen(_writer, mode = "wb", closefd = True) as _writer + ): yield _reader, _writer + + finally: + if isinstance(_reader, int): + try: os.close(_reader) + finally: + if isinstance(_writer, int): os.close(_writer) + + async def _remap_coroutine( + source: typing.Optional[typing.IO[bytes]], destination: typing.IO[bytes], + id_map: _IdMap, asynchronizer: _Asynchronizer + ): + assert isinstance(id_map, _IdMap) + assert isinstance(asynchronizer, _Asynchronizer) + + if source is None: source = sys.stdin.buffer + + _loop = asyncio.get_running_loop() + assert 
isinstance(_loop, asyncio.AbstractEventLoop) + + _descriptor = await asynchronizer(source.fileno) + assert isinstance(_descriptor, int) + assert 0 <= _descriptor + + @contextlib.asynccontextmanager + async def _wrap_source(): + def _is_possible(): + _mode = os.fstat(_descriptor).st_mode + return stat.S_ISFIFO(_mode) or stat.S_ISSOCK(_mode) or stat.S_ISCHR(_mode) + + if not await asynchronizer(_is_possible): + yield source + return + + _asynchronous_reader = asyncio.StreamReader(loop = _loop) + + with os.fdopen(_descriptor, mode = "rb", closefd = False) as _asynchronous_stream: + await _loop.connect_read_pipe( + lambda: asyncio.StreamReaderProtocol(_asynchronous_reader), + pipe = _asynchronous_stream + ) + + try: + async def _asynchronous_read(size: int): + assert isinstance(size, int) + assert (-1 == size) or (0 < size) + return await _asynchronous_reader.read(size) + + class _Wrapper(object): + @staticmethod + def read(size: int): return asyncio.run_coroutine_threadsafe( + _asynchronous_read(size = size), loop = _loop + ).result() + + @staticmethod + def seekable(): return False + + yield _Wrapper() + assert _asynchronous_reader.at_eof() + + finally: await asynchronizer(_asynchronous_reader.feed_eof) + + def _transform_member(member: tarfile.TarInfo): + assert isinstance(member, tarfile.TarInfo) + _attributes = dict(member.get_info()) + _path = _normalize_path(_attributes.pop("name"), drop_root = True).as_posix() + if member.isdir(): _path = _path.rstrip("/") + if not _path.strip("."): return + member = tarfile.TarInfo(name = _path) + for _key, _value in _attributes.items(): setattr(member, _key, _value) + _uid, _gid = member.uid, member.gid + if 0 == _uid: member.uname = "root" + if 0 == _gid: member.gname = "root" + try: _uid = id_map.user(value = _uid) + except KeyError: pass + else: member.uid = _uid + try: _gid = id_map.group(value = _gid) + except KeyError: pass + else: member.gid = _gid + return member + + async with _wrap_source() as source: + async with ( 
+ asynchronizer(await asynchronizer(lambda: _make_tar_reader(source = source))) as _reader, + asynchronizer(await asynchronizer(lambda: _make_tar_writer(destination = destination))) as _writer + ): + await asynchronizer(_reader.open) + await asynchronizer(_writer.open) + async for _member, _stream in asynchronizer(_reader): + _member = await asynchronizer(lambda: _transform_member(member = _member)) + await asynchronizer(lambda: _writer(member = _member, stream = _stream)) + + @contextlib.asynccontextmanager + async def _make_tar_subprocess(dry: bool, source: typing.Optional[typing.IO[bytes]], destination: str): + assert isinstance(dry, bool) + assert isinstance(destination, str) + assert destination + + if dry: _command = "tar", "--list", "--verbose", "--numeric-owner" + else: _command = "tar", "--extract", "--same-owner", "--numeric-owner", "--same-permissions" + + _subprocess = await asyncio.create_subprocess_exec( + *_command, cwd = destination, stdin = source, stdout = sys.stderr + ) + + try: + # noinspection PyShadowingNames + async def _coroutine(): + await _subprocess.wait() + _exit_code = _subprocess.returncode + assert isinstance(_exit_code, int) + assert 0 == _exit_code + yield _coroutine + + finally: + if _subprocess.returncode is None: + _subprocess.terminate() + try: await asyncio.wait_for(_subprocess.wait(), timeout = +5.0e+0) + finally: + if _subprocess.returncode is None: + try: _subprocess.kill() + except OSError: pass + + async def _coroutine( + dry: bool, + source: typing.Optional[str], + destination: str, + id_map: typing.Optional[_IdMap], + asynchronizer: _Asynchronizer + ): + assert isinstance(dry, bool) + + assert isinstance(asynchronizer, _Asynchronizer) + async with asynchronizer(_prepare_source(path = source)) as source: + destination = await asynchronizer(lambda: _prepare_destination(path = destination, dry = dry)) + + if source is None: + async def _close_stdin(): + def _routine(): + _descriptor = sys.stdin.buffer.fileno() + 
sys.stdin.close() + sys.stdin.buffer.close() + os.close(_descriptor) + await asynchronizer(_routine) + + _source_name = "stdin" + + else: _source_name = source.name + + _dry_prefix = "dry mode:" if dry else "extracting: " + print(f"{_dry_prefix} {_source_name} => {destination}", file = sys.stderr, flush = True) + + if id_map is None: + async with _make_tar_subprocess( + dry = dry, source = source, destination = destination + ) as _tar_subprocess: + if source is None: await _close_stdin() + await _tar_subprocess() + return + + assert isinstance(id_map, _IdMap) + + async with asynchronizer(_make_fifo()) as (_fifo_reader, _fifo_writer): + async with _make_tar_subprocess( + dry = dry, source = _fifo_reader, destination = destination + ) as _tar_subprocess: + await asynchronizer(_fifo_reader.close) + await _remap_coroutine( + source = source, destination = _fifo_writer, + id_map = id_map, asynchronizer = asynchronizer + ) + if source is None: await _close_stdin() + await asynchronizer(_fifo_writer.close) + await _tar_subprocess() + + def _spawn(*args, **kwargs): return asyncio.create_task(_coroutine(*args, **kwargs)) + + class _Result(object): + spawn = _spawn + + return _Result + + +_private = _private() +try: spawn = _private.spawn +finally: del _private diff --git a/src/p5/libvirt_lxc_helper/_actions/install_rootfs/_logic.py b/src/p5/libvirt_lxc_helper/_actions/install_rootfs/_logic.py new file mode 100644 index 0000000..a6f413f --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_actions/install_rootfs/_logic.py @@ -0,0 +1,57 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + import typing + import asyncio + import contextlib + import concurrent.futures + + from . import _installer as _installer_module + + from ... 
import _common as _common_module + + _Asynchronizer = _common_module.asynchronizer.make + + _make_asynchronizer = _common_module.asynchronizer.make + _spawn_installer = _installer_module.spawn + + @contextlib.contextmanager + def _make_thread_asynchronizer(): + _loop = asyncio.get_running_loop() + assert isinstance(_loop, asyncio.AbstractEventLoop) + with concurrent.futures.ThreadPoolExecutor() as _pool: + yield _make_asynchronizer(delegate = lambda delegate: _loop.run_in_executor(_pool, delegate)) + + async def _coroutine(dry: bool, source: typing.Optional[str], destination: str): + assert isinstance(destination, str) + assert destination + + from . import _connection as _connection_module + + with _make_thread_asynchronizer() as _asynchronizer: + async with _connection_module.make(domain = destination, asynchronizer = _asynchronizer) as _connection: + await _connection.open() + _task = _spawn_installer( + dry = dry, source = source, destination = _connection.path, + id_map = _connection.id_map, asynchronizer = _asynchronizer + ) + try: await _connection(_task) + finally: + if not _task.done(): _task.cancel() + await asyncio.gather(_task, return_exceptions = True) + + def _routine(*args, **kwargs): asyncio.run(_coroutine(*args, **kwargs)) + + class _Result(object): + routine = _routine + + return _Result + + +_private = _private() +try: routine = _private.routine +finally: del _private diff --git a/src/p5/libvirt_lxc_helper/_actions/make_rootfs/__init__.py b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/__init__.py new file mode 100644 index 0000000..beaf931 --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/__init__.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + from ... 
_common import module_helpers as _module_helpers_module + + class _Result(object): + module_getter = _module_helpers_module.lazy_attributes.make_getter(dictionary = { + "Class": lambda module: getattr(module, "_class").Class, + "make": lambda module: getattr(module, "_class").make + }) + + return _Result + + +_private = _private() + +__all__ = _private.module_getter.keys +__date__ = None +__author__ = None +__version__ = None +__credits__ = None +_fields = tuple() +__bases__ = tuple() + + +def __getattr__(name: str): return _private.module_getter(name = name) diff --git a/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_class.py b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_class.py new file mode 100644 index 0000000..7f3599d --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_class.py @@ -0,0 +1,118 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + import typing + import pathlib + import argparse + + from . import _logic as _logic_module + from . import _oci_image as _oci_image_module + from ... 
import _common as _common_module + + _Action = _common_module.Action + + _logic_routine = _logic_module.routine + _parse_digest = _oci_image_module.digest.parse + _make_cli_validator = _common_module.cli_validator.make + _validate_manifest_ref = _oci_image_module.meta.validate_manifest_ref + + _name = __package__.split(".")[-1] + _name = _name.replace("_", "-") + + class _Class(_Action): + @property + def name(self) -> str: return _name + + def setup_cli(self, parser: argparse.ArgumentParser): + assert isinstance(parser, argparse.ArgumentParser) + + # noinspection PyShadowingNames + @self.__cli_validator.decorator(key = parser.add_argument( + "-r", "--ref", required = False, help = "ref name", + dest = f"{self.name}/ref" + ).dest) + def _routine(value: typing.Optional[str]): + if value is not None: value = _validate_manifest_ref(value = value) + return value + + # noinspection PyShadowingNames + @self.__cli_validator.decorator(key = parser.add_argument( + "-d", "--digest", required = False, help = "digest", + dest = f"{self.name}/digest" + ).dest) + def _routine(value: typing.Optional[str]): + if value is not None: value = _parse_digest(value = value).value + return value + + # noinspection PyShadowingNames + @self.__cli_validator.decorator(key = parser.add_argument( + "-S", "--source", required = False, help = "source (oci image)", + dest = f"{self.name}/source" + ).dest) + def _routine(value: typing.Optional[str]): + if value is not None: + assert isinstance(value, str) + assert value + value, = f"{value}\r\n".splitlines() + assert pathlib.Path(value).resolve(strict = True) + return value + + # noinspection PyShadowingNames + @self.__cli_validator.decorator(key = parser.add_argument( + "-D", "--destination", required = False, help = "destination (rootfs bundle)", + dest = f"{self.name}/destination" + ).dest) + def _routine(value: typing.Optional[str]): + if value is not None: + assert isinstance(value, str) + assert value + value, = f"{value}\r\n".splitlines() + 
return value + + # noinspection PyShadowingNames + @self.__cli_validator.decorator(key = parser.add_argument( + "-w", "--working-directory", required = False, help = "working-directory", + dest = f"{self.name}/working-directory" + ).dest) + def _routine(value: typing.Optional[str]): + if value is not None: + assert isinstance(value, str) + assert value + value, = f"{value}\r\n".splitlines() + return value + + def validate_cli(self, arguments: dict): + assert isinstance(arguments, dict) + self.__cli_validator(arguments, allow_unknown = True) + + def __call__(self, cli: dict): + assert isinstance(cli, dict) + _logic_routine( + ref = cli[f"{self.name}/ref"], + digest = cli[f"{self.name}/digest"], + source = cli[f"{self.name}/source"], + destination = cli[f"{self.name}/destination"], + working_directory = cli[f"{self.name}/working-directory"] + ) + + def __init__(self): + super().__init__() + self.__cli_validator = _make_cli_validator() + + class _Result(object): + Class = _Class + + return _Result + + +_private = _private() +try: Class = _private.Class +finally: del _private + + +# noinspection PyArgumentList +def make(*args, **kwargs): return Class(*args, **kwargs) diff --git a/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_logic.py b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_logic.py new file mode 100644 index 0000000..51b8eb3 --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_logic.py @@ -0,0 +1,212 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + import os + import sys + import typing + import shutil + import pathlib + import tarfile + import contextlib + import subprocess + + from . import _oci_image as _oci_image_module + from . import _script_processor as _script_processor_module + from . import _temporary_directory as _temporary_directory_module + + from ... 
import _common as _common_module + + _OciImage = _oci_image_module.Class + + _parse_digest = _oci_image_module.digest.parse + _make_oci_image = _oci_image_module.make + _normalize_path = _common_module.path.normalize + _make_tar_reader = _common_module.tar.reader.make + _make_tar_writer = _common_module.tar.writer.make + _parse_manifest_ref = _oci_image_module.meta.parse_manifest_ref + _validate_manifest_ref = _oci_image_module.meta.validate_manifest_ref + _make_script_processor = _script_processor_module.make + _make_temporary_directory = _temporary_directory_module.make + + def _make_digest(ref: typing.Optional[str], digest: typing.Optional[str], image: _OciImage): + assert isinstance(image, _OciImage) + if ref is None: + if digest is None: + image = tuple(image)[-1] + assert isinstance(image, dict) + digest = _parse_digest(value = image["digest"]).value + image = _parse_manifest_ref(manifest = image) + image = f"digest = {digest}" if image is None else f"ref = {image}" + print(f"last manifest will be selected by default, {image}", file = sys.stderr, flush = True) + return digest + return _parse_digest(value = digest).value + ref = _validate_manifest_ref(value = ref) + _manifests = dict() + for _manifest in image: + assert isinstance(_manifest, dict) + _digest = _parse_digest(value = _manifest["digest"]).value + _ref = _parse_manifest_ref(manifest = _manifest) + if _ref is None: continue + _manifests[_ref] = _digest + if digest is None: digest = _manifests[ref] + else: + assert digest == _parse_digest(digest).value + assert digest == _manifests[ref] + return digest + + @contextlib.contextmanager + def _open_source(path: typing.Optional[str], working_directory: str): + assert isinstance(working_directory, str) + assert working_directory + working_directory, = f"{working_directory}\r\n".splitlines() + working_directory = _normalize_path(value = working_directory).as_posix() + working_directory = pathlib.Path(working_directory).resolve(strict = True).as_posix() + + 
if path is not None: + assert isinstance(path, str) + assert path + if os.path.isdir(path): + yield _make_oci_image(source = path) + return + + with _make_tar_reader(source = path) as _reader: + _reader.open() + + if _reader.seekable and ("application/x-tar" == _reader.magic): + yield _make_oci_image(source = _reader) + return + + working_directory = os.path.join(working_directory, "oci") + print(f"extracting source to temporary directory: {working_directory}", file = sys.stderr, flush = True) + os.makedirs(working_directory, exist_ok = False) + + try: + def _extract_member(member: tarfile.TarInfo, stream: typing.Optional[typing.IO[bytes]]): + assert isinstance(member, tarfile.TarInfo) + _member_path = _member.name + assert isinstance(_member_path, str) + _member_path = _normalize_path(value = _member_path, drop_root = True).as_posix() + if not _member_path.strip("."): return + _member_path = os.path.join(working_directory, _member_path) + if member.isdir(): + assert stream is None + os.makedirs(_member_path, exist_ok = True) + return + assert member.isreg() + with open(_member_path, "wb") as _destination: + while True: + _chunk = stream.read(32 * 1024 * 1024) + if not _chunk: break + assert len(_chunk) == _destination.write(_chunk) + + for _member, _stream in _reader: _extract_member(member = _member, stream = _stream) + + _reader.close() + if path is None: + _stdin_descriptor = sys.stdin.buffer.fileno() + sys.stdin.close() + sys.stdin.buffer.close() + os.close(_stdin_descriptor) + + yield _make_oci_image(source = working_directory) + + finally: shutil.rmtree(working_directory) + + def _inspect_bundle(source: _OciImage, digest: str, working_directory: str): + assert isinstance(source, _OciImage) + assert isinstance(digest, str) + assert isinstance(working_directory, str) + assert working_directory + working_directory = os.path.normpath(os.path.abspath(working_directory)) + _collector = [] + os.makedirs(working_directory, exist_ok = False) + try: + _oci_path = 
os.path.join(working_directory, "oci") + print(f"generating fake image in temporary directory: {_oci_path}", file = sys.stderr, flush = True) + os.makedirs(_oci_path, exist_ok = False) + source.install_fake(digest = digest, destination = _oci_path) + _bundle_path = os.path.join(working_directory, "bundle") + print(f"extracting fake rootfs to temporary directory: {_bundle_path}", file = sys.stderr, flush = True) + os.makedirs(_bundle_path, exist_ok = False) + subprocess.check_call( + ("oci-image-tool", "unpack", _oci_path, _bundle_path, "--ref", "name=latest"), + stdin = subprocess.DEVNULL, stdout = sys.stderr + ) + shutil.rmtree(_oci_path) + for _absolute_root, _directories, _files in os.walk(_bundle_path): + _relative_root = pathlib.PurePosixPath(pathlib.Path(_absolute_root).relative_to(_bundle_path).as_posix()) + for _path in _files: _collector.append((_relative_root / _path).as_posix()) + for _path in _directories: _collector.append((_relative_root / _path).as_posix()) + finally: shutil.rmtree(working_directory) + _collector.sort() + return tuple(_collector) + + @contextlib.contextmanager + def _make_destination( + target: typing.Optional[str], + script: typing.Optional[list], + working_directory: str + ): + if script is None: + with _make_tar_writer(destination = target) as _writer: + _writer.open() + yield lambda *args, **kwargs: _writer(*args, **kwargs) + return + + assert isinstance(working_directory, str) + assert working_directory + assert os.path.isdir(working_directory) + working_directory = os.path.join(working_directory, "script") + assert not os.path.exists(working_directory) + + with _make_script_processor( + script = script, destination = target, working_directory = working_directory + ) as _stream: + with _make_tar_writer(destination = _stream) as _writer: + _writer.open() + yield lambda *args, **kwargs: _writer(*args, **kwargs) + + def _routine( + ref: typing.Optional[str], + digest: typing.Optional[str], + source: typing.Optional[str], + 
destination: typing.Optional[str], + working_directory: typing.Optional[str] + ): + with _make_temporary_directory(path = working_directory) as working_directory: + with _open_source(path = source, working_directory = working_directory) as source: + digest = _make_digest(ref = ref, digest = digest, image = source) + with source.open_bundle(digest = digest) as _bundle: + _bundle_script = _bundle.script + try: _bundle_script_path = _bundle_script.pop("path") + except KeyError: _bundle_script_path = None + else: assert isinstance(_bundle_script_path, str) and bool(_bundle_script_path) + _bundle_script = _bundle_script.pop("body") or None + _bundle_items = _inspect_bundle( + source = source, digest = digest, working_directory = os.path.join(working_directory, "fake") + ) + if destination is None: print( + f"writing rootfs tar archive to stdout", file = sys.stderr, flush = True + ) + else: print(f"writing rootfs tar archive file: {destination}", file = sys.stderr, flush = True) + with _make_destination( + target = destination, script = _bundle_script, working_directory = working_directory + ) as destination: + for _path in _bundle_items: + _member, _stream = _bundle(path = _path) + if _bundle_script_path and (_bundle_script_path == _path): continue + destination(member = _member, stream = _stream) + + class _Result(object): + routine = _routine + + return _Result + + +_private = _private() +try: routine = _private.routine +finally: del _private diff --git a/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/__init__.py b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/__init__.py new file mode 100644 index 0000000..00d83e5 --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/__init__.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + from .... 
_common import module_helpers as _module_helpers_module + + class _Result(object): + module_getter = _module_helpers_module.lazy_attributes.make_getter(dictionary = { + "Digest": lambda module: module.digest.Class, + "Class": lambda module: getattr(module, "_class").Class, + "make": lambda module: getattr(module, "_class").make + }) + + return _Result + + +_private = _private() + +__all__ = _private.module_getter.keys +__date__ = None +__author__ = None +__version__ = None +__credits__ = None +_fields = tuple() +__bases__ = tuple() + + +def __getattr__(name: str): return _private.module_getter(name = name) diff --git a/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/_blob_writer.py b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/_blob_writer.py new file mode 100644 index 0000000..2f7cde2 --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/_blob_writer.py @@ -0,0 +1,60 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + import typing + import hashlib + + from . 
import digest as _digest_module + + _Digest = _digest_module.Class + _make_digest = _digest_module.make + + class _Class(object): + @property + def stream(self): return self.__stream + + @property + def digest(self): + if self.__digest is None: self.__digest = _make_digest( + type = "sha256", hash = self.__hash.hexdigest() + ) + assert isinstance(self.__digest, _Digest) + return self.__digest + + def tell(self, *args, **kwargs): return self.__stream.tell(*args, **kwargs) + + def write(self, data: typing.Union[str, bytes]): + if isinstance(data, str): data = data.encode("utf-8") + assert isinstance(data, bytes) + assert data + _size = self.__stream.write(data) + assert isinstance(_size, int) + if 0 < _size: + self.__digest = None + self.__hash.update(data[:_size]) + else: assert 0 == _size + + def __init__(self, stream): + super().__init__() + _hash = hashlib.sha256() + self.__hash = _hash + self.__stream = stream + self.__digest = None + + class _Result(object): + Class = _Class + + return _Result + + +_private = _private() +try: Class = _private.Class +finally: del _private + + +# noinspection PyArgumentList +def make(*args, **kwargs): return Class(*args, **kwargs) diff --git a/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/_class.py b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/_class.py new file mode 100644 index 0000000..6fc811e --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/_class.py @@ -0,0 +1,93 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + import json + import typing + import contextlib + + from . import meta as _meta_module + from . import _fake as _fake_module + from . import bundle as _bundle_module + from . import digest as _digest_module + from . import _source_reader as _source_reader_module + from . 
import _size_guarantee as _size_guarantee_module + + _Digest = _digest_module.Class + _SourceReader = _source_reader_module.Class + + _make_bundle = _bundle_module.make + _parse_digest = _digest_module.parse + _parse_manifest = _meta_module.parse_manifest + _meta_blob_limit = _meta_module.blob_limit + _make_source_reader = _source_reader_module.make + _fake_module_routine = _fake_module.routine + _make_size_guarantee = _size_guarantee_module.make + + def _read_index(source: _SourceReader): + assert isinstance(source, _SourceReader) + with source.open("index.json") as _stream: + with _make_size_guarantee( + source = _stream, limit = _meta_blob_limit + ) as _guarantee: _data = json.load(_guarantee) + assert isinstance(_data, dict) + _data = _data["schemaVersion"], _data["manifests"] + assert isinstance(_data[0], int) + assert 2 == _data[0] + _data = _data[1] + assert isinstance(_data, list) + assert _data + _collector = list(), dict() + for _data in _data: + _data = _parse_manifest(value = _data) + _digest = _data["digest"] + assert _digest not in _collector[1] + _collector[0].append(_data) + _collector[1][_digest] = _data + return tuple(_collector[0]), _collector[1] + + class _Class(object): + @property + def source(self): return self.__source.source + + def __iter__(self): + for _manifest in self.__manifests[0]: yield json.loads(json.dumps(_manifest)) + + @contextlib.contextmanager + def open_bundle(self, digest: typing.Union[str, _Digest]): + if isinstance(digest, _Digest): digest = digest.value + else: digest = _parse_digest(digest).value + with _make_bundle(source = self.__source).open( + manifest = self.__manifests[1][digest] + ) as _bundle: yield _bundle + + def install_fake(self, digest: typing.Union[str, _Digest], destination: str): + if isinstance(digest, _Digest): digest = digest.value + else: digest = _parse_digest(digest).value + return _fake_module_routine( + manifest = self.__manifests[1][digest], source = self.__source, destination = destination + ) + 
+ def __init__(self, source: str): + super().__init__() + source = _make_source_reader(source = source) + _manifests = _read_index(source = source) + self.__source = source + self.__manifests = _manifests + + class _Result(object): + Class = _Class + + return _Result + + +_private = _private() +try: Class = _private.Class +finally: del _private + + +# noinspection PyArgumentList +def make(*args, **kwargs): return Class(*args, **kwargs) diff --git a/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/_fake.py b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/_fake.py new file mode 100644 index 0000000..622ad21 --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/_fake.py @@ -0,0 +1,206 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + import os + import json + import shutil + import tarfile + import tempfile + import contextlib + + from . import meta as _meta_module + from . import digest as _digest_module + from . import _blob_writer as _blob_writer_module + from . import _source_reader as _source_reader_module + from . import _size_guarantee as _size_guarantee_module + + from .... 
import _common as _common_module + + _BlobWriter = _blob_writer_module.Class + _SourceReader = _source_reader_module.Class + + _parse_digest = _digest_module.parse + _normalize_path = _common_module.path.normalize + _meta_blob_limit = _meta_module.blob_limit + _make_blob_writer = _blob_writer_module.make + _parse_manifest_ref = _meta_module.parse_manifest_ref + _make_size_guarantee = _size_guarantee_module.make + + _layer_media_type_begin = "application/vnd.oci.image.layer.v1.tar" + _layer_media_type_minimum_size = len(_layer_media_type_begin) + + def _make_index(digest: str, image: str): + assert isinstance(image, str) + assert image + digest = _parse_digest(value = digest) + return { + "schemaVersion": 2, + "manifests": [{ + "size": os.stat(os.path.join(image, digest.path)).st_size, + "digest": digest.value, + "mediaType": "application/vnd.oci.image.manifest.v1+json", + "annotations": {"org.opencontainers.image.ref.name": "latest"} + }] + } + + def _make_layer_item(source: tarfile.TarInfo): + assert isinstance(source, tarfile.TarInfo) + _path = source.name + assert isinstance(_path, str) + if not _path: return None + _path = _normalize_path(value = _path, drop_root = True).as_posix() + if "." 
== _path: return None + assert _path.strip(".") + _result = tarfile.TarInfo(name = _path) + if tarfile.DIRTYPE == source.type: + _result.mode = 0o755 + _result.type = tarfile.DIRTYPE + else: + _result.mode = 0o644 + _result.type = tarfile.REGTYPE + return _result + + def _validate_blob_writer(writer: _BlobWriter): + assert isinstance(writer, _BlobWriter) + return writer + + @contextlib.contextmanager + def _open_blob_reader(digest: str, source: _SourceReader, size: int): + assert isinstance(source, _SourceReader) + assert isinstance(size, int) + _path = _parse_digest(value = digest).path + with source.open(_path) as _stream: + with _make_size_guarantee(source = _stream, exact = size) as _guarantee: yield _guarantee + + @contextlib.contextmanager + def _open_blob_writer(image: str): + assert isinstance(image, str) + assert image + _descriptor, _temporary_path = tempfile.mkstemp(dir = image) + with os.fdopen(_descriptor, "wb") as _stream: + _writer = _make_blob_writer(stream = _stream) + yield _writer + _digest_path = os.path.join(image, _writer.digest.path) + os.makedirs(os.path.dirname(_digest_path), exist_ok = True) + shutil.move(_temporary_path, _digest_path) + + @contextlib.contextmanager + def _open_layer_reader(digest: str, source: _SourceReader, size: int): + assert isinstance(source, _SourceReader) + assert isinstance(size, int) + with _open_blob_reader(digest = digest, source = source, size = size) as _blob: + with tarfile.open(mode = "r", fileobj = _blob) as _tar: yield _tar + + @contextlib.contextmanager + def _open_layer_writer(image: str): + with _open_blob_writer(image = image) as _blob: + with tarfile.open(mode = "w", fileobj = _blob) as _tar: yield _tar + + def _validate_layer_media_type(value: str): + assert isinstance(value, str) + assert value + assert value.startswith(_layer_media_type_begin) + _value = value[_layer_media_type_minimum_size:] + if not _value: return value + assert "+" == _value[0] + _value = _value[1:] + assert _value in {"gzip", 
"bzip"} + + def _read_bundle(manifest: dict, source: _SourceReader): + assert isinstance(manifest, dict) + _media = manifest["mediaType"] + assert isinstance(_media, str) + assert "application/vnd.oci.image.manifest.v1+json" == _media + _parse_manifest_ref(manifest = manifest) + _size = manifest["size"] + assert isinstance(_size, int) + assert 0 < _size + assert _meta_blob_limit >= _size + with _open_blob_reader( + digest = manifest["digest"], source = source, size = _size + ) as _source_bundle: _meta = json.load(_source_bundle) + assert isinstance(_meta, dict) + _schema = _meta["schemaVersion"] + assert isinstance(_schema, int) + assert 2 == _schema + _config = _meta["config"] + assert isinstance(_config, dict) + _media = _config["mediaType"] + assert isinstance(_media, str) + assert "application/vnd.oci.image.config.v1+json" == _media + _size = _config["size"] + assert isinstance(_size, int) + assert 0 < _size + assert _meta_blob_limit >= _size + _layers = _meta["layers"] + assert isinstance(_layers, list) + assert _layers + with _open_blob_reader( + digest = _config["digest"], source = source, size = _size + ) as _config: _config = json.load(_config) + assert isinstance(_config, dict) + assert _config + return _meta, _config + + def _routine(manifest, source: _SourceReader, destination: str): + assert isinstance(source, _SourceReader) + assert isinstance(destination, str) + if os.path.exists(destination): + assert os.path.isdir(destination) + assert not os.listdir(destination), f"directory is not empty: {destination}" + if isinstance(manifest, bytes): manifest = json.loads(manifest.decode("utf-8")) + elif isinstance(manifest, str): manifest = json.loads(manifest) + elif not isinstance(manifest, dict): manifest = json.load(manifest) + + _source_meta, _destination_config = _read_bundle(manifest = manifest, source = source) + + os.makedirs(os.path.join(destination, "blobs"), exist_ok = True) + + with open(os.path.join(destination, "oci-layout"), "w") as _stream: + 
json.dump({"imageLayoutVersion": "1.0.0"}, _stream) + + _destination_meta: dict = json.loads(json.dumps(_source_meta)) + _destination_meta["layers"].clear() + + with _open_blob_writer(image = destination) as _destination: + json.dump(_destination_config, _destination) + _destination_meta["config"]["size"] = _destination.tell() + _destination_meta["config"]["digest"] = _destination.digest.value + + for _layer in _source_meta["layers"]: + assert isinstance(_layer, dict) + _validate_layer_media_type(value = _layer["mediaType"]) + with _open_layer_reader(digest = _layer["digest"], source = source, size = _layer["size"]) as _source: + assert isinstance(_source, tarfile.TarFile) + with _open_layer_writer(image = destination) as _destination: + assert isinstance(_destination, tarfile.TarFile) + for _source_item in _source: + _destination_item = _make_layer_item(source = _source_item) + if _destination_item is None: continue + _destination.addfile(_destination_item) + _destination = _validate_blob_writer(writer = _destination.fileobj).digest + _destination_meta["layers"].append({ + "size": os.stat(os.path.join(destination, _destination.path)).st_size, + "digest": _destination.value, + "mediaType": "application/vnd.oci.image.layer.v1.tar" + }) + + with _open_blob_writer(image = destination) as _destination: json.dump(_destination_meta, _destination) + _destination_meta: str = _destination.digest.value + + with open(os.path.join(destination, "index.json"), "w") as _stream: + json.dump(_make_index(digest = _destination_meta, image = destination), _stream) + + class _Result(object): + routine = _routine + + return _Result + + +_private = _private() +try: routine = _private.routine +finally: del _private diff --git a/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/_size_guarantee.py b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/_size_guarantee.py new file mode 100644 index 0000000..649a24d --- /dev/null +++ 
b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/_size_guarantee.py @@ -0,0 +1,78 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + import typing + import contextlib + + class _Reader(object): + @property + def left(self): return self.__left + + def tell(self, *args, **kwargs): return self.__stream.tell(*args, **kwargs) + + def read(self, *args, **kwargs): + _chunk = self.__stream.read(*args, **kwargs) + assert isinstance(_chunk, (str, bytes)) + _size = len(_chunk) + if 0 < _size: + assert self.__left >= _size + self.__left -= _size + return _chunk + + def __init__(self, stream, size: int): + super().__init__() + assert isinstance(size, int) + assert 0 < size + self.__left = size + self.__stream = stream + + @contextlib.contextmanager + def _data_wrapper(source: typing.Union[str, bytes], limit: typing.Optional[int], exact: typing.Optional[int]): + assert isinstance(source, (str, bytes)) + _size = len(source) + if exact is None: + assert limit is not None + assert isinstance(limit, int) + assert 0 < limit + assert _size <= limit + else: + assert limit is None + assert isinstance(exact, int) + assert 0 < exact + assert _size == exact + yield source + + @contextlib.contextmanager + def _stream_wrapper(source, limit: typing.Optional[int], exact: typing.Optional[int]): + if exact is None: _helper = _Reader(stream = source, size = limit) + else: + assert limit is None + _helper = _Reader(stream = source, size = exact) + + class _Class(object): + @staticmethod + def read(*args, **kwargs): return _helper.read(*args, **kwargs) + + @staticmethod + def tell(*args, **kwargs): return _helper.tell(*args, **kwargs) + + yield _Class() + assert (exact is None) or (0 == _helper.left) + + def _make(source: typing.Union[str, bytes, object], limit: int = None, exact: int = None): + if isinstance(source, (str, bytes)): return _data_wrapper(source = source, limit = limit, exact = exact) + return _stream_wrapper(source = 
source, limit = limit, exact = exact) + + class _Result(object): + make = _make + + return _Result + + +_private = _private() +try: make = _private.make +finally: del _private diff --git a/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/_source_reader.py b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/_source_reader.py new file mode 100644 index 0000000..6f3c27d --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/_source_reader.py @@ -0,0 +1,100 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + import os + import typing + import weakref + import pathlib + + from .... import _common as _common_module + + _TarReader = _common_module.tar.Reader + + _normalize_path = _common_module.path.normalize + _make_tar_reader = _common_module.tar.reader.make + + def _validate_path(value: str): + assert isinstance(value, str) + assert value.strip(".") + assert value == _normalize_path(value = value, drop_root = True).as_posix() + return value + + class _BackEnd(object): + @staticmethod + def open(path: str): + _validate_path(value = path) + raise NotImplementedError() + + def __init__(self): super().__init__() + + class _DirectoryBackEnd(_BackEnd): + def open(self, path: str): return open(os.path.join(self.__source, _validate_path(value = path)), "rb") + + def __init__(self, source: str): + super().__init__() + assert isinstance(source, str) + assert os.path.isdir(source) + self.__source = source + + class _TarBackEnd(_BackEnd): + def open(self, path: str): + if self.__references is None: return self.__source(path = path) + if 0 == self.__references: self.__source.open() + self.__references += 1 + try: + _stream = self.__source(path = path) + assert _stream is not None + weakref.finalize(_stream, self.__reference_handler) + return _stream + except BaseException: + self.__reference_handler() + raise + + def __reference_handler(self): + assert 0 < self.__references + 
self.__references -= 1 + if 0 < self.__references: return + self.__source.close() + + def __init__(self, source: _TarReader, managed: bool): + super().__init__() + assert isinstance(source, _TarReader) + assert isinstance(managed, bool) + self.__source = source + self.__references = 0 if managed else None + + class _Class(object): + @property + def source(self): return self.__source + + def open(self, path: str): return self.__back_end.open(path = path) + + def __init__(self, source: typing.Union[str, typing.IO[bytes], _TarReader]): + super().__init__() + if isinstance(source, str): + assert source + source = pathlib.Path(source).resolve(strict = True) + if source.is_dir(): _back_end = _DirectoryBackEnd(source = source.as_posix()) + else: _back_end = _TarBackEnd(source = _make_tar_reader(source = source.as_posix()), managed = True) + elif isinstance(source, _TarReader): _back_end = _TarBackEnd(source = source, managed = False) + else: _back_end = _TarBackEnd(source = _make_tar_reader(source), managed = False) + self.__source = source + self.__back_end = _back_end + + class _Result(object): + Class = _Class + + return _Result + + +_private = _private() +try: Class = _private.Class +finally: del _private + + +# noinspection PyArgumentList +def make(*args, **kwargs): return Class(*args, **kwargs) diff --git a/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/bundle/__init__.py b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/bundle/__init__.py new file mode 100644 index 0000000..959e5d1 --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/bundle/__init__.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + from ..... 
_common import module_helpers as _module_helpers_module + + class _Result(object): + module_getter = _module_helpers_module.lazy_attributes.make_getter(dictionary = { + "Class": lambda module: getattr(module, "_class").Class, + "make": lambda module: getattr(module, "_class").make + }) + + return _Result + + +_private = _private() + +__all__ = _private.module_getter.keys +__date__ = None +__author__ = None +__version__ = None +__credits__ = None +_fields = tuple() +__bases__ = tuple() + + +def __getattr__(name: str): return _private.module_getter(name = name) diff --git a/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/bundle/_class.py b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/bundle/_class.py new file mode 100644 index 0000000..8e22e3c --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/bundle/_class.py @@ -0,0 +1,271 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + import os + import json + import typing + import tarfile + import contextlib + + from . import _script as _script_module + from . import _layer_reader as _layer_reader_module + + from .. import meta as _meta_module + from .. import digest as _digest_module + from .. import _source_reader as _source_reader_module + from .. import _size_guarantee as _size_guarantee_module + + from ..... 
import _common as _common_module + + _meta_limit = _meta_module.blob_limit + _normalize_path = _common_module.path.normalize + _parse_meta = _meta_module.parse_bundle + _parse_script = _script_module.parse + _parse_manifest = _meta_module.parse_manifest + _parse_blob_digest = _digest_module.parse + _make_layer_reader = _layer_reader_module.make + _make_size_guarantee = _size_guarantee_module.make + + _LayerReader = _layer_reader_module.Class + _SourceReader = _source_reader_module.Class + + @contextlib.contextmanager + def _open_blob_reader(digest: str, source: _SourceReader, size: int): + assert isinstance(source, _SourceReader) + assert isinstance(size, int) + _path = _parse_blob_digest(value = digest).path + with source.open(_path) as _stream: + with _make_size_guarantee(source = _stream, exact = size) as _guarantee: yield _guarantee + + def _parse_config_environment(value: typing.Iterable[str]): + _collector = dict() + for value in value: + assert isinstance(value, str) + assert value + value = value.split("=") + value = value[0], value[1:] + assert value[0] + assert value[0].strip() == value[0] + assert value[0] not in _collector + assert value[1] + _collector.update({value[0]: "".join(value[1])}) + return _collector + + def _parse_config_script_label(value: dict): + assert isinstance(value, dict) + try: value = value["p5.libvirt_lxc_helper.script"] + except KeyError: return None + assert isinstance(value, str) + if not value: return None + value = _normalize_path(value = value, drop_root = True).as_posix() + assert value.strip(".") + return value + + def _parse_config(value: dict): + assert isinstance(value, dict) + assert value + try: value = value["config"] + except KeyError: return None, None + try: _environment = value["Env"] + except KeyError: _environment = None + else: + assert isinstance(_environment, list) + _environment = _parse_config_environment(value = _environment) + try: _script = value["Labels"] + except KeyError: _script = None + else: 
_script = _parse_config_script_label(value = _script) + return _environment, _script + + def _close_layers(layers: typing.Iterable[_LayerReader]): + layers = list(layers) + + def _routine(): + while layers: + _layer = layers.pop() + try: _layer.close() + finally: _routine() + + _routine() + + def _open_layers(meta: typing.Iterable[dict], source: _SourceReader): + assert isinstance(source, _SourceReader) + _collector = [list(), dict()] + + try: + for _layer in meta: + assert isinstance(_layer, dict) + _media = _layer["mediaType"] + assert isinstance(_media, str) + assert "application/vnd.oci.image.layer.v1.tar+gzip" == _media + _size = _layer["size"] + assert isinstance(_size, int) + assert 0 < _size + _path = _parse_blob_digest(value = _layer["digest"]).path + _reader = source.open(_path) + try: + assert _size == _reader.seek(0, os.SEEK_END) + assert _size == _reader.tell() + assert 0 == _reader.seek(0, os.SEEK_SET) + assert 0 == _reader.tell() + _reader = _make_layer_reader(stream = _reader) + _collector[1].update({_member: _reader for _member in _reader.members}) + except BaseException: + _reader.close() + raise + _actual = set(_collector[1].values()) + _collector[0] = [_layer for _layer in _collector[0] if _layer in _actual] + + finally: _close_layers(layers = _collector[0]) + + return _collector[1] + + def _rebuild_environment(parent: dict, source: dict): + assert isinstance(parent, dict) + assert isinstance(source, dict) + + source = source.copy() + _inherit = source.pop("inherit") + _payload = source.pop("payload") + + try: assert not source + except AssertionError: raise ValueError(source.keys()) + + assert isinstance(_inherit, bool) + assert isinstance(_payload, dict) + + for _key, _value in parent.items(): + assert isinstance(_key, str) + assert _key + assert isinstance(_value, str) + + _collector = dict() + + if _inherit: + def _iteration(key: str, value: typing.Union[bool, str]): + if value is False: _collector.pop(key) + elif isinstance(value, str): 
_collector[key] = value + else: assert value is True + _collector.update(parent) + else: + def _iteration(key: str, value: typing.Union[bool, str]): + if value is True: _collector[key] = parent[key] + elif isinstance(value, str): _collector[key] = value + else: assert value is False + + for _key, _value in _payload.items(): + assert isinstance(_key, str) + assert _key + _iteration(key = _key, value = _value) + + return _collector + + class _Context(object): + def __init__(self): + super().__init__() + self.layers = None + self.script = None + self.config = None + self.manifest = None + + class _Class(object): + @property + def script(self): + return None if self.__context is None else json.loads(json.dumps(self.__context.script)) + + @property + def config(self): + return None if self.__context is None else json.loads(json.dumps(self.__context.config)) + + @property + def manifest(self): + return None if self.__context is None else json.loads(json.dumps(self.__context.manifest)) + + def open(self, manifest: typing.Union[str, dict]): + _context = _Context() + _context.manifest = _parse_manifest(value = manifest) + _size = _context.manifest["size"] + assert isinstance(_size, int) + assert 0 < _size + assert _meta_limit >= _size + assert self.__context is None + with _open_blob_reader( + digest = _context.manifest["digest"], source = self.__source, size = _context.manifest["size"] + ) as _meta: _meta = json.load(_meta) + _meta = _parse_meta(value = _meta) + _context.layers = _meta["layers"] + assert _context.layers + _context.config = _meta["config"] + assert _context.config + with _open_blob_reader( + digest = _context.config["digest"], source = self.__source, size = _context.config["size"] + ) as _context.config: _context.config = json.load(_context.config) + _environment, _script_path = _parse_config(value = _context.config) + _context.layers = _open_layers(meta = _context.layers, source = self.__source) + if _script_path is not None: + try: + _info, 
_context.script = _context.layers[_script_path](_script_path) + assert isinstance(_info, tarfile.TarInfo) + assert _info.isreg() + assert _context.script is not None + with _context.script as _context.script: _context.script = _context.script.read() + _context.script = _parse_script(_context.script) + if _context.script.pop("exclude"): _context.script["path"] = _script_path + _environment = _rebuild_environment( + parent = _environment, source = _context.script.pop("environment") + ) + for _body_item in _context.script["body"]: _body_item["environment"] = _rebuild_environment( + parent = _environment, source = _body_item.pop("environment") + ) + + except BaseException: + _close_layers(layers = tuple(set(_context.layers.values()))) + raise + self.__context = _context + return self + + def close(self): + _context = self.__context + assert isinstance(_context, _Context) + self.__context = None + _close_layers(layers = tuple(set(_context.layers.values()))) + + def __call__(self, path: str): + assert isinstance(path, str) + assert path + path = _normalize_path(value = path, drop_root = True) + assert path.parts[0].strip(".") + path = path.as_posix() + assert isinstance(self.__context, _Context) + return self.__context.layers[path](path = path) + + def __enter__(self): return self + + def __exit__(self, exception_type, exception_instance, exception_traceback): + _context = self.__context + if _context is None: return + self.__context = None + _close_layers(layers = tuple(set(_context.layers.values()))) + + def __init__(self, source: _SourceReader): + super().__init__() + assert isinstance(source, _SourceReader) + self.__source = source + self.__context = None + + class _Result(object): + Class = _Class + + return _Result + + +_private = _private() +try: Class = _private.Class +finally: del _private + + +# noinspection PyArgumentList +def make(*args, **kwargs): return Class(*args, **kwargs) diff --git 
a/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/bundle/_layer_reader.py b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/bundle/_layer_reader.py new file mode 100644 index 0000000..f965508 --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/bundle/_layer_reader.py @@ -0,0 +1,102 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + import tarfile + + from ..... import _common as _common_module + + _normalize_path = _common_module.path.normalize + + def _validate_path(value: str): + assert isinstance(value, str) + assert value + value, = f"{value}\r\n".splitlines() + assert value.strip(".") + assert value == _normalize_path(value = value, drop_root = True).as_posix() + return value + + class _Class(object): + @property + def members(self): return None if self.__members is None else self.__members[1] + + def open(self, stream): + assert stream is not None + + assert self.__stream is None + assert self.__tarfile is None + assert self.__members is None + + _members = set(), list() + _tarfile = tarfile.open(mode = "r", fileobj = stream) + + try: + for _path in _tarfile.getnames(): + _path = _validate_path(value = _path) + assert _path not in _members[0] + _members[0].add(_path) + _members[1].append(_path) + + _members[1].sort() + + except BaseException: + _tarfile.close() + raise + + self.__stream = stream + self.__tarfile = _tarfile + self.__members = _members[0], tuple(_members[1]) + + def close(self): + _stream = self.__stream + _tarfile = self.__tarfile + _members = self.__members + + self.__stream = None + self.__tarfile = None + self.__members = None + + try: _members is not None + finally: + try: + assert isinstance(_tarfile, tarfile.TarFile) + _tarfile.close() + + finally: + assert _stream is not None + _stream.close() + + def __call__(self, path: str): + assert self.__stream is not None + assert self.__members is not None + assert isinstance(self.__tarfile, 
tarfile.TarFile) + assert isinstance(path, str) + assert path in self.__members[0] + _member = self.__tarfile.getmember(path) + assert isinstance(_member, tarfile.TarInfo) + _stream = self.__tarfile.extractfile(_member) if _member.isreg() else None + return _member, _stream + + def __init__(self, stream = None): + super().__init__() + self.__stream = None + self.__tarfile = None + self.__members = None + if stream is not None: self.open(stream) + + class _Result(object): + Class = _Class + + return _Result + + +_private = _private() +try: Class = _private.Class +finally: del _private + + +# noinspection PyArgumentList +def make(*args, **kwargs): return Class(*args, **kwargs) diff --git a/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/bundle/_script.py b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/bundle/_script.py new file mode 100644 index 0000000..a0c6c8a --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/bundle/_script.py @@ -0,0 +1,156 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + import io + import json + import shlex + import magic + import typing + + try: import yaml as _yaml_module + except ImportError: _yaml_module = None + + def _get_magic(source: typing.Union[str, bytes]): + if isinstance(source, str): source = source.strip().encode("utf-8") + assert isinstance(source, bytes) + assert source + source = source.strip() + assert source + return magic.from_buffer(source, mime = True) + + def _parse_environment_payload(value: typing.Union[dict, list]): + if isinstance(value, dict): value = [value] + else: assert isinstance(value, list) + _collector = dict() + for value in value: + assert isinstance(value, dict) + for _key, value in value.items(): + assert _key not in _collector + if not isinstance(value, (str, bool)): + assert isinstance(value, (int, float)) + value = str(value) + _collector[_key] = value + return _collector + + def 
_parse_environment(value: typing.Union[bool, list, dict]): + if isinstance(value, bool): return {"inherit": value, "payload": dict()} + if isinstance(value, list): return {"inherit": True, "payload": _parse_environment_payload(value = value)} + + assert isinstance(value, dict) + value = value.copy() + _may_payload = dict() + + try: _inherit = value.pop("inherit") + except KeyError: _inherit = None + else: + if not isinstance(_inherit, bool): + _may_payload["inherit"] = str(_inherit) + _inherit = None + + try: _payload = value.pop("payload") + except KeyError: _payload = None + else: + if not isinstance(_payload, (list, dict)): + _may_payload["payload"] = str(_payload) + _payload = None + + if (_payload is None) and (_inherit is None): + _may_payload.update(value) + return {"inherit": True, "payload": _parse_environment_payload(value = _may_payload)} + + try: assert not value + except AssertionError: raise ValueError(value.keys()) + + try: assert not _may_payload + except AssertionError: raise ValueError(_may_payload.keys()) + + return { + "inherit": True if _inherit is None else _inherit, + "payload": dict() if _payload is None else _parse_environment_payload(value = _payload) + } + + def _parse_body_command(value: typing.Union[str, list]): + if isinstance(value, str): + assert value + value, = f"{value}\r\n".splitlines() + value = shlex.split(value, comments = False, posix = True) + assert value + return value + + assert isinstance(value, list) + assert isinstance(value[0], str) and bool(value[0]) + for _item in value: assert isinstance(_item, str) + return value + + def _parse_body_item(value: typing.Union[str, list, dict]): + if isinstance(value, (str, list)): return { + "command": _parse_body_command(value = value), "environment": _parse_environment(value = True) + } + + assert isinstance(value, dict) + value = value.copy() + + _command = _parse_body_command(value.pop("command")) + + try: _environment = value.pop("environment") + except KeyError: _environment = 
True + _environment = _parse_environment(value = _environment) + + try: _input = value.pop("input") + except KeyError: _input = None + else: assert isinstance(_input, str) + + try: assert not value + except AssertionError: raise ValueError(value.keys()) + + _value = {"command": _command, "environment": _environment} + if _input: _value["input"] = _input + + return _value + + def _parse_body(value: list): + assert isinstance(value, list) + return tuple([_parse_body_item(value = value) for value in value]) + + def _parse_parts( + body: list, + exclude: bool, + environment: typing.Union[bool, dict] + ): + body = _parse_body(value = body) + assert isinstance(exclude, bool) + environment = _parse_environment(value = environment) + return dict(body = body, exclude = exclude, environment = environment) + + def _parse(value: typing.Union[bytes, str, dict]): + if not isinstance(value, dict): + _magic = _get_magic(source = value) + if isinstance(value, bytes): value = value.decode("utf-8") + if "application/json" == _magic: value = json.loads(value) + else: + assert _yaml_module is not None + _generator = _yaml_module.safe_load_all(io.StringIO(value)) + value = next(_generator) + try: next(_generator) + except StopIteration: pass + else: raise OverflowError() + assert isinstance(value, dict) + + _value = {"body": list(), "exclude": True, "environment": True} + _value.update(value) + + return _parse_parts(**_value) + + class _Result(object): + parse = _parse + + return _Result + + +_private = _private() +try: parse = _private.parse +finally: del _private diff --git a/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/digest.py b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/digest.py new file mode 100644 index 0000000..f10d7d3 --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/digest.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + import re + import 
pathlib + + _invalid_pattern = re.compile("[^a-z0-9]") + + def _validate_part(value: str): + assert isinstance(value, str) + assert value + assert _invalid_pattern.match(value) is None + + class _Class(object): + @property + def type(self): return self.__type + + @property + def hash(self): return self.__hash + + @property + def path(self): return self.__path + + @property + def value(self): return self.__value + + @classmethod + def parse(cls, value: str): + assert isinstance(value, str) + _type, _hash = value.split(":") + return cls(type = _type, hash = _hash) + + # noinspection PyShadowingBuiltins + def __init__(self, type: str, hash: str): + super().__init__() + _validate_part(value = type) + _validate_part(value = hash) + _path = pathlib.PurePosixPath(type, hash) + assert not _path.is_absolute() + _path = pathlib.PurePosixPath("blobs") / _path + _path = _path.as_posix() + _value = f"{type}:{hash}" + self.__type = type + self.__hash = hash + self.__path = _path + self.__value = _value + + def _parse(*args, **kwargs): return _Class.parse(*args, **kwargs) + + class _Result(object): + Class = _Class + parse = _parse + + return _Result + + +_private = _private() +try: + Class = _private.Class + parse = _private.parse +finally: del _private + + +# noinspection PyArgumentList +def make(*args, **kwargs): return Class(*args, **kwargs) diff --git a/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/meta.py b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/meta.py new file mode 100644 index 0000000..7d9b03a --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_oci_image/meta.py @@ -0,0 +1,92 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + import json + import typing + + from . 
import digest as _digest_module + + _parse_digest = _digest_module.parse + + _blob_limit = 32 * 1024 * 1024 + + def _validate_manifest_ref(value: str): + assert isinstance(value, str) + assert value + value, = f"{value}\r\n".splitlines() + return value + + def _parse_manifest_ref(manifest: dict): + assert isinstance(manifest, dict) + try: _value = manifest["annotations"] + except KeyError: return None + assert isinstance(_value, dict) + try: _value = _value["org.opencontainers.image.ref.name"] + except KeyError: return None + return _validate_manifest_ref(value = _value) + + def _validate_manifest(value: dict): + assert isinstance(value, dict) + _media = value["mediaType"] + assert isinstance(_media, str) + assert "application/vnd.oci.image.manifest.v1+json" == _media + assert _parse_digest(value = value["digest"]).value + _size = value["size"] + assert isinstance(_size, int) + assert 0 < _size + _parse_manifest_ref(manifest = value) + return value + + def _validate_bundle(value: dict): + assert isinstance(value, dict) + _schema = value["schemaVersion"] + assert isinstance(_schema, int) + assert 2 == _schema + _config = value["config"] + assert isinstance(_config, dict) + _media = _config["mediaType"] + assert isinstance(_media, str) + assert "application/vnd.oci.image.config.v1+json" == _media + _size = _config["size"] + assert isinstance(_size, int) + assert 0 < _size + assert _blob_limit >= _size + assert isinstance(value["layers"], list) + return value + + def _parse_manifest(value: typing.Union[bytes, str, dict]): + if isinstance(value, bytes): value = json.loads(value.decode("utf-8")) + elif isinstance(value, str): value = json.loads(value) + return _validate_manifest(value = value) + + def _parse_bundle(value: typing.Union[bytes, str, dict]): + if isinstance(value, bytes): value = json.loads(value.decode("utf-8")) + elif isinstance(value, str): value = json.loads(value) + return _validate_bundle(value = value) + + class _Result(object): + blob_limit = 
_blob_limit + parse_bundle = _parse_bundle + parse_manifest = _parse_manifest + parse_manifest_ref = _parse_manifest_ref + validate_bundle = _validate_bundle + validate_manifest = _validate_manifest + validate_manifest_ref = _validate_manifest_ref + + return _Result + + +_private = _private() +try: + blob_limit = _private.blob_limit + parse_bundle = _private.parse_bundle + parse_manifest = _private.parse_manifest + parse_manifest_ref = _private.parse_manifest_ref + validate_bundle = _private.validate_bundle + validate_manifest = _private.validate_manifest + validate_manifest_ref = _private.validate_manifest_ref +finally: del _private diff --git a/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_script_processor.py b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_script_processor.py new file mode 100644 index 0000000..8ccaa9a --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_script_processor.py @@ -0,0 +1,203 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +if "__main__" == __name__: + def _private(): + import os + import sys + import json + import subprocess + + def _main_action_routine(payload: dict): + assert isinstance(payload, dict) + assert payload + import io + import shutil + payload = payload.copy() + _script = payload.pop("script") + _source = payload.pop("source") + _executor = payload.pop("executor") + _directory = payload.pop("directory") + assert isinstance(_script, list) + assert isinstance(_source, int) + assert isinstance(_executor, list) + assert isinstance(_directory, str) + assert _script and _executor and _directory + assert 2 < _source + try: assert not payload + except AssertionError: raise OverflowError() + with io.StringIO() as _chroot_request: + print(json.dumps({"action": "chroot", "payload": _script}), file = _chroot_request, flush = True) + _chroot_request = _chroot_request.getvalue().encode("utf-8") + print(f"extracting rootfs to temporary directory: {_directory}", file = sys.stderr, flush = True) + 
os.makedirs(_directory, exist_ok = False) + try: + subprocess.check_call( + ("tar", "--extract", "--same-permissions"), cwd = _directory, stdin = _source, stdout = sys.stderr + ) + with os.fdopen(_source, "rb", closefd = True) as _source: + try: assert not _source.read(1) + except AssertionError: raise OverflowError() + _chroot_request = subprocess.run( + _executor, cwd = _directory, input = _chroot_request, stdout = sys.stderr + ).returncode + assert 0 == _chroot_request + _content = os.listdir(_directory) + if _content: + _content.sort() + subprocess.check_call(("tar", "--create", "--same-permissions", "--", *_content), cwd = _directory) + return + sys.stdout.buffer.flush() + sys.stdout.buffer.close() + finally: shutil.rmtree(_directory) + + def _chroot_action_routine(payload: list): + assert isinstance(payload, list) + assert payload + import shlex + _directory = os.path.realpath(".") + os.chroot(_directory) + for _index, payload in enumerate(payload, start = 1): + assert isinstance(payload, dict) + payload = payload.copy() + _command = payload.pop("command") + _environment = payload.pop("environment") + try: _input = payload.pop("input") + except KeyError: _input = None + else: assert isinstance(_input, str) + try: assert not payload + except AssertionError: raise OverflowError() + assert isinstance(_command, list) + assert isinstance(_environment, dict) + assert _command + assert isinstance(_command[0], str) + assert _command[0] + for _item in _command: assert isinstance(_item, str) + for _item in _environment.items(): + assert isinstance(_item[0], str) + assert _item[0] + assert isinstance(_item[1], str) + _options = {"env": _environment, "stdout": sys.stderr} + if _input is None: _options["input"] = bytes() + else: _options["input"] = _input.encode("utf-8") + print(f"processing command #{_index}: {shlex.join(_command)}", file = sys.stderr, flush = True) + assert 0 == subprocess.run(_command, **_options).returncode + + def _routine(): + _input = 
sys.stdin.readline() + try: assert not sys.stdin.buffer.read(1) + except AssertionError: raise OverflowError() + assert isinstance(_input, str) + assert _input + _input = _input.strip() + assert _input + _input = json.loads(_input) + assert isinstance(_input, dict) + _action = _input.pop("action") + _payload = _input.pop("payload") + try: assert not _input + except AssertionError: raise OverflowError() + assert isinstance(_action, str) + if "main" == _action: return _main_action_routine(payload = _payload) + assert "chroot" == _action + _chroot_action_routine(payload = _payload) + + class _Result(object): + routine = _routine + + return _Result + + try: _private().routine() + finally: del _private + +else: + def _private(): + import io + import os + import sys + import json + import typing + import pathlib + import contextlib + import subprocess + + _executor = sys.executable, "-m", __name__ + + @contextlib.contextmanager + def _make_destination(target: typing.Optional[str]): + if target is None: + yield None + return + assert isinstance(target, str) + assert target + target = pathlib.Path(target).resolve() + assert target.parent.resolve(strict = True).is_dir() + with open(target.as_posix(), "wb") as target: + yield target + target.flush() + + @contextlib.contextmanager + def _make_fifo(): + _reader, _writer = os.pipe() + try: + with ( + os.fdopen(_reader, "rb", closefd = True) as _reader, + os.fdopen(_writer, "wb", closefd = True) as _writer + ): yield _reader, _writer + finally: + if isinstance(_reader, int): + try: os.close(_reader) + finally: + if isinstance(_writer, int): os.close(_writer) + + @contextlib.contextmanager + def _make(script: list, destination: typing.Optional[str], working_directory: str): + assert isinstance(script, list) + assert isinstance(working_directory, str) + assert script and working_directory + + with ( + _make_fifo() as (_fifo_reader, _fifo_writer), + _make_destination(target = destination) as destination + ): + with io.StringIO() as 
_request: + print(json.dumps({"action": "main", "payload": { + "script": script, "source": _fifo_reader.fileno(), + "executor": _executor, "directory": working_directory + }}), file = _request, flush = True) + _request = _request.getvalue().encode("utf-8") + + with subprocess.Popen( + ("unshare", "--map-auto", "--map-root-user", "--", *_executor), + stdin = subprocess.PIPE, stdout = destination, pass_fds = (_fifo_reader.fileno(), ) + ) as _subprocess: + try: + _subprocess.stdin.write(_request) + _subprocess.stdin.flush() + _subprocess.stdin.close() + _fifo_reader.close() + yield _fifo_writer + _fifo_writer.flush() + _fifo_writer.close() + _subprocess.wait() + + finally: + try: + try: + if _subprocess.returncode is None: + _subprocess.terminate() + _subprocess.wait(timeout = 15) + + finally: + if _subprocess.returncode is None: _subprocess.kill() + + finally: assert 0 == _subprocess.returncode + + class _Result(object): + make = _make + + return _Result + + _private = _private() + try: make = _private.make + finally: del _private diff --git a/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_temporary_directory.py b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_temporary_directory.py new file mode 100644 index 0000000..58560a9 --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_actions/make_rootfs/_temporary_directory.py @@ -0,0 +1,76 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + import os + import sys + import typing + import shutil + import tempfile + import traceback + + _my_prefix = ".".join(__package__.split(".")[:2]) + _my_prefix = f"{_my_prefix}.tmp." 
+ + def _make_dirs(path: str): + assert isinstance(path, str) + assert path + path = os.path.normpath(os.path.abspath(path)) + assert not os.path.exists(path) + _collector = [] + while not os.path.exists(path): + _collector.append(path) + path = os.path.dirname(path) + assert _collector + os.makedirs(_collector[0], exist_ok = False) + return tuple(_collector) + + class _Class(object): + @property + def path(self): return self.__path + + @property + def location(self): return self.__temporary[0] if self.__temporary else None + + def __enter__(self): + assert self.__temporary is None + if self.__path is None: self.__temporary = tempfile.mkdtemp(prefix = _my_prefix), + elif os.path.exists(self.__path): self.__temporary = tempfile.mkdtemp(prefix = _my_prefix, dir = self.__path), + else: self.__temporary = _make_dirs(path = self.__path) + return self.__temporary[0] + + def __exit__(self, exception_type, exception_instance, exception_traceback): + _temporary = self.__temporary + assert _temporary is not None + self.__temporary = None + _temporary = list(_temporary) + assert _temporary + try: + shutil.rmtree(_temporary.pop(0)) + while _temporary: os.rmdir(_temporary.pop(0)) + except OSError: print(traceback.format_exc(), file = sys.stderr, flush = True) + + def __init__(self, path: typing.Optional[str]): + super().__init__() + if path is not None: + assert isinstance(path, str) + assert path + self.__path = path + self.__temporary = None + + class _Result(object): + Class = _Class + + return _Result + + +_private = _private() +try: Class = _private.Class +finally: del _private + + +# noinspection PyArgumentList +def make(*args, **kwargs): return Class(*args, **kwargs) diff --git a/src/p5/libvirt_lxc_helper/_common/__init__.py b/src/p5/libvirt_lxc_helper/_common/__init__.py new file mode 100644 index 0000000..ed4897b --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_common/__init__.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != 
__name__ + + +def _private(): + from . import module_helpers as _module_helpers_module + + class _Result(object): + module_getter = _module_helpers_module.lazy_attributes.make_getter(dictionary = { + "Action": lambda module: module.action.Class, + "CliValidator": lambda module: module.cli_validator.Class, + "PlatformInfo": lambda module: module.platform_info.Class, + "Asynchronizer": lambda module: module.asynchronizer.Class + }) + + return _Result + + +_private = _private() + +__all__ = _private.module_getter.keys +__date__ = None +__author__ = None +__version__ = None +__credits__ = None +_fields = tuple() +__bases__ = tuple() + + +def __getattr__(name: str): return _private.module_getter(name = name) diff --git a/src/p5/libvirt_lxc_helper/_common/action.py b/src/p5/libvirt_lxc_helper/_common/action.py new file mode 100644 index 0000000..3b7f0ac --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_common/action.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + import argparse + + class _Class(object): + @property + def name(self) -> str: raise NotImplementedError() + + @staticmethod + def setup_cli(parser: argparse.ArgumentParser): assert isinstance(parser, argparse.ArgumentParser) + + @staticmethod + def validate_cli(arguments: dict): assert isinstance(arguments, dict) + + def __call__(self, cli: dict): assert isinstance(cli, dict) + + class _Result(object): + Class = _Class + + return _Result + + +_private = _private() +try: Class = _private.Class +finally: del _private diff --git a/src/p5/libvirt_lxc_helper/_common/asynchronizer.py b/src/p5/libvirt_lxc_helper/_common/asynchronizer.py new file mode 100644 index 0000000..2f21444 --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_common/asynchronizer.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + import typing + + _delegate_type_hint = typing.Callable[[typing.Callable], 
typing.Awaitable] + + class _Wrapper(object): + @property + def target(self): return self.__target + + @property + def delegate(self): return self.__delegate + + def __await__(self): return self.__delegate(self.__target).__await__() + + async def __aiter__(self): + _iterator = await self.__delegate(lambda: iter(self.__target)) + + class _End(Exception): pass + + def _next(): + try: return next(_iterator) + except StopIteration: raise _End() + + while True: + try: yield await self.__delegate(_next) + except _End: break + + async def __aenter__(self): return await self.__delegate(self.__target.__enter__) + + async def __aexit__(self, *args, **kwargs): + return await self.__delegate(lambda: self.__target.__exit__(*args, **kwargs)) + + def __init__(self, target, delegate: _delegate_type_hint): + super().__init__() + self.__target = target + self.__delegate = delegate + + class _Class(object): + @property + def delegate(self): return self.__delegate + + def __call__(self, target): return _Wrapper(target = target, delegate = self.__delegate) + + def __init__(self, delegate: _delegate_type_hint): + super().__init__() + self.__delegate = delegate + + class _Result(object): + Class = _Class + + return _Result + + +_private = _private() +try: Class = _private.Class +finally: del _private + + +# noinspection PyArgumentList +def make(*args, **kwargs): return Class(*args, **kwargs) diff --git a/src/p5/libvirt_lxc_helper/_common/cli_validator.py b/src/p5/libvirt_lxc_helper/_common/cli_validator.py new file mode 100644 index 0000000..4d82957 --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_common/cli_validator.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + import typing + + class _Class(object): + @property + def keys(self): return self.__dictionary.keys() + + def get(self, key: str): + assert isinstance(key, str) + return self.__dictionary[key] + + def set(self, key: str, validator: typing.Callable): + 
assert isinstance(key, str) and bool(key) + if validator is None: self.__dictionary.pop(key) + assert callable(validator) + self.__dictionary[key] = validator + + def decorator(self, key: str): + assert isinstance(key, str) and bool(key) + + # noinspection PyShadowingNames + def _decorator(validator: typing.Callable): + # noinspection PyShadowingNames + def _routine(*args, **kwargs): + try: return validator(*args, **kwargs) + except BaseException: raise ValueError(key) + + self.set(key = key, validator = _routine) + + return _decorator + + def __call__(self, arguments: dict, allow_unknown = False): + assert isinstance(arguments, dict) + assert isinstance(allow_unknown, bool) + _keys = set(self.__dictionary.keys()).copy() + for _key, _value in arguments.items(): + try: _validator = self.__dictionary[_key] + except KeyError: + if allow_unknown: continue + raise + _validator(value = _value) + _keys.remove(_key) + try: assert not _keys + except AssertionError: raise ValueError(f"keys not found: {_keys}") + + def __init__(self): + super().__init__() + self.__dictionary = dict() + + class _Result(object): + Class = _Class + + return _Result + + +_private = _private() +try: Class = _private.Class +finally: del _private + + +# noinspection PyArgumentList +def make(*args, **kwargs): return Class(*args, **kwargs) diff --git a/src/p5/libvirt_lxc_helper/_common/module_helpers/__init__.py b/src/p5/libvirt_lxc_helper/_common/module_helpers/__init__.py new file mode 100644 index 0000000..dcf746d --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_common/module_helpers/__init__.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + from . 
import lazy_attributes as _lazy_attributes_module + + class _Result(object): + module_getter = _lazy_attributes_module.make_getter() + + return _Result + + +_private = _private() + +__all__ = _private.module_getter.keys +__date__ = None +__author__ = None +__version__ = None +__credits__ = None +_fields = tuple() +__bases__ = tuple() + + +def __getattr__(name: str): return _private.module_getter(name = name) diff --git a/src/p5/libvirt_lxc_helper/_common/module_helpers/import_module.py b/src/p5/libvirt_lxc_helper/_common/module_helpers/import_module.py new file mode 100644 index 0000000..61eeb80 --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_common/module_helpers/import_module.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +if "__main__" != __name__: + def _private(): + import sys + import importlib + + _this_module = sys.modules[__name__] + def _routine(*args, **kwargs): return importlib.import_module(*args, **kwargs) + + class _Callable(_this_module.__class__): + def __call__(self, *args, **kwargs): return _routine(*args, **kwargs) + + _this_module.__class__ = _Callable + + class _Result(object): + routine = _routine + + return _Result + + _private = _private() + routine = _private.routine + del _private diff --git a/src/p5/libvirt_lxc_helper/_common/module_helpers/lazy_attributes.py b/src/p5/libvirt_lxc_helper/_common/module_helpers/lazy_attributes.py new file mode 100644 index 0000000..890e185 --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_common/module_helpers/lazy_attributes.py @@ -0,0 +1,151 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +if "__main__" != __name__: + def _private(): + def _make_base(): + import sys + import typing + import inspect + + from . 
import import_module as _import_module + + class _Types(object): + Factory = typing.Callable + ModuleName = str + Dictionary = typing.Dict[typing.Union[str, type(None)], typing.Union[Factory, type(None)]] + + class _ValidateAndMake(object): + @staticmethod + def module_name(value: typing.Union[type(None), _Types.ModuleName], make_default: typing.Callable): + if value is None: return make_default() + assert(isinstance(value, _Types.ModuleName) and bool(value) and (not value.startswith("."))) + return value + + @classmethod + def dictionary(cls, value: typing.Union[type(None), _Types.Dictionary], make_default: typing.Callable): + if value is None: return make_default() + _collector = {_key: cls.__factory(key = _key, value = _value) for _key, _value in dict(value).items()} + for _key, _value in make_default().items(): _collector.setdefault(_key, _value) + return _collector + + @staticmethod + def __factory(key: typing.Union[str, type(None)], value: typing.Union[_Types.Factory, type(None)]): + if key is None: + if value is None: return None + else: assert(isinstance(key, str) and bool(key) and (1 == len(key.split(".")))) + _parameters = inspect.signature(value).parameters + _parameters_count = len(_parameters) + if 1 > _parameters_count: return lambda module, name: value() + if 2 > _parameters_count: + _parameter = next(iter(_parameters.values())) + if _parameter.VAR_KEYWORD == _parameter.kind: return lambda module, name: value(module = module, name = name) + if _parameter.VAR_POSITIONAL == _parameter.kind: return lambda module, name: value(name) + _parameter = _parameter.name + if "name" == _parameter: return lambda module, name: value(name = name) + if "module" == _parameter: return lambda module, name: value(module = module) + return lambda module, name: value() + if 3 > _parameters_count: return lambda module, name: value(module = module, name = name) + raise ValueError("unsupported signature") + + class _Meta(type): + dictionary: _Types.Dictionary = None + 
module_name: _Types.ModuleName = None + + @property + def get(cls): return cls.__getter + + def __init__(cls, *args, **kwargs): + super().__init__(*args, **kwargs) + + import inspect + + _module_name = _ValidateAndMake.module_name(value = cls.module_name, make_default = lambda: cls.__module__) + _module = sys.modules[_module_name] + + def _make_default_dictionary(): + # noinspection PyUnusedLocal + def _factory(module: type(_module), name: str): + # noinspection PyCallingNonCallable + return _import_module(name = "{}.{}".format(_module_name, name)) + return {None: _factory} + + _dictionary = _ValidateAndMake.dictionary(value = cls.dictionary, make_default = _make_default_dictionary) + + try: _default_factory = _dictionary[None] + except KeyError: _default_factory = None + + _recursion_protector = set() + + def _getter(name: str): + assert(isinstance(name, str) and bool(name)) + assert(1 == len(name.split("."))) + if name in _recursion_protector: + try: raise RuntimeError("recursion rejected") + finally: raise AttributeError(name) + _recursion_protector.add(name) + try: + _factory = _dictionary.get(name, _default_factory) + if _factory is None: raise AttributeError(name) + return _factory(module = _module, name = name) + finally: _recursion_protector.remove(name) + + cls.__module = _module + cls.__getter = _getter + + # noinspection PyShadowingNames + class _Result(metaclass = _Meta): + Types = _Types + + return _Result + + _Base = _make_base() + + def _make_class(**kwargs): + _valid_keys = {"dictionary", "module_name"} + for _key in kwargs.keys(): + if not (_key in _valid_keys): raise ValueError("unknown argument: {}".format(_key)) + + def _make_dictionary(): + try: return kwargs["dictionary"] + except KeyError: pass + return Base.dictionary + + def _make_module_name(): + try: return kwargs["module_name"] + except KeyError: pass + import inspect + _frame = inspect.stack()[2] + _module = inspect.getmodule(_frame[0]) + return _module.__name__ + + # noinspection 
PyShadowingNames + class _Result(Base): + dictionary = _make_dictionary() + module_name = _make_module_name() + + return _Result + + def _make_getter(**kwargs): + if not ("module_name" in kwargs): + import inspect + _frame = inspect.stack()[2] + _module = inspect.getmodule(_frame[0]) + kwargs["module_name"] = _module.__name__ + + _class = _make_class(**kwargs) + _class.get.keys = _class.dictionary.keys() if isinstance(_class.dictionary, dict) else tuple() + return _class.get + + class _Result(object): + Base = _Base + make_class = _make_class + make_getter = _make_getter + + return _Result + + _private = _private() + Base = _private.Base + make_class = _private.make_class + make_getter = _private.make_getter + del _private diff --git a/src/p5/libvirt_lxc_helper/_common/path.py b/src/p5/libvirt_lxc_helper/_common/path.py new file mode 100644 index 0000000..c39192b --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_common/path.py @@ -0,0 +1,50 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + import typing + import pathlib + + def _normalize(value: typing.Union[str, pathlib.PurePath], drop_root: bool = False) -> pathlib.PurePath: + assert isinstance(drop_root, bool) + + if isinstance(value, str): + assert value + value, = f"{value}\r\n".splitlines() + _type = pathlib.PurePath + value = _type(value) + else: + _type = type(value) + assert issubclass(_type, pathlib.PurePath) + + _collector = list() + for value in value.parts: + if "." == value: continue + if ".." 
== value: + if _collector: _collector.pop(0) + continue + _collector.append(value) + + value = _type(*_collector) + + if drop_root: + _root = value.root + if isinstance(value, pathlib.PureWindowsPath): + _drive = value.drive + if _root or _drive: value = value.relative_to(f"{_drive}{_root}") + elif value.is_absolute(): value = value.relative_to("/") + + return value + + class _Result(object): + normalize = _normalize + + return _Result + + +_private = _private() +try: normalize = _private.normalize +finally: del _private diff --git a/src/p5/libvirt_lxc_helper/_common/platform_info.py b/src/p5/libvirt_lxc_helper/_common/platform_info.py new file mode 100644 index 0000000..c0ae7f5 --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_common/platform_info.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + import os + import sys + + _parent_package_name = ".".join(__package__.split(".")[:-1]) + + class _Class(object): + nt = "nt" == os.name + tty = sys.stdin.isatty() and sys.stderr.isatty() + program = f"{sys.executable} -m {_parent_package_name}" if ("__main__.py" == os.path.basename(sys.argv[0])) else None + + class _Result(object): + Class = _Class + + return _Result + + +_private = _private() +try: Class = _private.Class +finally: del _private + + +# noinspection PyArgumentList +def make(*args, **kwargs): return Class(*args, **kwargs) diff --git a/src/p5/libvirt_lxc_helper/_common/tar/__init__.py b/src/p5/libvirt_lxc_helper/_common/tar/__init__.py new file mode 100644 index 0000000..8232cbd --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_common/tar/__init__.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + from .. 
import module_helpers as _module_helpers_module + + class _Result(object): + module_getter = _module_helpers_module.lazy_attributes.make_getter(dictionary = { + "Reader": lambda module: module.reader.Class, + "Writer": lambda module: module.writer.Class + }) + + return _Result + + +_private = _private() + +__all__ = _private.module_getter.keys +__date__ = None +__author__ = None +__version__ = None +__credits__ = None +_fields = tuple() +__bases__ = tuple() + + +def __getattr__(name: str): return _private.module_getter(name = name) diff --git a/src/p5/libvirt_lxc_helper/_common/tar/_stream_inspector.py b/src/p5/libvirt_lxc_helper/_common/tar/_stream_inspector.py new file mode 100644 index 0000000..d381716 --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_common/tar/_stream_inspector.py @@ -0,0 +1,73 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + import os + import magic + import typing + + def _is_seekable(stream: typing.IO[bytes]): + try: _seekable = stream.seekable + except AttributeError: return False + if not (callable(_seekable) and stream.seekable()): return False + try: _tell, _seek = stream.tell, stream.seek + except AttributeError: return False + if not (callable(_tell) and callable(_seek)): return False + try: _tell = stream.tell() + except OSError: return False + try: _seek = stream.seek(_tell, os.SEEK_SET) + except OSError: return False + assert 0 == _seek + assert _tell == stream.tell() + return True + + def _read_magic(stream: typing.IO[bytes]): + _offset = stream.tell() + assert isinstance(_offset, int) + assert 0 <= _offset + assert 0 == stream.seek(0) + _magic = magic.from_buffer(stream.read(2048), mime = True) + assert _offset == stream.seek(_offset) + assert _offset == stream.tell() + assert isinstance(_magic, str) + assert _magic.startswith("application/") + return _magic + + class _Class(object): + @property + def stream(self): return self.__stream + + @property + def magic(self): + if 
self.__magic is None: + assert self.seekable + self.__magic = _read_magic(stream = self.__stream) + return self.__magic + + @property + def seekable(self): + if self.__seekable is None: self.__seekable = _is_seekable(stream = self.__stream) + return self.__seekable + + def __init__(self, stream: typing.IO[bytes]): + super().__init__() + self.__magic = None + self.__stream = stream + self.__seekable = None + + class _Result(object): + Class = _Class + + return _Result + + +_private = _private() +try: Class = _private.Class +finally: del _private + + +# noinspection PyArgumentList +def make(*args, **kwargs): return Class(*args, **kwargs) diff --git a/src/p5/libvirt_lxc_helper/_common/tar/reader.py b/src/p5/libvirt_lxc_helper/_common/tar/reader.py new file mode 100644 index 0000000..5bd1654 --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_common/tar/reader.py @@ -0,0 +1,162 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + import sys + import typing + import pathlib + import tarfile + + from . import _stream_inspector as _stream_inspector_module + from .. 
import path as _path_module + + _normalize_path = _path_module.normalize + _make_stream_inspector = _stream_inspector_module.make + + def _make_mapping(tar: tarfile.TarFile): + assert isinstance(tar, tarfile.TarFile) + _collector = dict() + for _name in tar.getnames(): + assert isinstance(_name, str) + assert _name + _normalized = _normalize_path(value = _name, drop_root = True).as_posix() + if not _normalized.strip("."): continue + if _name == _normalized: continue + assert _normalized not in _collector + _collector[_normalized] = _name + return _collector + + class _Class(object): + @property + def state(self): return self.__tarfile is not None + + @property + def magic(self): return self.__magic + + @property + def source(self): return self.__source + + @property + def seekable(self): return self.__seekable + + def open(self): + assert self.__tarfile is None + + _magic = None + _source = self.__source + _stream = None + _tarfile = None + _seekable = False + + try: + if _source is None: _stream = sys.stdin.buffer + elif isinstance(_source, str): + assert _source + _source = _normalize_path(value = pathlib.Path(_source).resolve(strict = True)).as_posix() + _stream = open(_source, "rb") + else: _stream = self.__source + _inspector = _make_stream_inspector(stream = _stream) + _seekable = _inspector.seekable + if _seekable: + _magic = _inspector.magic + _tarfile = tarfile.open(mode = "r", fileobj = _stream) + else: _tarfile = tarfile.open(mode = "r|", fileobj = _stream) + + except BaseException: + if _stream is not None: + try: + if _tarfile is not None: _tarfile.close() + finally: + if isinstance(self.__source, str): _stream.close() + raise + + self.__magic = _magic + self.__stream = _stream + self.__tarfile = _tarfile + self.__seekable = _seekable + + def close(self): + _stream, _tarfile = self.__stream, self.__tarfile + + self.__magic = None + self.__stream = None + self.__tarfile = None + self.__mapping = None + self.__seekable = None + + try: + assert 
isinstance(_tarfile, tarfile.TarFile) + _tarfile.close() + finally: + assert _stream is not None + if isinstance(self.__source, str): _stream.close() + + def __iter__(self): + assert isinstance(self.__tarfile, tarfile.TarFile) + for _member in self.__tarfile: + _path = _member.name + if not _path: continue + _path = _normalize_path(value = _path, drop_root = True).as_posix() + if not _path.strip("."): continue + if not _member.isreg(): + _member.name = _member.path = _path + yield _member, None + continue + with self.__tarfile.extractfile(_member) as _stream: + _member.name = _member.path = _path + yield _member, _stream + + def __call__(self, path: str): + assert isinstance(path, str) + assert path + path = _normalize_path(value = path, drop_root = True).as_posix() + assert path.strip(".") + assert isinstance(self.__tarfile, tarfile.TarFile) + try: _stream = self.__tarfile.extractfile(path) + except KeyError: + if self.__mapping is None: self.__mapping = _make_mapping(tar = self.__tarfile) + _stream = self.__tarfile.extractfile(self.__mapping[path]) + assert _stream is not None + return _stream + + def __enter__(self): return self + + def __exit__(self, exception_type, exception_instance, exception_traceback): + _stream, _tarfile = self.__stream, self.__tarfile + + self.__magic = None + self.__stream = None + self.__tarfile = None + self.__mapping = None + self.__seekable = None + + try: + if _tarfile is not None: _tarfile.close() + finally: + if (_stream is not None) and isinstance(self.__source, str): _stream.close() + + def __init__(self, source: typing.Union[str, typing.IO[bytes]] = None): + super().__init__() + self.__magic = None + self.__stream = None + self.__source = source + self.__tarfile = None + self.__mapping = None + self.__seekable = None + + class _Result(object): + Class = _Class + + return _Result + + +_private = _private() +try: Class = _private.Class +finally: del _private + + +# noinspection PyArgumentList +def make(*args, **kwargs): return 
Class(*args, **kwargs) diff --git a/src/p5/libvirt_lxc_helper/_common/tar/writer.py b/src/p5/libvirt_lxc_helper/_common/tar/writer.py new file mode 100644 index 0000000..1df448d --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_common/tar/writer.py @@ -0,0 +1,164 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + import os + import sys + import typing + import tarfile + import pathlib + + from . import _stream_inspector as _stream_inspector_module + from .. import path as _path_module + + _normalize_path = _path_module.normalize + _make_stream_inspector = _stream_inspector_module.make + + _destination_type_hint = typing.Union[str, typing.IO[bytes]] + + def _write_data(data: bytes, stream: typing.IO[bytes]): + assert isinstance(data, bytes) + assert data + _size = stream.write(data) + if _size is None: _size = len(data) + else: + assert isinstance(_size, int) + assert _size == len(data) + return _size + + class _SeekableStreamWrapper(object): + def tell(self): return self.__stream.tell() + + def flush(self): return self.__stream.flush() + + @staticmethod + def seekable(): return True + + def seek(self, offset: int, whence: int = os.SEEK_SET): + assert isinstance(offset, int) + assert isinstance(whence, int) + if os.SEEK_SET == whence: assert 0 <= offset + else: assert whence in {os.SEEK_CUR, os.SEEK_END} + return self.__stream.seek(offset, whence) + + def write(self, data: bytes): return _write_data(data = data, stream = self.__stream) + + def __init__(self, stream: typing.IO[bytes]): + super().__init__() + self.__stream = stream + + class _NonSeekableStreamWrapper(object): + def tell(self): return self.__offset + + def flush(self): return self.__stream.flush() + + @staticmethod + def seekable(): return False + + def write(self, data: bytes): + _size = _write_data(data = data, stream = self.__stream) + self.__offset += _size + return _size + + def __init__(self, stream: typing.IO[bytes]): + super().__init__() + 
self.__offset = 0 + self.__stream = stream + + def _validate_member_path(value: str): + assert isinstance(value, str) + assert value.strip(".") + assert value == _normalize_path(value = value, drop_root = True).as_posix() + return value + + def _open(destination: typing.Optional[_destination_type_hint]): + _stream, _tarfile = None, None + try: + if isinstance(destination, str): + destination, = f"{destination}\r\n".splitlines() + destination = pathlib.Path(destination).resolve().as_posix() + _stream = open(destination, "wb") + else: _stream = sys.stdout.buffer if destination is None else destination + _inspector = _make_stream_inspector(stream = _stream) + _wrapper = _SeekableStreamWrapper if _inspector.seekable else _NonSeekableStreamWrapper + _wrapper = typing.cast(typing.IO[bytes], _wrapper(stream = _stream)) + _tarfile = tarfile.open(mode = "w", fileobj = _wrapper) + except BaseException: + try: + if _tarfile is not None: _tarfile.close() + finally: + if (_stream is not None) and isinstance(destination, str): _stream.close() + raise + return _stream, _tarfile + + class _Class(object): + @property + def destination(self): return self.__destination + + def open(self): + assert self.__stream is None + self.__stream, self.__tarfile = _open(destination = self.__destination) + + def close(self): + _stream, _tarfile = self.__stream, self.__tarfile + + self.__stream = None + self.__tarfile = None + + try: + assert isinstance(_tarfile, tarfile.TarFile) + _tarfile.close() + finally: + assert _stream is not None + if isinstance(self.__destination, str): _stream.close() + + def __call__(self, member: tarfile.TarInfo, stream: typing.Optional[typing.IO[bytes]]): + assert isinstance(member, tarfile.TarInfo) + if member.isreg(): assert stream is not None + else: assert stream is None + _dictionary = dict(member.get_info()) + _path = _dictionary.pop("name") + if tarfile.DIRTYPE == member.type: _path = _path.rstrip("/") + member = tarfile.TarInfo(name = 
_validate_member_path(value = _path)) + for _key, _value in _dictionary.items(): setattr(member, _key, _value) + if 0 == member.uid: member.uname = "root" + if 0 == member.gid: member.gname = "root" + assert isinstance(self.__tarfile, tarfile.TarFile) + self.__tarfile.addfile(member, stream) + + def __enter__(self): return self + + def __exit__(self, exception_type, exception_instance, exception_traceback): + _stream, _tarfile = self.__stream, self.__tarfile + + self.__stream = None + self.__tarfile = None + + try: + if _tarfile is not None: _tarfile.close() + finally: + if (_stream is not None) and isinstance(self.__destination, str): _stream.close() + + def __init__(self, destination: _destination_type_hint = None): + super().__init__() + assert (destination is None) or destination + self.__stream = None + self.__tarfile = None + self.__destination = destination + + class _Result(object): + Class = _Class + + return _Result + + +_private = _private() +try: Class = _private.Class +finally: del _private + + +# noinspection PyArgumentList +def make(*args, **kwargs): return Class(*args, **kwargs) diff --git a/src/p5/libvirt_lxc_helper/_entry_point/__init__.py b/src/p5/libvirt_lxc_helper/_entry_point/__init__.py new file mode 100644 index 0000000..e7d787e --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_entry_point/__init__.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + from .. 
_common import module_helpers as _module_helpers_module + + class _Result(object): + module_getter = _module_helpers_module.lazy_attributes.make_getter(dictionary = { + "routine": lambda module: getattr(module, "_routine").routine + }) + + return _Result + + +_private = _private() + +__all__ = _private.module_getter.keys +__date__ = None +__author__ = None +__version__ = None +__credits__ = None +_fields = tuple() +__bases__ = tuple() + + +def __getattr__(name: str): return _private.module_getter(name = name) diff --git a/src/p5/libvirt_lxc_helper/_entry_point/_any_key_waiter.py b/src/p5/libvirt_lxc_helper/_entry_point/_any_key_waiter.py new file mode 100644 index 0000000..7ed4aa3 --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_entry_point/_any_key_waiter.py @@ -0,0 +1,75 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + import sys + + from .. import _common as _common_module + + _platform_info = _common_module.platform_info.make() + + def _routine(): + _prompt_state = False + + try: + if _platform_info.nt: + import msvcrt + _prompt_state = True + print("press any key for exit", end = "", flush = True, file = sys.stderr) + msvcrt.getch() + return + + import termios + _descriptor = sys.stdin.fileno() + + try: + _old_tty_attributes = termios.tcgetattr(_descriptor) + _new_tty_attributes = termios.tcgetattr(_descriptor) + _new_tty_attributes[3] = _new_tty_attributes[3] & ~termios.ICANON & ~termios.ECHO + termios.tcsetattr(_descriptor, termios.TCSANOW, _new_tty_attributes) + except termios.error: return + + _prompt_state = True + print("press any key for exit", end = "", flush = True, file = sys.stderr) + + try: sys.stdin.read(1) + except IOError: pass + finally: termios.tcsetattr(_descriptor, termios.TCSAFLUSH, _old_tty_attributes) + finally: + if _prompt_state: print("", flush = True, file = sys.stderr) + + class _Class(object): + @property + def condition(self): return self.__condition + + @condition.setter + 
def condition(self, value: bool): + assert isinstance(value, bool) + if value: assert _platform_info.tty + self.__condition = value + + def __call__(self): + if self.__condition: _routine() + + def __init__(self, condition: bool = False): + assert isinstance(condition, bool) + if condition: assert _platform_info.tty + super().__init__() + self.__condition = condition + + class _Result(object): + Class = _Class + + return _Result + + +_private = _private() +try: Class = _private.Class +finally: del _private + + +# noinspection PyArgumentList +def make(*args, **kwargs): return Class(*args, **kwargs) diff --git a/src/p5/libvirt_lxc_helper/_entry_point/_cli_parser.py b/src/p5/libvirt_lxc_helper/_entry_point/_cli_parser.py new file mode 100644 index 0000000..f91ea65 --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_entry_point/_cli_parser.py @@ -0,0 +1,60 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + import argparse + + from .. import _common as _common_module + + _validator = _common_module.cli_validator.make() + _platform_info = _common_module.platform_info.make() + + _backend = argparse.ArgumentParser( + prog = _platform_info.program, + description = "python3 libvirt_lxc helper tool", + exit_on_error = False + ) + + @_validator.decorator(key = _backend.add_argument( + "-w", "--wait-at-exit", action = "store_true", help = "wait for any key at exit [tty mode only]" + ).dest) + def _routine(value: bool): + assert isinstance(value, bool) + if value: assert _platform_info.tty, "tty mode expected" + return value + + _subparsers = _backend.add_subparsers( + title = "action", required = True, dest = "action" + ) + + class _Class(object): + @property + def subparsers(self): return _subparsers + + @staticmethod + def parse(*args, **kwargs): + _known, _unknown = _backend.parse_known_args(*args, **kwargs) + if _unknown: raise ValueError("unrecognized arguments: %s" % " ".join(_unknown)) + _known = vars(_known) + 
_validator(arguments = _known, allow_unknown = True) + return _known + + @staticmethod + def help(): return _backend.format_help() + + class _Result(object): + Class = _Class + + return _Result + + +_private = _private() +try: Class = _private.Class +finally: del _private + + +# noinspection PyArgumentList +def make(*args, **kwargs): return Class(*args, **kwargs) diff --git a/src/p5/libvirt_lxc_helper/_entry_point/_routine.py b/src/p5/libvirt_lxc_helper/_entry_point/_routine.py new file mode 100644 index 0000000..76a1eac --- /dev/null +++ b/src/p5/libvirt_lxc_helper/_entry_point/_routine.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +assert "__main__" != __name__ + + +def _private(): + import sys + import atexit + + from . import _cli_parser as _cli_parser_module + from . import _any_key_waiter as _any_key_waiter_module + + from .. import _common as _common_module + from .. import _actions as _actions_module + + _platform_info = _common_module.platform_info.make() + + _any_key_waiter = _any_key_waiter_module.make(_platform_info.nt and _platform_info.tty) + + def _generate_actions(): + for _module in ( + _actions_module.make_rootfs, + _actions_module.install_rootfs + ): yield _module.make() + + _actions = {_action.name: _action for _action in _generate_actions()} + + def _make_cli_parser(): + _root = _cli_parser_module.make() + _subparsers = _root.subparsers + for _name, _action in _actions.items(): _action.setup_cli(parser = _subparsers.add_parser(_name)) + return _root + + def _routine(): + atexit.register(_any_key_waiter) + _cli_parser = _make_cli_parser() + _help_message = _cli_parser.help() + + try: + _parsed_cli = _cli_parser.parse() + _actions[_parsed_cli["action"]].validate_cli(arguments = _parsed_cli) + + except BaseException as _exception: + if all(( + _platform_info.tty, + (not isinstance(_exception, SystemExit)) or (0 != _exception.code) + )): atexit.register(lambda: print(_help_message, flush = True, file = sys.stderr)) + raise 
+ + try: + del _cli_parser + _any_key_waiter.condition = _parsed_cli["wait_at_exit"] + _actions[_parsed_cli["action"]](cli = _parsed_cli) + + except KeyboardInterrupt: + _any_key_waiter.condition = False + raise + + class _Result(object): + routine = _routine + + return _Result + + +_private = _private() +try: routine = _private.routine +finally: del _private diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..9d6a7ff --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,2 @@ +def pytest_addoption(parser): + parser.addoption("--tests.examples.nginx-source", required = False, action = "append", help = "OCI image path") diff --git a/tests/examples/nginx/entry-point_test.py b/tests/examples/nginx/entry-point_test.py new file mode 100644 index 0000000..e8e3e23 --- /dev/null +++ b/tests/examples/nginx/entry-point_test.py @@ -0,0 +1,274 @@ +import os +import sys +import pytest +import pathlib +import tarfile + + +@pytest.fixture +def local_configuration(request, tmp_path_factory): + import p5.libvirt_lxc_helper as _target_module + + def _parse_command_line(): + _arguments = request.config.getoption("--tests.examples.nginx-source") + assert isinstance(_arguments, list) + for _argument in _arguments: + assert isinstance(_argument, str) + assert _argument + _argument, = f"{_argument}\r\n".splitlines() + yield pathlib.Path(_argument).resolve(strict = True) + + _sources = tuple(_parse_command_line()) + if not _sources: return None + + _temporary = tmp_path_factory.mktemp("data").resolve(strict = True) + + _destinations = _temporary / "destinations" + os.makedirs(_destinations.as_posix(), exist_ok = False) + _destinations = _destinations.resolve(strict = True) + assert _destinations.is_dir() + + _working = _temporary / "working" + assert not _working.exists() + + def _make_index_generator(start: int = 0): + assert isinstance(start, int) + + def _routine(): + _value = start + while True: + yield _value + _value += 1 + + return iter(_routine()) + + 
def _rebuild_sources(): + _index_generator = _make_index_generator() + + for _source in _sources: + yield _source + assert isinstance(_source, pathlib.Path) + with tarfile.open(_source.as_posix(), "r") as _reader: + _directory = _temporary / f"sources/{next(_index_generator)}" + os.makedirs(_directory, exist_ok = False) + _reader.extractall(path = _directory) + assert 2 < len(os.listdir(_directory)) + yield _directory + _path = _temporary / f"sources/{next(_index_generator)}" + with tarfile.open(_path.as_posix(), "w") as _writer: + for _item in os.listdir(_directory): _writer.add(name = _directory / _item, arcname = _item) + yield _path + + for _name in ("", ".", "/"): + _path = _temporary / f"sources/{next(_index_generator)}" + with tarfile.open(_path.as_posix(), "w") as _writer: _writer.add(name = _directory, arcname = _name) + yield _path + + for _compression in ("xz", "gz", "bz2"): + _path = _temporary / f"sources/{next(_index_generator)}" + with tarfile.open(_path.as_posix(), f"w|{_compression}") as _writer: _writer.add( + name = _directory, arcname = _name + ) + yield _path + + _sources = tuple(_rebuild_sources()) + + class _Result(object): + working = _working + sources = _sources + destinations = _destinations + target_module = _target_module + + return _Result + + +def test(local_configuration): + if local_configuration is None: pytest.skip("no one source provided") + + import magic + import shlex + import shutil + import typing + import subprocess + import contextlib + + _oci_image_module = getattr( + getattr(local_configuration.target_module, "_actions").make_rootfs, + "_oci_image" + ) + _make_oci_image = _oci_image_module.make + _parse_digest = _oci_image_module.digest.parse + _validate_manifest = _oci_image_module.meta.validate_manifest + _parse_manifest_ref = _oci_image_module.meta.parse_manifest_ref + del _oci_image_module + + def _make_source_handler(): + _flags = set() + _unique_digests = dict() + _target_module_name = 
local_configuration.target_module.__name__ + _script_example_base_name = f".{_target_module_name}.script.example" + _target_module_entry_point = sys.executable, "-m", _target_module_name + + def _inspect_source(source: str): + assert isinstance(source, str) + _manifests = [_validate_manifest(value = _manifest) for _manifest in _make_oci_image(source = source)] + _manifests.reverse() + _ref = None + _digest = None + _last_digest = None + for _manifest in _manifests: + _last_digest = _parse_digest(value = _manifest["digest"]).value + _ref = _parse_manifest_ref(manifest = _manifest) + if _ref is None: continue + _digest = _last_digest + break + return _ref, (_digest or _last_digest) + + @contextlib.contextmanager + def _make_destination_context(key: int): + assert isinstance(key, int) + assert 0 <= key + + @contextlib.contextmanager + def _make_common_context(path: pathlib.Path): + assert isinstance(path, pathlib.Path) + _archive_path = f"{path.as_posix()}.tar" + yield _archive_path + os.makedirs(path.as_posix(), exist_ok = False) + with tarfile.open(_archive_path, "r|") as _reader: _reader.extractall(path = path.as_posix()) + _script_directory_path = os.path.join(path, _script_example_base_name) + _script_archive_path = f"{_script_directory_path}.tar" + os.makedirs(_script_directory_path, exist_ok = False) + with tarfile.open(_script_archive_path, "r|") as _reader: _reader.extractall( + path = _script_directory_path + ) + os.remove(_script_archive_path) + if not (0 < key): return + subprocess.check_call(( + "diff", "--recursive", "--no-dereference", "--", + (pathlib.PurePosixPath("0/first") / _script_example_base_name).as_posix(), + (path.relative_to(local_configuration.destinations) / _script_example_base_name).as_posix() + ), cwd = local_configuration.destinations, stdout = subprocess.DEVNULL) + + _key_path = local_configuration.destinations / str(key) + _directory_name = "first" + + try: os.makedirs(_key_path.as_posix(), exist_ok = False) + except FileExistsError: 
_directory_name = "next" + + _common_path = _key_path / _directory_name + with _make_common_context(path = _common_path) as _final_path: yield _final_path + + _common_path = _common_path.as_posix() + assert os.listdir(_common_path) + + if "first" == _directory_name: return + + subprocess.check_call( + ("diff", "--recursive", "--no-dereference", "--", "first", "next"), + cwd = _key_path.as_posix(), stdout = subprocess.DEVNULL + ) + + shutil.rmtree(_common_path) + os.remove(_final_path) + + @contextlib.contextmanager + def _make_working_directory_context(): + _path = local_configuration.working.as_posix() + if "existing working directory" in _flags: + assert not local_configuration.working.exists() + yield _path + assert not local_configuration.working.exists() + return + _flags.add("existing working directory") + os.makedirs(_path, exist_ok = False) + yield _path + assert not os.listdir(_path), "working directory is empty after execution" + os.rmdir(_path) + + @contextlib.contextmanager + def _make_routine_context(digest: str): + digest = _parse_digest(value = digest).value + + try: _key = _unique_digests[digest] + except KeyError: + _key = len(_unique_digests) + _unique_digests[digest] = _key + + with ( + _make_destination_context(key = _key) as _destination, + _make_working_directory_context() as _working_directory + ): yield _destination, _working_directory + + def _subprocess_routine(*arguments: typing.Iterable[str]): + arguments = tuple(arguments) + for _argument in arguments: + assert isinstance(_argument, str) + assert _argument + _argument, = f"{_argument}\r\n".splitlines() + _command = *_target_module_entry_point, *arguments + print(f"executing: {shlex.join(_command)}", file = sys.stderr, flush = True) + subprocess.check_call(_command, stdin = subprocess.DEVNULL, stdout = subprocess.DEVNULL) + + def _shell_routine(source: str, digest: str): + for _command in ( + "{command} --source={source} --destination={destination}", + "{command} --source={source} > 
{destination}", + "{command} --destination={destination} < {source}", + "{command} < {source} > {destination}", + "cat < {source} | {command} --destination={destination}", + "cat < {source} | {command} > {destination}", + "{command} --source={source} | cat > {destination}", + "{command} < {source} | cat > {destination}", + "cat < {source} | {command} | cat > {destination}" + ): + with _make_routine_context(digest = digest) as (_destination, _working_directory): + _command = _command.format_map(dict(command = shlex.join(( + *_target_module_entry_point, "make-rootfs", f"--working-directory={_working_directory}" + )), source = shlex.quote(source), destination = shlex.quote(_destination))) + print(f"executing in shell: {_command}", file = sys.stderr, flush = True) + _command = _command.encode("utf-8") + assert 0 == subprocess.run(("sh", "-e"), input = _command, stdout = subprocess.DEVNULL).returncode + + def _routine(source: pathlib.Path): + _is_directory, source = source.is_dir(), source.as_posix() + _ref, _digest = _inspect_source(source = source) + + with _make_routine_context(digest = _digest) as (_destination, _working_directory): _subprocess_routine( + "make-rootfs", f"--source={source}", f"--destination={_destination}", + f"--working-directory={_working_directory}" + ) + + if not ( + ("shell" in _flags) or _is_directory or ("application/x-tar" != magic.from_file(source, mime = True)) + ): + _flags.add("shell") + _shell_routine(source = source, digest = _digest) + + if "digest key" not in _flags: + _flags.add("digest key") + with _make_routine_context(digest = _digest) as (_destination, _working_directory): _subprocess_routine( + "make-rootfs", f"--digest={_digest}", + f"--source={source}", f"--destination={_destination}", + f"--working-directory={_working_directory}" + ) + + if not (("ref key" in _flags) or (_ref is None)): + _flags.add("ref key") + with _make_routine_context(digest = _digest) as (_destination, _working_directory): _subprocess_routine( + 
"make-rootfs", f"--ref={_ref}", + f"--source={source}", f"--destination={_destination}", + f"--working-directory={_working_directory}" + ) + with _make_routine_context(digest = _digest) as (_destination, _working_directory): _subprocess_routine( + "make-rootfs", f"--ref={_ref}", f"--digest={_digest}", + f"--source={source}", f"--destination={_destination}", + f"--working-directory={_working_directory}" + ) + + return _routine + + _handle_source = _make_source_handler() + del _make_source_handler + + for _source in local_configuration.sources: _handle_source(source = _source)