commit aed887b584cccdc22051537beaaa8f3967f0e873 Author: Ximo Guanter Date: Wed Dec 30 22:13:59 2020 +0100 initial payload diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..0ea5172 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,2 @@ +target +.ci diff --git a/.github/workflows/rust-ci.yml b/.github/workflows/rust-ci.yml new file mode 100644 index 0000000..124ae3e --- /dev/null +++ b/.github/workflows/rust-ci.yml @@ -0,0 +1,56 @@ +name: CI +on: + pull_request: + branches: + - master + + push: + branches: + - master + +jobs: + test: + name: Unit tests + runs-on: ubuntu-latest + steps: + - name: Checkout sources + uses: actions/checkout@v2 + + - name: Install Rust stable toolchain + uses: actions-rs/toolchain@v1 + with: + profile: minimal + toolchain: stable + override: true + + - name: Run cargo test + uses: actions-rs/cargo@v1 + with: + command: test + + lints: + name: Lints + runs-on: ubuntu-latest + steps: + - name: Checkout sources + uses: actions/checkout@v2 + + - name: Install stable toolchain + uses: actions-rs/toolchain@v1 + with: + profile: minimal + toolchain: stable + override: true + components: rustfmt, clippy + + - name: Run cargo fmt + uses: actions-rs/cargo@v1 + with: + command: fmt + args: --all -- --check + + - name: Run cargo clippy + uses: actions-rs/clippy-check@v1 + with: + token: ${{ secrets.GITHUB_TOKEN }} + args: --all-features diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..eb5a316 --- /dev/null +++ b/.gitignore @@ -0,0 +1 @@ +target diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 0000000..0d9caec --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,1621 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +[[package]] +name = "addr2line" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c0929d69e78dd9bf5408269919fcbcaeb2e35e5d43e5815517cdc6a8e11a423" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee2a4ec343196209d6594e19543ae87a39f96d5534d7174822a3ad825dd6ed7e" + +[[package]] +name = "aho-corasick" +version = "0.7.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7404febffaa47dac81aa44dba71523c9d069b1bdc50a77db41195149e17f68e5" +dependencies = [ + "memchr", +] + +[[package]] +name = "anyhow" +version = "1.0.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee67c11feeac938fae061b232e38e0b6d94f97a9df10e6271319325ac4c56a86" + +[[package]] +name = "async-stream" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22068c0c19514942eefcfd4daf8976ef1aad84e61539f95cd200c35202f80af5" +dependencies = [ + "async-stream-impl", + "futures-core", +] + +[[package]] +name = "async-stream-impl" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25f9db3b38af870bf7e5cc649167533b493928e50744e2c30ae350230b414670" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "async-trait" +version = "0.1.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d3a45e77e34375a7923b1e8febb049bb011f064714a8e17a1a616fef01da13d" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "atty" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +dependencies = [ + "hermit-abi", + "libc", + "winapi 0.3.9", +] + +[[package]] +name = "autocfg" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" + +[[package]] +name = "backtrace" +version = "0.3.55" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef5140344c85b01f9bbb4d4b7288a8aa4b3287ccef913a14bcc78a1063623598" +dependencies = [ + "addr2line", + "cfg-if 1.0.0", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + +[[package]] +name = "base64" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff" + +[[package]] +name = "bitflags" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" + +[[package]] +name = "block-buffer" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0940dc441f31689269e10ac70eb1002a3a1d3ad1390e030043662eb7fe4688b" +dependencies = [ + "block-padding", + "byte-tools", + "byteorder", + "generic-array", +] + +[[package]] +name = "block-padding" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa79dedbb091f449f1f39e53edf88d5dbe95f895dae6135a8d7b881fb5af73f5" +dependencies = [ + "byte-tools", +] + +[[package]] +name = "buildkit-llb" +version = "0.2.0" +dependencies = [ + "buildkit-proto", + "either", + "failure", + "lazy_static", + "log", + "prost", + "regex", + "serde_json", + "sha2", +] + +[[package]] +name = "buildkit-proto" +version = "0.2.0" +dependencies = [ + "prost", + "prost-types", + "tonic", + "tonic-build", +] + +[[package]] +name = "byte-tools" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" + +[[package]] +name = "byteorder" +version = "1.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de" + +[[package]] +name = "bytes" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e4cec68f03f32e44924783795810fa50a7035d8c8ebe78580ad7e6c703fba38" + +[[package]] +name = "cfg-if" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "crossbeam" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69323bff1fb41c635347b8ead484a5ca6c3f11914d784170b158d8449ab07f8e" +dependencies = [ + "cfg-if 0.1.10", + "crossbeam-channel", + "crossbeam-deque", + "crossbeam-epoch", + "crossbeam-queue", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-channel" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b153fe7cbef478c567df0f972e02e6d736db11affe43dfc9c56a9374d1adfb87" +dependencies = [ + "crossbeam-utils", + "maybe-uninit", +] + +[[package]] +name = "crossbeam-deque" +version = 
"0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f02af974daeee82218205558e51ec8768b48cf524bd01d550abe5573a608285" +dependencies = [ + "crossbeam-epoch", + "crossbeam-utils", + "maybe-uninit", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "058ed274caafc1f60c4997b5fc07bf7dc7cca454af7c6e81edffe5f33f70dace" +dependencies = [ + "autocfg", + "cfg-if 0.1.10", + "crossbeam-utils", + "lazy_static", + "maybe-uninit", + "memoffset", + "scopeguard", +] + +[[package]] +name = "crossbeam-queue" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "774ba60a54c213d409d5353bda12d49cd68d14e45036a285234c8d6f91f92570" +dependencies = [ + "cfg-if 0.1.10", + "crossbeam-utils", + "maybe-uninit", +] + +[[package]] +name = "crossbeam-utils" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8" +dependencies = [ + "autocfg", + "cfg-if 0.1.10", + "lazy_static", +] + +[[package]] +name = "digest" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3d0c8c8752312f9713efd397ff63acb9f85585afbf179282e720e7704954dd5" +dependencies = [ + "generic-array", +] + +[[package]] +name = "dockerfile-plus" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "buildkit-llb", + "buildkit-proto", + "crossbeam", + "either", + "env_logger", + "futures", + "libc", + "mio", + "pin-project 1.0.2", + "prost", + "prost-types", + "regex", + "serde", + "serde_json", + "tokio", + "tonic", + "tower", + "url", +] + +[[package]] +name = "either" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" + +[[package]] +name = "env_logger" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26ecb66b4bdca6c1409b40fb255eefc2bd4f6d135dab3c3124f80ffa2a9661e" +dependencies = [ + "atty", + "humantime", + "log", + "regex", + "termcolor", +] + +[[package]] +name = "failure" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d32e9bd16cc02eae7db7ef620b392808b89f6a5e16bb3497d159c6b92a0f4f86" +dependencies = [ + "backtrace", + "failure_derive", +] + +[[package]] +name = "failure_derive" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa4da3c766cd7a0db8242e326e9e4e081edd567072893ed320008189715366a4" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "fake-simd" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed" + +[[package]] +name = "fixedbitset" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37ab347416e802de484e4d03c7316c48f1ecb56574dfd4a46a80f173ce1de04d" + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "form_urlencoded" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ece68d15c92e84fa4f19d3780f1294e5ca82a78a6d515f1efaabcc144688be00" +dependencies = [ 
+ "matches", + "percent-encoding", +] + +[[package]] +name = "fuchsia-zircon" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82" +dependencies = [ + "bitflags", + "fuchsia-zircon-sys", +] + +[[package]] +name = "fuchsia-zircon-sys" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" + +[[package]] +name = "futures" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b3b0c040a1fe6529d30b3c5944b280c7f0dcb2930d2c3062bca967b602583d0" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b7109687aa4e177ef6fe84553af6280ef2778bdb7783ba44c9dc3399110fe64" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "847ce131b72ffb13b6109a221da9ad97a64cbe48feb1028356b836b47b8f1748" + +[[package]] +name = "futures-executor" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4caa2b2b68b880003057c1dd49f1ed937e38f22fcf6c212188a121f08cf40a65" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "611834ce18aaa1bd13c4b374f5d653e1027cf99b6b502584ff8c9a64413b30bb" + +[[package]] +name = "futures-macro" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77408a692f1f97bcc61dc001d752e00643408fbc922e4d634c655df50d595556" +dependencies = [ + "proc-macro-hack", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "futures-sink" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f878195a49cee50e006b02b93cf7e0a95a38ac7b776b4c4d9cc1207cd20fcb3d" + +[[package]] +name = "futures-task" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c554eb5bf48b2426c4771ab68c6b14468b6e76cc90996f528c3338d761a4d0d" +dependencies = [ + "once_cell", +] + +[[package]] +name = "futures-util" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d304cff4a7b99cfb7986f7d43fbe93d175e72e704a8860787cc95e9ffd85cbd2" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project 1.0.2", + "pin-utils", + "proc-macro-hack", + "proc-macro-nested", + "slab", +] + +[[package]] +name = "generic-array" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c68f0274ae0e023facc3c97b2e00f076be70e254bc851d972503b328db79b2ec" +dependencies = [ + "typenum", +] + +[[package]] +name = "getrandom" +version = "0.1.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc587bc0ec293155d5bfa6b9891ec18a1e330c234f896ea47fbada4cadbe47e6" +dependencies = [ + "cfg-if 0.1.10", + "libc", + "wasi", +] + +[[package]] +name = "gimli" +version = "0.23.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6503fe142514ca4799d4c26297c4248239fe8838d827db6bd6065c6ed29a6ce" + +[[package]] +name = "h2" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e4728fd124914ad25e99e3d15a9361a879f6620f63cb56bbb08f95abb97a535" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", + "tracing-futures", +] + +[[package]] +name = "hashbrown" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7afe4a420e3fe79967a00898cc1f4db7c8a49a9333a29f8a4bd76a253d5cd04" + +[[package]] +name = "heck" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cbf45460356b7deeb5e3415b5563308c0a9b057c85e12b06ad551f98d0a6ac" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "hermit-abi" +version = "0.1.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5aca5565f760fb5b220e499d72710ed156fdb74e631659e99377d9ebfbd13ae8" +dependencies = [ + "libc", +] + +[[package]] +name = "http" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84129d298a6d57d246960ff8eb831ca4af3f96d29e2e28848dae275408658e26" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13d5ff830006f7646652e057693569bfe0d51760c0085a071769d142a205111b" +dependencies = [ + "bytes", + "http", +] + +[[package]] +name = "httparse" +version = "1.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd179ae861f0c2e53da70d892f5f3029f9594be0c41dc5269cd371691b1dc2f9" + +[[package]] +name = "httpdate" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "494b4d60369511e7dea41cf646832512a94e542f68bb9c49e54518e0f468eb47" + +[[package]] +name = "humantime" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c1ad908cc71012b7bea4d0c53ba96a8cba9962f048fa68d143376143d863b7a" + +[[package]] +name = "hyper" +version = "0.13.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6ad767baac13b44d4529fcf58ba2cd0995e36e7b435bc5b039de6f47e880dbf" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa", + "pin-project 1.0.2", + "socket2", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "idna" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02e2673c30ee86b5b96a9cb52ad15718aa1f966f5ab9ad54a8b95d5ca33120a9" +dependencies = [ + "matches", + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "indexmap" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb1fa934250de4de8aef298d81c729a7d33d8c239daa3a7575e6b92bfc7313b" +dependencies = [ + "autocfg", + "hashbrown", +] + +[[package]] +name = "iovec" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2b3ea6ff95e175473f8ffe6a7eb7c00d054240321b84c57051175fe3c1e075e" +dependencies = [ + "libc", +] + +[[package]] +name = "itertools" +version = "0.8.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f56a2d0bc861f9165be4eb3442afd3c236d8a98afd426f65d92324ae1091a484" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd25036021b0de88a0aff6b850051563c6516d0bf53f8638938edbb9de732736" + +[[package]] +name = "kernel32-sys" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" +dependencies = [ + "winapi 0.2.8", + "winapi-build", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "libc" +version = "0.2.81" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1482821306169ec4d07f6aca392a4681f66c75c9918aa49641a2595db64053cb" + +[[package]] +name = "log" +version = "0.4.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fabed175da42fed1fa0746b0ea71f412aa9d35e76e95e59b192c64b9dc2bf8b" +dependencies = [ + "cfg-if 0.1.10", +] + +[[package]] +name = "matches" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08" + +[[package]] +name = "maybe-uninit" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00" + +[[package]] +name = "memchr" +version = "2.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ee1c47aaa256ecabcaea351eae4a9b01ef39ed810004e298d2511ed284b1525" + +[[package]] +name = "memoffset" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "043175f069eda7b85febe4a74abbaeff828d9f8b448515d3151a14a3542811aa" +dependencies = [ + "autocfg", +] + +[[package]] +name = "miniz_oxide" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f2d26ec3309788e423cfbf68ad1800f061638098d76a83681af979dc4eda19d" +dependencies = [ + "adler", + "autocfg", +] + +[[package]] +name = "mio" +version = "0.6.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4afd66f5b91bf2a3bc13fad0e21caedac168ca4c707504e75585648ae80e4cc4" +dependencies = [ + "cfg-if 0.1.10", + "fuchsia-zircon", + "fuchsia-zircon-sys", + "iovec", + "kernel32-sys", + "libc", + "log", + "miow", + "net2", + "slab", + "winapi 0.2.8", +] + +[[package]] +name = "miow" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebd808424166322d4a38da87083bfddd3ac4c131334ed55856112eb06d46944d" +dependencies = [ + "kernel32-sys", + "net2", + "winapi 0.2.8", + "ws2_32-sys", +] + +[[package]] +name = "multimap" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1255076139a83bb467426e7f8d0134968a8118844faa755985e077cf31850333" + +[[package]] +name = "net2" +version = "0.2.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "391630d12b68002ae1e25e8f974306474966550ad82dac6886fb8910c19568ae" +dependencies = [ + "cfg-if 0.1.10", + "libc", + "winapi 0.3.9", +] + +[[package]] +name = "num_cpus" +version = "1.13.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3" +dependencies = [ + "hermit-abi", + "libc", +] + +[[package]] +name = "object" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d3b63360ec3cb337817c2dbd47ab4a0f170d285d8e5a2064600f3def1402397" + +[[package]] +name = "once_cell" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13bd41f508810a131401606d54ac32a467c97172d74ba7662562ebba5ad07fa0" + +[[package]] +name = "opaque-debug" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2839e79665f131bdb5782e51f2c6c9599c133c6098982a54c794358bf432529c" + +[[package]] +name = "percent-encoding" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" + +[[package]] +name = "petgraph" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "467d164a6de56270bd7c4d070df81d07beace25012d5103ced4e9ff08d6afdb7" +dependencies = [ + "fixedbitset", + "indexmap", +] + +[[package]] +name = "pin-project" +version = "0.4.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ffbc8e94b38ea3d2d8ba92aea2983b503cd75d0888d75b86bb37970b5698e15" +dependencies = [ + "pin-project-internal 0.4.27", +] + +[[package]] +name = "pin-project" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ccc2237c2c489783abd8c4c80e5450fc0e98644555b1364da68cc29aa151ca7" +dependencies = [ + "pin-project-internal 1.0.2", +] + +[[package]] +name = "pin-project-internal" +version = "0.4.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65ad2ae56b6abe3a1ee25f15ee605bacadb9a764edaba9c2bf4103800d4a1895" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "pin-project-internal" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8e8d2bf0b23038a4424865103a4df472855692821aab4e4f5c3312d461d9e5f" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "pin-project-lite" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c917123afa01924fc84bb20c4c03f004d9c38e5127e3c039bbf7f4b9c76a2f6b" + +[[package]] +name = "pin-project-lite" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b063f57ec186e6140e2b8b6921e5f1bd89c7356dda5b33acc5401203ca6131c" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "ppv-lite86" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857" + +[[package]] +name = "proc-macro-hack" +version = "0.5.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5" + +[[package]] +name = "proc-macro-nested" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eba180dafb9038b050a4c280019bbedf9f2467b61e5d892dcad585bb57aadc5a" + +[[package]] +name = "proc-macro2" +version 
= "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71" +dependencies = [ + "unicode-xid", +] + +[[package]] +name = "prost" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce49aefe0a6144a45de32927c77bd2859a5f7677b55f220ae5b744e87389c212" +dependencies = [ + "bytes", + "prost-derive", +] + +[[package]] +name = "prost-build" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02b10678c913ecbd69350e8535c3aef91a8676c0773fc1d7b95cdd196d7f2f26" +dependencies = [ + "bytes", + "heck", + "itertools", + "log", + "multimap", + "petgraph", + "prost", + "prost-types", + "tempfile", + "which", +] + +[[package]] +name = "prost-derive" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "537aa19b95acde10a12fec4301466386f757403de4cd4e5b4fa78fb5ecb18f72" +dependencies = [ + "anyhow", + "itertools", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "prost-types" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1834f67c0697c001304b75be76f67add9c89742eda3a085ad8ee0bb38c3417aa" +dependencies = [ + "bytes", + "prost", +] + +[[package]] +name = "quote" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "991431c3519a3f36861882da93630ce66b52918dcf1b8e2fd66b397fc96f28df" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" +dependencies = [ + "getrandom", + "libc", + "rand_chacha", + "rand_core", + "rand_hc", + "rand_pcg", +] + +[[package]] +name = "rand_chacha" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" +dependencies = [ + "getrandom", +] + +[[package]] +name = "rand_hc" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" +dependencies = [ + "rand_core", +] + +[[package]] +name = "rand_pcg" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16abd0c1b639e9eb4d7c50c0b8100b0d0f849be2349829c740fe8e6eb4816429" +dependencies = [ + "rand_core", +] + +[[package]] +name = "redox_syscall" +version = "0.1.57" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce" + +[[package]] +name = "regex" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38cf2c13ed4745de91a5eb834e11c00bcc3709e773173b2ce4c56c9fbde04b9c" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", + "thread_local", +] + +[[package]] +name = "regex-syntax" +version = "0.6.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b181ba2dcf07aaccad5448e8ead58db5b742cf85dfe035e2227f137a539a189" + +[[package]] +name = "remove_dir_all" +version = "0.5.3" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" +dependencies = [ + "winapi 0.3.9", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e3bad0ee36814ca07d7968269dd4b7ec89ec2da10c4bb613928d3077083c232" + +[[package]] +name = "ryu" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e" + +[[package]] +name = "scopeguard" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" + +[[package]] +name = "serde" +version = "1.0.118" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06c64263859d87aa2eb554587e2d23183398d617427327cf2b3d0ed8c69e4800" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.118" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c84d3526699cd55261af4b941e4e725444df67aa4f9e6a3564f18030d12672df" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fceb2595057b6891a4ee808f70054bd2d12f0e97f1cbb78689b59f676df325a" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "sha2" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a256f46ea78a0c0d9ff00077504903ac881a1dafdc20da66545699e7776b3e69" +dependencies = [ + "block-buffer", + "digest", + "fake-simd", + "opaque-debug", +] + +[[package]] +name = "slab" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8" + +[[package]] +name = "socket2" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "122e570113d28d773067fab24266b66753f6ea915758651696b6e35e49f88d6e" +dependencies = [ + "cfg-if 1.0.0", + "libc", + "winapi 0.3.9", +] + +[[package]] +name = "syn" +version = "1.0.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9802ddde94170d186eeee5005b798d9c159fa970403f1be19976d0cfb939b72" +dependencies = [ + "proc-macro2", + "quote", + "unicode-xid", +] + +[[package]] +name = "synstructure" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b834f2d66f734cb897113e34aaff2f1ab4719ca946f9a7358dba8f8064148701" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "unicode-xid", +] + +[[package]] +name = "tempfile" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9" +dependencies = [ + "cfg-if 0.1.10", + "libc", + "rand", + "redox_syscall", + "remove_dir_all", + "winapi 0.3.9", +] + +[[package]] +name = "termcolor" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dfed899f0eb03f32ee8c6a0aabdb8a7949659e3466561fc0adf54e26d88c5f4" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "thread_local" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d40c6d1b69745a6ec6fb1ca717914848da4b44ae29d9b3080cbee91d72a69b14" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "tinyvec" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf8dbc19eb42fba10e8feaaec282fb50e2c14b2726d6301dbfeed0f73306a6f" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" + +[[package]] +name = "tokio" +version = "0.2.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "099837d3464c16a808060bb3f02263b412f6fafcb5d01c533d309985fbeebe48" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "iovec", + "lazy_static", + "memchr", + "mio", + "num_cpus", + "pin-project-lite 0.1.11", + "slab", + "tokio-macros", +] + +[[package]] +name = "tokio-macros" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e44da00bfc73a25f814cd8d7e57a68a5c31b74b3152a0a1d1f590c97ed06265a" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tokio-util" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be8242891f2b6cbef26a2d7e8605133c2c554cd35b3e4948ea892d6d68436499" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "log", + "pin-project-lite 0.1.11", + "tokio", +] + +[[package]] +name = "tonic" +version = "0.3.1" +source = "git+https://github.com/edrevo/tonic?branch=unimplemented-content-type#0dd748458e3860358f49905e60040a23f70c4e8e" +dependencies = [ + "async-stream", + "async-trait", + "base64", + "bytes", + "futures-core", + "futures-util", + "http", + "http-body", + "hyper", + "percent-encoding", + "pin-project 0.4.27", + "prost", + "prost-derive", + "tokio", + "tokio-util", + "tower", + "tower-balance", + "tower-load", + "tower-make", + "tower-service", + "tracing", + "tracing-futures", +] + +[[package]] +name = "tonic-build" +version = "0.3.1" +source = "git+https://github.com/edrevo/tonic?branch=unimplemented-content-type#0dd748458e3860358f49905e60040a23f70c4e8e" +dependencies = [ + "proc-macro2", + "prost-build", + "quote", + "syn", +] + +[[package]] +name = "tower" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd3169017c090b7a28fce80abaad0ab4f5566423677c9331bb320af7e49cfe62" +dependencies = [ + "futures-core", + "tower-buffer", + "tower-discover", + "tower-layer", + "tower-limit", + "tower-load-shed", + "tower-retry", + "tower-service", + "tower-timeout", + "tower-util", +] + +[[package]] +name = "tower-balance" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a792277613b7052448851efcf98a2c433e6f1d01460832dc60bef676bc275d4c" +dependencies = [ + "futures-core", + "futures-util", + "indexmap", + "pin-project 0.4.27", + "rand", + "slab", + "tokio", + "tower-discover", + "tower-layer", + "tower-load", + "tower-make", + "tower-ready-cache", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-buffer" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4887dc2a65d464c8b9b66e0e4d51c2fd6cf5b3373afc72805b0a60bce00446a" +dependencies = [ + "futures-core", + "pin-project 0.4.27", + "tokio", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-discover" +version = "0.3.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f6b5000c3c54d269cc695dff28136bb33d08cbf1df2c48129e143ab65bf3c2a" +dependencies = [ + "futures-core", + "pin-project 0.4.27", + "tower-service", +] + +[[package]] +name = "tower-layer" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a35d656f2638b288b33495d1053ea74c40dc05ec0b92084dd71ca5566c4ed1dc" + +[[package]] +name = "tower-limit" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92c3040c5dbed68abffaa0d4517ac1a454cd741044f33ab0eefab6b8d1361404" +dependencies = [ + "futures-core", + "pin-project 0.4.27", + "tokio", + "tower-layer", + "tower-load", + "tower-service", +] + +[[package]] +name = "tower-load" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8cc79fc3afd07492b7966d7efa7c6c50f8ed58d768a6075dd7ae6591c5d2017b" +dependencies = [ + "futures-core", + "log", + "pin-project 0.4.27", + "tokio", + "tower-discover", + "tower-service", +] + +[[package]] +name = "tower-load-shed" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f021e23900173dc315feb4b6922510dae3e79c689b74c089112066c11f0ae4e" +dependencies = [ + "futures-core", + "pin-project 0.4.27", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-make" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce50370d644a0364bf4877ffd4f76404156a248d104e2cc234cd391ea5cdc965" +dependencies = [ + "tokio", + "tower-service", +] + +[[package]] +name = "tower-ready-cache" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4eabb6620e5481267e2ec832c780b31cad0c15dcb14ed825df5076b26b591e1f" +dependencies = [ + "futures-core", + "futures-util", + "indexmap", + "log", + "tokio", + "tower-service", +] + +[[package]] +name = "tower-retry" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6727956aaa2f8957d4d9232b308fe8e4e65d99db30f42b225646e86c9b6a952" +dependencies = [ + "futures-core", + "pin-project 0.4.27", + "tokio", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-service" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e987b6bf443f4b5b3b6f38704195592cca41c5bb7aedd3c3693c7081f8289860" + +[[package]] +name = "tower-timeout" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "127b8924b357be938823eaaec0608c482d40add25609481027b96198b2e4b31e" +dependencies = [ + "pin-project 0.4.27", + "tokio", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-util" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1093c19826d33807c72511e68f73b4a0469a3f22c2bd5f7d5212178b4b89674" +dependencies = [ + "futures-core", + "futures-util", + "pin-project 0.4.27", + "tower-service", +] + +[[package]] +name = "tracing" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f47026cdc4080c07e49b37087de021820269d996f581aac150ef9e5583eefe3" +dependencies = [ + "cfg-if 1.0.0", + "log", + "pin-project-lite 0.2.0", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"80e0ccfc3378da0cce270c946b676a376943f5cd16aeba64568e7939806f4ada" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tracing-core" +version = "0.1.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f50de3927f93d202783f4513cda820ab47ef17f624b03c096e86ef00c67e6b5f" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "tracing-futures" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab7bb6f14721aa00656086e9335d363c5c8747bae02ebe32ea2c7dece5689b4c" +dependencies = [ + "pin-project 0.4.27", + "tracing", +] + +[[package]] +name = "try-lock" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59547bce71d9c38b83d9c0e92b6066c4253371f15005def0c30d9657f50c7642" + +[[package]] +name = "typenum" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "373c8a200f9e67a0c95e62a4f52fbf80c23b4381c05a17845531982fa99e6b33" + +[[package]] +name = "unicode-bidi" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5" +dependencies = [ + "matches", +] + +[[package]] +name = "unicode-normalization" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a13e63ab62dbe32aeee58d1c5408d35c36c392bba5d9d3142287219721afe606" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-segmentation" +version = "1.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb0d2e7be6ae3a5fa87eed5fb451aff96f2573d2694942e40543ae0bbe19c796" + +[[package]] +name = "unicode-xid" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564" + +[[package]] +name = "url" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5909f2b0817350449ed73e8bcd81c8c3c8d9a7a5d8acba4b27db277f1868976e" +dependencies = [ + "form_urlencoded", + "idna", + "matches", + "percent-encoding", +] + +[[package]] +name = "want" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0" +dependencies = [ + "log", + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.9.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" + +[[package]] +name = "which" +version = "3.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d011071ae14a2f6671d0b74080ae0cd8ebf3a6f8c9589a2cd45f23126fe29724" +dependencies = [ + "libc", +] + +[[package]] +name = "winapi" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-build" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc" + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +dependencies = [ + "winapi 0.3.9", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "ws2_32-sys" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e" +dependencies = [ + "winapi 0.2.8", + "winapi-build", +] diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..30c17a2 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,6 @@ +[workspace] +members = [ + "buildkit-proto", + "buildkit-llb", + "dockerfile-plus", +] diff --git a/LICENSE-APACHE b/LICENSE-APACHE new file mode 100644 index 0000000..16fe87b --- /dev/null +++ b/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/LICENSE-MIT b/LICENSE-MIT new file mode 100644 index 0000000..2533838 --- /dev/null +++ b/LICENSE-MIT @@ -0,0 +1,26 @@ +Copyright (c) 2019 Denys Zariaiev +Copyright (c) 2020 Ximo Guanter + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/README.md b/README.md new file mode 100644 index 0000000..c167cfd --- /dev/null +++ b/README.md @@ -0,0 +1,82 @@ +# Dockerfile+ + +This project provides Dockerfile syntax extensions that have been rejected by the moby project or haven't been addressed in a long time. + +Currently, the project adds an `INCLUDE+` Dockerfile directive that allows you to import the content of another file into your Dockerfile. There are plans to add more features in the near future. + +- [Getting started](#getting-started) +- [Features](#features) + - [INCLUDE+](#include) +- [Roadmap](#roadmap) +- [Feedback](#feedback) + +## Getting started + +First, you need to make sure you are running a compatible version of Docker: +- if you are using Docker 20.10+, you're all set! +- if you are using Docker 18.09+, then you need to export the following environment variable: `DOCKER_BUILDKIT=1` +- if you are using an older version of Docker, you are out of luck. Sorry! + +Once Docker is set up, just add the following line as the first line of your Dockerfile: + +```Dockerfile +# syntax = edrevo/dockerfile-plus +``` + +That's it! + +## Features + +### INCLUDE+ + +Right now there is just one extra instruction: `INCLUDE+`. All Dockerfile+ instructions end with a `+` sign to avoid potential future collisions with standard Dockerfile instructions. + +`INCLUDE+` will import the verbatim contents of another file into your Dockerfile. Here's an example Dockerfile that uses the `INCLUDE+` instruction: + +```Dockerfile +# syntax = edrevo/dockerfile-plus + +FROM alpine + +INCLUDE+ Dockerfile.common + +ENTRYPOINT [ "mybin" ] +``` + +If Dockerfile.common contained a single line that said `RUN echo "Hello World"`, then the resulting Docker image would be identical to the one generated by this Dockerfile: + + +```Dockerfile +FROM alpine + +RUN echo "Hello World" + +ENTRYPOINT [ "mybin" ] +``` + +## Roadmap + +The next features in line would be: + +- `ENVFILE+` command, which would read a .env file and import all of those environment variable definitions into the Dockerfile +- `RUN+ --no-cache`, which would disable the cache only for a specific RUN step (useful for non-idempotent commands, for example those that clone git repos) +- `TAG` command +- improvements to .dockerignore, like recursive dockerignore files + +## Feedback + +Found a bug? Want to contribute a PR? Want to improve documentation or add a cool logo for the project? All contributions are welcome! + +### Development environment + +Install Cargo (you can use [rustup.rs](https://rustup.rs/)) and run: + +```bash +$ cargo build +``` + +### Creating a local release of the Buildkit frontend + +```bash +$ docker build -f dockerfile-plus/Dockerfile . +``` \ No newline at end of file diff --git a/buildkit-llb/CHANGELOG.md b/buildkit-llb/CHANGELOG.md new file mode 100644 index 0000000..e8bb653 --- /dev/null +++ b/buildkit-llb/CHANGELOG.md @@ -0,0 +1,30 @@ +# Changelog +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+ +## [Unreleased] + +## [0.2.0] - 2020-03-04 +### Changed +- Update `buildkit-proto` dependency to use `tonic` for gRPC. + +## [0.1.3] - 2020-01-24 +### Added +- `Mount::OptionalSshAgent` to mount the host SSH agent socket with `docker build --ssh=default`. + +## [0.1.2] - 2019-11-20 +### Added +- `ImageSource::with_tag` method. + +### Changed +- `Source::image` behavior to conform Docker. + +## [0.1.1] - 2019-10-22 +### Added +- `GitSource::with_reference` method. +- HTTP source. + +## [0.1.0] - 2019-09-24 +Initial release. diff --git a/buildkit-llb/Cargo.toml b/buildkit-llb/Cargo.toml new file mode 100644 index 0000000..1232e00 --- /dev/null +++ b/buildkit-llb/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "buildkit-llb" +version = "0.2.0" +authors = ["Denys Zariaiev "] +edition = "2018" + +description = "Idiomatic high-level API to create BuildKit LLB graphs" +documentation = "https://docs.rs/buildkit-llb" +repository = "https://github.com/denzp/rust-buildkit" +readme = "README.md" +keywords = ["buildkit", "docker", "llb"] +categories = ["development-tools::build-utils", "api-bindings"] +license = "MIT/Apache-2.0" + +[dependencies] +either = "1.6" +failure = "0.1" +lazy_static = "1" +log = "0.4" +prost = "0.6" +regex = "1" +serde_json = "1.0" +sha2 = "0.8" + +[dependencies.buildkit-proto] +version = "0.2" +path = "../buildkit-proto" diff --git a/buildkit-llb/README.md b/buildkit-llb/README.md new file mode 100644 index 0000000..0ff0d39 --- /dev/null +++ b/buildkit-llb/README.md @@ -0,0 +1,36 @@ +`buildkit-llb` - high-level API to create BuildKit LLB graphs +======= + +[![Actions Status]][Actions Link] +[![buildkit-llb Crates Badge]][buildkit-llb Crates Link] +[![buildkit-llb Docs Badge]][buildkit-llb Docs Link] + +# Usage + +Please check [docs][buildkit-llb Docs Link] or examples on how to use the crate. + +The LLB graph from stdout can easily be used with `buildctl`: +``` +cargo run --example=scratch | buildctl build +``` + +# License + +`buildkit-llb` is primarily distributed under the terms of both the MIT license and +the Apache License (Version 2.0), with portions covered by various BSD-like +licenses. + +See LICENSE-APACHE, and LICENSE-MIT for details. + +# Contribution + +Unless you explicitly state otherwise, any contribution intentionally submitted +for inclusion in `buildkit-llb` by you, as defined in the Apache-2.0 license, +shall be dual licensed as above, without any additional terms or conditions. 
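For quick orientation, the sketch below shows the kind of graph this crate builds. It is modeled directly on the bundled `scratch` example and uses only items re-exported from `buildkit_llb::prelude` (`Source`, `Command`, `Mount`, `OutputIdx`, `Terminal`); treat it as an illustration rather than a replacement for the shipped examples.

```rust
// Minimal sketch (based on examples/scratch.rs): run one command on top of
// alpine and serialize the resulting LLB definition to stdout, ready to be
// piped into `buildctl build`.
use std::io::stdout;

use buildkit_llb::prelude::*;

fn main() {
    // Base image layer for the build step.
    let builder_image = Source::image("library/alpine:latest");

    // A single shell command whose output lands in a scratch mount at /out.
    let command = Command::run("/bin/sh")
        .args(&["-c", "echo 'hello from LLB' > /out/greeting"])
        .mount(Mount::ReadOnlyLayer(builder_image.output(), "/"))
        .mount(Mount::Scratch(OutputIdx(0), "/out"));

    // Serialize the graph definition to stdout.
    Terminal::with(command.output(0))
        .write_definition(stdout())
        .unwrap()
}
```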
+ +[Actions Link]: https://github.com/denzp/rust-buildkit/actions +[Actions Status]: https://github.com/denzp/rust-buildkit/workflows/CI/badge.svg +[buildkit-llb Docs Badge]: https://docs.rs/buildkit-llb/badge.svg +[buildkit-llb Docs Link]: https://docs.rs/buildkit-llb/ +[buildkit-llb Crates Badge]: https://img.shields.io/crates/v/buildkit-llb.svg +[buildkit-llb Crates Link]: https://crates.io/crates/buildkit-llb diff --git a/buildkit-llb/examples/highly-parallel.rs b/buildkit-llb/examples/highly-parallel.rs new file mode 100644 index 0000000..82a2f2f --- /dev/null +++ b/buildkit-llb/examples/highly-parallel.rs @@ -0,0 +1,93 @@ +use std::io::stdout; + +use buildkit_llb::ops::source::ImageSource; +use buildkit_llb::prelude::*; + +fn main() { + let image = Source::image("library/alpine:latest"); + let commands = build_init_commands(&image); + let commands = build_modify_commands(&image, commands); + + let base_fs = FileSystem::sequence() + .custom_name("assemble outputs") + .append(FileSystem::mkdir( + OutputIdx(0), + LayerPath::Scratch("/files"), + )); + + let (final_fs, final_output) = + commands + .into_iter() + .zip(0..) + .fold((base_fs, 0), |(fs, last_output), (output, idx)| { + let layer = fs.append( + FileSystem::copy() + .from(LayerPath::Other(output, format!("/file-{}.out", idx))) + .to( + OutputIdx(idx + 1), + LayerPath::Own( + OwnOutputIdx(last_output), + format!("/files/file-{}.out", idx), + ), + ), + ); + + (layer, idx + 1) + }); + + Terminal::with(final_fs.output(final_output)) + .write_definition(stdout()) + .unwrap() +} + +fn build_init_commands(image: &ImageSource) -> Vec { + (0..100) + .map(|idx| { + let base_dir = format!("/file/{}", idx); + let shell = format!("echo 'test {}' > /out{}/file.out", idx, base_dir); + + let output_mount = FileSystem::mkdir(OutputIdx(0), LayerPath::Scratch(&base_dir)) + .make_parents(true) + .into_operation() + .ignore_cache(true) + .ref_counted(); + + Command::run("/bin/sh") + .args(&["-c", &shell]) + .mount(Mount::ReadOnlyLayer(image.output(), "/")) + .mount(Mount::Layer(OutputIdx(0), output_mount.output(0), "/out")) + .ignore_cache(true) + .ref_counted() + .output(0) + }) + .collect() +} + +fn build_modify_commands<'a>( + image: &'a ImageSource, + layers: Vec>, +) -> Vec> { + layers + .into_iter() + .zip(0..) 
+ .map(|(output, idx)| { + let shell = format!( + "sed s/test/modified/ < /in/file/{}/file.in > /out/file-{}.out", + idx, idx + ); + + Command::run("/bin/sh") + .args(&["-c", &shell]) + .mount(Mount::ReadOnlyLayer(image.output(), "/")) + .mount(Mount::Scratch(OutputIdx(0), "/out")) + .mount(Mount::ReadOnlySelector( + output, + format!("/in/file/{}/file.in", idx), + format!("file/{}/file.out", idx), + )) + .ignore_cache(true) + .ref_counted() + .output(0) + }) + .collect() +} diff --git a/buildkit-llb/examples/network.rs b/buildkit-llb/examples/network.rs new file mode 100644 index 0000000..7e919da --- /dev/null +++ b/buildkit-llb/examples/network.rs @@ -0,0 +1,47 @@ +use std::io::stdout; + +use buildkit_llb::prelude::*; + +fn main() { + let bitflags_archive = Source::http("https://crates.io/api/v1/crates/bitflags/1.0.4/download") + .with_file_name("bitflags.tar"); + + let alpine = Source::image("library/alpine:latest"); + let bitflags_unpacked = { + Command::run("/bin/tar") + .args(&[ + "-xvzC", + "/out", + "--strip-components=1", + "-f", + "/in/bitflags.tar", + ]) + .mount(Mount::ReadOnlyLayer(alpine.output(), "/")) + .mount(Mount::ReadOnlyLayer(bitflags_archive.output(), "/in")) + .mount(Mount::Scratch(OutputIdx(0), "/out")) + }; + + let env_logger_repo = Source::git("https://github.com/sebasmagri/env_logger.git") + .with_reference("ebf4829f3c04ce9b6d3e5d59fa8770bb71bffca3"); + + let fs = { + FileSystem::sequence() + .append( + FileSystem::copy() + .from(LayerPath::Other(bitflags_unpacked.output(0), "/Cargo.toml")) + .to(OutputIdx(0), LayerPath::Scratch("/bitflags.toml")), + ) + .append( + FileSystem::copy() + .from(LayerPath::Other(env_logger_repo.output(), "/Cargo.toml")) + .to( + OutputIdx(1), + LayerPath::Own(OwnOutputIdx(0), "/env_logger.toml"), + ), + ) + }; + + Terminal::with(fs.output(1)) + .write_definition(stdout()) + .unwrap() +} diff --git a/buildkit-llb/examples/scratch-owned.rs b/buildkit-llb/examples/scratch-owned.rs new file mode 100644 index 0000000..349c021 --- /dev/null +++ b/buildkit-llb/examples/scratch-owned.rs @@ -0,0 +1,41 @@ +use std::io::stdout; + +use buildkit_llb::prelude::*; + +fn main() { + Terminal::with(build_graph()) + .write_definition(stdout()) + .unwrap() +} + +fn build_graph() -> OperationOutput<'static> { + let builder_image = Source::image("library/alpine:latest") + .custom_name("Using alpine:latest as a builder") + .ref_counted(); + + let command = { + Command::run("/bin/sh") + .args(&["-c", "echo 'test string 5' > /out/file0"]) + .custom_name("create a dummy file") + .mount(Mount::ReadOnlyLayer(builder_image.output(), "/")) + .mount(Mount::Scratch(OutputIdx(0), "/out")) + .ref_counted() + }; + + let fs = { + FileSystem::sequence() + .custom_name("do multiple file system manipulations") + .append( + FileSystem::copy() + .from(LayerPath::Other(command.output(0), "/file0")) + .to(OutputIdx(0), LayerPath::Other(command.output(0), "/file1")), + ) + .append( + FileSystem::copy() + .from(LayerPath::Own(OwnOutputIdx(0), "/file0")) + .to(OutputIdx(1), LayerPath::Own(OwnOutputIdx(0), "/file2")), + ) + }; + + fs.ref_counted().output(1) +} diff --git a/buildkit-llb/examples/scratch.rs b/buildkit-llb/examples/scratch.rs new file mode 100644 index 0000000..51a5c3e --- /dev/null +++ b/buildkit-llb/examples/scratch.rs @@ -0,0 +1,35 @@ +use std::io::stdout; + +use buildkit_llb::prelude::*; + +fn main() { + let builder_image = + Source::image("library/alpine:latest").custom_name("Using alpine:latest as a builder"); + + let command = { + Command::run("/bin/sh") + 
.args(&["-c", "echo 'test string 5' > /out/file0"]) + .custom_name("create a dummy file") + .mount(Mount::ReadOnlyLayer(builder_image.output(), "/")) + .mount(Mount::Scratch(OutputIdx(0), "/out")) + }; + + let fs = { + FileSystem::sequence() + .custom_name("do multiple file system manipulations") + .append( + FileSystem::copy() + .from(LayerPath::Other(command.output(0), "/file0")) + .to(OutputIdx(0), LayerPath::Other(command.output(0), "/file1")), + ) + .append( + FileSystem::copy() + .from(LayerPath::Own(OwnOutputIdx(0), "/file0")) + .to(OutputIdx(1), LayerPath::Own(OwnOutputIdx(0), "/file2")), + ) + }; + + Terminal::with(fs.output(1)) + .write_definition(stdout()) + .unwrap() +} diff --git a/buildkit-llb/src/lib.rs b/buildkit-llb/src/lib.rs new file mode 100644 index 0000000..a34d416 --- /dev/null +++ b/buildkit-llb/src/lib.rs @@ -0,0 +1,23 @@ +#![deny(warnings)] +#![deny(clippy::all)] + +// FIXME: get rid of the unwraps +// TODO: implement warnings for op hash collisions (will incredibly help to debug problems). +// TODO: implement efficient `std::fmt::Debug` for the ops (naive implementation can't handle huge nested graphs). + +mod serialization; + +/// Supported operations - building blocks of the LLB definition graph. +pub mod ops; + +/// Various helpers and types. +pub mod utils; + +/// Convenient re-export of a commonly used things. +pub mod prelude { + pub use crate::ops::exec::Mount; + pub use crate::ops::fs::LayerPath; + pub use crate::ops::source::ResolveMode; + pub use crate::ops::*; + pub use crate::utils::{OperationOutput, OutputIdx, OwnOutputIdx}; +} diff --git a/buildkit-llb/src/ops/exec/command.rs b/buildkit-llb/src/ops/exec/command.rs new file mode 100644 index 0000000..f48fabe --- /dev/null +++ b/buildkit-llb/src/ops/exec/command.rs @@ -0,0 +1,319 @@ +use std::collections::HashMap; +use std::iter::{empty, once}; +use std::path::{Path, PathBuf}; +use std::sync::Arc; + +use buildkit_proto::pb::{ + self, op::Op, ExecOp, Input, MountType, NetMode, OpMetadata, SecurityMode, +}; +use either::Either; + +use super::context::Context; +use super::mount::Mount; + +use crate::ops::{MultiBorrowedOutput, MultiOwnedOutput, OperationBuilder}; +use crate::serialization::{Context as SerializationCtx, Node, Operation, OperationId, Result}; +use crate::utils::{OperationOutput, OutputIdx}; + +/// Command execution operation. This is what a Dockerfile's `RUN` directive is translated to. 
+#[derive(Debug, Clone)] +pub struct Command<'a> { + id: OperationId, + + context: Context, + root_mount: Option>, + other_mounts: Vec>, + + description: HashMap, + caps: HashMap, + ignore_cache: bool, +} + +impl<'a> Command<'a> { + pub fn run(name: S) -> Self + where + S: Into, + { + Self { + id: OperationId::default(), + + context: Context::new(name), + root_mount: None, + other_mounts: vec![], + + description: Default::default(), + caps: Default::default(), + ignore_cache: false, + } + } + + pub fn args(mut self, args: A) -> Self + where + A: IntoIterator, + S: AsRef, + { + self.context.args = args.into_iter().map(|item| item.as_ref().into()).collect(); + self + } + + pub fn env(mut self, name: S, value: Q) -> Self + where + S: AsRef, + Q: AsRef, + { + let env = format!("{}={}", name.as_ref(), value.as_ref()); + + self.context.env.push(env); + self + } + + pub fn env_iter(mut self, iter: I) -> Self + where + I: IntoIterator, + S: AsRef, + Q: AsRef, + { + for (name, value) in iter.into_iter() { + let env = format!("{}={}", name.as_ref(), value.as_ref()); + self.context.env.push(env); + } + + self + } + + pub fn cwd
<P>
(mut self, path: P) -> Self + where + P: Into, + { + self.context.cwd = path.into(); + self + } + + pub fn user(mut self, user: S) -> Self + where + S: Into, + { + self.context.user = user.into(); + self + } + + pub fn mount
<P>
(mut self, mount: Mount<'a, P>) -> Self + where + P: AsRef, + { + match mount { + Mount::Layer(..) | Mount::ReadOnlyLayer(..) | Mount::Scratch(..) => { + self.caps.insert("exec.mount.bind".into(), true); + } + + Mount::ReadOnlySelector(..) => { + self.caps.insert("exec.mount.bind".into(), true); + self.caps.insert("exec.mount.selector".into(), true); + } + + Mount::SharedCache(..) => { + self.caps.insert("exec.mount.cache".into(), true); + self.caps.insert("exec.mount.cache.sharing".into(), true); + } + + Mount::OptionalSshAgent(..) => { + self.caps.insert("exec.mount.ssh".into(), true); + } + } + + if mount.is_root() { + self.root_mount = Some(mount.into_owned()); + } else { + self.other_mounts.push(mount.into_owned()); + } + + self + } +} + +impl<'a, 'b: 'a> MultiBorrowedOutput<'b> for Command<'b> { + fn output(&'b self, index: u32) -> OperationOutput<'b> { + // TODO: check if the requested index available. + OperationOutput::borrowed(self, OutputIdx(index)) + } +} + +impl<'a> MultiOwnedOutput<'a> for Arc> { + fn output(&self, index: u32) -> OperationOutput<'a> { + // TODO: check if the requested index available. + OperationOutput::owned(self.clone(), OutputIdx(index)) + } +} + +impl<'a> OperationBuilder<'a> for Command<'a> { + fn custom_name(mut self, name: S) -> Self + where + S: Into, + { + self.description + .insert("llb.customname".into(), name.into()); + + self + } + + fn ignore_cache(mut self, ignore: bool) -> Self { + self.ignore_cache = ignore; + self + } +} + +impl<'a> Operation for Command<'a> { + fn id(&self) -> &OperationId { + &self.id + } + + fn serialize(&self, cx: &mut SerializationCtx) -> Result { + let (inputs, mounts): (Vec<_>, Vec<_>) = { + let mut last_input_index = 0; + + self.root_mount + .as_ref() + .into_iter() + .chain(self.other_mounts.iter()) + .map(|mount| { + let inner_mount = match mount { + Mount::ReadOnlyLayer(_, destination) => pb::Mount { + input: last_input_index, + dest: destination.to_string_lossy().into(), + output: -1, + readonly: true, + mount_type: MountType::Bind as i32, + + ..Default::default() + }, + + Mount::ReadOnlySelector(_, destination, source) => pb::Mount { + input: last_input_index, + dest: destination.to_string_lossy().into(), + output: -1, + readonly: true, + selector: source.to_string_lossy().into(), + mount_type: MountType::Bind as i32, + + ..Default::default() + }, + + Mount::Layer(output, _, path) => pb::Mount { + input: last_input_index, + dest: path.to_string_lossy().into(), + output: output.into(), + mount_type: MountType::Bind as i32, + + ..Default::default() + }, + + Mount::Scratch(output, path) => { + let mount = pb::Mount { + input: -1, + dest: path.to_string_lossy().into(), + output: output.into(), + mount_type: MountType::Bind as i32, + + ..Default::default() + }; + + return (Either::Right(empty()), mount); + } + + Mount::SharedCache(path) => { + use buildkit_proto::pb::{CacheOpt, CacheSharingOpt}; + + let mount = pb::Mount { + input: -1, + dest: path.to_string_lossy().into(), + output: -1, + mount_type: MountType::Cache as i32, + + cache_opt: Some(CacheOpt { + id: path.display().to_string(), + sharing: CacheSharingOpt::Shared as i32, + }), + + ..Default::default() + }; + + return (Either::Right(empty()), mount); + } + + Mount::OptionalSshAgent(path) => { + use buildkit_proto::pb::SshOpt; + + let mount = pb::Mount { + input: -1, + dest: path.to_string_lossy().into(), + output: -1, + mount_type: MountType::Ssh as i32, + + ssh_opt: Some(SshOpt { + mode: 0o600, + optional: true, + ..Default::default() + }), + + 
..Default::default() + }; + + return (Either::Right(empty()), mount); + } + }; + + let input = match mount { + Mount::ReadOnlyLayer(input, ..) => input, + Mount::ReadOnlySelector(input, ..) => input, + Mount::Layer(_, input, ..) => input, + + Mount::SharedCache(..) => { + unreachable!(); + } + + Mount::Scratch(..) => { + unreachable!(); + } + + Mount::OptionalSshAgent(..) => { + unreachable!(); + } + }; + + let serialized = cx.register(input.operation()).unwrap(); + let input = Input { + digest: serialized.digest.clone(), + index: input.output().into(), + }; + + last_input_index += 1; + + (Either::Left(once(input)), inner_mount) + }) + .unzip() + }; + + let head = pb::Op { + op: Some(Op::Exec(ExecOp { + mounts, + network: NetMode::Unset.into(), + security: SecurityMode::Sandbox.into(), + meta: Some(self.context.clone().into()), + })), + + inputs: inputs.into_iter().flatten().collect(), + + ..Default::default() + }; + + let metadata = OpMetadata { + description: self.description.clone(), + caps: self.caps.clone(), + ignore_cache: self.ignore_cache, + + ..Default::default() + }; + + Ok(Node::new(head, metadata)) + } +} diff --git a/buildkit-llb/src/ops/exec/context.rs b/buildkit-llb/src/ops/exec/context.rs new file mode 100644 index 0000000..b000008 --- /dev/null +++ b/buildkit-llb/src/ops/exec/context.rs @@ -0,0 +1,49 @@ +use std::iter::once; +use std::path::PathBuf; + +use buildkit_proto::pb::Meta; + +#[derive(Debug, Clone)] +pub(crate) struct Context { + pub name: String, + pub args: Vec, + pub env: Vec, + + pub cwd: PathBuf, + pub user: String, +} + +impl Context { + pub fn new(name: S) -> Self + where + S: Into, + { + Self { + name: name.into(), + + cwd: PathBuf::from("/"), + user: "root".into(), + + args: vec![], + env: vec![], + } + } +} + +impl Into for Context { + fn into(self) -> Meta { + Meta { + args: { + once(self.name.clone()) + .chain(self.args.iter().cloned()) + .collect() + }, + + env: self.env, + cwd: self.cwd.to_string_lossy().into(), + user: self.user, + + ..Default::default() + } + } +} diff --git a/buildkit-llb/src/ops/exec/mod.rs b/buildkit-llb/src/ops/exec/mod.rs new file mode 100644 index 0000000..e9ed0ac --- /dev/null +++ b/buildkit-llb/src/ops/exec/mod.rs @@ -0,0 +1,440 @@ +mod command; +mod context; +mod mount; + +pub use command::Command; +pub use mount::Mount; + +#[test] +fn serialization() { + use crate::prelude::*; + use buildkit_proto::pb::{op::Op, ExecOp, Meta, NetMode, SecurityMode}; + + crate::check_op!( + { + Command::run("/bin/sh") + .args(&["-c", "echo 'test string' > /out/file0"]) + .env("HOME", "/root") + .custom_name("exec custom name") + }, + |digest| { "sha256:dc9a5a3cd84bb1c7b633f1750fdfccd9d0a69d060f8e3babb297bc190e2d7484" }, + |description| { vec![("llb.customname", "exec custom name")] }, + |caps| { vec![] }, + |cached_tail| { vec![] }, + |inputs| { vec![] }, + |op| { + Op::Exec(ExecOp { + mounts: vec![], + network: NetMode::Unset.into(), + security: SecurityMode::Sandbox.into(), + meta: Some(Meta { + args: crate::utils::test::to_vec(vec![ + "/bin/sh", + "-c", + "echo 'test string' > /out/file0", + ]), + + env: crate::utils::test::to_vec(vec!["HOME=/root"]), + cwd: "/".into(), + user: "root".into(), + + extra_hosts: vec![], + proxy_env: None, + }), + }) + }, + ); +} + +#[test] +fn serialization_with_env_iter() { + use crate::prelude::*; + use buildkit_proto::pb::{op::Op, ExecOp, Meta, NetMode, SecurityMode}; + + crate::check_op!( + { + Command::run("cargo").args(&["build"]).env_iter(vec![ + ("HOME", "/root"), + ("PATH", "/bin"), + 
("CARGO_HOME", "/root/.cargo"), + ]) + }, + |digest| { "sha256:7675be0b02acb379d57bafee5dc749fca7e795fb1e0a92748ccc59a7bc3b491e" }, + |description| { vec![] }, + |caps| { vec![] }, + |cached_tail| { vec![] }, + |inputs| { vec![] }, + |op| { + Op::Exec(ExecOp { + mounts: vec![], + network: NetMode::Unset.into(), + security: SecurityMode::Sandbox.into(), + meta: Some(Meta { + args: crate::utils::test::to_vec(vec!["cargo", "build"]), + env: crate::utils::test::to_vec(vec![ + "HOME=/root", + "PATH=/bin", + "CARGO_HOME=/root/.cargo", + ]), + + cwd: "/".into(), + user: "root".into(), + + extra_hosts: vec![], + proxy_env: None, + }), + }) + }, + ); +} + +#[test] +fn serialization_with_cwd() { + use crate::prelude::*; + use buildkit_proto::pb::{op::Op, ExecOp, Meta, NetMode, SecurityMode}; + + crate::check_op!( + Command::run("cargo").args(&["build"]).cwd("/rust-src"), + |digest| { "sha256:b8120a0e1d1f7fcaa3d6c95db292d064524dc92c6cae8b97672d4e1eafcd03fa" }, + |description| { vec![] }, + |caps| { vec![] }, + |cached_tail| { vec![] }, + |inputs| { vec![] }, + |op| { + Op::Exec(ExecOp { + mounts: vec![], + network: NetMode::Unset.into(), + security: SecurityMode::Sandbox.into(), + meta: Some(Meta { + args: crate::utils::test::to_vec(vec!["cargo", "build"]), + env: vec![], + cwd: "/rust-src".into(), + user: "root".into(), + + extra_hosts: vec![], + proxy_env: None, + }), + }) + }, + ); +} + +#[test] +fn serialization_with_user() { + use crate::prelude::*; + use buildkit_proto::pb::{op::Op, ExecOp, Meta, NetMode, SecurityMode}; + + crate::check_op!( + Command::run("cargo").args(&["build"]).user("builder"), + |digest| { "sha256:7631ea645e2126e9dbc5d9ae789e34301d9d5c80ce89bfa72bc9b82aa43b57c0" }, + |description| { vec![] }, + |caps| { vec![] }, + |cached_tail| { vec![] }, + |inputs| { vec![] }, + |op| { + Op::Exec(ExecOp { + mounts: vec![], + network: NetMode::Unset.into(), + security: SecurityMode::Sandbox.into(), + meta: Some(Meta { + args: crate::utils::test::to_vec(vec!["cargo", "build"]), + env: vec![], + cwd: "/".into(), + user: "builder".into(), + + extra_hosts: vec![], + proxy_env: None, + }), + }) + }, + ); +} + +#[test] +fn serialization_with_mounts() { + use crate::prelude::*; + use buildkit_proto::pb::{ + op::Op, CacheOpt, CacheSharingOpt, ExecOp, Meta, MountType, NetMode, SecurityMode, + }; + + let context = Source::local("context"); + let builder_image = Source::image("rustlang/rust:nightly"); + let final_image = Source::image("library/alpine:latest"); + + let command = Command::run("cargo") + .args(&["build"]) + .mount(Mount::ReadOnlyLayer(builder_image.output(), "/")) + .mount(Mount::Scratch(OutputIdx(1), "/tmp")) + .mount(Mount::ReadOnlySelector( + context.output(), + "/buildkit-frontend", + "/frontend-sources", + )) + .mount(Mount::Layer(OutputIdx(0), final_image.output(), "/output")) + .mount(Mount::SharedCache("/root/.cargo")); + + crate::check_op!( + command, + |digest| { "sha256:54a66b514361b13b17f8b5aaaa2392a4c07b55ac53303e4f50584f3dfef6add0" }, + |description| { vec![] }, + |caps| { + vec![ + "exec.mount.bind", + "exec.mount.cache", + "exec.mount.cache.sharing", + "exec.mount.selector", + ] + }, + |cached_tail| { + vec![ + "sha256:a60212791641cbeaa3a49de4f7dff9e40ae50ec19d1be9607232037c1db16702", + "sha256:dee2a3d7dd482dd8098ba543ff1dcb01efd29fcd16fdb0979ef556f38564543a", + "sha256:0e6b31ceed3e6dc542018f35a53a0e857e6a188453d32a2a5bbe7aa2971c1220", + ] + }, + |inputs| { + vec![ + ( + "sha256:dee2a3d7dd482dd8098ba543ff1dcb01efd29fcd16fdb0979ef556f38564543a", + 0, + ), + ( + 
"sha256:a60212791641cbeaa3a49de4f7dff9e40ae50ec19d1be9607232037c1db16702", + 0, + ), + ( + "sha256:0e6b31ceed3e6dc542018f35a53a0e857e6a188453d32a2a5bbe7aa2971c1220", + 0, + ), + ] + }, + |op| { + Op::Exec(ExecOp { + mounts: vec![ + pb::Mount { + input: 0, + selector: "".into(), + dest: "/".into(), + output: -1, + readonly: true, + mount_type: MountType::Bind.into(), + cache_opt: None, + secret_opt: None, + ssh_opt: None, + }, + pb::Mount { + input: -1, + selector: "".into(), + dest: "/tmp".into(), + output: 1, + readonly: false, + mount_type: MountType::Bind.into(), + cache_opt: None, + secret_opt: None, + ssh_opt: None, + }, + pb::Mount { + input: 1, + selector: "/frontend-sources".into(), + dest: "/buildkit-frontend".into(), + output: -1, + readonly: true, + mount_type: MountType::Bind.into(), + cache_opt: None, + secret_opt: None, + ssh_opt: None, + }, + pb::Mount { + input: 2, + selector: "".into(), + dest: "/output".into(), + output: 0, + readonly: false, + mount_type: MountType::Bind.into(), + cache_opt: None, + secret_opt: None, + ssh_opt: None, + }, + pb::Mount { + input: -1, + selector: "".into(), + dest: "/root/.cargo".into(), + output: -1, + readonly: false, + mount_type: MountType::Cache.into(), + cache_opt: Some(CacheOpt { + id: "/root/.cargo".into(), + sharing: CacheSharingOpt::Shared.into(), + }), + secret_opt: None, + ssh_opt: None, + }, + ], + network: NetMode::Unset.into(), + security: SecurityMode::Sandbox.into(), + meta: Some(Meta { + args: crate::utils::test::to_vec(vec!["cargo", "build"]), + env: vec![], + cwd: "/".into(), + user: "root".into(), + + extra_hosts: vec![], + proxy_env: None, + }), + }) + }, + ); +} + +#[test] +fn serialization_with_several_root_mounts() { + use crate::prelude::*; + use buildkit_proto::pb::{op::Op, ExecOp, Meta, MountType, NetMode, SecurityMode}; + + let builder_image = Source::image("rustlang/rust:nightly"); + let final_image = Source::image("library/alpine:latest"); + + let command = Command::run("cargo") + .args(&["build"]) + .mount(Mount::Scratch(OutputIdx(0), "/tmp")) + .mount(Mount::ReadOnlyLayer(builder_image.output(), "/")) + .mount(Mount::Scratch(OutputIdx(1), "/var")) + .mount(Mount::ReadOnlyLayer(final_image.output(), "/")); + + crate::check_op!( + command, + |digest| { "sha256:baa1bf591d2c47058b7361a0284fa8a3f1bd0fac8a93c87affa77ddc0a5026fd" }, + |description| { vec![] }, + |caps| { vec!["exec.mount.bind"] }, + |cached_tail| { + vec!["sha256:0e6b31ceed3e6dc542018f35a53a0e857e6a188453d32a2a5bbe7aa2971c1220"] + }, + |inputs| { + vec![( + "sha256:0e6b31ceed3e6dc542018f35a53a0e857e6a188453d32a2a5bbe7aa2971c1220", + 0, + )] + }, + |op| { + Op::Exec(ExecOp { + mounts: vec![ + pb::Mount { + input: 0, + selector: "".into(), + dest: "/".into(), + output: -1, + readonly: true, + mount_type: MountType::Bind.into(), + cache_opt: None, + secret_opt: None, + ssh_opt: None, + }, + pb::Mount { + input: -1, + selector: "".into(), + dest: "/tmp".into(), + output: 0, + readonly: false, + mount_type: MountType::Bind.into(), + cache_opt: None, + secret_opt: None, + ssh_opt: None, + }, + pb::Mount { + input: -1, + selector: "".into(), + dest: "/var".into(), + output: 1, + readonly: false, + mount_type: MountType::Bind.into(), + cache_opt: None, + secret_opt: None, + ssh_opt: None, + }, + ], + network: NetMode::Unset.into(), + security: SecurityMode::Sandbox.into(), + meta: Some(Meta { + args: crate::utils::test::to_vec(vec!["cargo", "build"]), + env: vec![], + cwd: "/".into(), + user: "root".into(), + + extra_hosts: vec![], + proxy_env: None, + }), 
+ }) + }, + ); +} + +#[test] +fn serialization_with_ssh_mounts() { + use crate::prelude::*; + use buildkit_proto::pb::{op::Op, ExecOp, Meta, MountType, NetMode, SecurityMode, SshOpt}; + + let builder_image = Source::image("rustlang/rust:nightly"); + let command = Command::run("cargo") + .args(&["build"]) + .mount(Mount::ReadOnlyLayer(builder_image.output(), "/")) + .mount(Mount::OptionalSshAgent("/run/buildkit/ssh_agent.0")); + + crate::check_op!( + command, + |digest| { "sha256:1ac1438c67a153878f21fe8067383fd7544901261374eb53ba8bf26e9a5821a5" }, + |description| { vec![] }, + |caps| { vec!["exec.mount.bind", "exec.mount.ssh"] }, + |cached_tail| { + vec!["sha256:dee2a3d7dd482dd8098ba543ff1dcb01efd29fcd16fdb0979ef556f38564543a"] + }, + |inputs| { + vec![( + "sha256:dee2a3d7dd482dd8098ba543ff1dcb01efd29fcd16fdb0979ef556f38564543a", + 0, + )] + }, + |op| { + Op::Exec(ExecOp { + mounts: vec![ + pb::Mount { + input: 0, + selector: "".into(), + dest: "/".into(), + output: -1, + readonly: true, + mount_type: MountType::Bind.into(), + cache_opt: None, + secret_opt: None, + ssh_opt: None, + }, + pb::Mount { + input: -1, + selector: "".into(), + dest: "/run/buildkit/ssh_agent.0".into(), + output: -1, + readonly: false, + mount_type: MountType::Ssh.into(), + cache_opt: None, + secret_opt: None, + ssh_opt: Some(SshOpt { + mode: 0o600, + optional: true, + ..Default::default() + }), + }, + ], + network: NetMode::Unset.into(), + security: SecurityMode::Sandbox.into(), + meta: Some(Meta { + args: crate::utils::test::to_vec(vec!["cargo", "build"]), + env: vec![], + cwd: "/".into(), + user: "root".into(), + + extra_hosts: vec![], + proxy_env: None, + }), + }) + }, + ); +} diff --git a/buildkit-llb/src/ops/exec/mount.rs b/buildkit-llb/src/ops/exec/mount.rs new file mode 100644 index 0000000..76b541c --- /dev/null +++ b/buildkit-llb/src/ops/exec/mount.rs @@ -0,0 +1,59 @@ +use std::path::{Path, PathBuf}; + +use crate::utils::{OperationOutput, OutputIdx}; + +/// Operand of *command execution operation* that specifies how are input sources mounted. +#[derive(Debug, Clone)] +pub enum Mount<'a, P: AsRef> { + /// Read-only output of another operation. + ReadOnlyLayer(OperationOutput<'a>, P), + + /// Read-only output of another operation with a selector. + ReadOnlySelector(OperationOutput<'a>, P, P), + + /// Empty layer that produces an output. + Scratch(OutputIdx, P), + + /// Writable output of another operation. + Layer(OutputIdx, OperationOutput<'a>, P), + + /// Writable persistent cache. + SharedCache(P), + + /// Optional SSH agent socket at the specified path. + OptionalSshAgent(P), +} + +impl<'a, P: AsRef> Mount<'a, P> { + /// Transform the mount into owned variant (basically, with `PathBuf` as the path). + pub fn into_owned(self) -> Mount<'a, PathBuf> { + use Mount::*; + + match self { + ReadOnlySelector(op, path, selector) => { + ReadOnlySelector(op, path.as_ref().into(), selector.as_ref().into()) + } + + ReadOnlyLayer(op, path) => ReadOnlyLayer(op, path.as_ref().into()), + Scratch(output, path) => Scratch(output, path.as_ref().into()), + Layer(output, input, path) => Layer(output, input, path.as_ref().into()), + SharedCache(path) => SharedCache(path.as_ref().into()), + OptionalSshAgent(path) => OptionalSshAgent(path.as_ref().into()), + } + } + + pub fn is_root(&self) -> bool { + use Mount::*; + + let path = match self { + ReadOnlySelector(_, path, ..) 
=> path, + ReadOnlyLayer(_, path) => path, + Scratch(_, path) => path, + Layer(_, _, path) => path, + SharedCache(path) => path, + OptionalSshAgent(_) => return false, + }; + + path.as_ref() == Path::new("/") + } +} diff --git a/buildkit-llb/src/ops/fs/copy.rs b/buildkit-llb/src/ops/fs/copy.rs new file mode 100644 index 0000000..b677b9d --- /dev/null +++ b/buildkit-llb/src/ops/fs/copy.rs @@ -0,0 +1,214 @@ +use std::collections::HashMap; +use std::fmt::Debug; +use std::path::{Path, PathBuf}; + +use buildkit_proto::pb; + +use super::path::{LayerPath, UnsetPath}; +use super::FileOperation; + +use crate::serialization::{Context, Result}; +use crate::utils::OutputIdx; + +#[derive(Debug)] +pub struct CopyOperation { + source: From, + destination: To, + + follow_symlinks: bool, + recursive: bool, + create_path: bool, + wildcard: bool, + + description: HashMap, + caps: HashMap, +} + +type OpWithoutSource = CopyOperation; +type OpWithSource<'a> = CopyOperation, UnsetPath>; +type OpWithDestination<'a> = + CopyOperation, (OutputIdx, LayerPath<'a, PathBuf>)>; + +impl OpWithoutSource { + pub(crate) fn new() -> OpWithoutSource { + let mut caps = HashMap::::new(); + caps.insert("file.base".into(), true); + + CopyOperation { + source: UnsetPath, + destination: UnsetPath, + + follow_symlinks: false, + recursive: false, + create_path: false, + wildcard: false, + + caps, + description: Default::default(), + } + } + + pub fn from
<P>
(self, source: LayerPath<'_, P>) -> OpWithSource + where + P: AsRef, + { + CopyOperation { + source: source.into_owned(), + destination: UnsetPath, + + follow_symlinks: self.follow_symlinks, + recursive: self.recursive, + create_path: self.create_path, + wildcard: self.wildcard, + + description: self.description, + caps: self.caps, + } + } +} + +impl<'a> OpWithSource<'a> { + pub fn to
<P>
(self, output: OutputIdx, destination: LayerPath<'a, P>) -> OpWithDestination<'a> + where + P: AsRef, + { + CopyOperation { + source: self.source, + destination: (output, destination.into_owned()), + + follow_symlinks: self.follow_symlinks, + recursive: self.recursive, + create_path: self.create_path, + wildcard: self.wildcard, + + description: self.description, + caps: self.caps, + } + } +} + +impl<'a> OpWithDestination<'a> { + pub fn into_operation(self) -> super::sequence::SequenceOperation<'a> { + super::sequence::SequenceOperation::new().append(self) + } +} + +impl CopyOperation +where + From: Debug, + To: Debug, +{ + pub fn follow_symlinks(mut self, value: bool) -> Self { + self.follow_symlinks = value; + self + } + + pub fn recursive(mut self, value: bool) -> Self { + self.recursive = value; + self + } + + pub fn create_path(mut self, value: bool) -> Self { + self.create_path = value; + self + } + + pub fn wildcard(mut self, value: bool) -> Self { + self.wildcard = value; + self + } +} + +impl<'a> FileOperation for OpWithDestination<'a> { + fn output(&self) -> i32 { + self.destination.0.into() + } + + fn serialize_inputs(&self, cx: &mut Context) -> Result> { + let mut inputs = if let LayerPath::Other(ref op, ..) = self.source { + let serialized_from_head = cx.register(op.operation())?; + + vec![pb::Input { + digest: serialized_from_head.digest.clone(), + index: op.output().into(), + }] + } else { + vec![] + }; + + if let LayerPath::Other(ref op, ..) = self.destination.1 { + let serialized_to_head = cx.register(op.operation())?; + + inputs.push(pb::Input { + digest: serialized_to_head.digest.clone(), + index: op.output().into(), + }); + } + + Ok(inputs) + } + + fn serialize_action( + &self, + inputs_count: usize, + inputs_offset: usize, + ) -> Result { + let (src_idx, src_offset, src) = match self.source { + LayerPath::Scratch(ref path) => (-1, 0, path.to_string_lossy().into()), + + LayerPath::Other(_, ref path) => { + (inputs_offset as i64, 1, path.to_string_lossy().into()) + } + + LayerPath::Own(ref output, ref path) => { + let output: i64 = output.into(); + + ( + inputs_count as i64 + output, + 0, + path.to_string_lossy().into(), + ) + } + }; + + let (dest_idx, dest) = match self.destination.1 { + LayerPath::Scratch(ref path) => (-1, path.to_string_lossy().into()), + + LayerPath::Other(_, ref path) => ( + inputs_offset as i32 + src_offset, + path.to_string_lossy().into(), + ), + + LayerPath::Own(ref output, ref path) => { + let output: i32 = output.into(); + + (inputs_count as i32 + output, path.to_string_lossy().into()) + } + }; + + Ok(pb::FileAction { + input: i64::from(dest_idx), + secondary_input: src_idx, + + output: i64::from(self.output()), + + action: Some(pb::file_action::Action::Copy(pb::FileActionCopy { + src, + dest, + + follow_symlink: self.follow_symlinks, + dir_copy_contents: self.recursive, + create_dest_path: self.create_path, + allow_wildcard: self.wildcard, + + // TODO: make this configurable + mode: -1, + + // TODO: make this configurable + timestamp: -1, + + ..Default::default() + })), + }) + } +} diff --git a/buildkit-llb/src/ops/fs/mkdir.rs b/buildkit-llb/src/ops/fs/mkdir.rs new file mode 100644 index 0000000..b1e91cd --- /dev/null +++ b/buildkit-llb/src/ops/fs/mkdir.rs @@ -0,0 +1,110 @@ +use std::collections::HashMap; +use std::path::{Path, PathBuf}; + +use buildkit_proto::pb; + +use super::path::LayerPath; +use super::FileOperation; + +use crate::serialization::{Context, Result}; +use crate::utils::OutputIdx; + +#[derive(Debug)] +pub struct 
MakeDirOperation<'a> { + path: LayerPath<'a, PathBuf>, + output: OutputIdx, + + make_parents: bool, + + description: HashMap, + caps: HashMap, +} + +impl<'a> MakeDirOperation<'a> { + pub(crate) fn new
<P>
(output: OutputIdx, path: LayerPath<'a, P>) -> Self + where + P: AsRef, + { + let mut caps = HashMap::::new(); + caps.insert("file.base".into(), true); + + MakeDirOperation { + path: path.into_owned(), + output, + + make_parents: false, + + caps, + description: Default::default(), + } + } + + pub fn make_parents(mut self, value: bool) -> Self { + self.make_parents = value; + self + } + + pub fn into_operation(self) -> super::sequence::SequenceOperation<'a> { + super::sequence::SequenceOperation::new().append(self) + } +} + +impl<'a> FileOperation for MakeDirOperation<'a> { + fn output(&self) -> i32 { + self.output.into() + } + + fn serialize_inputs(&self, cx: &mut Context) -> Result> { + if let LayerPath::Other(ref op, ..) = self.path { + let serialized_from_head = cx.register(op.operation())?; + + let inputs = vec![pb::Input { + digest: serialized_from_head.digest.clone(), + index: op.output().into(), + }]; + + Ok(inputs) + } else { + Ok(Vec::with_capacity(0)) + } + } + + fn serialize_action( + &self, + inputs_count: usize, + inputs_offset: usize, + ) -> Result { + let (src_idx, path) = match self.path { + LayerPath::Scratch(ref path) => (-1, path.to_string_lossy().into()), + LayerPath::Other(_, ref path) => (inputs_offset as i64, path.to_string_lossy().into()), + + LayerPath::Own(ref output, ref path) => { + let output: i64 = output.into(); + + (inputs_count as i64 + output, path.to_string_lossy().into()) + } + }; + + Ok(pb::FileAction { + input: src_idx, + secondary_input: -1, + + output: i64::from(self.output()), + + action: Some(pb::file_action::Action::Mkdir(pb::FileActionMkDir { + path, + + make_parents: self.make_parents, + + // TODO: make this configurable + mode: -1, + + // TODO: make this configurable + timestamp: -1, + + // TODO: make this configurable + owner: None, + })), + }) + } +} diff --git a/buildkit-llb/src/ops/fs/mkfile.rs b/buildkit-llb/src/ops/fs/mkfile.rs new file mode 100644 index 0000000..7f0588e --- /dev/null +++ b/buildkit-llb/src/ops/fs/mkfile.rs @@ -0,0 +1,110 @@ +use std::collections::HashMap; +use std::path::{Path, PathBuf}; + +use buildkit_proto::pb; + +use super::path::LayerPath; +use super::FileOperation; + +use crate::serialization::{Context, Result}; +use crate::utils::OutputIdx; + +#[derive(Debug)] +pub struct MakeFileOperation<'a> { + path: LayerPath<'a, PathBuf>, + output: OutputIdx, + + data: Option>, + + description: HashMap, + caps: HashMap, +} + +impl<'a> MakeFileOperation<'a> { + pub(crate) fn new
<P>
(output: OutputIdx, path: LayerPath<'a, P>) -> Self + where + P: AsRef, + { + let mut caps = HashMap::::new(); + caps.insert("file.base".into(), true); + + MakeFileOperation { + path: path.into_owned(), + output, + + data: None, + + caps, + description: Default::default(), + } + } + + pub fn data(mut self, bytes: Vec) -> Self { + self.data = Some(bytes); + self + } + + pub fn into_operation(self) -> super::sequence::SequenceOperation<'a> { + super::sequence::SequenceOperation::new().append(self) + } +} + +impl<'a> FileOperation for MakeFileOperation<'a> { + fn output(&self) -> i32 { + self.output.into() + } + + fn serialize_inputs(&self, cx: &mut Context) -> Result> { + if let LayerPath::Other(ref op, ..) = self.path { + let serialized_from_head = cx.register(op.operation())?; + + let inputs = vec![pb::Input { + digest: serialized_from_head.digest.clone(), + index: op.output().into(), + }]; + + Ok(inputs) + } else { + Ok(Vec::with_capacity(0)) + } + } + + fn serialize_action( + &self, + inputs_count: usize, + inputs_offset: usize, + ) -> Result { + let (src_idx, path) = match self.path { + LayerPath::Scratch(ref path) => (-1, path.to_string_lossy().into()), + LayerPath::Other(_, ref path) => (inputs_offset as i64, path.to_string_lossy().into()), + + LayerPath::Own(ref output, ref path) => { + let output: i64 = output.into(); + + (inputs_count as i64 + output, path.to_string_lossy().into()) + } + }; + + Ok(pb::FileAction { + input: src_idx, + secondary_input: -1, + + output: i64::from(self.output()), + + action: Some(pb::file_action::Action::Mkfile(pb::FileActionMkFile { + path, + + data: self.data.clone().unwrap_or_else(|| Vec::with_capacity(0)), + + // TODO: make this configurable + mode: -1, + + // TODO: make this configurable + timestamp: -1, + + // TODO: make this configurable + owner: None, + })), + }) + } +} diff --git a/buildkit-llb/src/ops/fs/mod.rs b/buildkit-llb/src/ops/fs/mod.rs new file mode 100644 index 0000000..d785303 --- /dev/null +++ b/buildkit-llb/src/ops/fs/mod.rs @@ -0,0 +1,475 @@ +use std::fmt::Debug; +use std::path::Path; + +use buildkit_proto::pb; + +use crate::serialization::{Context, Result}; +use crate::utils::OutputIdx; + +mod copy; +mod mkdir; +mod mkfile; +mod path; +mod sequence; + +pub use self::copy::CopyOperation; +pub use self::mkdir::MakeDirOperation; +pub use self::mkfile::MakeFileOperation; +pub use self::path::{LayerPath, UnsetPath}; +pub use self::sequence::SequenceOperation; + +/// Umbrella operation that handles file system related routines. +/// Dockerfile's `COPY` directive is a partial case of this. +pub struct FileSystem; + +impl FileSystem { + pub fn sequence() -> SequenceOperation<'static> { + SequenceOperation::new() + } + + pub fn copy() -> copy::CopyOperation { + CopyOperation::new() + } + + pub fn mkdir
<P>
(output: OutputIdx, layer: LayerPath
<P>
) -> MakeDirOperation + where + P: AsRef, + { + MakeDirOperation::new(output, layer) + } + + pub fn mkfile
<P>
(output: OutputIdx, layer: LayerPath
<P>
) -> MakeFileOperation + where + P: AsRef, + { + MakeFileOperation::new(output, layer) + } +} + +pub trait FileOperation: Debug + Send + Sync { + fn output(&self) -> i32; + + fn serialize_inputs(&self, cx: &mut Context) -> Result>; + fn serialize_action(&self, inputs_count: usize, inputs_offset: usize) + -> Result; +} + +#[test] +fn copy_serialization() { + use crate::prelude::*; + use buildkit_proto::pb::{file_action::Action, op::Op, FileAction, FileActionCopy, FileOp}; + + let context = Source::local("context"); + let builder_image = Source::image("rustlang/rust:nightly"); + + let operation = FileSystem::sequence() + .append( + FileSystem::copy() + .from(LayerPath::Other(context.output(), "Cargo.toml")) + .to(OutputIdx(0), LayerPath::Scratch("Cargo.toml")), + ) + .append( + FileSystem::copy() + .from(LayerPath::Other(builder_image.output(), "/bin/sh")) + .to(OutputIdx(1), LayerPath::Own(OwnOutputIdx(0), "/bin/sh")), + ) + .append( + FileSystem::copy() + .from(LayerPath::Own(OwnOutputIdx(1), "Cargo.toml")) + .to(OutputIdx(2), LayerPath::Scratch("Cargo.toml")), + ); + + crate::check_op!( + operation, + |digest| { "sha256:c4f7fb723fa87f03788aaf660dc9110ad8748fc9971e13713f103b632c05ae96" }, + |description| { vec![] }, + |caps| { vec!["file.base"] }, + |cached_tail| { + vec![ + "sha256:a60212791641cbeaa3a49de4f7dff9e40ae50ec19d1be9607232037c1db16702", + "sha256:dee2a3d7dd482dd8098ba543ff1dcb01efd29fcd16fdb0979ef556f38564543a", + ] + }, + |inputs| { + vec![ + ( + "sha256:a60212791641cbeaa3a49de4f7dff9e40ae50ec19d1be9607232037c1db16702", + 0, + ), + ( + "sha256:dee2a3d7dd482dd8098ba543ff1dcb01efd29fcd16fdb0979ef556f38564543a", + 0, + ), + ] + }, + |op| { + Op::File(FileOp { + actions: vec![ + FileAction { + input: -1, + secondary_input: 0, + output: 0, + action: Some(Action::Copy(FileActionCopy { + src: "Cargo.toml".into(), + dest: "Cargo.toml".into(), + owner: None, + mode: -1, + follow_symlink: false, + dir_copy_contents: false, + attempt_unpack_docker_compatibility: false, + create_dest_path: false, + allow_wildcard: false, + allow_empty_wildcard: false, + timestamp: -1, + })), + }, + FileAction { + input: 2, + secondary_input: 1, + output: 1, + action: Some(Action::Copy(FileActionCopy { + src: "/bin/sh".into(), + dest: "/bin/sh".into(), + owner: None, + mode: -1, + follow_symlink: false, + dir_copy_contents: false, + attempt_unpack_docker_compatibility: false, + create_dest_path: false, + allow_wildcard: false, + allow_empty_wildcard: false, + timestamp: -1, + })), + }, + FileAction { + input: -1, + secondary_input: 3, + output: 2, + action: Some(Action::Copy(FileActionCopy { + src: "Cargo.toml".into(), + dest: "Cargo.toml".into(), + owner: None, + mode: -1, + follow_symlink: false, + dir_copy_contents: false, + attempt_unpack_docker_compatibility: false, + create_dest_path: false, + allow_wildcard: false, + allow_empty_wildcard: false, + timestamp: -1, + })), + }, + ], + }) + }, + ); +} + +#[test] +fn copy_with_params_serialization() { + use crate::prelude::*; + use buildkit_proto::pb::{file_action::Action, op::Op, FileAction, FileActionCopy, FileOp}; + + let context = Source::local("context"); + + let operation = FileSystem::sequence() + .append( + FileSystem::copy() + .from(LayerPath::Other(context.output(), "Cargo.toml")) + .to(OutputIdx(0), LayerPath::Scratch("Cargo.toml")) + .follow_symlinks(true), + ) + .append( + FileSystem::copy() + .from(LayerPath::Other(context.output(), "Cargo.toml")) + .to(OutputIdx(1), LayerPath::Scratch("Cargo.toml")) + .recursive(true), + ) + .append( + 
FileSystem::copy() + .from(LayerPath::Other(context.output(), "Cargo.toml")) + .to(OutputIdx(2), LayerPath::Scratch("Cargo.toml")) + .create_path(true), + ) + .append( + FileSystem::copy() + .from(LayerPath::Other(context.output(), "Cargo.toml")) + .to(OutputIdx(3), LayerPath::Scratch("Cargo.toml")) + .wildcard(true), + ); + + crate::check_op!( + operation, + |digest| { "sha256:8be9c1c8335d53c894d0f5848ef354c69a96a469a72b00aadae704b23d465022" }, + |description| { vec![] }, + |caps| { vec!["file.base"] }, + |cached_tail| { + vec!["sha256:a60212791641cbeaa3a49de4f7dff9e40ae50ec19d1be9607232037c1db16702"] + }, + |inputs| { + // TODO: improve the correct, but inefficent serialization + vec![ + ( + "sha256:a60212791641cbeaa3a49de4f7dff9e40ae50ec19d1be9607232037c1db16702", + 0, + ), + ( + "sha256:a60212791641cbeaa3a49de4f7dff9e40ae50ec19d1be9607232037c1db16702", + 0, + ), + ( + "sha256:a60212791641cbeaa3a49de4f7dff9e40ae50ec19d1be9607232037c1db16702", + 0, + ), + ( + "sha256:a60212791641cbeaa3a49de4f7dff9e40ae50ec19d1be9607232037c1db16702", + 0, + ), + ] + }, + |op| { + Op::File(FileOp { + actions: vec![ + FileAction { + input: -1, + secondary_input: 0, + output: 0, + action: Some(Action::Copy(FileActionCopy { + src: "Cargo.toml".into(), + dest: "Cargo.toml".into(), + owner: None, + mode: -1, + follow_symlink: true, + dir_copy_contents: false, + attempt_unpack_docker_compatibility: false, + create_dest_path: false, + allow_wildcard: false, + allow_empty_wildcard: false, + timestamp: -1, + })), + }, + FileAction { + input: -1, + secondary_input: 1, + output: 1, + action: Some(Action::Copy(FileActionCopy { + src: "Cargo.toml".into(), + dest: "Cargo.toml".into(), + owner: None, + mode: -1, + follow_symlink: false, + dir_copy_contents: true, + attempt_unpack_docker_compatibility: false, + create_dest_path: false, + allow_wildcard: false, + allow_empty_wildcard: false, + timestamp: -1, + })), + }, + FileAction { + input: -1, + secondary_input: 2, + output: 2, + action: Some(Action::Copy(FileActionCopy { + src: "Cargo.toml".into(), + dest: "Cargo.toml".into(), + owner: None, + mode: -1, + follow_symlink: false, + dir_copy_contents: false, + attempt_unpack_docker_compatibility: false, + create_dest_path: true, + allow_wildcard: false, + allow_empty_wildcard: false, + timestamp: -1, + })), + }, + FileAction { + input: -1, + secondary_input: 3, + output: 3, + action: Some(Action::Copy(FileActionCopy { + src: "Cargo.toml".into(), + dest: "Cargo.toml".into(), + owner: None, + mode: -1, + follow_symlink: false, + dir_copy_contents: false, + attempt_unpack_docker_compatibility: false, + create_dest_path: false, + allow_wildcard: true, + allow_empty_wildcard: false, + timestamp: -1, + })), + }, + ], + }) + }, + ); +} + +#[test] +fn mkdir_serialization() { + use crate::prelude::*; + use buildkit_proto::pb::{file_action::Action, op::Op, FileAction, FileActionMkDir, FileOp}; + + let context = Source::local("context"); + + let operation = FileSystem::sequence() + .append( + FileSystem::mkdir( + OutputIdx(0), + LayerPath::Other(context.output(), "/new-crate"), + ) + .make_parents(true), + ) + .append(FileSystem::mkdir( + OutputIdx(1), + LayerPath::Scratch("/new-crate"), + )) + .append(FileSystem::mkdir( + OutputIdx(2), + LayerPath::Own(OwnOutputIdx(1), "/another-crate/deep/directory"), + )); + + crate::check_op!( + operation, + |digest| { "sha256:bfcd58256cba441c6d9e89c439bc6640b437d47213472cf8491646af4f0aa5b2" }, + |description| { vec![] }, + |caps| { vec!["file.base"] }, + |cached_tail| { + 
vec!["sha256:a60212791641cbeaa3a49de4f7dff9e40ae50ec19d1be9607232037c1db16702"] + }, + |inputs| { + vec![( + "sha256:a60212791641cbeaa3a49de4f7dff9e40ae50ec19d1be9607232037c1db16702", + 0, + )] + }, + |op| { + Op::File(FileOp { + actions: vec![ + FileAction { + input: 0, + secondary_input: -1, + output: 0, + action: Some(Action::Mkdir(FileActionMkDir { + path: "/new-crate".into(), + owner: None, + mode: -1, + timestamp: -1, + make_parents: true, + })), + }, + FileAction { + input: -1, + secondary_input: -1, + output: 1, + action: Some(Action::Mkdir(FileActionMkDir { + path: "/new-crate".into(), + owner: None, + mode: -1, + timestamp: -1, + make_parents: false, + })), + }, + FileAction { + input: 2, + secondary_input: -1, + output: 2, + action: Some(Action::Mkdir(FileActionMkDir { + path: "/another-crate/deep/directory".into(), + owner: None, + mode: -1, + timestamp: -1, + make_parents: false, + })), + }, + ], + }) + }, + ); +} + +#[test] +fn mkfile_serialization() { + use crate::prelude::*; + use buildkit_proto::pb::{file_action::Action, op::Op, FileAction, FileActionMkFile, FileOp}; + + let context = Source::local("context"); + + let operation = FileSystem::sequence() + .append( + FileSystem::mkfile( + OutputIdx(0), + LayerPath::Other(context.output(), "/build-plan.json"), + ) + .data(b"any bytes".to_vec()), + ) + .append(FileSystem::mkfile( + OutputIdx(1), + LayerPath::Scratch("/build-graph.json"), + )) + .append(FileSystem::mkfile( + OutputIdx(2), + LayerPath::Own(OwnOutputIdx(1), "/llb.pb"), + )); + + crate::check_op!( + operation, + |digest| { "sha256:9c0d9f741dfc9b4ea8d909ebf388bc354da0ee401eddf5633e8e4ece7e87d22d" }, + |description| { vec![] }, + |caps| { vec!["file.base"] }, + |cached_tail| { + vec!["sha256:a60212791641cbeaa3a49de4f7dff9e40ae50ec19d1be9607232037c1db16702"] + }, + |inputs| { + vec![( + "sha256:a60212791641cbeaa3a49de4f7dff9e40ae50ec19d1be9607232037c1db16702", + 0, + )] + }, + |op| { + Op::File(FileOp { + actions: vec![ + FileAction { + input: 0, + secondary_input: -1, + output: 0, + action: Some(Action::Mkfile(FileActionMkFile { + path: "/build-plan.json".into(), + owner: None, + mode: -1, + timestamp: -1, + data: b"any bytes".to_vec(), + })), + }, + FileAction { + input: -1, + secondary_input: -1, + output: 1, + action: Some(Action::Mkfile(FileActionMkFile { + path: "/build-graph.json".into(), + owner: None, + mode: -1, + timestamp: -1, + data: vec![], + })), + }, + FileAction { + input: 2, + secondary_input: -1, + output: 2, + action: Some(Action::Mkfile(FileActionMkFile { + path: "/llb.pb".into(), + owner: None, + mode: -1, + timestamp: -1, + data: vec![], + })), + }, + ], + }) + }, + ); +} diff --git a/buildkit-llb/src/ops/fs/path.rs b/buildkit-llb/src/ops/fs/path.rs new file mode 100644 index 0000000..b1f7bea --- /dev/null +++ b/buildkit-llb/src/ops/fs/path.rs @@ -0,0 +1,33 @@ +use std::path::{Path, PathBuf}; + +use crate::utils::{OperationOutput, OwnOutputIdx}; + +/// Internal representation for not yet specified path. +#[derive(Debug)] +pub struct UnsetPath; + +/// Operand of *file system operations* that defines either source or destination layer and a path. +#[derive(Debug)] +pub enum LayerPath<'a, P: AsRef> { + /// References one of the *current operation outputs* and a path. + Own(OwnOutputIdx, P), + + /// References an *output of another operation* and a path. + Other(OperationOutput<'a>, P), + + /// A path in an *empty* layer (equivalent of Dockerfile's scratch source). 
+ Scratch(P), +} + +impl<'a, P: AsRef> LayerPath<'a, P> { + /// Transform the layer path into owned variant (basically, with `PathBuf` as the path). + pub fn into_owned(self) -> LayerPath<'a, PathBuf> { + use LayerPath::*; + + match self { + Other(input, path) => Other(input, path.as_ref().into()), + Own(output, path) => Own(output, path.as_ref().into()), + Scratch(path) => Scratch(path.as_ref().into()), + } + } +} diff --git a/buildkit-llb/src/ops/fs/sequence.rs b/buildkit-llb/src/ops/fs/sequence.rs new file mode 100644 index 0000000..fe38515 --- /dev/null +++ b/buildkit-llb/src/ops/fs/sequence.rs @@ -0,0 +1,140 @@ +use std::collections::HashMap; +use std::sync::Arc; + +use buildkit_proto::pb::{self, op::Op}; + +use super::FileOperation; + +use crate::ops::*; +use crate::serialization::{Context, Node, Operation, OperationId, Result}; +use crate::utils::{OperationOutput, OutputIdx}; + +#[derive(Debug)] +pub struct SequenceOperation<'a> { + id: OperationId, + inner: Vec>, + + description: HashMap, + caps: HashMap, + ignore_cache: bool, +} + +impl<'a> SequenceOperation<'a> { + pub(crate) fn new() -> Self { + let mut caps = HashMap::::new(); + caps.insert("file.base".into(), true); + + Self { + id: OperationId::default(), + inner: vec![], + + caps, + description: Default::default(), + ignore_cache: false, + } + } + + pub fn append(mut self, op: T) -> Self + where + T: FileOperation + 'a, + { + // TODO: verify no duplicated outputs + + self.inner.push(Box::new(op)); + self + } + + pub fn last_output_index(&self) -> Option { + // TODO: make sure the `inner` elements have monotonic indexes + + self.inner + .iter() + .filter(|fs| fs.output() >= 0) + .last() + .map(|fs| fs.output() as u32) + } +} + +impl<'a, 'b: 'a> MultiBorrowedOutput<'b> for SequenceOperation<'b> { + fn output(&'b self, index: u32) -> OperationOutput<'b> { + // TODO: check if the requested index available. + OperationOutput::borrowed(self, OutputIdx(index)) + } +} + +impl<'a> MultiOwnedOutput<'a> for Arc> { + fn output(&self, index: u32) -> OperationOutput<'a> { + // TODO: check if the requested index available. 
+ OperationOutput::owned(self.clone(), OutputIdx(index)) + } +} + +impl<'a, 'b: 'a> MultiBorrowedLastOutput<'b> for SequenceOperation<'b> { + fn last_output(&'b self) -> Option> { + self.last_output_index().map(|index| self.output(index)) + } +} + +impl<'a> MultiOwnedLastOutput<'a> for Arc> { + fn last_output(&self) -> Option> { + self.last_output_index().map(|index| self.output(index)) + } +} + +impl<'a> OperationBuilder<'a> for SequenceOperation<'a> { + fn custom_name(mut self, name: S) -> Self + where + S: Into, + { + self.description + .insert("llb.customname".into(), name.into()); + + self + } + + fn ignore_cache(mut self, ignore: bool) -> Self { + self.ignore_cache = ignore; + self + } +} + +impl<'a> Operation for SequenceOperation<'a> { + fn id(&self) -> &OperationId { + &self.id + } + + fn serialize(&self, cx: &mut Context) -> Result { + let mut inputs = vec![]; + let mut input_offsets = vec![]; + + for item in &self.inner { + let mut inner_inputs = item.serialize_inputs(cx)?; + + input_offsets.push(inputs.len()); + inputs.append(&mut inner_inputs); + } + + let mut actions = vec![]; + + for (item, offset) in self.inner.iter().zip(input_offsets.into_iter()) { + actions.push(item.serialize_action(inputs.len(), offset)?); + } + + let head = pb::Op { + inputs, + op: Some(Op::File(pb::FileOp { actions })), + + ..Default::default() + }; + + let metadata = pb::OpMetadata { + description: self.description.clone(), + caps: self.caps.clone(), + ignore_cache: self.ignore_cache, + + ..Default::default() + }; + + Ok(Node::new(head, metadata)) + } +} diff --git a/buildkit-llb/src/ops/mod.rs b/buildkit-llb/src/ops/mod.rs new file mode 100644 index 0000000..d5bc05f --- /dev/null +++ b/buildkit-llb/src/ops/mod.rs @@ -0,0 +1,56 @@ +use std::sync::Arc; + +pub mod exec; +pub mod fs; +pub mod source; +pub mod terminal; + +pub use self::exec::Command; +pub use self::fs::FileSystem; +pub use self::source::Source; +pub use self::terminal::Terminal; + +use crate::utils::OperationOutput; + +pub trait MultiBorrowedOutput<'a> { + fn output(&'a self, number: u32) -> OperationOutput<'a>; +} + +pub trait MultiBorrowedLastOutput<'a> { + fn last_output(&'a self) -> Option>; +} + +pub trait MultiOwnedOutput<'a> { + fn output(&self, number: u32) -> OperationOutput<'a>; +} + +pub trait MultiOwnedLastOutput<'a> { + fn last_output(&self) -> Option>; +} + +pub trait SingleBorrowedOutput<'a> { + fn output(&'a self) -> OperationOutput<'a>; +} + +pub trait SingleOwnedOutput<'a> { + fn output(&self) -> OperationOutput<'a>; +} + +/// Common operation methods. +pub trait OperationBuilder<'a> { + /// Sets an operation display name. + fn custom_name(self, name: S) -> Self + where + S: Into; + + /// Sets caching behavior. + fn ignore_cache(self, ignore: bool) -> Self; + + /// Convert the operation into `Arc` so it can be shared when efficient borrowing is not possible. 
+ fn ref_counted(self) -> Arc + where + Self: Sized + 'a, + { + Arc::new(self) + } +} diff --git a/buildkit-llb/src/ops/source/git.rs b/buildkit-llb/src/ops/source/git.rs new file mode 100644 index 0000000..7a8f466 --- /dev/null +++ b/buildkit-llb/src/ops/source/git.rs @@ -0,0 +1,230 @@ +use std::collections::HashMap; +use std::sync::Arc; + +use buildkit_proto::pb::{self, op::Op, OpMetadata, SourceOp}; + +use crate::ops::{OperationBuilder, SingleBorrowedOutput, SingleOwnedOutput}; +use crate::serialization::{Context, Node, Operation, OperationId, Result}; +use crate::utils::{OperationOutput, OutputIdx}; + +#[derive(Default, Debug)] +pub struct GitSource { + id: OperationId, + remote: String, + reference: Option, + description: HashMap, + ignore_cache: bool, +} + +impl GitSource { + pub(crate) fn new(url: S) -> Self + where + S: Into, + { + let mut raw_url = url.into(); + let remote = if raw_url.starts_with("http://") { + raw_url.split_off(7) + } else if raw_url.starts_with("https://") { + raw_url.split_off(8) + } else if raw_url.starts_with("git://") { + raw_url.split_off(6) + } else if raw_url.starts_with("git@") { + raw_url.split_off(4) + } else { + raw_url + }; + + Self { + id: OperationId::default(), + remote, + reference: None, + description: Default::default(), + ignore_cache: false, + } + } +} + +impl GitSource { + pub fn with_reference(mut self, reference: S) -> Self + where + S: Into, + { + self.reference = Some(reference.into()); + self + } +} + +impl<'a> SingleBorrowedOutput<'a> for GitSource { + fn output(&'a self) -> OperationOutput<'a> { + OperationOutput::borrowed(self, OutputIdx(0)) + } +} + +impl<'a> SingleOwnedOutput<'static> for Arc { + fn output(&self) -> OperationOutput<'static> { + OperationOutput::owned(self.clone(), OutputIdx(0)) + } +} + +impl OperationBuilder<'static> for GitSource { + fn custom_name(mut self, name: S) -> Self + where + S: Into, + { + self.description + .insert("llb.customname".into(), name.into()); + + self + } + + fn ignore_cache(mut self, ignore: bool) -> Self { + self.ignore_cache = ignore; + self + } +} + +impl Operation for GitSource { + fn id(&self) -> &OperationId { + &self.id + } + + fn serialize(&self, _: &mut Context) -> Result { + let identifier = if let Some(ref reference) = self.reference { + format!("git://{}#{}", self.remote, reference) + } else { + format!("git://{}", self.remote) + }; + + let head = pb::Op { + op: Some(Op::Source(SourceOp { + identifier, + attrs: Default::default(), + })), + + ..Default::default() + }; + + let metadata = OpMetadata { + description: self.description.clone(), + ignore_cache: self.ignore_cache, + + ..Default::default() + }; + + Ok(Node::new(head, metadata)) + } +} + +#[test] +fn serialization() { + crate::check_op!( + GitSource::new("any.url"), + |digest| { "sha256:ecde982e19ace932e5474e57b0ca71ba690ed7d28abff2a033e8f969e22bf2d8" }, + |description| { vec![] }, + |caps| { vec![] }, + |cached_tail| { vec![] }, + |inputs| { vec![] }, + |op| { + Op::Source(SourceOp { + identifier: "git://any.url".into(), + attrs: Default::default(), + }) + }, + ); + + crate::check_op!( + GitSource::new("any.url").custom_name("git custom name"), + |digest| { "sha256:ecde982e19ace932e5474e57b0ca71ba690ed7d28abff2a033e8f969e22bf2d8" }, + |description| { vec![("llb.customname", "git custom name")] }, + |caps| { vec![] }, + |cached_tail| { vec![] }, + |inputs| { vec![] }, + |op| { + Op::Source(SourceOp { + identifier: "git://any.url".into(), + attrs: Default::default(), + }) + }, + ); +} + +#[test] +fn prefixes() { + 
crate::check_op!( + GitSource::new("http://any.url"), + |digest| { "sha256:ecde982e19ace932e5474e57b0ca71ba690ed7d28abff2a033e8f969e22bf2d8" }, + |description| { vec![] }, + |caps| { vec![] }, + |cached_tail| { vec![] }, + |inputs| { vec![] }, + |op| { + Op::Source(SourceOp { + identifier: "git://any.url".into(), + attrs: Default::default(), + }) + }, + ); + + crate::check_op!( + GitSource::new("https://any.url"), + |digest| { "sha256:ecde982e19ace932e5474e57b0ca71ba690ed7d28abff2a033e8f969e22bf2d8" }, + |description| { vec![] }, + |caps| { vec![] }, + |cached_tail| { vec![] }, + |inputs| { vec![] }, + |op| { + Op::Source(SourceOp { + identifier: "git://any.url".into(), + attrs: Default::default(), + }) + }, + ); + + crate::check_op!( + GitSource::new("git://any.url"), + |digest| { "sha256:ecde982e19ace932e5474e57b0ca71ba690ed7d28abff2a033e8f969e22bf2d8" }, + |description| { vec![] }, + |caps| { vec![] }, + |cached_tail| { vec![] }, + |inputs| { vec![] }, + |op| { + Op::Source(SourceOp { + identifier: "git://any.url".into(), + attrs: Default::default(), + }) + }, + ); + + crate::check_op!( + GitSource::new("git@any.url"), + |digest| { "sha256:ecde982e19ace932e5474e57b0ca71ba690ed7d28abff2a033e8f969e22bf2d8" }, + |description| { vec![] }, + |caps| { vec![] }, + |cached_tail| { vec![] }, + |inputs| { vec![] }, + |op| { + Op::Source(SourceOp { + identifier: "git://any.url".into(), + attrs: Default::default(), + }) + }, + ); +} + +#[test] +fn with_reference() { + crate::check_op!( + GitSource::new("any.url").with_reference("abcdef"), + |digest| { "sha256:f59aa7f8db62e0b5c2a1da396752ba8a2bb0b5d28ddcfdd1d4f822d26ebfe3cf" }, + |description| { vec![] }, + |caps| { vec![] }, + |cached_tail| { vec![] }, + |inputs| { vec![] }, + |op| { + Op::Source(SourceOp { + identifier: "git://any.url#abcdef".into(), + attrs: Default::default(), + }) + }, + ); +} diff --git a/buildkit-llb/src/ops/source/http.rs b/buildkit-llb/src/ops/source/http.rs new file mode 100644 index 0000000..81b8f07 --- /dev/null +++ b/buildkit-llb/src/ops/source/http.rs @@ -0,0 +1,153 @@ +use std::collections::HashMap; +use std::sync::Arc; + +use buildkit_proto::pb::{self, op::Op, OpMetadata, SourceOp}; + +use crate::ops::{OperationBuilder, SingleBorrowedOutput, SingleOwnedOutput}; +use crate::serialization::{Context, Node, Operation, OperationId, Result}; +use crate::utils::{OperationOutput, OutputIdx}; + +#[derive(Default, Debug)] +pub struct HttpSource { + id: OperationId, + url: String, + file_name: Option, + description: HashMap, + ignore_cache: bool, +} + +impl HttpSource { + pub(crate) fn new(url: S) -> Self + where + S: Into, + { + Self { + id: OperationId::default(), + url: url.into(), + file_name: None, + description: Default::default(), + ignore_cache: false, + } + } +} + +impl HttpSource { + pub fn with_file_name(mut self, name: S) -> Self + where + S: Into, + { + self.file_name = Some(name.into()); + self + } +} + +impl<'a> SingleBorrowedOutput<'a> for HttpSource { + fn output(&'a self) -> OperationOutput<'a> { + OperationOutput::borrowed(self, OutputIdx(0)) + } +} + +impl<'a> SingleOwnedOutput<'static> for Arc { + fn output(&self) -> OperationOutput<'static> { + OperationOutput::owned(self.clone(), OutputIdx(0)) + } +} + +impl OperationBuilder<'static> for HttpSource { + fn custom_name(mut self, name: S) -> Self + where + S: Into, + { + self.description + .insert("llb.customname".into(), name.into()); + + self + } + + fn ignore_cache(mut self, ignore: bool) -> Self { + self.ignore_cache = ignore; + self + } +} + +impl 
Operation for HttpSource { + fn id(&self) -> &OperationId { + &self.id + } + + fn serialize(&self, _: &mut Context) -> Result { + let mut attrs = HashMap::default(); + + if let Some(ref file_name) = self.file_name { + attrs.insert("http.filename".into(), file_name.into()); + } + + let head = pb::Op { + op: Some(Op::Source(SourceOp { + identifier: self.url.clone(), + attrs, + })), + + ..Default::default() + }; + + let metadata = OpMetadata { + description: self.description.clone(), + ignore_cache: self.ignore_cache, + + ..Default::default() + }; + + Ok(Node::new(head, metadata)) + } +} + +#[test] +fn serialization() { + crate::check_op!( + HttpSource::new("http://any.url/with/path"), + |digest| { "sha256:22ec64461f39dd3b54680fc240b459248b1ced597f113b5d692abe9695860d12" }, + |description| { vec![] }, + |caps| { vec![] }, + |cached_tail| { vec![] }, + |inputs| { vec![] }, + |op| { + Op::Source(SourceOp { + identifier: "http://any.url/with/path".into(), + attrs: Default::default(), + }) + }, + ); + + crate::check_op!( + HttpSource::new("http://any.url/with/path").custom_name("git custom name"), + |digest| { "sha256:22ec64461f39dd3b54680fc240b459248b1ced597f113b5d692abe9695860d12" }, + |description| { vec![("llb.customname", "git custom name")] }, + |caps| { vec![] }, + |cached_tail| { vec![] }, + |inputs| { vec![] }, + |op| { + Op::Source(SourceOp { + identifier: "http://any.url/with/path".into(), + attrs: Default::default(), + }) + }, + ); + + crate::check_op!( + HttpSource::new("http://any.url/with/path").with_file_name("file.name"), + |digest| { "sha256:e1fe6584287dfa2b065ed29fcf4f77bcf86fb54781832d2f45074fa1671df692" }, + |description| { vec![] }, + |caps| { vec![] }, + |cached_tail| { vec![] }, + |inputs| { vec![] }, + |op| { + Op::Source(SourceOp { + identifier: "http://any.url/with/path".into(), + attrs: vec![("http.filename".to_string(), "file.name".to_string())] + .into_iter() + .collect(), + }) + }, + ); +} diff --git a/buildkit-llb/src/ops/source/image.rs b/buildkit-llb/src/ops/source/image.rs new file mode 100644 index 0000000..a125610 --- /dev/null +++ b/buildkit-llb/src/ops/source/image.rs @@ -0,0 +1,477 @@ +use std::collections::HashMap; +use std::fmt; +use std::sync::Arc; + +use buildkit_proto::pb::{self, op::Op, OpMetadata, SourceOp}; +use lazy_static::*; +use regex::Regex; + +use crate::ops::{OperationBuilder, SingleBorrowedOutput, SingleOwnedOutput}; +use crate::serialization::{Context, Node, Operation, OperationId, Result}; +use crate::utils::{OperationOutput, OutputIdx}; + +#[derive(Debug)] +pub struct ImageSource { + id: OperationId, + + domain: Option, + name: String, + tag: Option, + digest: Option, + + description: HashMap, + ignore_cache: bool, + resolve_mode: Option, +} + +#[derive(Debug, Clone, Copy)] +pub enum ResolveMode { + Default, + ForcePull, + PreferLocal, +} + +impl fmt::Display for ResolveMode { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + ResolveMode::Default => write!(f, "default"), + ResolveMode::ForcePull => write!(f, "pull"), + ResolveMode::PreferLocal => write!(f, "local"), + } + } +} + +impl Default for ResolveMode { + fn default() -> Self { + ResolveMode::Default + } +} + +lazy_static! 
{ + static ref TAG_EXPR: Regex = Regex::new(r":[\w][\w.-]+$").unwrap(); +} + +impl ImageSource { + // The implementation is based on: + // https://github.com/containerd/containerd/blob/614c0858f2a8db9ee0c788a9164870069f3e53ed/reference/docker/reference.go + pub(crate) fn new(name: S) -> Self + where + S: Into, + { + let mut name = name.into(); + + let (digest, digest_separator) = match name.find('@') { + Some(pos) => (Some(name[pos + 1..].into()), pos), + None => (None, name.len()), + }; + + name.truncate(digest_separator); + + let (tag, tag_separator) = match TAG_EXPR.find(&name) { + Some(found) => (Some(name[found.start() + 1..].into()), found.start()), + None => (None, name.len()), + }; + + name.truncate(tag_separator); + + let (domain, mut name) = match name.find('/') { + // The input has canonical-like format. + Some(separator_pos) if &name[..separator_pos] == "docker.io" => { + (None, name[separator_pos + 1..].into()) + } + + // Special case when domain is "localhost". + Some(separator_pos) if &name[..separator_pos] == "localhost" => { + (Some("localhost".into()), name[separator_pos + 1..].into()) + } + + // General case for a common domain. + Some(separator_pos) if name[..separator_pos].find('.').is_some() => ( + Some(name[..separator_pos].into()), + name[separator_pos + 1..].into(), + ), + + // General case for a domain with port number. + Some(separator_pos) if name[..separator_pos].find(':').is_some() => ( + Some(name[..separator_pos].into()), + name[separator_pos + 1..].into(), + ), + + // Fallback if the first component is not a domain name. + Some(_) => (None, name), + + // Fallback if only single url component present. + None => (None, name), + }; + + if domain.is_none() && name.find('/').is_none() { + name = format!("library/{}", name); + } + + Self { + id: OperationId::default(), + + domain, + name, + tag, + digest, + + description: Default::default(), + ignore_cache: false, + resolve_mode: None, + } + } + + pub fn with_resolve_mode(mut self, mode: ResolveMode) -> Self { + self.resolve_mode = Some(mode); + self + } + + pub fn resolve_mode(&self) -> Option { + self.resolve_mode + } + + pub fn with_digest(mut self, digest: S) -> Self + where + S: Into, + { + self.digest = Some(digest.into()); + self + } + + pub fn with_tag(mut self, tag: S) -> Self + where + S: Into, + { + self.tag = Some(tag.into()); + self + } + + pub fn canonical_name(&self) -> String { + let domain = match self.domain { + Some(ref domain) => domain, + None => "docker.io", + }; + + let tag = match self.tag { + Some(ref tag) => tag, + None => "latest", + }; + + match self.digest { + Some(ref digest) => format!("{}/{}:{}@{}", domain, self.name, tag, digest), + None => format!("{}/{}:{}", domain, self.name, tag), + } + } +} + +impl<'a> SingleBorrowedOutput<'a> for ImageSource { + fn output(&'a self) -> OperationOutput<'a> { + OperationOutput::borrowed(self, OutputIdx(0)) + } +} + +impl<'a> SingleOwnedOutput<'static> for Arc { + fn output(&self) -> OperationOutput<'static> { + OperationOutput::owned(self.clone(), OutputIdx(0)) + } +} + +impl OperationBuilder<'static> for ImageSource { + fn custom_name(mut self, name: S) -> Self + where + S: Into, + { + self.description + .insert("llb.customname".into(), name.into()); + + self + } + + fn ignore_cache(mut self, ignore: bool) -> Self { + self.ignore_cache = ignore; + self + } +} + +impl Operation for ImageSource { + fn id(&self) -> &OperationId { + &self.id + } + + fn serialize(&self, _: &mut Context) -> Result { + let mut attrs = HashMap::default(); + + if let 
Some(ref mode) = self.resolve_mode { + attrs.insert("image.resolvemode".into(), mode.to_string()); + } + + let head = pb::Op { + op: Some(Op::Source(SourceOp { + identifier: format!("docker-image://{}", self.canonical_name()), + attrs, + })), + + ..Default::default() + }; + + let metadata = OpMetadata { + description: self.description.clone(), + ignore_cache: self.ignore_cache, + + ..Default::default() + }; + + Ok(Node::new(head, metadata)) + } +} + +#[test] +fn serialization() { + crate::check_op!( + ImageSource::new("rustlang/rust:nightly"), + |digest| { "sha256:dee2a3d7dd482dd8098ba543ff1dcb01efd29fcd16fdb0979ef556f38564543a" }, + |description| { vec![] }, + |caps| { vec![] }, + |cached_tail| { vec![] }, + |inputs| { vec![] }, + |op| { + Op::Source(SourceOp { + identifier: "docker-image://docker.io/rustlang/rust:nightly".into(), + attrs: Default::default(), + }) + }, + ); + + crate::check_op!( + ImageSource::new("library/alpine:latest"), + |digest| { "sha256:0e6b31ceed3e6dc542018f35a53a0e857e6a188453d32a2a5bbe7aa2971c1220" }, + |description| { vec![] }, + |caps| { vec![] }, + |cached_tail| { vec![] }, + |inputs| { vec![] }, + |op| { + Op::Source(SourceOp { + identifier: "docker-image://docker.io/library/alpine:latest".into(), + attrs: Default::default(), + }) + }, + ); + + crate::check_op!( + ImageSource::new("rustlang/rust:nightly").custom_name("image custom name"), + |digest| { "sha256:dee2a3d7dd482dd8098ba543ff1dcb01efd29fcd16fdb0979ef556f38564543a" }, + |description| { vec![("llb.customname", "image custom name")] }, + |caps| { vec![] }, + |cached_tail| { vec![] }, + |inputs| { vec![] }, + |op| { + Op::Source(SourceOp { + identifier: "docker-image://docker.io/rustlang/rust:nightly".into(), + attrs: Default::default(), + }) + }, + ); + + crate::check_op!( + ImageSource::new("rustlang/rust:nightly").with_digest("sha256:123456"), + |digest| { "sha256:a9837e26998d165e7b6433f8d40b36d259905295860fcbbc62bbce75a6c991c6" }, + |description| { vec![] }, + |caps| { vec![] }, + |cached_tail| { vec![] }, + |inputs| { vec![] }, + |op| { + Op::Source(SourceOp { + identifier: "docker-image://docker.io/rustlang/rust:nightly@sha256:123456".into(), + attrs: Default::default(), + }) + }, + ); +} + +#[test] +fn resolve_mode() { + crate::check_op!( + ImageSource::new("rustlang/rust:nightly").with_resolve_mode(ResolveMode::Default), + |digest| { "sha256:792e246751e84b9a5e40c28900d70771a07e8cc920c1039cdddfc6bf69256dfe" }, + |description| { vec![] }, + |caps| { vec![] }, + |cached_tail| { vec![] }, + |inputs| { vec![] }, + |op| { + Op::Source(SourceOp { + identifier: "docker-image://docker.io/rustlang/rust:nightly".into(), + attrs: crate::utils::test::to_map(vec![("image.resolvemode", "default")]), + }) + }, + ); + + crate::check_op!( + ImageSource::new("rustlang/rust:nightly").with_resolve_mode(ResolveMode::ForcePull), + |digest| { "sha256:0bd920010eab701bdce44c61d220e6943d56d3fb9a9fa4e773fc060c0d746122" }, + |description| { vec![] }, + |caps| { vec![] }, + |cached_tail| { vec![] }, + |inputs| { vec![] }, + |op| { + Op::Source(SourceOp { + identifier: "docker-image://docker.io/rustlang/rust:nightly".into(), + attrs: crate::utils::test::to_map(vec![("image.resolvemode", "pull")]), + }) + }, + ); + + crate::check_op!( + ImageSource::new("rustlang/rust:nightly").with_resolve_mode(ResolveMode::PreferLocal), + |digest| { "sha256:bd6797c8644d2663b29c36a8b3b63931e539be44ede5e56aca2da4f35f241f18" }, + |description| { vec![] }, + |caps| { vec![] }, + |cached_tail| { vec![] }, + |inputs| { vec![] }, + |op| { + 
Op::Source(SourceOp { + identifier: "docker-image://docker.io/rustlang/rust:nightly".into(), + attrs: crate::utils::test::to_map(vec![("image.resolvemode", "local")]), + }) + }, + ); +} + +#[test] +fn image_name() { + crate::check_op!(ImageSource::new("rustlang/rust"), |op| { + Op::Source(SourceOp { + identifier: "docker-image://docker.io/rustlang/rust:latest".into(), + attrs: Default::default(), + }) + }); + + crate::check_op!(ImageSource::new("rust:nightly"), |op| { + Op::Source(SourceOp { + identifier: "docker-image://docker.io/library/rust:nightly".into(), + attrs: Default::default(), + }) + }); + + crate::check_op!(ImageSource::new("rust"), |op| { + Op::Source(SourceOp { + identifier: "docker-image://docker.io/library/rust:latest".into(), + attrs: Default::default(), + }) + }); + + crate::check_op!(ImageSource::new("library/rust"), |op| { + Op::Source(SourceOp { + identifier: "docker-image://docker.io/library/rust:latest".into(), + attrs: Default::default(), + }) + }); + + crate::check_op!(ImageSource::new("rust:obj@sha256:abcdef"), |op| { + Op::Source(SourceOp { + identifier: "docker-image://docker.io/library/rust:obj@sha256:abcdef".into(), + attrs: Default::default(), + }) + }); + + crate::check_op!(ImageSource::new("rust@sha256:abcdef"), |op| { + Op::Source(SourceOp { + identifier: "docker-image://docker.io/library/rust:latest@sha256:abcdef".into(), + attrs: Default::default(), + }) + }); + + crate::check_op!(ImageSource::new("rust:obj@abcdef"), |op| { + Op::Source(SourceOp { + identifier: "docker-image://docker.io/library/rust:obj@abcdef".into(), + attrs: Default::default(), + }) + }); + + crate::check_op!( + ImageSource::new("b.gcr.io/test.example.com/my-app:test.example.com"), + |op| { + Op::Source(SourceOp { + identifier: "docker-image://b.gcr.io/test.example.com/my-app:test.example.com" + .into(), + attrs: Default::default(), + }) + } + ); + + crate::check_op!( + ImageSource::new("sub-dom1.foo.com/bar/baz/quux:some-long-tag"), + |op| { + Op::Source(SourceOp { + identifier: "docker-image://sub-dom1.foo.com/bar/baz/quux:some-long-tag".into(), + attrs: Default::default(), + }) + } + ); + + crate::check_op!( + ImageSource::new("sub-dom1.foo.com/quux:some-long-tag"), + |op| { + Op::Source(SourceOp { + identifier: "docker-image://sub-dom1.foo.com/quux:some-long-tag".into(), + attrs: Default::default(), + }) + } + ); + + crate::check_op!(ImageSource::new("localhost/rust:obj"), |op| { + Op::Source(SourceOp { + identifier: "docker-image://localhost/rust:obj".into(), + attrs: Default::default(), + }) + }); + + crate::check_op!(ImageSource::new("127.0.0.1/rust:obj"), |op| { + Op::Source(SourceOp { + identifier: "docker-image://127.0.0.1/rust:obj".into(), + attrs: Default::default(), + }) + }); + + crate::check_op!(ImageSource::new("localhost:5000/rust:obj"), |op| { + Op::Source(SourceOp { + identifier: "docker-image://localhost:5000/rust:obj".into(), + attrs: Default::default(), + }) + }); + + crate::check_op!(ImageSource::new("127.0.0.1:5000/rust:obj"), |op| { + Op::Source(SourceOp { + identifier: "docker-image://127.0.0.1:5000/rust:obj".into(), + attrs: Default::default(), + }) + }); + + crate::check_op!(ImageSource::new("localhost:5000/rust"), |op| { + Op::Source(SourceOp { + identifier: "docker-image://localhost:5000/rust:latest".into(), + attrs: Default::default(), + }) + }); + + crate::check_op!(ImageSource::new("127.0.0.1:5000/rust"), |op| { + Op::Source(SourceOp { + identifier: "docker-image://127.0.0.1:5000/rust:latest".into(), + attrs: Default::default(), + }) + }); + + 
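    // An explicit "docker.io/" prefix is dropped by the parser and re-added by
    // `canonical_name()`, so these references normalize exactly like the short
    // forms above: "library/" is prepended to single-component names and
    // ":latest" is appended when no tag is given.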
crate::check_op!(ImageSource::new("docker.io/rust"), |op| { + Op::Source(SourceOp { + identifier: "docker-image://docker.io/library/rust:latest".into(), + attrs: Default::default(), + }) + }); + + crate::check_op!(ImageSource::new("docker.io/library/rust"), |op| { + Op::Source(SourceOp { + identifier: "docker-image://docker.io/library/rust:latest".into(), + attrs: Default::default(), + }) + }); +} diff --git a/buildkit-llb/src/ops/source/local.rs b/buildkit-llb/src/ops/source/local.rs new file mode 100644 index 0000000..9b948c5 --- /dev/null +++ b/buildkit-llb/src/ops/source/local.rs @@ -0,0 +1,202 @@ +use std::collections::HashMap; +use std::sync::Arc; + +use buildkit_proto::pb::{self, op::Op, OpMetadata, SourceOp}; + +use crate::ops::{OperationBuilder, SingleBorrowedOutput, SingleOwnedOutput}; +use crate::serialization::{Context, Node, Operation, OperationId, Result}; +use crate::utils::{OperationOutput, OutputIdx}; + +#[derive(Default, Debug)] +pub struct LocalSource { + id: OperationId, + name: String, + description: HashMap, + ignore_cache: bool, + + exclude: Vec, + include: Vec, +} + +impl LocalSource { + pub(crate) fn new(name: S) -> Self + where + S: Into, + { + Self { + id: OperationId::default(), + name: name.into(), + ignore_cache: false, + + ..Default::default() + } + } + + pub fn add_include_pattern(mut self, include: S) -> Self + where + S: Into, + { + // TODO: add `source.local.includepatterns` capability + self.include.push(include.into()); + self + } + + pub fn add_exclude_pattern(mut self, exclude: S) -> Self + where + S: Into, + { + // TODO: add `source.local.excludepatterns` capability + self.exclude.push(exclude.into()); + self + } +} + +impl<'a> SingleBorrowedOutput<'a> for LocalSource { + fn output(&'a self) -> OperationOutput<'a> { + OperationOutput::borrowed(self, OutputIdx(0)) + } +} + +impl<'a> SingleOwnedOutput<'static> for Arc { + fn output(&self) -> OperationOutput<'static> { + OperationOutput::owned(self.clone(), OutputIdx(0)) + } +} + +impl OperationBuilder<'static> for LocalSource { + fn custom_name(mut self, name: S) -> Self + where + S: Into, + { + self.description + .insert("llb.customname".into(), name.into()); + + self + } + + fn ignore_cache(mut self, ignore: bool) -> Self { + self.ignore_cache = ignore; + self + } +} + +impl Operation for LocalSource { + fn id(&self) -> &OperationId { + &self.id + } + + fn serialize(&self, _: &mut Context) -> Result { + let mut attrs = HashMap::default(); + + if !self.exclude.is_empty() { + attrs.insert( + "local.excludepatterns".into(), + serde_json::to_string(&self.exclude).unwrap(), + ); + } + + if !self.include.is_empty() { + attrs.insert( + "local.includepattern".into(), + serde_json::to_string(&self.include).unwrap(), + ); + } + + let head = pb::Op { + op: Some(Op::Source(SourceOp { + identifier: format!("local://{}", self.name), + attrs, + })), + + ..Default::default() + }; + + let metadata = OpMetadata { + description: self.description.clone(), + ignore_cache: self.ignore_cache, + + ..Default::default() + }; + + Ok(Node::new(head, metadata)) + } +} + +#[test] +fn serialization() { + crate::check_op!( + LocalSource::new("context"), + |digest| { "sha256:a60212791641cbeaa3a49de4f7dff9e40ae50ec19d1be9607232037c1db16702" }, + |description| { vec![] }, + |caps| { vec![] }, + |cached_tail| { vec![] }, + |inputs| { vec![] }, + |op| { + Op::Source(SourceOp { + identifier: "local://context".into(), + attrs: Default::default(), + }) + }, + ); + + crate::check_op!( + LocalSource::new("context").custom_name("context 
custom name"), + |digest| { "sha256:a60212791641cbeaa3a49de4f7dff9e40ae50ec19d1be9607232037c1db16702" }, + |description| { vec![("llb.customname", "context custom name")] }, + |caps| { vec![] }, + |cached_tail| { vec![] }, + |inputs| { vec![] }, + |op| { + Op::Source(SourceOp { + identifier: "local://context".into(), + attrs: Default::default(), + }) + }, + ); + + crate::check_op!( + { + LocalSource::new("context") + .custom_name("context custom name") + .add_exclude_pattern("**/target") + .add_exclude_pattern("Dockerfile") + }, + |digest| { "sha256:f6962b8bb1659c63a2c2c3e2a7ccf0326c87530dd70c514343f127e4c20460c4" }, + |description| { vec![("llb.customname", "context custom name")] }, + |caps| { vec![] }, + |cached_tail| { vec![] }, + |inputs| { vec![] }, + |op| { + Op::Source(SourceOp { + identifier: "local://context".into(), + attrs: crate::utils::test::to_map(vec![( + "local.excludepatterns", + r#"["**/target","Dockerfile"]"#, + )]), + }) + }, + ); + + crate::check_op!( + { + LocalSource::new("context") + .custom_name("context custom name") + .add_include_pattern("Cargo.toml") + .add_include_pattern("inner/Cargo.toml") + }, + |digest| { "sha256:a7e628333262b810572f83193bbf8554e688abfb51d44ac30bdad7fa425f3839" }, + |description| { vec![("llb.customname", "context custom name")] }, + |caps| { vec![] }, + |cached_tail| { vec![] }, + |inputs| { vec![] }, + |op| { + Op::Source(SourceOp { + identifier: "local://context".into(), + attrs: crate::utils::test::to_map(vec![( + "local.includepattern", + r#"["Cargo.toml","inner/Cargo.toml"]"#, + )]), + }) + }, + ); +} diff --git a/buildkit-llb/src/ops/source/mod.rs b/buildkit-llb/src/ops/source/mod.rs new file mode 100644 index 0000000..cdfdfcf --- /dev/null +++ b/buildkit-llb/src/ops/source/mod.rs @@ -0,0 +1,43 @@ +mod git; +mod http; +mod image; +mod local; + +pub use self::git::GitSource; +pub use self::http::HttpSource; +pub use self::image::{ImageSource, ResolveMode}; +pub use self::local::LocalSource; + +/// Provide an input for other operations. For example: `FROM` directive in Dockerfile. +#[derive(Debug)] +pub struct Source; + +impl Source { + pub fn image(name: S) -> ImageSource + where + S: Into, + { + ImageSource::new(name) + } + + pub fn git(url: S) -> GitSource + where + S: Into, + { + GitSource::new(url) + } + + pub fn local(name: S) -> LocalSource + where + S: Into, + { + LocalSource::new(name) + } + + pub fn http(name: S) -> HttpSource + where + S: Into, + { + HttpSource::new(name) + } +} diff --git a/buildkit-llb/src/ops/terminal.rs b/buildkit-llb/src/ops/terminal.rs new file mode 100644 index 0000000..a7f19c2 --- /dev/null +++ b/buildkit-llb/src/ops/terminal.rs @@ -0,0 +1,141 @@ +use std::io::{self, Write}; +use std::iter::once; + +use buildkit_proto::pb::{self, Input}; +use prost::Message; + +use crate::serialization::{Context, Node, Result}; +use crate::utils::OperationOutput; + +/// Final operation in the graph. Responsible for printing the complete LLB definition. 
+#[derive(Debug)] +pub struct Terminal<'a> { + input: OperationOutput<'a>, +} + +impl<'a> Terminal<'a> { + pub fn with(input: OperationOutput<'a>) -> Self { + Self { input } + } + + pub fn into_definition(self) -> pb::Definition { + let mut cx = Context::default(); + let final_node_iter = once(self.serialize(&mut cx).unwrap()); + + let (def, metadata) = { + cx.into_registered_nodes() + .chain(final_node_iter) + .map(|node| (node.bytes, (node.digest, node.metadata))) + .unzip() + }; + + pb::Definition { def, metadata } + } + + pub fn write_definition(self, mut writer: impl Write) -> io::Result<()> { + let mut bytes = Vec::new(); + self.into_definition().encode(&mut bytes).unwrap(); + + writer.write_all(&bytes) + } + + fn serialize(&self, cx: &mut Context) -> Result { + let final_op = pb::Op { + inputs: vec![Input { + digest: cx.register(self.input.operation())?.digest.clone(), + index: self.input.output().into(), + }], + + ..Default::default() + }; + + Ok(Node::new(final_op, Default::default())) + } +} + +#[test] +fn serialization() { + use crate::prelude::*; + + let context = Source::local("context"); + let builder_image = Source::image("rustlang/rust:nightly"); + let final_image = Source::image("library/alpine:latest"); + + let first_command = Command::run("rustc") + .args(&["--crate-name", "crate-1"]) + .mount(Mount::ReadOnlyLayer(builder_image.output(), "/")) + .mount(Mount::ReadOnlyLayer(context.output(), "/context")) + .mount(Mount::Scratch(OutputIdx(0), "/target")); + + let second_command = Command::run("rustc") + .args(&["--crate-name", "crate-2"]) + .mount(Mount::ReadOnlyLayer(builder_image.output(), "/")) + .mount(Mount::ReadOnlyLayer(context.output(), "/context")) + .mount(Mount::Scratch(OutputIdx(0), "/target")); + + let assembly_op = FileSystem::sequence() + .append(FileSystem::mkdir( + OutputIdx(0), + LayerPath::Other(final_image.output(), "/output"), + )) + .append( + FileSystem::copy() + .from(LayerPath::Other(first_command.output(0), "/target/crate-1")) + .to( + OutputIdx(1), + LayerPath::Own(OwnOutputIdx(0), "/output/crate-1"), + ), + ) + .append( + FileSystem::copy() + .from(LayerPath::Other( + second_command.output(0), + "/target/crate-2", + )) + .to( + OutputIdx(2), + LayerPath::Own(OwnOutputIdx(1), "/output/crate-2"), + ), + ); + + let definition = Terminal::with(assembly_op.output(0)).into_definition(); + + assert_eq!( + definition + .def + .iter() + .map(|bytes| Node::get_digest(&bytes)) + .collect::>(), + crate::utils::test::to_vec(vec![ + "sha256:a60212791641cbeaa3a49de4f7dff9e40ae50ec19d1be9607232037c1db16702", + "sha256:dee2a3d7dd482dd8098ba543ff1dcb01efd29fcd16fdb0979ef556f38564543a", + "sha256:0e6b31ceed3e6dc542018f35a53a0e857e6a188453d32a2a5bbe7aa2971c1220", + "sha256:782f343f8f4ee33e4f342ed4209ad1a9eb4582485e45251595a5211ebf2b3cbf", + "sha256:3418ad515958b5e68fd45c9d6fbc8d2ce7d567a956150d22ff529a3fea401aa2", + "sha256:13bb644e4ec0cabe836392649a04551686e69613b1ea9c89a1a8f3bc86181791", + "sha256:d13a773a61236be3c7d539f3ef6d583095c32d2a2a60deda86e71705f2dbc99b", + ]) + ); + + let mut metadata_digests = { + definition + .metadata + .iter() + .map(|(digest, _)| digest.as_str()) + .collect::>() + }; + + metadata_digests.sort(); + assert_eq!( + metadata_digests, + vec![ + "sha256:0e6b31ceed3e6dc542018f35a53a0e857e6a188453d32a2a5bbe7aa2971c1220", + "sha256:13bb644e4ec0cabe836392649a04551686e69613b1ea9c89a1a8f3bc86181791", + "sha256:3418ad515958b5e68fd45c9d6fbc8d2ce7d567a956150d22ff529a3fea401aa2", + 
"sha256:782f343f8f4ee33e4f342ed4209ad1a9eb4582485e45251595a5211ebf2b3cbf", + "sha256:a60212791641cbeaa3a49de4f7dff9e40ae50ec19d1be9607232037c1db16702", + "sha256:d13a773a61236be3c7d539f3ef6d583095c32d2a2a60deda86e71705f2dbc99b", + "sha256:dee2a3d7dd482dd8098ba543ff1dcb01efd29fcd16fdb0979ef556f38564543a", + ] + ); +} diff --git a/buildkit-llb/src/serialization/id.rs b/buildkit-llb/src/serialization/id.rs new file mode 100644 index 0000000..ec75cad --- /dev/null +++ b/buildkit-llb/src/serialization/id.rs @@ -0,0 +1,27 @@ +use std::ops::Deref; +use std::sync::atomic::{AtomicU64, Ordering}; + +static LAST_ID: AtomicU64 = AtomicU64::new(0); + +#[derive(Debug)] +pub(crate) struct OperationId(u64); + +impl Clone for OperationId { + fn clone(&self) -> Self { + OperationId::default() + } +} + +impl Default for OperationId { + fn default() -> Self { + Self(LAST_ID.fetch_add(1, Ordering::Relaxed)) + } +} + +impl Deref for OperationId { + type Target = u64; + + fn deref(&self) -> &u64 { + &self.0 + } +} diff --git a/buildkit-llb/src/serialization/mod.rs b/buildkit-llb/src/serialization/mod.rs new file mode 100644 index 0000000..9a727e2 --- /dev/null +++ b/buildkit-llb/src/serialization/mod.rs @@ -0,0 +1,39 @@ +use std::collections::BTreeMap; + +mod id; +mod operation; +mod output; + +pub(crate) use self::id::OperationId; +pub(crate) use self::operation::Operation; +pub(crate) use self::output::Node; + +pub(crate) type Result = std::result::Result; + +#[derive(Default)] +pub struct Context { + inner: BTreeMap, +} + +impl Context { + #[allow(clippy::map_entry)] + pub(crate) fn register<'a>(&'a mut self, op: &dyn Operation) -> Result<&'a Node> { + let id = **op.id(); + + if !self.inner.contains_key(&id) { + let node = op.serialize(self)?; + self.inner.insert(id, node); + } + + Ok(self.inner.get(&id).unwrap()) + } + + #[cfg(test)] + pub(crate) fn registered_nodes_iter(&self) -> impl Iterator { + self.inner.iter().map(|pair| pair.1) + } + + pub(crate) fn into_registered_nodes(self) -> impl Iterator { + self.inner.into_iter().map(|pair| pair.1) + } +} diff --git a/buildkit-llb/src/serialization/operation.rs b/buildkit-llb/src/serialization/operation.rs new file mode 100644 index 0000000..adcd534 --- /dev/null +++ b/buildkit-llb/src/serialization/operation.rs @@ -0,0 +1,10 @@ +use std::fmt::Debug; + +use super::{Context, OperationId}; +use super::{Node, Result}; + +pub(crate) trait Operation: Debug + Send + Sync { + fn id(&self) -> &OperationId; + + fn serialize(&self, cx: &mut Context) -> Result; +} diff --git a/buildkit-llb/src/serialization/output.rs b/buildkit-llb/src/serialization/output.rs new file mode 100644 index 0000000..e54ff4d --- /dev/null +++ b/buildkit-llb/src/serialization/output.rs @@ -0,0 +1,30 @@ +use buildkit_proto::pb; +use prost::Message; +use sha2::{Digest, Sha256}; + +#[derive(Debug, Default, Clone)] +pub(crate) struct Node { + pub bytes: Vec, + pub digest: String, + pub metadata: pb::OpMetadata, +} + +impl Node { + pub fn new(message: pb::Op, metadata: pb::OpMetadata) -> Self { + let mut bytes = Vec::new(); + message.encode(&mut bytes).unwrap(); + + Self { + digest: Self::get_digest(&bytes), + bytes, + metadata, + } + } + + pub fn get_digest(bytes: &[u8]) -> String { + let mut hasher = Sha256::new(); + hasher.input(&bytes); + + format!("sha256:{:x}", hasher.result()) + } +} diff --git a/buildkit-llb/src/utils.rs b/buildkit-llb/src/utils.rs new file mode 100644 index 0000000..28e3d5e --- /dev/null +++ b/buildkit-llb/src/utils.rs @@ -0,0 +1,191 @@ +use std::sync::Arc; + +use 
crate::serialization::Operation; + +#[derive(Copy, Clone, Debug)] +pub struct OutputIdx(pub u32); + +#[derive(Copy, Clone, Debug)] +pub struct OwnOutputIdx(pub u32); + +#[derive(Debug, Clone)] +pub struct OperationOutput<'a> { + kind: OperationOutputKind<'a>, +} + +#[derive(Debug, Clone)] +enum OperationOutputKind<'a> { + Owned(Arc, OutputIdx), + Borrowed(&'a dyn Operation, OutputIdx), +} + +impl<'a> OperationOutput<'a> { + pub(crate) fn owned(op: Arc, idx: OutputIdx) -> Self { + Self { + kind: OperationOutputKind::Owned(op, idx), + } + } + + pub(crate) fn borrowed(op: &'a dyn Operation, idx: OutputIdx) -> Self { + Self { + kind: OperationOutputKind::Borrowed(op, idx), + } + } + + pub(crate) fn operation(&self) -> &dyn Operation { + match self.kind { + OperationOutputKind::Owned(ref op, ..) => op.as_ref(), + OperationOutputKind::Borrowed(ref op, ..) => *op, + } + } + + pub(crate) fn output(&self) -> OutputIdx { + match self.kind { + OperationOutputKind::Owned(_, output) | OperationOutputKind::Borrowed(_, output) => { + output + } + } + } +} + +impl Into for OutputIdx { + fn into(self) -> i64 { + self.0.into() + } +} +impl Into for &OutputIdx { + fn into(self) -> i64 { + self.0.into() + } +} + +impl Into for OwnOutputIdx { + fn into(self) -> i64 { + self.0.into() + } +} +impl Into for &OwnOutputIdx { + fn into(self) -> i64 { + self.0.into() + } +} + +impl Into for OutputIdx { + fn into(self) -> i32 { + self.0 as i32 + } +} +impl Into for &OutputIdx { + fn into(self) -> i32 { + self.0 as i32 + } +} + +impl Into for OwnOutputIdx { + fn into(self) -> i32 { + self.0 as i32 + } +} +impl Into for &OwnOutputIdx { + fn into(self) -> i32 { + self.0 as i32 + } +} + +#[cfg(test)] +pub mod test { + #[macro_export] + macro_rules! check_op { + ($op:expr, $(|$name:ident| $value:expr,)*) => ($crate::check_op!($op, $(|$name| $value),*)); + ($op:expr, $(|$name:ident| $value:expr),*) => {{ + #[allow(unused_imports)] + use crate::serialization::{Context, Operation}; + + let mut context = Context::default(); + let serialized = $op.serialize(&mut context).unwrap(); + + $(crate::check_op_property!(serialized, context, $name, $value));* + }}; + } + + #[macro_export] + macro_rules! 
check_op_property { + ($serialized:expr, $context:expr, op, $value:expr) => {{ + use std::io::Cursor; + + use buildkit_proto::pb; + use prost::Message; + + assert_eq!( + pb::Op::decode(Cursor::new(&$serialized.bytes)).unwrap().op, + Some($value) + ); + }}; + + ($serialized:expr, $context:expr, inputs, $value:expr) => {{ + use std::io::Cursor; + + use buildkit_proto::pb; + use prost::Message; + + assert_eq!( + pb::Op::decode(Cursor::new(&$serialized.bytes)) + .unwrap() + .inputs + .into_iter() + .map(|input| (input.digest, input.index)) + .collect::>(), + $value + .into_iter() + .map(|input: (&str, i64)| (String::from(input.0), input.1)) + .collect::>() + ); + }}; + + ($serialized:expr, $context:expr, cached_tail, $value:expr) => { + assert_eq!( + $context + .registered_nodes_iter() + .map(|node| node.digest.clone()) + .collect::>(), + crate::utils::test::to_vec($value), + ); + }; + + ($serialized:expr, $context:expr, caps, $value:expr) => {{ + let mut caps = $serialized + .metadata + .caps + .into_iter() + .map(|pair| pair.0) + .collect::>(); + + caps.sort(); + assert_eq!(caps, crate::utils::test::to_vec($value)); + }}; + + ($serialized:expr, $context:expr, description, $value:expr) => { + assert_eq!( + $serialized.metadata.description, + crate::utils::test::to_map($value), + ); + }; + + ($serialized:expr, $context:expr, digest, $value:expr) => { + assert_eq!($serialized.digest, $value); + }; + } + + use std::collections::HashMap; + + pub fn to_map(pairs: Vec<(&str, &str)>) -> HashMap { + pairs + .into_iter() + .map(|(key, value): (&str, &str)| (key.into(), value.into())) + .collect() + } + + pub fn to_vec(items: Vec<&str>) -> Vec { + items.into_iter().map(String::from).collect() + } +} diff --git a/buildkit-proto/CHANGELOG.md b/buildkit-proto/CHANGELOG.md new file mode 100644 index 0000000..91d5110 --- /dev/null +++ b/buildkit-proto/CHANGELOG.md @@ -0,0 +1,14 @@ +# Changelog +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [Unreleased] + +## [0.2.0] - 2020-03-04 +### Changed +- Use `tonic` instead of `tower-grpc` for codegen. + +## [0.1.0] - 2019-09-24 +Initial release. 
diff --git a/buildkit-proto/Cargo.toml b/buildkit-proto/Cargo.toml new file mode 100644 index 0000000..a943f56 --- /dev/null +++ b/buildkit-proto/Cargo.toml @@ -0,0 +1,24 @@ +[package] +name = "buildkit-proto" +version = "0.2.0" +authors = ["Denys Zariaiev "] +edition = "2018" + +description = "Protobuf interfaces to BuildKit" +documentation = "https://docs.rs/buildkit-proto" +repository = "https://github.com/denzp/rust-buildkit" +readme = "README.md" +keywords = ["buildkit", "docker", "protobuf", "prost"] +categories = ["development-tools::build-utils", "api-bindings"] +license = "MIT/Apache-2.0" + +[dependencies] +prost = "0.6" +prost-types = "0.6" +tonic = { git = "https://github.com/edrevo/tonic", branch = "unimplemented-content-type" } + +[build-dependencies.tonic-build] +git = "https://github.com/edrevo/tonic" +branch = "unimplemented-content-type" +default-features = false +features = ["prost", "transport"] diff --git a/buildkit-proto/README.md b/buildkit-proto/README.md new file mode 100644 index 0000000..5e5c46e --- /dev/null +++ b/buildkit-proto/README.md @@ -0,0 +1,33 @@ +`buildkit-proto` - protobuf interfaces to BuildKit +======= + +[![Actions Status]][Actions Link] +[![buildkit-proto Crates Badge]][buildkit-proto Crates Link] +[![buildkit-proto Docs Badge]][buildkit-proto Docs Link] + +# Usage + +The crate is not intended to be used on its own. +The idiomatic high-level API provided by [`buildkit-llb`][buildkit-llb Crates Link] is the preferred way to build LLB graphs. + +# License + +`buildkit-proto` is primarily distributed under the terms of both the MIT license and +the Apache License (Version 2.0), with portions covered by various BSD-like +licenses. + +See LICENSE-APACHE and LICENSE-MIT for details. + +# Contribution + +Unless you explicitly state otherwise, any contribution intentionally submitted +for inclusion in `buildkit-proto` by you, as defined in the Apache-2.0 license, +shall be dual licensed as above, without any additional terms or conditions. + +[Actions Link]: https://github.com/denzp/rust-buildkit/actions +[Actions Status]: https://github.com/denzp/rust-buildkit/workflows/CI/badge.svg +[buildkit-proto Docs Badge]: https://docs.rs/buildkit-proto/badge.svg +[buildkit-proto Docs Link]: https://docs.rs/buildkit-proto/ +[buildkit-proto Crates Badge]: https://img.shields.io/crates/v/buildkit-proto.svg +[buildkit-proto Crates Link]: https://crates.io/crates/buildkit-proto +[buildkit-llb Crates Link]: https://crates.io/crates/buildkit-llb diff --git a/buildkit-proto/build.rs b/buildkit-proto/build.rs new file mode 100644 index 0000000..b436f9d --- /dev/null +++ b/buildkit-proto/build.rs @@ -0,0 +1,11 @@ +const DEFS: &[&str] = &["proto/github.com/moby/buildkit/frontend/gateway/pb/gateway.proto"]; +const PATHS: &[&str] = &["proto"]; + +fn main() -> Result<(), Box<dyn std::error::Error>> { + tonic_build::configure() + .build_client(true) + .build_server(true) + .compile(DEFS, PATHS)?; + + Ok(()) +} diff --git a/buildkit-proto/proto/github.com/gogo/googleapis/google/rpc/status.proto b/buildkit-proto/proto/github.com/gogo/googleapis/google/rpc/status.proto new file mode 100644 index 0000000..3b1f7a9 --- /dev/null +++ b/buildkit-proto/proto/github.com/gogo/googleapis/google/rpc/status.proto @@ -0,0 +1,47 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.rpc; + +import "google/protobuf/any.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/rpc/status;status"; +option java_multiple_files = true; +option java_outer_classname = "StatusProto"; +option java_package = "com.google.rpc"; +option objc_class_prefix = "RPC"; + +// The `Status` type defines a logical error model that is suitable for +// different programming environments, including REST APIs and RPC APIs. It is +// used by [gRPC](https://github.com/grpc). Each `Status` message contains +// three pieces of data: error code, error message, and error details. +// +// You can find out more about this error model and how to work with it in the +// [API Design Guide](https://cloud.google.com/apis/design/errors). +message Status { + // The status code, which should be an enum value of [google.rpc.Code][google.rpc.Code]. + int32 code = 1; + + // A developer-facing error message, which should be in English. Any + // user-facing error message should be localized and sent in the + // [google.rpc.Status.details][google.rpc.Status.details] field, or localized by the client. + string message = 2; + + // A list of messages that carry the error details. There is a common set of + // message types for APIs to use. + repeated google.protobuf.Any details = 3; +} diff --git a/buildkit-proto/proto/github.com/gogo/protobuf/gogoproto/gogo.proto b/buildkit-proto/proto/github.com/gogo/protobuf/gogoproto/gogo.proto new file mode 100644 index 0000000..b80c856 --- /dev/null +++ b/buildkit-proto/proto/github.com/gogo/protobuf/gogoproto/gogo.proto @@ -0,0 +1,144 @@ +// Protocol Buffers for Go with Gadgets +// +// Copyright (c) 2013, The GoGo Authors. All rights reserved. +// http://github.com/gogo/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +syntax = "proto2"; +package gogoproto; + +import "google/protobuf/descriptor.proto"; + +option java_package = "com.google.protobuf"; +option java_outer_classname = "GoGoProtos"; +option go_package = "github.com/gogo/protobuf/gogoproto"; + +extend google.protobuf.EnumOptions { + optional bool goproto_enum_prefix = 62001; + optional bool goproto_enum_stringer = 62021; + optional bool enum_stringer = 62022; + optional string enum_customname = 62023; + optional bool enumdecl = 62024; +} + +extend google.protobuf.EnumValueOptions { + optional string enumvalue_customname = 66001; +} + +extend google.protobuf.FileOptions { + optional bool goproto_getters_all = 63001; + optional bool goproto_enum_prefix_all = 63002; + optional bool goproto_stringer_all = 63003; + optional bool verbose_equal_all = 63004; + optional bool face_all = 63005; + optional bool gostring_all = 63006; + optional bool populate_all = 63007; + optional bool stringer_all = 63008; + optional bool onlyone_all = 63009; + + optional bool equal_all = 63013; + optional bool description_all = 63014; + optional bool testgen_all = 63015; + optional bool benchgen_all = 63016; + optional bool marshaler_all = 63017; + optional bool unmarshaler_all = 63018; + optional bool stable_marshaler_all = 63019; + + optional bool sizer_all = 63020; + + optional bool goproto_enum_stringer_all = 63021; + optional bool enum_stringer_all = 63022; + + optional bool unsafe_marshaler_all = 63023; + optional bool unsafe_unmarshaler_all = 63024; + + optional bool goproto_extensions_map_all = 63025; + optional bool goproto_unrecognized_all = 63026; + optional bool gogoproto_import = 63027; + optional bool protosizer_all = 63028; + optional bool compare_all = 63029; + optional bool typedecl_all = 63030; + optional bool enumdecl_all = 63031; + + optional bool goproto_registration = 63032; + optional bool messagename_all = 63033; + + optional bool goproto_sizecache_all = 63034; + optional bool goproto_unkeyed_all = 63035; +} + +extend google.protobuf.MessageOptions { + optional bool goproto_getters = 64001; + optional bool goproto_stringer = 64003; + optional bool verbose_equal = 64004; + optional bool face = 64005; + optional bool gostring = 64006; + optional bool populate = 64007; + optional bool stringer = 67008; + optional bool onlyone = 64009; + + optional bool equal = 64013; + optional bool description = 64014; + optional bool testgen = 64015; + optional bool benchgen = 64016; + optional bool marshaler = 64017; + optional bool unmarshaler = 64018; + optional bool stable_marshaler = 64019; + + optional bool sizer = 64020; + + optional bool unsafe_marshaler = 64023; + optional bool unsafe_unmarshaler = 64024; + + optional bool goproto_extensions_map = 64025; + optional bool goproto_unrecognized = 64026; + + optional bool protosizer = 64028; + optional bool compare = 64029; + + optional bool typedecl = 64030; + + optional bool messagename = 64033; + + optional bool goproto_sizecache = 64034; + optional bool goproto_unkeyed = 64035; +} + +extend google.protobuf.FieldOptions { + optional bool nullable = 65001; + optional bool embed = 65002; + optional string customtype = 65003; + optional string customname = 65004; + optional string jsontag = 65005; + optional string moretags = 65006; + optional string casttype = 65007; + optional string castkey = 65008; + optional string castvalue = 65009; + + optional bool stdtime = 65010; + optional bool stdduration = 65011; + optional bool wktpointer = 65012; + +} diff --git 
a/buildkit-proto/proto/github.com/moby/buildkit/api/types/worker.proto b/buildkit-proto/proto/github.com/moby/buildkit/api/types/worker.proto new file mode 100644 index 0000000..82dd7ad --- /dev/null +++ b/buildkit-proto/proto/github.com/moby/buildkit/api/types/worker.proto @@ -0,0 +1,24 @@ +syntax = "proto3"; + +package moby.buildkit.v1.types; + +import "github.com/gogo/protobuf/gogoproto/gogo.proto"; +import "github.com/moby/buildkit/solver/pb/ops.proto"; + +option (gogoproto.sizer_all) = true; +option (gogoproto.marshaler_all) = true; +option (gogoproto.unmarshaler_all) = true; + +message WorkerRecord { + string ID = 1; + map Labels = 2; + repeated pb.Platform platforms = 3 [(gogoproto.nullable) = false]; + repeated GCPolicy GCPolicy = 4; +} + +message GCPolicy { + bool all = 1; + int64 keepDuration = 2; + int64 keepBytes = 3; + repeated string filters = 4; +} diff --git a/buildkit-proto/proto/github.com/moby/buildkit/frontend/gateway/pb/gateway.proto b/buildkit-proto/proto/github.com/moby/buildkit/frontend/gateway/pb/gateway.proto new file mode 100644 index 0000000..4413147 --- /dev/null +++ b/buildkit-proto/proto/github.com/moby/buildkit/frontend/gateway/pb/gateway.proto @@ -0,0 +1,164 @@ +syntax = "proto3"; + +package moby.buildkit.v1.frontend; + +import "github.com/gogo/protobuf/gogoproto/gogo.proto"; +import "github.com/gogo/googleapis/google/rpc/status.proto"; +import "github.com/moby/buildkit/solver/pb/ops.proto"; +import "github.com/moby/buildkit/api/types/worker.proto"; +import "github.com/moby/buildkit/util/apicaps/pb/caps.proto"; +import "github.com/tonistiigi/fsutil/types/stat.proto"; + +option (gogoproto.sizer_all) = true; +option (gogoproto.marshaler_all) = true; +option (gogoproto.unmarshaler_all) = true; + +service LLBBridge { + // apicaps:CapResolveImage + rpc ResolveImageConfig(ResolveImageConfigRequest) returns (ResolveImageConfigResponse); + // apicaps:CapSolveBase + rpc Solve(SolveRequest) returns (SolveResponse); + // apicaps:CapReadFile + rpc ReadFile(ReadFileRequest) returns (ReadFileResponse); + // apicaps:CapReadDir + rpc ReadDir(ReadDirRequest) returns (ReadDirResponse); + // apicaps:CapStatFile + rpc StatFile(StatFileRequest) returns (StatFileResponse); + rpc Ping(PingRequest) returns (PongResponse); + rpc Return(ReturnRequest) returns (ReturnResponse); + // apicaps:CapFrontendInputs + rpc Inputs(InputsRequest) returns (InputsResponse); +} + +message Result { + oneof result { + // Deprecated non-array refs. + string refDeprecated = 1; + RefMapDeprecated refsDeprecated = 2; + + Ref ref = 3; + RefMap refs = 4; + } + map metadata = 10; +} + +message RefMapDeprecated { + map refs = 1; +} + +message Ref { + string id = 1; + pb.Definition def = 2; +} + +message RefMap { + map refs = 1; +} + +message ReturnRequest { + Result result = 1; + google.rpc.Status error = 2; +} + +message ReturnResponse { +} + +message InputsRequest { +} + +message InputsResponse { + map Definitions = 1; +} + +message ResolveImageConfigRequest { + string Ref = 1; + pb.Platform Platform = 2; + string ResolveMode = 3; + string LogName = 4; +} + +message ResolveImageConfigResponse { + string Digest = 1 [(gogoproto.customtype) = "github.com/opencontainers/go-digest.Digest", (gogoproto.nullable) = false]; + bytes Config = 2; +} + +message SolveRequest { + pb.Definition Definition = 1; + string Frontend = 2; + map FrontendOpt = 3; + // ImportCacheRefsDeprecated is deprecated in favor or the new Imports since BuildKit v0.4.0. 
+ // When ImportCacheRefsDeprecated is set, the solver appends + // {.Type = "registry", .Attrs = {"ref": importCacheRef}} + // for each of the ImportCacheRefs entry to CacheImports for compatibility. (planned to be removed) + repeated string ImportCacheRefsDeprecated = 4; + bool allowResultReturn = 5; + bool allowResultArrayRef = 6; + + // apicaps.CapSolveInlineReturn deprecated + bool Final = 10; + bytes ExporterAttr = 11; + // CacheImports was added in BuildKit v0.4.0. + // apicaps:CapImportCaches + repeated CacheOptionsEntry CacheImports = 12; + + // apicaps:CapFrontendInputs + map FrontendInputs = 13; +} + +// CacheOptionsEntry corresponds to the control.CacheOptionsEntry +message CacheOptionsEntry { + string Type = 1; + map Attrs = 2; +} + +message SolveResponse { + // deprecated + string ref = 1; // can be used by readfile request + // deprecated +/* bytes ExporterAttr = 2;*/ + + // these fields are returned when allowMapReturn was set + Result result = 3; +} + +message ReadFileRequest { + string Ref = 1; + string FilePath = 2; + FileRange Range = 3; +} + +message FileRange { + int64 Offset = 1; + int64 Length = 2; +} + +message ReadFileResponse { + bytes Data = 1; +} + +message ReadDirRequest { + string Ref = 1; + string DirPath = 2; + string IncludePattern = 3; +} + +message ReadDirResponse { + repeated fsutil.types.Stat entries = 1; +} + +message StatFileRequest { + string Ref = 1; + string Path = 2; +} + +message StatFileResponse { + fsutil.types.Stat stat = 1; +} + +message PingRequest{ +} +message PongResponse{ + repeated moby.buildkit.v1.apicaps.APICap FrontendAPICaps = 1 [(gogoproto.nullable) = false]; + repeated moby.buildkit.v1.apicaps.APICap LLBCaps = 2 [(gogoproto.nullable) = false]; + repeated moby.buildkit.v1.types.WorkerRecord Workers = 3; +} diff --git a/buildkit-proto/proto/github.com/moby/buildkit/solver/pb/ops.proto b/buildkit-proto/proto/github.com/moby/buildkit/solver/pb/ops.proto new file mode 100644 index 0000000..a24aad1 --- /dev/null +++ b/buildkit-proto/proto/github.com/moby/buildkit/solver/pb/ops.proto @@ -0,0 +1,305 @@ +syntax = "proto3"; + +// Package pb provides the protobuf definition of LLB: low-level builder instruction. +// LLB is DAG-structured; Op represents a vertex, and Definition represents a graph. +package pb; + +import "github.com/gogo/protobuf/gogoproto/gogo.proto"; + +option (gogoproto.stable_marshaler_all) = true; + +// Op represents a vertex of the LLB DAG. +message Op { + // inputs is a set of input edges. + repeated Input inputs = 1; + oneof op { + ExecOp exec = 2; + SourceOp source = 3; + FileOp file = 4; + BuildOp build = 5; + } + Platform platform = 10; + WorkerConstraints constraints = 11; +} + +// Platform is github.com/opencontainers/image-spec/specs-go/v1.Platform +message Platform { + string Architecture = 1; + string OS = 2; + string Variant = 3; + string OSVersion = 4; // unused + repeated string OSFeatures = 5; // unused +} + +// Input represents an input edge for an Op. +message Input { + // digest of the marshaled input Op + string digest = 1 [(gogoproto.customtype) = "github.com/opencontainers/go-digest.Digest", (gogoproto.nullable) = false]; + // output index of the input Op + int64 index = 2 [(gogoproto.customtype) = "OutputIndex", (gogoproto.nullable) = false]; +} + +// ExecOp executes a command in a container. +message ExecOp { + Meta meta = 1; + repeated Mount mounts = 2; + NetMode network = 3; + SecurityMode security = 4; +} + +// Meta is a set of arguments for ExecOp. +// Meta is unrelated to LLB metadata. 
+// FIXME: rename (ExecContext? ExecArgs?) +message Meta { + repeated string args = 1; + repeated string env = 2; + string cwd = 3; + string user = 4; + ProxyEnv proxy_env = 5; + repeated HostIP extraHosts = 6; +} + +enum NetMode { + UNSET = 0; // sandbox + HOST = 1; + NONE = 2; +} + +enum SecurityMode { + SANDBOX = 0; + INSECURE = 1; // privileged mode +} + +// Mount specifies how to mount an input Op as a filesystem. +message Mount { + int64 input = 1 [(gogoproto.customtype) = "InputIndex", (gogoproto.nullable) = false]; + string selector = 2; + string dest = 3; + int64 output = 4 [(gogoproto.customtype) = "OutputIndex", (gogoproto.nullable) = false]; + bool readonly = 5; + MountType mountType = 6; + CacheOpt cacheOpt = 20; + SecretOpt secretOpt = 21; + SSHOpt SSHOpt = 22; +} + +// MountType defines a type of a mount from a supported set +enum MountType { + BIND = 0; + SECRET = 1; + SSH = 2; + CACHE = 3; + TMPFS = 4; +} + +// CacheOpt defines options specific to cache mounts +message CacheOpt { + // ID is an optional namespace for the mount + string ID = 1; + // Sharing is the sharing mode for the mount + CacheSharingOpt sharing = 2; +} + +// CacheSharingOpt defines different sharing modes for cache mount +enum CacheSharingOpt { + // SHARED cache mount can be used concurrently by multiple writers + SHARED = 0; + // PRIVATE creates a new mount if there are multiple writers + PRIVATE = 1; + // LOCKED pauses second writer until first one releases the mount + LOCKED = 2; +} + +// SecretOpt defines options describing secret mounts +message SecretOpt { + // ID of secret. Used for quering the value. + string ID = 1; + // UID of secret file + uint32 uid = 2; + // GID of secret file + uint32 gid = 3; + // Mode is the filesystem mode of secret file + uint32 mode = 4; + // Optional defines if secret value is required. Error is produced + // if value is not found and optional is false. + bool optional = 5; +} + +// SSHOpt defines options describing secret mounts +message SSHOpt { + // ID of exposed ssh rule. Used for quering the value. + string ID = 1; + // UID of agent socket + uint32 uid = 2; + // GID of agent socket + uint32 gid = 3; + // Mode is the filesystem mode of agent socket + uint32 mode = 4; + // Optional defines if ssh socket is required. Error is produced + // if client does not expose ssh. + bool optional = 5; +} + +// SourceOp specifies a source such as build contexts and images. +message SourceOp { + // TODO: use source type or any type instead of URL protocol. + // identifier e.g. local://, docker-image://, git://, https://... + string identifier = 1; + // attrs are defined in attr.go + map attrs = 2; +} + +// BuildOp is used for nested build invocation. +// BuildOp is experimental and can break without backwards compatibility +message BuildOp { + int64 builder = 1 [(gogoproto.customtype) = "InputIndex", (gogoproto.nullable) = false]; + map inputs = 2; + Definition def = 3; + map attrs = 4; + // outputs +} + +// BuildInput is used for BuildOp. +message BuildInput { + int64 input = 1 [(gogoproto.customtype) = "InputIndex", (gogoproto.nullable) = false]; +} + +// OpMetadata is a per-vertex metadata entry, which can be defined for arbitrary Op vertex and overridable on the run time. +message OpMetadata { + // ignore_cache specifies to ignore the cache for this Op. 
+ bool ignore_cache = 1; + // Description can be used for keeping any text fields that builder doesn't parse + map description = 2; + // index 3 reserved for WorkerConstraint in previous versions + // WorkerConstraint worker_constraint = 3; + ExportCache export_cache = 4; + + map caps = 5 [(gogoproto.castkey) = "github.com/moby/buildkit/util/apicaps.CapID", (gogoproto.nullable) = false]; +} + +message ExportCache { + bool Value = 1; +} + +message ProxyEnv { + string http_proxy = 1; + string https_proxy = 2; + string ftp_proxy = 3; + string no_proxy = 4; +} + +// WorkerConstraints defines conditions for the worker +message WorkerConstraints { + repeated string filter = 1; // containerd-style filter +} + +// Definition is the LLB definition structure with per-vertex metadata entries +message Definition { + // def is a list of marshaled Op messages + repeated bytes def = 1; + // metadata contains metadata for the each of the Op messages. + // A key must be an LLB op digest string. Currently, empty string is not expected as a key, but it may change in the future. + map metadata = 2 [(gogoproto.castkey) = "github.com/opencontainers/go-digest.Digest", (gogoproto.nullable) = false]; +} + +message HostIP { + string Host = 1; + string IP = 2; +} + +message FileOp { + repeated FileAction actions = 2; +} + +message FileAction { + int64 input = 1 [(gogoproto.customtype) = "InputIndex", (gogoproto.nullable) = false]; // could be real input or target (target index + max input index) + int64 secondaryInput = 2 [(gogoproto.customtype) = "InputIndex", (gogoproto.nullable) = false]; // --//-- + int64 output = 3 [(gogoproto.customtype) = "OutputIndex", (gogoproto.nullable) = false]; + oneof action { + // FileActionCopy copies files from secondaryInput on top of input + FileActionCopy copy = 4; + // FileActionMkFile creates a new file + FileActionMkFile mkfile = 5; + // FileActionMkDir creates a new directory + FileActionMkDir mkdir = 6; + // FileActionRm removes a file + FileActionRm rm = 7; + } +} + +message FileActionCopy { + // src is the source path + string src = 1; + // dest path + string dest = 2; + // optional owner override + ChownOpt owner = 3; + // optional permission bits override + int32 mode = 4; + // followSymlink resolves symlinks in src + bool followSymlink = 5; + // dirCopyContents only copies contents if src is a directory + bool dirCopyContents = 6; + // attemptUnpackDockerCompatibility detects if src is an archive to unpack it instead + bool attemptUnpackDockerCompatibility = 7; + // createDestPath creates dest path directories if needed + bool createDestPath = 8; + // allowWildcard allows filepath.Match wildcards in src path + bool allowWildcard = 9; + // allowEmptyWildcard doesn't fail the whole copy if wildcard doesn't resolve to files + bool allowEmptyWildcard = 10; + // optional created time override + int64 timestamp = 11; +} + +message FileActionMkFile { + // path for the new file + string path = 1; + // permission bits + int32 mode = 2; + // data is the new file contents + bytes data = 3; + // optional owner for the new file + ChownOpt owner = 4; + // optional created time override + int64 timestamp = 5; +} + +message FileActionMkDir { + // path for the new directory + string path = 1; + // permission bits + int32 mode = 2; + // makeParents creates parent directories as well if needed + bool makeParents = 3; + // optional owner for the new directory + ChownOpt owner = 4; + // optional created time override + int64 timestamp = 5; +} + +message FileActionRm { + // path to remove + 
string path = 1; + // allowNotFound doesn't fail the rm if file is not found + bool allowNotFound = 2; + // allowWildcard allows filepath.Match wildcards in path + bool allowWildcard = 3; +} + +message ChownOpt { + UserOpt user = 1; + UserOpt group = 2; +} + +message UserOpt { + oneof user { + NamedUserOpt byName = 1; + uint32 byID = 2; + } +} + +message NamedUserOpt { + string name = 1; + int64 input = 2 [(gogoproto.customtype) = "InputIndex", (gogoproto.nullable) = false]; +} \ No newline at end of file diff --git a/buildkit-proto/proto/github.com/moby/buildkit/util/apicaps/pb/caps.proto b/buildkit-proto/proto/github.com/moby/buildkit/util/apicaps/pb/caps.proto new file mode 100644 index 0000000..1e8c065 --- /dev/null +++ b/buildkit-proto/proto/github.com/moby/buildkit/util/apicaps/pb/caps.proto @@ -0,0 +1,19 @@ +syntax = "proto3"; + +package moby.buildkit.v1.apicaps; + +import "github.com/gogo/protobuf/gogoproto/gogo.proto"; + +option (gogoproto.sizer_all) = true; +option (gogoproto.marshaler_all) = true; +option (gogoproto.unmarshaler_all) = true; + +// APICap defines a capability supported by the service +message APICap { + string ID = 1; + bool Enabled = 2; + bool Deprecated = 3; // Unused. May be used for warnings in the future + string DisabledReason = 4; // Reason key for detection code + string DisabledReasonMsg = 5; // Message to the user + string DisabledAlternative = 6; // Identifier that updated client could catch. +} \ No newline at end of file diff --git a/buildkit-proto/proto/github.com/tonistiigi/fsutil/types/stat.proto b/buildkit-proto/proto/github.com/tonistiigi/fsutil/types/stat.proto new file mode 100644 index 0000000..4138be6 --- /dev/null +++ b/buildkit-proto/proto/github.com/tonistiigi/fsutil/types/stat.proto @@ -0,0 +1,19 @@ +syntax = "proto3"; + +package fsutil.types; + +option go_package = "types"; + +message Stat { + string path = 1; + uint32 mode = 2; + uint32 uid = 3; + uint32 gid = 4; + int64 size = 5; + int64 modTime = 6; + // int32 typeflag = 7; + string linkname = 7; + int64 devmajor = 8; + int64 devminor = 9; + map xattrs = 10; +} \ No newline at end of file diff --git a/buildkit-proto/src/lib.rs b/buildkit-proto/src/lib.rs new file mode 100644 index 0000000..8fc9507 --- /dev/null +++ b/buildkit-proto/src/lib.rs @@ -0,0 +1,35 @@ +#[allow(clippy::all)] + +pub mod moby { + pub mod buildkit { + pub mod v1 { + pub mod frontend { + include!(concat!(env!("OUT_DIR"), "/moby.buildkit.v1.frontend.rs")); + } + + pub mod apicaps { + include!(concat!(env!("OUT_DIR"), "/moby.buildkit.v1.apicaps.rs")); + } + + pub mod types { + include!(concat!(env!("OUT_DIR"), "/moby.buildkit.v1.types.rs")); + } + } + } +} + +pub mod google { + pub mod rpc { + include!(concat!(env!("OUT_DIR"), "/google.rpc.rs")); + } +} + +pub mod pb { + include!(concat!(env!("OUT_DIR"), "/pb.rs")); +} + +pub mod fsutil { + pub mod types { + include!(concat!(env!("OUT_DIR"), "/fsutil.types.rs")); + } +} diff --git a/buildkit-proto/update.sh b/buildkit-proto/update.sh new file mode 100755 index 0000000..a4ee119 --- /dev/null +++ b/buildkit-proto/update.sh @@ -0,0 +1,13 @@ +#!/bin/sh +set -e + +export BUILDKIT_VERSION="v0.7.2" + +curl "https://raw.githubusercontent.com/moby/buildkit/$BUILDKIT_VERSION/api/types/worker.proto" > proto/github.com/moby/buildkit/api/types/worker.proto +curl "https://raw.githubusercontent.com/moby/buildkit/$BUILDKIT_VERSION/frontend/gateway/pb/gateway.proto" > proto/github.com/moby/buildkit/frontend/gateway/pb/gateway.proto +curl 
"https://raw.githubusercontent.com/moby/buildkit/$BUILDKIT_VERSION/solver/pb/ops.proto" > proto/github.com/moby/buildkit/solver/pb/ops.proto +curl "https://raw.githubusercontent.com/moby/buildkit/$BUILDKIT_VERSION/util/apicaps/pb/caps.proto" > proto/github.com/moby/buildkit/util/apicaps/pb/caps.proto + +curl "https://raw.githubusercontent.com/googleapis/googleapis/master/google/rpc/status.proto" > proto/github.com/gogo/googleapis/google/rpc/status.proto +curl "https://raw.githubusercontent.com/gogo/protobuf/v1.2.1/gogoproto/gogo.proto" > proto/github.com/gogo/protobuf/gogoproto/gogo.proto +curl "https://raw.githubusercontent.com/tonistiigi/fsutil/master/types/stat.proto" > proto/github.com/tonistiigi/fsutil/types/stat.proto diff --git a/dockerfile-plus/Cargo.toml b/dockerfile-plus/Cargo.toml new file mode 100644 index 0000000..e41cb11 --- /dev/null +++ b/dockerfile-plus/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "dockerfile-plus" +version = "0.1.0" +authors = ["Ximo Guanter "] +edition = "2018" + +[dependencies] +anyhow = "1" +async-trait = "0.1" +crossbeam = "0.7" +either = "1" +env_logger = "0.8" +futures = "0.3" +libc = "0.2" +mio = "0.6" +pin-project = "1" +prost = "0.6" +prost-types = "0.6" +regex = "1.3" +serde = { version = "1.0", features = ["derive"] } +serde_json = "1" +tokio = { version = "0.2", features = ["macros", "rt-core", "rt-threaded"] } +tonic = { git = "https://github.com/edrevo/tonic", branch = "unimplemented-content-type" } +tower = "0.3" +url = "2.2" +buildkit-llb = { version = "0.2", path = "../buildkit-llb" } +buildkit-proto = { version = "0.2", path = "../buildkit-proto" } diff --git a/dockerfile-plus/Dockerfile b/dockerfile-plus/Dockerfile new file mode 100644 index 0000000..e0bc2b9 --- /dev/null +++ b/dockerfile-plus/Dockerfile @@ -0,0 +1,19 @@ +# syntax = docker/dockerfile:1.2.1 + +FROM clux/muslrust:stable as builder +USER root + +WORKDIR /rust-src +COPY . /rust-src + +RUN --mount=type=cache,target=/rust-src/target \ + --mount=type=cache,target=/root/.cargo/git \ + --mount=type=cache,target=/root/.cargo/registry \ + ["cargo", "build", "--release", "--target", "x86_64-unknown-linux-musl", "-p", "dockerfile-plus"] + +RUN --mount=type=cache,target=/rust-src/target \ + ["cp", "/rust-src/target/x86_64-unknown-linux-musl/release/dockerfile-plus", "/usr/local/bin/dockerfile-plus"] + +FROM docker/dockerfile:1.2.1 +COPY --from=builder /usr/local/bin/dockerfile-plus /usr/local/bin/dockerfile-plus +ENTRYPOINT ["/usr/local/bin/dockerfile-plus"] \ No newline at end of file diff --git a/dockerfile-plus/examples/README.md b/dockerfile-plus/examples/README.md new file mode 100644 index 0000000..3118396 --- /dev/null +++ b/dockerfile-plus/examples/README.md @@ -0,0 +1,21 @@ +# Examples + +In this folder you can find some example Dockerfile syntax extensions. + +## Noop + +This is the most basic example. It just adds a new instruction `NOOP` which does nothing (i.e. it is ignored). 
With this extension, the following Dockerfile would successfully compile: + +```dockerfile +# syntax = edrevo/noop-dockerfile + +NOOP + +FROM alpine + +NOOP + +WORKDIR / + +RUN echo "Hello World" +``` \ No newline at end of file diff --git a/dockerfile-plus/examples/include/Dockerfile b/dockerfile-plus/examples/include/Dockerfile new file mode 100644 index 0000000..a862d4f --- /dev/null +++ b/dockerfile-plus/examples/include/Dockerfile @@ -0,0 +1,9 @@ +# syntax = edrevo/dockerfile-plus:0.1 + +FROM alpine + +INCLUDE+ Dockerfile.common + +WORKDIR / + +RUN echo "Hello World" diff --git a/dockerfile-plus/examples/include/Dockerfile.common b/dockerfile-plus/examples/include/Dockerfile.common new file mode 100644 index 0000000..7596755 --- /dev/null +++ b/dockerfile-plus/examples/include/Dockerfile.common @@ -0,0 +1,5 @@ +FROM alpine + +ENV RUST_LOG=debug + +ENTRYPOINT ["/usr/local/bin/noop"] \ No newline at end of file diff --git a/dockerfile-plus/src/dockerfile_frontend.rs b/dockerfile-plus/src/dockerfile_frontend.rs new file mode 100644 index 0000000..e92ac0c --- /dev/null +++ b/dockerfile-plus/src/dockerfile_frontend.rs @@ -0,0 +1,175 @@ +use std::{process::Stdio, sync::Arc}; + +use crate::stdio::StdioSocket; +use anyhow::Result; +use buildkit_proto::moby::buildkit::v1::frontend::{ + self, llb_bridge_client::LlbBridgeClient, llb_bridge_server::LlbBridge, +}; +use crossbeam::{channel, Sender}; +use frontend::{llb_bridge_server::LlbBridgeServer, ReadFileResponse}; +use tokio::sync::RwLock; +use tonic::{transport::Channel, transport::Server, Request, Response}; + +pub struct DockerfileFrontend { + client: LlbBridgeClient, + dockerfile_name: String, +} + +impl DockerfileFrontend { + pub fn new(client: LlbBridgeClient, dockerfile_name: &str) -> DockerfileFrontend { + DockerfileFrontend { + client, + dockerfile_name: dockerfile_name.to_string(), + } + } + + pub async fn solve(&self, dockerfile_contents: &str) -> Result { + let mut dockerfile_front = std::process::Command::new("/bin/dockerfile-frontend") + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .envs(std::env::vars()) + .spawn()?; + + let (tx, rx) = channel::bounded(1); + Server::builder() + .add_service(LlbBridgeServer::new(ProxyLlbServer::new( + self.client.clone(), + tx, + self.dockerfile_name.clone(), + dockerfile_contents.as_bytes().to_vec(), + ))) + .serve_with_incoming(tokio::stream::once(StdioSocket::try_new_rw( + dockerfile_front.stdout.take().unwrap(), + dockerfile_front.stdin.take().unwrap(), + ))) + .await?; + + dockerfile_front.wait()?; + Ok(rx.recv()?)
+ } +} + +struct ProxyLlbServer { + client: Arc>>, + result_sender: Sender, + + dockerfile_name: String, + dockerfile_contents: Vec, +} + +impl ProxyLlbServer { + fn new( + client: LlbBridgeClient, + result_sender: Sender, + dockerfile_name: String, + dockerfile_contents: Vec, + ) -> Self { + ProxyLlbServer { + client: Arc::new(RwLock::new(client)), + result_sender, + dockerfile_name, + dockerfile_contents, + } + } +} + +#[tonic::async_trait] +impl LlbBridge for ProxyLlbServer { + async fn resolve_image_config( + &self, + request: Request, + ) -> Result, tonic::Status> { + eprintln!("Resolve image config: {:?}", request); + let result = self + .client + .write() + .await + .resolve_image_config(request) + .await; + eprintln!("{:?}", result); + result + } + + async fn solve( + &self, + request: Request, + ) -> Result, tonic::Status> { + eprintln!("Solve: {:?}", request); + let result = self.client.write().await.solve(request).await; + eprintln!("{:?}", result); + result + } + + async fn read_file( + &self, + request: Request, + ) -> Result, tonic::Status> { + eprintln!("Read file: {:?}", request); + let inner = request.into_inner(); + let request = Request::new(inner.clone()); + let result = if inner.file_path == self.dockerfile_name { + eprintln!("ITS A TRAP!"); + eprintln!( + "{}", + std::str::from_utf8(&self.dockerfile_contents).unwrap() + ); + Ok(Response::new(ReadFileResponse { + data: self.dockerfile_contents.clone(), + })) + } else { + self.client.write().await.read_file(request).await + }; + eprintln!("{:?}", result); + result + } + + async fn read_dir( + &self, + request: Request, + ) -> Result, tonic::Status> { + eprintln!("Read dir: {:?}", request); + let result = self.client.write().await.read_dir(request).await; + eprintln!("{:?}", result); + result + } + + async fn stat_file( + &self, + request: Request, + ) -> Result, tonic::Status> { + eprintln!("Stat file: {:?}", request); + let result = self.client.write().await.stat_file(request).await; + eprintln!("{:?}", result); + result + } + + async fn ping( + &self, + request: Request, + ) -> Result, tonic::Status> { + eprintln!("Ping: {:?}", request); + let result = self.client.write().await.ping(request).await; + eprintln!("{:?}", result); + result + } + + async fn r#return( + &self, + request: Request, + ) -> Result, tonic::Status> { + // Do not send return request to buildkit + let inner = request.into_inner(); + self.result_sender.send(inner).unwrap(); + Ok(Response::new(frontend::ReturnResponse {})) + } + + async fn inputs( + &self, + request: Request, + ) -> Result, tonic::Status> { + eprintln!("Inputs: {:?}", request); + let result = self.client.write().await.inputs(request).await; + eprintln!("{:?}", result); + result + } +} diff --git a/dockerfile-plus/src/main.rs b/dockerfile-plus/src/main.rs new file mode 100644 index 0000000..6c5f3da --- /dev/null +++ b/dockerfile-plus/src/main.rs @@ -0,0 +1,130 @@ +use std::path::PathBuf; + +use dockerfile_frontend::DockerfileFrontend; + +use anyhow::{Context, Result}; +use buildkit_llb::prelude::*; +use buildkit_proto::{ + google::rpc::Status, + moby::buildkit::v1::frontend::{ + llb_bridge_client::LlbBridgeClient, result::Result as RefResult, FileRange, + ReadFileRequest, ReturnRequest, SolveRequest, + }, +}; +use serde::Deserialize; +use tonic::{transport::Channel, transport::Endpoint}; +use tower::service_fn; + +mod dockerfile_frontend; +mod options; +mod stdio; + +async fn read_file
<P>
( + client: &mut LlbBridgeClient, + layer: &str, + path: P, + range: Option, +) -> Result> +where + P: Into, +{ + let file_path = path.into().display().to_string(); + + let request = ReadFileRequest { + r#ref: layer.to_string(), + file_path, + range, + }; + + let response = client.read_file(request).await?.into_inner().data; + + Ok(response) +} + +async fn solve<'a>(client: &mut LlbBridgeClient, graph: Terminal<'a>) -> Result { + let solve_request = SolveRequest { + definition: Some(graph.into_definition()), + exporter_attr: vec![], + allow_result_return: true, + ..Default::default() + }; + let temp_result = client + .solve(solve_request) + .await? + .into_inner() + .result + .unwrap() + .result + .unwrap(); + match temp_result { + RefResult::RefDeprecated(inner) => Ok(inner), + _ => panic!("Unexpected result"), + } +} + +async fn run() -> Result<()> { + let channel = { + Endpoint::from_static("http://[::]:50051") + .connect_with_connector(service_fn(stdio::stdio_connector)) + .await? + }; + let mut client = LlbBridgeClient::new(channel); + let o: DockerfileOptions = options::from_env(std::env::vars())?; + let dockerfile_path = o + .filename + .as_ref() + .and_then(|p| p.to_str()) + .unwrap_or("Dockerfile"); + let dockerfile_source = Source::local("dockerfile"); + let dockerfile_layer = solve(&mut client, Terminal::with(dockerfile_source.output())).await?; + let dockerfile_contents = + String::from_utf8(read_file(&mut client, &dockerfile_layer, dockerfile_path, None).await?)?; + let dockerfile_frontend = DockerfileFrontend::new(client.clone(), dockerfile_path); + let result = dockerfile_trap(client.clone(), dockerfile_frontend, dockerfile_contents) + .await + .unwrap_or_else(|e| ReturnRequest { + result: None, + error: Some(Status { + code: 128, + message: e.to_string(), + details: vec![], + }), + }); + client.r#return(result).await?; + Ok(()) +} + +#[tokio::main] +async fn main() { + env_logger::init(); + run().await.unwrap(); +} + +#[derive(Debug, Deserialize)] +struct DockerfileOptions { + filename: Option, +} + +const INCLUDE_COMMAND: &str = "INCLUDE+"; + +async fn dockerfile_trap( + mut client: LlbBridgeClient, + dockerfile_frontend: DockerfileFrontend, + dockerfile_contents: String, +) -> Result { + let mut result: Vec = vec![]; + let context_source = Source::local("context"); + let context_layer = solve(&mut client, Terminal::with(context_source.output())).await?; + for line in dockerfile_contents.lines() { + if let Some(file_path) = line.trim().strip_prefix(INCLUDE_COMMAND) { + let bytes = read_file(&mut client, &context_layer, file_path.trim_start().to_string(), None) + .await + .with_context(|| format!("Could not read file \"{}\". 
Remember that the file path is relative to the build context, not the Dockerfile path.", file_path))?; + result.push(std::str::from_utf8(&bytes)?.to_string()); + } else { + result.push(line.to_string()); + } + } + let dockerfile_contents = result.join("\n"); + dockerfile_frontend.solve(&dockerfile_contents).await +} diff --git a/dockerfile-plus/src/options/common.rs b/dockerfile-plus/src/options/common.rs new file mode 100644 index 0000000..b011e6c --- /dev/null +++ b/dockerfile-plus/src/options/common.rs @@ -0,0 +1,40 @@ +use std::collections::HashMap; + +use buildkit_proto::moby::buildkit::v1::frontend::CacheOptionsEntry as CacheOptionsEntryProto; +use serde::Deserialize; + +#[derive(Clone, Debug, Deserialize, PartialEq)] +pub struct CacheOptionsEntry { + #[serde(rename = "Type")] + pub cache_type: CacheType, + + #[serde(rename = "Attrs")] + pub attrs: HashMap, +} + +#[derive(Clone, Debug, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum CacheType { + Local, + Registry, + Inline, +} + +impl Into for CacheOptionsEntry { + fn into(self) -> CacheOptionsEntryProto { + CacheOptionsEntryProto { + r#type: self.cache_type.into(), + attrs: self.attrs, + } + } +} + +impl Into for CacheType { + fn into(self) -> String { + match self { + CacheType::Local => "local".into(), + CacheType::Registry => "registry".into(), + CacheType::Inline => "inline".into(), + } + } +} diff --git a/dockerfile-plus/src/options/default.rs b/dockerfile-plus/src/options/default.rs new file mode 100644 index 0000000..e01df0f --- /dev/null +++ b/dockerfile-plus/src/options/default.rs @@ -0,0 +1,9 @@ +use serde::Deserialize; + +#[derive(Debug, PartialEq, Deserialize)] +#[serde(untagged)] +enum OptionValue { + Flag(bool), + Single(String), + Multiple(Vec), +} diff --git a/dockerfile-plus/src/options/deserializer.rs b/dockerfile-plus/src/options/deserializer.rs new file mode 100644 index 0000000..09d6f11 --- /dev/null +++ b/dockerfile-plus/src/options/deserializer.rs @@ -0,0 +1,272 @@ +use std::io::Cursor; +use std::iter::empty; + +use anyhow::Result; +use serde::de::value::{MapDeserializer, SeqDeserializer}; +use serde::de::{self, DeserializeOwned, IntoDeserializer, Visitor}; +use serde::forward_to_deserialize_any; + +pub fn from_env(pairs: I) -> Result +where + T: DeserializeOwned, + I: IntoIterator, +{ + let owned_pairs = pairs.into_iter().collect::>(); + let pairs = { + owned_pairs.iter().filter_map(|(name, value)| { + if name.starts_with("BUILDKIT_FRONTEND_OPT_") { + Some(value) + } else { + None + } + }) + }; + + let deserializer = EnvDeserializer { + vals: pairs.map(|value| extract_name_and_value(&value)), + }; + + Ok(T::deserialize(deserializer)?) +} + +#[derive(Debug)] +struct EnvDeserializer
<P>
{ + vals: P, +} + +#[derive(Debug)] +enum EnvValue<'de> { + Flag, + Json(&'de str), + Text(&'de str), +} + +#[derive(Debug)] +struct EnvItem<'de>(&'de str); + +fn extract_name_and_value(mut raw_value: &str) -> (&str, EnvValue) { + if raw_value.starts_with("build-arg:") { + raw_value = raw_value.trim_start_matches("build-arg:"); + } + + let mut parts = raw_value.splitn(2, '='); + let name = parts.next().unwrap(); + + match parts.next() { + None => (name, EnvValue::Flag), + Some(text) if text.is_empty() => (name, EnvValue::Flag), + Some(text) if &text[0..1] == "[" || &text[0..1] == "{" => (name, EnvValue::Json(text)), + Some(text) => (name, EnvValue::Text(text)), + } +} + +impl<'de> IntoDeserializer<'de, serde::de::value::Error> for EnvValue<'de> { + type Deserializer = Self; + + fn into_deserializer(self) -> Self::Deserializer { + self + } +} + +impl<'de> IntoDeserializer<'de, serde::de::value::Error> for EnvItem<'de> { + type Deserializer = Self; + + fn into_deserializer(self) -> Self::Deserializer { + self + } +} + +impl<'de> EnvItem<'de> { + fn infer>(self, visitor: V) -> Result { + match self.0 { + "true" => visitor.visit_bool(true), + "false" => visitor.visit_bool(false), + + _ => visitor.visit_str(self.0), + } + } + + fn json>(self, visitor: V) -> Result { + use serde::de::Deserializer; + use serde::de::Error; + + serde_json::Deserializer::from_reader(Cursor::new(self.0)) + .deserialize_any(visitor) + .map_err(serde::de::value::Error::custom) + } +} + +impl<'de, P> de::Deserializer<'de> for EnvDeserializer
<P>
+where + P: Iterator)>, +{ + type Error = serde::de::value::Error; + + fn deserialize_any>(self, visitor: V) -> Result { + visitor.visit_map(MapDeserializer::new(self.vals)) + } + + forward_to_deserialize_any! { + bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string + bytes byte_buf option unit unit_struct newtype_struct seq tuple + tuple_struct map struct enum identifier ignored_any + } +} + +// The approach is shamelessly borrowed from https://github.com/softprops/envy/blob/master/src/lib.rs#L113 +macro_rules! forward_parsed_values_env_value { + ($($ty:ident => $method:ident,)*) => { + $( + fn $method(self, visitor: V) -> Result + where V: de::Visitor<'de> + { + match self { + EnvValue::Flag => self.deserialize_any(visitor), + EnvValue::Json(_) => self.deserialize_any(visitor), + EnvValue::Text(contents) => { + match contents.parse::<$ty>() { + Ok(val) => val.into_deserializer().$method(visitor), + Err(e) => Err(de::Error::custom(format_args!("{} while parsing value '{}'", e, contents))) + } + } + } + } + )* + } +} + +macro_rules! forward_parsed_values_env_item { + ($($ty:ident => $method:ident,)*) => { + $( + fn $method(self, visitor: V) -> Result + where V: de::Visitor<'de> + { + match self.0.parse::<$ty>() { + Ok(val) => val.into_deserializer().$method(visitor), + Err(e) => Err(de::Error::custom(format_args!("{} while parsing value '{}'", e, self.0))) + } + } + )* + } +} + +impl<'de> de::Deserializer<'de> for EnvValue<'de> { + type Error = serde::de::value::Error; + + fn deserialize_any>(self, visitor: V) -> Result { + match self { + EnvValue::Flag => visitor.visit_bool(true), + EnvValue::Json(contents) => EnvItem(contents).json(visitor), + EnvValue::Text(contents) => { + if !contents.contains(',') { + EnvItem(contents).infer(visitor) + } else { + SeqDeserializer::new(contents.split(',')).deserialize_seq(visitor) + } + } + } + } + + fn deserialize_seq>(self, visitor: V) -> Result { + match self { + EnvValue::Flag => SeqDeserializer::new(empty::<&'de str>()).deserialize_seq(visitor), + EnvValue::Json(contents) => EnvItem(contents).json(visitor), + EnvValue::Text(contents) => { + SeqDeserializer::new(contents.split(',')).deserialize_seq(visitor) + } + } + } + + fn deserialize_option>(self, visitor: V) -> Result { + visitor.visit_some(self) + } + + forward_parsed_values_env_value! { + bool => deserialize_bool, + u8 => deserialize_u8, + u16 => deserialize_u16, + u32 => deserialize_u32, + u64 => deserialize_u64, + u128 => deserialize_u128, + i8 => deserialize_i8, + i16 => deserialize_i16, + i32 => deserialize_i32, + i64 => deserialize_i64, + i128 => deserialize_i128, + f32 => deserialize_f32, + f64 => deserialize_f64, + } + + forward_to_deserialize_any! { + byte_buf + bytes + char + enum + identifier + ignored_any + map + newtype_struct + str + string + struct + tuple + tuple_struct + unit + unit_struct + } +} + +impl<'de> de::Deserializer<'de> for EnvItem<'de> { + type Error = serde::de::value::Error; + + fn deserialize_any>(self, visitor: V) -> Result { + self.0.into_deserializer().deserialize_any(visitor) + } + + fn deserialize_map>(self, visitor: V) -> Result { + self.json(visitor) + } + + fn deserialize_struct>( + self, + _: &'static str, + _: &'static [&'static str], + visitor: V, + ) -> Result { + self.json(visitor) + } + + forward_parsed_values_env_item! 
{ + bool => deserialize_bool, + u8 => deserialize_u8, + u16 => deserialize_u16, + u32 => deserialize_u32, + u64 => deserialize_u64, + u128 => deserialize_u128, + i8 => deserialize_i8, + i16 => deserialize_i16, + i32 => deserialize_i32, + i64 => deserialize_i64, + i128 => deserialize_i128, + f32 => deserialize_f32, + f64 => deserialize_f64, + } + + forward_to_deserialize_any! { + byte_buf + bytes + char + enum + identifier + ignored_any + newtype_struct + option + seq + str + string + tuple + tuple_struct + unit + unit_struct + } +} diff --git a/dockerfile-plus/src/options/mod.rs b/dockerfile-plus/src/options/mod.rs new file mode 100644 index 0000000..2887361 --- /dev/null +++ b/dockerfile-plus/src/options/mod.rs @@ -0,0 +1,123 @@ +mod default; +mod deserializer; + +pub use self::deserializer::from_env; + +pub mod common; + +#[cfg(test)] +mod tests { + use std::path::PathBuf; + + use super::*; + use serde::Deserialize; + + #[derive(Debug, Deserialize, PartialEq)] + #[serde(untagged)] + #[serde(field_identifier, rename_all = "lowercase")] + enum Debug { + All, + LLB, + Frontend, + } + + #[derive(Debug, Deserialize, PartialEq)] + #[serde(rename_all = "kebab-case")] + struct CustomOptions { + filename: Option, + verbosity: u32, + + #[serde(default)] + debug: Vec, + + #[serde(default)] + cache_imports: Vec, + } + + #[test] + fn custom_options() { + let env = vec![ + ( + "BUILDKIT_FRONTEND_OPT_0".into(), + "filename=/path/to/Dockerfile".into(), + ), + ( + "BUILDKIT_FRONTEND_OPT_1".into(), + "debug=llb,frontend".into(), + ), + ( + "BUILDKIT_FRONTEND_OPT_2".into(), + r#"cache-imports=[{"Type":"local","Attrs":{"src":"cache"}}]"#.into(), + ), + ( + "BUILDKIT_FRONTEND_OPT_3".into(), + "verbosity=12345678".into(), + ), + ]; + + assert_eq!( + from_env::(env.into_iter()).unwrap(), + CustomOptions { + filename: Some(PathBuf::from("/path/to/Dockerfile")), + verbosity: 12_345_678, + + debug: vec![Debug::LLB, Debug::Frontend], + + cache_imports: vec![common::CacheOptionsEntry { + cache_type: common::CacheType::Local, + attrs: vec![("src".into(), "cache".into())].into_iter().collect() + }], + } + ); + } + + #[test] + fn env_variable_names() { + let env = vec![ + ( + "ANOTHER_OPT_0".into(), + "filename=/path/to/Dockerfile".into(), + ), + ( + "ANOTHER_OPT_2".into(), + r#"cache-imports=[{"Type":"local","Attrs":{"src":"cache"}}]"#.into(), + ), + ("BUILDKIT_FRONTEND_OPT_1".into(), "debug=all".into()), + ( + "BUILDKIT_FRONTEND_OPT_2".into(), + "verbosity=12345678".into(), + ), + ]; + + assert_eq!( + from_env::(env.into_iter()).unwrap(), + CustomOptions { + filename: None, + verbosity: 12_345_678, + debug: vec![Debug::All], + cache_imports: vec![], + } + ); + } + + #[test] + fn empty_cache() { + let env = vec![ + ("BUILDKIT_FRONTEND_OPT_1".into(), "cache-imports=".into()), + ( + "BUILDKIT_FRONTEND_OPT_2".into(), + "verbosity=12345678".into(), + ), + ]; + + assert_eq!( + from_env::(env.into_iter()).unwrap(), + CustomOptions { + filename: None, + verbosity: 12_345_678, + debug: vec![], + cache_imports: vec![], + } + ); + } +} diff --git a/dockerfile-plus/src/stdio.rs b/dockerfile-plus/src/stdio.rs new file mode 100644 index 0000000..8f5e6ae --- /dev/null +++ b/dockerfile-plus/src/stdio.rs @@ -0,0 +1,182 @@ +use std::io::{self, stdin, stdout, Read, Write}; +use std::os::unix::io::AsRawFd; +use std::pin::Pin; +use std::task::{Context, Poll}; + +use pin_project::pin_project; +use std::{ + io::{Stdin, Stdout}, + net::{IpAddr, Ipv4Addr, SocketAddr}, +}; +use tokio::io::*; +use tonic::transport::{server::Connected, Uri}; + 
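+// StdioSocket wraps a blocking reader/writer pair (the frontend's own stdin/stdout, or the
+// stdin/stdout pipes of a spawned child process) as a single AsyncRead + AsyncWrite stream,
+// so tonic can run a gRPC client or server over stdio instead of a TCP socket. The underlying
+// file descriptors are switched to O_NONBLOCK and registered with mio through EventedFd (see
+// the async_stdio module below); Connected::remote_addr reports a placeholder address, since
+// there is no real network peer.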
+#[pin_project] +pub struct StdioSocket { + #[pin] + reader: PollEvented>, + + #[pin] + writer: PollEvented>, +} + +pub async fn stdio_connector(_: Uri) -> io::Result> { + StdioSocket::try_new() +} + +impl StdioSocket { + pub fn try_new() -> io::Result { + Self::try_new_rw(stdin(), stdout()) + } +} + +impl Connected for StdioSocket { + fn remote_addr(&self) -> Option { + Some(SocketAddr::new(IpAddr::from(Ipv4Addr::UNSPECIFIED), 8080)) + } +} + +impl StdioSocket { + pub fn try_new_rw(read: R, write: W) -> io::Result { + Ok(StdioSocket { + reader: PollEvented::new(async_stdio::EventedStdin::try_new(read)?)?, + writer: PollEvented::new(async_stdio::EventedStdout::try_new(write)?)?, + }) + } +} + +impl AsyncRead for StdioSocket { + fn poll_read( + self: Pin<&mut Self>, + cx: &mut Context<'_>, + buf: &mut [u8], + ) -> Poll> { + self.project().reader.poll_read(cx, buf) + } +} + +impl AsyncWrite for StdioSocket { + fn poll_write(self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &[u8]) -> Poll> { + self.project().writer.poll_write(cx, buf) + } + + fn poll_flush(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { + self.project().writer.poll_flush(cx) + } + + fn poll_shutdown(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { + self.project().writer.poll_shutdown(cx) + } +} + +mod async_stdio { + use std::io::{self, Read, Write}; + use std::os::unix::io::AsRawFd; + + use mio::event::Evented; + use mio::unix::EventedFd; + use mio::{Poll, PollOpt, Ready, Token}; + + use libc::{fcntl, F_GETFL, F_SETFL, O_NONBLOCK}; + + pub struct EventedStdin(T); + pub struct EventedStdout(T); + + impl EventedStdin { + pub fn try_new(stdin: T) -> io::Result { + set_non_blocking_flag(&stdin)?; + + Ok(EventedStdin(stdin)) + } + } + + impl EventedStdout { + pub fn try_new(stdout: T) -> io::Result { + set_non_blocking_flag(&stdout)?; + + Ok(EventedStdout(stdout)) + } + } + + impl Evented for EventedStdin { + fn register( + &self, + poll: &Poll, + token: Token, + interest: Ready, + opts: PollOpt, + ) -> io::Result<()> { + EventedFd(&self.0.as_raw_fd()).register(poll, token, interest, opts) + } + + fn reregister( + &self, + poll: &Poll, + token: Token, + interest: Ready, + opts: PollOpt, + ) -> io::Result<()> { + EventedFd(&self.0.as_raw_fd()).reregister(poll, token, interest, opts) + } + + fn deregister(&self, poll: &Poll) -> io::Result<()> { + EventedFd(&self.0.as_raw_fd()).deregister(poll) + } + } + + impl Read for EventedStdin { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + self.0.read(buf) + } + } + + impl Evented for EventedStdout { + fn register( + &self, + poll: &Poll, + token: Token, + interest: Ready, + opts: PollOpt, + ) -> io::Result<()> { + EventedFd(&self.0.as_raw_fd()).register(poll, token, interest, opts) + } + + fn reregister( + &self, + poll: &Poll, + token: Token, + interest: Ready, + opts: PollOpt, + ) -> io::Result<()> { + EventedFd(&self.0.as_raw_fd()).reregister(poll, token, interest, opts) + } + + fn deregister(&self, poll: &Poll) -> io::Result<()> { + EventedFd(&self.0.as_raw_fd()).deregister(poll) + } + } + + impl Write for EventedStdout { + fn write(&mut self, buf: &[u8]) -> io::Result { + self.0.write(buf) + } + + fn flush(&mut self) -> io::Result<()> { + self.0.flush() + } + } + + fn set_non_blocking_flag(stream: &T) -> io::Result<()> { + let flags = unsafe { fcntl(stream.as_raw_fd(), F_GETFL, 0) }; + + if flags < 0 { + return Err(std::io::Error::last_os_error()); + } + + if unsafe { fcntl(stream.as_raw_fd(), F_SETFL, flags | O_NONBLOCK) } != 0 { + return 
Err(std::io::Error::last_os_error()); + } + + Ok(()) + } +}
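The heart of the frontend is the `INCLUDE+` rewriting done in `dockerfile_trap`: every line that starts with `INCLUDE+` is replaced by the contents of the named file, read from the build context through the LLB bridge, before the rewritten Dockerfile is handed to the stock Dockerfile frontend. The sketch below shows that rewriting step in isolation; it is a simplified, synchronous stand-in in which the `read_file` RPC is replaced by a caller-supplied closure, and the `preprocess` name is illustrative rather than part of the crate.

```rust
// Illustrative sketch only: mirrors the line rewriting in `dockerfile_trap`,
// but resolves INCLUDE+ paths through a plain closure instead of the
// LlbBridge read_file RPC against the "context" layer.
fn preprocess<F>(dockerfile: &str, mut resolve: F) -> Result<String, String>
where
    F: FnMut(&str) -> Option<String>,
{
    const INCLUDE_COMMAND: &str = "INCLUDE+";
    let mut out = Vec::new();
    for line in dockerfile.lines() {
        if let Some(path) = line.trim().strip_prefix(INCLUDE_COMMAND) {
            let path = path.trim_start();
            // Paths are relative to the build context, not the Dockerfile.
            match resolve(path) {
                Some(contents) => out.push(contents),
                None => return Err(format!("Could not read file \"{}\"", path)),
            }
        } else {
            out.push(line.to_string());
        }
    }
    Ok(out.join("\n"))
}

fn main() {
    let dockerfile = "FROM alpine\nINCLUDE+ Dockerfile.common\nRUN echo done";
    let rewritten = preprocess(dockerfile, |path| match path {
        "Dockerfile.common" => Some("ENV RUST_LOG=debug".to_string()),
        _ => None,
    })
    .unwrap();
    assert_eq!(rewritten, "FROM alpine\nENV RUST_LOG=debug\nRUN echo done");
}
```

Run against the include example above, this splices `Dockerfile.common` into the parent Dockerfile much as the real frontend does before delegating to `DockerfileFrontend::solve`.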