diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile new file mode 100644 index 0000000..0977718 --- /dev/null +++ b/.devcontainer/Dockerfile @@ -0,0 +1,6 @@ +FROM rust:1.75 +ARG DEBIAN_FRONTEND=noninteractive +RUN apt-get update && apt-get install -yq \ +ca-certificates coreutils curl git make mercurial \ +build-essential clang llvm libclang-dev lld \ +gyp ninja-build pkg-config zlib1g-dev libssl-dev libtss2-dev diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 0000000..f9f0095 --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,18 @@ +{ + "name": "Development Container", + "dockerFile": "Dockerfile", + "customizations": { + "vscode": { + "extensions": [ + "ms-vscode.cpptools", + "vsls-contrib.codetour" + ] + } + }, + "features": { + "ghcr.io/devcontainers/features/docker-from-docker:1": {} + }, + "runArgs": [ + "--network=host" + ] +} \ No newline at end of file diff --git a/.github/workflows/check.yml b/.github/workflows/check.yml new file mode 100644 index 0000000..f9e2844 --- /dev/null +++ b/.github/workflows/check.yml @@ -0,0 +1,76 @@ +name: CI +on: + push: + branches: ["main"] + paths-ignore: ["*.md", "LICENSE-*"] + pull_request: + branches: ["main"] + paths-ignore: ["*.md", "LICENSE-*"] + workflow_dispatch: + +env: + CARGO_TERM_COLOR: always + +jobs: + check: + name: Continuous Integration + runs-on: ubuntu-latest + env: + NSS_DIR: ${{ github.workspace }}/nss + strategy: + fail-fast: false + matrix: + hpke: + - rust-hpke + rust: + - 1.75.0 + - stable + + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Install Rust + uses: dtolnay/rust-toolchain@master + with: + toolchain: ${{ matrix.rust }} + components: rustfmt, clippy, llvm-tools-preview + + - name: Check formatting + if: ${{ success() || failure() }} + run: | + cargo +${{ matrix.rust }} fmt --all -- --check --config imports_granularity=Crate + + - name: Clippy + if: ${{ success() || failure() 
}} + run: | + cargo clippy --tests + + container-build-and-test: + name: Container build and test + runs-on: ubuntu-latest + strategy: + fail-fast: false + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Build container images + run: | + make build + + - name: Run server container image + run: | + DETACHED="-d" make run-server-whisper + + - name: Run client image + run: | + ./scripts/service_wait.sh 127.0.0.1:3000 + ./scripts/service_wait.sh 127.0.0.1:9443 + make run-client-container + + - name: Stop containers + run: | + docker stop $(docker ps -q) diff --git a/.gitignore b/.gitignore index 8a30d25..8fbb2ba 100644 --- a/.gitignore +++ b/.gitignore @@ -1,398 +1,6 @@ -## Ignore Visual Studio temporary files, build results, and -## files generated by popular Visual Studio add-ons. -## -## Get latest from https://github.com/github/gitignore/blob/main/VisualStudio.gitignore - -# User-specific files -*.rsuser -*.suo -*.user -*.userosscache -*.sln.docstates - -# User-specific files (MonoDevelop/Xamarin Studio) -*.userprefs - -# Mono auto generated files -mono_crash.* - -# Build results -[Dd]ebug/ -[Dd]ebugPublic/ -[Rr]elease/ -[Rr]eleases/ -x64/ -x86/ -[Ww][Ii][Nn]32/ -[Aa][Rr][Mm]/ -[Aa][Rr][Mm]64/ -bld/ -[Bb]in/ -[Oo]bj/ -[Ll]og/ -[Ll]ogs/ - -# Visual Studio 2015/2017 cache/options directory -.vs/ -# Uncomment if you have tasks that create the project's static files in wwwroot -#wwwroot/ - -# Visual Studio 2017 auto generated files -Generated\ Files/ - -# MSTest test Results -[Tt]est[Rr]esult*/ -[Bb]uild[Ll]og.* - -# NUnit -*.VisualState.xml -TestResult.xml -nunit-*.xml - -# Build Results of an ATL Project -[Dd]ebugPS/ -[Rr]eleasePS/ -dlldata.c - -# Benchmark Results -BenchmarkDotNet.Artifacts/ - -# .NET Core -project.lock.json -project.fragment.lock.json -artifacts/ - -# ASP.NET Scaffolding -ScaffoldingReadMe.txt - -# StyleCop -StyleCopReport.xml - -# Files built by Visual Studio -*_i.c -*_p.c -*_h.h -*.ilk -*.meta -*.obj -*.iobj 
-*.pch -*.pdb -*.ipdb -*.pgc -*.pgd -*.rsp -*.sbr -*.tlb -*.tli -*.tlh -*.tmp -*.tmp_proj -*_wpftmp.csproj -*.log -*.tlog -*.vspscc -*.vssscc -.builds -*.pidb -*.svclog -*.scc - -# Chutzpah Test files -_Chutzpah* - -# Visual C++ cache files -ipch/ -*.aps -*.ncb -*.opendb -*.opensdf -*.sdf -*.cachefile -*.VC.db -*.VC.VC.opendb - -# Visual Studio profiler -*.psess -*.vsp -*.vspx -*.sap - -# Visual Studio Trace Files -*.e2e - -# TFS 2012 Local Workspace -$tf/ - -# Guidance Automation Toolkit -*.gpState - -# ReSharper is a .NET coding add-in -_ReSharper*/ -*.[Rr]e[Ss]harper -*.DotSettings.user - -# TeamCity is a build add-in -_TeamCity* - -# DotCover is a Code Coverage Tool -*.dotCover - -# AxoCover is a Code Coverage Tool -.axoCover/* -!.axoCover/settings.json - -# Coverlet is a free, cross platform Code Coverage Tool -coverage*.json -coverage*.xml -coverage*.info - -# Visual Studio code coverage results -*.coverage -*.coveragexml - -# NCrunch -_NCrunch_* -.*crunch*.local.xml -nCrunchTemp_* - -# MightyMoose -*.mm.* -AutoTest.Net/ - -# Web workbench (sass) -.sass-cache/ - -# Installshield output folder -[Ee]xpress/ - -# DocProject is a documentation generator add-in -DocProject/buildhelp/ -DocProject/Help/*.HxT -DocProject/Help/*.HxC -DocProject/Help/*.hhc -DocProject/Help/*.hhk -DocProject/Help/*.hhp -DocProject/Help/Html2 -DocProject/Help/html - -# Click-Once directory -publish/ - -# Publish Web Output -*.[Pp]ublish.xml -*.azurePubxml -# Note: Comment the next line if you want to checkin your web deploy settings, -# but database connection strings (with potential passwords) will be unencrypted -*.pubxml -*.publishproj - -# Microsoft Azure Web App publish settings. 
Comment the next line if you want to -# checkin your Azure Web App publish settings, but sensitive information contained -# in these scripts will be unencrypted -PublishScripts/ - -# NuGet Packages -*.nupkg -# NuGet Symbol Packages -*.snupkg -# The packages folder can be ignored because of Package Restore -**/[Pp]ackages/* -# except build/, which is used as an MSBuild target. -!**/[Pp]ackages/build/ -# Uncomment if necessary however generally it will be regenerated when needed -#!**/[Pp]ackages/repositories.config -# NuGet v3's project.json files produces more ignorable files -*.nuget.props -*.nuget.targets - -# Microsoft Azure Build Output -csx/ -*.build.csdef - -# Microsoft Azure Emulator -ecf/ -rcf/ - -# Windows Store app package directories and files -AppPackages/ -BundleArtifacts/ -Package.StoreAssociation.xml -_pkginfo.txt -*.appx -*.appxbundle -*.appxupload - -# Visual Studio cache files -# files ending in .cache can be ignored -*.[Cc]ache -# but keep track of directories ending in .cache -!?*.[Cc]ache/ - -# Others -ClientBin/ -~$* +/target/ +/Cargo.lock *~ -*.dbmdl -*.dbproj.schemaview -*.jfm -*.pfx -*.publishsettings -orleans.codegen.cs - -# Including strong name files can present a security risk -# (https://github.com/github/gitignore/pull/2483#issue-259490424) -#*.snk - -# Since there are multiple workflows, uncomment next line to ignore bower_components -# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) -#bower_components/ - -# RIA/Silverlight projects -Generated_Code/ - -# Backup & report files from converting an old project file -# to a newer Visual Studio version. 
Backup files are not needed, -# because we have git ;-) -_UpgradeReport_Files/ -Backup*/ -UpgradeLog*.XML -UpgradeLog*.htm -ServiceFabricBackup/ -*.rptproj.bak - -# SQL Server files -*.mdf -*.ldf -*.ndf - -# Business Intelligence projects -*.rdl.data -*.bim.layout -*.bim_*.settings -*.rptproj.rsuser -*- [Bb]ackup.rdl -*- [Bb]ackup ([0-9]).rdl -*- [Bb]ackup ([0-9][0-9]).rdl - -# Microsoft Fakes -FakesAssemblies/ - -# GhostDoc plugin setting file -*.GhostDoc.xml - -# Node.js Tools for Visual Studio -.ntvs_analysis.dat -node_modules/ - -# Visual Studio 6 build log -*.plg - -# Visual Studio 6 workspace options file -*.opt - -# Visual Studio 6 auto-generated workspace file (contains which files were open etc.) -*.vbw - -# Visual Studio 6 auto-generated project file (contains which files were open etc.) -*.vbp - -# Visual Studio 6 workspace and project file (working project files containing files to include in project) -*.dsw -*.dsp - -# Visual Studio 6 technical files -*.ncb -*.aps - -# Visual Studio LightSwitch build output -**/*.HTMLClient/GeneratedArtifacts -**/*.DesktopClient/GeneratedArtifacts -**/*.DesktopClient/ModelManifest.xml -**/*.Server/GeneratedArtifacts -**/*.Server/ModelManifest.xml -_Pvt_Extensions - -# Paket dependency manager -.paket/paket.exe -paket-files/ - -# FAKE - F# Make -.fake/ - -# CodeRush personal settings -.cr/personal - -# Python Tools for Visual Studio (PTVS) -__pycache__/ -*.pyc - -# Cake - Uncomment if you are using it -# tools/** -# !tools/packages.config - -# Tabs Studio -*.tss - -# Telerik's JustMock configuration file -*.jmconfig - -# BizTalk build output -*.btp.cs -*.btm.cs -*.odx.cs -*.xsd.cs - -# OpenCover UI analysis results -OpenCover/ - -# Azure Stream Analytics local run output -ASALocalRun/ - -# MSBuild Binary and Structured Log -*.binlog - -# NVidia Nsight GPU debugger configuration file -*.nvuser - -# MFractors (Xamarin productivity tool) working folder -.mfractor/ - -# Local History for Visual Studio -.localhistory/ - -# 
Visual Studio History (VSHistory) files -.vshistory/ - -# BeatPulse healthcheck temp database -healthchecksdb - -# Backup folder for Package Reference Convert tool in Visual Studio 2017 -MigrationBackup/ - -# Ionide (cross platform F# VS Code tools) working folder -.ionide/ - -# Fody - auto-generated XML schema -FodyWeavers.xsd - -# VS Code files for those working on multiple tools -.vscode/* -!.vscode/settings.json -!.vscode/tasks.json -!.vscode/launch.json -!.vscode/extensions.json -*.code-workspace - -# Local History for Visual Studio Code -.history/ - -# Windows Installer files from build outputs -*.cab -*.msi -*.msix -*.msm -*.msp - -# JetBrains Rider -*.sln.iml +*.swp +/.vscode/ +*.pem \ No newline at end of file diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000..4e60fa5 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "external/attested-ohttp-client"] + path = external/attested-ohttp-client + url = https://github.com/microsoft/attested-ohttp-client diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..fc2fdc8 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,6 @@ +[workspace] +resolver = "2" +members = [ + "ohttp-server", + "cgpuvm-attest", +] diff --git a/Makefile b/Makefile new file mode 100755 index 0000000..89e709f --- /dev/null +++ b/Makefile @@ -0,0 +1,86 @@ +KMS ?= https://accconfinferencedebug.confidential-ledger.azure.com +MAA ?= https://maanosecureboottestyfu.eus.attest.azure.net + +# MODEL can be whisper_opensource, whisper_aoai or whisper_aoai_local +MODEL ?= whisper_opensource +ifeq ($(MODEL), whisper_opensource) + export TARGET ?= http://127.0.0.1:3000 + export TARGET_PATH ?= '/whisper' + export SCORING_ENDPOINT ?= 'http://localhost:9443/score' +else ifeq ($(MODEL), whisper_aoai) + TARGET ?= http://127.0.0.1:5001 + TARGET_PATH ?= '/v1/engines/whisper/audio/transcriptions' + SCORING_ENDPOINT ?= 'http://localhost:9443/score' +else + echo "Unknown model" +endif + +export INPUT_DIR ?= ${PWD}/examples 
+export MOUNTED_INPUT_DIR ?= /test +export INPUT_FILE ?= audio.mp3 +export INJECT_HEADERS ?= openai-internal-enableasrsupport +export DETACHED ?= + +# Build commands + +build-server: + cargo build --bin ohttp-server + +build-whisper-container: + docker build -f docker/whisper/Dockerfile -t whisper-api ./docker/whisper + +build-server-container: + docker build -f docker/server/Dockerfile -t attested-ohttp-server . + +build-client-container: + docker build -f external/attested-ohttp-client/docker/Dockerfile -t attested-ohttp-client external/attested-ohttp-client/ + +build: build-server-container build-client-container build-whisper-container + +format-checks: + cargo fmt --all -- --check --config imports_granularity=Crate + cargo clippy --tests --no-default-features --features rust-hpke + +# Containerized server deployments + +run-server-container: + docker compose -f ./docker/docker-compose-server.yml up + +run-server-container-cvm: + docker run --privileged --net=host \ + -e TARGET=${TARGET} -e MAA_URL=${MAA} -e KMS_URL=${KMS}/app/key -e INJECT_HEADERS=${INJECT_HEADERS} \ + --mount type=bind,source=/sys/kernel/security,target=/sys/kernel/security \ + --device /dev/tpmrm0 attested-ohttp-server + +# Whisper deployments + +run-whisper: + docker run --network=host whisper-api + +run-whisper-faster: + docker run --network=host fedirz/faster-whisper-server:latest-cuda + +run-server-faster: + docker compose -f ./docker/docker-compose-faster-whisper.yml up + +service-cert: + curl -s -k ${KMS}/node/network | jq -r .service_certificate > service_cert.pem + +# Server and whisper deployment + +run-server-whisper: + docker compose -f ./docker/docker-compose-whisper.yml up ${DETACHED} + +run-server-whisper-gpu: + docker compose -f ./docker/docker-compose-whisper-gpu.yml up ${DETACHED} + +# Containerized client deployments + +run-client-container: + docker run --net=host --volume ${INPUT_DIR}:${MOUNTED_INPUT_DIR} \ + attested-ohttp-client $(SCORING_ENDPOINT) -F 
"file=@${MOUNTED_INPUT_DIR}/${INPUT_FILE}" --target-path ${TARGET_PATH} + +run-client-container-aoai: + docker run --volume ${INPUT_DIR}:${MOUNTED_INPUT_DIR} -e KMS_URL=${KMS} \ + attested-ohttp-client ${SCORING_ENDPOINT} -F "file=@${MOUNTED_INPUT_DIR}/${INPUT_FILE}" --target-path ${TARGET_PATH} \ + -O "api-key: ${API_KEY}" -F "response_format=json" \ No newline at end of file diff --git a/README.md b/README.md index 5cd7cec..c698199 100644 --- a/README.md +++ b/README.md @@ -1,14 +1,50 @@ -# Project +# Attested OHTTP Server -> This repo has been populated by an initial template to help get you started. Please -> make sure to update the content to build a great experience for community-building. +This repository is an implementation of an attested OHTTP server for [Azure AI Confidential Inferencing](https://techcommunity.microsoft.com/blog/azure-ai-services-blog/azure-ai-confidential-inferencing-preview/4248181). +Together with [attested OHTTP client](https://github.com/microsoft/attested-ohttp-client) and a [transparent +key management service](https://github.com/microsoft/azure-transparent-kms), it enables secure communication between clients and [Confidential GPU VMs](https://) serving Azure AI models using [chunked OHTTP](https://www.ietf.org/archive/id/draft-ohai-chunked-ohttp-01.html). Learn more here. -As the maintainer of this project, please make a few updates: +- [Azure AI Confidential Inferencing: Technical Deep Dive](https://techcommunity.microsoft.com/blog/azureconfidentialcomputingblog/azure-ai-confidential-inferencing-technical-deep-dive/4253150) -- Improving this README.MD file to provide a great experience -- Updating SUPPORT.MD with content about this project's support experience -- Understanding the security reporting process in SECURITY.MD -- Remove this section from the README +## Build + +The repo supports build and development using GitHub Codespaces and devcontainers. 
The repository includes a devcontainer configuration that installs all dependencies. + +[![Open in GitHub Codespaces](https://github.com/codespaces/badge.svg)](https://codespaces.new/microsoft/attested-ohttp-server). + +Build the attested OHTTP server container. + +``` +make build-server-container +``` + +## Test + +For local testing, this repository includes a sample whisper container. + +``` +make build-whisper-container +``` + +Next, clone and build the attested OHTTP client container. + +``` +git submodule update --recursive +make build-client-container +``` + +Finally, run the containers locally. This command will launch the OHTTP server in a mode where the server generates its own HPKE key pair, and publish an OHTTP key configuration at a local endpoint ```http://127.0.0.1:9443/discover```. This will also launch the whisper container, which listens at the endpoint ```http://127.0.0.1:3000/whisper```. + +``` +make run-server-whisper +``` + +In a separate terminal, launch the client providing as input an audio file (included in this repo) and a OHTTP key configuration obtained from the discovery endpoint. 
+``` +./scripts/service_wait.sh 127.0.0.1:3000 +./scripts/service_wait.sh 127.0.0.1:9443 +make run-client-container +``` ## Contributing diff --git a/cgpuvm-attest/Cargo.toml b/cgpuvm-attest/Cargo.toml new file mode 100644 index 0000000..abc04a7 --- /dev/null +++ b/cgpuvm-attest/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "cgpuvm-attest" +version = "0.1.0" +authors = ["Antoine Delignat-Lavaud "] +edition = "2021" +license = "MIT" +description = "FFI for Confidential GPU VM Guest Attestation Library" +repository = "https://github.com/ad-l/ohttp" + +[features] +default = [] + +[dependencies] +libc = "0.2.0" +tracing = "0.1" +thiserror = "1" + +[dependencies.ohttp] +git = "https://github.com/microsoft/ohttp.git" +branch = "main" +features = ["server"] +default-features = false \ No newline at end of file diff --git a/cgpuvm-attest/src/err.rs b/cgpuvm-attest/src/err.rs new file mode 100644 index 0000000..e982501 --- /dev/null +++ b/cgpuvm-attest/src/err.rs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +use thiserror::Error; + +#[derive(Error, Debug)] +pub enum AttestError { + #[error("Failed to convert endpoint URL to CString")] + Convertion, + #[error("CVM guest attestation library returned error: {0}")] + MAAToken(i32), +} diff --git a/cgpuvm-attest/src/lib.rs b/cgpuvm-attest/src/lib.rs new file mode 100755 index 0000000..9849a5a --- /dev/null +++ b/cgpuvm-attest/src/lib.rs @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+ +pub mod err; + +use err::AttestError; +use libc::{c_char, c_int, size_t}; +use std::ffi::CString; + +type Res = Result>; + +#[link(name = "azguestattestation")] +extern "C" { + fn get_attestation_token( + app_data: *const u8, + pcr_sel: u32, + jwt: *mut u8, + jwt_len: *mut size_t, + endpoint_url: *const c_char, + ) -> c_int; +} + +pub fn attest(data: &[u8], pcrs: u32, endpoint_url: &str) -> Res> { + match CString::new(endpoint_url) { + Ok(endpoint_url_cstring) => unsafe { + let mut dstlen = 32 * 1024; + let mut dst = Vec::with_capacity(dstlen); + let pdst = dst.as_mut_ptr(); + + let url_ptr = endpoint_url_cstring.as_ptr(); + + let ret = get_attestation_token(data.as_ptr(), pcrs, pdst, &mut dstlen, url_ptr); + if ret == 0 { + dst.set_len(dstlen); + Ok(dst) + } else { + Err(Box::new(AttestError::MAAToken(ret))) + } + }, + _e => Err(Box::new(AttestError::Convertion)), + } +} diff --git a/docker/docker-compose-faster-whisper.yml b/docker/docker-compose-faster-whisper.yml new file mode 100644 index 0000000..39365da --- /dev/null +++ b/docker/docker-compose-faster-whisper.yml @@ -0,0 +1,17 @@ +services: + server: + image: attested-ohttp-server + ports: + - "9443:9443" + network_mode: "host" + environment: + - TARGET=http://localhost:8000 + - LOCAL_KEY=1 + depends_on: + - whisper + + whisper: + image: fedirz/faster-whisper-server:latest-cuda + network_mode: "host" + ports: + - "8000:8000" \ No newline at end of file diff --git a/docker/docker-compose-server.yml b/docker/docker-compose-server.yml new file mode 100644 index 0000000..c33f922 --- /dev/null +++ b/docker/docker-compose-server.yml @@ -0,0 +1,9 @@ +services: + server: + image: attested-ohttp-server + ports: + - "9443:9443" + network_mode: "host" + environment: + - TARGET=${TARGET} + - LOCAL_KEY=1 diff --git a/docker/docker-compose-whisper-gpu.yml b/docker/docker-compose-whisper-gpu.yml new file mode 100644 index 0000000..8f00516 --- /dev/null +++ b/docker/docker-compose-whisper-gpu.yml @@ -0,0 +1,24 @@ 
+services: + server: + image: attested-ohttp-server + ports: + - "9443:9443" + network_mode: "host" + environment: + - TARGET=${TARGET} + - LOCAL_KEY=1 + depends_on: + - whisper + + whisper: + image: whisper-api + network_mode: "host" + deploy: + resources: + reservations: + devices: + - driver: nvidia + count: 1 + capabilities: [gpu] + ports: + - "3000:3000" diff --git a/docker/docker-compose-whisper.yml b/docker/docker-compose-whisper.yml new file mode 100644 index 0000000..a956d68 --- /dev/null +++ b/docker/docker-compose-whisper.yml @@ -0,0 +1,17 @@ +services: + server: + image: attested-ohttp-server + ports: + - "9443:9443" + network_mode: "host" + environment: + - TARGET=${TARGET} + - LOCAL_KEY=1 + depends_on: + - whisper + + whisper: + image: whisper-api + network_mode: "host" + ports: + - "3000:3000" diff --git a/docker/server/Dockerfile b/docker/server/Dockerfile new file mode 100755 index 0000000..17c2f45 --- /dev/null +++ b/docker/server/Dockerfile @@ -0,0 +1,17 @@ +FROM rust:1.75 AS builder + +ARG DEBIAN_FRONTEND=noninteractive +RUN apt-get update && apt-get install -yq openssl libssl-dev libtss2-dev + +WORKDIR /usr/src/ohttp +COPY . . +RUN ln -s /usr/src/ohttp/ohttp-server/libazguestattestation.so.1.0.5 /usr/lib/libazguestattestation.so +RUN cargo install --path ohttp-server --debug + +FROM ubuntu:22.04 +RUN apt-get update && apt-get install -y ca-certificates openssl libssl-dev libtss2-dev && rm -rf /var/lib/apt/lists/* +COPY --from=builder /usr/local/cargo/bin/ohttp-server /usr/local/bin/ohttp-server +COPY ./ohttp-server/libazguestattestation.so.1.0.5 /usr/bin/ +RUN ln -s /usr/bin/libazguestattestation.so.1.0.5 /usr/lib/libazguestattestation.so.1 +COPY --chmod=755 ./docker/server/run.sh . 
+CMD ["./run.sh"] diff --git a/docker/server/run.sh b/docker/server/run.sh new file mode 100644 index 0000000..ea33567 --- /dev/null +++ b/docker/server/run.sh @@ -0,0 +1,61 @@ +#!/bin/bash + +is_valid_url() { + local url="$1" + + # Regular expression to validate the URL + local regex='^https?://[a-zA-Z0-9.-]+(:[0-9]+)?(/.*)?$' + + if [[ $url =~ $regex ]]; then + return 0 + else + return 1 + fi +} + +if [[ -z ${TARGET} ]]; then + echo "No TARGET defined" + exit 1 +fi + +CMD="RUST_LOG=info" +if [[ -n ${TRACE} ]]; then + CMD="RUST_LOG=trace" +fi + +if is_valid_url $TARGET; then + CMD="$CMD /usr/local/bin/ohttp-server --target $TARGET" +else + echo "TARGET is not a valid URL" + exit 1 +fi + +if [[ -n ${LOCAL_KEY} ]]; then + CMD="$CMD --local-key" +fi + +if [[ -n ${INJECT_HEADERS} ]]; then + CMD="$CMD --inject-request-headers ${INJECT_HEADERS}" +fi + +if [[ -n ${MAA_URL} ]]; then + if is_valid_url ${MAA_URL}; then + CMD="$CMD --maa-url ${MAA_URL}" + else + echo "MAA_URL is not a valid URL" + exit 1 + fi +fi + +if [[ -n ${KMS_URL} ]]; then + if is_valid_url $KMS_URL; then + CMD="$CMD --kms-url ${KMS_URL}" + else + echo "KMS_URL is not a valid URL" + exit 1 + fi +fi + +# Run OHTTP server +echo "Running $CMD..." +eval $CMD \ No newline at end of file diff --git a/docker/whisper/Dockerfile b/docker/whisper/Dockerfile new file mode 100644 index 0000000..0320f9d --- /dev/null +++ b/docker/whisper/Dockerfile @@ -0,0 +1,15 @@ +FROM python:3.10-slim + +WORKDIR /python-docker + +COPY requirements.txt requirements.txt +RUN apt-get update && apt-get install git -y +RUN pip3 install -r requirements.txt +RUN pip3 install "git+https://github.com/openai/whisper.git" +RUN apt-get install -y ffmpeg + +COPY . . 
+ +EXPOSE 3000 + +CMD [ "python3", "-m" , "flask", "run", "--host=0.0.0.0", "--port=3000"] \ No newline at end of file diff --git a/docker/whisper/app.py b/docker/whisper/app.py new file mode 100644 index 0000000..8d0a255 --- /dev/null +++ b/docker/whisper/app.py @@ -0,0 +1,47 @@ +from flask import Flask, abort, request +from tempfile import NamedTemporaryFile +import whisper +import torch + +# Check if NVIDIA GPU is available +torch.cuda.is_available() +DEVICE = "cuda" if torch.cuda.is_available() else "cpu" + +# Load the Whisper model: +model = whisper.load_model("base", device=DEVICE) + +app = Flask(__name__) + +@app.route("/") +def hello(): + return "Whisper Hello World!" + + +@app.route('/whisper', methods=['POST']) +def handler(): + if not request.files: + # If the user didn't submit any files, return a 400 (Bad Request) error. + abort(400) + + # For each file, let's store the results in a list of dictionaries. + results = [] + + # Loop over every file that the user submitted. + for filename, handle in request.files.items(): + # Create a temporary file. + # The location of the temporary file is available in `temp.name`. + temp = NamedTemporaryFile() + # Write the user's uploaded file to the temporary file. + # The file will get deleted when it drops out of scope. + handle.save(temp) + # Let's get the transcript of the temporary file. + result = model.transcribe(temp.name) + # Now we can store the result object for this file. + results.append({ + 'filename': filename, + 'transcript': result['text'], + }) + + # This will be automatically converted to JSON. 
+ return {'results': results} + \ No newline at end of file diff --git a/docker/whisper/requirements.txt b/docker/whisper/requirements.txt new file mode 100644 index 0000000..8ab6294 --- /dev/null +++ b/docker/whisper/requirements.txt @@ -0,0 +1 @@ +flask \ No newline at end of file diff --git a/examples/audio.mp3 b/examples/audio.mp3 new file mode 100644 index 0000000..b07ac44 Binary files /dev/null and b/examples/audio.mp3 differ diff --git a/external/attested-ohttp-client b/external/attested-ohttp-client new file mode 160000 index 0000000..86fbb40 --- /dev/null +++ b/external/attested-ohttp-client @@ -0,0 +1 @@ +Subproject commit 86fbb4064c915a8f461f673aa3981efa29fce35e diff --git a/ohttp-server/.gitignore b/ohttp-server/.gitignore new file mode 100644 index 0000000..bb750da --- /dev/null +++ b/ohttp-server/.gitignore @@ -0,0 +1,3 @@ +ca.crt +server.key +server.crt diff --git a/ohttp-server/Cargo.toml b/ohttp-server/Cargo.toml new file mode 100644 index 0000000..c7692dd --- /dev/null +++ b/ohttp-server/Cargo.toml @@ -0,0 +1,49 @@ +[package] +name = "ohttp-server" +version = "0.1.0" +authors = ["Antoine Delignat-Lavaud ", "Arthi Gokarn ", "Kapil Vaswani ", "Tien Le "] +edition = "2021" + +[features] +default = ["rust-hpke"] +nss = ["ohttp/nss"] +rust-hpke = ["ohttp/rust-hpke"] + +[dependencies] +env_logger = {version = "0.10", default-features = false} +hex = "0.4" +lazy_static = "1.4" +moka = { version = "0.12", features = ["future"] } +tokio = { version = "1", features = ["full"] } +serde = { version = "1.0", features = ["derive"] } +elliptic-curve = { version = "0.13.8", features = ["jwk"] } +base64-url = "3.0.0" +hpke = {version = "0.12.0", features = ["std","p384"]} +serde_json = "1.0" +serde_cbor = "0.10" +warp = { version = "0.3", features = ["tls"] } +reqwest = { version = "0.11", default-features = false, features = ["rustls-tls", "stream"] } +futures-util = "0.3.30" +futures = "0.3.30" +log = "0.4.22" +clap = { version = "4.5.18", features = ["derive"] 
} +tracing = "0.1" +tracing-subscriber = { version = "0.3.18", features = ["default", "json", "env-filter"] } +thiserror = "1" +uuid = { version = "1.0", features = ["v4"] } + +[dependencies.cgpuvm-attest] +path= "../cgpuvm-attest" +features = [] + +[dependencies.bhttp] +git = "https://github.com/microsoft/ohttp.git" +branch = "main" +features = ["bhttp", "write-http"] + +[dependencies.ohttp] +git = "https://github.com/microsoft/ohttp.git" +branch = "main" +features = ["server"] +default-features = false + diff --git a/ohttp-server/libazguestattestation.so.1.0.5 b/ohttp-server/libazguestattestation.so.1.0.5 new file mode 100755 index 0000000..b70b1d7 Binary files /dev/null and b/ohttp-server/libazguestattestation.so.1.0.5 differ diff --git a/ohttp-server/src/err.rs b/ohttp-server/src/err.rs new file mode 100644 index 0000000..5c0b0ad --- /dev/null +++ b/ohttp-server/src/err.rs @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +use thiserror::Error; + +#[derive(Error, Debug)] +pub enum ServerError { + #[error("Incorrect CBOR encoding in returned private key")] + KMSCBOREncoding, + #[error("Bad CBOR key type, expected P-384(2)")] + KMSCBORKeyType, + #[error("Unexpected field in exported private key from KMS")] + KMSField, + #[error("Bad key identifier in SKR response")] + KMSKeyId, + #[error("Invalid secret exponent in SKR response")] + KMSExponent, + #[error("KMS returned an unexpected status code: {0}")] + KMSUnexpected(u16), + #[error("Max retries reached, giving up. Cannot reach key management service")] + KMSUnreachable, + #[error("Private key missing from SKR response")] + PrivateKeyMissing, +} diff --git a/ohttp-server/src/main.rs b/ohttp-server/src/main.rs new file mode 100755 index 0000000..f662de4 --- /dev/null +++ b/ohttp-server/src/main.rs @@ -0,0 +1,575 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+ +#![deny(clippy::pedantic)] + +pub mod err; + +use std::{io::Cursor, net::SocketAddr, sync::Arc}; + +use lazy_static::lazy_static; +use moka::future::Cache; + +use futures_util::stream::unfold; +use reqwest::{ + header::{HeaderMap, HeaderName, HeaderValue}, + Method, Response, Url, +}; + +use bhttp::{Message, Mode}; +use clap::Parser; +use ohttp::{ + hpke::{Aead, Kdf, Kem}, + Error, KeyConfig, Server as OhttpServer, ServerResponse, SymmetricSuite, +}; +use warp::{hyper::Body, Filter}; + +use tokio::time::{sleep, Duration}; + +use cgpuvm_attest::attest; +use reqwest::Client; + +type Res = Result>; + +use serde_cbor::Value; +use serde_json::from_str; + +use hpke::Deserializable; +use serde::Deserialize; + +use err::ServerError; +use tracing::{error, info, instrument, trace}; +use tracing_subscriber::{fmt::format::FmtSpan, EnvFilter, FmtSubscriber}; +use uuid::Uuid; + +const VERSION: &str = "1.0.0"; + +#[derive(Deserialize)] +struct ExportedKey { + kid: u8, + key: String, + receipt: String, +} + +const DEFAULT_KMS_URL: &str = "https://accconfinferencedebug.confidential-ledger.azure.com/app/key"; +const DEFAULT_MAA_URL: &str = "https://maanosecureboottestyfu.eus.attest.azure.net"; +const FILTERED_RESPONSE_HEADERS: [&str; 2] = ["content-type", "content-length"]; + +#[derive(Debug, Parser, Clone)] +#[command(name = "ohttp-server", about = "Serve oblivious HTTP requests.")] +struct Args { + /// The address to bind to. + #[arg(default_value = "127.0.0.1:9443")] + address: SocketAddr, + + /// When creating message/bhttp, use the indeterminate-length form. 
+ #[arg(long, short = 'n', alias = "indefinite")] + indeterminate: bool, + + /// Target server + #[arg(long, short = 't', default_value = "http://127.0.0.1:8000")] + target: Url, + + /// Use locally generated key, for testing without KMS + #[arg(long, short = 'l')] + local_key: bool, + + /// MAA endpoint + #[arg(long, short = 'm')] + maa_url: Option, + + /// KMS endpoint + #[arg(long, short = 's')] + kms_url: Option, + + #[arg(long, short = 'i')] + inject_request_headers: Vec, +} + +impl Args { + fn mode(&self) -> Mode { + if self.indeterminate { + Mode::IndeterminateLength + } else { + Mode::KnownLength + } + } +} + +lazy_static! { + static ref cache: Arc> = Arc::new( + Cache::builder() + .time_to_live(Duration::from_secs(24 * 60 * 60)) + .build() + ); +} + +fn parse_cbor_key(key: &str, kid: u8) -> Res<(Option>, u8)> { + let cwk = hex::decode(key)?; + let cwk_map: Value = serde_cbor::from_slice(&cwk)?; + let mut d = None; + let mut returned_kid: u8 = 0; + if let Value::Map(map) = cwk_map { + for (key, value) in map { + if let Value::Integer(key) = key { + match key { + // key identifier + 4 => { + if let Value::Integer(k) = value { + returned_kid = u8::try_from(k).unwrap(); + if returned_kid != kid { + return Err(Box::new(Error::KeyIdMismatch(returned_kid, kid))); + } + } else { + return Err(Box::new(ServerError::KMSKeyId)); + } + } + + // private exponent + -4 => { + if let Value::Bytes(vec) = value { + d = Some(vec); + } else { + return Err(Box::new(ServerError::KMSExponent)); + } + } + + // key type, must be P-384(2) + -1 => { + if value == Value::Integer(2) { + } else { + return Err(Box::new(ServerError::KMSCBORKeyType)); + } + } + + // Ignore public key (x,y) as we recompute it from d anyway + -2 | -3 => (), + + _ => { + return Err(Box::new(ServerError::KMSField)); + } + }; + }; + } + } else { + return Err(Box::new(ServerError::KMSCBOREncoding)); + }; + Ok((d, returned_kid)) +} + +/// Fetches the MAA token from the CVM guest attestation library. 
///
fn fetch_maa_token(maa: &str) -> Res {
    // Get MAA token from CVM guest attestation library
    info!("Fetching MAA token from {maa}");
    // NOTE(review): "{}" is the (empty) runtime-data payload and 0xffff is a
    // flag/mask argument — confirm semantics against the cgpuvm_attest API.
    let token = attest("{}".as_bytes(), 0xffff, maa)?;

    // NOTE(review): panics if the attestation library returns non-UTF-8
    // bytes; `String::from_utf8(token)?` would propagate the error instead.
    let token = String::from_utf8(token).unwrap();
    trace!("{token}");
    Ok(token)
}

/// Retrieves the HPKE private key from Azure KMS.
///
/// Polls the secure-key-release endpoint with the MAA `token`; HTTP 202 means
/// the receipt is not ready yet and triggers a 1s-backoff retry, up to
/// `max_retries` attempts. Any status other than 200/202 is fatal.
async fn get_hpke_private_key_from_kms(kms: &str, kid: u8, token: &str) -> Res {
    // NOTE(review): TLS certificate validation is disabled for the KMS
    // connection — acceptable only if the endpoint is authenticated by other
    // means; confirm this is intentional.
    let client = Client::builder()
        .danger_accept_invalid_certs(true)
        .build()?;

    // Retrying logic for receipt
    let max_retries = 3;
    let mut retries = 0;

    loop {
        let url = format!("{kms}?kid={kid}");
        info!("Sending SKR request to {url}");

        // Get HPKE private key from Azure KMS
        let response = client
            .post(url)
            .header("Authorization", format!("Bearer {token}"))
            .send()
            .await?;

        // We may have to wait for receipt to be ready
        match response.status().as_u16() {
            202 => {
                if retries < max_retries {
                    retries += 1;
                    trace!(
                        "Received 202 status code, retrying... (attempt {}/{})",
                        retries,
                        max_retries
                    );
                    sleep(Duration::from_secs(1)).await;
                } else {
                    return Err(Box::new(ServerError::KMSUnreachable));
                }
            }
            200 => {
                let skr_body = response.text().await?;
                info!("SKR successful");

                let skr: ExportedKey = from_str(&skr_body)?;
                trace!(
                    "requested KID={}, returned KID={}, Receipt={}",
                    kid,
                    skr.kid,
                    skr.receipt
                );

                // Defence in depth: the KMS must echo back the requested KID.
                if skr.kid != kid {
                    return Err(Box::new(Error::KeyIdMismatch(skr.kid, kid)));
                }

                return Ok(skr.key);
            }
            e => {
                return Err(Box::new(ServerError::KMSUnexpected(e)));
            }
        }
    }
}

/// Returns the OHTTP key configuration and MAA token for `kid`, fetching and
/// caching them on first use (24h TTL; see the `cache` static).
///
/// Flow: MAA attestation -> KMS secure key release -> CBOR parse -> rebuild
/// the P-384 key pair from the exponent -> `KeyConfig::import_p384`.
async fn load_config(maa: &str, kms: &str, kid: u8) -> Res<(KeyConfig, String)> {
    // Check if the key configuration is in cache
    if let Some((config, token)) = cache.get(&kid).await {
        info!("Found OHTTP configuration for KID {kid} in cache.");
        return Ok((config, token));
    }

    // Get MAA token from CVM guest attestation library
    let token = fetch_maa_token(maa)?;
    let key = get_hpke_private_key_from_kms(kms, kid, &token).await?;
    let (d, returned_kid) = parse_cbor_key(&key, kid)?;

    // Rebuild the private key from the exponent; the public key is recomputed
    // from `d` rather than trusted from the KMS response.
    let sk = match d {
        Some(key) => ::PrivateKey::from_bytes(&key),
        None => Err(Box::new(ServerError::PrivateKeyMissing))?,
    }?;
    let pk = ::sk_to_pk(&sk);

    let config = KeyConfig::import_p384(
        returned_kid,
        Kem::P384Sha384,
        sk,
        pk,
        vec![
            SymmetricSuite::new(Kdf::HkdfSha384, Aead::Aes256Gcm),
            SymmetricSuite::new(Kdf::HkdfSha256, Aead::Aes128Gcm),
            SymmetricSuite::new(Kdf::HkdfSha256, Aead::ChaCha20Poly1305),
        ],
    )?;

    cache.insert(kid, (config.clone(), token.clone())).await;
    Ok((config, token))
}

/// Copies headers from the encapsulated request and logs them.
///
/// NOTE(review): the `unwrap`s below panic if an inner header name/value is
/// not valid UTF-8 or not a valid HTTP header — consider skipping bad fields.
fn get_headers_from_request(bin_request: &Message) -> HeaderMap {
    info!("Inner request headers");
    let mut headers = HeaderMap::new();
    for field in bin_request.header().fields() {
        info!(
            " {}: {}",
            std::str::from_utf8(field.name()).unwrap(),
            std::str::from_utf8(field.value()).unwrap()
        );

        headers.append(
            HeaderName::from_bytes(field.name()).unwrap(),
            HeaderValue::from_bytes(field.value()).unwrap(),
        );
    }
    headers
}

/// Decapsulates an OHTTP request, forwards the inner request to `target`, and
/// returns the upstream response together with the `ServerResponse` that the
/// caller uses to encapsulate the reply stream.
///
/// `target_path`, when present (taken from the outer request), overrides the
/// path carried inside the encapsulated request. `_mode` is currently unused.
async fn generate_reply(
    ohttp: &OhttpServer,
    inject_headers: HeaderMap,
    enc_request: &[u8],
    target: Url,
    target_path: Option<&HeaderValue>,
    _mode: Mode,
) -> Res<(Response, ServerResponse)> {
    let (request, server_response) = ohttp.decapsulate(enc_request)?;
    let bin_request = Message::read_bhttp(&mut Cursor::new(&request[..]))?;

    // Default to GET when the inner request carries no method.
    let method: Method = if let Some(method_bytes) = bin_request.control().method() {
        Method::from_bytes(method_bytes)?
    } else {
        Method::GET
    };

    // Copy headers from the encapsulated request
    let mut headers = get_headers_from_request(&bin_request);

    // Inject additional headers from the outer request
    if !inject_headers.is_empty() {
        info!("Appending injected headers");
        for (key, value) in inject_headers {
            if let Some(key) = key {
                info!(" {}: {}", key.as_str(), value.to_str().unwrap());
                headers.append(key, value);
            }
        }
    };

    let mut t = target;

    // Set resource path to either the one provided in the outer request header
    // If none provided, use the path set by the client
    if let Some(path_bytes) = target_path {
        if let Ok(path_str) = std::str::from_utf8(path_bytes.as_bytes()) {
            t.set_path(path_str);
        }
    } else if let Some(path_bytes) = bin_request.control().path() {
        if let Ok(path_str) = std::str::from_utf8(path_bytes) {
            t.set_path(path_str);
        }
    }

    // Forward the inner request; a non-success upstream status becomes an Err
    // via `error_for_status`.
    let client = reqwest::ClientBuilder::new().build()?;
    let response = client
        .request(method, t)
        .headers(headers)
        .body(bin_request.content().to_vec())
        .send()
        .await?
        .error_for_status()?;

    Ok((response, server_response))
}

// Compute the set of headers that need to be injected into the inner request:
// the outer-header names in `keys` that are present in `headers`. Names that
// are not valid header names are silently skipped.
fn compute_injected_headers(headers: &HeaderMap, keys: Vec) -> HeaderMap {
    let mut result = HeaderMap::new();
    for key in keys {
        if let Ok(header_name) = HeaderName::try_from(key) {
            if let Some(value) = headers.get(&header_name) {
                result.insert(header_name, value.clone());
            }
        }
    }
    result
}

/// Handles POST /score: selects the key configuration by KID (first byte of
/// the body), decapsulates the request, forwards it to the configured target,
/// and streams back the encapsulated response.
#[instrument(skip(headers, body, args), fields(version = %VERSION))]
async fn score(
    headers: warp::hyper::HeaderMap,
    body: warp::hyper::body::Bytes,
    args: Arc,
    x_ms_request_id: Uuid,
) -> Result {
    let target = args.target.clone();
    info!("Received encapsulated score request for target {}", target);

    info!("Request headers length = {}", headers.len());
    // The client opts in to receiving the MAA token by sending this header.
    let return_token = headers.contains_key("x-attestation-token");

    // The KID is normally the first byte of the request
    let kid = match body.first().copied() {
        None => {
            let error_msg = "No key found in request.";
            error!("{error_msg}");
            return Ok(warp::http::Response::builder()
                .status(500)
                .body(Body::from(error_msg.as_bytes())));
        }
        Some(kid) => kid,
    };
    let maa_url = args.maa_url.clone().unwrap_or(DEFAULT_MAA_URL.to_string());
    let kms_url = args.kms_url.clone().unwrap_or(DEFAULT_KMS_URL.to_string());
    let (ohttp, token) = match load_config(&maa_url, &kms_url, kid).await {
        Err(e) => {
            let error_msg = "Failed to get or load OHTTP configuration.";
            error!("{error_msg} {e}");
            return Ok(warp::http::Response::builder()
                .status(500)
                .body(Body::from(error_msg.as_bytes())));
        }
        Ok((config, token)) => match OhttpServer::new(config) {
            Ok(server) => (server, token),
            Err(e) => {
                let error_msg = "Failed to create OHTTP server from config.";
                error!("{error_msg} {e}");
                return Ok(warp::http::Response::builder()
                    .status(500)
                    .body(Body::from(error_msg.as_bytes())));
            }
        },
    };

    let inject_request_headers =
        args.inject_request_headers.clone();
    info!(
        "Request inject headers length = {}",
        inject_request_headers.len()
    );
    for key in &inject_request_headers {
        info!(" {}", key);
    }

    // Resolve which outer headers will be copied into the inner request.
    let inject_headers = compute_injected_headers(&headers, inject_request_headers);
    info!("Injected headers length = {}", inject_headers.len());
    for (key, value) in &inject_headers {
        info!(" {}: {}", key, value.to_str().unwrap());
    }

    // Optional outer header that overrides the inner request path.
    let target_path = headers.get("enginetarget");
    let mode = args.mode();
    let (response, server_response) =
        match generate_reply(&ohttp, inject_headers, &body[..], target, target_path, mode).await {
            Ok(s) => s,
            Err(e) => {
                error!(e);

                // OHTTP decapsulation failures map to 422; anything else 400.
                if let Ok(oe) = e.downcast::<::ohttp::Error>() {
                    return Ok(warp::http::Response::builder()
                        .status(422)
                        .body(Body::from(format!("Error: {oe:?}"))));
                }

                let error_msg = "Request error.";
                error!("{error_msg}");
                return Ok(warp::http::Response::builder()
                    .status(400)
                    .body(Body::from(error_msg.as_bytes())));
            }
        };

    let mut builder =
        warp::http::Response::builder().header("Content-Type", "message/ohttp-chunked-res");

    // Add HTTP header with MAA token, for client auditing.
    if return_token {
        builder = builder.header(
            HeaderName::from_static("x-attestation-token"),
            token.clone(),
        );
    }

    // Move headers from the inner response into the outer response
    // (content-type/content-length are filtered: the encapsulated body has
    // its own framing).
    info!("Response headers:");
    for (key, value) in response.headers() {
        if !FILTERED_RESPONSE_HEADERS
            .iter()
            .any(|h| h.eq_ignore_ascii_case(key.as_str()))
        {
            info!(
                " {}: {}",
                key,
                std::str::from_utf8(value.as_bytes()).unwrap()
            );
            builder = builder.header(key.as_str(), value.as_bytes());
        }
    }

    // Lazily pull chunks from the upstream response; a read error or EOF ends
    // the stream (errors are not surfaced to the client here).
    let stream = Box::pin(unfold(response, |mut response| async move {
        match response.chunk().await {
            Ok(Some(chunk)) => Some((Ok::, ohttp::Error>(chunk.to_vec()), response)),
            _ => None,
        }
    }));

    // Encapsulate each chunk for the OHTTP client.
    let stream = server_response.encapsulate_stream(stream);
    Ok(builder.body(Body::wrap_stream(stream)))
}

/// Handles GET /discover: returns the hex-encoded key configuration list.
/// Only enabled with `--local-key`; returns 404 otherwise.
async fn discover(args: Arc) -> Result {
    let kms_url = &args.kms_url.clone().unwrap_or(DEFAULT_KMS_URL.to_string());
    let maa_url = &args.maa_url.clone().unwrap_or(DEFAULT_MAA_URL.to_string());

    // The discovery endpoint is only enabled for local testing
    if !args.local_key {
        return Ok(warp::http::Response::builder()
            .status(404)
            .body(Body::from(&b"Not found"[..])));
    }

    // With --local-key, KID 0 was seeded into the cache at startup, so this
    // lookup never reaches the KMS.
    match load_config(maa_url, kms_url, 0).await {
        Ok((config, _)) => match KeyConfig::encode_list(&[config]) {
            Ok(list) => {
                let hex = hex::encode(list);
                trace!("Discover config: {}", hex);

                Ok(warp::http::Response::builder()
                    .status(200)
                    .body(Vec::from(hex).into()))
            }
            Err(e) => {
                error!("{e}");
                Ok(warp::http::Response::builder().status(500).body(Body::from(
                    &b"Invalid key configuration (check KeyConfig written to initial cache)"[..],
                )))
            }
        },
        Err(e) => {
            error!(e);
            Ok(warp::http::Response::builder().status(500).body(Body::from(
                &b"KID 0 missing from cache (should be impossible with local keying)"[..],
            )))
        }
    }
}

/// Entry point: installs a JSON tracing subscriber, optionally seeds a local
/// key (KID 0) for testing, and serves POST /score and GET /discover.
#[tokio::main]
async fn main() -> Res<()> {
    // Build a simple subscriber that outputs to stdout
    let
subscriber = FmtSubscriber::builder() + .with_env_filter(EnvFilter::from_default_env()) + .with_file(true) + .with_line_number(true) + .with_thread_ids(true) + .with_span_events(FmtSpan::NEW) + .json() + .finish(); + + // Set the subscriber as global default + tracing::subscriber::set_global_default(subscriber).expect("setting default subscriber failed"); + ::ohttp::init(); + + let args = Args::parse(); + let address = args.address; + + // Generate a fresh key for local testing. KID is set to 0. + if args.local_key { + let config = KeyConfig::new( + 0, + Kem::P384Sha384, + vec![ + SymmetricSuite::new(Kdf::HkdfSha384, Aead::Aes256Gcm), + SymmetricSuite::new(Kdf::HkdfSha256, Aead::Aes128Gcm), + SymmetricSuite::new(Kdf::HkdfSha256, Aead::ChaCha20Poly1305), + ], + ) + .map_err(|e| { + error!("{e}"); + e + })?; + cache.insert(0, (config, String::new())).await; + } + + let argsc = Arc::new(args); + let args1 = Arc::clone(&argsc); + let score = warp::post() + .and(warp::path::path("score")) + .and(warp::path::end()) + .and(warp::header::headers_cloned()) + .and(warp::body::bytes()) + .and(warp::any().map(move || Arc::clone(&args1))) + .and(warp::any().map(Uuid::new_v4)) + .and_then(score); + + let args2 = Arc::clone(&argsc); + let discover = warp::get() + .and(warp::path("discover")) + .and(warp::path::end()) + .and(warp::any().map(move || Arc::clone(&args2))) + .and_then(discover); + + let routes = score.or(discover); + warp::serve(routes).run(address).await; + + Ok(()) +} diff --git a/scripts/service_wait.sh b/scripts/service_wait.sh new file mode 100755 index 0000000..9b1d7dd --- /dev/null +++ b/scripts/service_wait.sh @@ -0,0 +1,35 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +#!/bin/bash + +SERVICE_HOST=$1 + +status_code=$(curl -k -o /dev/null -s -w "%{http_code}\n" -I "https://$SERVICE_HOST") + +TIMEOUT=${TIMEOUT:-60} +elapsed_time=0 + +echo "Waiting $TIMEOUT seconds for $SERVICE_HOST to respond..." 
+ +while [ $elapsed_time -lt $TIMEOUT ]; do + + status_code=$(curl -k -o /dev/null -s -w "%{http_code}\n" -I "https://$SERVICE_HOST" --http0.9) + if [ $status_code -ne 000 ]; then + break + fi + + if curl $SERVICE_HOST --http0.9 -o /dev/null -s --max-time 3; then + break + fi + + sleep 1 + elapsed_time=$((elapsed_time + 1)) +done + +if [ $elapsed_time -ge $TIMEOUT ]; then + echo "Timeout reached. Service did not respond." + exit 1 +fi + +echo "$SERVICE_HOST is up and running." \ No newline at end of file